These changes are the raw update to the linux-4.4.6-rt14 kernel sources.
[kvmfornfv.git] / kernel/drivers/dma/dw/core.c
index 1022c2e..4f099ea 100644
@@ -163,7 +163,7 @@ static void dwc_initialize(struct dw_dma_chan *dwc)
 
 /*----------------------------------------------------------------------*/
 
-static inline unsigned int dwc_fast_fls(unsigned long long v)
+static inline unsigned int dwc_fast_ffs(unsigned long long v)
 {
        /*
         * We can be a lot more clever here, but this should take care
@@ -536,16 +536,17 @@ EXPORT_SYMBOL(dw_dma_get_dst_addr);
 
 /* Called with dwc->lock held and all DMAC interrupts disabled */
 static void dwc_handle_cyclic(struct dw_dma *dw, struct dw_dma_chan *dwc,
-               u32 status_err, u32 status_xfer)
+               u32 status_block, u32 status_err, u32 status_xfer)
 {
        unsigned long flags;
 
-       if (dwc->mask) {
+       if (status_block & dwc->mask) {
                void (*callback)(void *param);
                void *callback_param;
 
                dev_vdbg(chan2dev(&dwc->chan), "new cyclic period llp 0x%08x\n",
                                channel_readl(dwc, LLP));
+               dma_writel(dw, CLEAR.BLOCK, dwc->mask);
 
                callback = dwc->cdesc->period_callback;
                callback_param = dwc->cdesc->period_callback_param;
@@ -577,6 +578,7 @@ static void dwc_handle_cyclic(struct dw_dma *dw, struct dw_dma_chan *dwc,
                channel_writel(dwc, CTL_LO, 0);
                channel_writel(dwc, CTL_HI, 0);
 
+               dma_writel(dw, CLEAR.BLOCK, dwc->mask);
                dma_writel(dw, CLEAR.ERROR, dwc->mask);
                dma_writel(dw, CLEAR.XFER, dwc->mask);
 
@@ -585,6 +587,9 @@ static void dwc_handle_cyclic(struct dw_dma *dw, struct dw_dma_chan *dwc,
 
                spin_unlock_irqrestore(&dwc->lock, flags);
        }
+
+       /* Re-enable interrupts */
+       channel_set_bit(dw, MASK.BLOCK, dwc->mask);
 }
 
 /* ------------------------------------------------------------------------- */
@@ -593,10 +598,12 @@ static void dw_dma_tasklet(unsigned long data)
 {
        struct dw_dma *dw = (struct dw_dma *)data;
        struct dw_dma_chan *dwc;
+       u32 status_block;
        u32 status_xfer;
        u32 status_err;
        int i;
 
+       status_block = dma_readl(dw, RAW.BLOCK);
        status_xfer = dma_readl(dw, RAW.XFER);
        status_err = dma_readl(dw, RAW.ERROR);
 
@@ -605,16 +612,15 @@ static void dw_dma_tasklet(unsigned long data)
        for (i = 0; i < dw->dma.chancnt; i++) {
                dwc = &dw->chan[i];
                if (test_bit(DW_DMA_IS_CYCLIC, &dwc->flags))
-                       dwc_handle_cyclic(dw, dwc, status_err, status_xfer);
+                       dwc_handle_cyclic(dw, dwc, status_block, status_err,
+                                       status_xfer);
                else if (status_err & (1 << i))
                        dwc_handle_error(dw, dwc);
                else if (status_xfer & (1 << i))
                        dwc_scan_descriptors(dw, dwc);
        }
 
-       /*
-        * Re-enable interrupts.
-        */
+       /* Re-enable interrupts */
        channel_set_bit(dw, MASK.XFER, dw->all_chan_mask);
        channel_set_bit(dw, MASK.ERROR, dw->all_chan_mask);
 }
@@ -635,6 +641,7 @@ static irqreturn_t dw_dma_interrupt(int irq, void *dev_id)
         * softirq handler.
         */
        channel_clear_bit(dw, MASK.XFER, dw->all_chan_mask);
+       channel_clear_bit(dw, MASK.BLOCK, dw->all_chan_mask);
        channel_clear_bit(dw, MASK.ERROR, dw->all_chan_mask);
 
        status = dma_readl(dw, STATUS_INT);
@@ -645,6 +652,7 @@ static irqreturn_t dw_dma_interrupt(int irq, void *dev_id)
 
                /* Try to recover */
                channel_clear_bit(dw, MASK.XFER, (1 << 8) - 1);
+               channel_clear_bit(dw, MASK.BLOCK, (1 << 8) - 1);
                channel_clear_bit(dw, MASK.SRC_TRAN, (1 << 8) - 1);
                channel_clear_bit(dw, MASK.DST_TRAN, (1 << 8) - 1);
                channel_clear_bit(dw, MASK.ERROR, (1 << 8) - 1);
@@ -712,7 +720,7 @@ dwc_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
                           dw->data_width[dwc->dst_master]);
 
        src_width = dst_width = min_t(unsigned int, data_width,
-                                     dwc_fast_fls(src | dest | len));
+                                     dwc_fast_ffs(src | dest | len));
 
        ctllo = DWC_DEFAULT_CTLLO(chan)
                        | DWC_CTLL_DST_WIDTH(dst_width)
@@ -791,7 +799,7 @@ dwc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
 
        switch (direction) {
        case DMA_MEM_TO_DEV:
-               reg_width = __fls(sconfig->dst_addr_width);
+               reg_width = __ffs(sconfig->dst_addr_width);
                reg = sconfig->dst_addr;
                ctllo = (DWC_DEFAULT_CTLLO(chan)
                                | DWC_CTLL_DST_WIDTH(reg_width)
@@ -811,7 +819,7 @@ dwc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
                        len = sg_dma_len(sg);
 
                        mem_width = min_t(unsigned int,
-                                         data_width, dwc_fast_fls(mem | len));
+                                         data_width, dwc_fast_ffs(mem | len));
 
 slave_sg_todev_fill_desc:
                        desc = dwc_desc_get(dwc);
@@ -848,7 +856,7 @@ slave_sg_todev_fill_desc:
                }
                break;
        case DMA_DEV_TO_MEM:
-               reg_width = __fls(sconfig->src_addr_width);
+               reg_width = __ffs(sconfig->src_addr_width);
                reg = sconfig->src_addr;
                ctllo = (DWC_DEFAULT_CTLLO(chan)
                                | DWC_CTLL_SRC_WIDTH(reg_width)
@@ -868,7 +876,7 @@ slave_sg_todev_fill_desc:
                        len = sg_dma_len(sg);
 
                        mem_width = min_t(unsigned int,
-                                         data_width, dwc_fast_fls(mem | len));
+                                         data_width, dwc_fast_ffs(mem | len));
 
 slave_sg_fromdev_fill_desc:
                        desc = dwc_desc_get(dwc);
@@ -1111,6 +1119,7 @@ static void dw_dma_off(struct dw_dma *dw)
        dma_writel(dw, CFG, 0);
 
        channel_clear_bit(dw, MASK.XFER, dw->all_chan_mask);
+       channel_clear_bit(dw, MASK.BLOCK, dw->all_chan_mask);
        channel_clear_bit(dw, MASK.SRC_TRAN, dw->all_chan_mask);
        channel_clear_bit(dw, MASK.DST_TRAN, dw->all_chan_mask);
        channel_clear_bit(dw, MASK.ERROR, dw->all_chan_mask);
@@ -1216,6 +1225,7 @@ static void dwc_free_chan_resources(struct dma_chan *chan)
 
        /* Disable interrupts */
        channel_clear_bit(dw, MASK.XFER, dwc->mask);
+       channel_clear_bit(dw, MASK.BLOCK, dwc->mask);
        channel_clear_bit(dw, MASK.ERROR, dwc->mask);
 
        spin_unlock_irqrestore(&dwc->lock, flags);
@@ -1245,7 +1255,7 @@ static void dwc_free_chan_resources(struct dma_chan *chan)
 int dw_dma_cyclic_start(struct dma_chan *chan)
 {
        struct dw_dma_chan      *dwc = to_dw_dma_chan(chan);
-       struct dw_dma           *dw = to_dw_dma(dwc->chan.device);
+       struct dw_dma           *dw = to_dw_dma(chan->device);
        unsigned long           flags;
 
        if (!test_bit(DW_DMA_IS_CYCLIC, &dwc->flags)) {
@@ -1255,25 +1265,10 @@ int dw_dma_cyclic_start(struct dma_chan *chan)
 
        spin_lock_irqsave(&dwc->lock, flags);
 
-       /* Assert channel is idle */
-       if (dma_readl(dw, CH_EN) & dwc->mask) {
-               dev_err(chan2dev(&dwc->chan),
-                       "%s: BUG: Attempted to start non-idle channel\n",
-                       __func__);
-               dwc_dump_chan_regs(dwc);
-               spin_unlock_irqrestore(&dwc->lock, flags);
-               return -EBUSY;
-       }
-
-       dma_writel(dw, CLEAR.ERROR, dwc->mask);
-       dma_writel(dw, CLEAR.XFER, dwc->mask);
+       /* Enable interrupts to perform cyclic transfer */
+       channel_set_bit(dw, MASK.BLOCK, dwc->mask);
 
-       /* Setup DMAC channel registers */
-       channel_writel(dwc, LLP, dwc->cdesc->desc[0]->txd.phys);
-       channel_writel(dwc, CTL_LO, DWC_CTLL_LLP_D_EN | DWC_CTLL_LLP_S_EN);
-       channel_writel(dwc, CTL_HI, 0);
-
-       channel_set_bit(dw, CH_EN, dwc->mask);
+       dwc_dostart(dwc, dwc->cdesc->desc[0]);
 
        spin_unlock_irqrestore(&dwc->lock, flags);
 
@@ -1479,6 +1474,7 @@ void dw_dma_cyclic_free(struct dma_chan *chan)
 
        dwc_chan_disable(dw, dwc);
 
+       dma_writel(dw, CLEAR.BLOCK, dwc->mask);
        dma_writel(dw, CLEAR.ERROR, dwc->mask);
        dma_writel(dw, CLEAR.XFER, dwc->mask);
 
@@ -1499,9 +1495,8 @@ EXPORT_SYMBOL(dw_dma_cyclic_free);
 int dw_dma_probe(struct dw_dma_chip *chip, struct dw_dma_platform_data *pdata)
 {
        struct dw_dma           *dw;
-       bool                    autocfg;
+       bool                    autocfg = false;
        unsigned int            dw_params;
-       unsigned int            nr_channels;
        unsigned int            max_blk_size = 0;
        int                     err;
        int                     i;
@@ -1515,33 +1510,42 @@ int dw_dma_probe(struct dw_dma_chip *chip, struct dw_dma_platform_data *pdata)
 
        pm_runtime_get_sync(chip->dev);
 
-       dw_params = dma_read_byaddr(chip->regs, DW_PARAMS);
-       autocfg = dw_params >> DW_PARAMS_EN & 0x1;
+       if (!pdata) {
+               dw_params = dma_read_byaddr(chip->regs, DW_PARAMS);
+               dev_dbg(chip->dev, "DW_PARAMS: 0x%08x\n", dw_params);
 
-       dev_dbg(chip->dev, "DW_PARAMS: 0x%08x\n", dw_params);
+               autocfg = dw_params >> DW_PARAMS_EN & 1;
+               if (!autocfg) {
+                       err = -EINVAL;
+                       goto err_pdata;
+               }
 
-       if (!pdata && autocfg) {
                pdata = devm_kzalloc(chip->dev, sizeof(*pdata), GFP_KERNEL);
                if (!pdata) {
                        err = -ENOMEM;
                        goto err_pdata;
                }
 
+               /* Get hardware configuration parameters */
+               pdata->nr_channels = (dw_params >> DW_PARAMS_NR_CHAN & 7) + 1;
+               pdata->nr_masters = (dw_params >> DW_PARAMS_NR_MASTER & 3) + 1;
+               for (i = 0; i < pdata->nr_masters; i++) {
+                       pdata->data_width[i] =
+                               (dw_params >> DW_PARAMS_DATA_WIDTH(i) & 3) + 2;
+               }
+               max_blk_size = dma_readl(dw, MAX_BLK_SIZE);
+
                /* Fill platform data with the default values */
                pdata->is_private = true;
+               pdata->is_memcpy = true;
                pdata->chan_allocation_order = CHAN_ALLOCATION_ASCENDING;
                pdata->chan_priority = CHAN_PRIORITY_ASCENDING;
-       } else if (!pdata || pdata->nr_channels > DW_DMA_MAX_NR_CHANNELS) {
+       } else if (pdata->nr_channels > DW_DMA_MAX_NR_CHANNELS) {
                err = -EINVAL;
                goto err_pdata;
        }
 
-       if (autocfg)
-               nr_channels = (dw_params >> DW_PARAMS_NR_CHAN & 0x7) + 1;
-       else
-               nr_channels = pdata->nr_channels;
-
-       dw->chan = devm_kcalloc(chip->dev, nr_channels, sizeof(*dw->chan),
+       dw->chan = devm_kcalloc(chip->dev, pdata->nr_channels, sizeof(*dw->chan),
                                GFP_KERNEL);
        if (!dw->chan) {
                err = -ENOMEM;
@@ -1549,29 +1553,16 @@ int dw_dma_probe(struct dw_dma_chip *chip, struct dw_dma_platform_data *pdata)
        }
 
        /* Get hardware configuration parameters */
-       if (autocfg) {
-               max_blk_size = dma_readl(dw, MAX_BLK_SIZE);
-
-               dw->nr_masters = (dw_params >> DW_PARAMS_NR_MASTER & 3) + 1;
-               for (i = 0; i < dw->nr_masters; i++) {
-                       dw->data_width[i] =
-                               (dw_params >> DW_PARAMS_DATA_WIDTH(i) & 3) + 2;
-               }
-       } else {
-               dw->nr_masters = pdata->nr_masters;
-               for (i = 0; i < dw->nr_masters; i++)
-                       dw->data_width[i] = pdata->data_width[i];
-       }
+       dw->nr_masters = pdata->nr_masters;
+       for (i = 0; i < dw->nr_masters; i++)
+               dw->data_width[i] = pdata->data_width[i];
 
        /* Calculate all channel mask before DMA setup */
-       dw->all_chan_mask = (1 << nr_channels) - 1;
+       dw->all_chan_mask = (1 << pdata->nr_channels) - 1;
 
        /* Force dma off, just in case */
        dw_dma_off(dw);
 
-       /* Disable BLOCK interrupts as well */
-       channel_clear_bit(dw, MASK.BLOCK, dw->all_chan_mask);
-
        /* Create a pool of consistent memory blocks for hardware descriptors */
        dw->desc_pool = dmam_pool_create("dw_dmac_desc_pool", chip->dev,
                                         sizeof(struct dw_desc), 4, 0);
@@ -1589,9 +1580,8 @@ int dw_dma_probe(struct dw_dma_chip *chip, struct dw_dma_platform_data *pdata)
                goto err_pdata;
 
        INIT_LIST_HEAD(&dw->dma.channels);
-       for (i = 0; i < nr_channels; i++) {
+       for (i = 0; i < pdata->nr_channels; i++) {
                struct dw_dma_chan      *dwc = &dw->chan[i];
-               int                     r = nr_channels - i - 1;
 
                dwc->chan.device = &dw->dma;
                dma_cookie_init(&dwc->chan);
@@ -1603,7 +1593,7 @@ int dw_dma_probe(struct dw_dma_chip *chip, struct dw_dma_platform_data *pdata)
 
                /* 7 is highest priority & 0 is lowest. */
                if (pdata->chan_priority == CHAN_PRIORITY_ASCENDING)
-                       dwc->priority = r;
+                       dwc->priority = pdata->nr_channels - i - 1;
                else
                        dwc->priority = i;
 
@@ -1622,6 +1612,7 @@ int dw_dma_probe(struct dw_dma_chip *chip, struct dw_dma_platform_data *pdata)
                /* Hardware configuration */
                if (autocfg) {
                        unsigned int dwc_params;
+                       unsigned int r = DW_DMA_MAX_NR_CHANNELS - i - 1;
                        void __iomem *addr = chip->regs + r * sizeof(u32);
 
                        dwc_params = dma_read_byaddr(addr, DWC_PARAMS);
@@ -1656,10 +1647,13 @@ int dw_dma_probe(struct dw_dma_chip *chip, struct dw_dma_platform_data *pdata)
        dma_writel(dw, CLEAR.DST_TRAN, dw->all_chan_mask);
        dma_writel(dw, CLEAR.ERROR, dw->all_chan_mask);
 
-       dma_cap_set(DMA_MEMCPY, dw->dma.cap_mask);
+       /* Set capabilities */
        dma_cap_set(DMA_SLAVE, dw->dma.cap_mask);
        if (pdata->is_private)
                dma_cap_set(DMA_PRIVATE, dw->dma.cap_mask);
+       if (pdata->is_memcpy)
+               dma_cap_set(DMA_MEMCPY, dw->dma.cap_mask);
+
        dw->dma.dev = chip->dev;
        dw->dma.device_alloc_chan_resources = dwc_alloc_chan_resources;
        dw->dma.device_free_chan_resources = dwc_free_chan_resources;
@@ -1687,7 +1681,7 @@ int dw_dma_probe(struct dw_dma_chip *chip, struct dw_dma_platform_data *pdata)
                goto err_dma_register;
 
        dev_info(chip->dev, "DesignWare DMA Controller, %d channels\n",
-                nr_channels);
+                pdata->nr_channels);
 
        pm_runtime_put_sync_suspend(chip->dev);
 
@@ -1746,4 +1740,4 @@ EXPORT_SYMBOL_GPL(dw_dma_enable);
 MODULE_LICENSE("GPL v2");
 MODULE_DESCRIPTION("Synopsys DesignWare DMA Controller core driver");
 MODULE_AUTHOR("Haavard Skinnemoen (Atmel)");
-MODULE_AUTHOR("Viresh Kumar <viresh.linux@gmail.com>");
+MODULE_AUTHOR("Viresh Kumar <vireshk@kernel.org>");