-rw-r--r--   drivers/dma/imx-dma.c          | 5
-rw-r--r--   drivers/dma/ioat/dma_v3.c      | 2
-rw-r--r--   drivers/dma/tegra20-apb-dma.c  | 8
3 files changed, 9 insertions, 6 deletions
diff --git a/drivers/dma/imx-dma.c b/drivers/dma/imx-dma.c
index dbf0e6f8de8a..a7dcf78b1ff8 100644
--- a/drivers/dma/imx-dma.c
+++ b/drivers/dma/imx-dma.c
@@ -684,9 +684,8 @@ static int imxdma_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
 			break;
 		}
 
-		imxdmac->hw_chaining = 1;
-		if (!imxdma_hw_chain(imxdmac))
-			return -EINVAL;
+		imxdmac->hw_chaining = 0;
+
 		imxdmac->ccr_from_device = (mode | IMX_DMA_TYPE_FIFO) |
 			((IMX_DMA_MEMSIZE_32 | IMX_DMA_TYPE_LINEAR) << 2) |
 			CCR_REN;
diff --git a/drivers/dma/ioat/dma_v3.c b/drivers/dma/ioat/dma_v3.c
index e5fc944de1f0..3e9d66920eb3 100644
--- a/drivers/dma/ioat/dma_v3.c
+++ b/drivers/dma/ioat/dma_v3.c
@@ -951,7 +951,7 @@ static int ioat_xor_val_self_test(struct ioatdma_device *device)
 			goto free_resources;
 		}
 	}
-	dma_sync_single_for_device(dev, dest_dma, PAGE_SIZE, DMA_TO_DEVICE);
+	dma_sync_single_for_device(dev, dest_dma, PAGE_SIZE, DMA_FROM_DEVICE);
 
 	/* skip validate if the capability is not present */
 	if (!dma_has_cap(DMA_XOR_VAL, dma_chan->device->cap_mask))
diff --git a/drivers/dma/tegra20-apb-dma.c b/drivers/dma/tegra20-apb-dma.c
index c39e61bc8172..3cad856fe67f 100644
--- a/drivers/dma/tegra20-apb-dma.c
+++ b/drivers/dma/tegra20-apb-dma.c
@@ -266,6 +266,7 @@ static struct tegra_dma_desc *tegra_dma_desc_get(
 		if (async_tx_test_ack(&dma_desc->txd)) {
 			list_del(&dma_desc->node);
 			spin_unlock_irqrestore(&tdc->lock, flags);
+			dma_desc->txd.flags = 0;
 			return dma_desc;
 		}
 	}
@@ -1050,7 +1051,9 @@ struct dma_async_tx_descriptor *tegra_dma_prep_dma_cyclic(
 					TEGRA_APBDMA_AHBSEQ_WRAP_SHIFT;
 	ahb_seq |= TEGRA_APBDMA_AHBSEQ_BUS_WIDTH_32;
 
-	csr |= TEGRA_APBDMA_CSR_FLOW | TEGRA_APBDMA_CSR_IE_EOC;
+	csr |= TEGRA_APBDMA_CSR_FLOW;
+	if (flags & DMA_PREP_INTERRUPT)
+		csr |= TEGRA_APBDMA_CSR_IE_EOC;
 	csr |= tdc->dma_sconfig.slave_id << TEGRA_APBDMA_CSR_REQ_SEL_SHIFT;
 
 	apb_seq |= TEGRA_APBDMA_APBSEQ_WRAP_WORD_1;
@@ -1095,7 +1098,8 @@ struct dma_async_tx_descriptor *tegra_dma_prep_dma_cyclic(
 		mem += len;
 	}
 	sg_req->last_sg = true;
-	dma_desc->txd.flags = 0;
+	if (flags & DMA_CTRL_ACK)
+		dma_desc->txd.flags = DMA_CTRL_ACK;
 
 	/*
 	 * Make sure that mode should not be conflicting with currently
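
The tegra20-apb-dma hunks make tegra_dma_prep_dma_cyclic() honor the caller's flags: the end-of-transfer interrupt (TEGRA_APBDMA_CSR_IE_EOC) is only enabled when DMA_PREP_INTERRUPT is passed, and DMA_CTRL_ACK is carried through instead of being cleared unconditionally. As a rough illustration only (not part of the patch), the sketch below shows where those flags come from on the client side; example_start_cyclic() and its buffer/period parameters are hypothetical placeholders.

/*
 * Illustrative only -- not from the patch.  A minimal dmaengine slave
 * client issuing a cyclic transfer; the function name and parameters
 * are hypothetical.
 */
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>

static int example_start_cyclic(struct dma_chan *chan, dma_addr_t buf,
				size_t buf_len, size_t period_len)
{
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;

	/*
	 * DMA_PREP_INTERRUPT requests a completion interrupt per period
	 * (IE_EOC on Tegra after this change); DMA_CTRL_ACK pre-acknowledges
	 * the descriptor so the provider may reclaim it when done.
	 */
	desc = dmaengine_prep_dma_cyclic(chan, buf, buf_len, period_len,
					 DMA_DEV_TO_MEM,
					 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -EINVAL;

	cookie = dmaengine_submit(desc);
	if (dma_submit_error(cookie))
		return -EINVAL;

	dma_async_issue_pending(chan);
	return 0;
}

For the ioat hunk, the rationale follows the DMA API rule that the direction passed to dma_sync_single_for_device() must match the direction used when the buffer was mapped; dest_dma is the destination page the engine writes into, hence DMA_FROM_DEVICE.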