author    Matthew Murrian <matthew.murrian@goctsi.com>   2020-11-04 12:30:05 +0530
committer Vinod Koul <vkoul@kernel.org>                  2020-11-09 17:54:22 +0530
commit    c8ae7932997d0cc92d016829138074c7520248e5 (patch)
tree      7bfa6a014238dd9274611e72c3fcf01de23e3cb2 /drivers/dma
parent    0ba2df09f1500d3f27398a3382b86d39c3e6abe2 (diff)
dmaengine: xilinx_dma: Fix usage of xilinx_aximcdma_tx_segment
Several code sections incorrectly use struct xilinx_axidma_tx_segment instead of
struct xilinx_aximcdma_tx_segment when operating as Multichannel DMA. Because the
two structures have similar layouts, the mix-up happens to work, but the usage is
still incorrect.

Fixes: 6ccd692bfb7f ("dmaengine: xilinx_dma: Add Xilinx AXI MCDMA Engine driver support")
Signed-off-by: Matthew Murrian <matthew.murrian@goctsi.com>
Signed-off-by: Radhey Shyam Pandey <radhey.shyam.pandey@xilinx.com>
Link: https://lore.kernel.org/r/1604473206-32573-3-git-send-email-radhey.shyam.pandey@xilinx.com
Signed-off-by: Vinod Koul <vkoul@kernel.org>
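The clearest place to see why the mix-up "just works" until it does not is the sizeof() stride corrected in xilinx_dma_alloc_chan_resources(): the AXI DMA and AXI MCDMA segment structures are shaped similarly enough that the wrong type compiles and usually behaves, but as soon as their sizes differ, every per-descriptor physical address drifts. The standalone sketch below uses made-up stand-in structures and field counts (not the in-tree definitions) purely to demonstrate that stride arithmetic; it is an assumption-laden illustration, not driver code.

/*
 * Illustrative sketch only: trimmed stand-ins for the two segment types,
 * with assumed field counts.  Build with: gcc -Wall -std=c99 sketch.c
 */
#include <stdint.h>
#include <stdio.h>

struct axidma_seg_sketch {        /* stand-in for struct xilinx_axidma_tx_segment */
        uint32_t hw[13];          /* hw descriptor words (count assumed) */
        uint64_t phys;            /* physical address of this descriptor */
};

struct aximcdma_seg_sketch {      /* stand-in for struct xilinx_aximcdma_tx_segment */
        uint32_t hw[16];          /* larger hw descriptor (count assumed) */
        uint64_t phys;
};

int main(void)
{
        uint64_t seg_p = 0x1000;  /* pretend base of the DMA descriptor pool */

        for (int i = 0; i < 3; i++) {
                /* buggy stride: sizeof(*chan->seg_v), i.e. the AXI DMA segment size */
                uint64_t wrong = seg_p + sizeof(struct axidma_seg_sketch) * i;
                /* fixed stride: sizeof(*chan->seg_mv), the segment type actually allocated */
                uint64_t right = seg_p + sizeof(struct aximcdma_seg_sketch) * i;

                printf("seg %d: buggy phys 0x%llx, correct phys 0x%llx\n",
                       i, (unsigned long long)wrong, (unsigned long long)right);
        }
        return 0;
}

With these stand-in sizes the second descriptor is already several bytes off; in the driver the phys values feed the hardware descriptor chain, so a size mismatch would hand the engine misaligned descriptor addresses.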
Diffstat (limited to 'drivers/dma')
-rw-r--r--   drivers/dma/xilinx/xilinx_dma.c | 29
1 file changed, 24 insertions(+), 5 deletions(-)
diff --git a/drivers/dma/xilinx/xilinx_dma.c b/drivers/dma/xilinx/xilinx_dma.c
index d858d540b88a..9fd924cf2337 100644
--- a/drivers/dma/xilinx/xilinx_dma.c
+++ b/drivers/dma/xilinx/xilinx_dma.c
@@ -948,8 +948,10 @@ static u32 xilinx_dma_get_residue(struct xilinx_dma_chan *chan,
 {
         struct xilinx_cdma_tx_segment *cdma_seg;
         struct xilinx_axidma_tx_segment *axidma_seg;
+        struct xilinx_aximcdma_tx_segment *aximcdma_seg;
         struct xilinx_cdma_desc_hw *cdma_hw;
         struct xilinx_axidma_desc_hw *axidma_hw;
+        struct xilinx_aximcdma_desc_hw *aximcdma_hw;
         struct list_head *entry;
         u32 residue = 0;
 
@@ -961,13 +963,23 @@ static u32 xilinx_dma_get_residue(struct xilinx_dma_chan *chan,
                         cdma_hw = &cdma_seg->hw;
                         residue += (cdma_hw->control - cdma_hw->status) &
                                    chan->xdev->max_buffer_len;
-                } else {
+                } else if (chan->xdev->dma_config->dmatype ==
+                           XDMA_TYPE_AXIDMA) {
                         axidma_seg = list_entry(entry,
                                                 struct xilinx_axidma_tx_segment,
                                                 node);
                         axidma_hw = &axidma_seg->hw;
                         residue += (axidma_hw->control - axidma_hw->status) &
                                    chan->xdev->max_buffer_len;
+                } else {
+                        aximcdma_seg =
+                                list_entry(entry,
+                                           struct xilinx_aximcdma_tx_segment,
+                                           node);
+                        aximcdma_hw = &aximcdma_seg->hw;
+                        residue +=
+                                (aximcdma_hw->control - aximcdma_hw->status) &
+                                chan->xdev->max_buffer_len;
                 }
         }
 
@@ -1135,7 +1147,7 @@ static int xilinx_dma_alloc_chan_resources(struct dma_chan *dchan)
                                 upper_32_bits(chan->seg_p + sizeof(*chan->seg_mv) *
                                         ((i + 1) % XILINX_DMA_NUM_DESCS));
                         chan->seg_mv[i].phys = chan->seg_p +
-                                sizeof(*chan->seg_v) * i;
+                                sizeof(*chan->seg_mv) * i;
                         list_add_tail(&chan->seg_mv[i].node,
                                       &chan->free_seg_list);
                 }
@@ -1560,7 +1572,7 @@ static void xilinx_dma_start_transfer(struct xilinx_dma_chan *chan)
 static void xilinx_mcdma_start_transfer(struct xilinx_dma_chan *chan)
 {
         struct xilinx_dma_tx_descriptor *head_desc, *tail_desc;
-        struct xilinx_axidma_tx_segment *tail_segment;
+        struct xilinx_aximcdma_tx_segment *tail_segment;
         u32 reg;
 
         /*
@@ -1582,7 +1594,7 @@ static void xilinx_mcdma_start_transfer(struct xilinx_dma_chan *chan)
         tail_desc = list_last_entry(&chan->pending_list,
                                     struct xilinx_dma_tx_descriptor, node);
         tail_segment = list_last_entry(&tail_desc->segments,
-                                       struct xilinx_axidma_tx_segment, node);
+                                       struct xilinx_aximcdma_tx_segment, node);
 
         reg = dma_ctrl_read(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest));
 
@@ -1864,6 +1876,7 @@ static void append_desc_queue(struct xilinx_dma_chan *chan,
         struct xilinx_vdma_tx_segment *tail_segment;
         struct xilinx_dma_tx_descriptor *tail_desc;
         struct xilinx_axidma_tx_segment *axidma_tail_segment;
+        struct xilinx_aximcdma_tx_segment *aximcdma_tail_segment;
         struct xilinx_cdma_tx_segment *cdma_tail_segment;
 
         if (list_empty(&chan->pending_list))
@@ -1885,11 +1898,17 @@ static void append_desc_queue(struct xilinx_dma_chan *chan,
                                                struct xilinx_cdma_tx_segment,
                                                node);
                 cdma_tail_segment->hw.next_desc = (u32)desc->async_tx.phys;
-        } else {
+        } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) {
                 axidma_tail_segment = list_last_entry(&tail_desc->segments,
                                                struct xilinx_axidma_tx_segment,
                                                node);
                 axidma_tail_segment->hw.next_desc = (u32)desc->async_tx.phys;
+        } else {
+                aximcdma_tail_segment =
+                        list_last_entry(&tail_desc->segments,
+                                        struct xilinx_aximcdma_tx_segment,
+                                        node);
+                aximcdma_tail_segment->hw.next_desc = (u32)desc->async_tx.phys;
         }
 
         /*