static void zynqmp_dma_chan_desc_cleanup(struct zynqmp_dma_chan *chan)
{
struct zynqmp_dma_desc_sw *desc, *next;
+ unsigned long irqflags;
+
+ spin_lock_irqsave(&chan->lock, irqflags);
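+ /* Hold the channel lock while walking done_list; drop it around the client callback below */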
list_for_each_entry_safe(desc, next, &chan->done_list, node) {
dma_async_tx_callback callback;
void *callback_param;
callback = desc->async_tx.callback;
callback_param = desc->async_tx.callback_param;
if (callback) {
- spin_unlock(&chan->lock);
+ spin_unlock_irqrestore(&chan->lock, irqflags);
callback(callback_param);
- spin_lock(&chan->lock);
+ spin_lock_irqsave(&chan->lock, irqflags);
}
/* Run any dependencies, then free the descriptor */
zynqmp_dma_free_descriptor(chan, desc);
}
+
+ spin_unlock_irqrestore(&chan->lock, irqflags);
}
/**
 * zynqmp_dma_free_descriptors - Free channel descriptors
 * @chan: ZynqMP DMA channel pointer
 */
static void zynqmp_dma_free_descriptors(struct zynqmp_dma_chan *chan)
{
+ unsigned long irqflags;
+
+ spin_lock_irqsave(&chan->lock, irqflags);
zynqmp_dma_free_desc_list(chan, &chan->active_list);
zynqmp_dma_free_desc_list(chan, &chan->pending_list);
zynqmp_dma_free_desc_list(chan, &chan->done_list);
+ spin_unlock_irqrestore(&chan->lock, irqflags);
}
/**
 * zynqmp_dma_free_chan_resources - Free channel resources
 * @dchan: DMA channel pointer
 */
static void zynqmp_dma_free_chan_resources(struct dma_chan *dchan)
{
struct zynqmp_dma_chan *chan = to_chan(dchan);
- unsigned long irqflags;
- spin_lock_irqsave(&chan->lock, irqflags);
zynqmp_dma_free_descriptors(chan);
- spin_unlock_irqrestore(&chan->lock, irqflags);
dma_free_coherent(chan->dev,
(2 * ZYNQMP_DMA_DESC_SIZE(chan) * ZYNQMP_DMA_NUM_DESCS),
chan->desc_pool_v, chan->desc_pool_p);
}
/**
 * zynqmp_dma_reset - Reset the channel
 * @chan: ZynqMP DMA channel pointer
 */
static void zynqmp_dma_reset(struct zynqmp_dma_chan *chan)
{
+ unsigned long irqflags;
+
writel(ZYNQMP_DMA_IDS_DEFAULT_MASK, chan->regs + ZYNQMP_DMA_IDS);
+ spin_lock_irqsave(&chan->lock, irqflags);
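+ /* zynqmp_dma_complete_descriptor() updates the channel descriptor lists, so take the lock around it */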
zynqmp_dma_complete_descriptor(chan);
+ spin_unlock_irqrestore(&chan->lock, irqflags);
zynqmp_dma_chan_desc_cleanup(chan);
zynqmp_dma_free_descriptors(chan);
+
zynqmp_dma_init(chan);
}
/**
 * zynqmp_dma_do_tasklet - Schedule completion tasklet
 * @t: Pointer to the ZynqMP DMA channel structure
 */
static void zynqmp_dma_do_tasklet(struct tasklet_struct *t)
{
struct zynqmp_dma_chan *chan = from_tasklet(chan, t, tasklet);
u32 count;
unsigned long irqflags;
- spin_lock_irqsave(&chan->lock, irqflags);
-
if (chan->err) {
zynqmp_dma_reset(chan);
chan->err = false;
- goto unlock;
+ return;
}
+ spin_lock_irqsave(&chan->lock, irqflags);
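+ /* Reap as many descriptors as the destination accounting register reports complete */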
count = readl(chan->regs + ZYNQMP_DMA_IRQ_DST_ACCT);
-
while (count) {
zynqmp_dma_complete_descriptor(chan);
count--;
}
+ spin_unlock_irqrestore(&chan->lock, irqflags);
zynqmp_dma_chan_desc_cleanup(chan);
- if (chan->idle)
+ if (chan->idle) {
+ spin_lock_irqsave(&chan->lock, irqflags);
zynqmp_dma_start_transfer(chan);
-
-unlock:
- spin_unlock_irqrestore(&chan->lock, irqflags);
+ spin_unlock_irqrestore(&chan->lock, irqflags);
+ }
}
/**
 * zynqmp_dma_device_terminate_all - Aborts all transfers on a channel
 * @dchan: DMA Channel pointer
 */
static int zynqmp_dma_device_terminate_all(struct dma_chan *dchan)
{
struct zynqmp_dma_chan *chan = to_chan(dchan);
- unsigned long irqflags;
- spin_lock_irqsave(&chan->lock, irqflags);
writel(ZYNQMP_DMA_IDS_DEFAULT_MASK, chan->regs + ZYNQMP_DMA_IDS);
zynqmp_dma_free_descriptors(chan);
- spin_unlock_irqrestore(&chan->lock, irqflags);
return 0;
}