@@ -755,8 +755,7 @@ static void xilinx_dma_chan_desc_cleanup(struct xilinx_dma_chan *chan)
 	spin_lock_irqsave(&chan->lock, flags);
 
 	list_for_each_entry_safe(desc, next, &chan->done_list, node) {
-		dma_async_tx_callback callback;
-		void *callback_param;
+		struct dmaengine_desc_callback cb;
 
 		if (desc->cyclic) {
 			xilinx_dma_chan_handle_cyclic(chan, desc, &flags);
@@ -767,11 +766,10 @@ static void xilinx_dma_chan_desc_cleanup(struct xilinx_dma_chan *chan)
 		list_del(&desc->node);
 
 		/* Run the link descriptor callback function */
-		callback = desc->async_tx.callback;
-		callback_param = desc->async_tx.callback_param;
-		if (callback) {
+		dmaengine_desc_get_callback(&desc->async_tx, &cb);
+		if (dmaengine_desc_callback_valid(&cb)) {
 			spin_unlock_irqrestore(&chan->lock, flags);
-			callback(callback_param);
+			dmaengine_desc_callback_invoke(&cb, NULL);
 			spin_lock_irqsave(&chan->lock, flags);
 		}
 
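For reference, the helpers this hunk switches to -- dmaengine_desc_get_callback(), dmaengine_desc_callback_valid() and dmaengine_desc_callback_invoke() -- come from the dmaengine core's private header drivers/dma/dmaengine.h. They snapshot a descriptor's callback and its argument into a struct dmaengine_desc_callback while the channel lock is held, so the callback can be run safely after the lock is dropped (the NULL second argument to dmaengine_desc_callback_invoke() is an optional result pointer). The sketch below only illustrates that pattern with simplified, locally named types; it is not the kernel's definition.

/*
 * Illustrative sketch of the descriptor-callback pattern used above.
 * Names and layouts are simplified stand-ins, not the kernel's own
 * definitions (those live in drivers/dma/dmaengine.h).
 */
#include <stdbool.h>
#include <stddef.h>

typedef void (*tx_callback)(void *param);

/* Minimal stand-in for struct dma_async_tx_descriptor. */
struct tx_descriptor {
	tx_callback callback;
	void *callback_param;
};

/* Snapshot of a descriptor's callback, taken under the channel lock. */
struct desc_callback {
	tx_callback callback;
	void *callback_param;
};

/* Copy the callback and its argument out of the descriptor. */
static inline void desc_get_callback(const struct tx_descriptor *tx,
				     struct desc_callback *cb)
{
	cb->callback = tx->callback;
	cb->callback_param = tx->callback_param;
}

/* The snapshot is worth invoking only if a callback was set at all. */
static inline bool desc_callback_valid(const struct desc_callback *cb)
{
	return cb->callback != NULL;
}

/* Run the snapshot; the descriptor itself is no longer needed here. */
static inline void desc_callback_invoke(const struct desc_callback *cb)
{
	if (cb->callback)
		cb->callback(cb->callback_param);
}

The snapshot is taken while chan->lock is held and the invocation happens after spin_unlock_irqrestore(), so client callbacks never run under the channel lock; the hunk keeps that ordering while dropping the open-coded callback/callback_param locals.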