Commit 23b5e3ad authored by Nicolas Ferre, committed by Vinod Koul

dmaengine: at_hdmac: implement pause and resume in atc_control

Pause and resume controls are useful for audio devices. This also returns
correct status from atc_tx_status() in case chan is paused.

Idea from dw_dmac patch by Linus Walleij.
Signed-off-by: Nicolas Ferre <nicolas.ferre@atmel.com>
Acked-by: Linus Walleij <linus.walleij@linaro.org>
Signed-off-by: Vinod Koul <vinod.koul@intel.com>
parent 543aabc7
...@@ -508,7 +508,8 @@ static irqreturn_t at_dma_interrupt(int irq, void *dev_id) ...@@ -508,7 +508,8 @@ static irqreturn_t at_dma_interrupt(int irq, void *dev_id)
if (pending & (AT_DMA_BTC(i) | AT_DMA_ERR(i))) { if (pending & (AT_DMA_BTC(i) | AT_DMA_ERR(i))) {
if (pending & AT_DMA_ERR(i)) { if (pending & AT_DMA_ERR(i)) {
/* Disable channel on AHB error */ /* Disable channel on AHB error */
dma_writel(atdma, CHDR, atchan->mask); dma_writel(atdma, CHDR,
AT_DMA_RES(i) | atchan->mask);
/* Give information to tasklet */ /* Give information to tasklet */
set_bit(ATC_IS_ERROR, &atchan->status); set_bit(ATC_IS_ERROR, &atchan->status);
} }
...@@ -952,39 +953,78 @@ static int atc_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd, ...@@ -952,39 +953,78 @@ static int atc_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
{ {
struct at_dma_chan *atchan = to_at_dma_chan(chan); struct at_dma_chan *atchan = to_at_dma_chan(chan);
struct at_dma *atdma = to_at_dma(chan->device); struct at_dma *atdma = to_at_dma(chan->device);
struct at_desc *desc, *_desc; int chan_id = atchan->chan_common.chan_id;
LIST_HEAD(list); LIST_HEAD(list);
/* Only supports DMA_TERMINATE_ALL */ dev_vdbg(chan2dev(chan), "atc_control (%d)\n", cmd);
if (cmd != DMA_TERMINATE_ALL)
return -ENXIO;
/* if (cmd == DMA_PAUSE) {
* This is only called when something went wrong elsewhere, so int pause_timeout = 1000;
* we don't really care about the data. Just disable the
* channel. We still have to poll the channel enable bit due
* to AHB/HSB limitations.
*/
spin_lock_bh(&atchan->lock);
dma_writel(atdma, CHDR, atchan->mask); spin_lock_bh(&atchan->lock);
/* confirm that this channel is disabled */ dma_writel(atdma, CHER, AT_DMA_SUSP(chan_id));
while (dma_readl(atdma, CHSR) & atchan->mask)
cpu_relax(); /* wait for FIFO to be empty */
while (!(dma_readl(atdma, CHSR) & AT_DMA_EMPT(chan_id))) {
if (pause_timeout-- > 0) {
/* the FIFO can only drain if the peripheral
* is still requesting data:
* -> timeout if it is not the case. */
dma_writel(atdma, CHDR, AT_DMA_RES(chan_id));
spin_unlock_bh(&atchan->lock);
return -ETIMEDOUT;
}
cpu_relax();
}
/* active_list entries will end up before queued entries */ set_bit(ATC_IS_PAUSED, &atchan->status);
list_splice_init(&atchan->queue, &list);
list_splice_init(&atchan->active_list, &list);
/* Flush all pending and queued descriptors */ spin_unlock_bh(&atchan->lock);
list_for_each_entry_safe(desc, _desc, &list, desc_node) } else if (cmd == DMA_RESUME) {
atc_chain_complete(atchan, desc); if (!test_bit(ATC_IS_PAUSED, &atchan->status))
return 0;
/* if channel dedicated to cyclic operations, free it */ spin_lock_bh(&atchan->lock);
clear_bit(ATC_IS_CYCLIC, &atchan->status);
spin_unlock_bh(&atchan->lock); dma_writel(atdma, CHDR, AT_DMA_RES(chan_id));
clear_bit(ATC_IS_PAUSED, &atchan->status);
spin_unlock_bh(&atchan->lock);
} else if (cmd == DMA_TERMINATE_ALL) {
struct at_desc *desc, *_desc;
/*
* This is only called when something went wrong elsewhere, so
* we don't really care about the data. Just disable the
* channel. We still have to poll the channel enable bit due
* to AHB/HSB limitations.
*/
spin_lock_bh(&atchan->lock);
/* disabling channel: must also remove suspend state */
dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask);
/* confirm that this channel is disabled */
while (dma_readl(atdma, CHSR) & atchan->mask)
cpu_relax();
/* active_list entries will end up before queued entries */
list_splice_init(&atchan->queue, &list);
list_splice_init(&atchan->active_list, &list);
/* Flush all pending and queued descriptors */
list_for_each_entry_safe(desc, _desc, &list, desc_node)
atc_chain_complete(atchan, desc);
clear_bit(ATC_IS_PAUSED, &atchan->status);
/* if channel dedicated to cyclic operations, free it */
clear_bit(ATC_IS_CYCLIC, &atchan->status);
spin_unlock_bh(&atchan->lock);
} else {
return -ENXIO;
}
return 0; return 0;
} }
...@@ -1032,8 +1072,11 @@ atc_tx_status(struct dma_chan *chan, ...@@ -1032,8 +1072,11 @@ atc_tx_status(struct dma_chan *chan,
else else
dma_set_tx_state(txstate, last_complete, last_used, 0); dma_set_tx_state(txstate, last_complete, last_used, 0);
dev_vdbg(chan2dev(chan), "tx_status: %d (d%d, u%d)\n", if (test_bit(ATC_IS_PAUSED, &atchan->status))
cookie, last_complete ? last_complete : 0, ret = DMA_PAUSED;
dev_vdbg(chan2dev(chan), "tx_status %d: cookie = %d (d%d, u%d)\n",
ret, cookie, last_complete ? last_complete : 0,
last_used ? last_used : 0); last_used ? last_used : 0);
return ret; return ret;
......
...@@ -191,6 +191,7 @@ txd_to_at_desc(struct dma_async_tx_descriptor *txd) ...@@ -191,6 +191,7 @@ txd_to_at_desc(struct dma_async_tx_descriptor *txd)
*/ */
/*
 * Per-channel status bits kept in at_dma_chan->status (used with
 * set_bit/test_bit/clear_bit, so values are bit positions).
 */
enum atc_status {
	ATC_IS_ERROR = 0,	/* AHB error reported by the interrupt handler */
	ATC_IS_PAUSED = 1,	/* channel suspended via DMA_PAUSE */
	ATC_IS_CYCLIC = 24,	/* channel dedicated to cyclic operations */
};
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment