Commit 4facfe7f authored by Maxime Ripard's avatar Maxime Ripard Committed by Vinod Koul

dmaengine: hdmac: Split device_control

Split the device_control callback of the Atmel HDMAC driver to make use
of the newly introduced callbacks, that will eventually be used to retrieve
slave capabilities.
Signed-off-by: Maxime Ripard <maxime.ripard@free-electrons.com>
Acked-by: Nicolas Ferre <nicolas.ferre@atmel.com>
Signed-off-by: Vinod Koul <vinod.koul@intel.com>
parent bcd1b0b9
...@@ -972,11 +972,13 @@ atc_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len, ...@@ -972,11 +972,13 @@ atc_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
return NULL; return NULL;
} }
static int set_runtime_config(struct dma_chan *chan, static int atc_config(struct dma_chan *chan,
struct dma_slave_config *sconfig) struct dma_slave_config *sconfig)
{ {
struct at_dma_chan *atchan = to_at_dma_chan(chan); struct at_dma_chan *atchan = to_at_dma_chan(chan);
dev_vdbg(chan2dev(chan), "%s\n", __func__);
/* Check if it is chan is configured for slave transfers */ /* Check if it is chan is configured for slave transfers */
if (!chan->private) if (!chan->private)
return -EINVAL; return -EINVAL;
...@@ -989,9 +991,28 @@ static int set_runtime_config(struct dma_chan *chan, ...@@ -989,9 +991,28 @@ static int set_runtime_config(struct dma_chan *chan,
return 0; return 0;
} }
static int atc_pause(struct dma_chan *chan)
{
struct at_dma_chan *atchan = to_at_dma_chan(chan);
struct at_dma *atdma = to_at_dma(chan->device);
int chan_id = atchan->chan_common.chan_id;
unsigned long flags;
static int atc_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd, LIST_HEAD(list);
unsigned long arg)
dev_vdbg(chan2dev(chan), "%s\n", __func__);
spin_lock_irqsave(&atchan->lock, flags);
dma_writel(atdma, CHER, AT_DMA_SUSP(chan_id));
set_bit(ATC_IS_PAUSED, &atchan->status);
spin_unlock_irqrestore(&atchan->lock, flags);
return 0;
}
static int atc_resume(struct dma_chan *chan)
{ {
struct at_dma_chan *atchan = to_at_dma_chan(chan); struct at_dma_chan *atchan = to_at_dma_chan(chan);
struct at_dma *atdma = to_at_dma(chan->device); struct at_dma *atdma = to_at_dma(chan->device);
...@@ -1000,60 +1021,61 @@ static int atc_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd, ...@@ -1000,60 +1021,61 @@ static int atc_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
LIST_HEAD(list); LIST_HEAD(list);
dev_vdbg(chan2dev(chan), "atc_control (%d)\n", cmd); dev_vdbg(chan2dev(chan), "%s\n", __func__);
if (cmd == DMA_PAUSE) { if (!atc_chan_is_paused(atchan))
spin_lock_irqsave(&atchan->lock, flags); return 0;
dma_writel(atdma, CHER, AT_DMA_SUSP(chan_id)); spin_lock_irqsave(&atchan->lock, flags);
set_bit(ATC_IS_PAUSED, &atchan->status);
spin_unlock_irqrestore(&atchan->lock, flags); dma_writel(atdma, CHDR, AT_DMA_RES(chan_id));
} else if (cmd == DMA_RESUME) { clear_bit(ATC_IS_PAUSED, &atchan->status);
if (!atc_chan_is_paused(atchan))
return 0;
spin_lock_irqsave(&atchan->lock, flags); spin_unlock_irqrestore(&atchan->lock, flags);
dma_writel(atdma, CHDR, AT_DMA_RES(chan_id)); return 0;
clear_bit(ATC_IS_PAUSED, &atchan->status); }
spin_unlock_irqrestore(&atchan->lock, flags); static int atc_terminate_all(struct dma_chan *chan)
} else if (cmd == DMA_TERMINATE_ALL) { {
struct at_desc *desc, *_desc; struct at_dma_chan *atchan = to_at_dma_chan(chan);
/* struct at_dma *atdma = to_at_dma(chan->device);
* This is only called when something went wrong elsewhere, so int chan_id = atchan->chan_common.chan_id;
* we don't really care about the data. Just disable the struct at_desc *desc, *_desc;
* channel. We still have to poll the channel enable bit due unsigned long flags;
* to AHB/HSB limitations.
*/
spin_lock_irqsave(&atchan->lock, flags);
/* disabling channel: must also remove suspend state */ LIST_HEAD(list);
dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask);
/* confirm that this channel is disabled */ dev_vdbg(chan2dev(chan), "%s\n", __func__);
while (dma_readl(atdma, CHSR) & atchan->mask)
cpu_relax();
/* active_list entries will end up before queued entries */ /*
list_splice_init(&atchan->queue, &list); * This is only called when something went wrong elsewhere, so
list_splice_init(&atchan->active_list, &list); * we don't really care about the data. Just disable the
* channel. We still have to poll the channel enable bit due
* to AHB/HSB limitations.
*/
spin_lock_irqsave(&atchan->lock, flags);
/* Flush all pending and queued descriptors */ /* disabling channel: must also remove suspend state */
list_for_each_entry_safe(desc, _desc, &list, desc_node) dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask);
atc_chain_complete(atchan, desc);
clear_bit(ATC_IS_PAUSED, &atchan->status); /* confirm that this channel is disabled */
/* if channel dedicated to cyclic operations, free it */ while (dma_readl(atdma, CHSR) & atchan->mask)
clear_bit(ATC_IS_CYCLIC, &atchan->status); cpu_relax();
spin_unlock_irqrestore(&atchan->lock, flags); /* active_list entries will end up before queued entries */
} else if (cmd == DMA_SLAVE_CONFIG) { list_splice_init(&atchan->queue, &list);
return set_runtime_config(chan, (struct dma_slave_config *)arg); list_splice_init(&atchan->active_list, &list);
} else {
return -ENXIO; /* Flush all pending and queued descriptors */
} list_for_each_entry_safe(desc, _desc, &list, desc_node)
atc_chain_complete(atchan, desc);
clear_bit(ATC_IS_PAUSED, &atchan->status);
/* if channel dedicated to cyclic operations, free it */
clear_bit(ATC_IS_CYCLIC, &atchan->status);
spin_unlock_irqrestore(&atchan->lock, flags);
return 0; return 0;
} }
...@@ -1505,7 +1527,10 @@ static int __init at_dma_probe(struct platform_device *pdev) ...@@ -1505,7 +1527,10 @@ static int __init at_dma_probe(struct platform_device *pdev)
/* controller can do slave DMA: can trigger cyclic transfers */ /* controller can do slave DMA: can trigger cyclic transfers */
dma_cap_set(DMA_CYCLIC, atdma->dma_common.cap_mask); dma_cap_set(DMA_CYCLIC, atdma->dma_common.cap_mask);
atdma->dma_common.device_prep_dma_cyclic = atc_prep_dma_cyclic; atdma->dma_common.device_prep_dma_cyclic = atc_prep_dma_cyclic;
atdma->dma_common.device_control = atc_control; atdma->dma_common.device_config = atc_config;
atdma->dma_common.device_pause = atc_pause;
atdma->dma_common.device_resume = atc_resume;
atdma->dma_common.device_terminate_all = atc_terminate_all;
} }
dma_writel(atdma, EN, AT_DMA_ENABLE); dma_writel(atdma, EN, AT_DMA_ENABLE);
...@@ -1622,7 +1647,7 @@ static void atc_suspend_cyclic(struct at_dma_chan *atchan) ...@@ -1622,7 +1647,7 @@ static void atc_suspend_cyclic(struct at_dma_chan *atchan)
if (!atc_chan_is_paused(atchan)) { if (!atc_chan_is_paused(atchan)) {
dev_warn(chan2dev(chan), dev_warn(chan2dev(chan),
"cyclic channel not paused, should be done by channel user\n"); "cyclic channel not paused, should be done by channel user\n");
atc_control(chan, DMA_PAUSE, 0); atc_pause(chan);
} }
/* now preserve additional data for cyclic operations */ /* now preserve additional data for cyclic operations */
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment