Commit f26e03ad authored by Fabio Baltieri

dmaengine: ste_dma40: minor cosmetic fixes

This patch contains various non-functional cosmetic fixes.
Acked-by: Linus Walleij <linus.walleij@linaro.org>
Acked-by: Vinod Koul <vinod.koul@intel.com>
Signed-off-by: Fabio Baltieri <fabio.baltieri@linaro.org>
parent 762eb33f
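The most visible cleanup is in the first hunk of drivers/dma/ste_dma40.c below: an else branch whose only content is another if is collapsed into a single else if. A minimal stand-alone C sketch of that transformation follows; the names handle_desc_nested, handle_desc_flat, acked and queued are invented for illustration and are not part of the driver.

#include <stdbool.h>
#include <stdio.h>

/* Before: the else branch only wraps another if, costing an indent level. */
static void handle_desc_nested(bool acked, bool queued)
{
	if (acked) {
		puts("free descriptor");
	} else {
		if (!queued)
			puts("queue descriptor");
	}
}

/* After: the same control flow, flattened into "else if". */
static void handle_desc_flat(bool acked, bool queued)
{
	if (acked)
		puts("free descriptor");
	else if (!queued)
		puts("queue descriptor");
}

int main(void)
{
	handle_desc_nested(false, false);	/* prints "queue descriptor" */
	handle_desc_flat(false, false);		/* prints "queue descriptor" */
	return 0;
}

Both helpers take exactly the same branches; the flattened form only drops one level of nesting, which is what the patch does to the descriptor-recycling path in dma_tasklet(). The remaining hunks delete stray blank lines and re-wrap the d40_prep_slave_sg() prototype into conventional kernel style; none of this changes behaviour.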
--- a/drivers/dma/ste_dma40.c
+++ b/drivers/dma/ste_dma40.c
@@ -1609,13 +1609,11 @@ static void dma_tasklet(unsigned long data)
 		if (async_tx_test_ack(&d40d->txd)) {
 			d40_desc_remove(d40d);
 			d40_desc_free(d40c, d40d);
-		} else {
-			if (!d40d->is_in_client_list) {
-				d40_desc_remove(d40d);
-				d40_lcla_free_all(d40c, d40d);
-				list_add_tail(&d40d->node, &d40c->client);
-				d40d->is_in_client_list = true;
-			}
+		} else if (!d40d->is_in_client_list) {
+			d40_desc_remove(d40d);
+			d40_lcla_free_all(d40c, d40d);
+			list_add_tail(&d40d->node, &d40c->client);
+			d40d->is_in_client_list = true;
 		}
 	}
 
@@ -2123,7 +2121,6 @@ static bool d40_is_paused(struct d40_chan *d40c)
 
 }
 
-
 static u32 stedma40_residue(struct dma_chan *chan)
 {
 	struct d40_chan *d40c =
@@ -2199,7 +2196,6 @@ d40_prep_sg_phy(struct d40_chan *chan, struct d40_desc *desc,
 	return ret < 0 ? ret : 0;
 }
 
-
 static struct d40_desc *
 d40_prep_desc(struct d40_chan *chan, struct scatterlist *sg,
 	      unsigned int sg_len, unsigned long dma_flags)
@@ -2225,7 +2221,6 @@ d40_prep_desc(struct d40_chan *chan, struct scatterlist *sg,
 		goto err;
 	}
 
-
 	desc->lli_current = 0;
 	desc->txd.flags = dma_flags;
 	desc->txd.tx_submit = d40_tx_submit;
@@ -2274,7 +2269,6 @@ d40_prep_sg(struct dma_chan *dchan, struct scatterlist *sg_src,
 		return NULL;
 	}
 
-
 	spin_lock_irqsave(&chan->lock, flags);
 
 	desc = d40_prep_desc(chan, sg_src, sg_len, dma_flags);
@@ -2432,11 +2426,11 @@ static int d40_alloc_chan_resources(struct dma_chan *chan)
 
 		if (d40c->dma_cfg.dir == STEDMA40_PERIPH_TO_MEM)
 			d40c->lcpa = d40c->base->lcpa_base +
-			  d40c->dma_cfg.src_dev_type * D40_LCPA_CHAN_SIZE;
+				d40c->dma_cfg.src_dev_type * D40_LCPA_CHAN_SIZE;
 		else
 			d40c->lcpa = d40c->base->lcpa_base +
-			  d40c->dma_cfg.dst_dev_type *
-			  D40_LCPA_CHAN_SIZE + D40_LCPA_CHAN_DST_DELTA;
+				d40c->dma_cfg.dst_dev_type *
+				D40_LCPA_CHAN_SIZE + D40_LCPA_CHAN_DST_DELTA;
 	}
 
 	dev_dbg(chan2dev(d40c), "allocated %s channel (phy %d%s)\n",
@@ -2471,7 +2465,6 @@ static void d40_free_chan_resources(struct dma_chan *chan)
 		return;
 	}
 
-
 	spin_lock_irqsave(&d40c->lock, flags);
 
 	err = d40_free_dma(d40c);
@@ -2514,12 +2507,10 @@ d40_prep_memcpy_sg(struct dma_chan *chan,
 	return d40_prep_sg(chan, src_sg, dst_sg, src_nents, DMA_NONE, dma_flags);
 }
 
-static struct dma_async_tx_descriptor *d40_prep_slave_sg(struct dma_chan *chan,
-							   struct scatterlist *sgl,
-							   unsigned int sg_len,
-							   enum dma_transfer_direction direction,
-							   unsigned long dma_flags,
-							   void *context)
+static struct dma_async_tx_descriptor *
+d40_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
+		  unsigned int sg_len, enum dma_transfer_direction direction,
+		  unsigned long dma_flags, void *context)
 {
 	if (direction != DMA_DEV_TO_MEM && direction != DMA_MEM_TO_DEV)
 		return NULL;
--- a/drivers/dma/ste_dma40_ll.c
+++ b/drivers/dma/ste_dma40_ll.c
@@ -251,7 +251,7 @@ d40_phy_buf_to_lli(struct d40_phy_lli *lli, dma_addr_t addr, u32 size,
 
 	return lli;
 
- err:
+err:
 	return NULL;
 }