Commit a55e07c8 authored by Laurent Pinchart, committed by Vinod Koul

dmaengine: rcar-dmac: Fix uninitialized variable usage

The desc variable in rcar_dmac_desc_get() and the chunk variable in
rcar_dmac_xfer_chunk_get() are used uninitialized when new descriptors need
to be allocated: the do { ... } while loops test them before they have been
assigned. Fix this by converting the loops into while loops that test the
free list instead.
Reported-by: Dan Carpenter <dan.carpenter@oracle.com>
Signed-off-by: Laurent Pinchart <laurent.pinchart+renesas@ideasonboard.com>
Signed-off-by: Vinod Koul <vinod.koul@intel.com>
parent bf44a417
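For reference, the shape the patch moves to can be illustrated outside the kernel. The sketch below is a simplified, hypothetical stand-in, not the driver code: the item, free_list and refill() names are invented and there is no locking. The point is that the pointer handed back to the caller is only assigned after the free list is known to be non-empty, so it can never be read uninitialized the way desc and chunk could in the do { ... } while (!desc) form.

/*
 * Minimal sketch of the fixed pattern (invented names, no locking).
 * In the buggy shape, the loop condition tested the item pointer
 * before it had ever been assigned whenever the list started empty
 * and the refill path took "continue".
 */
#include <stddef.h>
#include <stdio.h>

struct item { struct item *next; };

static struct item *free_list;
static struct item storage[4];

/* Pretend allocation: push a few statically allocated items onto the list. */
static int refill(void)
{
	for (size_t i = 0; i < 4; i++) {
		storage[i].next = free_list;
		free_list = &storage[i];
	}
	return 0;
}

/* Fixed shape, mirroring the patch: loop on the list, not on the item. */
static struct item *get_item(void)
{
	struct item *it;	/* never read before it is assigned */

	while (free_list == NULL) {
		if (refill() < 0)
			return NULL;
	}

	it = free_list;
	free_list = it->next;
	return it;
}

int main(void)
{
	printf("got %p\n", (void *)get_item());
	return 0;
}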
@@ -549,26 +549,22 @@ static struct rcar_dmac_desc *rcar_dmac_desc_get(struct rcar_dmac_chan *chan)
 	spin_lock_irq(&chan->lock);
 
-	do {
-		if (list_empty(&chan->desc.free)) {
-			/*
-			 * No free descriptors, allocate a page worth of them
-			 * and try again, as someone else could race us to get
-			 * the newly allocated descriptors. If the allocation
-			 * fails return an error.
-			 */
-			spin_unlock_irq(&chan->lock);
-			ret = rcar_dmac_desc_alloc(chan, GFP_NOWAIT);
-			if (ret < 0)
-				return NULL;
-			spin_lock_irq(&chan->lock);
-			continue;
-		}
+	while (list_empty(&chan->desc.free)) {
+		/*
+		 * No free descriptors, allocate a page worth of them and try
+		 * again, as someone else could race us to get the newly
+		 * allocated descriptors. If the allocation fails return an
+		 * error.
+		 */
+		spin_unlock_irq(&chan->lock);
+		ret = rcar_dmac_desc_alloc(chan, GFP_NOWAIT);
+		if (ret < 0)
+			return NULL;
+		spin_lock_irq(&chan->lock);
+	}
 
-		desc = list_first_entry(&chan->desc.free, struct rcar_dmac_desc,
-					node);
-		list_del(&desc->node);
-	} while (!desc);
+	desc = list_first_entry(&chan->desc.free, struct rcar_dmac_desc, node);
+	list_del(&desc->node);
 
 	spin_unlock_irq(&chan->lock);
@@ -621,26 +617,23 @@ rcar_dmac_xfer_chunk_get(struct rcar_dmac_chan *chan)
 	spin_lock_irq(&chan->lock);
 
-	do {
-		if (list_empty(&chan->desc.chunks_free)) {
-			/*
-			 * No free descriptors, allocate a page worth of them
-			 * and try again, as someone else could race us to get
-			 * the newly allocated descriptors. If the allocation
-			 * fails return an error.
-			 */
-			spin_unlock_irq(&chan->lock);
-			ret = rcar_dmac_xfer_chunk_alloc(chan, GFP_NOWAIT);
-			if (ret < 0)
-				return NULL;
-			spin_lock_irq(&chan->lock);
-			continue;
-		}
+	while (list_empty(&chan->desc.chunks_free)) {
+		/*
+		 * No free descriptors, allocate a page worth of them and try
+		 * again, as someone else could race us to get the newly
+		 * allocated descriptors. If the allocation fails return an
+		 * error.
+		 */
+		spin_unlock_irq(&chan->lock);
+		ret = rcar_dmac_xfer_chunk_alloc(chan, GFP_NOWAIT);
+		if (ret < 0)
+			return NULL;
+		spin_lock_irq(&chan->lock);
+	}
 
-		chunk = list_first_entry(&chan->desc.chunks_free,
-					 struct rcar_dmac_xfer_chunk, node);
-		list_del(&chunk->node);
-	} while (!chunk);
+	chunk = list_first_entry(&chan->desc.chunks_free,
+				 struct rcar_dmac_xfer_chunk, node);
+	list_del(&chunk->node);
 
 	spin_unlock_irq(&chan->lock);