Commit 55f5f986 authored by Wolfram Sang, committed by Wolfram Sang

i2c: sh_mobile: rework deferred probing

DMA is opt-in for this driver. So, we can't use deferred probing for
requesting DMA channels in probe, because our driver would get endlessly
deferred if DMA support is compiled in AND the DMA driver is missing.
Because we can't know when the DMA driver might show up, we always try
again when a DMA transfer would be possible. The downside is that there
is more overhead for setting up PIO transfers under the above scenario.
But well, having DMA enabled and the proper DMA driver missing looks
like a broken or test config anyhow.
Reported-by: Geert Uytterhoeven <geert+renesas@glider.be>
Signed-off-by: Wolfram Sang <wsa+renesas@sang-engineering.com>
Signed-off-by: Wolfram Sang <wsa@the-dreams.de>
parent e844a799
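
To make the retry logic concrete before the diff, here is a minimal sketch of the pattern described above (a plain-C analogy, not the driver code; the names CHAN_DEFERRED, acquire_channel(), probe() and do_transfer() are illustrative only): probe stores a "deferred" sentinel instead of requesting the channel, every transfer retries the request while that sentinel is still in place, and the transfer falls back to the slow path in the meantime.

/*
 * Minimal, self-contained sketch (plain C, not the driver itself) of the
 * pattern this patch adopts: instead of deferring probe() until a DMA
 * channel provider appears, store a "deferred" sentinel at probe time,
 * retry the channel request at each transfer, and fall back to the slow
 * path while the request keeps failing. All names here are illustrative.
 */
#include <stdio.h>

#define CHAN_DEFERRED ((void *)-1)	/* stands in for ERR_PTR(-EPROBE_DEFER) */

struct dev_ctx {
	void *chan;	/* CHAN_DEFERRED = not available yet, retry later */
};

/* Pretend the channel provider has not shown up yet. */
static void *acquire_channel(void)
{
	return CHAN_DEFERRED;
}

/* probe() never blocks on the channel; it only marks it "try later". */
static void probe(struct dev_ctx *ctx)
{
	ctx->chan = CHAN_DEFERRED;
}

static void do_transfer(struct dev_ctx *ctx)
{
	/* Still deferred? Retry the request now, at transfer time. */
	if (ctx->chan == CHAN_DEFERRED)
		ctx->chan = acquire_channel();

	if (ctx->chan == CHAN_DEFERRED || !ctx->chan) {
		printf("slow (PIO-like) transfer\n");	/* fallback path */
		return;
	}
	printf("fast (DMA-like) transfer\n");
}

int main(void)
{
	struct dev_ctx ctx;

	probe(&ctx);
	do_transfer(&ctx);	/* retries the request, then falls back */
	do_transfer(&ctx);	/* retried again on the next transfer, too */
	return 0;
}

In the patch itself, the sentinel is ERR_PTR(-EPROBE_DEFER) stored in pd->dma_rx and pd->dma_tx at probe time, and the retry happens at the top of sh_mobile_i2c_xfer_dma(), as the hunks below show.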
@@ -140,6 +140,7 @@ struct sh_mobile_i2c_data {
 	int sr;
 	bool send_stop;
 
+	struct resource *res;
 	struct dma_chan *dma_tx;
 	struct dma_chan *dma_rx;
 	struct scatterlist sg;
@@ -539,6 +540,41 @@ static void sh_mobile_i2c_dma_callback(void *data)
 	iic_set_clr(pd, ICIC, 0, ICIC_TDMAE | ICIC_RDMAE);
 }
 
+static struct dma_chan *sh_mobile_i2c_request_dma_chan(struct device *dev,
+				enum dma_transfer_direction dir, dma_addr_t port_addr)
+{
+	struct dma_chan *chan;
+	struct dma_slave_config cfg;
+	char *chan_name = dir == DMA_MEM_TO_DEV ? "tx" : "rx";
+	int ret;
+
+	chan = dma_request_slave_channel_reason(dev, chan_name);
+	if (IS_ERR(chan)) {
+		ret = PTR_ERR(chan);
+		dev_dbg(dev, "request_channel failed for %s (%d)\n", chan_name, ret);
+		return chan;
+	}
+
+	memset(&cfg, 0, sizeof(cfg));
+	cfg.direction = dir;
+	if (dir == DMA_MEM_TO_DEV) {
+		cfg.dst_addr = port_addr;
+		cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
+	} else {
+		cfg.src_addr = port_addr;
+		cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
+	}
+
+	ret = dmaengine_slave_config(chan, &cfg);
+	if (ret) {
+		dev_dbg(dev, "slave_config failed for %s (%d)\n", chan_name, ret);
+		dma_release_channel(chan);
+		return ERR_PTR(ret);
+	}
+
+	dev_dbg(dev, "got DMA channel for %s\n", chan_name);
+	return chan;
+}
+
 static void sh_mobile_i2c_xfer_dma(struct sh_mobile_i2c_data *pd)
 {
 	bool read = pd->msg->flags & I2C_M_RD;
@@ -548,6 +584,15 @@ static void sh_mobile_i2c_xfer_dma(struct sh_mobile_i2c_data *pd)
 	dma_addr_t dma_addr;
 	dma_cookie_t cookie;
 
+	if (PTR_ERR(chan) == -EPROBE_DEFER) {
+		if (read)
+			chan = pd->dma_rx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_DEV_TO_MEM,
+									   pd->res->start + ICDR);
+		else
+			chan = pd->dma_tx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_MEM_TO_DEV,
+									   pd->res->start + ICDR);
+	}
+
 	if (IS_ERR(chan))
 		return;
@@ -747,41 +792,6 @@ static const struct of_device_id sh_mobile_i2c_dt_ids[] = {
 };
 MODULE_DEVICE_TABLE(of, sh_mobile_i2c_dt_ids);
 
-static struct dma_chan *sh_mobile_i2c_request_dma_chan(struct device *dev,
-				enum dma_transfer_direction dir, dma_addr_t port_addr)
-{
-	struct dma_chan *chan;
-	struct dma_slave_config cfg;
-	char *chan_name = dir == DMA_MEM_TO_DEV ? "tx" : "rx";
-	int ret;
-
-	chan = dma_request_slave_channel_reason(dev, chan_name);
-	if (IS_ERR(chan)) {
-		ret = PTR_ERR(chan);
-		dev_dbg(dev, "request_channel failed for %s (%d)\n", chan_name, ret);
-		return chan;
-	}
-
-	memset(&cfg, 0, sizeof(cfg));
-	cfg.direction = dir;
-	if (dir == DMA_MEM_TO_DEV) {
-		cfg.dst_addr = port_addr;
-		cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
-	} else {
-		cfg.src_addr = port_addr;
-		cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
-	}
-
-	ret = dmaengine_slave_config(chan, &cfg);
-	if (ret) {
-		dev_dbg(dev, "slave_config failed for %s (%d)\n", chan_name, ret);
-		dma_release_channel(chan);
-		return ERR_PTR(ret);
-	}
-
-	dev_dbg(dev, "got DMA channel for %s\n", chan_name);
-	return chan;
-}
-
 static void sh_mobile_i2c_release_dma(struct sh_mobile_i2c_data *pd)
 {
 	if (!IS_ERR(pd->dma_tx)) {
@@ -844,6 +854,7 @@ static int sh_mobile_i2c_probe(struct platform_device *dev)
 	res = platform_get_resource(dev, IORESOURCE_MEM, 0);
+	pd->res = res;
 	pd->reg = devm_ioremap_resource(&dev->dev, res);
 	if (IS_ERR(pd->reg))
 		return PTR_ERR(pd->reg);
@@ -884,19 +895,7 @@ static int sh_mobile_i2c_probe(struct platform_device *dev)
 	/* Init DMA */
 	sg_init_table(&pd->sg, 1);
 	pd->dma_direction = DMA_NONE;
-	pd->dma_rx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_DEV_TO_MEM,
-						    res->start + ICDR);
-	ret = PTR_ERR(pd->dma_rx);
-	if (ret == -EPROBE_DEFER)
-		return ret;
-
-	pd->dma_tx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_MEM_TO_DEV,
-						    res->start + ICDR);
-	ret = PTR_ERR(pd->dma_tx);
-	if (ret == -EPROBE_DEFER) {
-		sh_mobile_i2c_release_dma(pd);
-		return ret;
-	}
+	pd->dma_rx = pd->dma_tx = ERR_PTR(-EPROBE_DEFER);
 
 	/* Enable Runtime PM for this device.
 	 *
@@ -934,8 +933,7 @@ static int sh_mobile_i2c_probe(struct platform_device *dev)
 		return ret;
 	}
 
-	dev_info(&dev->dev, "I2C adapter %d, bus speed %lu Hz, DMA=%c\n",
-		 adap->nr, pd->bus_speed, (pd->dma_rx || pd->dma_tx) ? 'y' : 'n');
+	dev_info(&dev->dev, "I2C adapter %d, bus speed %lu Hz\n", adap->nr, pd->bus_speed);
 
 	return 0;
 }