Commit 82c5de0a authored by Christoph Hellwig

dma-mapping: remove the DMA_MEMORY_EXCLUSIVE flag

All users of dma_declare_coherent_memory() want their allocations to be
exclusive, so default to exclusive allocations.
Signed-off-by: Christoph Hellwig <hch@lst.de>
Reviewed-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
parent 91a6fda9
@@ -566,8 +566,7 @@ boundaries when doing this.
 	int
 	dma_declare_coherent_memory(struct device *dev, phys_addr_t phys_addr,
-				    dma_addr_t device_addr, size_t size, int
-				    flags)
+				    dma_addr_t device_addr, size_t size);
 Declare region of memory to be handed out by dma_alloc_coherent() when
 it's asked for coherent memory for this device.
@@ -581,12 +580,6 @@ dma_addr_t in dma_alloc_coherent()).
 size is the size of the area (must be multiples of PAGE_SIZE).
-flags can be ORed together and are:
-DMA_MEMORY_EXCLUSIVE - only allocate memory from the declared regions.
-Do not allow dma_alloc_coherent() to fall back to system memory when
-it's out of memory in the declared region.
 As a simplification for the platforms, only *one* such region of
 memory may be declared per device.
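For reference, a minimal usage sketch of the interface after this change. The device, the EXAMPLE_MEM_* region and example_probe() are hypothetical placeholders; only the dma_* calls are the real API:

	#include <linux/dma-mapping.h>

	#define EXAMPLE_MEM_BASE	0x30000000UL	/* hypothetical device-local RAM */
	#define EXAMPLE_MEM_SIZE	0x100000	/* must be a multiple of PAGE_SIZE */

	static int example_probe(struct device *dev)
	{
		dma_addr_t dma_handle;
		void *vaddr;
		int ret;

		/* No flags argument any more: every declared region is exclusive. */
		ret = dma_declare_coherent_memory(dev, EXAMPLE_MEM_BASE,
						  EXAMPLE_MEM_BASE, EXAMPLE_MEM_SIZE);
		if (ret)
			return ret;

		/* Allocations for this device now come only from the declared region. */
		vaddr = dma_alloc_coherent(dev, PAGE_SIZE, &dma_handle, GFP_KERNEL);
		if (!vaddr) {
			dma_release_declared_memory(dev);
			return -ENOMEM;
		}

		/* ... program the device with dma_handle, access the buffer via vaddr ... */

		dma_free_coherent(dev, PAGE_SIZE, vaddr, dma_handle);
		dma_release_declared_memory(dev);
		return 0;
	}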
...
@@ -258,8 +258,7 @@ static void __init visstrim_analog_camera_init(void)
 		return;
 	dma_declare_coherent_memory(&pdev->dev, mx2_camera_base,
-			mx2_camera_base, MX2_CAMERA_BUF_SIZE,
-			DMA_MEMORY_EXCLUSIVE);
+			mx2_camera_base, MX2_CAMERA_BUF_SIZE);
 }
 static void __init visstrim_reserve(void)
@@ -445,8 +444,7 @@ static void __init visstrim_coda_init(void)
 	dma_declare_coherent_memory(&pdev->dev,
 			mx2_camera_base + MX2_CAMERA_BUF_SIZE,
 			mx2_camera_base + MX2_CAMERA_BUF_SIZE,
-			MX2_CAMERA_BUF_SIZE,
-			DMA_MEMORY_EXCLUSIVE);
+			MX2_CAMERA_BUF_SIZE);
 }
 /* DMA deinterlace */
@@ -465,8 +463,7 @@ static void __init visstrim_deinterlace_init(void)
 	dma_declare_coherent_memory(&pdev->dev,
 			mx2_camera_base + 2 * MX2_CAMERA_BUF_SIZE,
 			mx2_camera_base + 2 * MX2_CAMERA_BUF_SIZE,
-			MX2_CAMERA_BUF_SIZE,
-			DMA_MEMORY_EXCLUSIVE);
+			MX2_CAMERA_BUF_SIZE);
 }
 /* Emma-PrP for format conversion */
@@ -485,8 +482,7 @@ static void __init visstrim_emmaprp_init(void)
 	 */
 	ret = dma_declare_coherent_memory(&pdev->dev,
 			mx2_camera_base, mx2_camera_base,
-			MX2_CAMERA_BUF_SIZE,
-			DMA_MEMORY_EXCLUSIVE);
+			MX2_CAMERA_BUF_SIZE);
 	if (ret)
 		pr_err("Failed to declare memory for emmaprp\n");
 }
...
@@ -475,8 +475,7 @@ static int __init mx31moboard_init_cam(void)
 	ret = dma_declare_coherent_memory(&pdev->dev,
 			mx3_camera_base, mx3_camera_base,
-			MX3_CAMERA_BUF_SIZE,
-			DMA_MEMORY_EXCLUSIVE);
+			MX3_CAMERA_BUF_SIZE);
 	if (ret)
 		goto err;
...
@@ -529,9 +529,8 @@ static int __init ap325rxa_devices_setup(void)
 	device_initialize(&ap325rxa_ceu_device.dev);
 	arch_setup_pdev_archdata(&ap325rxa_ceu_device);
 	dma_declare_coherent_memory(&ap325rxa_ceu_device.dev,
 			ceu_dma_membase, ceu_dma_membase,
-			ceu_dma_membase + CEU_BUFFER_MEMORY_SIZE - 1,
-			DMA_MEMORY_EXCLUSIVE);
+			ceu_dma_membase + CEU_BUFFER_MEMORY_SIZE - 1);
 	platform_device_add(&ap325rxa_ceu_device);
...
@@ -1440,8 +1440,7 @@ static int __init arch_setup(void)
 	dma_declare_coherent_memory(&ecovec_ceu_devices[0]->dev,
 			ceu0_dma_membase, ceu0_dma_membase,
 			ceu0_dma_membase +
-			CEU_BUFFER_MEMORY_SIZE - 1,
-			DMA_MEMORY_EXCLUSIVE);
+			CEU_BUFFER_MEMORY_SIZE - 1);
 	platform_device_add(ecovec_ceu_devices[0]);
 	device_initialize(&ecovec_ceu_devices[1]->dev);
@@ -1449,8 +1448,7 @@ static int __init arch_setup(void)
 	dma_declare_coherent_memory(&ecovec_ceu_devices[1]->dev,
 			ceu1_dma_membase, ceu1_dma_membase,
 			ceu1_dma_membase +
-			CEU_BUFFER_MEMORY_SIZE - 1,
-			DMA_MEMORY_EXCLUSIVE);
+			CEU_BUFFER_MEMORY_SIZE - 1);
 	platform_device_add(ecovec_ceu_devices[1]);
 	gpiod_add_lookup_table(&cn12_power_gpiod_table);
...
@@ -603,9 +603,8 @@ static int __init kfr2r09_devices_setup(void)
 	device_initialize(&kfr2r09_ceu_device.dev);
 	arch_setup_pdev_archdata(&kfr2r09_ceu_device);
 	dma_declare_coherent_memory(&kfr2r09_ceu_device.dev,
 			ceu_dma_membase, ceu_dma_membase,
-			ceu_dma_membase + CEU_BUFFER_MEMORY_SIZE - 1,
-			DMA_MEMORY_EXCLUSIVE);
+			ceu_dma_membase + CEU_BUFFER_MEMORY_SIZE - 1);
 	platform_device_add(&kfr2r09_ceu_device);
...
@@ -603,9 +603,8 @@ static int __init migor_devices_setup(void)
 	device_initialize(&migor_ceu_device.dev);
 	arch_setup_pdev_archdata(&migor_ceu_device);
 	dma_declare_coherent_memory(&migor_ceu_device.dev,
 			ceu_dma_membase, ceu_dma_membase,
-			ceu_dma_membase + CEU_BUFFER_MEMORY_SIZE - 1,
-			DMA_MEMORY_EXCLUSIVE);
+			ceu_dma_membase + CEU_BUFFER_MEMORY_SIZE - 1);
 	platform_device_add(&migor_ceu_device);
...
@@ -941,8 +941,7 @@ static int __init devices_setup(void)
 	dma_declare_coherent_memory(&ms7724se_ceu_devices[0]->dev,
 			ceu0_dma_membase, ceu0_dma_membase,
 			ceu0_dma_membase +
-			CEU_BUFFER_MEMORY_SIZE - 1,
-			DMA_MEMORY_EXCLUSIVE);
+			CEU_BUFFER_MEMORY_SIZE - 1);
 	platform_device_add(ms7724se_ceu_devices[0]);
 	device_initialize(&ms7724se_ceu_devices[1]->dev);
@@ -950,8 +949,7 @@ static int __init devices_setup(void)
 	dma_declare_coherent_memory(&ms7724se_ceu_devices[1]->dev,
 			ceu1_dma_membase, ceu1_dma_membase,
 			ceu1_dma_membase +
-			CEU_BUFFER_MEMORY_SIZE - 1,
-			DMA_MEMORY_EXCLUSIVE);
+			CEU_BUFFER_MEMORY_SIZE - 1);
 	platform_device_add(ms7724se_ceu_devices[1]);
 	return platform_add_devices(ms7724se_devices,
...
@@ -63,8 +63,7 @@ static void gapspci_fixup_resources(struct pci_dev *dev)
 		BUG_ON(dma_declare_coherent_memory(&dev->dev,
 						res.start,
 						region.start,
-						resource_size(&res),
-						DMA_MEMORY_EXCLUSIVE));
+						resource_size(&res)));
 		break;
 	default:
 		printk("PCI: Failed resource fixup\n");
...
@@ -1708,8 +1708,7 @@ static int sh_mobile_ceu_probe(struct platform_device *pdev)
 	if (res) {
 		err = dma_declare_coherent_memory(&pdev->dev, res->start,
						  res->start,
-						  resource_size(res),
-						  DMA_MEMORY_EXCLUSIVE);
+						  resource_size(res));
 		if (err) {
 			dev_err(&pdev->dev, "Unable to declare CEU memory.\n");
 			return err;
...
@@ -126,8 +126,7 @@ static int ohci_hcd_sm501_drv_probe(struct platform_device *pdev)
 	retval = dma_declare_coherent_memory(dev, mem->start,
					     mem->start - mem->parent->start,
-					     resource_size(mem),
-					     DMA_MEMORY_EXCLUSIVE);
+					     resource_size(mem));
 	if (retval) {
 		dev_err(dev, "cannot declare coherent memory\n");
 		goto err1;
...
@@ -225,7 +225,7 @@ static int ohci_hcd_tmio_drv_probe(struct platform_device *dev)
 	}
 	ret = dma_declare_coherent_memory(&dev->dev, sram->start, sram->start,
-				resource_size(sram), DMA_MEMORY_EXCLUSIVE);
+				resource_size(sram));
 	if (ret)
 		goto err_dma_declare;
...
@@ -736,17 +736,14 @@ static inline int dma_get_cache_alignment(void)
 	return 1;
 }
-/* flags for the coherent memory api */
-#define DMA_MEMORY_EXCLUSIVE		0x01
 #ifdef CONFIG_DMA_DECLARE_COHERENT
 int dma_declare_coherent_memory(struct device *dev, phys_addr_t phys_addr,
-		dma_addr_t device_addr, size_t size, int flags);
+		dma_addr_t device_addr, size_t size);
 void dma_release_declared_memory(struct device *dev);
 #else
 static inline int
 dma_declare_coherent_memory(struct device *dev, phys_addr_t phys_addr,
-		dma_addr_t device_addr, size_t size, int flags)
+		dma_addr_t device_addr, size_t size)
 {
 	return -ENOSYS;
 }
...
@@ -14,7 +14,6 @@ struct dma_coherent_mem {
 	dma_addr_t	device_base;
 	unsigned long	pfn_base;
 	int		size;
-	int		flags;
 	unsigned long	*bitmap;
 	spinlock_t	spinlock;
 	bool		use_dev_dma_pfn_offset;
@@ -38,9 +37,9 @@ static inline dma_addr_t dma_get_device_base(struct device *dev,
 	return mem->device_base;
 }
-static int dma_init_coherent_memory(
-	phys_addr_t phys_addr, dma_addr_t device_addr, size_t size, int flags,
-	struct dma_coherent_mem **mem)
+static int dma_init_coherent_memory(phys_addr_t phys_addr,
+		dma_addr_t device_addr, size_t size,
+		struct dma_coherent_mem **mem)
 {
 	struct dma_coherent_mem *dma_mem = NULL;
 	void *mem_base = NULL;
@@ -73,7 +72,6 @@ static int dma_init_coherent_memory(
 	dma_mem->device_base = device_addr;
 	dma_mem->pfn_base = PFN_DOWN(phys_addr);
 	dma_mem->size = pages;
-	dma_mem->flags = flags;
 	spin_lock_init(&dma_mem->spinlock);
 	*mem = dma_mem;
@@ -110,12 +108,12 @@ static int dma_assign_coherent_memory(struct device *dev,
 }
 int dma_declare_coherent_memory(struct device *dev, phys_addr_t phys_addr,
-		dma_addr_t device_addr, size_t size, int flags)
+		dma_addr_t device_addr, size_t size)
 {
 	struct dma_coherent_mem *mem;
 	int ret;
-	ret = dma_init_coherent_memory(phys_addr, device_addr, size, flags, &mem);
+	ret = dma_init_coherent_memory(phys_addr, device_addr, size, &mem);
 	if (ret)
 		return ret;
@@ -190,15 +188,7 @@ int dma_alloc_from_dev_coherent(struct device *dev, ssize_t size,
 		return 0;
 	*ret = __dma_alloc_from_coherent(mem, size, dma_handle);
-	if (*ret)
-		return 1;
-	/*
-	 * In the case where the allocation can not be satisfied from the
-	 * per-device area, try to fall back to generic memory if the
-	 * constraints allow it.
-	 */
-	return mem->flags & DMA_MEMORY_EXCLUSIVE;
+	return 1;
 }
 void *dma_alloc_from_global_coherent(ssize_t size, dma_addr_t *dma_handle)
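The dma_alloc_from_dev_coherent() hunk above is the behavioural core of the patch: once a device has declared memory, the request is always handled by the per-device pool and never falls back to the system allocator. A rough caller-side sketch of what that implies (not the literal dma_alloc_attrs() code):

	void *cpu_addr;

	if (dma_alloc_from_dev_coherent(dev, size, dma_handle, &cpu_addr))
		return cpu_addr;	/* may be NULL: pool exhausted, no fallback */

	/* only devices without a declared region reach the normal allocator */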
@@ -327,8 +317,7 @@ static int rmem_dma_device_init(struct reserved_mem *rmem, struct device *dev)
 	if (!mem) {
 		ret = dma_init_coherent_memory(rmem->base, rmem->base,
-					       rmem->size,
-					       DMA_MEMORY_EXCLUSIVE, &mem);
+					       rmem->size, &mem);
 		if (ret) {
 			pr_err("Reserved memory: failed to init DMA memory pool at %pa, size %ld MiB\n",
 			       &rmem->base, (unsigned long)rmem->size / SZ_1M);
...