Commit 702b94bf authored by Russell King

ARM: dma-mapping: remove dmac_clean_range and dmac_inv_range

These are now unused, and so can be removed.
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
Tested-By: Santosh Shilimkar <santosh.shilimkar@ti.com>
parent a9c9147e
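
These entry points are superseded by the dma_map_area/dma_unmap_area hooks that appear as context lines in the hunks below; those hooks take the buffer and a DMA direction, and each CPU's implementation decides whether to clean, invalidate, or do both. The following is a minimal sketch of how a caller expresses the same maintenance through the remaining interface; the helper names are hypothetical illustrations rather than code from this patch, and it assumes enum dma_data_direction comes from <linux/dma-mapping.h> and the dmac_* helpers from the header edited in the first hunks:

#include <linux/dma-mapping.h>		/* enum dma_data_direction */
#include <asm/cacheflush.h>		/* dmac_map_area(), dmac_unmap_area() */

/*
 * Hypothetical example: instead of calling dmac_clean_range() or
 * dmac_inv_range() directly, a caller hands the buffer and direction to
 * the map/unmap hooks, and the per-CPU dma_map_area/dma_unmap_area code
 * performs the appropriate clean and/or invalidate for that direction.
 */
static void example_sync_for_device(const void *kaddr, size_t size,
				    enum dma_data_direction dir)
{
	/* dir is DMA_TO_DEVICE, DMA_FROM_DEVICE or DMA_BIDIRECTIONAL */
	dmac_map_area(kaddr, size, dir);
}

static void example_sync_for_cpu(const void *kaddr, size_t size,
				 enum dma_data_direction dir)
{
	/* called once the device is done with the buffer */
	dmac_unmap_area(kaddr, size, dir);
}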
@@ -182,21 +182,6 @@
  * DMA Cache Coherency
  * ===================
  *
- * dma_inv_range(start, end)
- *
- * Invalidate (discard) the specified virtual address range.
- * May not write back any entries. If 'start' or 'end'
- * are not cache line aligned, those lines must be written
- * back.
- * - start - virtual start address
- * - end - virtual end address
- *
- * dma_clean_range(start, end)
- *
- * Clean (write back) the specified virtual address range.
- * - start - virtual start address
- * - end - virtual end address
- *
  * dma_flush_range(start, end)
  *
  * Clean and invalidate the specified virtual address range.
@@ -216,8 +201,6 @@ struct cpu_cache_fns {
  void (*dma_map_area)(const void *, size_t, int);
  void (*dma_unmap_area)(const void *, size_t, int);
- void (*dma_inv_range)(const void *, const void *);
- void (*dma_clean_range)(const void *, const void *);
  void (*dma_flush_range)(const void *, const void *);
  };
@@ -249,8 +232,6 @@ extern struct cpu_cache_fns cpu_cache;
  */
  #define dmac_map_area cpu_cache.dma_map_area
  #define dmac_unmap_area cpu_cache.dma_unmap_area
- #define dmac_inv_range cpu_cache.dma_inv_range
- #define dmac_clean_range cpu_cache.dma_clean_range
  #define dmac_flush_range cpu_cache.dma_flush_range
  #else
@@ -277,14 +258,10 @@ extern void __cpuc_flush_dcache_area(void *, size_t);
  */
  #define dmac_map_area __glue(_CACHE,_dma_map_area)
  #define dmac_unmap_area __glue(_CACHE,_dma_unmap_area)
- #define dmac_inv_range __glue(_CACHE,_dma_inv_range)
- #define dmac_clean_range __glue(_CACHE,_dma_clean_range)
  #define dmac_flush_range __glue(_CACHE,_dma_flush_range)
  extern void dmac_map_area(const void *, size_t, int);
  extern void dmac_unmap_area(const void *, size_t, int);
- extern void dmac_inv_range(const void *, const void *);
- extern void dmac_clean_range(const void *, const void *);
  extern void dmac_flush_range(const void *, const void *);
  #endif
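
The two halves of the macro hunks above correspond to the two dispatch models: with multiple cache types compiled in, the dmac_* names resolve at run time through the cpu_cache function table, otherwise __glue() pastes the compile-time cache name onto the function at build time. A rough, self-contained sketch of the compile-time case, assuming __glue is the usual two-level token-pasting helper (in the kernel it lives in <asm/glue.h>) and _CACHE expands to the selected cache type, here v7 purely as an example:

/* Hypothetical illustration, not code from this patch. */
#define ____glue(name, fn)	name##fn
#define __glue(name, fn)	____glue(name, fn)	/* expand _CACHE first, then paste */

#define _CACHE v7				/* e.g. a kernel built for one cache type */
#define dmac_flush_range	__glue(_CACHE, _dma_flush_range)

extern void v7_dma_flush_range(const void *start, const void *end);

static inline void flush_example(const void *start, const void *end)
{
	/* the macro above expands to v7_dma_flush_range(start, end) */
	dmac_flush_range(start, end);
}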
@@ -157,7 +157,7 @@ ENTRY(fa_flush_kern_dcache_area)
  * - start - virtual start address
  * - end - virtual end address
  */
- ENTRY(fa_dma_inv_range)
+ fa_dma_inv_range:
  tst r0, #CACHE_DLINESIZE - 1
  bic r0, r0, #CACHE_DLINESIZE - 1
  mcrne p15, 0, r0, c7, c14, 1 @ clean & invalidate D entry
@@ -180,7 +180,7 @@ ENTRY(fa_dma_inv_range)
  * - start - virtual start address
  * - end - virtual end address
  */
- ENTRY(fa_dma_clean_range)
+ fa_dma_clean_range:
  bic r0, r0, #CACHE_DLINESIZE - 1
  1: mcr p15, 0, r0, c7, c10, 1 @ clean D entry
  add r0, r0, #CACHE_DLINESIZE
@@ -241,7 +241,5 @@ ENTRY(fa_cache_fns)
  .long fa_flush_kern_dcache_area
  .long fa_dma_map_area
  .long fa_dma_unmap_area
- .long fa_dma_inv_range
- .long fa_dma_clean_range
  .long fa_dma_flush_range
  .size fa_cache_fns, . - fa_cache_fns
@@ -83,20 +83,6 @@ ENTRY(v3_coherent_user_range)
  ENTRY(v3_flush_kern_dcache_area)
  /* FALLTHROUGH */
- /*
- * dma_inv_range(start, end)
- *
- * Invalidate (discard) the specified virtual address range.
- * May not write back any entries. If 'start' or 'end'
- * are not cache line aligned, those lines must be written
- * back.
- *
- * - start - virtual start address
- * - end - virtual end address
- */
- ENTRY(v3_dma_inv_range)
- /* FALLTHROUGH */
  /*
  * dma_flush_range(start, end)
  *
@@ -108,17 +94,6 @@ ENTRY(v3_dma_inv_range)
  ENTRY(v3_dma_flush_range)
  mov r0, #0
  mcr p15, 0, r0, c7, c0, 0 @ flush ID cache
- /* FALLTHROUGH */
- /*
- * dma_clean_range(start, end)
- *
- * Clean (write back) the specified virtual address range.
- *
- * - start - virtual start address
- * - end - virtual end address
- */
- ENTRY(v3_dma_clean_range)
  mov pc, lr
  /*
@@ -129,7 +104,7 @@ ENTRY(v3_dma_clean_range)
  */
  ENTRY(v3_dma_unmap_area)
  teq r2, #DMA_TO_DEVICE
- bne v3_dma_inv_range
+ bne v3_dma_flush_range
  /* FALLTHROUGH */
  /*
@@ -155,7 +130,5 @@ ENTRY(v3_cache_fns)
  .long v3_flush_kern_dcache_area
  .long v3_dma_map_area
  .long v3_dma_unmap_area
- .long v3_dma_inv_range
- .long v3_dma_clean_range
  .long v3_dma_flush_range
  .size v3_cache_fns, . - v3_cache_fns
@@ -93,20 +93,6 @@ ENTRY(v4_coherent_user_range)
  ENTRY(v4_flush_kern_dcache_area)
  /* FALLTHROUGH */
- /*
- * dma_inv_range(start, end)
- *
- * Invalidate (discard) the specified virtual address range.
- * May not write back any entries. If 'start' or 'end'
- * are not cache line aligned, those lines must be written
- * back.
- *
- * - start - virtual start address
- * - end - virtual end address
- */
- ENTRY(v4_dma_inv_range)
- /* FALLTHROUGH */
  /*
  * dma_flush_range(start, end)
  *
@@ -120,17 +106,6 @@ ENTRY(v4_dma_flush_range)
  mov r0, #0
  mcr p15, 0, r0, c7, c7, 0 @ flush ID cache
  #endif
- /* FALLTHROUGH */
- /*
- * dma_clean_range(start, end)
- *
- * Clean (write back) the specified virtual address range.
- *
- * - start - virtual start address
- * - end - virtual end address
- */
- ENTRY(v4_dma_clean_range)
  mov pc, lr
  /*
@@ -141,7 +116,7 @@ ENTRY(v4_dma_clean_range)
  */
  ENTRY(v4_dma_unmap_area)
  teq r2, #DMA_TO_DEVICE
- bne v4_dma_inv_range
+ bne v4_dma_flush_range
  /* FALLTHROUGH */
  /*
@@ -167,7 +142,5 @@ ENTRY(v4_cache_fns)
  .long v4_flush_kern_dcache_area
  .long v4_dma_map_area
  .long v4_dma_unmap_area
- .long v4_dma_inv_range
- .long v4_dma_clean_range
  .long v4_dma_flush_range
  .size v4_cache_fns, . - v4_cache_fns
@@ -173,7 +173,7 @@ ENTRY(v4wb_coherent_user_range)
  * - start - virtual start address
  * - end - virtual end address
  */
- ENTRY(v4wb_dma_inv_range)
+ v4wb_dma_inv_range:
  tst r0, #CACHE_DLINESIZE - 1
  bic r0, r0, #CACHE_DLINESIZE - 1
  mcrne p15, 0, r0, c7, c10, 1 @ clean D entry
@@ -194,7 +194,7 @@ ENTRY(v4wb_dma_inv_range)
  * - start - virtual start address
  * - end - virtual end address
  */
- ENTRY(v4wb_dma_clean_range)
+ v4wb_dma_clean_range:
  bic r0, r0, #CACHE_DLINESIZE - 1
  1: mcr p15, 0, r0, c7, c10, 1 @ clean D entry
  add r0, r0, #CACHE_DLINESIZE
@@ -252,7 +252,5 @@ ENTRY(v4wb_cache_fns)
  .long v4wb_flush_kern_dcache_area
  .long v4wb_dma_map_area
  .long v4wb_dma_unmap_area
- .long v4wb_dma_inv_range
- .long v4wb_dma_clean_range
  .long v4wb_dma_flush_range
  .size v4wb_cache_fns, . - v4wb_cache_fns
@@ -142,23 +142,12 @@ ENTRY(v4wt_flush_kern_dcache_area)
  * - start - virtual start address
  * - end - virtual end address
  */
- ENTRY(v4wt_dma_inv_range)
+ v4wt_dma_inv_range:
  bic r0, r0, #CACHE_DLINESIZE - 1
  1: mcr p15, 0, r0, c7, c6, 1 @ invalidate D entry
  add r0, r0, #CACHE_DLINESIZE
  cmp r0, r1
  blo 1b
- /* FALLTHROUGH */
- /*
- * dma_clean_range(start, end)
- *
- * Clean the specified virtual address range.
- *
- * - start - virtual start address
- * - end - virtual end address
- */
- ENTRY(v4wt_dma_clean_range)
  mov pc, lr
  /*
@@ -207,7 +196,5 @@ ENTRY(v4wt_cache_fns)
  .long v4wt_flush_kern_dcache_area
  .long v4wt_dma_map_area
  .long v4wt_dma_unmap_area
- .long v4wt_dma_inv_range
- .long v4wt_dma_clean_range
  .long v4wt_dma_flush_range
  .size v4wt_cache_fns, . - v4wt_cache_fns
@@ -195,7 +195,7 @@ ENTRY(v6_flush_kern_dcache_area)
  * - start - virtual start address of region
  * - end - virtual end address of region
  */
- ENTRY(v6_dma_inv_range)
+ v6_dma_inv_range:
  tst r0, #D_CACHE_LINE_SIZE - 1
  bic r0, r0, #D_CACHE_LINE_SIZE - 1
  #ifdef HARVARD_CACHE
@@ -228,7 +228,7 @@ ENTRY(v6_dma_inv_range)
  * - start - virtual start address of region
  * - end - virtual end address of region
  */
- ENTRY(v6_dma_clean_range)
+ v6_dma_clean_range:
  bic r0, r0, #D_CACHE_LINE_SIZE - 1
  1:
  #ifdef HARVARD_CACHE
@@ -299,7 +299,5 @@ ENTRY(v6_cache_fns)
  .long v6_flush_kern_dcache_area
  .long v6_dma_map_area
  .long v6_dma_unmap_area
- .long v6_dma_inv_range
- .long v6_dma_clean_range
  .long v6_dma_flush_range
  .size v6_cache_fns, . - v6_cache_fns
@@ -216,7 +216,7 @@ ENDPROC(v7_flush_kern_dcache_area)
  * - start - virtual start address of region
  * - end - virtual end address of region
  */
- ENTRY(v7_dma_inv_range)
+ v7_dma_inv_range:
  dcache_line_size r2, r3
  sub r3, r2, #1
  tst r0, r3
@@ -240,7 +240,7 @@ ENDPROC(v7_dma_inv_range)
  * - start - virtual start address of region
  * - end - virtual end address of region
  */
- ENTRY(v7_dma_clean_range)
+ v7_dma_clean_range:
  dcache_line_size r2, r3
  sub r3, r2, #1
  bic r0, r0, r3
@@ -307,7 +307,5 @@ ENTRY(v7_cache_fns)
  .long v7_flush_kern_dcache_area
  .long v7_dma_map_area
  .long v7_dma_unmap_area
- .long v7_dma_inv_range
- .long v7_dma_clean_range
  .long v7_dma_flush_range
  .size v7_cache_fns, . - v7_cache_fns
@@ -265,7 +265,7 @@ ENTRY(arm1020_flush_kern_dcache_area)
  *
  * (same as v4wb)
  */
- ENTRY(arm1020_dma_inv_range)
+ arm1020_dma_inv_range:
  mov ip, #0
  #ifndef CONFIG_CPU_DCACHE_DISABLE
  tst r0, #CACHE_DLINESIZE - 1
@@ -295,7 +295,7 @@ ENTRY(arm1020_dma_inv_range)
  *
  * (same as v4wb)
  */
- ENTRY(arm1020_dma_clean_range)
+ arm1020_dma_clean_range:
  mov ip, #0
  #ifndef CONFIG_CPU_DCACHE_DISABLE
  bic r0, r0, #CACHE_DLINESIZE - 1
@@ -363,8 +363,6 @@ ENTRY(arm1020_cache_fns)
  .long arm1020_flush_kern_dcache_area
  .long arm1020_dma_map_area
  .long arm1020_dma_unmap_area
- .long arm1020_dma_inv_range
- .long arm1020_dma_clean_range
  .long arm1020_dma_flush_range
  .align 5
@@ -258,7 +258,7 @@ ENTRY(arm1020e_flush_kern_dcache_area)
  *
  * (same as v4wb)
  */
- ENTRY(arm1020e_dma_inv_range)
+ arm1020e_dma_inv_range:
  mov ip, #0
  #ifndef CONFIG_CPU_DCACHE_DISABLE
  tst r0, #CACHE_DLINESIZE - 1
@@ -284,7 +284,7 @@ ENTRY(arm1020e_dma_inv_range)
  *
  * (same as v4wb)
  */
- ENTRY(arm1020e_dma_clean_range)
+ arm1020e_dma_clean_range:
  mov ip, #0
  #ifndef CONFIG_CPU_DCACHE_DISABLE
  bic r0, r0, #CACHE_DLINESIZE - 1
@@ -349,8 +349,6 @@ ENTRY(arm1020e_cache_fns)
  .long arm1020e_flush_kern_dcache_area
  .long arm1020e_dma_map_area
  .long arm1020e_dma_unmap_area
- .long arm1020e_dma_inv_range
- .long arm1020e_dma_clean_range
  .long arm1020e_dma_flush_range
  .align 5
@@ -247,7 +247,7 @@ ENTRY(arm1022_flush_kern_dcache_area)
  *
  * (same as v4wb)
  */
- ENTRY(arm1022_dma_inv_range)
+ arm1022_dma_inv_range:
  mov ip, #0
  #ifndef CONFIG_CPU_DCACHE_DISABLE
  tst r0, #CACHE_DLINESIZE - 1
@@ -273,7 +273,7 @@ ENTRY(arm1022_dma_inv_range)
  *
  * (same as v4wb)
  */
- ENTRY(arm1022_dma_clean_range)
+ arm1022_dma_clean_range:
  mov ip, #0
  #ifndef CONFIG_CPU_DCACHE_DISABLE
  bic r0, r0, #CACHE_DLINESIZE - 1
@@ -338,8 +338,6 @@ ENTRY(arm1022_cache_fns)
  .long arm1022_flush_kern_dcache_area
  .long arm1022_dma_map_area
  .long arm1022_dma_unmap_area
- .long arm1022_dma_inv_range
- .long arm1022_dma_clean_range
  .long arm1022_dma_flush_range
  .align 5
@@ -241,7 +241,7 @@ ENTRY(arm1026_flush_kern_dcache_area)
  *
  * (same as v4wb)
  */
- ENTRY(arm1026_dma_inv_range)
+ arm1026_dma_inv_range:
  mov ip, #0
  #ifndef CONFIG_CPU_DCACHE_DISABLE
  tst r0, #CACHE_DLINESIZE - 1
@@ -267,7 +267,7 @@ ENTRY(arm1026_dma_inv_range)
  *
  * (same as v4wb)
  */
- ENTRY(arm1026_dma_clean_range)
+ arm1026_dma_clean_range:
  mov ip, #0
  #ifndef CONFIG_CPU_DCACHE_DISABLE
  bic r0, r0, #CACHE_DLINESIZE - 1
@@ -332,8 +332,6 @@ ENTRY(arm1026_cache_fns)
  .long arm1026_flush_kern_dcache_area
  .long arm1026_dma_map_area
  .long arm1026_dma_unmap_area
- .long arm1026_dma_inv_range
- .long arm1026_dma_clean_range
  .long arm1026_dma_flush_range
  .align 5
@@ -239,7 +239,7 @@ ENTRY(arm920_flush_kern_dcache_area)
  *
  * (same as v4wb)
  */
- ENTRY(arm920_dma_inv_range)
+ arm920_dma_inv_range:
  tst r0, #CACHE_DLINESIZE - 1
  bic r0, r0, #CACHE_DLINESIZE - 1
  mcrne p15, 0, r0, c7, c10, 1 @ clean D entry
@@ -262,7 +262,7 @@ ENTRY(arm920_dma_inv_range)
  *
  * (same as v4wb)
  */
- ENTRY(arm920_dma_clean_range)
+ arm920_dma_clean_range:
  bic r0, r0, #CACHE_DLINESIZE - 1
  1: mcr p15, 0, r0, c7, c10, 1 @ clean D entry
  add r0, r0, #CACHE_DLINESIZE
@@ -321,8 +321,6 @@ ENTRY(arm920_cache_fns)
  .long arm920_flush_kern_dcache_area
  .long arm920_dma_map_area
  .long arm920_dma_unmap_area
- .long arm920_dma_inv_range
- .long arm920_dma_clean_range
  .long arm920_dma_flush_range
  #endif
@@ -241,7 +241,7 @@ ENTRY(arm922_flush_kern_dcache_area)
  *
  * (same as v4wb)
  */
- ENTRY(arm922_dma_inv_range)
+ arm922_dma_inv_range:
  tst r0, #CACHE_DLINESIZE - 1
  bic r0, r0, #CACHE_DLINESIZE - 1
  mcrne p15, 0, r0, c7, c10, 1 @ clean D entry
@@ -264,7 +264,7 @@ ENTRY(arm922_dma_inv_range)
  *
  * (same as v4wb)
  */
- ENTRY(arm922_dma_clean_range)
+ arm922_dma_clean_range:
  bic r0, r0, #CACHE_DLINESIZE - 1
  1: mcr p15, 0, r0, c7, c10, 1 @ clean D entry
  add r0, r0, #CACHE_DLINESIZE
@@ -323,8 +323,6 @@ ENTRY(arm922_cache_fns)
  .long arm922_flush_kern_dcache_area
  .long arm922_dma_map_area
  .long arm922_dma_unmap_area
- .long arm922_dma_inv_range
- .long arm922_dma_clean_range
  .long arm922_dma_flush_range
  #endif
@@ -283,7 +283,7 @@ ENTRY(arm925_flush_kern_dcache_area)
  *
  * (same as v4wb)
  */
- ENTRY(arm925_dma_inv_range)
+ arm925_dma_inv_range:
  #ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
  tst r0, #CACHE_DLINESIZE - 1
  mcrne p15, 0, r0, c7, c10, 1 @ clean D entry
@@ -308,7 +308,7 @@ ENTRY(arm925_dma_inv_range)
  *
  * (same as v4wb)
  */
- ENTRY(arm925_dma_clean_range)
+ arm925_dma_clean_range:
  #ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
  bic r0, r0, #CACHE_DLINESIZE - 1
  1: mcr p15, 0, r0, c7, c10, 1 @ clean D entry
@@ -374,8 +374,6 @@ ENTRY(arm925_cache_fns)
  .long arm925_flush_kern_dcache_area
  .long arm925_dma_map_area
  .long arm925_dma_unmap_area
- .long arm925_dma_inv_range
- .long arm925_dma_clean_range
  .long arm925_dma_flush_range
  ENTRY(cpu_arm925_dcache_clean_area)
@@ -246,7 +246,7 @@ ENTRY(arm926_flush_kern_dcache_area)
  *
  * (same as v4wb)
  */
- ENTRY(arm926_dma_inv_range)
+ arm926_dma_inv_range:
  #ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
  tst r0, #CACHE_DLINESIZE - 1
  mcrne p15, 0, r0, c7, c10, 1 @ clean D entry
@@ -271,7 +271,7 @@ ENTRY(arm926_dma_inv_range)
  *
  * (same as v4wb)
  */
- ENTRY(arm926_dma_clean_range)
+ arm926_dma_clean_range:
  #ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
  bic r0, r0, #CACHE_DLINESIZE - 1
  1: mcr p15, 0, r0, c7, c10, 1 @ clean D entry
@@ -337,8 +337,6 @@ ENTRY(arm926_cache_fns)
  .long arm926_flush_kern_dcache_area
  .long arm926_dma_map_area
  .long arm926_dma_unmap_area
- .long arm926_dma_inv_range
- .long arm926_dma_clean_range
  .long arm926_dma_flush_range
  ENTRY(cpu_arm926_dcache_clean_area)
@@ -171,7 +171,7 @@ ENTRY(arm940_flush_kern_dcache_area)
  * - start - virtual start address
  * - end - virtual end address
  */
- ENTRY(arm940_dma_inv_range)
+ arm940_dma_inv_range:
  mov ip, #0
  mov r1, #(CACHE_DSEGMENTS - 1) << 4 @ 4 segments
  1: orr r3, r1, #(CACHE_DENTRIES - 1) << 26 @ 64 entries
@@ -192,7 +192,7 @@ ENTRY(arm940_dma_inv_range)
  * - start - virtual start address
  * - end - virtual end address
  */
- ENTRY(arm940_dma_clean_range)
+ arm940_dma_clean_range:
  ENTRY(cpu_arm940_dcache_clean_area)
  mov ip, #0
  #ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
@@ -266,8 +266,6 @@ ENTRY(arm940_cache_fns)
  .long arm940_flush_kern_dcache_area
  .long arm940_dma_map_area
  .long arm940_dma_unmap_area
- .long arm940_dma_inv_range
- .long arm940_dma_clean_range
  .long arm940_dma_flush_range
  __INIT
@@ -215,7 +215,7 @@ ENTRY(arm946_flush_kern_dcache_area)
  * - end - virtual end address
  * (same as arm926)
  */
- ENTRY(arm946_dma_inv_range)
+ arm946_dma_inv_range:
  #ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
  tst r0, #CACHE_DLINESIZE - 1
  mcrne p15, 0, r0, c7, c10, 1 @ clean D entry
@@ -240,7 +240,7 @@ ENTRY(arm946_dma_inv_range)
  *
  * (same as arm926)
  */
- ENTRY(arm946_dma_clean_range)
+ arm946_dma_clean_range:
  #ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
  bic r0, r0, #CACHE_DLINESIZE - 1
  1: mcr p15, 0, r0, c7, c10, 1 @ clean D entry
@@ -308,8 +308,6 @@ ENTRY(arm946_cache_fns)
  .long arm946_flush_kern_dcache_area
  .long arm946_dma_map_area
  .long arm946_dma_unmap_area
- .long arm946_dma_inv_range
- .long arm946_dma_clean_range
  .long arm946_dma_flush_range
@@ -274,7 +274,7 @@ ENTRY(feroceon_range_flush_kern_dcache_area)
  * (same as v4wb)
  */
  .align 5
- ENTRY(feroceon_dma_inv_range)
+ feroceon_dma_inv_range:
  tst r0, #CACHE_DLINESIZE - 1
  bic r0, r0, #CACHE_DLINESIZE - 1
  mcrne p15, 0, r0, c7, c10, 1 @ clean D entry
@@ -288,7 +288,7 @@ ENTRY(feroceon_dma_inv_range)
  mov pc, lr
  .align 5
- ENTRY(feroceon_range_dma_inv_range)
+ feroceon_range_dma_inv_range:
  mrs r2, cpsr
  tst r0, #CACHE_DLINESIZE - 1
  mcrne p15, 0, r0, c7, c10, 1 @ clean D entry
@@ -314,7 +314,7 @@ ENTRY(feroceon_range_dma_inv_range)
  * (same as v4wb)
  */
  .align 5
- ENTRY(feroceon_dma_clean_range)
+ feroceon_dma_clean_range:
  bic r0, r0, #CACHE_DLINESIZE - 1
  1: mcr p15, 0, r0, c7, c10, 1 @ clean D entry
  add r0, r0, #CACHE_DLINESIZE
@@ -324,7 +324,7 @@ ENTRY(feroceon_dma_clean_range)
  mov pc, lr
  .align 5
- ENTRY(feroceon_range_dma_clean_range)
+ feroceon_range_dma_clean_range:
  mrs r2, cpsr
  cmp r1, r0
  subne r1, r1, #1 @ top address is inclusive
@@ -414,8 +414,6 @@ ENTRY(feroceon_cache_fns)
  .long feroceon_flush_kern_dcache_area
  .long feroceon_dma_map_area
  .long feroceon_dma_unmap_area
- .long feroceon_dma_inv_range
- .long feroceon_dma_clean_range
  .long feroceon_dma_flush_range
  ENTRY(feroceon_range_cache_fns)
@@ -427,8 +425,6 @@ ENTRY(feroceon_range_cache_fns)
  .long feroceon_range_flush_kern_dcache_area
  .long feroceon_range_dma_map_area
  .long feroceon_dma_unmap_area
- .long feroceon_range_dma_inv_range
- .long feroceon_range_dma_clean_range
  .long feroceon_range_dma_flush_range
  .align 5
@@ -218,7 +218,7 @@ ENTRY(mohawk_flush_kern_dcache_area)
  *
  * (same as v4wb)
  */
- ENTRY(mohawk_dma_inv_range)
+ mohawk_dma_inv_range:
  tst r0, #CACHE_DLINESIZE - 1
  mcrne p15, 0, r0, c7, c10, 1 @ clean D entry
  tst r1, #CACHE_DLINESIZE - 1
@@ -241,7 +241,7 @@ ENTRY(mohawk_dma_inv_range)
  *
  * (same as v4wb)
  */
- ENTRY(mohawk_dma_clean_range)
+ mohawk_dma_clean_range:
  bic r0, r0, #CACHE_DLINESIZE - 1
  1: mcr p15, 0, r0, c7, c10, 1 @ clean D entry
  add r0, r0, #CACHE_DLINESIZE
@@ -301,8 +301,6 @@ ENTRY(mohawk_cache_fns)
  .long mohawk_flush_kern_dcache_area
  .long mohawk_dma_map_area
  .long mohawk_dma_unmap_area
- .long mohawk_dma_inv_range
- .long mohawk_dma_clean_range
  .long mohawk_dma_flush_range
  ENTRY(cpu_mohawk_dcache_clean_area)
@@ -257,7 +257,7 @@ ENTRY(xsc3_flush_kern_dcache_area)
  * - start - virtual start address
  * - end - virtual end address
  */
- ENTRY(xsc3_dma_inv_range)
+ xsc3_dma_inv_range:
  tst r0, #CACHELINESIZE - 1
  bic r0, r0, #CACHELINESIZE - 1
  mcrne p15, 0, r0, c7, c10, 1 @ clean L1 D line
@@ -278,7 +278,7 @@ ENTRY(xsc3_dma_inv_range)
  * - start - virtual start address
  * - end - virtual end address
  */
- ENTRY(xsc3_dma_clean_range)
+ xsc3_dma_clean_range:
  bic r0, r0, #CACHELINESIZE - 1
  1: mcr p15, 0, r0, c7, c10, 1 @ clean L1 D line
  add r0, r0, #CACHELINESIZE
@@ -337,8 +337,6 @@ ENTRY(xsc3_cache_fns)
  .long xsc3_flush_kern_dcache_area
  .long xsc3_dma_map_area
  .long xsc3_dma_unmap_area
- .long xsc3_dma_inv_range
- .long xsc3_dma_clean_range
  .long xsc3_dma_flush_range
  ENTRY(cpu_xsc3_dcache_clean_area)
@@ -315,7 +315,7 @@ ENTRY(xscale_flush_kern_dcache_area)
  * - start - virtual start address
  * - end - virtual end address
  */
- ENTRY(xscale_dma_inv_range)
+ xscale_dma_inv_range:
  tst r0, #CACHELINESIZE - 1
  bic r0, r0, #CACHELINESIZE - 1
  mcrne p15, 0, r0, c7, c10, 1 @ clean D entry
@@ -336,7 +336,7 @@ ENTRY(xscale_dma_inv_range)
  * - start - virtual start address
  * - end - virtual end address
  */
- ENTRY(xscale_dma_clean_range)
+ xscale_dma_clean_range:
  bic r0, r0, #CACHELINESIZE - 1
  1: mcr p15, 0, r0, c7, c10, 1 @ clean D entry
  add r0, r0, #CACHELINESIZE
@@ -409,8 +409,6 @@ ENTRY(xscale_cache_fns)
  .long xscale_flush_kern_dcache_area
  .long xscale_dma_map_area
  .long xscale_dma_unmap_area
- .long xscale_dma_inv_range
- .long xscale_dma_clean_range
  .long xscale_dma_flush_range
  /*
@@ -436,8 +434,6 @@ ENTRY(xscale_80200_A0_A1_cache_fns)
  .long xscale_dma_a0_map_area
  .long xscale_dma_unmap_area
  .long xscale_dma_flush_range
- .long xscale_dma_clean_range
- .long xscale_dma_flush_range
  ENTRY(cpu_xscale_dcache_clean_area)
  1: mcr p15, 0, r0, c7, c10, 1 @ clean D entry