Commit f2655125 authored by Christophe Leroy's avatar Christophe Leroy Committed by Michael Ellerman

powerpc/32s: Move _tlbie() and _tlbia() in a new file

_tlbie() and _tlbia() are used only on 603 cores while the
other functions are used only on cores having a hash table.

Move them into a new file named nohash_low.S

As the mmu_hash_lock variable is used by both, it needs to go
in a common file.
Signed-off-by: default avatarChristophe Leroy <christophe.leroy@csgroup.eu>
Signed-off-by: default avatarMichael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/9a265b1b17a64153463d361280cb4b43eb1266a4.1603348103.git.christophe.leroy@csgroup.eu
parent b91280f3
...@@ -6,4 +6,4 @@ ifdef CONFIG_KASAN ...@@ -6,4 +6,4 @@ ifdef CONFIG_KASAN
CFLAGS_mmu.o += -DDISABLE_BRANCH_PROFILING CFLAGS_mmu.o += -DDISABLE_BRANCH_PROFILING
endif endif
obj-y += mmu.o hash_low.o mmu_context.o tlb.o obj-y += mmu.o hash_low.o mmu_context.o tlb.o nohash_low.o
...@@ -26,13 +26,6 @@ ...@@ -26,13 +26,6 @@
#include <asm/feature-fixups.h> #include <asm/feature-fixups.h>
#include <asm/code-patching-asm.h> #include <asm/code-patching-asm.h>
#ifdef CONFIG_SMP
.section .bss
.align 2
mmu_hash_lock:
.space 4
#endif /* CONFIG_SMP */
/* /*
* Load a PTE into the hash table, if possible. * Load a PTE into the hash table, if possible.
* The address is in r4, and r3 contains an access flag: * The address is in r4, and r3 contains an access flag:
...@@ -618,74 +611,3 @@ _GLOBAL(flush_hash_pages) ...@@ -618,74 +611,3 @@ _GLOBAL(flush_hash_pages)
.previous .previous
EXPORT_SYMBOL(flush_hash_pages) EXPORT_SYMBOL(flush_hash_pages)
_ASM_NOKPROBE_SYMBOL(flush_hash_pages) _ASM_NOKPROBE_SYMBOL(flush_hash_pages)
/*
 * Flush an entry from the TLB
 *
 * void _tlbie(unsigned long va)
 * In:       r3 = effective address whose TLB entry is to be invalidated
 * Clobbers: r0, r7, r8, r9, r10, cr0
 *
 * Built for SMP only: the tlbie is issued under the global mmu_hash_lock
 * (on PowerPC only one CPU at a time may execute tlbie).  MSR_EE is
 * cleared so we cannot be interrupted while holding the lock, and MSR_DR
 * is cleared so the lock word can be accessed through its physical
 * address (tophys) with data translation off.
 */
#ifdef CONFIG_SMP
_GLOBAL(_tlbie)
lwz r8,TASK_CPU(r2) /* r8 = this CPU's number (r2 = current task) */
oris r8,r8,11 /* lock token: cpu# | (11 << 16); NOTE(review): tags the lock owner/call site - confirm */
mfmsr r10 /* save MSR so it can be restored on exit */
rlwinm r0,r10,0,17,15 /* clear bit 16 (MSR_EE) */
rlwinm r0,r0,0,28,26 /* clear DR */
mtmsr r0
isync
lis r9,mmu_hash_lock@h
ori r9,r9,mmu_hash_lock@l
tophys(r9,r9) /* r9 = physical address of mmu_hash_lock */
10: lwarx r7,0,r9 /* spin until the lock word reads 0 ... */
cmpwi 0,r7,0
bne- 10b
stwcx. r8,0,r9 /* ... then store our token; retry if reservation lost */
bne- 10b
eieio /* order lock acquisition before the tlbie */
tlbie r3 /* invalidate the TLB entry for EA in r3 */
sync
TLBSYNC /* wait for the invalidation to complete on all CPUs */
li r0,0
stw r0,0(r9) /* clear mmu_hash_lock */
mtmsr r10 /* restore original MSR (re-enables EE/DR as before) */
isync
blr
_ASM_NOKPROBE_SYMBOL(_tlbie)
#endif /* CONFIG_SMP */
/*
 * Flush the entire TLB. 603/603e only
 *
 * void _tlbia(void)
 * Clobbers: r4, r5, ctr, cr0 (plus r0, r7, r8, r9, r10 on SMP)
 *
 * The TLB is invalidated by issuing tlbie on 32 consecutive 4KB pages
 * starting at KERNELBASE.  NOTE(review): this assumes 32 x 4KB of EAs
 * index every TLB congruence class on the 603 - confirm against the
 * 603e user's manual.  On SMP the sequence runs under mmu_hash_lock
 * with MSR_EE and MSR_DR cleared (lock word accessed via its physical
 * address), since only one CPU at a time may execute tlbie.
 */
_GLOBAL(_tlbia)
#if defined(CONFIG_SMP)
lwz r8,TASK_CPU(r2) /* r8 = this CPU's number (r2 = current task) */
oris r8,r8,10 /* lock token: cpu# | (10 << 16); NOTE(review): tags the lock owner/call site - confirm */
mfmsr r10 /* save MSR so it can be restored on exit */
rlwinm r0,r10,0,17,15 /* clear bit 16 (MSR_EE) */
rlwinm r0,r0,0,28,26 /* clear DR */
mtmsr r0
isync
lis r9,mmu_hash_lock@h
ori r9,r9,mmu_hash_lock@l
tophys(r9,r9) /* r9 = physical address of mmu_hash_lock */
10: lwarx r7,0,r9 /* spin until the lock word reads 0 ... */
cmpwi 0,r7,0
bne- 10b
stwcx. r8,0,r9 /* ... then store our token; retry if reservation lost */
bne- 10b
#endif /* CONFIG_SMP */
li r5, 32 /* 32 pages ... */
lis r4, KERNELBASE@h /* ... starting at KERNELBASE */
mtctr r5
sync
0: tlbie r4 /* invalidate entry for this EA */
addi r4, r4, 0x1000 /* next 4KB page */
bdnz 0b
sync
#ifdef CONFIG_SMP
TLBSYNC /* wait for the invalidations to complete on all CPUs */
li r0,0
stw r0,0(r9) /* clear mmu_hash_lock */
mtmsr r10 /* restore original MSR (re-enables EE/DR as before) */
isync
#endif /* CONFIG_SMP */
blr
_ASM_NOKPROBE_SYMBOL(_tlbia)
...@@ -46,6 +46,10 @@ static struct batrange { /* stores address ranges mapped by BATs */ ...@@ -46,6 +46,10 @@ static struct batrange { /* stores address ranges mapped by BATs */
phys_addr_t phys; phys_addr_t phys;
} bat_addrs[8]; } bat_addrs[8];
#ifdef CONFIG_SMP
unsigned long mmu_hash_lock;
#endif
/* /*
* Return PA for this VA if it is mapped by a BAT, or 0 * Return PA for this VA if it is mapped by a BAT, or 0
*/ */
......
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
* This file contains low-level assembler routines for managing
* the PowerPC 603 tlb invalidation.
*/
#include <asm/page.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
/*
 * Flush an entry from the TLB
 *
 * void _tlbie(unsigned long va)
 * In:       r3 = effective address whose TLB entry is to be invalidated
 * Clobbers: r0, r7, r8, r9, r10, cr0
 *
 * Built for SMP only: the tlbie is issued under the global mmu_hash_lock
 * (on PowerPC only one CPU at a time may execute tlbie).  MSR_EE is
 * cleared so we cannot be interrupted while holding the lock, and MSR_DR
 * is cleared so the lock word can be accessed through its physical
 * address (tophys) with data translation off.
 */
#ifdef CONFIG_SMP
_GLOBAL(_tlbie)
lwz r8,TASK_CPU(r2) /* r8 = this CPU's number (r2 = current task) */
oris r8,r8,11 /* lock token: cpu# | (11 << 16); NOTE(review): tags the lock owner/call site - confirm */
mfmsr r10 /* save MSR so it can be restored on exit */
rlwinm r0,r10,0,17,15 /* clear bit 16 (MSR_EE) */
rlwinm r0,r0,0,28,26 /* clear DR */
mtmsr r0
isync
lis r9,mmu_hash_lock@h
ori r9,r9,mmu_hash_lock@l
tophys(r9,r9) /* r9 = physical address of mmu_hash_lock */
10: lwarx r7,0,r9 /* spin until the lock word reads 0 ... */
cmpwi 0,r7,0
bne- 10b
stwcx. r8,0,r9 /* ... then store our token; retry if reservation lost */
bne- 10b
eieio /* order lock acquisition before the tlbie */
tlbie r3 /* invalidate the TLB entry for EA in r3 */
sync
TLBSYNC /* wait for the invalidation to complete on all CPUs */
li r0,0
stw r0,0(r9) /* clear mmu_hash_lock */
mtmsr r10 /* restore original MSR (re-enables EE/DR as before) */
isync
blr
_ASM_NOKPROBE_SYMBOL(_tlbie)
#endif /* CONFIG_SMP */
/*
 * Flush the entire TLB. 603/603e only
 *
 * void _tlbia(void)
 * Clobbers: r4, r5, ctr, cr0 (plus r0, r7, r8, r9, r10 on SMP)
 *
 * The TLB is invalidated by issuing tlbie on 32 consecutive 4KB pages
 * starting at KERNELBASE.  NOTE(review): this assumes 32 x 4KB of EAs
 * index every TLB congruence class on the 603 - confirm against the
 * 603e user's manual.  On SMP the sequence runs under mmu_hash_lock
 * with MSR_EE and MSR_DR cleared (lock word accessed via its physical
 * address), since only one CPU at a time may execute tlbie.
 */
_GLOBAL(_tlbia)
#if defined(CONFIG_SMP)
lwz r8,TASK_CPU(r2) /* r8 = this CPU's number (r2 = current task) */
oris r8,r8,10 /* lock token: cpu# | (10 << 16); NOTE(review): tags the lock owner/call site - confirm */
mfmsr r10 /* save MSR so it can be restored on exit */
rlwinm r0,r10,0,17,15 /* clear bit 16 (MSR_EE) */
rlwinm r0,r0,0,28,26 /* clear DR */
mtmsr r0
isync
lis r9,mmu_hash_lock@h
ori r9,r9,mmu_hash_lock@l
tophys(r9,r9) /* r9 = physical address of mmu_hash_lock */
10: lwarx r7,0,r9 /* spin until the lock word reads 0 ... */
cmpwi 0,r7,0
bne- 10b
stwcx. r8,0,r9 /* ... then store our token; retry if reservation lost */
bne- 10b
#endif /* CONFIG_SMP */
li r5, 32 /* 32 pages ... */
lis r4, KERNELBASE@h /* ... starting at KERNELBASE */
mtctr r5
sync
0: tlbie r4 /* invalidate entry for this EA */
addi r4, r4, 0x1000 /* next 4KB page */
bdnz 0b
sync
#ifdef CONFIG_SMP
TLBSYNC /* wait for the invalidations to complete on all CPUs */
li r0,0
stw r0,0(r9) /* clear mmu_hash_lock */
mtmsr r10 /* restore original MSR (re-enables EE/DR as before) */
isync
#endif /* CONFIG_SMP */
blr
_ASM_NOKPROBE_SYMBOL(_tlbia)
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment