Commit 0482b505 authored by Christoffer Dall

arm64: mm: Add additional parameter to uaccess_ttbr0_disable

Add an extra temporary register parameter to uaccess_ttbr0_disable which
is about to be required for arm64 PAN support.

This patch doesn't introduce any functional change but ensures that the
kernel compiles once the KVM/ARM tree is merged with the arm64 tree by
ensuring a trivially mergeable conflict with commit
6b88a32c
("arm64: kpti: Fix the interaction between ASID switching and software PAN").

Cc: Will Deacon <will.deacon@arm.com>
Acked-by: Catalin Marinas <catalin.marinas@arm.com>
Acked-by: Marc Zyngier <marc.zyngier@arm.com>
Signed-off-by: Christoffer Dall <christoffer.dall@linaro.org>
parent 448fadc8
...@@ -25,7 +25,7 @@ ...@@ -25,7 +25,7 @@
isb isb
.endm .endm
.macro uaccess_ttbr0_disable, tmp1 .macro uaccess_ttbr0_disable, tmp1, tmp2
alternative_if_not ARM64_HAS_PAN alternative_if_not ARM64_HAS_PAN
__uaccess_ttbr0_disable \tmp1 __uaccess_ttbr0_disable \tmp1
alternative_else_nop_endif alternative_else_nop_endif
...@@ -39,7 +39,7 @@ alternative_if_not ARM64_HAS_PAN ...@@ -39,7 +39,7 @@ alternative_if_not ARM64_HAS_PAN
alternative_else_nop_endif alternative_else_nop_endif
.endm .endm
#else #else
.macro uaccess_ttbr0_disable, tmp1 .macro uaccess_ttbr0_disable, tmp1, tmp2
.endm .endm
.macro uaccess_ttbr0_enable, tmp1, tmp2, tmp3 .macro uaccess_ttbr0_enable, tmp1, tmp2, tmp3
...@@ -49,8 +49,8 @@ alternative_else_nop_endif ...@@ -49,8 +49,8 @@ alternative_else_nop_endif
/* /*
* These macros are no-ops when UAO is present. * These macros are no-ops when UAO is present.
*/ */
.macro uaccess_disable_not_uao, tmp1 .macro uaccess_disable_not_uao, tmp1, tmp2
uaccess_ttbr0_disable \tmp1 uaccess_ttbr0_disable \tmp1, \tmp2
alternative_if ARM64_ALT_PAN_NOT_UAO alternative_if ARM64_ALT_PAN_NOT_UAO
SET_PSTATE_PAN(1) SET_PSTATE_PAN(1)
alternative_else_nop_endif alternative_else_nop_endif
......
...@@ -50,7 +50,7 @@ uao_user_alternative 9f, strh, sttrh, wzr, x0, 2 ...@@ -50,7 +50,7 @@ uao_user_alternative 9f, strh, sttrh, wzr, x0, 2
b.mi 5f b.mi 5f
uao_user_alternative 9f, strb, sttrb, wzr, x0, 0 uao_user_alternative 9f, strb, sttrb, wzr, x0, 0
5: mov x0, #0 5: mov x0, #0
uaccess_disable_not_uao x2 uaccess_disable_not_uao x2, x3
ret ret
ENDPROC(__clear_user) ENDPROC(__clear_user)
......
...@@ -67,7 +67,7 @@ ENTRY(__arch_copy_from_user) ...@@ -67,7 +67,7 @@ ENTRY(__arch_copy_from_user)
uaccess_enable_not_uao x3, x4 uaccess_enable_not_uao x3, x4
add end, x0, x2 add end, x0, x2
#include "copy_template.S" #include "copy_template.S"
uaccess_disable_not_uao x3 uaccess_disable_not_uao x3, x4
mov x0, #0 // Nothing to copy mov x0, #0 // Nothing to copy
ret ret
ENDPROC(__arch_copy_from_user) ENDPROC(__arch_copy_from_user)
......
...@@ -68,7 +68,7 @@ ENTRY(raw_copy_in_user) ...@@ -68,7 +68,7 @@ ENTRY(raw_copy_in_user)
uaccess_enable_not_uao x3, x4 uaccess_enable_not_uao x3, x4
add end, x0, x2 add end, x0, x2
#include "copy_template.S" #include "copy_template.S"
uaccess_disable_not_uao x3 uaccess_disable_not_uao x3, x4
mov x0, #0 mov x0, #0
ret ret
ENDPROC(raw_copy_in_user) ENDPROC(raw_copy_in_user)
......
...@@ -66,7 +66,7 @@ ENTRY(__arch_copy_to_user) ...@@ -66,7 +66,7 @@ ENTRY(__arch_copy_to_user)
uaccess_enable_not_uao x3, x4 uaccess_enable_not_uao x3, x4
add end, x0, x2 add end, x0, x2
#include "copy_template.S" #include "copy_template.S"
uaccess_disable_not_uao x3 uaccess_disable_not_uao x3, x4
mov x0, #0 mov x0, #0
ret ret
ENDPROC(__arch_copy_to_user) ENDPROC(__arch_copy_to_user)
......
...@@ -63,7 +63,7 @@ user_alt 9f, "dc cvau, x4", "dc civac, x4", ARM64_WORKAROUND_CLEAN_CACHE ...@@ -63,7 +63,7 @@ user_alt 9f, "dc cvau, x4", "dc civac, x4", ARM64_WORKAROUND_CLEAN_CACHE
invalidate_icache_by_line x0, x1, x2, x3, 9f invalidate_icache_by_line x0, x1, x2, x3, 9f
mov x0, #0 mov x0, #0
1: 1:
uaccess_ttbr0_disable x1 uaccess_ttbr0_disable x1, x2
ret ret
9: 9:
mov x0, #-EFAULT mov x0, #-EFAULT
...@@ -85,7 +85,7 @@ ENTRY(invalidate_icache_range) ...@@ -85,7 +85,7 @@ ENTRY(invalidate_icache_range)
invalidate_icache_by_line x0, x1, x2, x3, 2f invalidate_icache_by_line x0, x1, x2, x3, 2f
mov x0, xzr mov x0, xzr
1: 1:
uaccess_ttbr0_disable x1 uaccess_ttbr0_disable x1, x2
ret ret
2: 2:
mov x0, #-EFAULT mov x0, #-EFAULT
......
...@@ -107,6 +107,6 @@ ENTRY(privcmd_call) ...@@ -107,6 +107,6 @@ ENTRY(privcmd_call)
/* /*
* Disable userspace access from kernel once the hyp call completed. * Disable userspace access from kernel once the hyp call completed.
*/ */
uaccess_ttbr0_disable x6 uaccess_ttbr0_disable x6, x7
ret ret
ENDPROC(privcmd_call); ENDPROC(privcmd_call);
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment