diff --git a/arch/arm64/Kconfig b/arch/arm64/Kconfig
index 5d91259ee7b5..b6e8920364de 100644
--- a/arch/arm64/Kconfig
+++ b/arch/arm64/Kconfig
@@ -1649,6 +1649,7 @@ config RODATA_FULL_DEFAULT_ENABLED
 
 config ARM64_SW_TTBR0_PAN
 	bool "Emulate Privileged Access Never using TTBR0_EL1 switching"
+	depends on !KCSAN
 	help
 	  Enabling this option prevents the kernel from accessing
 	  user-space memory directly by pointing TTBR0_EL1 to a reserved
diff --git a/arch/arm64/include/asm/runtime-const.h b/arch/arm64/include/asm/runtime-const.h
index 8dc83d48a202..dde4c11ec0d5 100644
--- a/arch/arm64/include/asm/runtime-const.h
+++ b/arch/arm64/include/asm/runtime-const.h
@@ -50,6 +50,12 @@ static inline void __runtime_fixup_16(__le32 *p, unsigned int val)
 	*p = cpu_to_le32(insn);
 }
 
+static inline void __runtime_fixup_caches(void *where, unsigned int insns)
+{
+	unsigned long va = (unsigned long)where;
+	caches_clean_inval_pou(va, va + 4*insns);
+}
+
 static inline void __runtime_fixup_ptr(void *where, unsigned long val)
 {
 	__le32 *p = lm_alias(where);
@@ -57,7 +63,7 @@ static inline void __runtime_fixup_ptr(void *where, unsigned long val)
 	__runtime_fixup_16(p+1, val >> 16);
 	__runtime_fixup_16(p+2, val >> 32);
 	__runtime_fixup_16(p+3, val >> 48);
-	caches_clean_inval_pou((unsigned long)p, (unsigned long)(p + 4));
+	__runtime_fixup_caches(where, 4);
 }
 
 /* Immediate value is 6 bits starting at bit #16 */
@@ -68,7 +74,7 @@ static inline void __runtime_fixup_shift(void *where, unsigned long val)
 	insn &= 0xffc0ffff;
 	insn |= (val & 63) << 16;
 	*p = cpu_to_le32(insn);
-	caches_clean_inval_pou((unsigned long)p, (unsigned long)(p + 1));
+	__runtime_fixup_caches(where, 1);
 }
 
 static inline void runtime_const_fixup(void (*fn)(void *, unsigned long),
diff --git a/arch/arm64/include/asm/uaccess.h b/arch/arm64/include/asm/uaccess.h
index 37abd893c6ee..1f21190d4db5 100644
--- a/arch/arm64/include/asm/uaccess.h
+++ b/arch/arm64/include/asm/uaccess.h
@@ -424,6 +424,14 @@ static __must_check __always_inline bool user_access_begin(const void __user *pt
 #define unsafe_get_user(x, ptr, label) \
 	__raw_get_mem("ldtr", x, uaccess_mask_ptr(ptr), label, U)
 
+/*
+ * KCSAN uses these to save and restore ttbr state.
+ * We do not support KCSAN with ARM64_SW_TTBR0_PAN, so
+ * they are no-ops.
+ */
+static inline unsigned long user_access_save(void) { return 0; }
+static inline void user_access_restore(unsigned long enabled) { }
+
 /*
  * We want the unsafe accessors to always be inlined and use
  * the error labels - thus the macro games.