diff --git a/arch/x86/kernel/cpu/common.c b/arch/x86/kernel/cpu/common.c
index 3e9037690814..2044d516f06f 100644
--- a/arch/x86/kernel/cpu/common.c
+++ b/arch/x86/kernel/cpu/common.c
@@ -423,24 +423,40 @@ void native_write_cr0(unsigned long val)
 }
 EXPORT_SYMBOL(native_write_cr0);
 
-void __no_profile native_write_cr4(unsigned long val)
+void __no_profile __native_write_cr4(unsigned long val, unsigned long *bits_changed)
 {
-	unsigned long bits_changed = 0;
-
 set_register:
	asm volatile("mov %0,%%cr4": "+r" (val) : : "memory");
 
	if (static_branch_likely(&cr_pinning)) {
		if (unlikely((val & cr4_pinned_mask) != cr4_pinned_bits)) {
-			bits_changed = (val & cr4_pinned_mask) ^ cr4_pinned_bits;
+			*bits_changed = (val & cr4_pinned_mask) ^ cr4_pinned_bits;
			val = (val & ~cr4_pinned_mask) | cr4_pinned_bits;
			goto set_register;
		}
-		/* Warn after we've corrected the changed bits. */
-		WARN_ONCE(bits_changed, "pinned CR4 bits changed: 0x%lx!?\n",
-			  bits_changed);
	}
 }
+
+void __no_profile native_write_cr4(unsigned long val)
+{
+	unsigned long bits_changed = 0;
+
+	__native_write_cr4(val, &bits_changed);
+
+	if (!bits_changed)
+		return;
+
+	WARN_ONCE(bits_changed, "pinned CR4 bits changed: 0x%lx!?\n",
+		  bits_changed);
+}
+
+void __no_profile native_write_cr4_nowarn(unsigned long val)
+{
+	unsigned long bits_changed = 0;
+
+	__native_write_cr4(val, &bits_changed);
+}
+
 #if IS_MODULE(CONFIG_LKDTM)
 EXPORT_SYMBOL_GPL(native_write_cr4);
 #endif
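
For illustration only, not part of the patch: a minimal sketch of how a caller might use the new native_write_cr4_nowarn() helper to exercise the CR4 pinning path without triggering the WARN_ONCE splat, in contrast to native_write_cr4(), which still warns. The helper name example_clear_smep_quietly() and the choice of X86_CR4_SMEP are hypothetical and are not introduced by this diff; the header locations are assumptions based on the usual x86 layout.

#include <asm/processor-flags.h>	/* X86_CR4_SMEP (assumed location) */
#include <asm/special_insns.h>		/* CR4 accessors (assumed location) */

/* Hypothetical test helper, not part of this patch. */
static void example_clear_smep_quietly(void)
{
	unsigned long cr4 = native_read_cr4();

	/*
	 * Try to drop a pinned bit. __native_write_cr4() forces it back on
	 * via the set_register retry; the _nowarn variant skips the warning.
	 */
	native_write_cr4_nowarn(cr4 & ~X86_CR4_SMEP);

	/* The warning variant would log "pinned CR4 bits changed" once. */
	native_write_cr4(cr4 & ~X86_CR4_SMEP);
}

Note that the diff only exports native_write_cr4() under CONFIG_LKDTM; a modular user of native_write_cr4_nowarn() would additionally need a declaration and an EXPORT_SYMBOL_GPL() of its own.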