@@ -97,18 +97,24 @@ extern volatile unsigned long cr4_pin;
 
 static inline void native_write_cr4(unsigned long val)
 {
+	unsigned long warn = 0;
+
 again:
 	val |= cr4_pin;
 	asm volatile("mov %0,%%cr4": : "r" (val), "m" (__force_order));
 	/*
 	 * If the MOV above was used directly as a ROP gadget we can
 	 * notice the lack of pinned bits in "val" and start the function
-	 * from the beginning to gain the cr4_pin bits for sure.
+	 * from the beginning to gain the cr4_pin bits for sure. Note
+	 * that "val" must be volatile to keep the compiler from
+	 * optimizing away this check.
 	 */
-	if (WARN_ONCE((val & cr4_pin) != cr4_pin,
-		      "Attempt to unpin cr4 bits: %lx, cr4 bypass attack?!",
-		      ~val & cr4_pin))
+	if ((val & cr4_pin) != cr4_pin) {
+		warn = ~val & cr4_pin;
 		goto again;
+	}
+	WARN_ONCE(warn, "Attempt to unpin cr4 bits: %lx; bypass attack?!\n",
+		  warn);
 }
 
 #ifdef CONFIG_X86_64
Instead of taking a full WARN() exception before restoring a
potentially missed CR4 bit, this retains the missing bit for later
reporting. This matches the logic done for the CR0 pinning.

Additionally, this updates the comments to note the required use of
"volatile".

Suggested-by: Solar Designer <solar@openwall.com>
Signed-off-by: Kees Cook <keescook@chromium.org>
---
 arch/x86/include/asm/special_insns.h | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)
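For readers without the surrounding kernel tree handy, below is a
minimal userspace sketch of the same restore-first, warn-later pattern.
It is an illustration only: PIN_BITS, fake_cr4, write_reg(), and
warn_once() are invented stand-ins for cr4_pin, the CR4 register, the
native write, and WARN_ONCE(), not kernel APIs. Since plain C cannot
reproduce a ROP chain jumping straight at the register write, the sketch
rechecks the pinned bits through a volatile read-back, echoing the
patch's note that the check must not be optimized away.

	#include <stdio.h>
	#include <stdbool.h>

	/* Stand-in for cr4_pin: bits that must never be cleared. */
	static const unsigned long PIN_BITS = 0x00100000UL;

	/* Stand-in for the privileged register backing CR4; volatile
	 * so the compiler cannot prove the recheck below dead. */
	static volatile unsigned long fake_cr4;

	/* Minimal WARN_ONCE() analogue: report only the first hit. */
	static void warn_once(unsigned long missing)
	{
		static bool warned;

		if (missing && !warned) {
			warned = true;
			fprintf(stderr,
				"Attempt to unpin bits: %lx; bypass attack?!\n",
				missing);
		}
	}

	static void write_reg(unsigned long val)
	{
		unsigned long warn = 0;

	again:
		val |= PIN_BITS;	/* force pinned bits back on */
		fake_cr4 = val;		/* the "mov %0,%%cr4" stand-in */

		/*
		 * If control flow landed directly on the store (as a
		 * ROP gadget would), the pinned bits may be missing;
		 * record them and retry from the top.
		 */
		if ((fake_cr4 & PIN_BITS) != PIN_BITS) {
			warn = ~fake_cr4 & PIN_BITS;
			goto again;
		}
		/* Report only after the pinned bits are restored. */
		warn_once(warn);
	}

	int main(void)
	{
		write_reg(0x0UL);	/* simulates a pin-clearing write */
		printf("fake_cr4 = %lx\n", fake_cr4);
		return 0;
	}

In straight-line execution the retry branch never fires: val |= PIN_BITS
guarantees the bits are set before the store. The branch exists for the
case where an attacker enters mid-function at the register write, and
the point of the warn variable, as in the patch, is that the missed bits
are restored first and reported only afterwards, rather than taking the
warning exception while CR4 is still in the attacker-chosen state.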