--- a/arch/x86/include/asm/smp.h
+++ b/arch/x86/include/asm/smp.h
@@ -112,6 +112,7 @@ void native_play_dead(void);
void play_dead_common(void);
void wbinvd_on_cpu(int cpu);
int wbinvd_on_all_cpus(void);
+int wbnoinvd_on_all_cpus(void);
void smp_kick_mwait_play_dead(void);
@@ -160,6 +161,12 @@ static inline int wbinvd_on_all_cpus(void)
return 0;
}
+static inline int wbnoinvd_on_all_cpus(void)
+{
+ wbnoinvd();
+ return 0;
+}
+
static inline struct cpumask *cpu_llc_shared_mask(int cpu)
{
return (struct cpumask *)cpumask_of(0);
--- a/arch/x86/include/asm/special_insns.h
+++ b/arch/x86/include/asm/special_insns.h
@@ -117,7 +117,24 @@ static inline void wrpkru(u32 pkru)
static __always_inline void wbinvd(void)
{
- asm volatile("wbinvd": : :"memory");
+ asm volatile("wbinvd" : : : "memory");
+}
+
+/* Raw encoding: older binutils do not know the "wbnoinvd" mnemonic. */
+#define WBNOINVD ".byte 0xf3,0x0f,0x09"
+
+/*
+ * Cheaper version of wbinvd(). Call when caches
+ * need to be written back but not invalidated.
+ */
+static __always_inline void wbnoinvd(void)
+{
+ /*
+ * If WBNOINVD is unavailable, fall back to the compatible but
+ * more destructive WBINVD (which still writes the caches back
+ * but also invalidates them).
+ */
+ alternative("wbinvd", WBNOINVD, X86_FEATURE_WBNOINVD);
}
static inline unsigned long __read_cr4(void)
--- a/arch/x86/lib/cache-smp.c
+++ b/arch/x86/lib/cache-smp.c
@@ -20,3 +20,15 @@ int wbinvd_on_all_cpus(void)
return 0;
}
EXPORT_SYMBOL(wbinvd_on_all_cpus);
+
+static void __wbnoinvd(void *dummy)
+{
+ wbnoinvd();
+}
+
+int wbnoinvd_on_all_cpus(void)
+{
+ on_each_cpu(__wbnoinvd, NULL, 1);
+ return 0;
+}
+EXPORT_SYMBOL(wbnoinvd_on_all_cpus);
In line with WBINVD usage, add WBNOINVD helper functions. For the
wbnoinvd() helper, fall back to WBINVD via alternative() if
X86_FEATURE_WBNOINVD is not present. alternative() ensures
compatibility with early boot code if needed.

Signed-off-by: Kevin Loughlin <kevinloughlin@google.com>
---
 arch/x86/include/asm/smp.h           |  7 +++++++
 arch/x86/include/asm/special_insns.h | 19 ++++++++++++++++++-
 arch/x86/lib/cache-smp.c             | 12 ++++++++++++
 3 files changed, 37 insertions(+), 1 deletion(-)
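
As a usage illustration (hypothetical, not part of this patch): a caller
that today relies on wbinvd_on_all_cpus() but only needs dirty lines
written back, not invalidated, could switch to the new helper. The
function name below is invented for the sketch:

	/*
	 * Sketch only: write back dirty cache lines on every CPU before
	 * reusing a memory region, keeping still-valid cached data intact.
	 */
	static int example_writeback_all_caches(void)
	{
		/*
		 * on_each_cpu(..., 1) inside wbnoinvd_on_all_cpus() waits
		 * for the callback to finish on all CPUs, so the writeback
		 * is complete when this returns. Returning 0 matches the
		 * wbinvd_on_all_cpus() convention.
		 */
		return wbnoinvd_on_all_cpus();
	}

On CPUs without X86_FEATURE_WBNOINVD, the alternative() in wbnoinvd()
leaves the WBINVD fallback in place, so a caller like the above is safe
anywhere WBINVD is.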