@@ -423,6 +423,7 @@ do { \
({ \
int __gu_err; \
unsigned long __gu_val; \
+ barrier_nospec(); \
__get_user_size(__gu_val, (ptr), (size), __gu_err, -EFAULT); \
(x) = (__force __typeof__(*(ptr)))__gu_val; \
__gu_err; \
@@ -529,7 +530,7 @@ struct __large_struct { unsigned long bu
* get_user_ex(...);
* } get_user_catch(err)
*/
-#define get_user_try uaccess_try
+#define get_user_try uaccess_try_nospec
#define get_user_catch(err) uaccess_catch(err)

#define get_user_ex(x, ptr) do { \
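Not part of the patch: a minimal user-space sketch of the Spectre variant 1 shape the uaccess.h hunks above close. The user-supplied pointer has already passed an access_ok()-style limit check, but the CPU can speculate past that branch and issue the dependent loads anyway. All names here (table, table_size, probe, gadget) are illustrative, not kernel symbols.

#include <stddef.h>
#include <stdint.h>

static uint8_t table[16];
static size_t  table_size = sizeof(table);
static uint8_t probe[256 * 4096];

uint8_t gadget(size_t index)
{
	if (index < table_size) {
		/* Both loads below may execute speculatively while the
		 * bounds check above is still unresolved, so an
		 * out-of-bounds table[index] can leave a measurable
		 * cache footprint in probe[]. */
		uint8_t secret = table[index];
		return probe[secret * 4096];
	}
	return 0;
}

Placing barrier_nospec() between the check and the load, as the hunks above do, keeps those loads from issuing until the comparison has actually retired.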
@@ -48,14 +48,17 @@ __copy_to_user_inatomic(void __user *to,

switch (n) {
case 1:
+ barrier_nospec();
__put_user_size(*(u8 *)from, (u8 __user *)to,
1, ret, 1);
return ret;
case 2:
+ barrier_nospec();
__put_user_size(*(u16 *)from, (u16 __user *)to,
2, ret, 2);
return ret;
case 4:
+ barrier_nospec();
__put_user_size(*(u32 *)from, (u32 __user *)to,
4, ret, 4);
return ret;
@@ -98,12 +101,15 @@ __copy_from_user_inatomic(void *to, cons

switch (n) {
case 1:
+ barrier_nospec();
__get_user_size(*(u8 *)to, from, 1, ret, 1);
return ret;
case 2:
+ barrier_nospec();
__get_user_size(*(u16 *)to, from, 2, ret, 2);
return ret;
case 4:
+ barrier_nospec();
__get_user_size(*(u32 *)to, from, 4, ret, 4);
return ret;
}
@@ -142,12 +148,15 @@ __copy_from_user(void *to, const void __

switch (n) {
case 1:
+ barrier_nospec();
__get_user_size(*(u8 *)to, from, 1, ret, 1);
return ret;
case 2:
+ barrier_nospec();
__get_user_size(*(u16 *)to, from, 2, ret, 2);
return ret;
case 4:
+ barrier_nospec();
__get_user_size(*(u32 *)to, from, 4, ret, 4);
return ret;
}
@@ -164,12 +173,15 @@ static __always_inline unsigned long __c

switch (n) {
case 1:
+ barrier_nospec();
__get_user_size(*(u8 *)to, from, 1, ret, 1);
return ret;
case 2:
+ barrier_nospec();
__get_user_size(*(u16 *)to, from, 2, ret, 2);
return ret;
case 4:
+ barrier_nospec();
__get_user_size(*(u32 *)to, from, 4, ret, 4);
return ret;
}
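Not part of the patch: on x86, barrier_nospec() amounts, roughly, to a serializing fence (an lfence or mfence patched in through the alternatives mechanism) sitting between the pointer check and the user access. Below is a standalone approximation of the placement used in the fixed-size __copy_from_user() cases above; speculation_barrier() and checked_read() are made-up names, not kernel API.

#include <stdint.h>

/* Unconditional lfence stand-in for barrier_nospec(). */
static inline void speculation_barrier(void)
{
	__asm__ __volatile__("lfence" ::: "memory");
}

/* Check, fence, then the dependent load, mirroring the case bodies above. */
static inline int checked_read(const uint8_t *buf, unsigned long len,
			       unsigned long idx, uint8_t *out)
{
	if (idx >= len)
		return -1;
	speculation_barrier();	/* idx is bounded before the load can issue */
	*out = buf[idx];
	return 0;
}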
@@ -75,19 +75,28 @@ int __copy_from_user_nocheck(void *dst,
if (!__builtin_constant_p(size))
return copy_user_generic(dst, (__force void *)src, size);
switch (size) {
- case 1:__get_user_asm(*(u8 *)dst, (u8 __user *)src,
+ case 1:
+ barrier_nospec();
+ __get_user_asm(*(u8 *)dst, (u8 __user *)src,
ret, "b", "b", "=q", 1);
return ret;
- case 2:__get_user_asm(*(u16 *)dst, (u16 __user *)src,
+ case 2:
+ barrier_nospec();
+ __get_user_asm(*(u16 *)dst, (u16 __user *)src,
ret, "w", "w", "=r", 2);
return ret;
- case 4:__get_user_asm(*(u32 *)dst, (u32 __user *)src,
+ case 4:
+ barrier_nospec();
+ __get_user_asm(*(u32 *)dst, (u32 __user *)src,
ret, "l", "k", "=r", 4);
return ret;
- case 8:__get_user_asm(*(u64 *)dst, (u64 __user *)src,
+ case 8:
+ barrier_nospec();
+ __get_user_asm(*(u64 *)dst, (u64 __user *)src,
ret, "q", "", "=r", 8);
return ret;
case 10:
+ barrier_nospec();
__get_user_asm(*(u64 *)dst, (u64 __user *)src,
ret, "q", "", "=r", 10);
if (unlikely(ret))
@@ -97,6 +106,7 @@ int __copy_from_user_nocheck(void *dst,
ret, "w", "w", "=r", 2);
return ret;
case 16:
+ barrier_nospec();
__get_user_asm(*(u64 *)dst, (u64 __user *)src,
ret, "q", "", "=r", 16);
if (unlikely(ret))
@@ -179,6 +189,7 @@ int __copy_in_user(void __user *dst, con
switch (size) {
case 1: {
u8 tmp;
+ barrier_nospec();
__get_user_asm(tmp, (u8 __user *)src,
ret, "b", "b", "=q", 1);
if (likely(!ret))
@@ -188,6 +199,7 @@ int __copy_in_user(void __user *dst, con
}
case 2: {
u16 tmp;
+ barrier_nospec();
__get_user_asm(tmp, (u16 __user *)src,
ret, "w", "w", "=r", 2);
if (likely(!ret))
@@ -198,6 +210,7 @@ int __copy_in_user(void __user *dst, con

case 4: {
u32 tmp;
+ barrier_nospec();
__get_user_asm(tmp, (u32 __user *)src,
ret, "l", "k", "=r", 4);
if (likely(!ret))
@@ -207,6 +220,7 @@ int __copy_in_user(void __user *dst, con
}
case 8: {
u64 tmp;
+ barrier_nospec();
__get_user_asm(tmp, (u64 __user *)src,
ret, "q", "", "=r", 8);
if (likely(!ret))
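Not part of the patch: in the __copy_in_user() cases above the fence goes in ahead of the __get_user_asm() load, before any user-controlled data can be fetched speculatively; the __put_user_asm() store that follows is covered by the same fence. Below is a simplified model of one fixed-size case; simple_copy_in() and its arguments are invented for illustration.

#include <stdint.h>

static inline void speculation_barrier(void)
{
	__asm__ __volatile__("lfence" ::: "memory");
}

static int simple_copy_in(uint8_t *dst, const uint8_t *src_base,
			  unsigned long src_limit, unsigned long off)
{
	uint8_t tmp;

	if (off >= src_limit)
		return -1;
	speculation_barrier();	/* fence before the untrusted load */
	tmp = src_base[off];	/* the __get_user_asm() half */
	*dst = tmp;		/* the __put_user_asm() half */
	return 0;
}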
@@ -774,6 +774,7 @@ survive:
return n;
}
#endif
+ barrier_nospec();
if (movsl_is_ok(to, from, n))
__copy_user(to, from, n);
else
@@ -785,6 +786,7 @@ EXPORT_SYMBOL(__copy_to_user_ll);
unsigned long __copy_from_user_ll(void *to, const void __user *from,
unsigned long n)
{
+ barrier_nospec();
if (movsl_is_ok(to, from, n))
__copy_user_zeroing(to, from, n);
else
@@ -796,6 +798,7 @@ EXPORT_SYMBOL(__copy_from_user_ll);
unsigned long __copy_from_user_ll_nozero(void *to, const void __user *from,
unsigned long n)
{
+ barrier_nospec();
if (movsl_is_ok(to, from, n))
__copy_user(to, from, n);
else
@@ -808,6 +811,7 @@ EXPORT_SYMBOL(__copy_from_user_ll_nozero
unsigned long __copy_from_user_ll_nocache(void *to, const void __user *from,
unsigned long n)
{
+ barrier_nospec();
#ifdef CONFIG_X86_INTEL_USERCOPY
if (n > 64 && cpu_has_xmm2)
n = __copy_user_zeroing_intel_nocache(to, from, n);
@@ -823,6 +827,7 @@ EXPORT_SYMBOL(__copy_from_user_ll_nocach
unsigned long __copy_from_user_ll_nocache_nozero(void *to, const void __user *from,
unsigned long n)
{
+ barrier_nospec();
#ifdef CONFIG_X86_INTEL_USERCOPY
if (n > 64 && cpu_has_xmm2)
n = __copy_user_intel_nocache(to, from, n);
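Not part of the patch: the usercopy_32.c hunks issue barrier_nospec() once at the top of each variable-length routine rather than inside the copy loops, so the cost is paid per call instead of per iteration. A rough standalone model of that placement follows; byte_copy_from() is illustrative, not a kernel function.

static inline void speculation_barrier(void)
{
	__asm__ __volatile__("lfence" ::: "memory");
}

/* One fence at entry covers the whole copy; returns bytes left
 * uncopied, matching the convention of the __copy_*_ll() helpers. */
static unsigned long byte_copy_from(unsigned char *to,
				    const unsigned char *from,
				    unsigned long n)
{
	speculation_barrier();
	while (n--)
		*to++ = *from++;
	return 0;
}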