--- a/include/asm-generic/atomic.h
+++ b/include/asm-generic/atomic.h
@@ -55,7 +55,7 @@ static inline int generic_atomic_fetch_##op(int i, atomic_t *v) \
#include <linux/irqflags.h>
#define ATOMIC_OP(op, c_op) \
-static inline void generic_atomic_##op(int i, atomic_t *v) \
+static inline void __signed_wrap generic_atomic_##op(int i, atomic_t *v)\
{ \
unsigned long flags; \
\
@@ -65,7 +65,7 @@ static inline void generic_atomic_##op(int i, atomic_t *v) \
}
#define ATOMIC_OP_RETURN(op, c_op) \
-static inline int generic_atomic_##op##_return(int i, atomic_t *v) \
+static inline int __signed_wrap generic_atomic_##op##_return(int i, atomic_t *v)\
{ \
unsigned long flags; \
int ret; \
@@ -78,7 +78,7 @@ static inline int generic_atomic_##op##_return(int i, atomic_t *v) \
}
#define ATOMIC_FETCH_OP(op, c_op) \
-static inline int generic_atomic_fetch_##op(int i, atomic_t *v) \
+static inline int __signed_wrap generic_atomic_fetch_##op(int i, atomic_t *v)\
{ \
unsigned long flags; \
int ret; \
--- a/include/asm-generic/atomic64.h
+++ b/include/asm-generic/atomic64.h
@@ -19,13 +19,13 @@ extern s64 generic_atomic64_read(const atomic64_t *v);
extern void generic_atomic64_set(atomic64_t *v, s64 i);
#define ATOMIC64_OP(op) \
-extern void generic_atomic64_##op(s64 a, atomic64_t *v);
+extern void __signed_wrap generic_atomic64_##op(s64 a, atomic64_t *v);
#define ATOMIC64_OP_RETURN(op) \
-extern s64 generic_atomic64_##op##_return(s64 a, atomic64_t *v);
+extern s64 __signed_wrap generic_atomic64_##op##_return(s64 a, atomic64_t *v);
#define ATOMIC64_FETCH_OP(op) \
-extern s64 generic_atomic64_fetch_##op(s64 a, atomic64_t *v);
+extern s64 __signed_wrap generic_atomic64_fetch_##op(s64 a, atomic64_t *v);
#define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op) ATOMIC64_FETCH_OP(op)
--- a/include/linux/atomic/atomic-arch-fallback.h
+++ b/include/linux/atomic/atomic-arch-fallback.h
@@ -7,6 +7,7 @@
#define _LINUX_ATOMIC_FALLBACK_H
#include <linux/compiler.h>
+#include <linux/overflow.h>
#if defined(arch_xchg)
#define raw_xchg arch_xchg
@@ -2428,7 +2429,7 @@ raw_atomic_fetch_add_unless(atomic_t *v, int a, int u)
do {
if (unlikely(c == u))
break;
- } while (!raw_atomic_try_cmpxchg(v, &c, c + a));
+ } while (!raw_atomic_try_cmpxchg(v, &c, wrapping_add(int, c, a)));
return c;
#endif
@@ -2500,7 +2501,7 @@ raw_atomic_inc_unless_negative(atomic_t *v)
do {
if (unlikely(c < 0))
return false;
- } while (!raw_atomic_try_cmpxchg(v, &c, c + 1));
+ } while (!raw_atomic_try_cmpxchg(v, &c, wrapping_add(int, c, 1)));
return true;
#endif
@@ -2528,7 +2529,7 @@ raw_atomic_dec_unless_positive(atomic_t *v)
do {
if (unlikely(c > 0))
return false;
- } while (!raw_atomic_try_cmpxchg(v, &c, c - 1));
+ } while (!raw_atomic_try_cmpxchg(v, &c, wrapping_sub(int, c, 1)));
return true;
#endif
@@ -2554,7 +2555,7 @@ raw_atomic_dec_if_positive(atomic_t *v)
int dec, c = raw_atomic_read(v);
do {
- dec = c - 1;
+ dec = wrapping_sub(int, c, 1);
if (unlikely(dec < 0))
break;
} while (!raw_atomic_try_cmpxchg(v, &c, dec));
@@ -4554,7 +4555,7 @@ raw_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
do {
if (unlikely(c == u))
break;
- } while (!raw_atomic64_try_cmpxchg(v, &c, c + a));
+ } while (!raw_atomic64_try_cmpxchg(v, &c, wrapping_add(s64, c, a)));
return c;
#endif
@@ -4626,7 +4627,7 @@ raw_atomic64_inc_unless_negative(atomic64_t *v)
do {
if (unlikely(c < 0))
return false;
- } while (!raw_atomic64_try_cmpxchg(v, &c, c + 1));
+ } while (!raw_atomic64_try_cmpxchg(v, &c, wrapping_add(s64, c, 1)));
return true;
#endif
@@ -4654,7 +4655,7 @@ raw_atomic64_dec_unless_positive(atomic64_t *v)
do {
if (unlikely(c > 0))
return false;
- } while (!raw_atomic64_try_cmpxchg(v, &c, c - 1));
+ } while (!raw_atomic64_try_cmpxchg(v, &c, wrapping_sub(s64, c, 1)));
return true;
#endif
@@ -4680,7 +4681,7 @@ raw_atomic64_dec_if_positive(atomic64_t *v)
s64 dec, c = raw_atomic64_read(v);
do {
- dec = c - 1;
+ dec = wrapping_sub(s64, c, 1);
if (unlikely(dec < 0))
break;
} while (!raw_atomic64_try_cmpxchg(v, &c, dec));
@@ -4690,4 +4691,4 @@ raw_atomic64_dec_if_positive(atomic64_t *v)
}
#endif /* _LINUX_ATOMIC_FALLBACK_H */
-// 14850c0b0db20c62fdc78ccd1d42b98b88d76331
+// 1278e3a674d0a36c2f0eb9f5fd0ddfcbf3690406
--- a/include/linux/atomic/atomic-instrumented.h
+++ b/include/linux/atomic/atomic-instrumented.h
@@ -15,6 +15,7 @@
#include <linux/build_bug.h>
#include <linux/compiler.h>
#include <linux/instrumented.h>
+#include <linux/overflow.h>
/**
* atomic_read() - atomic load with relaxed ordering
@@ -5050,4 +5051,4 @@ atomic_long_dec_if_positive(atomic_long_t *v)
#endif /* _LINUX_ATOMIC_INSTRUMENTED_H */
-// ce5b65e0f1f8a276268b667194581d24bed219d4
+// b9cd8314e11c4c818fb469dbd18c7390fcaf9b3c
--- a/include/linux/atomic/atomic-long.h
+++ b/include/linux/atomic/atomic-long.h
@@ -7,6 +7,7 @@
#define _LINUX_ATOMIC_LONG_H
#include <linux/compiler.h>
+#include <linux/overflow.h>
#include <asm/types.h>
#ifdef CONFIG_64BIT
@@ -1809,4 +1810,4 @@ raw_atomic_long_dec_if_positive(atomic_long_t *v)
}
#endif /* _LINUX_ATOMIC_LONG_H */
-// 1c4a26fc77f345342953770ebe3c4d08e7ce2f9a
+// 01a5fe70d091e84c1de5eea7e9c09ebeaf7799b3
--- a/lib/atomic64.c
+++ b/lib/atomic64.c
@@ -67,7 +67,7 @@ void generic_atomic64_set(atomic64_t *v, s64 i)
EXPORT_SYMBOL(generic_atomic64_set);
#define ATOMIC64_OP(op, c_op) \
-void generic_atomic64_##op(s64 a, atomic64_t *v) \
+void __signed_wrap generic_atomic64_##op(s64 a, atomic64_t *v) \
{ \
unsigned long flags; \
raw_spinlock_t *lock = lock_addr(v); \
@@ -79,7 +79,7 @@ void generic_atomic64_##op(s64 a, atomic64_t *v) \
EXPORT_SYMBOL(generic_atomic64_##op);
#define ATOMIC64_OP_RETURN(op, c_op) \
-s64 generic_atomic64_##op##_return(s64 a, atomic64_t *v) \
+s64 __signed_wrap generic_atomic64_##op##_return(s64 a, atomic64_t *v) \
{ \
unsigned long flags; \
raw_spinlock_t *lock = lock_addr(v); \
@@ -93,7 +93,7 @@ s64 generic_atomic64_##op##_return(s64 a, atomic64_t *v) \
EXPORT_SYMBOL(generic_atomic64_##op##_return);
#define ATOMIC64_FETCH_OP(op, c_op) \
-s64 generic_atomic64_fetch_##op(s64 a, atomic64_t *v) \
+s64 __signed_wrap generic_atomic64_fetch_##op(s64 a, atomic64_t *v) \
{ \
unsigned long flags; \
raw_spinlock_t *lock = lock_addr(v); \
@@ -135,7 +135,7 @@ s64 generic_atomic64_dec_if_positive(atomic64_t *v)
s64 val;
raw_spin_lock_irqsave(lock, flags);
- val = v->counter - 1;
+ val = wrapping_sub(typeof(val), v->counter, 1);
if (val >= 0)
v->counter = val;
raw_spin_unlock_irqrestore(lock, flags);
@@ -181,7 +181,7 @@ s64 generic_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
raw_spin_lock_irqsave(lock, flags);
val = v->counter;
if (val != u)
- v->counter += a;
+ wrapping_assign_add(v->counter, a);
raw_spin_unlock_irqrestore(lock, flags);
return val;
--- a/scripts/atomic/fallbacks/dec_if_positive
+++ b/scripts/atomic/fallbacks/dec_if_positive
@@ -2,7 +2,7 @@ cat <<EOF
${int} dec, c = raw_${atomic}_read(v);
do {
- dec = c - 1;
+ dec = wrapping_sub(${int}, c, 1);
if (unlikely(dec < 0))
break;
} while (!raw_${atomic}_try_cmpxchg(v, &c, dec));
--- a/scripts/atomic/fallbacks/dec_unless_positive
+++ b/scripts/atomic/fallbacks/dec_unless_positive
@@ -4,7 +4,7 @@ cat <<EOF
do {
if (unlikely(c > 0))
return false;
- } while (!raw_${atomic}_try_cmpxchg(v, &c, c - 1));
+ } while (!raw_${atomic}_try_cmpxchg(v, &c, wrapping_sub(${int}, c, 1)));
return true;
EOF
--- a/scripts/atomic/fallbacks/fetch_add_unless
+++ b/scripts/atomic/fallbacks/fetch_add_unless
@@ -4,7 +4,7 @@ cat << EOF
do {
if (unlikely(c == u))
break;
- } while (!raw_${atomic}_try_cmpxchg(v, &c, c + a));
+ } while (!raw_${atomic}_try_cmpxchg(v, &c, wrapping_add(${int}, c, a)));
return c;
EOF
--- a/scripts/atomic/fallbacks/inc_unless_negative
+++ b/scripts/atomic/fallbacks/inc_unless_negative
@@ -4,7 +4,7 @@ cat <<EOF
do {
if (unlikely(c < 0))
return false;
- } while (!raw_${atomic}_try_cmpxchg(v, &c, c + 1));
+ } while (!raw_${atomic}_try_cmpxchg(v, &c, wrapping_add(${int}, c, 1)));
return true;
EOF
--- a/scripts/atomic/gen-atomic-fallback.sh
+++ b/scripts/atomic/gen-atomic-fallback.sh
@@ -297,6 +297,7 @@ cat << EOF
#define _LINUX_ATOMIC_FALLBACK_H
#include <linux/compiler.h>
+#include <linux/overflow.h>
EOF
--- a/scripts/atomic/gen-atomic-instrumented.sh
+++ b/scripts/atomic/gen-atomic-instrumented.sh
@@ -146,6 +146,7 @@ cat << EOF
#include <linux/build_bug.h>
#include <linux/compiler.h>
#include <linux/instrumented.h>
+#include <linux/overflow.h>
EOF
--- a/scripts/atomic/gen-atomic-long.sh
+++ b/scripts/atomic/gen-atomic-long.sh
@@ -75,6 +75,7 @@ cat << EOF
#define _LINUX_ATOMIC_LONG_H
#include <linux/compiler.h>
+#include <linux/overflow.h>
#include <asm/types.h>
#ifdef CONFIG_64BIT
Because atomics depend on signed wrap-around, we need to use helpers to
perform the operations so that they are not instrumented by the signed
wrap-around sanitizer.

Refresh generated files by running scripts/atomic/gen-atomics.sh.

Signed-off-by: Kees Cook <keescook@chromium.org>
---
Cc: Mark Rutland <mark.rutland@arm.com>
Cc: Will Deacon <will@kernel.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Arnd Bergmann <arnd@arndb.de>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: linux-arch@vger.kernel.org
---
 include/asm-generic/atomic.h                 |  6 +++---
 include/asm-generic/atomic64.h               |  6 +++---
 include/linux/atomic/atomic-arch-fallback.h  | 19 ++++++++++---------
 include/linux/atomic/atomic-instrumented.h   |  3 ++-
 include/linux/atomic/atomic-long.h           |  3 ++-
 lib/atomic64.c                               | 10 +++++-----
 scripts/atomic/fallbacks/dec_if_positive     |  2 +-
 scripts/atomic/fallbacks/dec_unless_positive |  2 +-
 scripts/atomic/fallbacks/fetch_add_unless    |  2 +-
 scripts/atomic/fallbacks/inc_unless_negative |  2 +-
 scripts/atomic/gen-atomic-fallback.sh        |  1 +
 scripts/atomic/gen-atomic-instrumented.sh    |  1 +
 scripts/atomic/gen-atomic-long.sh            |  1 +
 13 files changed, 32 insertions(+), 26 deletions(-)
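
For context on the helpers used throughout the diff, here is a minimal
sketch of what they are expected to look like. It is illustrative only:
it assumes the wrapping_add()/wrapping_sub()/wrapping_assign_add()
macros from <linux/overflow.h> and a __signed_wrap function attribute
gated by a CONFIG_UBSAN_SIGNED_WRAP-style option; consult the tree for
the authoritative definitions.

/*
 * Sketch only -- not copied from the kernel tree. The real definitions
 * live in include/linux/overflow.h and the compiler headers.
 *
 * The compiler's overflow builtins compute the two's-complement
 * (wrapped) result and merely report whether overflow happened, so the
 * arithmetic itself is well defined and is not flagged by the
 * signed-integer-overflow sanitizer.
 */
#define wrapping_add(type, a, b)				\
	({							\
		type __val;					\
		__builtin_add_overflow(a, b, &__val);		\
		__val;						\
	})

#define wrapping_sub(type, a, b)				\
	({							\
		type __val;					\
		__builtin_sub_overflow(a, b, &__val);		\
		__val;						\
	})

/* In-place variant, as used in lib/atomic64.c above. */
#define wrapping_assign_add(var, offset)			\
	({							\
		typeof(var) *__ptr = &(var);			\
		*__ptr = wrapping_add(typeof(var), *__ptr, offset); \
	})

/*
 * __signed_wrap marks an entire function as intentionally relying on
 * signed wrap-around so the sanitizer skips instrumenting it
 * (hypothetical CONFIG_UBSAN_SIGNED_WRAP gate shown for illustration).
 */
#ifdef CONFIG_UBSAN_SIGNED_WRAP
# define __signed_wrap	__attribute__((no_sanitize("signed-integer-overflow")))
#else
# define __signed_wrap
#endif

With these in place, a fallback such as raw_atomic_fetch_add_unless()
expresses c + a as wrapping_add(int, c, a): the value is bit-for-bit
the same on overflow, but the wrap-around is now explicit rather than
something the sanitizer would report.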