Message ID | 20220808071318.3335746-8-guoren@kernel.org (mailing list archive) |
---|---|
State | New, archived |
Series | arch: Add qspinlock support and atomic cleanup |
diff --git a/arch/riscv/include/asm/cmpxchg.h b/arch/riscv/include/asm/cmpxchg.h
index 67ab6375b650..567ed2e274c4 100644
--- a/arch/riscv/include/asm/cmpxchg.h
+++ b/arch/riscv/include/asm/cmpxchg.h
@@ -76,18 +76,6 @@
 	(__typeof__(*(ptr))) __xchg((ptr), _x_, sizeof(*(ptr)));	\
 })
 
-#define xchg32(ptr, x)						\
-({								\
-	BUILD_BUG_ON(sizeof(*(ptr)) != 4);			\
-	arch_xchg((ptr), (x));					\
-})
-
-#define xchg64(ptr, x)						\
-({								\
-	BUILD_BUG_ON(sizeof(*(ptr)) != 8);			\
-	arch_xchg((ptr), (x));					\
-})
-
 /*
  * Atomic compare and exchange.  Compare OLD with MEM, if identical,
  * store NEW in MEM.  Return the initial value in MEM.  Success is
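For context, a minimal usage sketch (not part of the patch): the removed xchg32()/xchg64() macros only added a compile-time BUILD_BUG_ON() size check on top of arch_xchg(), so a caller can instead rely on the operand's type to select the access width. The variable and function names below are hypothetical, purely for illustration.

/* Hypothetical caller, illustrating the removed helper vs. arch_xchg(). */
#include <linux/atomic.h>	/* pulls in asm/cmpxchg.h on RISC-V */
#include <linux/types.h>

static u32 example_word;	/* hypothetical 32-bit variable */

static inline u32 example_set_word(u32 new)
{
	/*
	 * Before: return xchg32(&example_word, new);
	 * After:  arch_xchg() already derives the 4-byte access from the
	 *         u32 pointer, so the extra size-checking wrapper is not
	 *         needed.
	 */
	return arch_xchg(&example_word, new);
}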