Message ID | 20231121142347.241356-2-wuqiang.matt@bytedance.com (mailing list archive) |
---|---|
State | Handled Elsewhere |
Headers | show |
Series | arch,locking/atomic: add arch_cmpxchg[64]_local | expand |
Hi Wuqiang, On Tue, Nov 21, 2023 at 10:23:43PM +0800, wuqiang.matt wrote: > arch_cmpxchg() should check data size rather than pointer size in case > CONFIG_ARC_HAS_LLSC is defined. So rename __cmpxchg to __cmpxchg_32 to > emphasize its explicit support of 32bit data size with BUILD_BUG_ON() > added to avoid any possible misuses with unsupported data types. > > In case CONFIG_ARC_HAS_LLSC is undefined, arch_cmpxchg() uses spinlock > to accomplish SMP-safety, so the BUILD_BUG_ON checking is unnecessary. > > v2 -> v3: > - Patches regrouped and has the improvement for xtensa included > - Comments refined to address why these changes are needed > > v1 -> v2: > - Try using native cmpxchg variants if available, as Arnd advised > > Signed-off-by: wuqiang.matt <wuqiang.matt@bytedance.com> > Reviewed-by: Masami Hiramatsu (Google) <mhiramat@kernel.org> > --- > arch/arc/include/asm/cmpxchg.h | 12 ++++++------ > 1 file changed, 6 insertions(+), 6 deletions(-) > > diff --git a/arch/arc/include/asm/cmpxchg.h b/arch/arc/include/asm/cmpxchg.h > index e138fde067de..bf46514f6f12 100644 > --- a/arch/arc/include/asm/cmpxchg.h > +++ b/arch/arc/include/asm/cmpxchg.h > @@ -18,14 +18,16 @@ > * if (*ptr == @old) > * *ptr = @new > */ > -#define __cmpxchg(ptr, old, new) \ > +#define __cmpxchg_32(ptr, old, new) \ > ({ \ > __typeof__(*(ptr)) _prev; \ > \ > + BUILD_BUG_ON(sizeof(*(ptr)) != 4); \ > + \ > __asm__ __volatile__( \ > - "1: llock %0, [%1] \n" \ > + "1: llock %0, [%1] \n" \ > " brne %0, %2, 2f \n" \ > - " scond %3, [%1] \n" \ > + " scond %3, [%1] \n" \ > " bnz 1b \n" \ > "2: \n" \ > : "=&r"(_prev) /* Early clobber prevent reg reuse */ \ > @@ -47,7 +49,7 @@ > \ > switch(sizeof((_p_))) { \ > case 4: \ > - _prev_ = __cmpxchg(_p_, _o_, _n_); \ > + _prev_ = __cmpxchg_32(_p_, _o_, _n_); \ > break; \ > default: \ > BUILD_BUG(); \ > @@ -65,8 +67,6 @@ > __typeof__(*(ptr)) _prev_; \ > unsigned long __flags; \ > \ > - BUILD_BUG_ON(sizeof(_p_) != 4); \ > - \ I think I made some comments here that 
have not been addressed or replied. Thanks, Andi > /* \ > * spin lock/unlock provide the needed smp_mb() before/after \ > */ \ > -- > 2.40.1
Hello Andi, On 2023/11/23 06:17, Andi Shyti wrote: > Hi Wuqiang, > > On Tue, Nov 21, 2023 at 10:23:43PM +0800, wuqiang.matt wrote: >> arch_cmpxchg() should check data size rather than pointer size in case >> CONFIG_ARC_HAS_LLSC is defined. So rename __cmpxchg to __cmpxchg_32 to >> emphasize its explicit support of 32bit data size with BUILD_BUG_ON() >> added to avoid any possible misuses with unsupported data types. >> >> In case CONFIG_ARC_HAS_LLSC is undefined, arch_cmpxchg() uses spinlock >> to accomplish SMP-safety, so the BUILD_BUG_ON checking is unnecessary. >> >> v2 -> v3: >> - Patches regrouped and has the improvement for xtensa included >> - Comments refined to address why these changes are needed >> >> v1 -> v2: >> - Try using native cmpxchg variants if available, as Arnd advised BTW, the changelog should be in the cover letter. I'll correct it in next version, so don't bother resending to make more noise. >> Signed-off-by: wuqiang.matt <wuqiang.matt@bytedance.com> >> Reviewed-by: Masami Hiramatsu (Google) <mhiramat@kernel.org> >> --- >> arch/arc/include/asm/cmpxchg.h | 12 ++++++------ >> 1 file changed, 6 insertions(+), 6 deletions(-) >> >> diff --git a/arch/arc/include/asm/cmpxchg.h b/arch/arc/include/asm/cmpxchg.h >> index e138fde067de..bf46514f6f12 100644 >> --- a/arch/arc/include/asm/cmpxchg.h >> +++ b/arch/arc/include/asm/cmpxchg.h >> @@ -18,14 +18,16 @@ >> * if (*ptr == @old) >> * *ptr = @new >> */ >> -#define __cmpxchg(ptr, old, new) \ >> +#define __cmpxchg_32(ptr, old, new) \ >> ({ \ >> __typeof__(*(ptr)) _prev; \ >> \ >> + BUILD_BUG_ON(sizeof(*(ptr)) != 4); \ >> + \ >> __asm__ __volatile__( \ >> - "1: llock %0, [%1] \n" \ >> + "1: llock %0, [%1] \n" \ >> " brne %0, %2, 2f \n" \ >> - " scond %3, [%1] \n" \ >> + " scond %3, [%1] \n" \ >> " bnz 1b \n" \ >> "2: \n" \ >> : "=&r"(_prev) /* Early clobber prevent reg reuse */ \ >> @@ -47,7 +49,7 @@ >> \ >> switch(sizeof((_p_))) { \ >> case 4: \ >> - _prev_ = __cmpxchg(_p_, _o_, _n_); \ >> + _prev_ = 
__cmpxchg_32(_p_, _o_, _n_); \ >> break; \ >> default: \ >> BUILD_BUG(); \ >> @@ -65,8 +67,6 @@ >> __typeof__(*(ptr)) _prev_; \ >> unsigned long __flags; \ >> \ >> - BUILD_BUG_ON(sizeof(_p_) != 4); \ >> - \ > > I think I made some comments here that have not been addressed or > replied. Sorry that I haven't seen your message. Could you resend ? I rechecked my mailbox and the mailing lists, but no luck. > Thanks, > Andi Regards, Wuqiang > >> /* \ >> * spin lock/unlock provide the needed smp_mb() before/after \ >> */ \ >> -- >> 2.40.1
diff --git a/arch/arc/include/asm/cmpxchg.h b/arch/arc/include/asm/cmpxchg.h index e138fde067de..bf46514f6f12 100644 --- a/arch/arc/include/asm/cmpxchg.h +++ b/arch/arc/include/asm/cmpxchg.h @@ -18,14 +18,16 @@ * if (*ptr == @old) * *ptr = @new */ -#define __cmpxchg(ptr, old, new) \ +#define __cmpxchg_32(ptr, old, new) \ ({ \ __typeof__(*(ptr)) _prev; \ \ + BUILD_BUG_ON(sizeof(*(ptr)) != 4); \ + \ __asm__ __volatile__( \ - "1: llock %0, [%1] \n" \ + "1: llock %0, [%1] \n" \ " brne %0, %2, 2f \n" \ - " scond %3, [%1] \n" \ + " scond %3, [%1] \n" \ " bnz 1b \n" \ "2: \n" \ : "=&r"(_prev) /* Early clobber prevent reg reuse */ \ @@ -47,7 +49,7 @@ \ switch(sizeof((_p_))) { \ case 4: \ - _prev_ = __cmpxchg(_p_, _o_, _n_); \ + _prev_ = __cmpxchg_32(_p_, _o_, _n_); \ break; \ default: \ BUILD_BUG(); \ @@ -65,8 +67,6 @@ __typeof__(*(ptr)) _prev_; \ unsigned long __flags; \ \ - BUILD_BUG_ON(sizeof(_p_) != 4); \ - \ /* \ * spin lock/unlock provide the needed smp_mb() before/after \ */ \