
[2/5] s390/cmpxchg: make variables local to each case label

Message ID Y2J7+HqgAZwnfxsh@osiris
State Accepted
Commit ce968f654570dbd9cac7de694681640061559d3b
Series [1/5] s390/cmpxchg: use symbolic names for inline assembly operands

Commit Message

Heiko Carstens Nov. 2, 2022, 2:17 p.m. UTC
Make variables local to each case label. This limits the scope of the
variables and allows proper types to be used everywhere.

Signed-off-by: Heiko Carstens <hca@linux.ibm.com>
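
[Editorial note, not part of the patch: a minimal, self-contained
user-space sketch of the pattern the commit applies, with a braced block
after each case label so every case can declare variables of the width it
actually needs. The function and variable names below are made up for the
example.]

#include <stdio.h>

static unsigned long scale(unsigned long value, int size)
{
	switch (size) {
	case 4: {
		/* the 32-bit case only needs a 32-bit temporary */
		unsigned int tmp = (unsigned int)value;

		return tmp * 2u;
	}
	case 8: {
		/* the 64-bit case keeps the full-width type */
		unsigned long tmp = value;

		return tmp * 2ul;
	}
	}
	return 0;
}

int main(void)
{
	printf("%lu %lu\n", scale(21, 4), scale(21, 8));
	return 0;
}

With function-scope declarations, all cases had to share one set of
unsigned long temporaries; the case-local blocks in the patch below let
the 1-, 2- and 4-byte cases use unsigned int instead, while the 8-byte
case keeps unsigned long.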
---
 arch/s390/include/asm/cmpxchg.h | 23 ++++++++++++++++-------
 1 file changed, 16 insertions(+), 7 deletions(-)

Patch

diff --git a/arch/s390/include/asm/cmpxchg.h b/arch/s390/include/asm/cmpxchg.h
index 56fb8aa08945..2ad057b94481 100644
--- a/arch/s390/include/asm/cmpxchg.h
+++ b/arch/s390/include/asm/cmpxchg.h
@@ -88,11 +88,10 @@  static __always_inline unsigned long __cmpxchg(unsigned long address,
 					       unsigned long old,
 					       unsigned long new, int size)
 {
-	unsigned long prev, tmp;
-	int shift;
-
 	switch (size) {
-	case 1:
+	case 1: {
+		unsigned int prev, tmp, shift;
+
 		shift = (3 ^ (address & 3)) << 3;
 		address ^= address & 3;
 		asm volatile(
@@ -115,7 +114,10 @@  static __always_inline unsigned long __cmpxchg(unsigned long address,
 			  [mask] "d" (~(0xff << shift))
 			: "memory", "cc");
 		return prev >> shift;
-	case 2:
+	}
+	case 2: {
+		unsigned int prev, tmp, shift;
+
 		shift = (2 ^ (address & 2)) << 3;
 		address ^= address & 2;
 		asm volatile(
@@ -138,7 +140,10 @@  static __always_inline unsigned long __cmpxchg(unsigned long address,
 			  [mask] "d" (~(0xffff << shift))
 			: "memory", "cc");
 		return prev >> shift;
-	case 4:
+	}
+	case 4: {
+		unsigned int prev;
+
 		asm volatile(
 			"	cs	%[prev],%[new],%[address]\n"
 			: [prev] "=&d" (prev),
@@ -147,7 +152,10 @@  static __always_inline unsigned long __cmpxchg(unsigned long address,
 			  [new] "d" (new)
 			: "memory", "cc");
 		return prev;
-	case 8:
+	}
+	case 8: {
+		unsigned long prev;
+
 		asm volatile(
 			"	csg	%[prev],%[new],%[address]\n"
 			: [prev] "=&d" (prev),
@@ -157,6 +165,7 @@  static __always_inline unsigned long __cmpxchg(unsigned long address,
 			: "memory", "cc");
 		return prev;
 	}
+	}
 	__cmpxchg_called_with_bad_pointer();
 	return old;
 }
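
[Editorial note, not from the patch; the example address is made up: a
standalone sketch of the address arithmetic used in the 1-byte case
above. Since CS operates on an aligned 4-byte word, the code derives the
bit position of the target byte within that word (s390 is big-endian)
and a mask that clears it, all of which fits in unsigned int.]

#include <stdio.h>

int main(void)
{
	unsigned long address = 0x1003;	/* arbitrary example address */
	unsigned int shift, mask;

	/* byte offset 0 maps to bits 24..31, offset 3 to bits 0..7 */
	shift = (3 ^ (address & 3)) << 3;
	/* mask with the target byte's bits cleared */
	mask = ~(0xffU << shift);
	/* word-aligned address used for the CS instruction */
	address ^= address & 3;

	printf("aligned=0x%lx shift=%u mask=0x%08x\n", address, shift, mask);
	return 0;
}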