@@ -1,8 +1,6 @@
#ifndef _ARM_ARM32_BITOPS_H
#define _ARM_ARM32_BITOPS_H
-#define flsl fls
-
/*
* Little endian assembly bitops. nr = 0 -> byte 0 bit 0.
*/
@@ -1,18 +1,6 @@
#ifndef _ARM_ARM64_BITOPS_H
#define _ARM_ARM64_BITOPS_H
-static inline int flsl(unsigned long x)
-{
- uint64_t ret;
-
- if (__builtin_constant_p(x))
- return generic_flsl(x);
-
- asm("clz\t%0, %1" : "=r" (ret) : "r" (x));
-
- return BITS_PER_LONG - ret;
-}
-
/* Based on linux/include/asm-generic/bitops/find.h */
#ifndef CONFIG_GENERIC_FIND_FIRST_BIT
@@ -140,25 +140,10 @@ static inline int test_bit(int nr, const volatile void *addr)
return 1UL & (p[BITOP_WORD(nr)] >> (nr & (BITOP_BITS_PER_WORD-1)));
}
-/*
- * On ARMv5 and above those functions can be implemented around
- * the clz instruction for much better code efficiency.
- */
-
-static inline int fls(unsigned int x)
-{
- int ret;
-
- if (__builtin_constant_p(x))
- return generic_flsl(x);
-
- asm("clz\t%"__OP32"0, %"__OP32"1" : "=r" (ret) : "r" (x));
- return 32 - ret;
-}
-
-
#define arch_ffs(x) ((x) ? 1 + __builtin_ctz(x) : 0)
#define arch_ffsl(x) ((x) ? 1 + __builtin_ctzl(x) : 0)
+#define arch_fls(x) ((x) ? 32 - __builtin_clz(x) : 0)
+#define arch_flsl(x) ((x) ? BITS_PER_LONG - __builtin_clzl(x) : 0)
/**
* hweightN - returns the hamming weight of a N-bit word
@@ -171,10 +171,10 @@ static inline int __test_and_clear_bit(int nr, volatile void *addr)
return (old & mask) != 0;
}
-#define flsl(x) generic_flsl(x)
-#define fls(x) generic_flsl(x)
#define arch_ffs(x) ((x) ? 1 + __builtin_ctz(x) : 0)
#define arch_ffsl(x) ((x) ? 1 + __builtin_ctzl(x) : 0)
+#define arch_fls(x) ((x) ? 32 - __builtin_clz(x) : 0)
+#define arch_flsl(x) ((x) ? BITS_PER_LONG - __builtin_clzl(x) : 0)
/**
* hweightN - returns the hamming weight of a N-bit word
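
The ARM and PPC hunks above replace open-coded fls()/flsl() routines with one-line arch_fls()/arch_flsl() macros. The explicit zero guard is what makes them safe: __builtin_clz() and __builtin_clzl() are undefined for a zero argument, so the macros return 0 ("no bit set") themselves rather than passing 0 to the builtin. Purely for illustration, a minimal standalone sketch (outside of Xen, deriving BITS_PER_LONG locally) checking the same shape against a naive reference:

#include <assert.h>
#include <limits.h>

#define BITS_PER_LONG ((int)(sizeof(unsigned long) * CHAR_BIT))

/* Same shape as the macros added above for ARM and PPC. */
#define arch_fls(x)  ((x) ? 32 - __builtin_clz(x) : 0)
#define arch_flsl(x) ((x) ? BITS_PER_LONG - __builtin_clzl(x) : 0)

/* Naive reference: 1-based index of the most significant set bit, 0 if none. */
static int ref_flsl(unsigned long x)
{
    int i = 0;

    while ( x )
    {
        x >>= 1;
        i++;
    }

    return i;
}

int main(void)
{
    assert(arch_fls(0) == 0);
    assert(arch_fls(0x90) == 8);
    assert(arch_fls(6) == ref_flsl(6));
    assert(arch_flsl(1UL << (BITS_PER_LONG - 1)) == BITS_PER_LONG);

    return 0;
}
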
@@ -447,33 +447,41 @@ static always_inline unsigned int arch_ffsl(unsigned long x)
}
#define arch_ffsl arch_ffsl
-/**
- * fls - find last bit set
- * @x: the word to search
- *
- * This is defined the same way as ffs.
- */
-static inline int flsl(unsigned long x)
+static always_inline unsigned int arch_fls(unsigned int x)
{
- long r;
+ unsigned int r;
+
+ /* See arch_ffs() for safety discussions. */
+ if ( __builtin_constant_p(x > 0) && x > 0 )
+ asm ( "bsr %[val], %[res]"
+ : [res] "=r" (r)
+ : [val] "rm" (x) );
+ else
+ asm ( "bsr %[val], %[res]"
+ : [res] "=r" (r)
+ : [val] "rm" (x), "[res]" (-1) );
- asm ( "bsr %1,%0\n\t"
- "jnz 1f\n\t"
- "mov $-1,%0\n"
- "1:" : "=r" (r) : "rm" (x));
- return (int)r+1;
+ return r + 1;
}
+#define arch_fls arch_fls
-static inline int fls(unsigned int x)
+static always_inline unsigned int arch_flsl(unsigned long x)
{
- int r;
+ unsigned int r;
+
+ /* See arch_ffs() for safety discussions. */
+ if ( __builtin_constant_p(x > 0) && x > 0 )
+ asm ( "bsr %[val], %q[res]"
+ : [res] "=r" (r)
+ : [val] "rm" (x) );
+ else
+ asm ( "bsr %[val], %q[res]"
+ : [res] "=r" (r)
+ : [val] "rm" (x), "[res]" (-1) );
- asm ( "bsr %1,%0\n\t"
- "jnz 1f\n\t"
- "mov $-1,%0\n"
- "1:" : "=r" (r) : "rm" (x));
return r + 1;
}
+#define arch_flsl arch_flsl
/**
* hweightN - returns the hamming weight of a N-bit word
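
On x86 the same interface is provided via BSR, mirroring arch_ffs(): when the compiler can prove x is non-zero at compile time, the plain BSR form is emitted; otherwise the "[res]" (-1) matching constraint preloads the destination register with -1, relying on BSR leaving its destination unmodified for a zero input (the safety argument is in the arch_ffs() comment referenced above, which is not part of this excerpt). Roughly, the fallback form computes the following, shown here as a plain-C sketch rather than the Xen code itself:

/* Sketch of what the preloaded-BSR form computes; not the Xen implementation. */
static unsigned int fls_sketch(unsigned int x)
{
    unsigned int r = -1;               /* "[res]" (-1): preload the destination. */

    if ( x )
        r = 31 - __builtin_clz(x);     /* BSR: bit index of the highest set bit. */

    return r + 1;                      /* Wraps to 0 for x == 0, else 1..32. */
}
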
@@ -26,7 +26,32 @@ static void __init test_ffs(void)
#endif
}
+static void __init test_fls(void)
+{
+ /* unsigned int fls(unsigned int) */
+ CHECK(fls, 0, 0);
+ CHECK(fls, 1, 1);
+ CHECK(fls, 3, 2);
+ CHECK(fls, 7, 3);
+ CHECK(fls, 6, 3);
+ CHECK(fls, 0x80000000U, 32);
+
+ /* unsigned int flsl(unsigned long) */
+ CHECK(flsl, 0, 0);
+ CHECK(flsl, 1, 1);
+ CHECK(flsl, 3, 2);
+ CHECK(flsl, 7, 3);
+ CHECK(flsl, 6, 3);
+
+ CHECK(flsl, 1 | (1UL << (BITS_PER_LONG - 1)), BITS_PER_LONG);
+#if BITS_PER_LONG > 32
+ CHECK(flsl, 1 | (1UL << 32), 33);
+ CHECK(flsl, 1 | (1UL << 63), 64);
+#endif
+}
+
static void __init __constructor test_bitops(void)
{
test_ffs();
+ test_fls();
}
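
The new self-tests pin down the boundary behaviour: 0 maps to 0, bit 0 maps to 1, and the top bit maps to 32 (or BITS_PER_LONG for flsl), with the 64-bit-only cases compiled out on 32-bit builds. CHECK() itself is defined earlier in this file and is not shown in this excerpt; a hedged, runtime-only sketch with the same call shape (the real macro may also perform compile-time checking) might look like:

/* Hypothetical stand-in for the real CHECK() macro; runtime check only. */
#define CHECK(fn, val, res)                                     \
    do {                                                        \
        unsigned int real_ = fn(val);                           \
                                                                \
        if ( real_ != (res) )                                   \
            panic("%s(%s) expected %u, got %u\n",               \
                  #fn, #val, (unsigned int)(res), real_);       \
    } while ( 0 )
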
@@ -60,6 +60,30 @@ static always_inline __pure unsigned int ffsl(unsigned long x)
#endif
}
+static always_inline __pure unsigned int fls(unsigned int x)
+{
+ if ( __builtin_constant_p(x) )
+ return x ? 32 - __builtin_clz(x) : 0;
+
+#ifdef arch_fls
+ return arch_fls(x);
+#else
+ return generic_flsl(x);
+#endif
+}
+
+static always_inline __pure unsigned int flsl(unsigned long x)
+{
+ if ( __builtin_constant_p(x) )
+ return x ? BITS_PER_LONG - __builtin_clzl(x) : 0;
+
+#ifdef arch_flsl
+ return arch_flsl(x);
+#else
+ return generic_flsl(x);
+#endif
+}
+
/* --------------------- Please tidy below here --------------------- */
#ifndef find_next_bit
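
Architectures that do not provide arch_fls()/arch_flsl() fall back to generic_flsl(), which lives elsewhere in the tree and is not part of this diff. Purely to illustrate the contract the common fls()/flsl() wrappers rely on (0 for no bits set, otherwise the 1-based index of the most significant set bit), one plausible portable shape for such a fallback is:

/* Illustrative only; not the generic_flsl() actually used by Xen. */
static unsigned int flsl_fallback(unsigned long x)
{
    unsigned int r = 1;

    if ( !x )
        return 0;

#if __SIZEOF_LONG__ > 4
    if ( x & ~0xffffffffUL ) { x >>= 32; r += 32; }
#endif
    if ( x & 0xffff0000UL )  { x >>= 16; r += 16; }
    if ( x & 0xff00 )        { x >>= 8;  r += 8;  }
    if ( x & 0xf0 )          { x >>= 4;  r += 4;  }
    if ( x & 0xc )           { x >>= 2;  r += 2;  }
    if ( x & 0x2 )           r += 1;

    return r;
}
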