===================================================================
@@ -75,6 +75,31 @@
return lws_errno;
}
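+/* Kernel helper for a compare-and-exchange of variable size.  OLDVAL and
+   NEWVAL are passed by reference; VAL_SIZE encodes the operand width as a
+   power of two (3 requests an 8-byte operand).  */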
+static inline long
+__kernel_cmpxchg2 (void *oldval, void *newval, void *mem, int val_size)
+{
+ register unsigned long lws_mem asm("r26") = (unsigned long) (mem);
+ register long lws_ret asm("r28");
+ register long lws_errno asm("r21");
+ register unsigned long lws_old asm("r25") = (unsigned long) oldval;
+ register unsigned long lws_new asm("r24") = (unsigned long) newval;
+ register int lws_size asm("r23") = val_size;
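+ /* Branch to the light-weight syscall gateway page (offset 0xb0 via
+    %sr2); the ldi in the delay slot selects LWS index 2, the
+    variable-size compare-and-swap.  %r28 returns nonzero on a failed
+    compare, %r21 the kernel error code.  */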
+ asm volatile ( "ble 0xb0(%%sr2, %%r0) \n\t"
+ "ldi %2, %%r20 \n\t"
+ : "=r" (lws_ret), "=r" (lws_errno)
+ : "i" (2), "r" (lws_mem), "r" (lws_old), "r" (lws_new), "r" (lws_size)
+ : "r1", "r20", "r22", "r29", "r31", "fr4", "memory"
+ );
+ if (__builtin_expect (lws_errno == -EFAULT || lws_errno == -ENOSYS, 0))
+ ABORT_INSTRUCTION;
+
+ /* If the kernel LWS call succeeded but the compare failed, return -EBUSY.  */
+ if (!lws_errno && lws_ret)
+ lws_errno = -EBUSY;
+
+ return lws_errno;
+}
#define HIDDEN __attribute__ ((visibility ("hidden")))
/* Big endian masks */
@@ -84,6 +109,29 @@
#define MASK_1 0xffu
#define MASK_2 0xffffu
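+/* 64-bit fetch-and-op: read the current value, compute OP, and retry the
+   kernel compare-and-swap until no other writer has intervened.  Returns
+   the value that was in memory before the operation.  */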
+#define FETCH_AND_OP_DWORD(OP, PFX_OP, INF_OP) \
+ long long HIDDEN \
+ __sync_fetch_and_##OP##_8 (long long *ptr, long long val) \
+ { \
+ long long tmp, newval; \
+ int failure; \
+ \
+ do { \
+ tmp = *ptr; \
+ newval = PFX_OP (tmp INF_OP val); \
+ failure = __kernel_cmpxchg2 (&tmp, &newval, ptr, 3); \
+ } while (failure != 0); \
+ \
+ return tmp; \
+ }
+
+FETCH_AND_OP_DWORD (add, , +)
+FETCH_AND_OP_DWORD (sub, , -)
+FETCH_AND_OP_DWORD (or, , |)
+FETCH_AND_OP_DWORD (and, , &)
+FETCH_AND_OP_DWORD (xor, , ^)
+FETCH_AND_OP_DWORD (nand, ~, &)
+
#define FETCH_AND_OP_WORD(OP, PFX_OP, INF_OP) \
int HIDDEN \
__sync_fetch_and_##OP##_4 (int *ptr, int val) \
@@ -147,6 +195,29 @@
SUBWORD_SYNC_OP (xor, , ^, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned char, 1, oldval)
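+/* 64-bit op-and-fetch: the same compare-and-swap loop as above, except
+   that the newly computed value is returned.  */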
+#define OP_AND_FETCH_DWORD(OP, PFX_OP, INF_OP) \
+ long long HIDDEN \
+ __sync_##OP##_and_fetch_8 (long long *ptr, long long val) \
+ { \
+ long long tmp, newval; \
+ int failure; \
+ \
+ do { \
+ tmp = *ptr; \
+ newval = PFX_OP (tmp INF_OP val); \
+ failure = __kernel_cmpxchg2 (&tmp, &newval, ptr, 3); \
+ } while (failure != 0); \
+ \
+ return PFX_OP (tmp INF_OP val); \
+ }
+
+OP_AND_FETCH_DWORD (add, , +)
+OP_AND_FETCH_DWORD (sub, , -)
+OP_AND_FETCH_DWORD (or, , |)
+OP_AND_FETCH_DWORD (and, , &)
+OP_AND_FETCH_DWORD (xor, , ^)
+OP_AND_FETCH_DWORD (nand, ~, &)
+
#define OP_AND_FETCH_WORD(OP, PFX_OP, INF_OP) \
int HIDDEN \
__sync_##OP##_and_fetch_4 (int *ptr, int val) \
@@ -182,6 +253,26 @@
SUBWORD_SYNC_OP (xor, , ^, unsigned char, 1, newval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned char, 1, newval)
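+/* 64-bit val_compare_and_swap: return the value actually found at *PTR;
+   NEWVAL is stored only if that value matched OLDVAL.  */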
+long long HIDDEN
+__sync_val_compare_and_swap_8 (long long *ptr, long long oldval,
+                               long long newval)
+{
+ long long actual_oldval;
+ int fail;
+
+ while (1)
+ {
+ actual_oldval = *ptr;
+
+ if (__builtin_expect (oldval != actual_oldval, 0))
+ return actual_oldval;
+
+ fail = __kernel_cmpxchg2 (&actual_oldval, &newval, ptr, 3);
+
+ if (__builtin_expect (!fail, 1))
+ return actual_oldval;
+ }
+}
+
int HIDDEN
__sync_val_compare_and_swap_4 (int *ptr, int oldval, int newval)
{
@@ -256,6 +347,20 @@
SUBWORD_BOOL_CAS (unsigned short, 2)
SUBWORD_BOOL_CAS (unsigned char, 1)
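+/* 64-bit lock_test_and_set: unconditionally store VAL and return the
+   previous contents of *PTR.  */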
+long long HIDDEN
+__sync_lock_test_and_set_8 (long long *ptr, long long val)
+{
+ long long oldval;
+ int failure;
+
+ do {
+ oldval = *ptr;
+ failure = __kernel_cmpxchg2 (&oldval, &val, ptr, 3);
+ } while (failure != 0);
+
+ return oldval;
+}
+
int HIDDEN
__sync_lock_test_and_set_4 (int *ptr, int val)
{
@@ -294,6 +399,17 @@
SUBWORD_TEST_AND_SET (unsigned char, 1)
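+/* 64-bit lock_release: store zero to *PTR, retrying until the
+   compare-and-swap succeeds.  */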
void HIDDEN
+__sync_lock_release_8 (long long *ptr)
+{
+ long long failure, oldval, zero = 0;
+
+ do {
+ oldval = *ptr;
+ failure = __kernel_cmpxchg2 (&oldval, &zero, ptr, 3);
+ } while (failure != 0);
+}
+
+void HIDDEN
__sync_lock_release_4 (int *ptr)
{
int failure, oldval;