@@ -1,168 +1,64 @@
-#if __ARM_ARCH_7A__ || __ARM_ARCH_7R__ || __ARM_ARCH >= 7
+__attribute__((__visibility__("hidden")))
+extern const void *__arm_atomics[3]; /* gettp, cas, barrier */
 
-#define a_barrier a_barrier
-static inline void a_barrier()
-{
-	__asm__ __volatile__("dmb ish");
-}
+#if ((__ARM_ARCH_6__ || __ARM_ARCH_6K__ || __ARM_ARCH_6ZK__) && !__thumb__) \
+ || __ARM_ARCH_7A__ || __ARM_ARCH_7R__ || __ARM_ARCH >= 7
 
-#define a_cas a_cas
-static inline int a_cas(volatile int *p, int t, int s)
+#define a_ll a_ll
+static inline int a_ll(volatile int *p)
 {
-	int old;
-	__asm__ __volatile__(
-		"	dmb ish\n"
-		"1:	ldrex %0,%3\n"
-		"	cmp %0,%1\n"
-		"	bne 1f\n"
-		"	strex %0,%2,%3\n"
-		"	cmp %0, #0\n"
-		"	bne 1b\n"
-		"	mov %0, %1\n"
-		"1:	dmb ish\n"
-		: "=&r"(old)
-		: "r"(t), "r"(s), "Q"(*p)
-		: "memory", "cc" );
-	return old;
+	int v;
+	__asm__ __volatile__ ("ldrex %0, %1" : "=r"(v) : "Q"(*p));
+	return v;
 }
 
-#define a_swap a_swap
-static inline int a_swap(volatile int *x, int v)
+#define a_sc a_sc
+static inline int a_sc(volatile int *p, int v)
 {
-	int old, tmp;
-	__asm__ __volatile__(
-		"	dmb ish\n"
-		"1:	ldrex %0,%3\n"
-		"	strex %1,%2,%3\n"
-		"	cmp %1, #0\n"
-		"	bne 1b\n"
-		"	dmb ish\n"
-		: "=&r"(old), "=&r"(tmp)
-		: "r"(v), "Q"(*x)
-		: "memory", "cc" );
-	return old;
+	int r;
+	__asm__ __volatile__ ("strex %0,%1,%2" : "=&r"(r) : "r"(v), "Q"(*p) : "memory");
+	return !r;
 }
 
-#define a_fetch_add a_fetch_add
-static inline int a_fetch_add(volatile int *x, int v)
-{
-	int old, tmp;
-	__asm__ __volatile__(
-		"	dmb ish\n"
-		"1:	ldrex %0,%3\n"
-		"	add %0,%0,%2\n"
-		"	strex %1,%0,%3\n"
-		"	cmp %1, #0\n"
-		"	bne 1b\n"
-		"	dmb ish\n"
-		: "=&r"(old), "=&r"(tmp)
-		: "r"(v), "Q"(*x)
-		: "memory", "cc" );
-	return old-v;
-}
+#if __ARM_ARCH_7A__ || __ARM_ARCH_7R__ || __ARM_ARCH >= 7
 
-#define a_inc a_inc
-static inline void a_inc(volatile int *x)
+#define a_barrier a_barrier
+static inline void a_barrier()
 {
-	int tmp, tmp2;
-	__asm__ __volatile__(
-		"	dmb ish\n"
-		"1:	ldrex %0,%2\n"
-		"	add %0,%0,#1\n"
-		"	strex %1,%0,%2\n"
-		"	cmp %1, #0\n"
-		"	bne 1b\n"
-		"	dmb ish\n"
-		: "=&r"(tmp), "=&r"(tmp2)
-		: "Q"(*x)
-		: "memory", "cc" );
+	__asm__ __volatile__ ("dmb ish" : : : "memory");
 }
 
-#define a_dec a_dec
-static inline void a_dec(volatile int *x)
-{
-	int tmp, tmp2;
-	__asm__ __volatile__(
-		"	dmb ish\n"
-		"1:	ldrex %0,%2\n"
-		"	sub %0,%0,#1\n"
-		"	strex %1,%0,%2\n"
-		"	cmp %1, #0\n"
-		"	bne 1b\n"
-		"	dmb ish\n"
-		: "=&r"(tmp), "=&r"(tmp2)
-		: "Q"(*x)
-		: "memory", "cc" );
-}
+#endif
 
-#define a_and a_and
-static inline void a_and(volatile int *x, int v)
-{
-	int tmp, tmp2;
-	__asm__ __volatile__(
-		"	dmb ish\n"
-		"1:	ldrex %0,%3\n"
-		"	and %0,%0,%2\n"
-		"	strex %1,%0,%3\n"
-		"	cmp %1, #0\n"
-		"	bne 1b\n"
-		"	dmb ish\n"
-		: "=&r"(tmp), "=&r"(tmp2)
-		: "r"(v), "Q"(*x)
-		: "memory", "cc" );
-}
+#define a_pre_llsc a_barrier
+#define a_post_llsc a_barrier
 
-#define a_or a_or
-static inline void a_or(volatile int *x, int v)
-{
-	int tmp, tmp2;
-	__asm__ __volatile__(
-		"	dmb ish\n"
-		"1:	ldrex %0,%3\n"
-		"	orr %0,%0,%2\n"
-		"	strex %1,%0,%3\n"
-		"	cmp %1, #0\n"
-		"	bne 1b\n"
-		"	dmb ish\n"
-		: "=&r"(tmp), "=&r"(tmp2)
-		: "r"(v), "Q"(*x)
-		: "memory", "cc" );
-}
+#else
 
-#define a_store a_store
-static inline void a_store(volatile int *p, int x)
+#define a_cas a_cas
+static inline int a_cas(volatile int *p, int t, int s)
 {
-	__asm__ __volatile__(
-		"	dmb ish\n"
-		"	str %1,%0\n"
-		"	dmb ish\n"
-		: "=m"(*p)
-		: "r"(x)
-		: "memory", "cc" );
+	for (;;) {
+		register int r0 __asm__("r0") = t;
+		register int r1 __asm__("r1") = s;
+		register volatile int *r2 __asm__("r2") = p;
+		int old;
+		__asm__ __volatile__ (
+			"bl __a_cas"
+			: "+r"(r0) : "r"(r1), "r"(r2)
+			: "memory", "r3", "lr", "ip", "cc" );
+		if (!r0) return t;
+		if ((old=*p)!=t) return old;
+	}
 }
 
-#else
-
-int __a_cas(int, int, volatile int *) __attribute__((__visibility__("hidden")));
-#define __k_cas __a_cas
+#endif
 
+#ifndef a_barrier
 #define a_barrier a_barrier
 static inline void a_barrier()
 {
 	__asm__ __volatile__("bl __a_barrier"
 		: : : "memory", "cc", "ip", "lr" );
 }
-
-#define a_cas a_cas
-static inline int a_cas(volatile int *p, int t, int s)
-{
-	int old;
-	for (;;) {
-		if (!__k_cas(t, s, p))
-			return t;
-		if ((old=*p) != t)
-			return old;
-	}
-}
-
 #endif