/* atomic_arch.h — ARM architecture-specific atomic primitives (musl libc) */
/* Table of out-of-line atomic helper entry points used on ARM variants
 * that lack the needed instructions; hidden visibility so references
 * bind locally within libc. NOTE(review): presumably populated at
 * startup with kernel-assisted or runtime-selected routines — confirm
 * against the definition site. */
__attribute__((__visibility__("hidden")))
extern const void *__arm_atomics[3]; /* gettp, cas, barrier */
  3. #if ((__ARM_ARCH_6__ || __ARM_ARCH_6K__ || __ARM_ARCH_6ZK__) && !__thumb__) \
  4. || __ARM_ARCH_7A__ || __ARM_ARCH_7R__ || __ARM_ARCH >= 7
  5. #define a_ll a_ll
  6. static inline int a_ll(volatile int *p)
  7. {
  8. int v;
  9. __asm__ __volatile__ ("ldrex %0, %1" : "=r"(v) : "Q"(*p));
  10. return v;
  11. }
  12. #define a_sc a_sc
  13. static inline int a_sc(volatile int *p, int v)
  14. {
  15. int r;
  16. __asm__ __volatile__ ("strex %0,%2,%1" : "=&r"(r), "=Q"(*p) : "r"(v) : "memory");
  17. return !r;
  18. }
  19. #if __ARM_ARCH_7A__ || __ARM_ARCH_7R__ || __ARM_ARCH >= 7
  20. #define a_barrier a_barrier
  21. static inline void a_barrier()
  22. {
  23. __asm__ __volatile__ ("dmb ish" : : : "memory");
  24. }
  25. #endif
  26. #define a_pre_llsc a_barrier
  27. #define a_post_llsc a_barrier
#else

/* Pre-v6 (or v6 Thumb): no usable LDREX/STREX, so compare-and-swap is
 * routed through the out-of-line __a_cas helper, called with an ABI
 * that pins the operands in r0-r2. NOTE(review): presumably backed by
 * the kernel cmpxchg helper on old kernels — confirm at __a_cas's
 * definition site. */
#define a_cas a_cas
static inline int a_cas(volatile int *p, int t, int s)
{
	for (;;) {
		register int r0 __asm__("r0") = t;           /* expected old value */
		register int r1 __asm__("r1") = s;           /* replacement value */
		register volatile int *r2 __asm__("r2") = p; /* target address */
		int old;
		/* __a_cas leaves its status in r0 and may clobber r3/ip/lr/flags. */
		__asm__ __volatile__ (
		"bl __a_cas"
		: "+r"(r0) : "r"(r1), "r"(r2)
		: "memory", "r3", "lr", "ip", "cc" );
		/* Status 0: swap succeeded, so *p held t. */
		if (!r0) return t;
		/* Swap failed: if *p genuinely differs from t, report the
		 * observed value; otherwise the failure was spurious (value
		 * changed back, or helper-level contention) — retry. */
		if ((old=*p)!=t) return old;
	}
}

#endif
  46. #ifndef a_barrier
  47. #define a_barrier a_barrier
  48. static inline void a_barrier()
  49. {
  50. __asm__ __volatile__("bl __a_barrier"
  51. : : : "memory", "cc", "ip", "lr" );
  52. }
  53. #endif
  54. #define a_crash a_crash
  55. static inline void a_crash()
  56. {
  57. __asm__ __volatile__(
  58. #ifndef __thumb__
  59. ".word 0xe7f000f0"
  60. #else
  61. ".short 0xdeff"
  62. #endif
  63. : : : "memory");
  64. }