/* atomic.h — internal ARM atomic primitives */
/* Internal atomic operations for 32-bit ARM: ldrex/strex-based CAS on
 * ARMv6+/v7, with a fallback to the kernel-provided cmpxchg user helper
 * on older cores. All higher-level atomics are built on __k_cas. */
#ifndef _INTERNAL_ATOMIC_H
#define _INTERNAL_ATOMIC_H

#include <stdint.h>
  4. static inline int a_ctz_l(unsigned long x)
  5. {
  6. static const char debruijn32[32] = {
  7. 0, 1, 23, 2, 29, 24, 19, 3, 30, 27, 25, 11, 20, 8, 4, 13,
  8. 31, 22, 28, 18, 26, 10, 7, 12, 21, 17, 9, 6, 16, 5, 15, 14
  9. };
  10. return debruijn32[(x&-x)*0x076be629 >> 27];
  11. }
  12. static inline int a_ctz_64(uint64_t x)
  13. {
  14. uint32_t y = x;
  15. if (!y) {
  16. y = x>>32;
  17. return 32 + a_ctz_l(y);
  18. }
  19. return a_ctz_l(y);
  20. }
/* On ARMv6 (ARM mode only) and ARMv7+, implement CAS inline with
 * ldrex/strex; otherwise fall back to the kernel's cmpxchg user helper. */
#if ((__ARM_ARCH_6__ || __ARM_ARCH_6K__ || __ARM_ARCH_6ZK__) && !__thumb__) \
|| __ARM_ARCH_7A__ || __ARM_ARCH_7R__ || __ARM_ARCH >= 7
#if __ARM_ARCH_7A__ || __ARM_ARCH_7R__ || __ARM_ARCH >= 7
/* ARMv7+: real dmb instruction. */
#define MEM_BARRIER "dmb ish"
#else
/* ARMv6: data memory barrier via CP15 c7,c10,5. */
#define MEM_BARRIER "mcr p15,0,r0,c7,c10,5"
#endif
/* Compare-and-swap: if *p == t, store s into *p.
 * Returns 0 on success; nonzero (old - t) if *p != t.
 * Barriers are issued before and after the ldrex/strex sequence, so a
 * successful CAS is a full barrier. The strex failure (status 1) path
 * retries at label 1. */
static inline int __k_cas(int t, int s, volatile int *p)
{
	int ret;
	__asm__(
		" " MEM_BARRIER "\n"
		"1: ldrex %0,%3\n"
		" subs %0,%0,%1\n"      /* ret = old - t; Z set iff old == t */
#ifdef __thumb__
		" itt eq\n"             /* Thumb-2 needs an IT block for the eq forms */
#endif
		" strexeq %0,%2,%3\n"   /* only store when old == t; ret = strex status */
		" teqeq %0,#1\n"        /* strex status 1 means lost reservation... */
		" beq 1b\n"             /* ...so retry the whole sequence */
		" " MEM_BARRIER "\n"
		: "=&r"(ret)
		: "r"(t), "r"(s), "Q"(*p)
		: "memory", "cc" );
	return ret;
}
#else
/* Pre-v6 (or Thumb-1): call the Linux kuser_cmpxchg helper at its fixed
 * address 0xffff0fc0; it returns 0 on success, nonzero on failure, and
 * per the kernel docs includes any needed barriers. */
#define __k_cas ((int (*)(int, int, volatile int *))0xffff0fc0)
#endif
  50. static inline int a_cas(volatile int *p, int t, int s)
  51. {
  52. int old;
  53. for (;;) {
  54. if (!__k_cas(t, s, p))
  55. return t;
  56. if ((old=*p) != t)
  57. return old;
  58. }
  59. }
  60. static inline void *a_cas_p(volatile void *p, void *t, void *s)
  61. {
  62. return (void *)a_cas(p, (int)t, (int)s);
  63. }
  64. static inline int a_swap(volatile int *x, int v)
  65. {
  66. int old;
  67. do old = *x;
  68. while (__k_cas(old, v, x));
  69. return old;
  70. }
  71. static inline int a_fetch_add(volatile int *x, int v)
  72. {
  73. int old;
  74. do old = *x;
  75. while (__k_cas(old, old+v, x));
  76. return old;
  77. }
  78. static inline void a_inc(volatile int *x)
  79. {
  80. a_fetch_add(x, 1);
  81. }
  82. static inline void a_dec(volatile int *x)
  83. {
  84. a_fetch_add(x, -1);
  85. }
  86. static inline void a_store(volatile int *p, int x)
  87. {
  88. while (__k_cas(*p, x, p));
  89. }
  90. #define a_spin a_barrier
  91. static inline void a_barrier()
  92. {
  93. __k_cas(0, 0, &(int){0});
  94. }
/* Deliberately crash the process: a volatile store through a null
 * pointer raises a fault the compiler cannot optimize away. Used for
 * unrecoverable internal errors. */
static inline void a_crash()
{
	*(volatile char *)0=0;
}
  99. static inline void a_and(volatile int *p, int v)
  100. {
  101. int old;
  102. do old = *p;
  103. while (__k_cas(old, old&v, p));
  104. }
  105. static inline void a_or(volatile int *p, int v)
  106. {
  107. int old;
  108. do old = *p;
  109. while (__k_cas(old, old|v, p));
  110. }
  111. static inline void a_or_l(volatile void *p, long v)
  112. {
  113. a_or(p, v);
  114. }
/* AND each 32-bit half of *p with the matching half of v.
 * NOTE: the two halves are updated by separate a_and calls, so only
 * each half is atomic — the 64-bit update as a whole is not. The union
 * split (first/second 4 bytes of v) matches the first/second word of
 * *p in memory, so this is byte-order independent. */
static inline void a_and_64(volatile uint64_t *p, uint64_t v)
{
	union { uint64_t v; uint32_t r[2]; } u = { v };
	a_and((int *)p, u.r[0]);
	a_and((int *)p+1, u.r[1]);
}
/* OR each 32-bit half of *p with the matching half of v.
 * NOTE: two separate a_or calls — each half is atomic, the 64-bit
 * update as a whole is not. Byte-order independent for the same reason
 * as the AND variant: union halves and memory words flip together. */
static inline void a_or_64(volatile uint64_t *p, uint64_t v)
{
	union { uint64_t v; uint32_t r[2]; } u = { v };
	a_or((int *)p, u.r[0]);
	a_or((int *)p+1, u.r[1]);
}

#endif /* _INTERNAL_ATOMIC_H */