/* atomic.h — AArch64 atomic primitives (LL/SC based, pre-LSE) */
  1. #ifndef _INTERNAL_ATOMIC_H
  2. #define _INTERNAL_ATOMIC_H
  3. #include <stdint.h>
/* Count trailing zeros of a 64-bit value.
 * rbit reverses the bit order, so clz of the reversed value equals the
 * number of trailing zeros of the original. For x == 0 this yields 64. */
static inline int a_ctz_64(uint64_t x)
{
__asm__(
" rbit %0, %1\n"
" clz %0, %0\n"
: "=r"(x) : "r"(x));
return x;
}
  12. static inline int a_ctz_l(unsigned long x)
  13. {
  14. return a_ctz_64(x);
  15. }
  16. static inline void a_barrier()
  17. {
  18. __asm__ __volatile__("dmb ish");
  19. }
/* Atomic compare-and-swap on a pointer-sized (64-bit) object.
 * If *p == t, store s into *p. Returns the value *p held before the
 * operation (equal to t iff the swap happened). Barriers on both sides
 * give sequentially consistent semantics.
 * Local label "1" is defined twice: "1b" in cbnz branches back to the
 * first, "b.ne 1f" branches forward to the second.
 * NOTE(review): *p appears only as an input "Q" operand although the
 * asm may store through it; correctness relies on the "memory" clobber
 * forcing the compiler to treat all memory as read and written. */
static inline void *a_cas_p(volatile void *p, void *t, void *s)
{
void *old;
__asm__ __volatile__(
" dmb ish\n"
"1: ldxr %0,%3\n"
" cmp %0,%1\n"
" b.ne 1f\n"
" stxr %w0,%2,%3\n"
" cbnz %w0,1b\n"
" mov %0,%1\n"
"1: dmb ish\n"
: "=&r"(old)
: "r"(t), "r"(s), "Q"(*(long*)p)
: "memory", "cc");
return old;
}
/* Atomic compare-and-swap on an int.
 * If *p == t, store s into *p. Returns the value *p held before the
 * operation (equal to t iff the swap happened). Barriers on both sides
 * give sequentially consistent semantics.
 * %w0 is reused for two purposes: it holds the loaded value, then the
 * stxr status flag (0 = store succeeded), and is finally rewritten with
 * t on the success path so the return value is correct. */
static inline int a_cas(volatile int *p, int t, int s)
{
int old;
__asm__ __volatile__(
" dmb ish\n"
"1: ldxr %w0,%3\n"
" cmp %w0,%w1\n"
" b.ne 1f\n"
" stxr %w0,%w2,%3\n"
" cbnz %w0,1b\n"
" mov %w0,%w1\n"
"1: dmb ish\n"
: "=&r"(old)
: "r"(t), "r"(s), "Q"(*p)
: "memory", "cc");
return old;
}
/* Atomically replace *x with v; returns the previous value of *x.
 * LL/SC loop: ldxr loads exclusive, stxr attempts the store and sets
 * tmp nonzero if the exclusive reservation was lost, in which case we
 * retry. Full barriers on both sides. */
static inline int a_swap(volatile int *x, int v)
{
int old, tmp;
__asm__ __volatile__(
" dmb ish\n"
"1: ldxr %w0,%3\n"
" stxr %w1,%w2,%3\n"
" cbnz %w1,1b\n"
" dmb ish\n"
: "=&r"(old), "=&r"(tmp)
: "r"(v), "Q"(*x)
: "memory", "cc" );
return old;
}
/* Atomically add v to *x; returns the value *x held BEFORE the add.
 * The add is performed in %w0 before the store, so after the loop the
 * variable `old` actually holds the NEW value — hence the `old-v`
 * on return to recover the pre-add value. */
static inline int a_fetch_add(volatile int *x, int v)
{
int old, tmp;
__asm__ __volatile__(
" dmb ish\n"
"1: ldxr %w0,%3\n"
" add %w0,%w0,%w2\n"
" stxr %w1,%w0,%3\n"
" cbnz %w1,1b\n"
" dmb ish\n"
: "=&r"(old), "=&r"(tmp)
: "r"(v), "Q"(*x)
: "memory", "cc" );
return old-v;
}
/* Atomically increment *x. No value is returned; tmp/tmp2 are only
 * scratch registers for the LL/SC retry loop. */
static inline void a_inc(volatile int *x)
{
int tmp, tmp2;
__asm__ __volatile__(
" dmb ish\n"
"1: ldxr %w0,%2\n"
" add %w0,%w0,#1\n"
" stxr %w1,%w0,%2\n"
" cbnz %w1,1b\n"
" dmb ish\n"
: "=&r"(tmp), "=&r"(tmp2)
: "Q"(*x)
: "memory", "cc" );
}
/* Atomically decrement *x. No value is returned; tmp/tmp2 are only
 * scratch registers for the LL/SC retry loop. */
static inline void a_dec(volatile int *x)
{
int tmp, tmp2;
__asm__ __volatile__(
" dmb ish\n"
"1: ldxr %w0,%2\n"
" sub %w0,%w0,#1\n"
" stxr %w1,%w0,%2\n"
" cbnz %w1,1b\n"
" dmb ish\n"
: "=&r"(tmp), "=&r"(tmp2)
: "Q"(*x)
: "memory", "cc" );
}
  111. static inline void a_and_64(volatile uint64_t *p, uint64_t v)
  112. {
  113. int tmp, tmp2;
  114. __asm__ __volatile__(
  115. " dmb ish\n"
  116. "1: ldxr %0,%3\n"
  117. " and %0,%0,%2\n"
  118. " stxr %w1,%0,%3\n"
  119. " cbnz %w1,1b\n"
  120. " dmb ish\n"
  121. : "=&r"(tmp), "=&r"(tmp2)
  122. : "r"(v), "Q"(*p)
  123. : "memory", "cc" );
  124. }
/* Atomically perform *p &= v (32-bit). All operands are accessed via
 * %w (32-bit w-register) forms, matching the int operand types. */
static inline void a_and(volatile int *p, int v)
{
int tmp, tmp2;
__asm__ __volatile__(
" dmb ish\n"
"1: ldxr %w0,%3\n"
" and %w0,%w0,%w2\n"
" stxr %w1,%w0,%3\n"
" cbnz %w1,1b\n"
" dmb ish\n"
: "=&r"(tmp), "=&r"(tmp2)
: "r"(v), "Q"(*p)
: "memory", "cc" );
}
  139. static inline void a_or_64(volatile uint64_t *p, uint64_t v)
  140. {
  141. int tmp, tmp2;
  142. __asm__ __volatile__(
  143. " dmb ish\n"
  144. "1: ldxr %0,%3\n"
  145. " orr %0,%0,%2\n"
  146. " stxr %w1,%0,%3\n"
  147. " cbnz %w1,1b\n"
  148. " dmb ish\n"
  149. : "=&r"(tmp), "=&r"(tmp2)
  150. : "r"(v), "Q"(*p)
  151. : "memory", "cc" );
  152. }
  153. static inline void a_or_l(volatile void *p, long v)
  154. {
  155. return a_or_64(p, v);
  156. }
/* Atomically perform *p |= v (32-bit). All operands are accessed via
 * %w (32-bit w-register) forms, matching the int operand types. */
static inline void a_or(volatile int *p, int v)
{
int tmp, tmp2;
__asm__ __volatile__(
" dmb ish\n"
"1: ldxr %w0,%3\n"
" orr %w0,%w0,%w2\n"
" stxr %w1,%w0,%3\n"
" cbnz %w1,1b\n"
" dmb ish\n"
: "=&r"(tmp), "=&r"(tmp2)
: "r"(v), "Q"(*p)
: "memory", "cc" );
}
/* Atomic store of x into *p with full barriers on both sides
 * (sequentially consistent store). A plain str suffices for the store
 * itself since aligned word stores are single-copy atomic on aarch64. */
static inline void a_store(volatile int *p, int x)
{
__asm__ __volatile__(
" dmb ish\n"
" str %w1,%0\n"
" dmb ish\n"
: "=m"(*p)
: "r"(x)
: "memory", "cc" );
}
  181. #define a_spin a_barrier
  182. static inline void a_crash()
  183. {
  184. *(volatile char *)0=0;
  185. }
  186. #endif