/* atomic_arch.h - AArch64 atomic primitives (GCC inline assembly) */

#include <stdint.h>

/* Count trailing zeros of a 64-bit value: rbit reverses the bit order,
 * turning trailing zeros into leading zeros that clz can count. */
#define a_ctz_64 a_ctz_64
static inline int a_ctz_64(uint64_t x)
{
	__asm__(
		"	rbit %0, %1\n"
		"	clz %0, %0\n"
		: "=r"(x) : "r"(x));
	return x;
}
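/* Worked example: a_ctz_64(0x18) == 3, since 0x18 = 0b11000 has three
 * trailing zero bits. For x == 0 the rbit/clz sequence yields 64. */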
/* Full memory barrier: dmb ish orders all accesses across the inner
 * shareable domain. The "memory" clobber keeps the compiler from
 * reordering loads/stores across the barrier as well. */
#define a_barrier a_barrier
static inline void a_barrier()
{
	__asm__ __volatile__("dmb ish" : : : "memory");
}
/* Compare-and-swap on a pointer-sized value: if *p == t, store s.
 * Returns the value previously in *p (== t on success). stxr reuses
 * %0 for its status flag, so the success path restores old = t. */
#define a_cas_p a_cas_p
static inline void *a_cas_p(volatile void *p, void *t, void *s)
{
	void *old;
	__asm__ __volatile__(
		"	dmb ish\n"
		"1:	ldxr %0,%3\n"
		"	cmp %0,%1\n"
		"	b.ne 2f\n"
		"	stxr %w0,%2,%3\n"
		"	cbnz %w0,1b\n"
		"	mov %0,%1\n"
		"2:	dmb ish\n"
		: "=&r"(old)
		: "r"(t), "r"(s), "Q"(*(long*)p)
		: "memory", "cc");
	return old;
}
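/*
 * Illustrative sketch (not part of the original file): a_cas_p is the
 * pointer-width CAS, as used e.g. for a Treiber-stack push. The struct
 * and function names below are hypothetical. Note a matching pop would
 * need ABA protection; push alone is safe with this loop.
 */
struct example_node { struct example_node *next; };

static inline void example_push(struct example_node *volatile *top,
                                struct example_node *n)
{
	struct example_node *old;
	do {
		old = *top;
		n->next = old;
	} while (a_cas_p((volatile void *)top, old, n) != old);
}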
/* Compare-and-swap on an int: if *p == t, store s. Returns the value
 * previously in *p (== t on success). */
#define a_cas a_cas
static inline int a_cas(volatile int *p, int t, int s)
{
	int old;
	__asm__ __volatile__(
		"	dmb ish\n"
		"1:	ldxr %w0,%3\n"
		"	cmp %w0,%w1\n"
		"	b.ne 2f\n"
		"	stxr %w0,%w2,%3\n"
		"	cbnz %w0,1b\n"
		"	mov %w0,%w1\n"
		"2:	dmb ish\n"
		: "=&r"(old)
		: "r"(t), "r"(s), "Q"(*p)
		: "memory", "cc");
	return old;
}
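/*
 * Usage sketch (hypothetical name, not part of the original file):
 * a_cas returns the value observed at *p and stores s only if that
 * value equaled t, so read-modify-write loops retry until the CAS
 * confirms no intervening writer:
 */
static inline void example_cas_add(volatile int *p, int v)
{
	int old;
	do old = *p;
	while (a_cas(p, old, old+v) != old);
}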
/* Atomically replace *x with v; returns the old value. */
#define a_swap a_swap
static inline int a_swap(volatile int *x, int v)
{
	int old, tmp;
	__asm__ __volatile__(
		"	dmb ish\n"
		"1:	ldxr %w0,%3\n"
		"	stxr %w1,%w2,%3\n"
		"	cbnz %w1,1b\n"
		"	dmb ish\n"
		: "=&r"(old), "=&r"(tmp)
		: "r"(v), "Q"(*x)
		: "memory", "cc" );
	return old;
}
/* Atomically add v to *x; returns the value *x held before the add. */
#define a_fetch_add a_fetch_add
static inline int a_fetch_add(volatile int *x, int v)
{
	int old, tmp;
	__asm__ __volatile__(
		"	dmb ish\n"
		"1:	ldxr %w0,%3\n"
		"	add %w0,%w0,%w2\n"
		"	stxr %w1,%w0,%3\n"
		"	cbnz %w1,1b\n"
		"	dmb ish\n"
		: "=&r"(old), "=&r"(tmp)
		: "r"(v), "Q"(*x)
		: "memory", "cc" );
	return old-v;
}
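/* Worked example: with *x == 5, a_fetch_add(x, 3) leaves *x == 8 and
 * returns 5. The asm leaves the post-add value in old, so the function
 * subtracts v to recover the pre-add value. */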
/* Atomically increment *x. */
#define a_inc a_inc
static inline void a_inc(volatile int *x)
{
	int tmp, tmp2;
	__asm__ __volatile__(
		"	dmb ish\n"
		"1:	ldxr %w0,%2\n"
		"	add %w0,%w0,#1\n"
		"	stxr %w1,%w0,%2\n"
		"	cbnz %w1,1b\n"
		"	dmb ish\n"
		: "=&r"(tmp), "=&r"(tmp2)
		: "Q"(*x)
		: "memory", "cc" );
}
/* Atomically decrement *x. */
#define a_dec a_dec
static inline void a_dec(volatile int *x)
{
	int tmp, tmp2;
	__asm__ __volatile__(
		"	dmb ish\n"
		"1:	ldxr %w0,%2\n"
		"	sub %w0,%w0,#1\n"
		"	stxr %w1,%w0,%2\n"
		"	cbnz %w1,1b\n"
		"	dmb ish\n"
		: "=&r"(tmp), "=&r"(tmp2)
		: "Q"(*x)
		: "memory", "cc" );
}
/* Atomically and v into a 64-bit word. The loaded value is 64-bit, so
 * its scratch must be uint64_t (%0 expands to the full x register). */
#define a_and_64 a_and_64
static inline void a_and_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t tmp;
	int tmp2;
	__asm__ __volatile__(
		"	dmb ish\n"
		"1:	ldxr %0,%3\n"
		"	and %0,%0,%2\n"
		"	stxr %w1,%0,%3\n"
		"	cbnz %w1,1b\n"
		"	dmb ish\n"
		: "=&r"(tmp), "=&r"(tmp2)
		: "r"(v), "Q"(*p)
		: "memory", "cc" );
}
/* Atomically and v into *p. */
#define a_and a_and
static inline void a_and(volatile int *p, int v)
{
	int tmp, tmp2;
	__asm__ __volatile__(
		"	dmb ish\n"
		"1:	ldxr %w0,%3\n"
		"	and %w0,%w0,%w2\n"
		"	stxr %w1,%w0,%3\n"
		"	cbnz %w1,1b\n"
		"	dmb ish\n"
		: "=&r"(tmp), "=&r"(tmp2)
		: "r"(v), "Q"(*p)
		: "memory", "cc" );
}
/* Atomically or v into a 64-bit word; same 64-bit scratch as a_and_64. */
#define a_or_64 a_or_64
static inline void a_or_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t tmp;
	int tmp2;
	__asm__ __volatile__(
		"	dmb ish\n"
		"1:	ldxr %0,%3\n"
		"	orr %0,%0,%2\n"
		"	stxr %w1,%0,%3\n"
		"	cbnz %w1,1b\n"
		"	dmb ish\n"
		: "=&r"(tmp), "=&r"(tmp2)
		: "r"(v), "Q"(*p)
		: "memory", "cc" );
}
/* long is 64-bit on AArch64, so a_or_l forwards to a_or_64. (A void
 * function must not return an expression, so the call stands alone.) */
#define a_or_l a_or_l
static inline void a_or_l(volatile void *p, long v)
{
	a_or_64(p, v);
}
/* Atomically or v into *p. */
#define a_or a_or
static inline void a_or(volatile int *p, int v)
{
	int tmp, tmp2;
	__asm__ __volatile__(
		"	dmb ish\n"
		"1:	ldxr %w0,%3\n"
		"	orr %w0,%w0,%w2\n"
		"	stxr %w1,%w0,%3\n"
		"	cbnz %w1,1b\n"
		"	dmb ish\n"
		: "=&r"(tmp), "=&r"(tmp2)
		: "r"(v), "Q"(*p)
		: "memory", "cc" );
}
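/*
 * Illustrative flag helpers (hypothetical names, not part of the
 * original file): atomically set or clear bit n of a flag word using
 * the primitives above.
 */
static inline void example_set_flag(volatile int *flags, int n)
{
	a_or(flags, 1 << n);
}

static inline void example_clear_flag(volatile int *flags, int n)
{
	a_and(flags, ~(1 << n));
}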
/* Atomic store of x to *p, with full barriers on both sides. */
#define a_store a_store
static inline void a_store(volatile int *p, int x)
{
	__asm__ __volatile__(
		"	dmb ish\n"
		"	str %w1,%0\n"
		"	dmb ish\n"
		: "=m"(*p)
		: "r"(x)
		: "memory", "cc" );
}
/* Spin-wait relaxation hint: just a full barrier on this target. */
#define a_spin a_barrier
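
/*
 * Usage sketch (hypothetical names, not part of the original file): a
 * minimal test-and-set spinlock over the primitives above. a_swap
 * returns the previous value, so a nonzero result means the lock was
 * already held; a_spin relaxes the busy-wait.
 */
static inline void example_spin_lock(volatile int *l)
{
	while (a_swap(l, 1)) a_spin();
}

static inline void example_spin_unlock(volatile int *l)
{
	a_store(l, 0);
}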