atomic.h

#ifndef _INTERNAL_ATOMIC_H
#define _INTERNAL_ATOMIC_H

#include <stdint.h>
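
/* a_ctz_l counts trailing zero bits with a de Bruijn lookup: (x&-x)
 * isolates the lowest set bit, and multiplying by the de Bruijn
 * constant 0x076be629 maps each power of two to a distinct 5-bit
 * table index. */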
static inline int a_ctz_l(unsigned long x)
{
	static const char debruijn32[32] = {
		0, 1, 23, 2, 29, 24, 19, 3, 30, 27, 25, 11, 20, 8, 4, 13,
		31, 22, 28, 18, 26, 10, 7, 12, 21, 17, 9, 6, 16, 5, 15, 14
	};
	return debruijn32[(x&-x)*0x076be629 >> 27];
}

static inline int a_ctz_64(uint64_t x)
{
	uint32_t y = x;
	if (!y) {
		y = x>>32;
		return 32 + a_ctz_l(y);
	}
	return a_ctz_l(y);
}
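
/* The atomic operations below are built on the MIPS ll/sc pair:
 * ll (load linked) reads the word and sets a reservation, and
 * sc (store conditional) writes back only if the reservation still
 * holds, leaving 1 in its register on success and 0 on failure,
 * in which case the loop retries. */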
static inline int a_cas(volatile int *p, int t, int s)
{
	int dummy;
	__asm__ __volatile__(
		".set push\n"
		".set noreorder\n"
		"1: ll %0, 0(%2)\n"
		" bne %0, %3, 1f\n"
		" addu %1, %4, $0\n"
		" sc %1, 0(%2)\n"
		" beq %1, $0, 1b\n"
		" nop\n"
		"1: \n"
		".set pop\n"
		: "=&r"(t), "=&r"(dummy) : "r"(p), "r"(t), "r"(s) : "memory" );
	return t;
}
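
/* The pointer and long variants forward to the 32-bit a_cas; this
 * relies on int, long and pointers all being 32 bits wide, as in the
 * 32-bit MIPS ABI. */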
static inline void *a_cas_p(volatile void *p, void *t, void *s)
{
	return (void *)a_cas(p, (int)t, (int)s);
}

static inline long a_cas_l(volatile void *p, long t, long s)
{
	return a_cas(p, t, s);
}

static inline int a_swap(volatile int *x, int v)
{
	int old, dummy;
	__asm__ __volatile__(
		".set push\n"
		".set noreorder\n"
		"1: ll %0, 0(%2)\n"
		" addu %1, %3, $0\n"
		" sc %1, 0(%2)\n"
		" beq %1, $0, 1b\n"
		" nop\n"
		"1: \n"
		".set pop\n"
		: "=&r"(old), "=&r"(dummy) : "r"(x), "r"(v) : "memory" );
	return old;
}
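
/* a_fetch_add atomically adds v to *x and returns the value *x held
 * before the addition. */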
static inline int a_fetch_add(volatile int *x, int v)
{
	int old, dummy;
	__asm__ __volatile__(
		".set push\n"
		".set noreorder\n"
		"1: ll %0, 0(%2)\n"
		" addu %1, %0, %3\n"
		" sc %1, 0(%2)\n"
		" beq %1, $0, 1b\n"
		" nop\n"
		"1: \n"
		".set pop\n"
		: "=&r"(old), "=&r"(dummy) : "r"(x), "r"(v) : "memory" );
	return old;
}

static inline void a_inc(volatile int *x)
{
	int dummy;
	__asm__ __volatile__(
		".set push\n"
		".set noreorder\n"
		"1: ll %0, 0(%1)\n"
		" addu %0, %0, 1\n"
		" sc %0, 0(%1)\n"
		" beq %0, $0, 1b\n"
		" nop\n"
		"1: \n"
		".set pop\n"
		: "=&r"(dummy) : "r"(x) : "memory" );
}

static inline void a_dec(volatile int *x)
{
	int dummy;
	__asm__ __volatile__(
		".set push\n"
		".set noreorder\n"
		"1: ll %0, 0(%1)\n"
		" subu %0, %0, 1\n"
		" sc %0, 0(%1)\n"
		" beq %0, $0, 1b\n"
		" nop\n"
		"1: \n"
		".set pop\n"
		: "=&r"(dummy) : "r"(x) : "memory" );
}
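
/* a_store writes x into *p through an ll/sc loop: the loaded value is
 * discarded and x is stored unconditionally, retrying if the
 * reservation is lost. */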
static inline void a_store(volatile int *p, int x)
{
	int dummy;
	__asm__ __volatile__(
		".set push\n"
		".set noreorder\n"
		"1: ll %0, 0(%1)\n"
		" addu %0, %2, $0\n"
		" sc %0, 0(%1)\n"
		" beq %0, $0, 1b\n"
		" nop\n"
		"1: \n"
		".set pop\n"
		: "=&r"(dummy) : "r"(p), "r"(x) : "memory" );
}
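
/* a_spin is a hint for busy-wait loops (a no-op here); a_crash forces
 * a fault by writing through a null pointer. */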
static inline void a_spin()
{
}

static inline void a_crash()
{
	*(volatile char *)0=0;
}

static inline void a_and(volatile int *p, int v)
{
	int dummy;
	__asm__ __volatile__(
		".set push\n"
		".set noreorder\n"
		"1: ll %0, 0(%1)\n"
		" and %0, %0, %2\n"
		" sc %0, 0(%1)\n"
		" beq %0, $0, 1b\n"
		" nop\n"
		"1: \n"
		".set pop\n"
		: "=&r"(dummy) : "r"(p), "r"(v) : "memory" );
}

static inline void a_or(volatile int *p, int v)
{
	int dummy;
	__asm__ __volatile__(
		".set push\n"
		".set noreorder\n"
		"1: ll %0, 0(%1)\n"
		" or %0, %0, %2\n"
		" sc %0, 0(%1)\n"
		" beq %0, $0, 1b\n"
		" nop\n"
		"1: \n"
		".set pop\n"
		: "=&r"(dummy) : "r"(p), "r"(v) : "memory" );
}
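
/* The 64-bit and/or update the two 32-bit halves independently: each
 * half is modified atomically, but the 64-bit value as a whole is not
 * read or written in one atomic step. */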
static inline void a_and_64(volatile uint64_t *p, uint64_t v)
{
	union { uint64_t v; uint32_t r[2]; } u = { v };
	a_and((int *)p, u.r[0]);
	a_and((int *)p+1, u.r[1]);
}

static inline void a_or_64(volatile uint64_t *p, uint64_t v)
{
	union { uint64_t v; uint32_t r[2]; } u = { v };
	a_or((int *)p, u.r[0]);
	a_or((int *)p+1, u.r[1]);
}

#endif
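
/* Example: a minimal spinlock sketch built on these primitives (the
 * lock_* names are illustrative, not part of this header).
 *
 *	static volatile int lock;
 *
 *	static void lock_acquire(void)
 *	{
 *		while (a_cas(&lock, 0, 1)) a_spin();
 *	}
 *
 *	static void lock_release(void)
 *	{
 *		a_store(&lock, 0);
 *	}
 */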