atomic.h

#ifndef _INTERNAL_ATOMIC_H
#define _INTERNAL_ATOMIC_H

#include <stdint.h>
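
/* Count trailing zeros of a nonzero word: x&-x isolates the lowest set
 * bit, and the de Bruijn multiply-and-shift maps each power of two to a
 * distinct 5-bit index into the lookup table. */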
static inline int a_ctz_l(unsigned long x)
{
	static const char debruijn32[32] = {
		0, 1, 23, 2, 29, 24, 19, 3, 30, 27, 25, 11, 20, 8, 4, 13,
		31, 22, 28, 18, 26, 10, 7, 12, 21, 17, 9, 6, 16, 5, 15, 14
	};
	return debruijn32[(x&-x)*0x076be629 >> 27];
}
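
/* 64-bit trailing-zero count: scan the low word first; if it is all
 * zeros, count within the high word and add 32. */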
static inline int a_ctz_64(uint64_t x)
{
	uint32_t y = x;
	if (!y) {
		y = x>>32;
		return 32 + a_ctz_l(y);
	}
	return a_ctz_l(y);
}
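
/* SH4A ll/sc scaffolding: movli.l/movco.l are the load-locked and
 * store-conditional instructions, synco is a memory barrier, and
 * "bf 0b" retries the whole sequence when the conditional store fails
 * (movco.l clears the T flag on failure). */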
#define LLSC_CLOBBERS "r0", "t", "memory"
#define LLSC_START(mem) "synco\n" \
	"0: movli.l @" mem ", r0\n"
#define LLSC_END(mem) \
	"1: movco.l r0, @" mem "\n" \
	" bf 0b\n" \
	" synco\n"
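
/* Compare-and-swap: load *p, and if it equals t, store s; either way
 * return the old value. On mismatch the branch to 1f stores the old
 * value back unchanged, completing the ll/sc pair. */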
static inline int __sh_cas_llsc(volatile int *p, int t, int s)
{
	int old;
	__asm__ __volatile__(
		LLSC_START("%1")
		" mov r0, %0\n"
		" cmp/eq %0, %2\n"
		" bf 1f\n"
		" mov %3, r0\n"
		LLSC_END("%1")
		: "=&r"(old) : "r"(p), "r"(t), "r"(s) : LLSC_CLOBBERS);
	return old;
}
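
/* Atomic exchange: unconditionally store v and return the old value. */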
static inline int __sh_swap_llsc(volatile int *x, int v)
{
	int old;
	__asm__ __volatile__(
		LLSC_START("%1")
		" mov r0, %0\n"
		" mov %2, r0\n"
		LLSC_END("%1")
		: "=&r"(old) : "r"(x), "r"(v) : LLSC_CLOBBERS);
	return old;
}
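
/* Atomic fetch-and-add: add v to *x and return the value *x held
 * before the addition. */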
static inline int __sh_fetch_add_llsc(volatile int *x, int v)
{
	int old;
	__asm__ __volatile__(
		LLSC_START("%1")
		" mov r0, %0\n"
		" add %2, r0\n"
		LLSC_END("%1")
		: "=&r"(old) : "r"(x), "r"(v) : LLSC_CLOBBERS);
	return old;
}
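
/* Atomic store: a plain mov.l bracketed by synco barriers on both
 * sides, so neither prior nor later memory accesses are reordered
 * across it. */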
static inline void __sh_store_llsc(volatile int *p, int x)
{
	__asm__ __volatile__(
		" synco\n"
		" mov.l %1, @%0\n"
		" synco\n"
		: : "r"(p), "r"(x) : "memory");
}
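
/* Atomic AND/OR with no return value: read-modify-write inside the
 * ll/sc loop, retried until the store-conditional succeeds. */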
static inline void __sh_and_llsc(volatile int *x, int v)
{
	__asm__ __volatile__(
		LLSC_START("%0")
		" and %1, r0\n"
		LLSC_END("%0")
		: : "r"(x), "r"(v) : LLSC_CLOBBERS);
}

static inline void __sh_or_llsc(volatile int *x, int v)
{
	__asm__ __volatile__(
		LLSC_START("%0")
		" or %1, r0\n"
		LLSC_END("%0")
		: : "r"(x), "r"(v) : LLSC_CLOBBERS);
}
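
/* When building for SH4A the ll/sc sequences can be used inline
 * unconditionally; otherwise out-of-line helpers are declared so the
 * choice of implementation can be made elsewhere (e.g. at runtime). */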
#ifdef __SH4A__

#define a_cas(p,t,s) __sh_cas_llsc(p,t,s)
#define a_swap(x,v) __sh_swap_llsc(x,v)
#define a_fetch_add(x,v) __sh_fetch_add_llsc(x, v)
#define a_store(x,v) __sh_store_llsc(x, v)
#define a_and(x,v) __sh_and_llsc(x, v)
#define a_or(x,v) __sh_or_llsc(x, v)

#else

int __sh_cas(volatile int *, int, int);
int __sh_swap(volatile int *, int);
int __sh_fetch_add(volatile int *, int);
void __sh_store(volatile int *, int);
void __sh_and(volatile int *, int);
void __sh_or(volatile int *, int);

#define a_cas(p,t,s) __sh_cas(p,t,s)
#define a_swap(x,v) __sh_swap(x,v)
#define a_fetch_add(x,v) __sh_fetch_add(x, v)
#define a_store(x,v) __sh_store(x, v)
#define a_and(x,v) __sh_and(x, v)
#define a_or(x,v) __sh_or(x, v)

#endif
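
/* Pointer-sized CAS: on 32-bit SH a pointer fits in an int, so this
 * is implemented on top of the integer a_cas. */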
static inline void *a_cas_p(volatile void *p, void *t, void *s)
{
	return (void *)a_cas(p, (int)t, (int)s);
}

static inline void a_inc(volatile int *x)
{
	a_fetch_add(x, 1);
}

static inline void a_dec(volatile int *x)
{
	a_fetch_add(x, -1);
}
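
/* A dummy CAS on a stack temporary serves as a full barrier; a_spin
 * reuses it as the spin-loop hint. */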
#define a_spin a_barrier

static inline void a_barrier()
{
	a_cas(&(int){0}, 0, 0);
}
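
/* Force a fatal fault via a store through a null pointer. */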
static inline void a_crash()
{
	*(volatile char *)0=0;
}

static inline void a_or_l(volatile void *p, long v)
{
	a_or(p, v);
}
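
/* 64-bit AND/OR built from two 32-bit operations. The union maps the
 * halves of v in memory order, so the indexing matches either
 * endianness, but the two words are not updated as a single atomic
 * unit. */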
static inline void a_and_64(volatile uint64_t *p, uint64_t v)
{
	union { uint64_t v; uint32_t r[2]; } u = { v };
	a_and((int *)p, u.r[0]);
	a_and((int *)p+1, u.r[1]);
}

static inline void a_or_64(volatile uint64_t *p, uint64_t v)
{
	union { uint64_t v; uint32_t r[2]; } u = { v };
	a_or((int *)p, u.r[0]);
	a_or((int *)p+1, u.r[1]);
}

#endif