atomic.h

#ifndef _INTERNAL_ATOMIC_H
#define _INTERNAL_ATOMIC_H

#include <stdint.h>
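
/* Count trailing zero bits using BSF; the result is undefined for x == 0. */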
static inline int a_ctz_64(uint64_t x)
{
	__asm__( "bsf %1,%0" : "=r"(x) : "r"(x) );
	return x;
}

static inline int a_ctz_l(unsigned long x)
{
	__asm__( "bsf %1,%0" : "=r"(x) : "r"(x) );
	return x;
}
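
/* Atomic bitwise AND/OR on a 64-bit word (lock-prefixed). */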
static inline void a_and_64(volatile uint64_t *p, uint64_t v)
{
	__asm__( "lock ; and %1, %0"
		: "=m"(*p) : "r"(v) : "memory" );
}

static inline void a_or_64(volatile uint64_t *p, uint64_t v)
{
	__asm__( "lock ; or %1, %0"
		: "=m"(*p) : "r"(v) : "memory" );
}
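
/* Plain store of a long; the "memory" clobber acts as a compiler barrier. */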
static inline void a_store_l(volatile void *p, long x)
{
	__asm__( "mov %1, %0" : "=m"(*(long *)p) : "r"(x) : "memory" );
}
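
/* Atomic bitwise OR on a long. */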
static inline void a_or_l(volatile void *p, long v)
{
	__asm__( "lock ; or %1, %0"
		: "=m"(*(long *)p) : "r"(v) : "memory" );
}
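
/* Compare-and-swap: if *p equals t, store s into *p. Returns the previous
 * value of *p (equal to t on success). Pointer, long and int variants. */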
static inline void *a_cas_p(volatile void *p, void *t, void *s)
{
	__asm__( "lock ; cmpxchg %3, %1"
		: "=a"(t), "=m"(*(long *)p) : "a"(t), "r"(s) : "memory" );
	return t;
}

static inline long a_cas_l(volatile void *p, long t, long s)
{
	__asm__( "lock ; cmpxchg %3, %1"
		: "=a"(t), "=m"(*(long *)p) : "a"(t), "r"(s) : "memory" );
	return t;
}

static inline int a_cas(volatile int *p, int t, int s)
{
	__asm__( "lock ; cmpxchg %3, %1"
		: "=a"(t), "=m"(*p) : "a"(t), "r"(s) : "memory" );
	return t;
}
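
/* Atomic exchange via XCHG (implicitly locked); returns the previous value. */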
static inline void *a_swap_p(void *volatile *x, void *v)
{
	__asm__( "xchg %0, %1" : "=r"(v), "=m"(*(void **)x) : "0"(v) : "memory" );
	return v;
}

static inline long a_swap_l(volatile void *x, long v)
{
	__asm__( "xchg %0, %1" : "=r"(v), "=m"(*(long *)x) : "0"(v) : "memory" );
	return v;
}
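
/* Atomic bitwise OR/AND on an int. */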
static inline void a_or(volatile void *p, int v)
{
	__asm__( "lock ; or %1, %0"
		: "=m"(*(int *)p) : "r"(v) : "memory" );
}

static inline void a_and(volatile void *p, int v)
{
	__asm__( "lock ; and %1, %0"
		: "=m"(*(int *)p) : "r"(v) : "memory" );
}
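
/* Atomic exchange of an int. */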
static inline int a_swap(volatile int *x, int v)
{
	__asm__( "xchg %0, %1" : "=r"(v), "=m"(*x) : "0"(v) : "memory" );
	return v;
}

#define a_xchg a_swap
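
/* Atomic fetch-and-add via LOCK XADD; returns the value held before the add. */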
static inline int a_fetch_add(volatile int *x, int v)
{
	__asm__( "lock ; xadd %0, %1" : "=r"(v), "=m"(*x) : "0"(v) : "memory" );
	return v;
}
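
/* Atomic increment/decrement of an int. */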
static inline void a_inc(volatile int *x)
{
	__asm__( "lock ; incl %0" : "=m"(*x) : "m"(*x) : "memory" );
}

static inline void a_dec(volatile int *x)
{
	__asm__( "lock ; decl %0" : "=m"(*x) : "m"(*x) : "memory" );
}
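
/* Store an int; the "memory" clobber acts as a compiler barrier. */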
static inline void a_store(volatile int *p, int x)
{
	__asm__( "mov %1, %0" : "=m"(*p) : "r"(x) : "memory" );
}
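
/* PAUSE hint to reduce power use and bus contention in spin-wait loops. */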
static inline void a_spin()
{
	__asm__ __volatile__( "pause" : : : "memory" );
}
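
/* Force an immediate crash: HLT is privileged and faults in user mode. */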
static inline void a_crash()
{
	__asm__ __volatile__( "hlt" : : : "memory" );
}

#endif