/* atomic_arch.h: x86_64 inline-asm atomic primitives */

#include <stdint.h>

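/* a_ctz_64: count trailing zeros of a nonzero 64-bit value via BSF;
 * the result is undefined for x == 0 (BSF leaves its destination
 * undefined). The self-referential #define lets a generic header
 * detect that this arch supplies its own version of each op. */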
#define a_ctz_64 a_ctz_64
static inline int a_ctz_64(uint64_t x)
{
	__asm__( "bsf %1,%0" : "=r"(x) : "r"(x) );
	return x;
}
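
/* a_ctz_l: count trailing zeros of a nonzero unsigned long; undefined for 0. */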
#define a_ctz_l a_ctz_l
static inline int a_ctz_l(unsigned long x)
{
	__asm__( "bsf %1,%0" : "=r"(x) : "r"(x) );
	return x;
}
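
/* a_and_64: atomically *p &= v (lock-prefixed AND). */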
#define a_and_64 a_and_64
static inline void a_and_64(volatile uint64_t *p, uint64_t v)
{
	__asm__( "lock ; and %1, %0"
		: "=m"(*p) : "r"(v) : "memory" );
}
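
/* a_or_64: atomically *p |= v. */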
#define a_or_64 a_or_64
static inline void a_or_64(volatile uint64_t *p, uint64_t v)
{
	__asm__( "lock ; or %1, %0"
		: "=m"(*p) : "r"(v) : "memory" );
}
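
/* a_or_l: atomically OR v into a long-sized word at p. */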
#define a_or_l a_or_l
static inline void a_or_l(volatile void *p, long v)
{
	__asm__( "lock ; or %1, %0"
		: "=m"(*(long *)p) : "r"(v) : "memory" );
}
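
/* a_cas: atomic compare-and-swap: if *p == t, store s into *p;
 * returns the old value of *p. CMPXCHG implicitly compares against
 * and writes back through eax, hence the "a" constraints on t. */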
#define a_cas a_cas
static inline int a_cas(volatile int *p, int t, int s)
{
	__asm__( "lock ; cmpxchg %3, %1"
		: "=a"(t), "=m"(*p) : "a"(t), "r"(s) : "memory" );
	return t;
}
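
/* a_or: atomically *p |= v on an int. */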
#define a_or a_or
static inline void a_or(volatile int *p, int v)
{
	__asm__( "lock ; or %1, %0"
		: "=m"(*p) : "r"(v) : "memory" );
}
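
/* a_and: atomically *p &= v on an int. */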
#define a_and a_and
static inline void a_and(volatile int *p, int v)
{
	__asm__( "lock ; and %1, %0"
		: "=m"(*p) : "r"(v) : "memory" );
}
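
/* a_swap: atomically exchange *x with v and return the old value.
 * XCHG with a memory operand is implicitly locked, so no lock prefix
 * is needed. */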
#define a_swap a_swap
static inline int a_swap(volatile int *x, int v)
{
	__asm__( "xchg %0, %1" : "=r"(v), "=m"(*x) : "0"(v) : "memory" );
	return v;
}
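
/* a_fetch_add: atomically *x += v and return the old value (lock XADD). */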
#define a_fetch_add a_fetch_add
static inline int a_fetch_add(volatile int *x, int v)
{
	__asm__( "lock ; xadd %0, %1" : "=r"(v), "=m"(*x) : "0"(v) : "memory" );
	return v;
}
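
/* a_inc: atomically increment *x. */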
#define a_inc a_inc
static inline void a_inc(volatile int *x)
{
	__asm__( "lock ; incl %0" : "=m"(*x) : "m"(*x) : "memory" );
}
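
/* a_dec: atomically decrement *x. */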
#define a_dec a_dec
static inline void a_dec(volatile int *x)
{
	__asm__( "lock ; decl %0" : "=m"(*x) : "m"(*x) : "memory" );
}
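
/* a_store: store x to *p, then run a lock-prefixed no-op RMW on the
 * stack, which serves as a full store-load fence. */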
#define a_store a_store
static inline void a_store(volatile int *p, int x)
{
	__asm__( "mov %1, %0 ; lock ; orl $0,(%%rsp)" : "=m"(*p) : "r"(x) : "memory" );
}
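
/* a_spin: CPU hint for spin-wait loops (PAUSE). */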
#define a_spin a_spin
static inline void a_spin()
{
	__asm__ __volatile__( "pause" : : : "memory" );
}
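
/* a_barrier: compiler-only barrier; on x86_64 the lock-prefixed ops
 * and a_store's fence provide the needed hardware ordering. */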
#define a_barrier a_barrier
static inline void a_barrier()
{
	__asm__ __volatile__( "" : : : "memory" );
}
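
/* a_crash: force a fatal fault; HLT is privileged, so executing it in
 * user mode raises an exception (delivered as SIGSEGV on Linux). */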
#define a_crash a_crash
static inline void a_crash()
{
	__asm__ __volatile__( "hlt" : : : "memory" );
}
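
/* Usage sketch (not part of the original header): a minimal
 * test-and-set spinlock built on the primitives above, assuming their
 * semantics as commented. example_spin_lock/example_spin_unlock are
 * hypothetical names for illustration only. */
static inline void example_spin_lock(volatile int *l)
{
	/* a_swap returns the old value; loop until we swapped in a 1
	 * while the lock word was 0, pausing between attempts. */
	while (a_swap(l, 1)) a_spin();
}
static inline void example_spin_unlock(volatile int *l)
{
	/* a_store's fence makes the critical section's writes visible
	 * before the lock word is cleared. */
	a_store(l, 0);
}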