/* atomic_arch.h — powerpc64 atomic primitives (ll/sc via lwarx/stwcx. and ldarx/stdcx.) */
#define a_ll a_ll
/* Load-linked: load the word at p and acquire a reservation on it
 * (lwarx), forming the first half of an ll/sc atomic sequence.
 * The "m"(*p) input ties the asm to the pointed-to object so the
 * compiler cannot cache *p across the reservation. */
static inline int a_ll(volatile int *p)
{
	int v;
	__asm__ __volatile__ ("lwarx %0, 0, %2" : "=r"(v) : "m"(*p), "r"(p));
	return v;
}
#define a_sc a_sc
/* Store-conditional: attempt to store v to p (stwcx.), which succeeds
 * only if the reservation from a matching a_ll is still held.  mfcr
 * copies the whole condition register into r; the nonzero result below
 * indicates success, zero indicates the sc failed and the ll/sc loop
 * must retry. */
static inline int a_sc(volatile int *p, int v)
{
	int r;
	__asm__ __volatile__ (
		"stwcx. %2, 0, %3 ; mfcr %0"
		: "=r"(r), "=m"(*p) : "r"(v), "r"(p) : "memory", "cc");
	/* 0x20000000 selects CR0[EQ], set by stwcx. on success. */
	return r & 0x20000000; /* "bit 2" of "cr0" (backwards bit order) */
}
#define a_ll_p a_ll_p
/* Pointer-sized load-linked: same as a_ll but uses the doubleword
 * form (ldarx) for 64-bit pointer values. */
static inline void *a_ll_p(volatile void *p)
{
	void *v;
	__asm__ __volatile__ ("ldarx %0, 0, %2" : "=r"(v) : "m"(*(void *volatile *)p), "r"(p));
	return v;
}
#define a_sc_p a_sc_p
/* Pointer-sized store-conditional: same as a_sc but uses the
 * doubleword form (stdcx.) for 64-bit pointer values.  Returns
 * nonzero on success, zero if the reservation was lost. */
static inline int a_sc_p(volatile void *p, void *v)
{
	int r;
	__asm__ __volatile__ (
		"stdcx. %2, 0, %3 ; mfcr %0"
		: "=r"(r), "=m"(*(void *volatile *)p) : "r"(v), "r"(p) : "memory", "cc");
	/* 0x20000000 selects CR0[EQ], set by stdcx. on success. */
	return r & 0x20000000; /* "bit 2" of "cr0" (backwards bit order) */
}
#define a_barrier a_barrier
/* Full memory barrier: heavyweight sync orders all prior loads and
 * stores before all subsequent ones.  The "memory" clobber also stops
 * compiler-level reordering across the barrier. */
static inline void a_barrier()
{
	__asm__ __volatile__ ("sync" : : : "memory");
}
/* Before an ll/sc sequence a full barrier is required; reuse a_barrier. */
#define a_pre_llsc a_barrier
#define a_post_llsc a_post_llsc
/* After a successful ll/sc sequence, isync provides the acquire-side
 * ordering: it discards prefetched instructions so no subsequent
 * memory access executes before the atomic op completes. */
static inline void a_post_llsc()
{
	__asm__ __volatile__ ("isync" : : : "memory");
}
#define a_crash a_crash
/* Deliberately crash: .long 0 is not a valid powerpc instruction,
 * so executing it raises an illegal-instruction trap (SIGILL). */
static inline void a_crash()
{
	__asm__ __volatile__ (".long 0");
}
#define a_clz_64 a_clz_64
/* Count leading zeros of a 64-bit value via the cntlzd instruction.
 * NOTE(review): presumably callers never pass x==0, or accept the
 * instruction's defined result of 64 in that case — confirm against
 * the generic fallback's contract. */
static inline int a_clz_64(uint64_t x)
{
	/* No __volatile__: the asm is a pure function of its input. */
	__asm__ ("cntlzd %0, %1" : "=r"(x) : "r"(x));
	return x;
}