/* atomic_arch.h */

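/* Before MIPS R6, ll/sc take a 16-bit offset, so a plain "m" memory
 * constraint is fine. R6 shrinks the offset field to 9 bits, so the
 * "ZC" constraint is needed to get an ll/sc-compatible address
 * operand from the compiler. */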
#if __mips_isa_rev < 6
#define LLSC_M "m"
#else
#define LLSC_M "ZC"
#endif
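
/* 32-bit load-linked: loads *p and opens an ll/sc sequence. */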
#define a_ll a_ll
static inline int a_ll(volatile int *p)
{
	int v;
	__asm__ __volatile__ (
		"ll %0, %1"
		: "=r"(v) : LLSC_M(*p));
	return v;
}
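
/* 32-bit store-conditional: tries to store v to *p; returns nonzero
 * if the store succeeded, 0 if the ll/sc sequence was broken. */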
#define a_sc a_sc
static inline int a_sc(volatile int *p, int v)
{
	int r;
	__asm__ __volatile__ (
		"sc %0, %1"
		: "=r"(r), "="LLSC_M(*p) : "0"(v) : "memory");
	return r;
}
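
/* Pointer-sized load-linked, via the 64-bit lld instruction. */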
#define a_ll_p a_ll_p
static inline void *a_ll_p(volatile void *p)
{
	void *v;
	__asm__ __volatile__ (
		"lld %0, %1"
		: "=r"(v) : LLSC_M(*(void *volatile *)p));
	return v;
}
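
/* Pointer-sized store-conditional (64-bit scd); nonzero on success. */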
#define a_sc_p a_sc_p
static inline int a_sc_p(volatile void *p, void *v)
{
	long r;
	__asm__ __volatile__ (
		"scd %0, %1"
		: "=r"(r), "="LLSC_M(*(void *volatile *)p) : "0"(v) : "memory");
	return r;
}
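
/* Full memory barrier via the MIPS sync instruction. */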
#define a_barrier a_barrier
static inline void a_barrier()
{
	__asm__ __volatile__ ("sync" : : : "memory");
}
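
/* Hooks run before and after a generic ll/sc retry loop; defining
 * both as full barriers makes the compound atomic ops fully
 * ordered. */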
#define a_pre_llsc a_barrier
#define a_post_llsc a_barrier

#undef LLSC_M
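
/* For illustration only, not part of this header: a generic atomic
 * layer such as musl's atomic.h can build compare-and-swap on top of
 * the primitives above, roughly along these lines:
 *
 *	static inline int a_cas(volatile int *p, int t, int s)
 *	{
 *		int old;
 *		a_pre_llsc();
 *		do old = a_ll(p);
 *		while (old==t && !a_sc(p, s));
 *		a_post_llsc();
 *		return old;
 *	}
 *
 * The loop retries until the store-conditional succeeds or the
 * loaded value no longer matches the expected one, and returns the
 * value observed by the load-linked. */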