atomic_arch.h

/* Select the memory-operand constraint for ll/sc: plain "m" works before
 * MIPS r6; r6 needs "ZC", which restricts the address form to one that
 * ll/sc accept on that ISA. */
#if __mips_isa_rev < 6
#define LLSC_M "m"
#else
#define LLSC_M "ZC"
#endif

/* Load-linked: read *p and begin an LL/SC sequence. */
#define a_ll a_ll
static inline int a_ll(volatile int *p)
{
	int v;
#if __mips < 2
	/* ll is not in the MIPS I base ISA; switch the assembler to mips2
	 * around the instruction so it is accepted. */
	__asm__ __volatile__ (
		".set push ; .set mips2\n\t"
		"ll %0, %1"
		"\n\t.set pop"
		: "=r"(v) : "m"(*p));
#else
	__asm__ __volatile__ (
		"ll %0, %1"
		: "=r"(v) : LLSC_M(*p));
#endif
	return v;
}

/* Store-conditional: attempt to store v to *p; returns nonzero if the
 * store succeeded (no intervening write since the matching ll). */
#define a_sc a_sc
static inline int a_sc(volatile int *p, int v)
{
	int r;
#if __mips < 2
	__asm__ __volatile__ (
		".set push ; .set mips2\n\t"
		"sc %0, %1"
		"\n\t.set pop"
		: "=r"(r), "=m"(*p) : "0"(v) : "memory");
#else
	__asm__ __volatile__ (
		"sc %0, %1"
		: "=r"(r), "="LLSC_M(*p) : "0"(v) : "memory");
#endif
	return r;
}

/* Full memory barrier. */
#define a_barrier a_barrier
static inline void a_barrier()
{
	__asm__ __volatile__ ("sync" : : : "memory");
}

/* Issue full barriers before and after each LL/SC sequence. */
#define a_pre_llsc a_barrier
#define a_post_llsc a_barrier

#undef LLSC_M
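
For context, this header only supplies the raw LL/SC and barrier primitives; the architecture-independent atomic layer builds the actual atomic operations on top of them. Below is a minimal sketch of a compare-and-swap written against these hooks, modeled on the pattern used by musl's generic atomic.h. It is not part of atomic_arch.h, and the a_cas name and barrier placement come from that generic layer rather than from this file.

/* Sketch: compare-and-swap composed from a_ll/a_sc with the pre/post
 * barriers defined above. Returns the value observed in *p; it equals
 * t exactly when the swap succeeded. */
static inline int a_cas(volatile int *p, int t, int s)
{
	int old;
	a_pre_llsc();                      /* order prior accesses before the LL/SC loop */
	do old = a_ll(p);                  /* load-linked the current value */
	while (old == t && !a_sc(p, s));   /* retry if the conditional store failed */
	a_post_llsc();                     /* order the loop before later accesses */
	return old;
}

Keeping only a_ll, a_sc, and a_barrier per architecture means each port contributes a few small asm fragments, while all retry-loop logic stays in shared code.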