/* atomic_arch.h — SuperH (sh) architecture atomic primitives */
  1. #if defined(__SH4A__)
  2. #define a_ll a_ll
  3. static inline int a_ll(volatile int *p)
  4. {
  5. int v;
  6. __asm__ __volatile__ ("movli.l @%1, %0" : "=z"(v) : "r"(p), "m"(*p));
  7. return v;
  8. }
  9. #define a_sc a_sc
  10. static inline int a_sc(volatile int *p, int v)
  11. {
  12. int r;
  13. __asm__ __volatile__ (
  14. "movco.l %2, @%3 ; movt %0"
  15. : "=r"(r), "=m"(*p) : "z"(v), "r"(p) : "memory", "cc");
  16. return r;
  17. }
  18. #define a_barrier a_barrier
  19. static inline void a_barrier()
  20. {
  21. __asm__ __volatile__ ("synco" : : "memory");
  22. }
  23. #define a_pre_llsc a_barrier
  24. #define a_post_llsc a_barrier
  25. #else
  26. #define a_cas a_cas
  27. __attribute__((__visibility__("hidden"))) extern const void *__sh_cas_ptr;
  28. static inline int a_cas(volatile int *p, int t, int s)
  29. {
  30. register int r1 __asm__("r1");
  31. register int r2 __asm__("r2") = t;
  32. register int r3 __asm__("r3") = s;
  33. __asm__ __volatile__ (
  34. "jsr @%4 ; nop"
  35. : "=r"(r1), "+r"(r3) : "z"(p), "r"(r2), "r"(__sh_cas_ptr)
  36. : "memory", "pr", "cc");
  37. return r3;
  38. }
  39. #endif