atomic_arch.h

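/*
 * PowerPC64 atomic primitives built on load-reserved/store-conditional
 * pairs: lwarx/stwcx. for 32-bit values and ldarx/stdcx. for
 * pointer-sized (64-bit) values. Each "#define a_X a_X" marks the
 * primitive as provided here, so a generic atomic layer can detect it
 * with #ifdef and skip its fallback definition.
 */
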
#define a_ll a_ll
static inline int a_ll(volatile int *p)
{
	int v;
	__asm__ __volatile__ ("lwarx %0, 0, %2" : "=r"(v) : "m"(*p), "r"(p));
	return v;
}

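/* Store-conditional: stwcx. succeeds only if the reservation taken by
 * the matching lwarx is still held. Success sets the EQ bit of cr0;
 * mfcr copies the condition register into a GPR, where CR0's EQ bit
 * appears at mask 0x20000000. */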
#define a_sc a_sc
static inline int a_sc(volatile int *p, int v)
{
	int r;
	__asm__ __volatile__ (
		"stwcx. %2, 0, %3 ; mfcr %0"
		: "=r"(r), "=m"(*p) : "r"(v), "r"(p) : "memory", "cc");
	return r & 0x20000000; /* "bit 2" of "cr0" (backwards bit order) */
}

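/* The same pair for pointer-sized values, using the doubleword
 * instructions ldarx/stdcx. */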
#define a_ll_p a_ll_p
static inline void *a_ll_p(volatile void *p)
{
	void *v;
	__asm__ __volatile__ ("ldarx %0, 0, %2" : "=r"(v) : "m"(*(void *volatile *)p), "r"(p));
	return v;
}

#define a_sc_p a_sc_p
static inline int a_sc_p(volatile void *p, void *v)
{
	int r;
	__asm__ __volatile__ (
		"stdcx. %2, 0, %3 ; mfcr %0"
		: "=r"(r), "=m"(*(void *volatile *)p) : "r"(v), "r"(p) : "memory", "cc");
	return r & 0x20000000; /* "bit 2" of "cr0" (backwards bit order) */
}

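/* sync is a full memory barrier: it orders all prior loads and stores
 * before all subsequent ones. */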
#define a_barrier a_barrier
static inline void a_barrier()
{
	__asm__ __volatile__ ("sync" : : : "memory");
}

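/* LL/SC sequences are bracketed by a full sync before and an isync
 * after. isync discards any instructions started speculatively past
 * the LL/SC loop's final branch, which is the usual PowerPC idiom for
 * acquire ordering. */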
  38. #define a_pre_llsc a_barrier
#define a_post_llsc a_post_llsc
static inline void a_post_llsc()
{
	__asm__ __volatile__ ("isync" : : : "memory");
}

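/* Atomic store: a plain store bracketed by the same barriers used
 * around LL/SC sequences (sync before, isync after). */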
#define a_store a_store
static inline void a_store(volatile int *p, int v)
{
	a_pre_llsc();
	*p = v;
	a_post_llsc();
}

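/* Deliberate crash: the word 0 is not a valid PowerPC instruction, so
 * executing it raises an illegal-instruction trap. */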
#define a_crash a_crash
static inline void a_crash()
{
	__asm__ __volatile__ (".long 0");
}
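
/*
 * Illustration only, not part of this header: a minimal sketch of how
 * a generic atomic layer might compose the primitives above into a
 * compare-and-swap. The name a_cas is an assumption here.
 *
 *	static inline int a_cas(volatile int *p, int t, int s)
 *	{
 *		int old;
 *		a_pre_llsc();
 *		do old = a_ll(p);
 *		while (old == t && !a_sc(p, s));
 *		a_post_llsc();
 *		return old;
 *	}
 */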