/* atomic_arch.h */

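/*
 * Atomic operations for SuperH. On SH4A, each operation is built from a
 * movli.l/movco.l load-linked/store-conditional pair bracketed by synco
 * barriers: LLSC_START loads the current value at mem into r0 and opens
 * the reservation; LLSC_END attempts the conditional store and, if the
 * reservation was lost (a failed movco.l clears the T bit, so bf is
 * taken), retries from label 0.
 */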
#define LLSC_CLOBBERS "r0", "t", "memory"
#define LLSC_START(mem) "synco\n" \
	"0: movli.l @" mem ", r0\n"
#define LLSC_END(mem) \
	"1: movco.l r0, @" mem "\n" \
	" bf 0b\n" \
	" synco\n"
static inline int __sh_cas_llsc(volatile int *p, int t, int s)
{
	int old;
	__asm__ __volatile__(
		LLSC_START("%1")
		" mov r0, %0\n"
		" cmp/eq %0, %2\n"
		" bf 1f\n"
		" mov %3, r0\n"
		LLSC_END("%1")
		: "=&r"(old) : "r"(p), "r"(t), "r"(s) : LLSC_CLOBBERS);
	return old;
}
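/* Atomically replace *x with v, returning the previous value. */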
static inline int __sh_swap_llsc(volatile int *x, int v)
{
	int old;
	__asm__ __volatile__(
		LLSC_START("%1")
		" mov r0, %0\n"
		" mov %2, r0\n"
		LLSC_END("%1")
		: "=&r"(old) : "r"(x), "r"(v) : LLSC_CLOBBERS);
	return old;
}
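/* Atomically add v to *x, returning the value *x held beforehand. */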
static inline int __sh_fetch_add_llsc(volatile int *x, int v)
{
	int old;
	__asm__ __volatile__(
		LLSC_START("%1")
		" mov r0, %0\n"
		" add %2, r0\n"
		LLSC_END("%1")
		: "=&r"(old) : "r"(x), "r"(v) : LLSC_CLOBBERS);
	return old;
}
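/*
 * A plain aligned mov.l store is already atomic on SH; the synco on
 * either side adds the full ordering expected of an atomic store.
 */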
static inline void __sh_store_llsc(volatile int *p, int x)
{
	__asm__ __volatile__(
		" synco\n"
		" mov.l %1, @%0\n"
		" synco\n"
		: : "r"(p), "r"(x) : "memory");
}
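/* Atomically AND v into *x; no old value is needed, so r0 is updated in place. */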
static inline void __sh_and_llsc(volatile int *x, int v)
{
	__asm__ __volatile__(
		LLSC_START("%0")
		" and %1, r0\n"
		LLSC_END("%0")
		: : "r"(x), "r"(v) : LLSC_CLOBBERS);
}
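/* Atomically OR v into *x. */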
static inline void __sh_or_llsc(volatile int *x, int v)
{
	__asm__ __volatile__(
		LLSC_START("%0")
		" or %1, r0\n"
		LLSC_END("%0")
		: : "r"(x), "r"(v) : LLSC_CLOBBERS);
}
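/*
 * __SH4A__ guarantees movli.l/movco.l and synco, so the inline LL/SC
 * versions can be bound directly. Baseline SH builds instead call
 * out-of-line functions (defined elsewhere) that select an
 * implementation suitable for the CPU at hand.
 */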
#ifdef __SH4A__
#define a_cas(p,t,s) __sh_cas_llsc(p,t,s)
#define a_swap(x,v) __sh_swap_llsc(x,v)
#define a_fetch_add(x,v) __sh_fetch_add_llsc(x,v)
#define a_store(x,v) __sh_store_llsc(x,v)
#define a_and(x,v) __sh_and_llsc(x,v)
#define a_or(x,v) __sh_or_llsc(x,v)
#else
int __sh_cas(volatile int *, int, int);
int __sh_swap(volatile int *, int);
int __sh_fetch_add(volatile int *, int);
void __sh_store(volatile int *, int);
void __sh_and(volatile int *, int);
void __sh_or(volatile int *, int);
#define a_cas(p,t,s) __sh_cas(p,t,s)
#define a_swap(x,v) __sh_swap(x,v)
#define a_fetch_add(x,v) __sh_fetch_add(x,v)
#define a_store(x,v) __sh_store(x,v)
#define a_and(x,v) __sh_and(x,v)
#define a_or(x,v) __sh_or(x,v)
#endif
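
/*
 * Illustrative sketch (not part of the original header): a minimal
 * test-and-set spinlock built on a_cas and a_store, relying on the
 * contract visible above that a_cas returns the value *p held before
 * the operation. The example_* names are hypothetical.
 */
static inline void example_spin_lock(volatile int *lock)
{
	/* a_cas returns 0 only when it observed 0 and installed 1. */
	while (a_cas(lock, 0, 1)) ;
}

static inline void example_spin_unlock(volatile int *lock)
{
	/* Release the lock; a_store is a full-barrier store here. */
	a_store(lock, 0);
}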