syscall_arch.h

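/* On o32, 64-bit syscall arguments are passed as two 32-bit words.
 * __SYSCALL_LL_E splits a long long into its two halves in memory
 * order; __SYSCALL_LL_O additionally prepends a zero word so the
 * pair starts on an even-numbered argument slot, as the kernel ABI
 * requires for 64-bit arguments. */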
#define __SYSCALL_LL_E(x) \
((union { long long ll; long l[2]; }){ .ll = x }).l[0], \
((union { long long ll; long l[2]; }){ .ll = x }).l[1]
#define __SYSCALL_LL_O(x) 0, __SYSCALL_LL_E((x))

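/* Out-of-line syscall entry point, provided by an assembly file
 * elsewhere in the tree; used when the inline-asm versions below are
 * unavailable or impractical (clang builds, 5- and 6-argument calls). */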
#ifdef SHARED
__attribute__((visibility("hidden")))
#endif
long (__syscall)(long, ...);

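/* The MIPS kernel ABI uses 0x7fffffff (-1UL/2 on a 32-bit target) as
 * RLIM_INFINITY rather than the all-ones value used on most archs. */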
#define SYSCALL_RLIM_INFINITY (-1UL/2)

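/* The kernel's stat structures store st_dev/st_rdev as 32-bit words
 * followed by padding, while userspace dev_t is 64-bit.  On big-endian
 * targets the 32-bit value therefore lands in the upper half of the
 * 64-bit field and has to be shifted down; on little-endian targets
 * no fixup is needed. */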
#if _MIPSEL || __MIPSEL || __MIPSEL__
#define __stat_fix(st) ((st),(void)0)
#else
#include <sys/stat.h>
static inline void __stat_fix(long p)
{
	struct stat *st = (struct stat *)p;
	st->st_dev >>= 32;
	st->st_rdev >>= 32;
}
#endif

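/* GCC builds make the syscall directly via inline asm with explicit
 * register constraints; clang's handling of that construct has been
 * unreliable, so clang builds call the external __syscall instead. */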
#ifndef __clang__
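/* o32 kernel convention: syscall number in $2 ($v0), arguments in
 * $4-$7 ($a0-$a3), result in $2.  $7 is nonzero on error, in which
 * case $2 holds the positive errno and is negated for the caller.
 * The remaining caller-saved registers plus hi/lo are listed as
 * clobbers. */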
static inline long __syscall0(long n)
{
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	return r7 ? -r2 : r2;
}

static inline long __syscall1(long n, long a)
{
	register long r4 __asm__("$4") = a;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	return r7 ? -r2 : r2;
}

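/* From here on, stat-family syscalls have their struct stat buffer
 * passed through __stat_fix on success so that big-endian callers see
 * correct device numbers. */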
static inline long __syscall2(long n, long a, long b)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	long ret = r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	return ret;
}

static inline long __syscall3(long n, long a, long b, long c)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5), "r"(r6)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	long ret = r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	return ret;
}

static inline long __syscall4(long n, long a, long b, long c, long d)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5), "r"(r6)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	long ret = r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	if (n == SYS_fstatat) __stat_fix(c);
	return ret;
}

#else
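/* clang fallback: everything goes through the external __syscall.
 * Return values in the (unsigned long)-4095..-1 range are errno
 * results, so the stat fixups are applied only on success. */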
static inline long __syscall0(long n)
{
	return (__syscall)(n);
}

static inline long __syscall1(long n, long a)
{
	return (__syscall)(n, a);
}

static inline long __syscall2(long n, long a, long b)
{
	long r2 = (__syscall)(n, a, b);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	return r2;
}

static inline long __syscall3(long n, long a, long b, long c)
{
	long r2 = (__syscall)(n, a, b, c);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	return r2;
}

static inline long __syscall4(long n, long a, long b, long c, long d)
{
	long r2 = (__syscall)(n, a, b, c, d);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	if (n == SYS_fstatat) __stat_fix(c);
	return r2;
}
#endif

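/* 5- and 6-argument syscalls always go through the assembly __syscall:
 * o32 passes arguments beyond the fourth on the stack, which the
 * inline-asm form above does not handle. */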
static inline long __syscall5(long n, long a, long b, long c, long d, long e)
{
	long r2 = (__syscall)(n, a, b, c, d, e);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	if (n == SYS_fstatat) __stat_fix(c);
	return r2;
}

static inline long __syscall6(long n, long a, long b, long c, long d, long e, long f)
{
	long r2 = (__syscall)(n, a, b, c, d, e, f);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	if (n == SYS_fstatat) __stat_fix(c);
	return r2;
}