syscall_arch.h

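/*
 * Syscall glue for 32-bit MIPS (o32 ABI): macros for passing 64-bit
 * arguments in register pairs, an endian-dependent fixup for the kernel
 * stat structure, and the __syscallN wrappers. Where possible the
 * wrappers are inline assembly; otherwise they call the out-of-line
 * __syscall entry point.
 */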
#define __SYSCALL_LL_E(x) \
((union { long long ll; long l[2]; }){ .ll = x }).l[0], \
((union { long long ll; long l[2]; }){ .ll = x }).l[1]
#define __SYSCALL_LL_O(x) 0, __SYSCALL_LL_E((x))
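/*
 * __SYSCALL_LL_E expands a 64-bit argument into the two 32-bit words, in
 * memory order, that occupy consecutive argument slots. __SYSCALL_LL_O
 * prepends a padding word so the 64-bit value starts in an even-numbered
 * register, as the o32 calling convention requires for 64-bit arguments.
 */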
long (__syscall)(long, ...);

#define SYSCALL_RLIM_INFINITY (-1UL/2)
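/*
 * __syscall is the out-of-line syscall entry point taking the syscall
 * number and up to six arguments; the parentheses around the name keep a
 * function-like macro of the same name from expanding here.
 * SYSCALL_RLIM_INFINITY is 0x7fffffff because the legacy rlimit interface
 * on MIPS represents "no limit" as LONG_MAX rather than -1.
 *
 * The kernel's 32-bit MIPS stat structure stores st_dev and st_rdev as
 * 32-bit values followed by padding. Read through the 64-bit dev_t fields
 * of struct stat, the significant half lands in the upper 32 bits on
 * big-endian kernels, so __stat_fix shifts it down; on little-endian no
 * fixup is needed and __stat_fix is a no-op.
 */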
#if _MIPSEL || __MIPSEL || __MIPSEL__
#define __stat_fix(st) ((st),(void)0)
#else
#include <sys/stat.h>
static inline void __stat_fix(long p)
{
	struct stat *st = (struct stat *)p;
	st->st_dev >>= 32;
	st->st_rdev >>= 32;
}
#endif
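/*
 * Inline-assembly wrappers for zero to four arguments. The convention is:
 * syscall number in $2 (v0), arguments in $4-$7 (a0-a3), result in $2,
 * and $7 (a3) nonzero on error, in which case $2 holds the positive errno
 * value and the wrapper returns its negation. The clobber list names the
 * temporaries the kernel does not preserve. These versions are skipped
 * under clang, evidently because clang at the time did not handle these
 * register constraints reliably; the generic path further below is used
 * instead.
 */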
#ifndef __clang__

static inline long __syscall0(long n)
{
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	return r7 ? -r2 : r2;
}
static inline long __syscall1(long n, long a)
{
	register long r4 __asm__("$4") = a;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	return r7 ? -r2 : r2;
}
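/*
 * From two arguments up, one of the pointer arguments may be a struct
 * stat, so after a successful stat64/fstat64/lstat64 (and, in the
 * four-argument case, fstatat) the big-endian fixup is applied to the
 * buffer before the result is returned.
 */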
static inline long __syscall2(long n, long a, long b)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	long ret = r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	return ret;
}
static inline long __syscall3(long n, long a, long b, long c)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5), "r"(r6)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	long ret = r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	return ret;
}
static inline long __syscall4(long n, long a, long b, long c, long d)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5), "r"(r6)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	long ret = r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	if (n == SYS_fstatat) __stat_fix(c);
	return ret;
}
#else
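/*
 * Generic path via the out-of-line __syscall. Error returns are negated
 * errno values, i.e. they fall in the unsigned range -4095UL..-1UL, so
 * any value greater than -4096UL is an error and is returned as-is
 * without applying the stat fixup.
 */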
static inline long __syscall0(long n)
{
	return (__syscall)(n);
}

static inline long __syscall1(long n, long a)
{
	return (__syscall)(n, a);
}

static inline long __syscall2(long n, long a, long b)
{
	long r2 = (__syscall)(n, a, b);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	return r2;
}

static inline long __syscall3(long n, long a, long b, long c)
{
	long r2 = (__syscall)(n, a, b, c);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	return r2;
}

static inline long __syscall4(long n, long a, long b, long c, long d)
{
	long r2 = (__syscall)(n, a, b, c, d);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	if (n == SYS_fstatat) __stat_fix(c);
	return r2;
}

#endif
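/*
 * Five- and six-argument syscalls always go through the out-of-line
 * __syscall: under the o32 ABI the fifth and later arguments are passed
 * on the stack, which cannot be expressed with the register-constraint
 * inline assembly above.
 */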
static inline long __syscall5(long n, long a, long b, long c, long d, long e)
{
	long r2 = (__syscall)(n, a, b, c, d, e);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	if (n == SYS_fstatat) __stat_fix(c);
	return r2;
}

static inline long __syscall6(long n, long a, long b, long c, long d, long e, long f)
{
	long r2 = (__syscall)(n, a, b, c, d, e, f);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	if (n == SYS_fstatat) __stat_fix(c);
	return r2;
}
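/*
 * Illustrative use (not part of this header): a stat() built on these
 * wrappers would look roughly like
 *
 *	int stat(const char *path, struct stat *st)
 *	{
 *		return __syscall_ret(__syscall2(SYS_stat64, (long)path, (long)st));
 *	}
 *
 * where __syscall_ret (defined elsewhere) turns a negated errno return
 * into -1 with errno set.
 */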