/* syscall_arch.h — MIPS (o32) syscall implementation */
/* Split a 64-bit syscall argument into the two 32-bit halves the kernel
 * expects on this 32-bit ABI.  The compound-literal union type-puns the
 * long long into a pair of longs, so half order follows the build's
 * native endianness automatically. */
#define __SYSCALL_LL_E(x) \
((union { long long ll; long l[2]; }){ .ll = x }).l[0], \
((union { long long ll; long l[2]; }){ .ll = x }).l[1]
/* "Odd-slot" variant: a leading dummy 0 pads the argument list so the
 * 64-bit value lands in an aligned (even/odd) register pair, as the
 * o32 kernel ABI requires for 64-bit arguments. */
#define __SYSCALL_LL_O(x) 0, __SYSCALL_LL_E((x))

/* Out-of-line variadic syscall stub, defined elsewhere in the library.
 * The parenthesized name prevents any function-like macro expansion. */
long (__syscall)(long, ...);

/* Sentinel the kernel uses for "no limit" in the legacy rlimit
 * interface: -1UL/2 == LONG_MAX on a 32-bit arch. */
#define SYSCALL_RLIM_INFINITY (-1UL/2)
/* Post-syscall fixup for struct stat results.  On the non-little-endian
 * build the interesting 32 bits of st_dev/st_rdev arrive in the upper
 * half of the 64-bit field, so shift them down in place.
 * NOTE(review): the exact kernel field layout this compensates for is
 * not visible here — presumably the legacy mips stat64 padding; confirm
 * against the kernel/arch stat definition.  On little-endian the macro
 * form just evaluates its argument and yields void (no-op). */
#if _MIPSEL || __MIPSEL || __MIPSEL__
#define __stat_fix(st) ((st),(void)0)
#else
#include <sys/stat.h>
static inline void __stat_fix(long p)
{
	/* p is the raw syscall argument holding the struct stat address. */
	struct stat *st = (struct stat *)p;
	st->st_dev >>= 32;
	st->st_rdev >>= 32;
}
#endif
#ifndef __clang__
/* Inline-asm syscall wrappers (gcc only; clang falls back to the
 * out-of-line __syscall below).  o32 convention: syscall number in $2
 * (v0), result returned in $2, error flag in $7 (a3) — $7 nonzero
 * means $2 holds a positive errno, which is negated to give the usual
 * linux-style negative-errno return. */
static inline long __syscall0(long n)
{
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		/* Move n into $2 just before the syscall instruction. */
		"addu $2,$0,%2 ; syscall"
		/* "0"(r2)/"1"(r7) tie the register variables to the outputs;
		 * NOTE(review): they are passed uninitialized — presumably to
		 * keep the compiler from allocating %2 in $2/$7, not to feed
		 * real values in.  Confirm against the gcc extended-asm docs. */
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	return r7 ? -r2 : r2;
}
/* One-argument syscall: argument in $4 (a0) per the o32 convention.
 * Error reporting is as in __syscall0: $7 nonzero => negate $2. */
static inline long __syscall1(long n, long a)
{
	register long r4 __asm__("$4") = a;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	return r7 ? -r2 : r2;
}
/* Two-argument syscall: args in $4/$5.  On success, stat-family calls
 * additionally get the kernel's stat layout fixed up in place — for
 * these calls b is the struct stat pointer. */
static inline long __syscall2(long n, long a, long b)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	/* Only fix up on success; on error the buffer was not written. */
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	return r2;
}
/* Three-argument syscall: args in $4/$5/$6.  Same success-path stat
 * fixup as __syscall2 (b is the struct stat pointer for those calls). */
static inline long __syscall3(long n, long a, long b, long c)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5), "r"(r6)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	return r2;
}
/* Four-argument syscall.  Note $7 (a3) pulls double duty here: it
 * carries the fourth argument in and the kernel's error flag out,
 * which is why r7 is initialized to d in this wrapper only.
 * SYS_fstatat takes its struct stat pointer as the third argument (c),
 * unlike the stat64 family, which uses the second (b). */
static inline long __syscall4(long n, long a, long b, long c, long d)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5), "r"(r6)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	if (n == SYS_fstatat) __stat_fix(c);
	return r2;
}
  94. #else
  95. static inline long __syscall0(long n)
  96. {
  97. return (__syscall)(n);
  98. }
  99. static inline long __syscall1(long n, long a)
  100. {
  101. return (__syscall)(n, a);
  102. }
  103. static inline long __syscall2(long n, long a, long b)
  104. {
  105. long r2 = (__syscall)(n, a, b);
  106. if (r2 > -4096UL) return r2;
  107. if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
  108. return r2;
  109. }
  110. static inline long __syscall3(long n, long a, long b, long c)
  111. {
  112. long r2 = (__syscall)(n, a, b, c);
  113. if (r2 > -4096UL) return r2;
  114. if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
  115. return r2;
  116. }
  117. static inline long __syscall4(long n, long a, long b, long c, long d)
  118. {
  119. long r2 = (__syscall)(n, a, b, c, d);
  120. if (r2 > -4096UL) return r2;
  121. if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
  122. if (n == SYS_fstatat) __stat_fix(c);
  123. return r2;
  124. }
  125. #endif
  126. static inline long __syscall5(long n, long a, long b, long c, long d, long e)
  127. {
  128. long r2 = (__syscall)(n, a, b, c, d, e);
  129. if (r2 > -4096UL) return r2;
  130. if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
  131. if (n == SYS_fstatat) __stat_fix(c);
  132. return r2;
  133. }
  134. static inline long __syscall6(long n, long a, long b, long c, long d, long e, long f)
  135. {
  136. long r2 = (__syscall)(n, a, b, c, d, e, f);
  137. if (r2 > -4096UL) return r2;
  138. if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
  139. if (n == SYS_fstatat) __stat_fix(c);
  140. return r2;
  141. }