/* arch/mips/syscall_arch.h — MIPS (o32) syscall glue for musl libc. */
/* Split a 64-bit syscall argument into the two 32-bit longs the kernel
 * expects, in the memory order of this arch's long long representation. */
#define __SYSCALL_LL_E(x) \
((union { long long ll; long l[2]; }){ .ll = x }).l[0], \
((union { long long ll; long l[2]; }){ .ll = x }).l[1]
/* Odd-slot variant: inserts a zero pad word before the pair.
 * NOTE(review): presumably to keep the 64-bit value in an even/odd
 * register pair per the o32 ABI — confirm against the ABI spec. */
#define __SYSCALL_LL_O(x) 0, __SYSCALL_LL_E((x))

/* External assembly syscall entry point (used by the variadic fallbacks
 * below and by the non-GCC path). Hidden so it never goes through the PLT. */
__attribute__((visibility("hidden")))
long (__syscall)(long, ...);

/* Value the kernel uses for RLIM_INFINITY on this arch: LONG_MAX. */
#define SYSCALL_RLIM_INFINITY (-1UL/2)
#if _MIPSEL || __MIPSEL || __MIPSEL__
/* Little-endian: no fixup needed; evaluate the argument and do nothing. */
#define __stat_fix(st) ((st),(void)0)
#else
#include <sys/stat.h>
/* Big-endian fixup for the stat-family results: the kernel stores the
 * 32-bit dev numbers in the first word of a wider field, which on
 * big-endian lands in the high half — shift them down into place.
 * p is the address of the userspace struct stat buffer. */
static inline void __stat_fix(long p)
{
	struct stat *st = (struct stat *)p;
	st->st_dev >>= 32;
	st->st_rdev >>= 32;
}
#endif
#ifndef __clang__
/* Inline-asm syscall wrappers.  MIPS convention: syscall number in $2,
 * arguments in $4-$7, result back in $2; $7 is 0 on success and nonzero
 * on error, in which case $2 holds the positive errno. */

/* Zero-argument syscall: returns the result, or -errno on failure. */
static inline long __syscall0(long n)
{
	/* r2/r7 are deliberately uninitialized; the "0"/"1" input constraints
	 * only tie them to the output operands (musl idiom). */
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	return r7 ? -r2 : r2;
}
/* One-argument syscall: argument in $4; returns result or -errno. */
static inline long __syscall1(long n, long a)
{
	register long r4 __asm__("$4") = a;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	return r7 ? -r2 : r2;
}
/* Two-argument syscall.  On error ($7 nonzero) returns -errno at once;
 * on success, stat-family results get their dev fields fixed up for
 * big-endian (no-op on little-endian, see __stat_fix). */
static inline long __syscall2(long n, long a, long b)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	long ret = r2;
	/* For these calls, b is the userspace struct stat pointer. */
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	return ret;
}
/* Three-argument syscall; same error and stat-fixup handling as above. */
static inline long __syscall3(long n, long a, long b, long c)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5), "r"(r6)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	long ret = r2;
	/* For these calls, b is the userspace struct stat pointer. */
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	return ret;
}
/* Four-argument syscall.  Note $7 carries BOTH the fourth argument (in)
 * and the error flag (out); the constraints express that reuse. */
static inline long __syscall4(long n, long a, long b, long c, long d)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5), "r"(r6)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	long ret = r2;
	/* stat pointer is arg b for the stat family, arg c for fstatat. */
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	if (n == SYS_fstatat) __stat_fix(c);
	return ret;
}
  98. #else
  99. static inline long __syscall0(long n)
  100. {
  101. return (__syscall)(n);
  102. }
  103. static inline long __syscall1(long n, long a)
  104. {
  105. return (__syscall)(n, a);
  106. }
  107. static inline long __syscall2(long n, long a, long b)
  108. {
  109. long r2 = (__syscall)(n, a, b);
  110. if (r2 > -4096UL) return r2;
  111. if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
  112. return r2;
  113. }
  114. static inline long __syscall3(long n, long a, long b, long c)
  115. {
  116. long r2 = (__syscall)(n, a, b, c);
  117. if (r2 > -4096UL) return r2;
  118. if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
  119. return r2;
  120. }
  121. static inline long __syscall4(long n, long a, long b, long c, long d)
  122. {
  123. long r2 = (__syscall)(n, a, b, c, d);
  124. if (r2 > -4096UL) return r2;
  125. if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
  126. if (n == SYS_fstatat) __stat_fix(c);
  127. return r2;
  128. }
  129. #endif
  130. static inline long __syscall5(long n, long a, long b, long c, long d, long e)
  131. {
  132. long r2 = (__syscall)(n, a, b, c, d, e);
  133. if (r2 > -4096UL) return r2;
  134. if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
  135. if (n == SYS_fstatat) __stat_fix(c);
  136. return r2;
  137. }
  138. static inline long __syscall6(long n, long a, long b, long c, long d, long e, long f)
  139. {
  140. long r2 = (__syscall)(n, a, b, c, d, e, f);
  141. if (r2 > -4096UL) return r2;
  142. if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
  143. if (n == SYS_fstatat) __stat_fix(c);
  144. return r2;
  145. }
/* A vDSO is worth probing on this arch. */
#define VDSO_USEFUL
/* Symbol name and ELF symbol version of clock_gettime in the vDSO. */
#define VDSO_CGT_SYM "__vdso_clock_gettime"
#define VDSO_CGT_VER "LINUX_2.6"