syscall_arch.h

#define __SYSCALL_LL_E(x) (x)
#define __SYSCALL_LL_O(x) (x)
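
/* Out-of-line, variadic syscall entry point, implemented separately in
 * assembly. The hidden visibility keeps references local to libc (no
 * PLT/GOT indirection). */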
__attribute__((visibility("hidden")))
long (__syscall)(long, ...);
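
/* The o32 MIPS kernel ABI represents RLIM_INFINITY as 0x7fffffff (-1UL/2)
 * rather than -1, so the generic rlimit code must translate. */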
#define SYSCALL_RLIM_INFINITY (-1UL/2)
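
/* The kernel fills st_dev and st_rdev of the o32 stat structure as 32-bit
 * values, while userspace reads them as 64-bit fields. On big-endian MIPS
 * the 32-bit value lands in the upper half of the field, so __stat_fix
 * shifts it down after a successful stat-family call; on little-endian it
 * is already in the low half and the fixup is a no-op. */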
#if _MIPSEL || __MIPSEL || __MIPSEL__
#define __stat_fix(st) ((st),(void)0)
#else
#include <sys/stat.h>
static inline void __stat_fix(long p)
{
	struct stat *st = (struct stat *)p;
	st->st_dev >>= 32;
	st->st_rdev >>= 32;
}
#endif
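
/* Inline syscalls follow the o32 kernel calling convention: the syscall
 * number goes in $2 (v0), arguments in $4..$7 (a0..a3), and the result
 * comes back in $2. $7 (a3) is zero on success and nonzero on error, in
 * which case $2 holds a positive errno value that is negated below. The
 * registers the kernel may overwrite are listed in the clobber set. */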
#ifndef __clang__

static inline long __syscall0(long n)
{
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	return r7 ? -r2 : r2;
}

static inline long __syscall1(long n, long a)
{
	register long r4 __asm__("$4") = a;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	return r7 ? -r2 : r2;
}

static inline long __syscall2(long n, long a, long b)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	long ret = r2;
	if (n == SYS_stat || n == SYS_fstat || n == SYS_lstat) __stat_fix(b);
	return ret;
}

static inline long __syscall3(long n, long a, long b, long c)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5), "r"(r6)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	long ret = r2;
	if (n == SYS_stat || n == SYS_fstat || n == SYS_lstat) __stat_fix(b);
	return ret;
}

static inline long __syscall4(long n, long a, long b, long c, long d)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5), "r"(r6)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	long ret = r2;
	if (n == SYS_stat || n == SYS_fstat || n == SYS_lstat) __stat_fix(b);
	if (n == SYS_fstatat) __stat_fix(c);
	return ret;
}

#else
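
/* clang has historically not accepted the explicit-register inline asm
 * used above, so when building with clang every syscall is routed through
 * the out-of-line __syscall. Return values above -4096UL (i.e. in
 * [-4095, -1]) are error codes, so the stat fixup is applied only to
 * successful calls. */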
static inline long __syscall0(long n)
{
	return (__syscall)(n);
}

static inline long __syscall1(long n, long a)
{
	return (__syscall)(n, a);
}

static inline long __syscall2(long n, long a, long b)
{
	long r2 = (__syscall)(n, a, b);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat || n == SYS_fstat || n == SYS_lstat) __stat_fix(b);
	return r2;
}

static inline long __syscall3(long n, long a, long b, long c)
{
	long r2 = (__syscall)(n, a, b, c);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat || n == SYS_fstat || n == SYS_lstat) __stat_fix(b);
	return r2;
}

static inline long __syscall4(long n, long a, long b, long c, long d)
{
	long r2 = (__syscall)(n, a, b, c, d);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat || n == SYS_fstat || n == SYS_lstat) __stat_fix(b);
	if (n == SYS_fstatat) __stat_fix(c);
	return r2;
}

#endif
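
/* Syscalls with five or more arguments always use the out-of-line
 * __syscall: the o32 ABI passes arguments beyond the fourth on the stack,
 * which is impractical to set up from inline asm. */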
static inline long __syscall5(long n, long a, long b, long c, long d, long e)
{
	long r2 = (__syscall)(n, a, b, c, d, e);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat || n == SYS_fstat || n == SYS_lstat) __stat_fix(b);
	if (n == SYS_fstatat) __stat_fix(c);
	return r2;
}

static inline long __syscall6(long n, long a, long b, long c, long d, long e, long f)
{
	long r2 = (__syscall)(n, a, b, c, d, e, f);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat || n == SYS_fstat || n == SYS_lstat) __stat_fix(b);
	if (n == SYS_fstatat) __stat_fix(c);
	return r2;
}
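
/* clock_gettime may be serviced through the vDSO; these macros give the
 * symbol name and symbol version to look up there. */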
#define VDSO_USEFUL
#define VDSO_CGT_SYM "__vdso_clock_gettime"
#define VDSO_CGT_VER "LINUX_2.6"