/* syscall_arch.h — MIPS (o32) syscall invocation glue */
/* Split a 64-bit syscall argument into two 32-bit register words via a
 * union pun; .l[0]/.l[1] come out in the native (endianness-dependent)
 * word order. */
#define __SYSCALL_LL_E(x) \
((union { long long ll; long l[2]; }){ .ll = x }).l[0], \
((union { long long ll; long l[2]; }){ .ll = x }).l[1]
/* "Odd" variant: prepend a zero padding word before the 64-bit pair —
 * presumably to keep the pair aligned to an even register per the o32
 * calling convention (confirm against the kernel ABI). */
#define __SYSCALL_LL_O(x) 0, __SYSCALL_LL_E((x))
/* RLIM_INFINITY value used by the kernel ABI on this arch: LONG_MAX. */
#define SYSCALL_RLIM_INFINITY (-1UL/2)
#if _MIPSEL || __MIPSEL || __MIPSEL__
/* Little-endian: no fixup needed. Evaluate the argument (for side
 * effects / typechecking) and yield void, matching the function form. */
#define __stat_fix(st) ((st),(void)0)
#else
#include <sys/stat.h>
/* Big-endian fixup for stat-family results: shift st_dev/st_rdev down
 * by 32 bits — presumably the kernel fills only the high word of these
 * fields on big-endian MIPS (NOTE(review): confirm against the kernel's
 * stat64 layout). p is the struct stat address passed as a long syscall
 * argument. */
static inline void __stat_fix(long p)
{
	struct stat *st = (struct stat *)p;
	st->st_dev >>= 32;
	st->st_rdev >>= 32;
}
#endif
/* Invoke syscall n with no arguments.
 * $2 (v0) carries the syscall number in and the result out; the kernel
 * sets $7 (a3) nonzero on error, in which case $2 holds a positive
 * errno value that we negate. All caller-saved registers are listed as
 * clobbers since the kernel may trash them. */
static inline long __syscall0(long n)
{
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		"$14", "$15", "$24", "$25", "hi", "lo", "memory");
	return r7 ? -r2 : r2;
}
/* Invoke syscall n with one argument in $4 (a0).
 * Same error convention as __syscall0: $7 nonzero => return -$2. */
static inline long __syscall1(long n, long a)
{
	register long r4 __asm__("$4") = a;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		"r"(r4)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		"$14", "$15", "$24", "$25", "hi", "lo", "memory");
	return r7 ? -r2 : r2;
}
/* Invoke syscall n with two arguments ($4, $5).
 * For the stat-family syscalls, b is the userspace struct stat pointer,
 * so apply the big-endian __stat_fix to it on success. The result is
 * saved into ret BEFORE calling __stat_fix, because r2 is pinned to $2
 * and the fixup call may clobber that register. */
static inline long __syscall2(long n, long a, long b)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		"r"(r4), "r"(r5)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		"$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	long ret = r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	return ret;
}
/* Invoke syscall n with three arguments ($4, $5, $6).
 * Applies __stat_fix to the struct stat pointer (argument b) for the
 * stat-family syscalls; the raw result is saved to ret first since the
 * fixup may clobber the $2 register variable. */
static inline long __syscall3(long n, long a, long b, long c)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		"r"(r4), "r"(r5), "r"(r6)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		"$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	long ret = r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	return ret;
}
/* Invoke syscall n with four arguments ($4-$7). $7 doubles as the
 * fourth argument in and the error flag out.
 * __stat_fix targets: b for the direct stat-family calls, c for
 * fstatat64 (whose struct pointer is the third argument). The result
 * is saved to ret before the fixup, which may clobber $2. */
static inline long __syscall4(long n, long a, long b, long c, long d)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		"r"(r4), "r"(r5), "r"(r6)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		"$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	long ret = r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	if (n == SYS_fstatat64) __stat_fix(c);
	return ret;
}
  95. static inline long __syscall5(long n, long a, long b, long c, long d, long e)
  96. {
  97. register long r4 __asm__("$4") = a;
  98. register long r5 __asm__("$5") = b;
  99. register long r6 __asm__("$6") = c;
  100. register long r7 __asm__("$7") = d;
  101. register long r8 __asm__("$8") = e;
  102. register long r2 __asm__("$2");
  103. __asm__ __volatile__ (
  104. "subu $sp,$sp,32 ; sw $8,16($sp) ; "
  105. "addu $2,$0,%3 ; syscall ;"
  106. "addu $sp,$sp,32"
  107. : "=&r"(r2), "=r"(r7), "+r"(r8)
  108. : "ir"(n), "0"(r2), "1"(r7), "r"(r4), "r"(r5), "r"(r6)
  109. : "$1", "$3", "$9", "$10", "$11", "$12", "$13",
  110. "$14", "$15", "$24", "$25", "hi", "lo", "memory");
  111. if (r7) return -r2;
  112. long ret = r2;
  113. if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
  114. if (n == SYS_fstatat64) __stat_fix(c);
  115. return r2;
  116. }
  117. static inline long __syscall6(long n, long a, long b, long c, long d, long e, long f)
  118. {
  119. register long r4 __asm__("$4") = a;
  120. register long r5 __asm__("$5") = b;
  121. register long r6 __asm__("$6") = c;
  122. register long r7 __asm__("$7") = d;
  123. register long r8 __asm__("$8") = e;
  124. register long r9 __asm__("$9") = f;
  125. register long r2 __asm__("$2");
  126. __asm__ __volatile__ (
  127. "subu $sp,$sp,32 ; sw $8,16($sp) ; sw $9,20($sp) ; "
  128. "addu $2,$0,%4 ; syscall ;"
  129. "addu $sp,$sp,32"
  130. : "=&r"(r2), "=r"(r7), "+r"(r8), "+r"(r9)
  131. : "ir"(n), "0"(r2), "1"(r7), "r"(r4), "r"(r5), "r"(r6)
  132. : "$1", "$3", "$10", "$11", "$12", "$13",
  133. "$14", "$15", "$24", "$25", "hi", "lo", "memory");
  134. if (r7) return -r2;
  135. long ret = r2;
  136. if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
  137. if (n == SYS_fstatat64) __stat_fix(c);
  138. return r2;
  139. }
  140. static inline long __syscall7(long n, long a, long b, long c, long d, long e, long f, long g)
  141. {
  142. register long r4 __asm__("$4") = a;
  143. register long r5 __asm__("$5") = b;
  144. register long r6 __asm__("$6") = c;
  145. register long r7 __asm__("$7") = d;
  146. register long r8 __asm__("$8") = e;
  147. register long r9 __asm__("$9") = f;
  148. register long r10 __asm__("$10") = g;
  149. register long r2 __asm__("$2");
  150. __asm__ __volatile__ (
  151. "subu $sp,$sp,32 ; sw $8,16($sp) ; sw $9,20($sp) ; sw $10,24($sp) ; "
  152. "addu $2,$0,%5 ; syscall ;"
  153. "addu $sp,$sp,32"
  154. : "=&r"(r2), "=r"(r7), "+r"(r8), "+r"(r9), "+r"(r10)
  155. : "ir"(n), "0"(r2), "1"(r7), "r"(r4), "r"(r5), "r"(r6)
  156. : "$1", "$3", "$11", "$12", "$13",
  157. "$14", "$15", "$24", "$25", "hi", "lo", "memory");
  158. if (r7) return -r2;
  159. long ret = r2;
  160. if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
  161. if (n == SYS_fstatat64) __stat_fix(c);
  162. return r2;
  163. }
/* vDSO configuration: clock_gettime can be resolved from the vDSO
 * under the symbol/version named below. */
#define VDSO_USEFUL
#define VDSO_CGT_SYM "__vdso_clock_gettime"
#define VDSO_CGT_VER "LINUX_2.6"