/* syscall_arch.h — MIPS (o32) syscall invocation primitives */
/* Split a 64-bit syscall argument into two 32-bit halves, passed as two
 * consecutive arguments. Going through the union keeps the halves in the
 * CPU's native word order, so the same macro works for both big- and
 * little-endian MIPS. __SYSCALL_LL_O prepends a dummy 0 argument —
 * presumably to pad the 64-bit value onto an even/odd register pair as
 * the o32 ABI requires (NOTE(review): confirm against ABI docs). */
#define __SYSCALL_LL_E(x) \
((union { long long ll; long l[2]; }){ .ll = x }).l[0], \
((union { long long ll; long l[2]; }){ .ll = x }).l[1]
#define __SYSCALL_LL_O(x) 0, __SYSCALL_LL_E((x))

/* Kernel's RLIM_INFINITY encoding on this arch: LONG_MAX, not -1. */
#define SYSCALL_RLIM_INFINITY (-1UL/2)

/* Registers not preserved across the syscall trap, beyond the operands
 * each wrapper names explicitly. MIPS r6 dropped the hi/lo multiply
 * registers, so they are only listed as clobbers pre-r6. */
#if __mips_isa_rev >= 6
#define SYSCALL_CLOBBERLIST \
	"$1", "$3", "$11", "$12", "$13", \
	"$14", "$15", "$24", "$25", "memory"
#else
#define SYSCALL_CLOBBERLIST \
	"$1", "$3", "$11", "$12", "$13", \
	"$14", "$15", "$24", "$25", "hi", "lo", "memory"
#endif
/* Invoke syscall n with no arguments.
 * o32 convention as used here: the syscall number is materialized in $2
 * (v0) by the addu; on return $2 holds the result and $7 (a3) is the
 * error flag. When $7 is nonzero, $2 is a positive errno value, which
 * we negate to the -errno return convention used by the callers. */
static inline long __syscall0(long n)
{
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		/* "ir" lets n be an immediate or register; addu copies it to $2. */
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7)
		/* "0"(r2) ties $2 as an input/output pair; its incoming value is
		 * never read (addu overwrites it before the trap). */
		: "ir"(n), "0"(r2)
		/* $8-$10 carry no operands here; NOTE(review): presumably listed
		 * because the kernel may trash them — matches the other wrappers. */
		: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
	return r7 && r2>0 ? -r2 : r2;
}
/* Invoke syscall n with one argument (in $4/a0).
 * Error convention: $7 (a3) nonzero on return means $2 holds a positive
 * errno, returned negated; see __syscall0 for the full constraint notes. */
static inline long __syscall1(long n, long a)
{
	register long r4 __asm__("$4") = a;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7)
		: "ir"(n), "0"(r2), "r"(r4)
		: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
	return r7 && r2>0 ? -r2 : r2;
}
/* Invoke syscall n with two arguments (in $4-$5).
 * Error convention: $7 nonzero => $2 is a positive errno, negated here. */
static inline long __syscall2(long n, long a, long b)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7)
		: "ir"(n), "0"(r2), "r"(r4), "r"(r5)
		: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
	return r7 && r2>0 ? -r2 : r2;
}
/* Invoke syscall n with three arguments (in $4-$6).
 * Error convention: $7 nonzero => $2 is a positive errno, negated here. */
static inline long __syscall3(long n, long a, long b, long c)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7)
		: "ir"(n), "0"(r2), "r"(r4), "r"(r5), "r"(r6)
		: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
	return r7 && r2>0 ? -r2 : r2;
}
/* Invoke syscall n with four arguments (in $4-$7).
 * $7 doubles as the fourth argument on entry and the error flag on exit,
 * hence the "+r"(r7) read-write constraint rather than "=r" used by the
 * shorter wrappers. $7 nonzero => $2 is a positive errno, negated here. */
static inline long __syscall4(long n, long a, long b, long c, long d)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "+r"(r7)
		: "ir"(n), "0"(r2), "r"(r4), "r"(r5), "r"(r6)
		: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
	return r7 && r2>0 ? -r2 : r2;
}
/* Invoke syscall n with five arguments.
 * Only four arguments fit in registers ($4-$7); the asm itself opens a
 * 32-byte stack frame and spills the fifth argument ($8) to 16($sp),
 * where the kernel expects it, then pops the frame after the trap.
 * The spill/restore lives inside the asm so the compiler cannot place
 * anything else in that stack region across the trap.
 * $7 is both argument four (in) and error flag (out): nonzero on return
 * means $2 holds a positive errno, which we negate. */
static inline long __syscall5(long n, long a, long b, long c, long d, long e)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r8 __asm__("$8") = e;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"subu $sp,$sp,32 ; sw $8,16($sp) ; "
		/* %3 here (not %2): the "+r" outputs shift the operand numbers. */
		"addu $2,$0,%3 ; syscall ;"
		"addu $sp,$sp,32"
		: "=&r"(r2), "+r"(r7), "+r"(r8)
		: "ir"(n), "0"(r2), "r"(r4), "r"(r5), "r"(r6)
		: SYSCALL_CLOBBERLIST, "$9", "$10");
	return r7 && r2>0 ? -r2 : r2;
}
/* Invoke syscall n with six arguments.
 * Arguments five and six ($8, $9) are spilled to 16($sp) and 20($sp)
 * within a temporary 32-byte stack frame opened inside the asm; see
 * __syscall5 for why the frame is managed in the asm itself.
 * $7 nonzero on return => $2 is a positive errno, negated here. */
static inline long __syscall6(long n, long a, long b, long c, long d, long e, long f)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r8 __asm__("$8") = e;
	register long r9 __asm__("$9") = f;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"subu $sp,$sp,32 ; sw $8,16($sp) ; sw $9,20($sp) ; "
		/* %4: operand index shifted by the three "+r" outputs. */
		"addu $2,$0,%4 ; syscall ;"
		"addu $sp,$sp,32"
		: "=&r"(r2), "+r"(r7), "+r"(r8), "+r"(r9)
		: "ir"(n), "0"(r2), "r"(r4), "r"(r5), "r"(r6)
		: SYSCALL_CLOBBERLIST, "$10");
	return r7 && r2>0 ? -r2 : r2;
}
/* Invoke syscall n with seven arguments.
 * Arguments five through seven ($8-$10) are spilled to 16/20/24($sp) in
 * a temporary 32-byte frame opened inside the asm (see __syscall5).
 * With all of $8-$10 named as operands, only SYSCALL_CLOBBERLIST remains
 * in the clobbers. $7 nonzero on return => $2 is a positive errno,
 * negated to the -errno convention. */
static inline long __syscall7(long n, long a, long b, long c, long d, long e, long f, long g)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r8 __asm__("$8") = e;
	register long r9 __asm__("$9") = f;
	register long r10 __asm__("$10") = g;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"subu $sp,$sp,32 ; sw $8,16($sp) ; sw $9,20($sp) ; sw $10,24($sp) ; "
		/* %5: operand index shifted by the four "+r" outputs. */
		"addu $2,$0,%5 ; syscall ;"
		"addu $sp,$sp,32"
		: "=&r"(r2), "+r"(r7), "+r"(r8), "+r"(r9), "+r"(r10)
		: "ir"(n), "0"(r2), "r"(r4), "r"(r5), "r"(r6)
		: SYSCALL_CLOBBERLIST);
	return r7 && r2>0 ? -r2 : r2;
}
/* A usable vDSO exists on this arch. Two clock_gettime entry points are
 * named: the legacy 32-bit-time symbol and the time64 variant, both at
 * version LINUX_2.6. */
#define VDSO_USEFUL
#define VDSO_CGT32_SYM "__vdso_clock_gettime"
#define VDSO_CGT32_VER "LINUX_2.6"
#define VDSO_CGT_SYM "__vdso_clock_gettime64"
#define VDSO_CGT_VER "LINUX_2.6"

/* Pre-time64 ("old") socket timeout option values for this arch.
 * NOTE(review): MIPS uses 0x1005/0x1006 rather than the generic values —
 * confirm against the kernel's arch/mips asm/socket.h. */
#define SO_SNDTIMEO_OLD 0x1005
#define SO_RCVTIMEO_OLD 0x1006

/* Drop the multiplexed socketcall; direct socket syscalls are used. */
#undef SYS_socketcall