/* syscall_arch.h — inline system-call mechanism for MIPS (o32 ABI) */
/* Expand a 64-bit syscall argument into the two 32-bit words it occupies,
 * in memory order (which half is low vs. high is endianness-dependent —
 * the union punning yields the correct order either way). */
#define __SYSCALL_LL_E(x) \
((union { long long ll; long l[2]; }){ .ll = x }).l[0], \
((union { long long ll; long l[2]; }){ .ll = x }).l[1]
/* Odd-position variant: insert a zero pad word first so the 64-bit value
 * lands in an aligned (even/odd) register pair, as o32 argument passing
 * requires for 64-bit values. */
#define __SYSCALL_LL_O(x) 0, __SYSCALL_LL_E((x))

/* Kernel's RLIM_INFINITY for this ABI: 0x7fffffff, not all-ones. */
#define SYSCALL_RLIM_INFINITY (-1UL/2)

/* Registers the kernel may trash across a syscall trap.  MIPS release 6
 * removed the hi/lo multiply/divide result registers, so they are only
 * listed as clobbers on pre-r6 ISAs. */
#if __mips_isa_rev >= 6
#define SYSCALL_CLOBBERLIST \
	"$1", "$3", "$11", "$12", "$13", \
	"$14", "$15", "$24", "$25", "memory"
#else
#define SYSCALL_CLOBBERLIST \
	"$1", "$3", "$11", "$12", "$13", \
	"$14", "$15", "$24", "$25", "hi", "lo", "memory"
#endif
/* Zero-argument syscall.  The number n is loaded into $2 (v0) inside the
 * asm ("addu $2,$0,%2"); the kernel returns the result in $2 and a
 * nonzero error flag in $7 (a3).  On error $2 holds a positive errno,
 * which is negated to produce the usual -errno return convention. */
static inline long __syscall0(long n)
{
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		/* "0"/"1" tie r2/r7 as in-out so the compiler treats them
		 * as live across the asm; unused argument registers
		 * $8-$10 are added to the clobber list. */
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7)
		: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
	return r7 ? -r2 : r2;
}
/* One-argument syscall: a in $4 (a0), number n loaded into $2 (v0).
 * The kernel returns the result in $2 with an error flag in $7 (a3);
 * on error $2 is a positive errno, negated here to -errno. */
static inline long __syscall1(long n, long a)
{
	register long r4 __asm__("$4") = a;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4)
		/* $8-$10 are unused argument registers here; clobber them. */
		: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
	return r7 ? -r2 : r2;
}
/* Two-argument syscall: a,b in $4,$5 (a0,a1), number n in $2 (v0).
 * Result in $2, error flag in $7 (a3); on error $2 is a positive
 * errno, negated here to -errno. */
static inline long __syscall2(long n, long a, long b)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5)
		/* $8-$10 are unused argument registers here; clobber them. */
		: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
	return r7 ? -r2 : r2;
}
/* Three-argument syscall: a,b,c in $4-$6 (a0-a2), number n in $2 (v0).
 * Result in $2, error flag in $7 (a3); on error $2 is a positive
 * errno, negated here to -errno. */
static inline long __syscall3(long n, long a, long b, long c)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5), "r"(r6)
		/* $8-$10 are unused argument registers here; clobber them. */
		: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
	return r7 ? -r2 : r2;
}
/* Four-argument syscall: a-d in $4-$7 (a0-a3), number n in $2 (v0).
 * $7 doubles as the fourth argument on entry and the error flag on
 * return; on error $2 holds a positive errno, negated here to -errno. */
static inline long __syscall4(long n, long a, long b, long c, long d)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5), "r"(r6)
		/* $8-$10 are unused argument registers here; clobber them. */
		: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
	return r7 ? -r2 : r2;
}
/* Five-argument syscall.  o32 passes syscall arguments beyond the
 * fourth on the stack: reserve a 32-byte frame and store the fifth
 * argument ($8) into the 16($sp) slot the kernel reads it from, then
 * restore $sp after the trap.  Result in $2, error flag in $7 (a3);
 * on error $2 is a positive errno, negated here to -errno. */
static inline long __syscall5(long n, long a, long b, long c, long d, long e)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r8 __asm__("$8") = e;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"subu $sp,$sp,32 ; sw $8,16($sp) ; "
		"addu $2,$0,%3 ; syscall ;"
		"addu $sp,$sp,32"
		/* r8 is "+r": an input the kernel may also clobber. */
		: "=&r"(r2), "=r"(r7), "+r"(r8)
		: "ir"(n), "0"(r2), "1"(r7), "r"(r4), "r"(r5), "r"(r6)
		: SYSCALL_CLOBBERLIST, "$9", "$10");
	return r7 ? -r2 : r2;
}
/* Six-argument syscall.  o32 passes syscall arguments beyond the
 * fourth on the stack: reserve a 32-byte frame and store the fifth and
 * sixth arguments ($8, $9) at 16($sp)/20($sp) where the kernel reads
 * them, then restore $sp after the trap.  Result in $2, error flag in
 * $7 (a3); on error $2 is a positive errno, negated here to -errno. */
static inline long __syscall6(long n, long a, long b, long c, long d, long e, long f)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r8 __asm__("$8") = e;
	register long r9 __asm__("$9") = f;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"subu $sp,$sp,32 ; sw $8,16($sp) ; sw $9,20($sp) ; "
		"addu $2,$0,%4 ; syscall ;"
		"addu $sp,$sp,32"
		/* r8/r9 are "+r": inputs the kernel may also clobber. */
		: "=&r"(r2), "=r"(r7), "+r"(r8), "+r"(r9)
		: "ir"(n), "0"(r2), "1"(r7), "r"(r4), "r"(r5), "r"(r6)
		: SYSCALL_CLOBBERLIST, "$10");
	return r7 ? -r2 : r2;
}
/* Seven-argument syscall.  o32 passes syscall arguments beyond the
 * fourth on the stack: reserve a 32-byte frame and store the fifth,
 * sixth and seventh arguments ($8-$10) at 16/20/24($sp) where the
 * kernel reads them, then restore $sp after the trap.  Result in $2,
 * error flag in $7 (a3); on error $2 is a positive errno, negated here
 * to -errno. */
static inline long __syscall7(long n, long a, long b, long c, long d, long e, long f, long g)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r8 __asm__("$8") = e;
	register long r9 __asm__("$9") = f;
	register long r10 __asm__("$10") = g;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"subu $sp,$sp,32 ; sw $8,16($sp) ; sw $9,20($sp) ; sw $10,24($sp) ; "
		"addu $2,$0,%5 ; syscall ;"
		"addu $sp,$sp,32"
		/* r8-r10 are "+r": inputs the kernel may also clobber. */
		: "=&r"(r2), "=r"(r7), "+r"(r8), "+r"(r9), "+r"(r10)
		: "ir"(n), "0"(r2), "1"(r7), "r"(r4), "r"(r5), "r"(r6)
		: SYSCALL_CLOBBERLIST);
	return r7 ? -r2 : r2;
}
/* The MIPS vDSO provides clock_gettime, letting it be called without a
 * syscall trap. */
#define VDSO_USEFUL
#define VDSO_CGT_SYM "__vdso_clock_gettime"
#define VDSO_CGT_VER "LINUX_2.6"

/* Pre-time64 socket timeout option values; MIPS uses its own SO_*
 * numbering rather than the asm-generic values. */
#define SO_SNDTIMEO_OLD 0x1005
#define SO_RCVTIMEO_OLD 0x1006