소스 검색

mark arm thread-pointer-loading inline asm as volatile

this builds on commits a603a75a72bb469c6be4963ed1b55fabe675fe15 and
0ba35d69c0e77b225ec640d2bd112ff6d9d3b2af to ensure that a compiler
cannot conclude that it's valid to reorder the asm to a point before
the thread pointer is set up, or to treat the inline function as if it
were declared with attribute((const)).

other archs already use volatile asm for thread pointer loading.
Rich Felker 9년 전
부모
커밋
74483c5955
1개의 변경된 파일, 3개의 추가 그리고 3개의 삭제
  1. 3 3
      arch/arm/pthread_arch.h

+ 3 - 3
arch/arm/pthread_arch.h

@@ -4,7 +4,7 @@
 static inline pthread_t __pthread_self()
 {
 	char *p;
-	__asm__( "mrc p15,0,%0,c13,c0,3" : "=r"(p) );
+	__asm__ __volatile__ ( "mrc p15,0,%0,c13,c0,3" : "=r"(p) );
 	return (void *)(p+8-sizeof(struct pthread));
 }
 
@@ -14,10 +14,10 @@ static inline pthread_t __pthread_self()
 {
 #ifdef __clang__
 	char *p;
-	__asm__( "bl __a_gettp\n\tmov %0,r0" : "=r"(p) : : "cc", "r0", "lr" );
+	__asm__ __volatile__ ( "bl __a_gettp\n\tmov %0,r0" : "=r"(p) : : "cc", "r0", "lr" );
 #else
 	register char *p __asm__("r0");
-	__asm__( "bl __a_gettp" : "=r"(p) : : "cc", "lr" );
+	__asm__ __volatile__ ( "bl __a_gettp" : "=r"(p) : : "cc", "lr" );
 #endif
 	return (void *)(p+8-sizeof(struct pthread));
 }