Commit c4559f67 authored by Maciej W. Rozycki, committed by Ralf Baechle

Always use ".set mips3" rather than selecting between ".set mips2" and
".set mips3" for assembling ll/sc sequences, to avoid problems with 64-bit
configurations.

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
parent 69c75fb4
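For orientation before the hunks: ll/sc are only defined from MIPS II upward, and their 64-bit counterparts lld/scd only from MIPS III. Forcing the assembler to MIPS III therefore covers every ll/sc sequence with one directive, where selecting ".set mips2" breaks on 64-bit configurations. A minimal sketch of the pattern, assuming a push/pop wrapper and an illustrative function name (neither taken from the patch):

	/*
	 * Sketch only: atomic increment via ll/sc.  ".set mips3" makes the
	 * assembler accept ll/sc (and lld/scd) even if the ISA selected on
	 * the command line would reject them.
	 */
	static inline void atomic_inc_sketch(volatile int *v)
	{
		int temp;

		__asm__ __volatile__(
		"	.set	push			\n" /* save assembler mode */
		"	.set	mips3			\n" /* allow ll/sc and lld/scd */
		"1:	ll	%0, %1			\n" /* load-linked */
		"	addu	%0, 1			\n"
		"	sc	%0, %1			\n" /* store-conditional */
		"	beqz	%0, 1b			\n" /* retry if sc failed */
		"	.set	pop			\n" /* restore assembler mode */
		: "=&r" (temp), "+m" (*v));
	}

The hunks below make exactly this move throughout: every ".set mips2" becomes ".set mips3", and the word-size-dependent __SET_MIPS macro disappears.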
@@ -42,7 +42,7 @@ static inline int __sem_update_count(struct semaphore *sem, int incr)
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		__asm__ __volatile__(
-		"	.set	mips2					\n"
+		"	.set	mips3					\n"
 		"1:	ll	%0, %2		# __sem_update_count	\n"
 		"	sra	%1, %0, 31				\n"
 		"	not	%1					\n"
@@ -55,7 +55,7 @@ static inline int __sem_update_count(struct semaphore *sem, int incr)
 		: "r" (incr), "m" (sem->count));
 	} else if (cpu_has_llsc) {
 		__asm__ __volatile__(
-		"	.set	mips2					\n"
+		"	.set	mips3					\n"
 		"1:	ll	%0, %2		# __sem_update_count	\n"
 		"	sra	%1, %0, 31				\n"
 		"	not	%1					\n"
...
@@ -62,7 +62,7 @@ static __inline__ void atomic_add(int i, atomic_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips2			\n"
+		"	.set	mips3			\n"
 		"1:	ll	%0, %1	# atomic_add	\n"
 		"	addu	%0, %2			\n"
 		"	sc	%0, %1			\n"
@@ -74,7 +74,7 @@ static __inline__ void atomic_add(int i, atomic_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips2			\n"
+		"	.set	mips3			\n"
 		"1:	ll	%0, %1	# atomic_add	\n"
 		"	addu	%0, %2			\n"
 		"	sc	%0, %1			\n"
@@ -104,7 +104,7 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips2			\n"
+		"	.set	mips3			\n"
 		"1:	ll	%0, %1	# atomic_sub	\n"
 		"	subu	%0, %2			\n"
 		"	sc	%0, %1			\n"
@@ -116,7 +116,7 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips2			\n"
+		"	.set	mips3			\n"
 		"1:	ll	%0, %1	# atomic_sub	\n"
 		"	subu	%0, %2			\n"
 		"	sc	%0, %1			\n"
@@ -144,7 +144,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips2				\n"
+		"	.set	mips3				\n"
 		"1:	ll	%1, %2	# atomic_add_return	\n"
 		"	addu	%0, %1, %3			\n"
 		"	sc	%0, %2				\n"
@@ -159,7 +159,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips2				\n"
+		"	.set	mips3				\n"
 		"1:	ll	%1, %2	# atomic_add_return	\n"
 		"	addu	%0, %1, %3			\n"
 		"	sc	%0, %2				\n"
@@ -191,7 +191,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips2				\n"
+		"	.set	mips3				\n"
 		"1:	ll	%1, %2	# atomic_sub_return	\n"
 		"	subu	%0, %1, %3			\n"
 		"	sc	%0, %2				\n"
@@ -206,7 +206,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips2				\n"
+		"	.set	mips3				\n"
 		"1:	ll	%1, %2	# atomic_sub_return	\n"
 		"	subu	%0, %1, %3			\n"
 		"	sc	%0, %2				\n"
@@ -245,7 +245,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips2				\n"
+		"	.set	mips3				\n"
 		"1:	ll	%1, %2	# atomic_sub_if_positive\n"
 		"	subu	%0, %1, %3			\n"
 		"	bltz	%0, 1f				\n"
@@ -261,7 +261,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	.set	mips2				\n"
+		"	.set	mips3				\n"
 		"1:	ll	%1, %2	# atomic_sub_if_positive\n"
 		"	subu	%0, %1, %3			\n"
 		"	bltz	%0, 1f				\n"
...
@@ -20,14 +20,12 @@
 #define SZLONG_LOG	5
 #define SZLONG_MASK	31UL
 #define __LL		"ll	"
 #define __SC		"sc	"
-#define __SET_MIPS	".set	mips2	"
 #define cpu_to_lelongp(x)	cpu_to_le32p((__u32 *) (x))
 #elif (_MIPS_SZLONG == 64)
 #define SZLONG_LOG	6
 #define SZLONG_MASK	63UL
 #define __LL		"lld	"
 #define __SC		"scd	"
-#define __SET_MIPS	".set	mips3	"
 #define cpu_to_lelongp(x)	cpu_to_le64p((__u64 *) (x))
 #endif
@@ -74,7 +72,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		__asm__ __volatile__(
-		"	" __SET_MIPS "			\n"
+		"	.set	mips3			\n"
 		"1:	" __LL "%0, %1	# set_bit	\n"
 		"	or	%0, %2			\n"
 		"	" __SC	"%0, %1			\n"
@@ -84,7 +82,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 		: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
 	} else if (cpu_has_llsc) {
 		__asm__ __volatile__(
-		"	" __SET_MIPS "			\n"
+		"	.set	mips3			\n"
 		"1:	" __LL "%0, %1	# set_bit	\n"
 		"	or	%0, %2			\n"
 		"	" __SC	"%0, %1			\n"
@@ -138,7 +136,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		__asm__ __volatile__(
-		"	" __SET_MIPS "			\n"
+		"	.set	mips3			\n"
 		"1:	" __LL "%0, %1	# clear_bit	\n"
 		"	and	%0, %2			\n"
 		"	" __SC	"%0, %1			\n"
@@ -148,7 +146,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 		: "ir" (~(1UL << (nr & SZLONG_MASK))), "m" (*m));
 	} else if (cpu_has_llsc) {
 		__asm__ __volatile__(
-		"	" __SET_MIPS "			\n"
+		"	.set	mips3			\n"
 		"1:	" __LL "%0, %1	# clear_bit	\n"
 		"	and	%0, %2			\n"
 		"	" __SC	"%0, %1			\n"
@@ -201,7 +199,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	" __SET_MIPS "			\n"
+		"	.set	mips3			\n"
 		"1:	" __LL "%0, %1	# change_bit	\n"
 		"	xor	%0, %2			\n"
 		"	" __SC	"%0, %1			\n"
@@ -214,7 +212,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 		unsigned long temp;
 
 		__asm__ __volatile__(
-		"	" __SET_MIPS "			\n"
+		"	.set	mips3			\n"
 		"1:	" __LL "%0, %1	# change_bit	\n"
 		"	xor	%0, %2			\n"
 		"	" __SC	"%0, %1			\n"
@@ -267,7 +265,7 @@ static inline int test_and_set_bit(unsigned long nr,
 		unsigned long temp, res;
 
 		__asm__ __volatile__(
-		"	" __SET_MIPS "				\n"
+		"	.set	mips3				\n"
 		"1:	" __LL "%0, %1	# test_and_set_bit	\n"
 		"	or	%2, %0, %3			\n"
 		"	" __SC	"%2, %1				\n"
@@ -289,7 +287,7 @@ static inline int test_and_set_bit(unsigned long nr,
 		__asm__ __volatile__(
 		"	.set	push				\n"
 		"	.set	noreorder			\n"
-		"	" __SET_MIPS "				\n"
+		"	.set	mips3				\n"
 		"1:	" __LL "%0, %1	# test_and_set_bit	\n"
 		"	or	%2, %0, %3			\n"
 		"	" __SC	"%2, %1				\n"
@@ -361,7 +359,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 		unsigned long temp, res;
 
 		__asm__ __volatile__(
-		"	" __SET_MIPS "				\n"
+		"	.set	mips3				\n"
 		"1:	" __LL "%0, %1	# test_and_clear_bit	\n"
 		"	or	%2, %0, %3			\n"
 		"	xor	%2, %3				\n"
@@ -384,7 +382,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 		__asm__ __volatile__(
 		"	.set	push				\n"
 		"	.set	noreorder			\n"
-		"	" __SET_MIPS "				\n"
+		"	.set	mips3				\n"
 		"1:	" __LL "%0, %1	# test_and_clear_bit	\n"
 		"	or	%2, %0, %3			\n"
 		"	xor	%2, %3				\n"
@@ -457,7 +455,7 @@ static inline int test_and_change_bit(unsigned long nr,
 		unsigned long temp, res;
 
 		__asm__ __volatile__(
-		"	" __SET_MIPS "				\n"
+		"	.set	mips3				\n"
 		"1:	" __LL "%0, %1	# test_and_change_bit	\n"
 		"	xor	%2, %0, %3			\n"
 		"	" __SC	"%2, %1				\n"
@@ -479,7 +477,7 @@ static inline int test_and_change_bit(unsigned long nr,
 		__asm__ __volatile__(
 		"	.set	push				\n"
 		"	.set	noreorder			\n"
-		"	" __SET_MIPS "				\n"
+		"	.set	mips3				\n"
 		"1:	" __LL "%0, %1	# test_and_change_bit	\n"
 		"	xor	%2, %0, %3			\n"
 		"	" __SC	"\t%2, %1			\n"
...
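The bitops hunks above also retire __SET_MIPS altogether: once the directive is unconditionally ".set mips3", only the width of the load/store still depends on _MIPS_SZLONG, and the __LL/__SC macros carry that. A hedged sketch of the resulting shape (illustrative, not the patch text; the closing ".set mips0", which restores the command-line ISA, is assumed from the surrounding kernel style rather than shown in the hunks):

	/*
	 * Sketch only: set_bit-style loop after the patch.  __LL/__SC expand
	 * to "ll "/"sc " when _MIPS_SZLONG == 32 and to "lld "/"scd " when
	 * it is 64; MIPS III defines all four, so one directive suffices.
	 */
	static inline void set_bit_sketch(unsigned long nr, volatile unsigned long *addr)
	{
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3			\n"
		"1:	" __LL "%0, %1	# set_bit	\n"
		"	or	%0, %2			\n"
		"	" __SC	"%0, %1			\n"
		"	beqz	%0, 1b			\n" /* retry if sc failed */
		"	.set	mips0			\n" /* back to default ISA */
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
	}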
@@ -176,7 +176,7 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
 		unsigned long dummy;
 
 		__asm__ __volatile__(
-		"	.set	mips2				\n"
+		"	.set	mips3				\n"
 		"1:	ll	%0, %3		# xchg_u32	\n"
 		"	move	%2, %z4				\n"
 		"	sc	%2, %1				\n"
@@ -193,7 +193,7 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
 		unsigned long dummy;
 
 		__asm__ __volatile__(
-		"	.set	mips2				\n"
+		"	.set	mips3				\n"
 		"1:	ll	%0, %3		# xchg_u32	\n"
 		"	move	%2, %z4				\n"
 		"	sc	%2, %1				\n"
@@ -301,7 +301,7 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
 		__asm__ __volatile__(
 		"	.set	push				\n"
 		"	.set	noat				\n"
-		"	.set	mips2				\n"
+		"	.set	mips3				\n"
 		"1:	ll	%0, %2		# __cmpxchg_u32	\n"
 		"	bne	%0, %z3, 2f			\n"
 		"	move	$1, %z4				\n"
@@ -320,7 +320,7 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
 		__asm__ __volatile__(
 		"	.set	push				\n"
 		"	.set	noat				\n"
-		"	.set	mips2				\n"
+		"	.set	mips3				\n"
 		"1:	ll	%0, %2		# __cmpxchg_u32	\n"
 		"	bne	%0, %z3, 2f			\n"
 		"	move	$1, %z4				\n"
@@ -376,7 +376,7 @@ static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
 		__asm__ __volatile__(
 		"	.set	push				\n"
 		"	.set	noat				\n"
-		"	.set	mips2				\n"
+		"	.set	mips3				\n"
 		"1:	lld	%0, %2		# __cmpxchg_u64	\n"
 		"	bne	%0, %z3, 2f			\n"
 		"	move	$1, %z4				\n"
...
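Note that the __cmpxchg_u64 hunk above previously said ".set mips2" around an lld, which MIPS II does not define; the blanket move to mips3 fixes that outright. A sketch of the full compare-and-swap shape these hunks belong to (illustrative; the sc/beqz tail and the closing ".set pop" lie outside the hunks shown and are assumed from the surrounding kernel code):

	/*
	 * Sketch only: ll/sc compare-and-swap.  ".set noat" frees $1, the
	 * assembler temporary, for use as a scratch register; the "J"
	 * constraint plus the %z operand modifier print $0 when the
	 * corresponding value is the constant zero.
	 */
	static inline unsigned long cmpxchg_u32_sketch(volatile int *m,
						       unsigned long old,
						       unsigned long new)
	{
		unsigned long retval;

		__asm__ __volatile__(
		"	.set	push				\n"
		"	.set	noat				\n"
		"	.set	mips3				\n"
		"1:	ll	%0, %2		# cmpxchg_u32	\n"
		"	bne	%0, %z3, 2f			\n" /* mismatch: give up */
		"	move	$1, %z4				\n"
		"	sc	$1, %1				\n" /* attempt the store */
		"	beqz	$1, 1b				\n" /* reservation lost: retry */
		"2:						\n"
		"	.set	pop				\n"
		: "=&r" (retval), "=m" (*m)
		: "m" (*m), "Jr" (old), "Jr" (new)
		: "memory");

		return retval;
	}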