MIPS: Fix microMIPS LL/SC immediate offsets
author		Maciej W. Rozycki <macro@codesourcery.com>
		Sat, 15 Nov 2014 22:08:48 +0000 (22:08 +0000)
committer	Ralf Baechle <ralf@linux-mips.org>
		Mon, 24 Nov 2014 06:45:36 +0000 (07:45 +0100)
In the microMIPS encoding some memory access instructions, LL and SC
among them, have their immediate offset reduced to 12 bits only.  That
does not match the GCC `R' constraint we use in some places to keep the
offset within range, resulting in build failures like this:

{standard input}: Assembler messages:
{standard input}:720: Error: macro used $at after ".set noat"
{standard input}:720: Warning: macro instruction expanded into multiple instructions
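
For illustration, the failing pattern boils down to an LL/SC loop whose
memory operand is constrained with `R' (or `m'); the sketch below is a
hypothetical helper, not code taken from these sources:

	static inline void ll_sc_inc(int *p)
	{
		int tmp;

		__asm__ __volatile__(
		"	.set	push				\n"
		"	.set	noat				\n"
		"1:	ll	%0, %2	# load-linked		\n"
		"	addiu	%0, 1	# modify		\n"
		"	sc	%0, %1	# store-conditional	\n"
		"	beqz	%0, 1b	# retry on failure	\n"
		"	.set	pop				\n"
		: "=&r" (tmp), "=R" (*p)
		: "R" (*p)
		: "memory");
	}

Under -mmicromips the offset GCC substitutes for an `R' operand may
exceed the 12-bit range of the microMIPS LL/SC encodings, at which point
the assembler expands the instruction into a macro that uses $at and
fails as shown above.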

Fix the problem by defining a macro, `GCC_OFF12_ASM', that expands to
the right constraint depending on whether microMIPS or standard MIPS
code is produced.  Also apply the fix where `m' is used: in the worst
case this change does nothing, e.g. where the pointer was already in a
register such as a function argument and no further offset was
requested, while in the best case it avoids an extraneous sequence of up
to two instructions to load the high 20 bits of the address in the LL/SC
loop.  This reduces the risk of lock contention, which grows with the
number of instructions executed between LL and SC.

Strictly speaking we could just bulk-replace `R' with `ZC', as the
latter constraint adjusts automatically depending on the ISA selected.
However `ZC' was only introduced with GCC 4.9 and we keep supporting
older compilers for the standard MIPS configuration, hence the slightly
more complicated approach I chose.
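
The resulting definition, quoted from the asm/compiler.h hunk below,
makes that trade-off explicit:

	#ifndef CONFIG_CPU_MICROMIPS
	#define GCC_OFF12_ASM() "R"
	#elif __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9)
	#define GCC_OFF12_ASM() "ZC"
	#else
	#error "microMIPS compilation unsupported with GCC older than 4.9"
	#endif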

A zero-argument function-like macro was chosen over an object-like one
so that its use does not look like a function call taking the C
expression used for the constraint as an argument.  This avoids
confusing the reader or formatting checkers like `checkpatch.pl', and
follows previous practice.
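
At each use site the chosen constraint letter is pasted onto the `+',
`=' or empty modifier by string-literal concatenation, as in this
operand list from the atomic.h hunk below:

	: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)
	: "Ir" (i));

which the preprocessor turns into "+R" (v->counter) on standard MIPS and
"+ZC" (v->counter) on microMIPS built with GCC 4.9 or newer.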

Signed-off-by: Maciej W. Rozycki <macro@codesourcery.com>
Signed-off-by: Steven J. Hill <Steven.Hill@imgtec.com>
Cc: linux-mips@linux-mips.org
Patchwork: https://patchwork.linux-mips.org/patch/8482/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
arch/mips/include/asm/atomic.h
arch/mips/include/asm/bitops.h
arch/mips/include/asm/cmpxchg.h
arch/mips/include/asm/compiler.h
arch/mips/include/asm/edac.h
arch/mips/include/asm/futex.h
arch/mips/include/asm/mach-pmcs-msp71xx/msp_regops.h
arch/mips/include/asm/octeon/cvmx-cmd-queue.h
arch/mips/include/asm/spinlock.h

diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index 6dd6bfc607e9a6bee5a2beead58741526d23369e..ec4b4d658bc4bfe78b9aaaaeff18e72fa5e1143d 100644
@@ -17,6 +17,7 @@
 #include <linux/irqflags.h>
 #include <linux/types.h>
 #include <asm/barrier.h>
+#include <asm/compiler.h>
 #include <asm/cpu-features.h>
 #include <asm/cmpxchg.h>
 #include <asm/war.h>
@@ -53,7 +54,7 @@ static __inline__ void atomic_##op(int i, atomic_t * v)                               \
                "       sc      %0, %1                                  \n"     \
                "       beqzl   %0, 1b                                  \n"     \
                "       .set    mips0                                   \n"     \
-               : "=&r" (temp), "+m" (v->counter)                               \
+               : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)                \
                : "Ir" (i));                                                    \
        } else if (kernel_uses_llsc) {                                          \
                int temp;                                                       \
@@ -65,7 +66,7 @@ static __inline__ void atomic_##op(int i, atomic_t * v)                               \
                        "       " #asm_op " %0, %2                      \n"     \
                        "       sc      %0, %1                          \n"     \
                        "       .set    mips0                           \n"     \
-                       : "=&r" (temp), "+m" (v->counter)                       \
+                       : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)        \
                        : "Ir" (i));                                            \
                } while (unlikely(!temp));                                      \
        } else {                                                                \
@@ -95,7 +96,8 @@ static __inline__ int atomic_##op##_return(int i, atomic_t * v)                       \
                "       beqzl   %0, 1b                                  \n"     \
                "       " #asm_op " %0, %1, %3                          \n"     \
                "       .set    mips0                                   \n"     \
-               : "=&r" (result), "=&r" (temp), "+m" (v->counter)               \
+               : "=&r" (result), "=&r" (temp),                                 \
+                 "+" GCC_OFF12_ASM() (v->counter)                              \
                : "Ir" (i));                                                    \
        } else if (kernel_uses_llsc) {                                          \
                int temp;                                                       \
@@ -107,7 +109,8 @@ static __inline__ int atomic_##op##_return(int i, atomic_t * v)                     \
                        "       " #asm_op " %0, %1, %3                  \n"     \
                        "       sc      %0, %2                          \n"     \
                        "       .set    mips0                           \n"     \
-                       : "=&r" (result), "=&r" (temp), "+m" (v->counter)       \
+                       : "=&r" (result), "=&r" (temp),                         \
+                         "+" GCC_OFF12_ASM() (v->counter)                      \
                        : "Ir" (i));                                            \
                } while (unlikely(!result));                                    \
                                                                                \
@@ -167,8 +170,9 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
-               : "=&r" (result), "=&r" (temp), "+m" (v->counter)
-               : "Ir" (i), "m" (v->counter)
+               : "=&r" (result), "=&r" (temp),
+                 "+" GCC_OFF12_ASM() (v->counter)
+               : "Ir" (i), GCC_OFF12_ASM() (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                int temp;
@@ -185,7 +189,8 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
-               : "=&r" (result), "=&r" (temp), "+m" (v->counter)
+               : "=&r" (result), "=&r" (temp),
+                 "+" GCC_OFF12_ASM() (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;
@@ -328,7 +333,7 @@ static __inline__ void atomic64_##op(long i, atomic64_t * v)                        \
                "       scd     %0, %1                                  \n"     \
                "       beqzl   %0, 1b                                  \n"     \
                "       .set    mips0                                   \n"     \
-               : "=&r" (temp), "+m" (v->counter)                               \
+               : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)                \
                : "Ir" (i));                                                    \
        } else if (kernel_uses_llsc) {                                          \
                long temp;                                                      \
@@ -340,7 +345,7 @@ static __inline__ void atomic64_##op(long i, atomic64_t * v)                        \
                        "       " #asm_op " %0, %2                      \n"     \
                        "       scd     %0, %1                          \n"     \
                        "       .set    mips0                           \n"     \
-                       : "=&r" (temp), "+m" (v->counter)                       \
+                       : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)        \
                        : "Ir" (i));                                            \
                } while (unlikely(!temp));                                      \
        } else {                                                                \
@@ -370,7 +375,8 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)               \
                "       beqzl   %0, 1b                                  \n"     \
                "       " #asm_op " %0, %1, %3                          \n"     \
                "       .set    mips0                                   \n"     \
-               : "=&r" (result), "=&r" (temp), "+m" (v->counter)               \
+               : "=&r" (result), "=&r" (temp),                                 \
+                 "+" GCC_OFF12_ASM() (v->counter)                              \
                : "Ir" (i));                                                    \
        } else if (kernel_uses_llsc) {                                          \
                long temp;                                                      \
@@ -382,8 +388,9 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)               \
                        "       " #asm_op " %0, %1, %3                  \n"     \
                        "       scd     %0, %2                          \n"     \
                        "       .set    mips0                           \n"     \
-                       : "=&r" (result), "=&r" (temp), "=m" (v->counter)       \
-                       : "Ir" (i), "m" (v->counter)                            \
+                       : "=&r" (result), "=&r" (temp),                         \
+                         "=" GCC_OFF12_ASM() (v->counter)                      \
+                       : "Ir" (i), GCC_OFF12_ASM() (v->counter)                \
                        : "memory");                                            \
                } while (unlikely(!result));                                    \
                                                                                \
@@ -443,8 +450,9 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
-               : "=&r" (result), "=&r" (temp), "=m" (v->counter)
-               : "Ir" (i), "m" (v->counter)
+               : "=&r" (result), "=&r" (temp),
+                 "=" GCC_OFF12_ASM() (v->counter)
+               : "Ir" (i), GCC_OFF12_ASM() (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                long temp;
@@ -461,7 +469,8 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
-               : "=&r" (result), "=&r" (temp), "+m" (v->counter)
+               : "=&r" (result), "=&r" (temp),
+                 "+" GCC_OFF12_ASM() (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index bae6b0fa8ab5975d90a7a861b57368acc8adf575..6663bcca9d0c626886529ae5eb7a92e75b75cd46 100644
@@ -17,6 +17,7 @@
 #include <linux/types.h>
 #include <asm/barrier.h>
 #include <asm/byteorder.h>             /* sigh ... */
+#include <asm/compiler.h>
 #include <asm/cpu-features.h>
 #include <asm/sgidefs.h>
 #include <asm/war.h>
@@ -78,8 +79,8 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
                "       " __SC  "%0, %1                                 \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
-               : "=&r" (temp), "=m" (*m)
-               : "ir" (1UL << bit), "m" (*m));
+               : "=&r" (temp), "=" GCC_OFF12_ASM() (*m)
+               : "ir" (1UL << bit), GCC_OFF12_ASM() (*m));
 #ifdef CONFIG_CPU_MIPSR2
        } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
                do {
@@ -87,7 +88,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
                        "       " __LL "%0, %1          # set_bit       \n"
                        "       " __INS "%0, %3, %2, 1                  \n"
                        "       " __SC "%0, %1                          \n"
-                       : "=&r" (temp), "+m" (*m)
+                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m)
                        : "ir" (bit), "r" (~0));
                } while (unlikely(!temp));
 #endif /* CONFIG_CPU_MIPSR2 */
@@ -99,7 +100,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
                        "       or      %0, %2                          \n"
                        "       " __SC  "%0, %1                         \n"
                        "       .set    mips0                           \n"
-                       : "=&r" (temp), "+m" (*m)
+                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m)
                        : "ir" (1UL << bit));
                } while (unlikely(!temp));
        } else
@@ -130,7 +131,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
                "       " __SC "%0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
-               : "=&r" (temp), "+m" (*m)
+               : "=&r" (temp), "+" GCC_OFF12_ASM() (*m)
                : "ir" (~(1UL << bit)));
 #ifdef CONFIG_CPU_MIPSR2
        } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
@@ -139,7 +140,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
                        "       " __LL "%0, %1          # clear_bit     \n"
                        "       " __INS "%0, $0, %2, 1                  \n"
                        "       " __SC "%0, %1                          \n"
-                       : "=&r" (temp), "+m" (*m)
+                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m)
                        : "ir" (bit));
                } while (unlikely(!temp));
 #endif /* CONFIG_CPU_MIPSR2 */
@@ -151,7 +152,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
                        "       and     %0, %2                          \n"
                        "       " __SC "%0, %1                          \n"
                        "       .set    mips0                           \n"
-                       : "=&r" (temp), "+m" (*m)
+                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m)
                        : "ir" (~(1UL << bit)));
                } while (unlikely(!temp));
        } else
@@ -196,7 +197,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
                "       " __SC  "%0, %1                         \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    mips0                           \n"
-               : "=&r" (temp), "+m" (*m)
+               : "=&r" (temp), "+" GCC_OFF12_ASM() (*m)
                : "ir" (1UL << bit));
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -209,7 +210,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
                        "       xor     %0, %2                          \n"
                        "       " __SC  "%0, %1                         \n"
                        "       .set    mips0                           \n"
-                       : "=&r" (temp), "+m" (*m)
+                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m)
                        : "ir" (1UL << bit));
                } while (unlikely(!temp));
        } else
@@ -244,7 +245,7 @@ static inline int test_and_set_bit(unsigned long nr,
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
-               : "=&r" (temp), "+m" (*m), "=&r" (res)
+               : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
@@ -258,7 +259,7 @@ static inline int test_and_set_bit(unsigned long nr,
                        "       or      %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    mips0                           \n"
-                       : "=&r" (temp), "+m" (*m), "=&r" (res)
+                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));
@@ -312,7 +313,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
                        "       or      %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    mips0                           \n"
-                       : "=&r" (temp), "+m" (*m), "=&r" (res)
+                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));
@@ -354,7 +355,7 @@ static inline int test_and_clear_bit(unsigned long nr,
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
-               : "=&r" (temp), "+m" (*m), "=&r" (res)
+               : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
 #ifdef CONFIG_CPU_MIPSR2
@@ -368,7 +369,7 @@ static inline int test_and_clear_bit(unsigned long nr,
                        "       " __EXT "%2, %0, %3, 1                  \n"
                        "       " __INS "%0, $0, %3, 1                  \n"
                        "       " __SC  "%0, %1                         \n"
-                       : "=&r" (temp), "+m" (*m), "=&r" (res)
+                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res)
                        : "ir" (bit)
                        : "memory");
                } while (unlikely(!temp));
@@ -385,7 +386,7 @@ static inline int test_and_clear_bit(unsigned long nr,
                        "       xor     %2, %3                          \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    mips0                           \n"
-                       : "=&r" (temp), "+m" (*m), "=&r" (res)
+                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));
@@ -427,7 +428,7 @@ static inline int test_and_change_bit(unsigned long nr,
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
-               : "=&r" (temp), "+m" (*m), "=&r" (res)
+               : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
@@ -441,7 +442,7 @@ static inline int test_and_change_bit(unsigned long nr,
                        "       xor     %2, %0, %3                      \n"
                        "       " __SC  "\t%2, %1                       \n"
                        "       .set    mips0                           \n"
-                       : "=&r" (temp), "+m" (*m), "=&r" (res)
+                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));
diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h
index eefcaa363a875f3f0a0f0c727bcd0cefe2e5a701..28b1edf195016b80a89854b9ae11c28a1a9c04ce 100644
@@ -10,6 +10,7 @@
 
 #include <linux/bug.h>
 #include <linux/irqflags.h>
+#include <asm/compiler.h>
 #include <asm/war.h>
 
 static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
@@ -30,8 +31,8 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
                "       sc      %2, %1                                  \n"
                "       beqzl   %2, 1b                                  \n"
                "       .set    mips0                                   \n"
-               : "=&r" (retval), "=m" (*m), "=&r" (dummy)
-               : "R" (*m), "Jr" (val)
+               : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), "=&r" (dummy)
+               : GCC_OFF12_ASM() (*m), "Jr" (val)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long dummy;
@@ -45,8 +46,9 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
                        "       .set    arch=r4000                      \n"
                        "       sc      %2, %1                          \n"
                        "       .set    mips0                           \n"
-                       : "=&r" (retval), "=m" (*m), "=&r" (dummy)
-                       : "R" (*m), "Jr" (val)
+                       : "=&r" (retval), "=" GCC_OFF12_ASM() (*m),
+                         "=&r" (dummy)
+                       : GCC_OFF12_ASM() (*m), "Jr" (val)
                        : "memory");
                } while (unlikely(!dummy));
        } else {
@@ -80,8 +82,8 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
                "       scd     %2, %1                                  \n"
                "       beqzl   %2, 1b                                  \n"
                "       .set    mips0                                   \n"
-               : "=&r" (retval), "=m" (*m), "=&r" (dummy)
-               : "R" (*m), "Jr" (val)
+               : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), "=&r" (dummy)
+               : GCC_OFF12_ASM() (*m), "Jr" (val)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long dummy;
@@ -93,8 +95,9 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
                        "       move    %2, %z4                         \n"
                        "       scd     %2, %1                          \n"
                        "       .set    mips0                           \n"
-                       : "=&r" (retval), "=m" (*m), "=&r" (dummy)
-                       : "R" (*m), "Jr" (val)
+                       : "=&r" (retval), "=" GCC_OFF12_ASM() (*m),
+                         "=&r" (dummy)
+                       : GCC_OFF12_ASM() (*m), "Jr" (val)
                        : "memory");
                } while (unlikely(!dummy));
        } else {
@@ -155,8 +158,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
                "       beqzl   $1, 1b                          \n"     \
                "2:                                             \n"     \
                "       .set    pop                             \n"     \
-               : "=&r" (__ret), "=R" (*m)                              \
-               : "R" (*m), "Jr" (old), "Jr" (new)                      \
+               : "=&r" (__ret), "=" GCC_OFF12_ASM() (*m)               \
+               : GCC_OFF12_ASM() (*m), "Jr" (old), "Jr" (new)          \
                : "memory");                                            \
        } else if (kernel_uses_llsc) {                                  \
                __asm__ __volatile__(                                   \
@@ -172,8 +175,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
                "       beqz    $1, 1b                          \n"     \
                "       .set    pop                             \n"     \
                "2:                                             \n"     \
-               : "=&r" (__ret), "=R" (*m)                              \
-               : "R" (*m), "Jr" (old), "Jr" (new)                      \
+               : "=&r" (__ret), "=" GCC_OFF12_ASM() (*m)               \
+               : GCC_OFF12_ASM() (*m), "Jr" (old), "Jr" (new)          \
                : "memory");                                            \
        } else {                                                        \
                unsigned long __flags;                                  \
diff --git a/arch/mips/include/asm/compiler.h b/arch/mips/include/asm/compiler.h
index 71f5c5cfc58abbd9b893a90e19dadb132eb93260..c73815e0123a756bc0579806c7e8333b26712d08 100644
 #define GCC_REG_ACCUM "accum"
 #endif
 
+#ifndef CONFIG_CPU_MICROMIPS
+#define GCC_OFF12_ASM() "R"
+#elif __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9)
+#define GCC_OFF12_ASM() "ZC"
+#else
+#error "microMIPS compilation unsupported with GCC older than 4.9"
+#endif
+
 #endif /* _ASM_COMPILER_H */
diff --git a/arch/mips/include/asm/edac.h b/arch/mips/include/asm/edac.h
index 4da0c1fe30d9fedcbfa1641d35f358ca25d9f8ce..ae6fedcb0060f22c69091480f55228dd2ef4383d 100644
@@ -1,6 +1,8 @@
 #ifndef ASM_EDAC_H
 #define ASM_EDAC_H
 
+#include <asm/compiler.h>
+
 /* ECC atomic, DMA, SMP and interrupt safe scrub function */
 
 static inline void atomic_scrub(void *va, u32 size)
@@ -24,8 +26,8 @@ static inline void atomic_scrub(void *va, u32 size)
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
-               : "=&r" (temp), "=m" (*virt_addr)
-               : "m" (*virt_addr));
+               : "=&r" (temp), "=" GCC_OFF12_ASM() (*virt_addr)
+               : GCC_OFF12_ASM() (*virt_addr));
 
                virt_addr++;
        }
diff --git a/arch/mips/include/asm/futex.h b/arch/mips/include/asm/futex.h
index 194cda0396a345f2d8cd890677028ab5d5da98fa..d0177bf915bb359fd39814cd3b23dad412ee7ee7 100644
@@ -14,6 +14,7 @@
 #include <linux/uaccess.h>
 #include <asm/asm-eva.h>
 #include <asm/barrier.h>
+#include <asm/compiler.h>
 #include <asm/errno.h>
 #include <asm/war.h>
 
                "       "__UA_ADDR "\t1b, 4b                    \n"     \
                "       "__UA_ADDR "\t2b, 4b                    \n"     \
                "       .previous                               \n"     \
-               : "=r" (ret), "=&r" (oldval), "=R" (*uaddr)             \
-               : "0" (0), "R" (*uaddr), "Jr" (oparg), "i" (-EFAULT)    \
+               : "=r" (ret), "=&r" (oldval),                           \
+                 "=" GCC_OFF12_ASM() (*uaddr)                          \
+               : "0" (0), GCC_OFF12_ASM() (*uaddr), "Jr" (oparg),      \
+                 "i" (-EFAULT)                                         \
                : "memory");                                            \
        } else if (cpu_has_llsc) {                                      \
                __asm__ __volatile__(                                   \
                "       "__UA_ADDR "\t1b, 4b                    \n"     \
                "       "__UA_ADDR "\t2b, 4b                    \n"     \
                "       .previous                               \n"     \
-               : "=r" (ret), "=&r" (oldval), "=R" (*uaddr)             \
-               : "0" (0), "R" (*uaddr), "Jr" (oparg), "i" (-EFAULT)    \
+               : "=r" (ret), "=&r" (oldval),                           \
+                 "=" GCC_OFF12_ASM() (*uaddr)                          \
+               : "0" (0), GCC_OFF12_ASM() (*uaddr), "Jr" (oparg),      \
+                 "i" (-EFAULT)                                         \
                : "memory");                                            \
        } else                                                          \
                ret = -ENOSYS;                                          \
@@ -166,8 +171,9 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                "       "__UA_ADDR "\t1b, 4b                            \n"
                "       "__UA_ADDR "\t2b, 4b                            \n"
                "       .previous                                       \n"
-               : "+r" (ret), "=&r" (val), "=R" (*uaddr)
-               : "R" (*uaddr), "Jr" (oldval), "Jr" (newval), "i" (-EFAULT)
+               : "+r" (ret), "=&r" (val), "=" GCC_OFF12_ASM() (*uaddr)
+               : GCC_OFF12_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
+                 "i" (-EFAULT)
                : "memory");
        } else if (cpu_has_llsc) {
                __asm__ __volatile__(
@@ -193,8 +199,9 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                "       "__UA_ADDR "\t1b, 4b                            \n"
                "       "__UA_ADDR "\t2b, 4b                            \n"
                "       .previous                                       \n"
-               : "+r" (ret), "=&r" (val), "=R" (*uaddr)
-               : "R" (*uaddr), "Jr" (oldval), "Jr" (newval), "i" (-EFAULT)
+               : "+r" (ret), "=&r" (val), "=" GCC_OFF12_ASM() (*uaddr)
+               : GCC_OFF12_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
+                 "i" (-EFAULT)
                : "memory");
        } else
                return -ENOSYS;
diff --git a/arch/mips/include/asm/mach-pmcs-msp71xx/msp_regops.h b/arch/mips/include/asm/mach-pmcs-msp71xx/msp_regops.h
index fc946c8359952a5682e76aa361657eefd3a538c7..2e54b4bff5cf59e744b9cb3a83e44bca747a9136 100644
@@ -49,6 +49,7 @@
 
 #include <linux/types.h>
 
+#include <asm/compiler.h>
 #include <asm/war.h>
 
 #ifndef R10000_LLSC_WAR
@@ -84,8 +85,8 @@ static inline void set_value_reg32(volatile u32 *const addr,
        "       "__beqz"%0, 1b                          \n"
        "       nop                                     \n"
        "       .set    pop                             \n"
-       : "=&r" (temp), "=m" (*addr)
-       : "ir" (~mask), "ir" (value), "m" (*addr));
+       : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr)
+       : "ir" (~mask), "ir" (value), GCC_OFF12_ASM() (*addr));
 }
 
 /*
@@ -105,8 +106,8 @@ static inline void set_reg32(volatile u32 *const addr,
        "       "__beqz"%0, 1b                          \n"
        "       nop                                     \n"
        "       .set    pop                             \n"
-       : "=&r" (temp), "=m" (*addr)
-       : "ir" (mask), "m" (*addr));
+       : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr)
+       : "ir" (mask), GCC_OFF12_ASM() (*addr));
 }
 
 /*
@@ -126,8 +127,8 @@ static inline void clear_reg32(volatile u32 *const addr,
        "       "__beqz"%0, 1b                          \n"
        "       nop                                     \n"
        "       .set    pop                             \n"
-       : "=&r" (temp), "=m" (*addr)
-       : "ir" (~mask), "m" (*addr));
+       : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr)
+       : "ir" (~mask), GCC_OFF12_ASM() (*addr));
 }
 
 /*
@@ -147,8 +148,8 @@ static inline void toggle_reg32(volatile u32 *const addr,
        "       "__beqz"%0, 1b                          \n"
        "       nop                                     \n"
        "       .set    pop                             \n"
-       : "=&r" (temp), "=m" (*addr)
-       : "ir" (mask), "m" (*addr));
+       : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr)
+       : "ir" (mask), GCC_OFF12_ASM() (*addr));
 }
 
 /*
@@ -219,8 +220,8 @@ static inline u32 blocking_read_reg32(volatile u32 *const addr)
        "       .set    arch=r4000                      \n"     \
        "1:     ll      %0, %1  #custom_read_reg32      \n"     \
        "       .set    pop                             \n"     \
-       : "=r" (tmp), "=m" (*address)                           \
-       : "m" (*address))
+       : "=r" (tmp), "=" GCC_OFF12_ASM() (*address)            \
+       : GCC_OFF12_ASM() (*address))
 
 #define custom_write_reg32(address, tmp)                       \
        __asm__ __volatile__(                                   \
@@ -230,7 +231,7 @@ static inline u32 blocking_read_reg32(volatile u32 *const addr)
        "       "__beqz"%0, 1b                          \n"     \
        "       nop                                     \n"     \
        "       .set    pop                             \n"     \
-       : "=&r" (tmp), "=m" (*address)                          \
-       : "0" (tmp), "m" (*address))
+       : "=&r" (tmp), "=" GCC_OFF12_ASM() (*address)           \
+       : "0" (tmp), GCC_OFF12_ASM() (*address))
 
 #endif /* __ASM_REGOPS_H__ */
diff --git a/arch/mips/include/asm/octeon/cvmx-cmd-queue.h b/arch/mips/include/asm/octeon/cvmx-cmd-queue.h
index 024a71b2bff91d641c22259db0c8b8451bf8b09a..75739c83f07e74bb26ab5dbc0fd32c34401c838b 100644
@@ -76,6 +76,8 @@
 
 #include <linux/prefetch.h>
 
+#include <asm/compiler.h>
+
 #include <asm/octeon/cvmx-fpa.h>
 /**
  * By default we disable the max depth support. Most programs
@@ -273,7 +275,7 @@ static inline void __cvmx_cmd_queue_lock(cvmx_cmd_queue_id_t queue_id,
                " lbu   %[ticket], %[now_serving]\n"
                "4:\n"
                ".set pop\n" :
-               [ticket_ptr] "=m"(__cvmx_cmd_queue_state_ptr->ticket[__cvmx_cmd_queue_get_index(queue_id)]),
+               [ticket_ptr] "=" GCC_OFF12_ASM()(__cvmx_cmd_queue_state_ptr->ticket[__cvmx_cmd_queue_get_index(queue_id)]),
                [now_serving] "=m"(qptr->now_serving), [ticket] "=r"(tmp),
                [my_ticket] "=r"(my_ticket)
            );
diff --git a/arch/mips/include/asm/spinlock.h b/arch/mips/include/asm/spinlock.h
index 78d201fb6c87c93608b8295327277a6e8804818e..c6d06d383ef90df1cf7bb8a4f69aaa641b40e7fd 100644
@@ -12,6 +12,7 @@
 #include <linux/compiler.h>
 
 #include <asm/barrier.h>
+#include <asm/compiler.h>
 #include <asm/war.h>
 
 /*
@@ -88,7 +89,7 @@ static inline void arch_spin_lock(arch_spinlock_t *lock)
                "        subu   %[ticket], %[ticket], 1                 \n"
                "       .previous                                       \n"
                "       .set pop                                        \n"
-               : [ticket_ptr] "+m" (lock->lock),
+               : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock),
                  [serving_now_ptr] "+m" (lock->h.serving_now),
                  [ticket] "=&r" (tmp),
                  [my_ticket] "=&r" (my_ticket)
@@ -121,7 +122,7 @@ static inline void arch_spin_lock(arch_spinlock_t *lock)
                "        subu   %[ticket], %[ticket], 1                 \n"
                "       .previous                                       \n"
                "       .set pop                                        \n"
-               : [ticket_ptr] "+m" (lock->lock),
+               : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock),
                  [serving_now_ptr] "+m" (lock->h.serving_now),
                  [ticket] "=&r" (tmp),
                  [my_ticket] "=&r" (my_ticket)
@@ -163,7 +164,7 @@ static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock)
                "        li     %[ticket], 0                            \n"
                "       .previous                                       \n"
                "       .set pop                                        \n"
-               : [ticket_ptr] "+m" (lock->lock),
+               : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock),
                  [ticket] "=&r" (tmp),
                  [my_ticket] "=&r" (tmp2),
                  [now_serving] "=&r" (tmp3)
@@ -187,7 +188,7 @@ static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock)
                "        li     %[ticket], 0                            \n"
                "       .previous                                       \n"
                "       .set pop                                        \n"
-               : [ticket_ptr] "+m" (lock->lock),
+               : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock),
                  [ticket] "=&r" (tmp),
                  [my_ticket] "=&r" (tmp2),
                  [now_serving] "=&r" (tmp3)
@@ -234,8 +235,8 @@ static inline void arch_read_lock(arch_rwlock_t *rw)
                "       beqzl   %1, 1b                                  \n"
                "        nop                                            \n"
                "       .set    reorder                                 \n"
-               : "=m" (rw->lock), "=&r" (tmp)
-               : "m" (rw->lock)
+               : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp)
+               : GCC_OFF12_ASM() (rw->lock)
                : "memory");
        } else {
                do {
@@ -244,8 +245,8 @@ static inline void arch_read_lock(arch_rwlock_t *rw)
                        "       bltz    %1, 1b                          \n"
                        "        addu   %1, 1                           \n"
                        "2:     sc      %1, %0                          \n"
-                       : "=m" (rw->lock), "=&r" (tmp)
-                       : "m" (rw->lock)
+                       : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp)
+                       : GCC_OFF12_ASM() (rw->lock)
                        : "memory");
                } while (unlikely(!tmp));
        }
@@ -268,8 +269,8 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
                "       sub     %1, 1                                   \n"
                "       sc      %1, %0                                  \n"
                "       beqzl   %1, 1b                                  \n"
-               : "=m" (rw->lock), "=&r" (tmp)
-               : "m" (rw->lock)
+               : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp)
+               : GCC_OFF12_ASM() (rw->lock)
                : "memory");
        } else {
                do {
@@ -277,8 +278,8 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
                        "1:     ll      %1, %2  # arch_read_unlock      \n"
                        "       sub     %1, 1                           \n"
                        "       sc      %1, %0                          \n"
-                       : "=m" (rw->lock), "=&r" (tmp)
-                       : "m" (rw->lock)
+                       : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp)
+                       : GCC_OFF12_ASM() (rw->lock)
                        : "memory");
                } while (unlikely(!tmp));
        }
@@ -298,8 +299,8 @@ static inline void arch_write_lock(arch_rwlock_t *rw)
                "       beqzl   %1, 1b                                  \n"
                "        nop                                            \n"
                "       .set    reorder                                 \n"
-               : "=m" (rw->lock), "=&r" (tmp)
-               : "m" (rw->lock)
+               : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp)
+               : GCC_OFF12_ASM() (rw->lock)
                : "memory");
        } else {
                do {
@@ -308,8 +309,8 @@ static inline void arch_write_lock(arch_rwlock_t *rw)
                        "       bnez    %1, 1b                          \n"
                        "        lui    %1, 0x8000                      \n"
                        "2:     sc      %1, %0                          \n"
-                       : "=m" (rw->lock), "=&r" (tmp)
-                       : "m" (rw->lock)
+                       : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp)
+                       : GCC_OFF12_ASM() (rw->lock)
                        : "memory");
                } while (unlikely(!tmp));
        }
@@ -348,8 +349,8 @@ static inline int arch_read_trylock(arch_rwlock_t *rw)
                __WEAK_LLSC_MB
                "       li      %2, 1                                   \n"
                "2:                                                     \n"
-               : "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
-               : "m" (rw->lock)
+               : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
+               : GCC_OFF12_ASM() (rw->lock)
                : "memory");
        } else {
                __asm__ __volatile__(
@@ -365,8 +366,8 @@ static inline int arch_read_trylock(arch_rwlock_t *rw)
                __WEAK_LLSC_MB
                "       li      %2, 1                                   \n"
                "2:                                                     \n"
-               : "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
-               : "m" (rw->lock)
+               : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
+               : GCC_OFF12_ASM() (rw->lock)
                : "memory");
        }
 
@@ -392,8 +393,8 @@ static inline int arch_write_trylock(arch_rwlock_t *rw)
                "       li      %2, 1                                   \n"
                "       .set    reorder                                 \n"
                "2:                                                     \n"
-               : "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
-               : "m" (rw->lock)
+               : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
+               : GCC_OFF12_ASM() (rw->lock)
                : "memory");
        } else {
                do {
@@ -405,8 +406,9 @@ static inline int arch_write_trylock(arch_rwlock_t *rw)
                        "       sc      %1, %0                          \n"
                        "       li      %2, 1                           \n"
                        "2:                                             \n"
-                       : "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
-                       : "m" (rw->lock)
+                       : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp),
+                         "=&r" (ret)
+                       : GCC_OFF12_ASM() (rw->lock)
                        : "memory");
                } while (unlikely(!tmp));