]> git.proxmox.com Git - mirror_ubuntu-kernels.git/commitdiff
MIPS: asm: Rename GCC_OFF12_ASM to GCC_OFF_SMALL_ASM
authorMarkos Chandras <markos.chandras@imgtec.com>
Mon, 26 Jan 2015 12:44:11 +0000 (12:44 +0000)
committerMarkos Chandras <markos.chandras@imgtec.com>
Tue, 17 Feb 2015 15:37:21 +0000 (15:37 +0000)
The GCC_OFF12_ASM macro is used for 12-bit immediate constraints
but we will also use it for 9-bit constraints on MIPS R6 so we
rename it to something more appropriate.

Cc: Maciej W. Rozycki <macro@linux-mips.org>
Signed-off-by: Markos Chandras <markos.chandras@imgtec.com>
arch/mips/include/asm/atomic.h
arch/mips/include/asm/bitops.h
arch/mips/include/asm/cmpxchg.h
arch/mips/include/asm/compiler.h
arch/mips/include/asm/edac.h
arch/mips/include/asm/futex.h
arch/mips/include/asm/mach-pmcs-msp71xx/msp_regops.h
arch/mips/include/asm/octeon/cvmx-cmd-queue.h
arch/mips/include/asm/spinlock.h

index 857da84cfc92eb20bd7f29cb5d9b3c1e16b86203..3a44c2f17e5305d57dc7a70308b722a7d03a64d6 100644 (file)
@@ -54,7 +54,7 @@ static __inline__ void atomic_##op(int i, atomic_t * v)                             \
                "       sc      %0, %1                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       .set    mips0                                   \n"   \
-               : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)              \
+               : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)          \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                int temp;                                                     \
@@ -66,7 +66,7 @@ static __inline__ void atomic_##op(int i, atomic_t * v)                             \
                        "       " #asm_op " %0, %2                      \n"   \
                        "       sc      %0, %1                          \n"   \
                        "       .set    mips0                           \n"   \
-                       : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)      \
+                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)      \
                        : "Ir" (i));                                          \
                } while (unlikely(!temp));                                    \
        } else {                                                              \
@@ -97,7 +97,7 @@ static __inline__ int atomic_##op##_return(int i, atomic_t * v)                     \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (result), "=&r" (temp),                               \
-                 "+" GCC_OFF12_ASM() (v->counter)                            \
+                 "+" GCC_OFF_SMALL_ASM() (v->counter)                        \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                int temp;                                                     \
@@ -110,7 +110,7 @@ static __inline__ int atomic_##op##_return(int i, atomic_t * v)                   \
                        "       sc      %0, %2                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (result), "=&r" (temp),                       \
-                         "+" GCC_OFF12_ASM() (v->counter)                    \
+                         "+" GCC_OFF_SMALL_ASM() (v->counter)                \
                        : "Ir" (i));                                          \
                } while (unlikely(!result));                                  \
                                                                              \
@@ -171,8 +171,8 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
-                 "+" GCC_OFF12_ASM() (v->counter)
-               : "Ir" (i), GCC_OFF12_ASM() (v->counter)
+                 "+" GCC_OFF_SMALL_ASM() (v->counter)
+               : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                int temp;
@@ -190,7 +190,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
-                 "+" GCC_OFF12_ASM() (v->counter)
+                 "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;
@@ -333,7 +333,7 @@ static __inline__ void atomic64_##op(long i, atomic64_t * v)                      \
                "       scd     %0, %1                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       .set    mips0                                   \n"   \
-               : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)              \
+               : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)          \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                long temp;                                                    \
@@ -345,7 +345,7 @@ static __inline__ void atomic64_##op(long i, atomic64_t * v)                      \
                        "       " #asm_op " %0, %2                      \n"   \
                        "       scd     %0, %1                          \n"   \
                        "       .set    mips0                           \n"   \
-                       : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)      \
+                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)      \
                        : "Ir" (i));                                          \
                } while (unlikely(!temp));                                    \
        } else {                                                              \
@@ -376,7 +376,7 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)             \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (result), "=&r" (temp),                               \
-                 "+" GCC_OFF12_ASM() (v->counter)                            \
+                 "+" GCC_OFF_SMALL_ASM() (v->counter)                        \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                long temp;                                                    \
@@ -389,8 +389,8 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)             \
                        "       scd     %0, %2                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (result), "=&r" (temp),                       \
-                         "=" GCC_OFF12_ASM() (v->counter)                    \
-                       : "Ir" (i), GCC_OFF12_ASM() (v->counter)              \
+                         "=" GCC_OFF_SMALL_ASM() (v->counter)                \
+                       : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)          \
                        : "memory");                                          \
                } while (unlikely(!result));                                  \
                                                                              \
@@ -452,8 +452,8 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
-                 "=" GCC_OFF12_ASM() (v->counter)
-               : "Ir" (i), GCC_OFF12_ASM() (v->counter)
+                 "=" GCC_OFF_SMALL_ASM() (v->counter)
+               : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                long temp;
@@ -471,7 +471,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
-                 "+" GCC_OFF12_ASM() (v->counter)
+                 "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;
index 6663bcca9d0c626886529ae5eb7a92e75b75cd46..6cc1f539c79ac4f4646d3c91c89627e4e7d06635 100644 (file)
@@ -79,8 +79,8 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
                "       " __SC  "%0, %1                                 \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
-               : "=&r" (temp), "=" GCC_OFF12_ASM() (*m)
-               : "ir" (1UL << bit), GCC_OFF12_ASM() (*m));
+               : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
+               : "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m));
 #ifdef CONFIG_CPU_MIPSR2
        } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
                do {
@@ -88,7 +88,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
                        "       " __LL "%0, %1          # set_bit       \n"
                        "       " __INS "%0, %3, %2, 1                  \n"
                        "       " __SC "%0, %1                          \n"
-                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m)
+                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (bit), "r" (~0));
                } while (unlikely(!temp));
 #endif /* CONFIG_CPU_MIPSR2 */
@@ -100,7 +100,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
                        "       or      %0, %2                          \n"
                        "       " __SC  "%0, %1                         \n"
                        "       .set    mips0                           \n"
-                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m)
+                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (1UL << bit));
                } while (unlikely(!temp));
        } else
@@ -131,7 +131,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
                "       " __SC "%0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
-               : "=&r" (temp), "+" GCC_OFF12_ASM() (*m)
+               : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (~(1UL << bit)));
 #ifdef CONFIG_CPU_MIPSR2
        } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
@@ -140,7 +140,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
                        "       " __LL "%0, %1          # clear_bit     \n"
                        "       " __INS "%0, $0, %2, 1                  \n"
                        "       " __SC "%0, %1                          \n"
-                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m)
+                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (bit));
                } while (unlikely(!temp));
 #endif /* CONFIG_CPU_MIPSR2 */
@@ -152,7 +152,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
                        "       and     %0, %2                          \n"
                        "       " __SC "%0, %1                          \n"
                        "       .set    mips0                           \n"
-                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m)
+                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (~(1UL << bit)));
                } while (unlikely(!temp));
        } else
@@ -197,7 +197,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
                "       " __SC  "%0, %1                         \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    mips0                           \n"
-               : "=&r" (temp), "+" GCC_OFF12_ASM() (*m)
+               : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (1UL << bit));
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -210,7 +210,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
                        "       xor     %0, %2                          \n"
                        "       " __SC  "%0, %1                         \n"
                        "       .set    mips0                           \n"
-                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m)
+                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (1UL << bit));
                } while (unlikely(!temp));
        } else
@@ -245,7 +245,7 @@ static inline int test_and_set_bit(unsigned long nr,
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
-               : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res)
+               : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
@@ -259,7 +259,7 @@ static inline int test_and_set_bit(unsigned long nr,
                        "       or      %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    mips0                           \n"
-                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res)
+                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));
@@ -313,7 +313,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
                        "       or      %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    mips0                           \n"
-                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res)
+                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));
@@ -355,7 +355,7 @@ static inline int test_and_clear_bit(unsigned long nr,
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
-               : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res)
+               : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
 #ifdef CONFIG_CPU_MIPSR2
@@ -369,7 +369,7 @@ static inline int test_and_clear_bit(unsigned long nr,
                        "       " __EXT "%2, %0, %3, 1                  \n"
                        "       " __INS "%0, $0, %3, 1                  \n"
                        "       " __SC  "%0, %1                         \n"
-                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res)
+                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "ir" (bit)
                        : "memory");
                } while (unlikely(!temp));
@@ -386,7 +386,7 @@ static inline int test_and_clear_bit(unsigned long nr,
                        "       xor     %2, %3                          \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    mips0                           \n"
-                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res)
+                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));
@@ -428,7 +428,7 @@ static inline int test_and_change_bit(unsigned long nr,
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
-               : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res)
+               : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
@@ -442,7 +442,7 @@ static inline int test_and_change_bit(unsigned long nr,
                        "       xor     %2, %0, %3                      \n"
                        "       " __SC  "\t%2, %1                       \n"
                        "       .set    mips0                           \n"
-                       : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res)
+                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));
index 28b1edf195016b80a89854b9ae11c28a1a9c04ce..68baa0cf521a412fff48ba6ff118b718b23856a3 100644 (file)
@@ -31,8 +31,8 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
                "       sc      %2, %1                                  \n"
                "       beqzl   %2, 1b                                  \n"
                "       .set    mips0                                   \n"
-               : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), "=&r" (dummy)
-               : GCC_OFF12_ASM() (*m), "Jr" (val)
+               : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy)
+               : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long dummy;
@@ -46,9 +46,9 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
                        "       .set    arch=r4000                      \n"
                        "       sc      %2, %1                          \n"
                        "       .set    mips0                           \n"
-                       : "=&r" (retval), "=" GCC_OFF12_ASM() (*m),
+                       : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m),
                          "=&r" (dummy)
-                       : GCC_OFF12_ASM() (*m), "Jr" (val)
+                       : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
                        : "memory");
                } while (unlikely(!dummy));
        } else {
@@ -82,8 +82,8 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
                "       scd     %2, %1                                  \n"
                "       beqzl   %2, 1b                                  \n"
                "       .set    mips0                                   \n"
-               : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), "=&r" (dummy)
-               : GCC_OFF12_ASM() (*m), "Jr" (val)
+               : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy)
+               : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long dummy;
@@ -95,9 +95,9 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
                        "       move    %2, %z4                         \n"
                        "       scd     %2, %1                          \n"
                        "       .set    mips0                           \n"
-                       : "=&r" (retval), "=" GCC_OFF12_ASM() (*m),
+                       : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m),
                          "=&r" (dummy)
-                       : GCC_OFF12_ASM() (*m), "Jr" (val)
+                       : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
                        : "memory");
                } while (unlikely(!dummy));
        } else {
@@ -158,8 +158,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
                "       beqzl   $1, 1b                          \n"     \
                "2:                                             \n"     \
                "       .set    pop                             \n"     \
-               : "=&r" (__ret), "=" GCC_OFF12_ASM() (*m)               \
-               : GCC_OFF12_ASM() (*m), "Jr" (old), "Jr" (new)          \
+               : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)           \
+               : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)              \
                : "memory");                                            \
        } else if (kernel_uses_llsc) {                                  \
                __asm__ __volatile__(                                   \
@@ -175,8 +175,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
                "       beqz    $1, 1b                          \n"     \
                "       .set    pop                             \n"     \
                "2:                                             \n"     \
-               : "=&r" (__ret), "=" GCC_OFF12_ASM() (*m)               \
-               : GCC_OFF12_ASM() (*m), "Jr" (old), "Jr" (new)          \
+               : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)           \
+               : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)              \
                : "memory");                                            \
        } else {                                                        \
                unsigned long __flags;                                  \
index 10b642f55eb7d7661923c82862313306f65671ec..34ad65a7801f5e76a33e9d6da21a65a270c75f36 100644 (file)
@@ -17,9 +17,9 @@
 #endif
 
 #ifndef CONFIG_CPU_MICROMIPS
-#define GCC_OFF12_ASM() "R"
+#define GCC_OFF_SMALL_ASM() "R"
 #elif __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9)
-#define GCC_OFF12_ASM() "ZC"
+#define GCC_OFF_SMALL_ASM() "ZC"
 #else
 #error "microMIPS compilation unsupported with GCC older than 4.9"
 #endif
index ae6fedcb0060f22c69091480f55228dd2ef4383d..94105d3f58f4882849643cfcb8668857198f2117 100644 (file)
@@ -26,8 +26,8 @@ static inline void atomic_scrub(void *va, u32 size)
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
-               : "=&r" (temp), "=" GCC_OFF12_ASM() (*virt_addr)
-               : GCC_OFF12_ASM() (*virt_addr));
+               : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*virt_addr)
+               : GCC_OFF_SMALL_ASM() (*virt_addr));
 
                virt_addr++;
        }
index ef9987a61d88c62e79e38e3c406a3bbcc42e1a23..f666c0608c114fab14a30a2b0aeb12e2ac645667 100644 (file)
@@ -45,8 +45,8 @@
                "       "__UA_ADDR "\t2b, 4b                    \n"     \
                "       .previous                               \n"     \
                : "=r" (ret), "=&r" (oldval),                           \
-                 "=" GCC_OFF12_ASM() (*uaddr)                          \
-               : "0" (0), GCC_OFF12_ASM() (*uaddr), "Jr" (oparg),      \
+                 "=" GCC_OFF_SMALL_ASM() (*uaddr)                              \
+               : "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg),  \
                  "i" (-EFAULT)                                         \
                : "memory");                                            \
        } else if (cpu_has_llsc) {                                      \
@@ -74,8 +74,8 @@
                "       "__UA_ADDR "\t2b, 4b                    \n"     \
                "       .previous                               \n"     \
                : "=r" (ret), "=&r" (oldval),                           \
-                 "=" GCC_OFF12_ASM() (*uaddr)                          \
-               : "0" (0), GCC_OFF12_ASM() (*uaddr), "Jr" (oparg),      \
+                 "=" GCC_OFF_SMALL_ASM() (*uaddr)                              \
+               : "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg),  \
                  "i" (-EFAULT)                                         \
                : "memory");                                            \
        } else                                                          \
@@ -174,8 +174,8 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                "       "__UA_ADDR "\t1b, 4b                            \n"
                "       "__UA_ADDR "\t2b, 4b                            \n"
                "       .previous                                       \n"
-               : "+r" (ret), "=&r" (val), "=" GCC_OFF12_ASM() (*uaddr)
-               : GCC_OFF12_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
+               : "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr)
+               : GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
                  "i" (-EFAULT)
                : "memory");
        } else if (cpu_has_llsc) {
@@ -203,8 +203,8 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                "       "__UA_ADDR "\t1b, 4b                            \n"
                "       "__UA_ADDR "\t2b, 4b                            \n"
                "       .previous                                       \n"
-               : "+r" (ret), "=&r" (val), "=" GCC_OFF12_ASM() (*uaddr)
-               : GCC_OFF12_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
+               : "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr)
+               : GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
                  "i" (-EFAULT)
                : "memory");
        } else
index 2e54b4bff5cf59e744b9cb3a83e44bca747a9136..90dbe43c8d272d2cc5a95d1e4a2fbf20a4f1d4a6 100644 (file)
@@ -85,8 +85,8 @@ static inline void set_value_reg32(volatile u32 *const addr,
        "       "__beqz"%0, 1b                          \n"
        "       nop                                     \n"
        "       .set    pop                             \n"
-       : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr)
-       : "ir" (~mask), "ir" (value), GCC_OFF12_ASM() (*addr));
+       : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr)
+       : "ir" (~mask), "ir" (value), GCC_OFF_SMALL_ASM() (*addr));
 }
 
 /*
@@ -106,8 +106,8 @@ static inline void set_reg32(volatile u32 *const addr,
        "       "__beqz"%0, 1b                          \n"
        "       nop                                     \n"
        "       .set    pop                             \n"
-       : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr)
-       : "ir" (mask), GCC_OFF12_ASM() (*addr));
+       : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr)
+       : "ir" (mask), GCC_OFF_SMALL_ASM() (*addr));
 }
 
 /*
@@ -127,8 +127,8 @@ static inline void clear_reg32(volatile u32 *const addr,
        "       "__beqz"%0, 1b                          \n"
        "       nop                                     \n"
        "       .set    pop                             \n"
-       : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr)
-       : "ir" (~mask), GCC_OFF12_ASM() (*addr));
+       : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr)
+       : "ir" (~mask), GCC_OFF_SMALL_ASM() (*addr));
 }
 
 /*
@@ -148,8 +148,8 @@ static inline void toggle_reg32(volatile u32 *const addr,
        "       "__beqz"%0, 1b                          \n"
        "       nop                                     \n"
        "       .set    pop                             \n"
-       : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr)
-       : "ir" (mask), GCC_OFF12_ASM() (*addr));
+       : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr)
+       : "ir" (mask), GCC_OFF_SMALL_ASM() (*addr));
 }
 
 /*
@@ -220,8 +220,8 @@ static inline u32 blocking_read_reg32(volatile u32 *const addr)
        "       .set    arch=r4000                      \n"     \
        "1:     ll      %0, %1  #custom_read_reg32      \n"     \
        "       .set    pop                             \n"     \
-       : "=r" (tmp), "=" GCC_OFF12_ASM() (*address)            \
-       : GCC_OFF12_ASM() (*address))
+       : "=r" (tmp), "=" GCC_OFF_SMALL_ASM() (*address)                \
+       : GCC_OFF_SMALL_ASM() (*address))
 
 #define custom_write_reg32(address, tmp)                       \
        __asm__ __volatile__(                                   \
@@ -231,7 +231,7 @@ static inline u32 blocking_read_reg32(volatile u32 *const addr)
        "       "__beqz"%0, 1b                          \n"     \
        "       nop                                     \n"     \
        "       .set    pop                             \n"     \
-       : "=&r" (tmp), "=" GCC_OFF12_ASM() (*address)           \
-       : "0" (tmp), GCC_OFF12_ASM() (*address))
+       : "=&r" (tmp), "=" GCC_OFF_SMALL_ASM() (*address)               \
+       : "0" (tmp), GCC_OFF_SMALL_ASM() (*address))
 
 #endif /* __ASM_REGOPS_H__ */
index 75739c83f07e74bb26ab5dbc0fd32c34401c838b..8d05d90698238e4deb6bc0b649a7929e3d0e2b76 100644 (file)
@@ -275,7 +275,7 @@ static inline void __cvmx_cmd_queue_lock(cvmx_cmd_queue_id_t queue_id,
                " lbu   %[ticket], %[now_serving]\n"
                "4:\n"
                ".set pop\n" :
-               [ticket_ptr] "=" GCC_OFF12_ASM()(__cvmx_cmd_queue_state_ptr->ticket[__cvmx_cmd_queue_get_index(queue_id)]),
+               [ticket_ptr] "=" GCC_OFF_SMALL_ASM()(__cvmx_cmd_queue_state_ptr->ticket[__cvmx_cmd_queue_get_index(queue_id)]),
                [now_serving] "=m"(qptr->now_serving), [ticket] "=r"(tmp),
                [my_ticket] "=r"(my_ticket)
            );
index c6d06d383ef90df1cf7bb8a4f69aaa641b40e7fd..b5238404c05918fbf8c56af26a50ce6abbcd14c2 100644 (file)
@@ -89,7 +89,7 @@ static inline void arch_spin_lock(arch_spinlock_t *lock)
                "        subu   %[ticket], %[ticket], 1                 \n"
                "       .previous                                       \n"
                "       .set pop                                        \n"
-               : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock),
+               : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock),
                  [serving_now_ptr] "+m" (lock->h.serving_now),
                  [ticket] "=&r" (tmp),
                  [my_ticket] "=&r" (my_ticket)
@@ -122,7 +122,7 @@ static inline void arch_spin_lock(arch_spinlock_t *lock)
                "        subu   %[ticket], %[ticket], 1                 \n"
                "       .previous                                       \n"
                "       .set pop                                        \n"
-               : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock),
+               : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock),
                  [serving_now_ptr] "+m" (lock->h.serving_now),
                  [ticket] "=&r" (tmp),
                  [my_ticket] "=&r" (my_ticket)
@@ -164,7 +164,7 @@ static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock)
                "        li     %[ticket], 0                            \n"
                "       .previous                                       \n"
                "       .set pop                                        \n"
-               : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock),
+               : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock),
                  [ticket] "=&r" (tmp),
                  [my_ticket] "=&r" (tmp2),
                  [now_serving] "=&r" (tmp3)
@@ -188,7 +188,7 @@ static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock)
                "        li     %[ticket], 0                            \n"
                "       .previous                                       \n"
                "       .set pop                                        \n"
-               : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock),
+               : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock),
                  [ticket] "=&r" (tmp),
                  [my_ticket] "=&r" (tmp2),
                  [now_serving] "=&r" (tmp3)
@@ -235,8 +235,8 @@ static inline void arch_read_lock(arch_rwlock_t *rw)
                "       beqzl   %1, 1b                                  \n"
                "        nop                                            \n"
                "       .set    reorder                                 \n"
-               : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp)
-               : GCC_OFF12_ASM() (rw->lock)
+               : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
+               : GCC_OFF_SMALL_ASM() (rw->lock)
                : "memory");
        } else {
                do {
@@ -245,8 +245,8 @@ static inline void arch_read_lock(arch_rwlock_t *rw)
                        "       bltz    %1, 1b                          \n"
                        "        addu   %1, 1                           \n"
                        "2:     sc      %1, %0                          \n"
-                       : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp)
-                       : GCC_OFF12_ASM() (rw->lock)
+                       : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
+                       : GCC_OFF_SMALL_ASM() (rw->lock)
                        : "memory");
                } while (unlikely(!tmp));
        }
@@ -269,8 +269,8 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
                "       sub     %1, 1                                   \n"
                "       sc      %1, %0                                  \n"
                "       beqzl   %1, 1b                                  \n"
-               : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp)
-               : GCC_OFF12_ASM() (rw->lock)
+               : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
+               : GCC_OFF_SMALL_ASM() (rw->lock)
                : "memory");
        } else {
                do {
@@ -278,8 +278,8 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
                        "1:     ll      %1, %2  # arch_read_unlock      \n"
                        "       sub     %1, 1                           \n"
                        "       sc      %1, %0                          \n"
-                       : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp)
-                       : GCC_OFF12_ASM() (rw->lock)
+                       : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
+                       : GCC_OFF_SMALL_ASM() (rw->lock)
                        : "memory");
                } while (unlikely(!tmp));
        }
@@ -299,8 +299,8 @@ static inline void arch_write_lock(arch_rwlock_t *rw)
                "       beqzl   %1, 1b                                  \n"
                "        nop                                            \n"
                "       .set    reorder                                 \n"
-               : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp)
-               : GCC_OFF12_ASM() (rw->lock)
+               : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
+               : GCC_OFF_SMALL_ASM() (rw->lock)
                : "memory");
        } else {
                do {
@@ -309,8 +309,8 @@ static inline void arch_write_lock(arch_rwlock_t *rw)
                        "       bnez    %1, 1b                          \n"
                        "        lui    %1, 0x8000                      \n"
                        "2:     sc      %1, %0                          \n"
-                       : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp)
-                       : GCC_OFF12_ASM() (rw->lock)
+                       : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
+                       : GCC_OFF_SMALL_ASM() (rw->lock)
                        : "memory");
                } while (unlikely(!tmp));
        }
@@ -349,8 +349,8 @@ static inline int arch_read_trylock(arch_rwlock_t *rw)
                __WEAK_LLSC_MB
                "       li      %2, 1                                   \n"
                "2:                                                     \n"
-               : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
-               : GCC_OFF12_ASM() (rw->lock)
+               : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
+               : GCC_OFF_SMALL_ASM() (rw->lock)
                : "memory");
        } else {
                __asm__ __volatile__(
@@ -366,8 +366,8 @@ static inline int arch_read_trylock(arch_rwlock_t *rw)
                __WEAK_LLSC_MB
                "       li      %2, 1                                   \n"
                "2:                                                     \n"
-               : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
-               : GCC_OFF12_ASM() (rw->lock)
+               : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
+               : GCC_OFF_SMALL_ASM() (rw->lock)
                : "memory");
        }
 
@@ -393,8 +393,8 @@ static inline int arch_write_trylock(arch_rwlock_t *rw)
                "       li      %2, 1                                   \n"
                "       .set    reorder                                 \n"
                "2:                                                     \n"
-               : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
-               : GCC_OFF12_ASM() (rw->lock)
+               : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
+               : GCC_OFF_SMALL_ASM() (rw->lock)
                : "memory");
        } else {
                do {
@@ -406,9 +406,9 @@ static inline int arch_write_trylock(arch_rwlock_t *rw)
                        "       sc      %1, %0                          \n"
                        "       li      %2, 1                           \n"
                        "2:                                             \n"
-                       : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp),
+                       : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp),
                          "=&r" (ret)
-                       : GCC_OFF12_ASM() (rw->lock)
+                       : GCC_OFF_SMALL_ASM() (rw->lock)
                        : "memory");
                } while (unlikely(!tmp));