arch/alpha/include/asm/xchg.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_CMPXCHG_H
#error Do not include xchg.h directly!
#else
/*
 * xchg/xchg_local and cmpxchg/cmpxchg_local share the same code
 * except that the local versions do not have the expensive memory barrier.
 * So this file is included twice from asm/cmpxchg.h.
 */
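
/*
 * Illustration (not part of this header): a minimal sketch of how the
 * double inclusion described above can work.  asm/cmpxchg.h token-pastes
 * a suffix onto the ____xchg()/____cmpxchg() "calls" below before each
 * #include, so the same bodies are emitted once as the *_local() variants
 * and once as the barrier-full variants.  The macro bodies here are an
 * assumption for illustration; see asm/cmpxchg.h for the real ones.
 */
#if 0	/* sketch only */
#define ____xchg(type, args...)		__xchg ## type ## _local(args)
#define ____cmpxchg(type, args...)	__cmpxchg ## type ## _local(args)
#include <asm/xchg.h>			/* emits __xchg_u8_local() etc. */

#undef ____xchg
#undef ____cmpxchg
#define ____xchg(type, args...)		__xchg ## type(args)
#define ____cmpxchg(type, args...)	__cmpxchg ## type(args)
#include <asm/xchg.h>			/* emits __xchg_u8() etc. */
#endif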

/*
 * Atomic exchange.
 * Since it can be used to implement critical sections
 * it must clobber "memory" (also for interrupts in UP).
 */

static inline unsigned long
____xchg(_u8, volatile char *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	insbl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extbl	%2,%4,%0\n"
	"	mskbl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
	__ASM__MB
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}
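
/*
 * Illustration (not part of this header): the byte exchange above in plain
 * C.  Alpha has no byte-sized ll/sc, so the code works on the aligned
 * quadword containing the byte: extract the old byte (extbl), mask it out
 * (mskbl), splice in the new one (insbl/or), and let stq_c retry at label
 * 1: if another CPU touched the word in between.  The helper name is made
 * up, and this version is of course not atomic as written.
 */
#if 0	/* sketch only */
static unsigned long __sketch_xchg_u8(volatile char *m, unsigned long val)
{
	unsigned long *q = (unsigned long *)((unsigned long)m & ~7UL);	/* andnot */
	unsigned long shift = ((unsigned long)m & 7) * 8;
	unsigned long word = *q;					/* ldq_l */
	unsigned long ret = (word >> shift) & 0xff;			/* extbl */

	word &= ~(0xffUL << shift);					/* mskbl */
	word |= (val & 0xff) << shift;					/* insbl + or */
	*q = word;							/* stq_c (retried on failure) */

	return ret;
}
#endif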

static inline unsigned long
____xchg(_u16, volatile short *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	inswl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extwl	%2,%4,%0\n"
	"	mskwl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
	__ASM__MB
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

static inline unsigned long
____xchg(_u32, volatile int *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1:	ldl_l %0,%4\n"
	"	bis $31,%3,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,2f\n"
	__ASM__MB
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}
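
/*
 * Illustration (not part of this header): the aligned 32-bit case needs no
 * byte splicing, so the sequence above is just "load-locked the old value,
 * store-conditional the new one, retry on failure".  Semantically it is
 * close to the GCC builtin below; the kernel does not use the builtin
 * here, and the ordering of the barrier-full variant additionally depends
 * on what asm/cmpxchg.h wraps around these functions.
 */
#if 0	/* sketch only */
static unsigned long __sketch_xchg_u32(volatile int *m, unsigned long val)
{
	return (unsigned long)__atomic_exchange_n(m, (int)val, __ATOMIC_RELAXED);
}
#endif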

static inline unsigned long
____xchg(_u64, volatile long *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1:	ldq_l %0,%4\n"
	"	bis $31,%3,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,2f\n"
	__ASM__MB
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg().  */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____xchg(, volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 1:
		return ____xchg(_u8, ptr, x);
	case 2:
		return ____xchg(_u16, ptr, x);
	case 4:
		return ____xchg(_u32, ptr, x);
	case 8:
		return ____xchg(_u64, ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}
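
/*
 * Illustration (not part of this header): callers never pass "size" by
 * hand.  The public macros in asm/cmpxchg.h pass sizeof(*(ptr)), so with
 * __always_inline the switch above is folded at compile time and only the
 * matching size-specific body remains.  Roughly like the sketch below;
 * the exact macro lives in asm/cmpxchg.h and may differ in details such
 * as barrier placement.
 */
#if 0	/* sketch only */
#define xchg(ptr, x)							\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg((ptr), (unsigned long)_x_,		\
				    sizeof(*(ptr)));			\
})
#endif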

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 *
 * The memory barrier should be placed in SMP only when we actually
 * make the change. If we don't change anything (so if the returned
 * prev is equal to old) then we aren't acquiring anything new and
 * we don't need any memory barrier as far as I can tell.
 */

static inline unsigned long
____cmpxchg(_u8, volatile char *m, unsigned char old, unsigned char new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	insbl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extbl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskbl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	__ASM__MB
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}
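
/*
 * Illustration (not part of this header): the byte cmpxchg above in plain
 * C.  As with xchg, the byte is compared and spliced inside its aligned
 * quadword; note that the conditional store (and, in the barrier-full
 * variant, the memory barrier) is only reached when the comparison
 * succeeds, which is exactly the barrier-placement argument in the
 * comment above.  The helper name is made up and the code is not atomic
 * as written.
 */
#if 0	/* sketch only */
static unsigned long __sketch_cmpxchg_u8(volatile char *m,
					 unsigned char old, unsigned char new)
{
	unsigned long *q = (unsigned long *)((unsigned long)m & ~7UL);
	unsigned long shift = ((unsigned long)m & 7) * 8;
	unsigned long word = *q;				/* ldq_l */
	unsigned long prev = (word >> shift) & 0xff;		/* extbl */

	if (prev == old) {					/* cmpeq/beq */
		word &= ~(0xffUL << shift);			/* mskbl */
		word |= (unsigned long)new << shift;		/* insbl + or */
		*q = word;					/* stq_c (retried on failure) */
	}
	return prev;
}
#endif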

static inline unsigned long
____cmpxchg(_u16, volatile short *m, unsigned short old, unsigned short new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	inswl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extwl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskwl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	__ASM__MB
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u32, volatile int *m, int old, int new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldl_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,3f\n"
	__ASM__MB
	"2:\n"
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u64, volatile long *m, unsigned long old, unsigned long new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldq_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,3f\n"
	__ASM__MB
	"2:\n"
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}
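
/*
 * Illustration (not part of this header): the usual consumer pattern for
 * these primitives, written against the cmpxchg() macro that asm/cmpxchg.h
 * builds on top of ____cmpxchg(): read the current value, compute the
 * desired one, and retry if another CPU changed the word in between.
 */
#if 0	/* sketch only */
static void __sketch_atomic_or(volatile long *p, long bits)
{
	long old, prev;

	do {
		old = *p;
		prev = cmpxchg(p, old, old | bits);
	} while (prev != old);	/* lost the race, another CPU updated *p */
}
#endif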

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg().  */
extern void __cmpxchg_called_with_bad_pointer(void);
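
/*
 * Illustration (not part of this header): a self-contained analogue of the
 * linker-error trick used here and for __xchg_called_with_bad_pointer().
 * Because "size" is a compile-time constant and the dispatcher is
 * __always_inline, an optimizing build folds the switch away; only an
 * unsupported size leaves behind a call to the never-defined function, so
 * the mistake surfaces at link time instead of silently at run time.
 */
#if 0	/* sketch only */
extern void __example_called_with_bad_size(void);	/* deliberately never defined */

static inline int __example_dispatch(int size)
{
	switch (size) {
	case 4:
		return 4;
	}
	__example_called_with_bad_size();	/* dead code for supported sizes */
	return 0;
}

static inline int __example_ok(void)
{
	return __example_dispatch(sizeof(int));	/* links: bad-size call folded away */
}

/* __example_dispatch(3) anywhere would compile, but fail at link time. */
#endif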

static __always_inline unsigned long
____cmpxchg(, volatile void *ptr, unsigned long old, unsigned long new,
	      int size)
{
	switch (size) {
	case 1:
		return ____cmpxchg(_u8, ptr, old, new);
	case 2:
		return ____cmpxchg(_u16, ptr, old, new);
	case 4:
		return ____cmpxchg(_u32, ptr, old, new);
	case 8:
		return ____cmpxchg(_u64, ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#endif