/*
 * atomic32.c: 32-bit atomic_t implementation
 *
 * Copyright (C) 2004 Keith M Wesolowski
 * Copyright (C) 2007 Kyle McMartin
 *
 * Based on asm-parisc/atomic.h Copyright (C) 2000 Philipp Rumpf
 */
#include <linux/atomic.h>
#include <linux/spinlock.h>
#include <linux/module.h>
15 #define ATOMIC_HASH_SIZE 4
16 #define ATOMIC_HASH(a) (&__atomic_hash[(((unsigned long)a)>>8) & (ATOMIC_HASH_SIZE-1)])
18 spinlock_t __atomic_hash
[ATOMIC_HASH_SIZE
] = {
19 [0 ... (ATOMIC_HASH_SIZE
-1)] = __SPIN_LOCK_UNLOCKED(__atomic_hash
)
24 static DEFINE_SPINLOCK(dummy
);
25 #define ATOMIC_HASH_SIZE 1
26 #define ATOMIC_HASH(a) (&dummy)
30 #define ATOMIC_OP_RETURN(op, c_op) \
31 int atomic_##op##_return(int i, atomic_t *v) \
34 unsigned long flags; \
35 spin_lock_irqsave(ATOMIC_HASH(v), flags); \
37 ret = (v->counter c_op i); \
39 spin_unlock_irqrestore(ATOMIC_HASH(v), flags); \
42 EXPORT_SYMBOL(atomic_##op##_return);
44 #define ATOMIC_OP(op, c_op) \
45 void atomic_##op(int i, atomic_t *v) \
47 unsigned long flags; \
48 spin_lock_irqsave(ATOMIC_HASH(v), flags); \
52 spin_unlock_irqrestore(ATOMIC_HASH(v), flags); \
54 EXPORT_SYMBOL(atomic_##op);
56 ATOMIC_OP_RETURN(add
, +=)
61 #undef ATOMIC_OP_RETURN
64 int atomic_xchg(atomic_t
*v
, int new)
69 spin_lock_irqsave(ATOMIC_HASH(v
), flags
);
72 spin_unlock_irqrestore(ATOMIC_HASH(v
), flags
);
75 EXPORT_SYMBOL(atomic_xchg
);
77 int atomic_cmpxchg(atomic_t
*v
, int old
, int new)
82 spin_lock_irqsave(ATOMIC_HASH(v
), flags
);
84 if (likely(ret
== old
))
87 spin_unlock_irqrestore(ATOMIC_HASH(v
), flags
);
90 EXPORT_SYMBOL(atomic_cmpxchg
);
92 int __atomic_add_unless(atomic_t
*v
, int a
, int u
)
97 spin_lock_irqsave(ATOMIC_HASH(v
), flags
);
101 spin_unlock_irqrestore(ATOMIC_HASH(v
), flags
);
104 EXPORT_SYMBOL(__atomic_add_unless
);
106 /* Atomic operations are already serializing */
107 void atomic_set(atomic_t
*v
, int i
)
111 spin_lock_irqsave(ATOMIC_HASH(v
), flags
);
113 spin_unlock_irqrestore(ATOMIC_HASH(v
), flags
);
115 EXPORT_SYMBOL(atomic_set
);
117 unsigned long ___set_bit(unsigned long *addr
, unsigned long mask
)
119 unsigned long old
, flags
;
121 spin_lock_irqsave(ATOMIC_HASH(addr
), flags
);
124 spin_unlock_irqrestore(ATOMIC_HASH(addr
), flags
);
128 EXPORT_SYMBOL(___set_bit
);
130 unsigned long ___clear_bit(unsigned long *addr
, unsigned long mask
)
132 unsigned long old
, flags
;
134 spin_lock_irqsave(ATOMIC_HASH(addr
), flags
);
137 spin_unlock_irqrestore(ATOMIC_HASH(addr
), flags
);
141 EXPORT_SYMBOL(___clear_bit
);
143 unsigned long ___change_bit(unsigned long *addr
, unsigned long mask
)
145 unsigned long old
, flags
;
147 spin_lock_irqsave(ATOMIC_HASH(addr
), flags
);
150 spin_unlock_irqrestore(ATOMIC_HASH(addr
), flags
);
154 EXPORT_SYMBOL(___change_bit
);
156 unsigned long __cmpxchg_u32(volatile u32
*ptr
, u32 old
, u32
new)
161 spin_lock_irqsave(ATOMIC_HASH(ptr
), flags
);
162 if ((prev
= *ptr
) == old
)
164 spin_unlock_irqrestore(ATOMIC_HASH(ptr
), flags
);
166 return (unsigned long)prev
;
168 EXPORT_SYMBOL(__cmpxchg_u32
);
170 unsigned long __xchg_u32(volatile u32
*ptr
, u32
new)
175 spin_lock_irqsave(ATOMIC_HASH(ptr
), flags
);
178 spin_unlock_irqrestore(ATOMIC_HASH(ptr
), flags
);
180 return (unsigned long)prev
;
182 EXPORT_SYMBOL(__xchg_u32
);