]>
Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | /* |
2 | * atomic32.c: 32-bit atomic_t implementation | |
3 | * | |
4 | * Copyright (C) 2004 Keith M Wesolowski | |
5 | * | |
6 | * Based on asm-parisc/atomic.h Copyright (C) 2000 Philipp Rumpf | |
7 | */ | |
8 | ||
9 | #include <asm/atomic.h> | |
10 | #include <linux/spinlock.h> | |
11 | #include <linux/module.h> | |
12 | ||
#ifdef CONFIG_SMP
/*
 * SMP: hash each atomic_t's address onto a small pool of spinlocks so
 * that unrelated atomics rarely contend on the same lock.  The >>8
 * drops the low address bits (nearby objects share a bucket anyway);
 * the mask selects one of ATOMIC_HASH_SIZE buckets.
 */
#define ATOMIC_HASH_SIZE	4
#define ATOMIC_HASH(a)	(&__atomic_hash[(((unsigned long)a)>>8) & (ATOMIC_HASH_SIZE-1)])

spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = SPIN_LOCK_UNLOCKED
};

#else /* SMP */

/*
 * UP: disabling interrupts alone serializes the critical sections; a
 * single dummy lock keeps the spin_lock_irqsave() call sites below
 * compiling unchanged.
 */
static DEFINE_SPINLOCK(dummy);
#define ATOMIC_HASH_SIZE	1
#define ATOMIC_HASH(a)	(&dummy)

#endif /* SMP */
28 | ||
29 | int __atomic_add_return(int i, atomic_t *v) | |
30 | { | |
31 | int ret; | |
32 | unsigned long flags; | |
33 | spin_lock_irqsave(ATOMIC_HASH(v), flags); | |
34 | ||
35 | ret = (v->counter += i); | |
36 | ||
37 | spin_unlock_irqrestore(ATOMIC_HASH(v), flags); | |
38 | return ret; | |
39 | } | |
4a6dae6d | 40 | EXPORT_SYMBOL(__atomic_add_return); |
1da177e4 | 41 | |
4a6dae6d | 42 | int atomic_cmpxchg(atomic_t *v, int old, int new) |
1da177e4 | 43 | { |
4a6dae6d | 44 | int ret; |
1da177e4 | 45 | unsigned long flags; |
1da177e4 | 46 | |
4a6dae6d NP |
47 | spin_lock_irqsave(ATOMIC_HASH(v), flags); |
48 | ret = v->counter; | |
49 | if (likely(ret == old)) | |
50 | v->counter = new; | |
1da177e4 LT |
51 | |
52 | spin_unlock_irqrestore(ATOMIC_HASH(v), flags); | |
4a6dae6d | 53 | return ret; |
1da177e4 LT |
54 | } |
55 | ||
4a6dae6d NP |
56 | void atomic_set(atomic_t *v, int i) |
57 | { | |
58 | unsigned long flags; | |
1da177e4 | 59 | |
4a6dae6d NP |
60 | spin_lock_irqsave(ATOMIC_HASH(v), flags); |
61 | v->counter = i; | |
62 | spin_unlock_irqrestore(ATOMIC_HASH(v), flags); | |
63 | } | |
64 | EXPORT_SYMBOL(atomic_set); |