/*
 * bitops.c: atomic operations which got too long to be inlined all over
 * the place.
 *
 * Copyright 1999 Philipp Rumpf (prumpf@tux.org)
 * Copyright 2000 Grant Grundler (grundler@cup.hp.com)
 */
1da177e4 LT |
9 | #include <linux/kernel.h> |
10 | #include <linux/spinlock.h> | |
60063497 | 11 | #include <linux/atomic.h> |
1da177e4 LT |
12 | |
#ifdef CONFIG_SMP
/*
 * Hashed array of spinlocks backing the "atomic" operations below on SMP:
 * each operation maps the target pointer to one of these locks via
 * _atomic_spin_lock_irqsave(), so operations on unrelated addresses
 * usually take different locks and do not contend.
 */
arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __ARCH_SPIN_LOCK_UNLOCKED
};
#endif
18 | ||
#ifdef CONFIG_64BIT
/*
 * Atomically exchange the 64-bit word at *ptr with x, serialized by
 * the hashed spinlock selected for ptr.
 *
 * Returns the value *ptr held before the exchange.
 */
unsigned long __xchg64(unsigned long x, unsigned long *ptr)
{
	unsigned long flags;
	unsigned long prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	prev = *ptr;
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);

	return prev;
}
#endif
31 | ||
/*
 * Atomically exchange the 32-bit word at *ptr with x, serialized by
 * the hashed spinlock selected for ptr.
 *
 * Returns the previous value, read through a signed long so it is
 * sign-extended on 64-bit (upstream note: "XXX - sign extension
 * wanted?" — behavior kept as-is).
 */
unsigned long __xchg32(int x, int *ptr)
{
	unsigned long flags;
	long prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	prev = (long)*ptr;
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);

	return (unsigned long)prev;
}
43 | ||
44 | ||
/*
 * Atomically exchange the byte at *ptr with x, serialized by the
 * hashed spinlock selected for ptr.
 *
 * Returns the previous value, read through a signed long so a char
 * with the sign bit set is sign-extended (upstream note: "XXX - sign
 * extension wanted?" — behavior kept as-is).
 */
unsigned long __xchg8(char x, char *ptr)
{
	unsigned long flags;
	long prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	prev = (long)*ptr;
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);

	return (unsigned long)prev;
}
56 | ||
57 | ||
54b66800 | 58 | u64 __cmpxchg_u64(volatile u64 *ptr, u64 old, u64 new) |
1da177e4 LT |
59 | { |
60 | unsigned long flags; | |
54b66800 | 61 | u64 prev; |
1da177e4 LT |
62 | |
63 | _atomic_spin_lock_irqsave(ptr, flags); | |
64 | if ((prev = *ptr) == old) | |
65 | *ptr = new; | |
66 | _atomic_spin_unlock_irqrestore(ptr, flags); | |
67 | return prev; | |
68 | } | |
1da177e4 LT |
69 | |
/*
 * Compare-and-swap on a 32-bit word: store new into *ptr only if *ptr
 * currently equals old, all under the hashed spinlock selected for ptr.
 *
 * Returns the value *ptr held before the (possible) store, widened to
 * unsigned long; callers compare it against old to learn whether the
 * swap happened.
 */
unsigned long __cmpxchg_u32(volatile unsigned int *ptr, unsigned int old, unsigned int new)
{
	unsigned long flags;
	unsigned int prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	prev = *ptr;
	if (prev == old)
		*ptr = new;
	_atomic_spin_unlock_irqrestore(ptr, flags);

	return (unsigned long)prev;
}