/*
 * bitops.c: atomic operations which got too long to be inlined all over
 * the place.
 *
 * Copyright 1999 Philipp Rumpf (prumpf@tux.org)
 * Copyright 2000 Grant Grundler (grundler@cup.hp.com)
 */

#include <linux/kernel.h>
#include <linux/spinlock.h>
#include <linux/atomic.h>

#ifdef CONFIG_SMP
/*
 * Hash table of spinlocks used by the _atomic_spin_*() helpers to
 * serialize the atomic emulation paths on SMP; every slot starts out
 * unlocked.
 */
arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __ARCH_SPIN_LOCK_UNLOCKED
};
#endif
18 | ||
#ifdef CONFIG_64BIT
/*
 * Atomically exchange the 64-bit value at *ptr with x under the
 * per-address hashed spinlock; returns the previous value.
 */
unsigned long __xchg64(unsigned long x, unsigned long *ptr)
{
	unsigned long flags;
	unsigned long prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	prev = *ptr;
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);

	return prev;
}
#endif
/*
 * Atomically exchange the 32-bit value at *ptr with x under the
 * per-address hashed spinlock; returns the previous value.  The old
 * value is deliberately read through a signed long, so on 64-bit
 * kernels it comes back sign-extended (the original author flagged
 * this with "XXX - sign extension wanted?").
 */
unsigned long __xchg32(int x, int *ptr)
{
	unsigned long flags;
	long prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	prev = (long) *ptr;	/* XXX - sign extension wanted? */
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);

	return (unsigned long) prev;
}


/*
 * Atomically exchange the byte at *ptr with x under the per-address
 * hashed spinlock; returns the previous value.  As with __xchg32, the
 * old byte is read through a signed long and may come back
 * sign-extended (char signedness is implementation-defined).
 */
unsigned long __xchg8(char x, char *ptr)
{
	unsigned long flags;
	long prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	prev = (long) *ptr;	/* XXX - sign extension wanted? */
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);

	return (unsigned long) prev;
}


#ifdef CONFIG_64BIT
/*
 * Compare-and-exchange on a 64-bit word: under the per-address hashed
 * spinlock, store new into *ptr only if *ptr currently equals old.
 * Returns the value observed at *ptr, so callers detect success by
 * comparing the return value against old.
 */
unsigned long __cmpxchg_u64(volatile unsigned long *ptr, unsigned long old, unsigned long new)
{
	unsigned long flags;
	unsigned long prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	prev = *ptr;
	if (prev == old)
		*ptr = new;
	_atomic_spin_unlock_irqrestore(ptr, flags);

	return prev;
}
#endif
/*
 * Compare-and-exchange on a 32-bit word: under the per-address hashed
 * spinlock, store new into *ptr only if *ptr currently equals old.
 * Returns the observed value (zero-extended to unsigned long); callers
 * detect success by comparing it against old.
 */
unsigned long __cmpxchg_u32(volatile unsigned int *ptr, unsigned int old, unsigned int new)
{
	unsigned long flags;
	unsigned int prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	prev = *ptr;
	if (prev == old)
		*ptr = new;
	_atomic_spin_unlock_irqrestore(ptr, flags);

	return (unsigned long) prev;
}