/* $Id: atomic.S,v 1.4 2001/11/18 00:12:56 davem Exp $
 * atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999 David S. Miller (davem@redhat.com)
 */

#include <asm/asi.h>
1da177e4 LT |
	.text

	/* Two versions of the atomic routines, one that
	 * does not return a value and does not perform
	 * memory barriers, and a second which returns
	 * a value and does the barriers.
	 */

	/* void atomic_add(int increment, atomic_t *ptr)
	 * In:      %o0 = increment, %o1 = atomic_ptr
	 * Scratch: %g1, %g7, integer condition codes.
	 * No return value, no memory barriers.
	 */
	.globl	atomic_add
	.type	atomic_add,#function
atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
1:	lduw	[%o1], %g1		! %g1 = current 32-bit counter value
	add	%g1, %o0, %g7		! %g7 = %g1 + increment
	cas	[%o1], %g1, %g7		! if (*ptr == %g1) *ptr = %g7; %g7 = old *ptr
	cmp	%g1, %g7		! did another CPU update *ptr under us?
	bne,pn	%icc, 1b		! yes (predicted unlikely) -> retry
	 nop				! branch delay slot
	retl
	 nop				! return delay slot
	.size	atomic_add, .-atomic_add
28 | ||
	/* void atomic_sub(int decrement, atomic_t *ptr)
	 * In:      %o0 = decrement, %o1 = atomic_ptr
	 * Scratch: %g1, %g7, integer condition codes.
	 * No return value, no memory barriers.
	 */
	.globl	atomic_sub
	.type	atomic_sub,#function
atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
1:	lduw	[%o1], %g1		! %g1 = current 32-bit counter value
	sub	%g1, %o0, %g7		! %g7 = %g1 - decrement
	cas	[%o1], %g1, %g7		! if (*ptr == %g1) *ptr = %g7; %g7 = old *ptr
	cmp	%g1, %g7		! did another CPU update *ptr under us?
	bne,pn	%icc, 1b		! yes (predicted unlikely) -> retry
	 nop				! branch delay slot
	retl
	 nop				! return delay slot
	.size	atomic_sub, .-atomic_sub
41 | ||
	/* On SMP we need to use memory barriers to ensure
	 * correct memory operation ordering, nop these out
	 * for uniprocessor.
	 */
#ifdef CONFIG_SMP

/* Barrier before the CAS loop: order earlier stores/loads against
 * the memory operations of the atomic sequence.
 */
#define ATOMIC_PRE_BARRIER	membar #StoreLoad | #LoadLoad;

/* Barrier after the CAS loop: issue the membar in the branch delay
 * slot while jumping backward to the shared "80:" return stub below,
 * so every *_ret routine shares one retl instead of duplicating it.
 * (The "retl; nop" that textually follows each ATOMIC_POST_BARRIER
 * expansion is only reached on !CONFIG_SMP builds.)
 */
#define ATOMIC_POST_BARRIER \
	ba,pt %xcc, 80b; \
	membar #StoreLoad | #StoreStore

80:	retl				! shared return stub for the SMP macro above
	 nop
#else
#define ATOMIC_PRE_BARRIER
#define ATOMIC_POST_BARRIER
#endif
59 | ||
	/* int atomic_add_ret(int increment, atomic_t *ptr)
	 * In:      %o0 = increment, %o1 = atomic_ptr
	 * Out:     %o0 = new counter value, sign-extended to 64 bits.
	 * Scratch: %g1, %g7, integer condition codes.
	 * Performs pre/post memory barriers on SMP builds.
	 */
	.globl	atomic_add_ret
	.type	atomic_add_ret,#function
atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	ATOMIC_PRE_BARRIER
1:	lduw	[%o1], %g1		! %g1 = current 32-bit counter value
	add	%g1, %o0, %g7		! %g7 = %g1 + increment
	cas	[%o1], %g1, %g7		! if (*ptr == %g1) *ptr = %g7; %g7 = old *ptr
	cmp	%g1, %g7		! did another CPU update *ptr under us?
	bne,pn	%icc, 1b		! yes -> retry (delay slot clobbers %g7,
	 add	%g7, %o0, %g7		!  harmless: reloaded at 1b); else %g7 = new value
	sra	%g7, 0, %o0		! sign-extend 32-bit result into 64-bit %o0
	ATOMIC_POST_BARRIER		! SMP: membar in delay slot, branch to 80: retl
	retl				! reached only on !CONFIG_SMP
	 nop
	.size	atomic_add_ret, .-atomic_add_ret
75 | ||
	/* int atomic_sub_ret(int decrement, atomic_t *ptr)
	 * In:      %o0 = decrement, %o1 = atomic_ptr
	 * Out:     %o0 = new counter value, sign-extended to 64 bits.
	 * Scratch: %g1, %g7, integer condition codes.
	 * Performs pre/post memory barriers on SMP builds.
	 */
	.globl	atomic_sub_ret
	.type	atomic_sub_ret,#function
atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	ATOMIC_PRE_BARRIER
1:	lduw	[%o1], %g1		! %g1 = current 32-bit counter value
	sub	%g1, %o0, %g7		! %g7 = %g1 - decrement
	cas	[%o1], %g1, %g7		! if (*ptr == %g1) *ptr = %g7; %g7 = old *ptr
	cmp	%g1, %g7		! did another CPU update *ptr under us?
	bne,pn	%icc, 1b		! yes -> retry (delay slot clobbers %g7,
	 sub	%g7, %o0, %g7		!  harmless: reloaded at 1b); else %g7 = new value
	sra	%g7, 0, %o0		! sign-extend 32-bit result into 64-bit %o0
	ATOMIC_POST_BARRIER		! SMP: membar in delay slot, branch to 80: retl
	retl				! reached only on !CONFIG_SMP
	 nop
	.size	atomic_sub_ret, .-atomic_sub_ret
91 | ||
	/* void atomic64_add(long increment, atomic64_t *ptr)
	 * In:      %o0 = increment, %o1 = atomic_ptr
	 * Scratch: %g1, %g7, extended condition codes.
	 * 64-bit variant: ldx/casx on the full doubleword.
	 * No return value, no memory barriers.
	 */
	.globl	atomic64_add
	.type	atomic64_add,#function
atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
1:	ldx	[%o1], %g1		! %g1 = current 64-bit counter value
	add	%g1, %o0, %g7		! %g7 = %g1 + increment
	casx	[%o1], %g1, %g7		! if (*ptr == %g1) *ptr = %g7; %g7 = old *ptr
	cmp	%g1, %g7		! did another CPU update *ptr under us?
	bne,pn	%xcc, 1b		! yes (predicted unlikely) -> retry
	 nop				! branch delay slot
	retl
	 nop				! return delay slot
	.size	atomic64_add, .-atomic64_add
104 | ||
	/* void atomic64_sub(long decrement, atomic64_t *ptr)
	 * In:      %o0 = decrement, %o1 = atomic_ptr
	 * Scratch: %g1, %g7, extended condition codes.
	 * 64-bit variant: ldx/casx on the full doubleword.
	 * No return value, no memory barriers.
	 */
	.globl	atomic64_sub
	.type	atomic64_sub,#function
atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
1:	ldx	[%o1], %g1		! %g1 = current 64-bit counter value
	sub	%g1, %o0, %g7		! %g7 = %g1 - decrement
	casx	[%o1], %g1, %g7		! if (*ptr == %g1) *ptr = %g7; %g7 = old *ptr
	cmp	%g1, %g7		! did another CPU update *ptr under us?
	bne,pn	%xcc, 1b		! yes (predicted unlikely) -> retry
	 nop				! branch delay slot
	retl
	 nop				! return delay slot
	.size	atomic64_sub, .-atomic64_sub
117 | ||
	/* long atomic64_add_ret(long increment, atomic64_t *ptr)
	 * In:      %o0 = increment, %o1 = atomic_ptr
	 * Out:     %o0 = new 64-bit counter value (no sign-extension needed).
	 * Scratch: %g1, %g7, extended condition codes.
	 * Performs pre/post memory barriers on SMP builds.
	 */
	.globl	atomic64_add_ret
	.type	atomic64_add_ret,#function
atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	ATOMIC_PRE_BARRIER
1:	ldx	[%o1], %g1		! %g1 = current 64-bit counter value
	add	%g1, %o0, %g7		! %g7 = %g1 + increment
	casx	[%o1], %g1, %g7		! if (*ptr == %g1) *ptr = %g7; %g7 = old *ptr
	cmp	%g1, %g7		! did another CPU update *ptr under us?
	bne,pn	%xcc, 1b		! yes -> retry (delay slot clobbers %g7,
	 add	%g7, %o0, %g7		!  harmless: reloaded at 1b); else %g7 = new value
	mov	%g7, %o0		! return the full 64-bit new value
	ATOMIC_POST_BARRIER		! SMP: membar in delay slot, branch to 80: retl
	retl				! reached only on !CONFIG_SMP
	 nop
	.size	atomic64_add_ret, .-atomic64_add_ret
133 | ||
	/* long atomic64_sub_ret(long decrement, atomic64_t *ptr)
	 * In:      %o0 = decrement, %o1 = atomic_ptr
	 * Out:     %o0 = new 64-bit counter value (no sign-extension needed).
	 * Scratch: %g1, %g7, extended condition codes.
	 * Performs pre/post memory barriers on SMP builds.
	 */
	.globl	atomic64_sub_ret
	.type	atomic64_sub_ret,#function
atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	ATOMIC_PRE_BARRIER
1:	ldx	[%o1], %g1		! %g1 = current 64-bit counter value
	sub	%g1, %o0, %g7		! %g7 = %g1 - decrement
	casx	[%o1], %g1, %g7		! if (*ptr == %g1) *ptr = %g7; %g7 = old *ptr
	cmp	%g1, %g7		! did another CPU update *ptr under us?
	bne,pn	%xcc, 1b		! yes -> retry (delay slot clobbers %g7,
	 sub	%g7, %o0, %g7		!  harmless: reloaded at 1b); else %g7 = new value
	mov	%g7, %o0		! return the full 64-bit new value
	ATOMIC_POST_BARRIER		! SMP: membar in delay slot, branch to 80: retl
	retl				! reached only on !CONFIG_SMP
	 nop
	.size	atomic64_sub_ret, .-atomic64_sub_ret