/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2017 - 2018 Andrey Semashev
 */
/*!
 * \file atomic/detail/extra_ops_gcc_arm.hpp
 *
 * This header contains implementation of the extra atomic operations for ARM.
 */
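
/*
 * Usage note (illustrative only, not part of the original header): the size-specific
 * operation classes below back the "extra" members that Boost.Atomic exposes on
 * boost::atomic< T > for integral T, assuming a Boost version that provides them:
 *
 *     boost::atomic< int > a(5);
 *     int old = a.fetch_negate();        // old == 5, a == -5
 *     a.opaque_complement();             // a == 4 (bitwise NOT of -5)
 *     bool nonzero = a.sub_and_test(4);  // a == 0, returns false (result is zero)
 *
 * Everything in this header lives in boost::atomics::detail and is selected through
 * the extra_operations<> specializations defined at the end of each size section.
 */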

#ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_ARM_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_ARM_HPP_INCLUDED_

#include <cstddef>
#include <boost/cstdint.hpp>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/platform.hpp>
#include <boost/atomic/detail/storage_traits.hpp>
#include <boost/atomic/detail/extra_operations_fwd.hpp>
#include <boost/atomic/detail/extra_ops_generic.hpp>
#include <boost/atomic/detail/ops_gcc_arm_common.hpp>
#include <boost/atomic/capabilities.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

namespace boost {
namespace atomics {
namespace detail {

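// Common base for the size-specific implementations below: it forwards to the
// fetch-style and arithmetic operations of the wrapped Base and derives the
// opaque_* and *_and_test variants from them (a non-zero result maps to true).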
template< typename Base >
struct gcc_arm_extra_operations_common :
    public Base
{
    typedef Base base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fetch_negate(storage, order);
    }

    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fetch_complement(storage, order);
    }

    static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::negate(storage, order);
    }

    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::add(storage, v, order);
    }

    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::sub(storage, v, order);
    }

    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::bitwise_and(storage, v, order);
    }

    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::bitwise_or(storage, v, order);
    }

    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::bitwise_xor(storage, v, order);
    }

    static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::bitwise_complement(storage, order);
    }
};

template< typename Base, std::size_t Size, bool Signed >
struct gcc_arm_extra_operations;

#if defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXB_STREXB)

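// 8-bit implementation: works on a 32-bit extended_storage_type because the
// ldrexb/strexb exclusive access instructions zero-extend the byte into a full
// register inside the load-linked/store-conditional retry loop.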
template< typename Base, bool Signed >
struct gcc_arm_extra_operations< Base, 1u, Signed > :
    public generic_extra_operations< Base, 1u, Signed >
{
    typedef generic_extra_operations< Base, 1u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename storage_traits< 4u >::type extended_storage_type;

    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "rsb %[result], %[original], #0\n"          // result = 0 - original
            "strexb %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(original);
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "rsb %[result], %[original], #0\n"          // result = 0 - original
            "strexb %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "add %[result], %[original], %[value]\n"    // result = original + value
            "strexb %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "sub %[result], %[original], %[value]\n"    // result = original - value
            "strexb %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "and %[result], %[original], %[value]\n"    // result = original & value
            "strexb %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "orr %[result], %[original], %[value]\n"    // result = original | value
            "strexb %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "eor %[result], %[original], %[value]\n"    // result = original ^ value
            "strexb %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "mvn %[result], %[original]\n"              // result = NOT original
            "strexb %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(original);
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "mvn %[result], %[original]\n"              // result = NOT original
            "strexb %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }
};

template< typename Base, bool Signed >
struct extra_operations< Base, 1u, Signed, true > :
    public gcc_arm_extra_operations_common< gcc_arm_extra_operations< Base, 1u, Signed > >
{
};

#endif // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXB_STREXB)

#if defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXH_STREXH)

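// 16-bit implementation: same retry loop as above, but using the ldrexh/strexh
// halfword exclusives; values are again widened to a 32-bit extended_storage_type.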
template< typename Base, bool Signed >
struct gcc_arm_extra_operations< Base, 2u, Signed > :
    public generic_extra_operations< Base, 2u, Signed >
{
    typedef generic_extra_operations< Base, 2u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename storage_traits< 4u >::type extended_storage_type;

    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexh %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "rsb %[result], %[original], #0\n"          // result = 0 - original
            "strexh %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(original);
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexh %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "rsb %[result], %[original], #0\n"          // result = 0 - original
            "strexh %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexh %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "add %[result], %[original], %[value]\n"    // result = original + value
            "strexh %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexh %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "sub %[result], %[original], %[value]\n"    // result = original - value
            "strexh %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexh %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "and %[result], %[original], %[value]\n"    // result = original & value
            "strexh %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexh %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "orr %[result], %[original], %[value]\n"    // result = original | value
            "strexh %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexh %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "eor %[result], %[original], %[value]\n"    // result = original ^ value
            "strexh %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexh %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "mvn %[result], %[original]\n"              // result = NOT original
            "strexh %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(original);
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexh %[original], %[storage]\n"          // original = zero_extend(*(&storage))
            "mvn %[result], %[original]\n"              // result = NOT original
            "strexh %[tmp], %[result], %[storage]\n"    // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }
};

template< typename Base, bool Signed >
struct extra_operations< Base, 2u, Signed, true > :
    public gcc_arm_extra_operations_common< gcc_arm_extra_operations< Base, 2u, Signed > >
{
};

#endif // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXH_STREXH)

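// 32-bit implementation: operates directly on storage_type with the word-sized
// ldrex/strex exclusives, so no widening of the operands is needed.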
template< typename Base, bool Signed >
struct gcc_arm_extra_operations< Base, 4u, Signed > :
    public generic_extra_operations< Base, 4u, Signed >
{
    typedef generic_extra_operations< Base, 4u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "rsb %[result], %[original], #0\n"          // result = 0 - original
            "strex %[tmp], %[result], %[storage]\n"     // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "rsb %[result], %[original], #0\n"          // result = 0 - original
            "strex %[tmp], %[result], %[storage]\n"     // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }

    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "add %[result], %[original], %[value]\n"    // result = original + value
            "strex %[tmp], %[result], %[storage]\n"     // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }

    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "sub %[result], %[original], %[value]\n"    // result = original - value
            "strex %[tmp], %[result], %[storage]\n"     // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "and %[result], %[original], %[value]\n"    // result = original & value
            "strex %[tmp], %[result], %[storage]\n"     // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "orr %[result], %[original], %[value]\n"    // result = original | value
            "strex %[tmp], %[result], %[storage]\n"     // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "eor %[result], %[original], %[value]\n"    // result = original ^ value
            "strex %[tmp], %[result], %[storage]\n"     // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            : [value] "Ir" (v)              // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "mvn %[result], %[original]\n"              // result = NOT original
            "strex %[tmp], %[result], %[storage]\n"     // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n"           // original = *(&storage)
            "mvn %[result], %[original]\n"              // result = NOT original
            "strex %[tmp], %[result], %[storage]\n"     // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n"                          // flags = tmp==0
            "bne 1b\n"                                  // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original),  // %0
              [result] "=&r" (result),      // %1
              [tmp] "=&l" (tmp),            // %2
              [storage] "+Q" (storage)      // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }
};

template< typename Base, bool Signed >
struct extra_operations< Base, 4u, Signed, true > :
    public gcc_arm_extra_operations_common< gcc_arm_extra_operations< Base, 4u, Signed > >
{
};

#if defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXD_STREXD)

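// 64-bit implementation: uses the ldrexd/strexd doubleword exclusives on a
// register pair (the "H" operand modifier selects the second register of the
// pair). Negation is done as two's complement across the pair: NOT both halves,
// then add 1 with carry propagation (adds/adc).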
template< typename Base, bool Signed >
struct gcc_arm_extra_operations< Base, 8u, Signed > :
    public generic_extra_operations< Base, 8u, Signed >
{
    typedef generic_extra_operations< Base, 8u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n"                // original = *(&storage)
            "mvn %2, %1\n"                          // result = NOT original
            "mvn %H2, %H1\n"
            "adds %2, %2, #1\n"                     // result = result + 1
            "adc %H2, %H2, #0\n"
            "strexd %0, %2, %H2, [%3]\n"            // *(&storage) = result, tmp = store failed
            "teq %0, #0\n"                          // flags = tmp==0
            "bne 1b\n"                              // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),  // %1
              "=&r" (result)     // %2
            : "r" (&storage)     // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        gcc_arm_operations_base::fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n"                // original = *(&storage)
            "mvn %2, %1\n"                          // result = NOT original
            "mvn %H2, %H1\n"
            "adds %2, %2, #1\n"                     // result = result + 1
            "adc %H2, %H2, #0\n"
            "strexd %0, %2, %H2, [%3]\n"            // *(&storage) = result, tmp = store failed
            "teq %0, #0\n"                          // flags = tmp==0
            "bne 1b\n"                              // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),  // %1
              "=&r" (result)     // %2
            : "r" (&storage)     // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }

    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n"                // original = *(&storage)
            "adds %2, %1, %4\n"                     // result = original + value
            "adc %H2, %H1, %H4\n"
            "strexd %0, %2, %H2, [%3]\n"            // *(&storage) = result, tmp = store failed
            "teq %0, #0\n"                          // flags = tmp==0
            "bne 1b\n"                              // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),  // %1
              "=&r" (result)     // %2
            : "r" (&storage),    // %3
              "r" (v)            // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }

    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n"                // original = *(&storage)
            "subs %2, %1, %4\n"                     // result = original - value
            "sbc %H2, %H1, %H4\n"
            "strexd %0, %2, %H2, [%3]\n"            // *(&storage) = result, tmp = store failed
            "teq %0, #0\n"                          // flags = tmp==0
            "bne 1b\n"                              // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),  // %1
              "=&r" (result)     // %2
            : "r" (&storage),    // %3
              "r" (v)            // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n"                // original = *(&storage)
            "and %2, %1, %4\n"                      // result = original & value
            "and %H2, %H1, %H4\n"
            "strexd %0, %2, %H2, [%3]\n"            // *(&storage) = result, tmp = store failed
            "teq %0, #0\n"                          // flags = tmp==0
            "bne 1b\n"                              // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),  // %1
              "=&r" (result)     // %2
            : "r" (&storage),    // %3
              "r" (v)            // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n"                // original = *(&storage)
            "orr %2, %1, %4\n"                      // result = original | value
            "orr %H2, %H1, %H4\n"
            "strexd %0, %2, %H2, [%3]\n"            // *(&storage) = result, tmp = store failed
            "teq %0, #0\n"                          // flags = tmp==0
            "bne 1b\n"                              // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),  // %1
              "=&r" (result)     // %2
            : "r" (&storage),    // %3
              "r" (v)            // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n"                // original = *(&storage)
            "eor %2, %1, %4\n"                      // result = original ^ value
            "eor %H2, %H1, %H4\n"
            "strexd %0, %2, %H2, [%3]\n"            // *(&storage) = result, tmp = store failed
            "teq %0, #0\n"                          // flags = tmp==0
            "bne 1b\n"                              // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),  // %1
              "=&r" (result)     // %2
            : "r" (&storage),    // %3
              "r" (v)            // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n"                // original = *(&storage)
            "mvn %2, %1\n"                          // result = NOT original
            "mvn %H2, %H1\n"
            "strexd %0, %2, %H2, [%3]\n"            // *(&storage) = result, tmp = store failed
            "teq %0, #0\n"                          // flags = tmp==0
            "bne 1b\n"                              // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),  // %1
              "=&r" (result)     // %2
            : "r" (&storage)     // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        gcc_arm_operations_base::fence_after(order);
        return original;
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n"                // original = *(&storage)
            "mvn %2, %1\n"                          // result = NOT original
            "mvn %H2, %H1\n"
            "strexd %0, %2, %H2, [%3]\n"            // *(&storage) = result, tmp = store failed
            "teq %0, #0\n"                          // flags = tmp==0
            "bne 1b\n"                              // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),  // %1
              "=&r" (result)     // %2
            : "r" (&storage)     // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }
};

template< typename Base, bool Signed >
struct extra_operations< Base, 8u, Signed, true > :
    public gcc_arm_extra_operations_common< gcc_arm_extra_operations< Base, 8u, Signed > >
{
};

#endif // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXD_STREXD)

} // namespace detail
} // namespace atomics
} // namespace boost

#endif // BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_ARM_HPP_INCLUDED_