// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Atomic types
//!
//! Atomic types provide primitive shared-memory communication between
//! threads, and are the building blocks of other concurrent
//! types.
//!
//! This module defines atomic versions of a select number of primitive
//! types, including `AtomicBool`, `AtomicIsize`, and `AtomicUsize`.
//! Atomic types present operations that, when used correctly, synchronize
//! updates between threads.
//!
//! Each method takes an `Ordering` which represents the strength of
//! the memory barrier for that operation. These orderings are the
//! same as [LLVM atomic orderings][1].
//!
//! [1]: http://llvm.org/docs/LangRef.html#memory-model-for-concurrent-operations
//!
//! Atomic variables are safe to share between threads (they implement `Sync`)
//! but they do not themselves provide the mechanism for sharing. The most
//! common way to share an atomic variable is to put it into an `Arc` (an
//! atomically-reference-counted shared pointer).
//!
//! Most atomic types may be stored in static variables, initialized using
//! the provided static initializers like `ATOMIC_BOOL_INIT`. Atomic statics
//! are often used for lazy global initialization.
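//!
//! For instance, a one-time setup guard might look like this (a minimal
//! sketch; `STARTED` and `init_once` are illustrative names):
//!
//! ```
//! use std::sync::atomic::{AtomicBool, Ordering, ATOMIC_BOOL_INIT};
//!
//! static STARTED: AtomicBool = ATOMIC_BOOL_INIT;
//!
//! fn init_once() {
//!     // The first caller sees `false` and wins; later callers see `true`.
//!     if !STARTED.compare_and_swap(false, true, Ordering::SeqCst) {
//!         // ... perform the one-time setup here ...
//!     }
//! }
//! init_once();
//! ```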
//!
//! # Examples
//!
//! A simple spinlock:
//!
//! ```
//! use std::sync::Arc;
//! use std::sync::atomic::{AtomicUsize, Ordering};
//! use std::thread;
//!
//! fn main() {
//!     let spinlock = Arc::new(AtomicUsize::new(1));
//!
//!     let spinlock_clone = spinlock.clone();
//!     thread::spawn(move || {
//!         spinlock_clone.store(0, Ordering::SeqCst);
//!     });
//!
//!     // Wait for the other thread to release the lock
//!     while spinlock.load(Ordering::SeqCst) != 0 {}
//! }
//! ```
//!
//! Keep a global count of live threads:
//!
//! ```
//! use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
//!
//! static GLOBAL_THREAD_COUNT: AtomicUsize = ATOMIC_USIZE_INIT;
//!
//! let old_thread_count = GLOBAL_THREAD_COUNT.fetch_add(1, Ordering::SeqCst);
//! println!("live threads: {}", old_thread_count + 1);
//! ```


#![stable(feature = "rust1", since = "1.0.0")]

use self::Ordering::*;

use marker::Sync;

use intrinsics;
use cell::UnsafeCell;
use marker::PhantomData;

use default::Default;

/// A boolean type which can be safely shared between threads.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct AtomicBool {
    v: UnsafeCell<usize>,
}

impl Default for AtomicBool {
    fn default() -> AtomicBool {
        ATOMIC_BOOL_INIT
    }
}

unsafe impl Sync for AtomicBool {}

/// A signed integer type which can be safely shared between threads.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct AtomicIsize {
    v: UnsafeCell<isize>,
}

impl Default for AtomicIsize {
    fn default() -> AtomicIsize {
        ATOMIC_ISIZE_INIT
    }
}

unsafe impl Sync for AtomicIsize {}

/// An unsigned integer type which can be safely shared between threads.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct AtomicUsize {
    v: UnsafeCell<usize>,
}

impl Default for AtomicUsize {
    fn default() -> AtomicUsize {
        ATOMIC_USIZE_INIT
    }
}

unsafe impl Sync for AtomicUsize {}

/// A raw pointer type which can be safely shared between threads.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct AtomicPtr<T> {
    p: UnsafeCell<usize>,
    _marker: PhantomData<*mut T>,
}

impl<T> Default for AtomicPtr<T> {
    fn default() -> AtomicPtr<T> {
        AtomicPtr::new(::ptr::null_mut())
    }
}

unsafe impl<T> Sync for AtomicPtr<T> {}

/// Atomic memory orderings
///
/// Memory orderings limit the ways that both the compiler and CPU may reorder
/// instructions around atomic operations. At its most restrictive,
/// "sequentially consistent" atomics allow neither reads nor writes
/// to be moved either before or after the atomic operation; on the other end
/// "relaxed" atomics allow all reorderings.
///
/// Rust's memory orderings are [the same as
/// C++'s](http://gcc.gnu.org/wiki/Atomic/GCCMM/AtomicSync).
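///
/// For example, a store tagged `Release` pairs with a load tagged `Acquire`
/// to hand data from one thread to another (a minimal sketch):
///
/// ```
/// use std::sync::Arc;
/// use std::sync::atomic::{AtomicUsize, Ordering};
/// use std::thread;
///
/// let data = Arc::new(AtomicUsize::new(0));
/// let ready = Arc::new(AtomicUsize::new(0));
///
/// let (data2, ready2) = (data.clone(), ready.clone());
/// thread::spawn(move || {
///     data2.store(42, Ordering::Relaxed);
///     ready2.store(1, Ordering::Release); // publish everything written above
/// });
///
/// while ready.load(Ordering::Acquire) == 0 {} // acquire pairs with the release
/// assert_eq!(42, data.load(Ordering::Relaxed));
/// ```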
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Copy, Clone)]
pub enum Ordering {
    /// No ordering constraints, only atomic operations.
    #[stable(feature = "rust1", since = "1.0.0")]
    Relaxed,
    /// When coupled with a store, all previous writes become visible
    /// to another thread that performs a load with `Acquire` ordering
    /// on the same value.
    #[stable(feature = "rust1", since = "1.0.0")]
    Release,
    /// When coupled with a load, all subsequent loads will see data
    /// written before a store with `Release` ordering on the same value
    /// in another thread.
    #[stable(feature = "rust1", since = "1.0.0")]
    Acquire,
    /// When coupled with a load, uses `Acquire` ordering, and with a store
    /// `Release` ordering.
    #[stable(feature = "rust1", since = "1.0.0")]
    AcqRel,
    /// Like `AcqRel` with the additional guarantee that all threads see all
    /// sequentially consistent operations in the same order.
    #[stable(feature = "rust1", since = "1.0.0")]
    SeqCst,
}

/// An `AtomicBool` initialized to `false`.
#[stable(feature = "rust1", since = "1.0.0")]
pub const ATOMIC_BOOL_INIT: AtomicBool =
        AtomicBool { v: UnsafeCell { value: 0 } };
/// An `AtomicIsize` initialized to `0`.
#[stable(feature = "rust1", since = "1.0.0")]
pub const ATOMIC_ISIZE_INIT: AtomicIsize =
        AtomicIsize { v: UnsafeCell { value: 0 } };
/// An `AtomicUsize` initialized to `0`.
#[stable(feature = "rust1", since = "1.0.0")]
pub const ATOMIC_USIZE_INIT: AtomicUsize =
        AtomicUsize { v: UnsafeCell { value: 0 } };

// NB: Needs to be -1 (0b11111111...) to make fetch_nand work correctly
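// If `true` were stored as 1, `fetch_nand` of two `true`s would leave `!1`,
// which is still non-zero and would read back as `true`; with all bits set,
// `!(!0 & !0)` is exactly 0, which correctly reads as `false`.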
const UINT_TRUE: usize = !0;

impl AtomicBool {
    /// Creates a new `AtomicBool`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::AtomicBool;
    ///
    /// let atomic_true = AtomicBool::new(true);
    /// let atomic_false = AtomicBool::new(false);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(v: bool) -> AtomicBool {
        let val = if v { UINT_TRUE } else { 0 };
        AtomicBool { v: UnsafeCell::new(val) }
    }

    /// Loads a value from the bool.
    ///
    /// `load` takes an `Ordering` argument which describes the memory ordering of this operation.
    ///
    /// # Panics
    ///
    /// Panics if `order` is `Release` or `AcqRel`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicBool, Ordering};
    ///
    /// let some_bool = AtomicBool::new(true);
    ///
    /// let value = some_bool.load(Ordering::Relaxed);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn load(&self, order: Ordering) -> bool {
        unsafe { atomic_load(self.v.get(), order) > 0 }
    }

    /// Stores a value into the bool.
    ///
    /// `store` takes an `Ordering` argument which describes the memory ordering of this operation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicBool, Ordering};
    ///
    /// let some_bool = AtomicBool::new(true);
    ///
    /// some_bool.store(false, Ordering::Relaxed);
    /// ```
    ///
    /// # Panics
    ///
    /// Panics if `order` is `Acquire` or `AcqRel`.
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn store(&self, val: bool, order: Ordering) {
        let val = if val { UINT_TRUE } else { 0 };

        unsafe { atomic_store(self.v.get(), val, order); }
    }

    /// Stores a value into the bool, returning the old value.
    ///
    /// `swap` takes an `Ordering` argument which describes the memory ordering of this operation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicBool, Ordering};
    ///
    /// let some_bool = AtomicBool::new(true);
    ///
    /// let value = some_bool.swap(false, Ordering::Relaxed);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn swap(&self, val: bool, order: Ordering) -> bool {
        let val = if val { UINT_TRUE } else { 0 };

        unsafe { atomic_swap(self.v.get(), val, order) > 0 }
    }

    /// Stores a value into the bool if the current value is the same as the expected value.
    ///
    /// The return value is always the previous value. If it is equal to `old`, then the value was
    /// updated.
    ///
    /// `compare_and_swap` also takes an `Ordering` argument which describes the memory ordering of
    /// this operation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicBool, Ordering};
    ///
    /// let some_bool = AtomicBool::new(true);
    ///
    /// let value = some_bool.compare_and_swap(true, false, Ordering::Relaxed);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn compare_and_swap(&self, old: bool, new: bool, order: Ordering) -> bool {
        let old = if old { UINT_TRUE } else { 0 };
        let new = if new { UINT_TRUE } else { 0 };

        unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) > 0 }
    }

    /// Logical "and" with a boolean value.
    ///
    /// Performs a logical "and" operation on the current value and the argument `val`, and sets
    /// the new value to the result.
    ///
    /// Returns the previous value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicBool, Ordering};
    ///
    /// let foo = AtomicBool::new(true);
    /// assert_eq!(true, foo.fetch_and(false, Ordering::SeqCst));
    /// assert_eq!(false, foo.load(Ordering::SeqCst));
    ///
    /// let foo = AtomicBool::new(true);
    /// assert_eq!(true, foo.fetch_and(true, Ordering::SeqCst));
    /// assert_eq!(true, foo.load(Ordering::SeqCst));
    ///
    /// let foo = AtomicBool::new(false);
    /// assert_eq!(false, foo.fetch_and(false, Ordering::SeqCst));
    /// assert_eq!(false, foo.load(Ordering::SeqCst));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn fetch_and(&self, val: bool, order: Ordering) -> bool {
        let val = if val { UINT_TRUE } else { 0 };

        unsafe { atomic_and(self.v.get(), val, order) > 0 }
    }

    /// Logical "nand" with a boolean value.
    ///
    /// Performs a logical "nand" operation on the current value and the argument `val`, and sets
    /// the new value to the result.
    ///
    /// Returns the previous value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicBool, Ordering};
    ///
    /// let foo = AtomicBool::new(true);
    /// assert_eq!(true, foo.fetch_nand(false, Ordering::SeqCst));
    /// assert_eq!(true, foo.load(Ordering::SeqCst));
    ///
    /// let foo = AtomicBool::new(true);
    /// assert_eq!(true, foo.fetch_nand(true, Ordering::SeqCst));
    /// assert_eq!(0, foo.load(Ordering::SeqCst) as usize);
    /// assert_eq!(false, foo.load(Ordering::SeqCst));
    ///
    /// let foo = AtomicBool::new(false);
    /// assert_eq!(false, foo.fetch_nand(false, Ordering::SeqCst));
    /// assert_eq!(true, foo.load(Ordering::SeqCst));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn fetch_nand(&self, val: bool, order: Ordering) -> bool {
        let val = if val { UINT_TRUE } else { 0 };

        unsafe { atomic_nand(self.v.get(), val, order) > 0 }
    }

    /// Logical "or" with a boolean value.
    ///
    /// Performs a logical "or" operation on the current value and the argument `val`, and sets the
    /// new value to the result.
    ///
    /// Returns the previous value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicBool, Ordering};
    ///
    /// let foo = AtomicBool::new(true);
    /// assert_eq!(true, foo.fetch_or(false, Ordering::SeqCst));
    /// assert_eq!(true, foo.load(Ordering::SeqCst));
    ///
    /// let foo = AtomicBool::new(true);
    /// assert_eq!(true, foo.fetch_or(true, Ordering::SeqCst));
    /// assert_eq!(true, foo.load(Ordering::SeqCst));
    ///
    /// let foo = AtomicBool::new(false);
    /// assert_eq!(false, foo.fetch_or(false, Ordering::SeqCst));
    /// assert_eq!(false, foo.load(Ordering::SeqCst));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn fetch_or(&self, val: bool, order: Ordering) -> bool {
        let val = if val { UINT_TRUE } else { 0 };

        unsafe { atomic_or(self.v.get(), val, order) > 0 }
    }

    /// Logical "xor" with a boolean value.
    ///
    /// Performs a logical "xor" operation on the current value and the argument `val`, and sets
    /// the new value to the result.
    ///
    /// Returns the previous value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicBool, Ordering};
    ///
    /// let foo = AtomicBool::new(true);
    /// assert_eq!(true, foo.fetch_xor(false, Ordering::SeqCst));
    /// assert_eq!(true, foo.load(Ordering::SeqCst));
    ///
    /// let foo = AtomicBool::new(true);
    /// assert_eq!(true, foo.fetch_xor(true, Ordering::SeqCst));
    /// assert_eq!(false, foo.load(Ordering::SeqCst));
    ///
    /// let foo = AtomicBool::new(false);
    /// assert_eq!(false, foo.fetch_xor(false, Ordering::SeqCst));
    /// assert_eq!(false, foo.load(Ordering::SeqCst));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn fetch_xor(&self, val: bool, order: Ordering) -> bool {
        let val = if val { UINT_TRUE } else { 0 };

        unsafe { atomic_xor(self.v.get(), val, order) > 0 }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl AtomicIsize {
    /// Creates a new `AtomicIsize`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::AtomicIsize;
    ///
    /// let atomic_forty_two = AtomicIsize::new(42);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(v: isize) -> AtomicIsize {
        AtomicIsize { v: UnsafeCell::new(v) }
    }

    /// Loads a value from the isize.
    ///
    /// `load` takes an `Ordering` argument which describes the memory ordering of this operation.
    ///
    /// # Panics
    ///
    /// Panics if `order` is `Release` or `AcqRel`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicIsize, Ordering};
    ///
    /// let some_isize = AtomicIsize::new(5);
    ///
    /// let value = some_isize.load(Ordering::Relaxed);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn load(&self, order: Ordering) -> isize {
        unsafe { atomic_load(self.v.get(), order) }
    }

    /// Stores a value into the isize.
    ///
    /// `store` takes an `Ordering` argument which describes the memory ordering of this operation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicIsize, Ordering};
    ///
    /// let some_isize = AtomicIsize::new(5);
    ///
    /// some_isize.store(10, Ordering::Relaxed);
    /// ```
    ///
    /// # Panics
    ///
    /// Panics if `order` is `Acquire` or `AcqRel`.
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn store(&self, val: isize, order: Ordering) {
        unsafe { atomic_store(self.v.get(), val, order); }
    }

    /// Stores a value into the isize, returning the old value.
    ///
    /// `swap` takes an `Ordering` argument which describes the memory ordering of this operation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicIsize, Ordering};
    ///
    /// let some_isize = AtomicIsize::new(5);
    ///
    /// let value = some_isize.swap(10, Ordering::Relaxed);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn swap(&self, val: isize, order: Ordering) -> isize {
        unsafe { atomic_swap(self.v.get(), val, order) }
    }

    /// Stores a value into the isize if the current value is the same as the expected value.
    ///
    /// The return value is always the previous value. If it is equal to `old`, then the value was
    /// updated.
    ///
    /// `compare_and_swap` also takes an `Ordering` argument which describes the memory ordering of
    /// this operation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicIsize, Ordering};
    ///
    /// let some_isize = AtomicIsize::new(5);
    ///
    /// let value = some_isize.compare_and_swap(5, 10, Ordering::Relaxed);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn compare_and_swap(&self, old: isize, new: isize, order: Ordering) -> isize {
        unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) }
    }

    /// Add an isize to the current value, returning the previous value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicIsize, Ordering};
    ///
    /// let foo = AtomicIsize::new(0);
    /// assert_eq!(0, foo.fetch_add(10, Ordering::SeqCst));
    /// assert_eq!(10, foo.load(Ordering::SeqCst));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn fetch_add(&self, val: isize, order: Ordering) -> isize {
        unsafe { atomic_add(self.v.get(), val, order) }
    }

    /// Subtract an isize from the current value, returning the previous value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicIsize, Ordering};
    ///
    /// let foo = AtomicIsize::new(0);
    /// assert_eq!(0, foo.fetch_sub(10, Ordering::SeqCst));
    /// assert_eq!(-10, foo.load(Ordering::SeqCst));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn fetch_sub(&self, val: isize, order: Ordering) -> isize {
        unsafe { atomic_sub(self.v.get(), val, order) }
    }

    /// Bitwise and with the current isize, returning the previous value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicIsize, Ordering};
    ///
    /// let foo = AtomicIsize::new(0b101101);
    /// assert_eq!(0b101101, foo.fetch_and(0b110011, Ordering::SeqCst));
    /// assert_eq!(0b100001, foo.load(Ordering::SeqCst));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn fetch_and(&self, val: isize, order: Ordering) -> isize {
        unsafe { atomic_and(self.v.get(), val, order) }
    }

    /// Bitwise or with the current isize, returning the previous value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicIsize, Ordering};
    ///
    /// let foo = AtomicIsize::new(0b101101);
    /// assert_eq!(0b101101, foo.fetch_or(0b110011, Ordering::SeqCst));
    /// assert_eq!(0b111111, foo.load(Ordering::SeqCst));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn fetch_or(&self, val: isize, order: Ordering) -> isize {
        unsafe { atomic_or(self.v.get(), val, order) }
    }

    /// Bitwise xor with the current isize, returning the previous value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicIsize, Ordering};
    ///
    /// let foo = AtomicIsize::new(0b101101);
    /// assert_eq!(0b101101, foo.fetch_xor(0b110011, Ordering::SeqCst));
    /// assert_eq!(0b011110, foo.load(Ordering::SeqCst));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn fetch_xor(&self, val: isize, order: Ordering) -> isize {
        unsafe { atomic_xor(self.v.get(), val, order) }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl AtomicUsize {
    /// Creates a new `AtomicUsize`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::AtomicUsize;
    ///
    /// let atomic_forty_two = AtomicUsize::new(42);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(v: usize) -> AtomicUsize {
        AtomicUsize { v: UnsafeCell::new(v) }
    }

    /// Loads a value from the usize.
    ///
    /// `load` takes an `Ordering` argument which describes the memory ordering of this operation.
    ///
    /// # Panics
    ///
    /// Panics if `order` is `Release` or `AcqRel`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicUsize, Ordering};
    ///
    /// let some_usize = AtomicUsize::new(5);
    ///
    /// let value = some_usize.load(Ordering::Relaxed);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn load(&self, order: Ordering) -> usize {
        unsafe { atomic_load(self.v.get(), order) }
    }

    /// Stores a value into the usize.
    ///
    /// `store` takes an `Ordering` argument which describes the memory ordering of this operation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicUsize, Ordering};
    ///
    /// let some_usize = AtomicUsize::new(5);
    ///
    /// some_usize.store(10, Ordering::Relaxed);
    /// ```
    ///
    /// # Panics
    ///
    /// Panics if `order` is `Acquire` or `AcqRel`.
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn store(&self, val: usize, order: Ordering) {
        unsafe { atomic_store(self.v.get(), val, order); }
    }

    /// Stores a value into the usize, returning the old value.
    ///
    /// `swap` takes an `Ordering` argument which describes the memory ordering of this operation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicUsize, Ordering};
    ///
    /// let some_usize = AtomicUsize::new(5);
    ///
    /// let value = some_usize.swap(10, Ordering::Relaxed);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn swap(&self, val: usize, order: Ordering) -> usize {
        unsafe { atomic_swap(self.v.get(), val, order) }
    }

    /// Stores a value into the usize if the current value is the same as the expected value.
    ///
    /// The return value is always the previous value. If it is equal to `old`, then the value was
    /// updated.
    ///
    /// `compare_and_swap` also takes an `Ordering` argument which describes the memory ordering of
    /// this operation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicUsize, Ordering};
    ///
    /// let some_usize = AtomicUsize::new(5);
    ///
    /// let value = some_usize.compare_and_swap(5, 10, Ordering::Relaxed);
    /// ```
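    ///
    /// Read-modify-write operations without a dedicated `fetch_*` method are
    /// typically built from a `compare_and_swap` retry loop, sketched here
    /// for doubling the value:
    ///
    /// ```
    /// use std::sync::atomic::{AtomicUsize, Ordering};
    ///
    /// let val = AtomicUsize::new(5);
    ///
    /// let mut old = val.load(Ordering::Relaxed);
    /// loop {
    ///     let new = old * 2;
    ///     let prev = val.compare_and_swap(old, new, Ordering::SeqCst);
    ///     if prev == old {
    ///         break; // nobody raced us; the value was updated
    ///     }
    ///     old = prev; // another thread changed it first; retry with what we saw
    /// }
    /// assert_eq!(10, val.load(Ordering::SeqCst));
    /// ```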
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn compare_and_swap(&self, old: usize, new: usize, order: Ordering) -> usize {
        unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) }
    }

    /// Add to the current usize, returning the previous value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicUsize, Ordering};
    ///
    /// let foo = AtomicUsize::new(0);
    /// assert_eq!(0, foo.fetch_add(10, Ordering::SeqCst));
    /// assert_eq!(10, foo.load(Ordering::SeqCst));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn fetch_add(&self, val: usize, order: Ordering) -> usize {
        unsafe { atomic_add(self.v.get(), val, order) }
    }

    /// Subtract from the current usize, returning the previous value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicUsize, Ordering};
    ///
    /// let foo = AtomicUsize::new(10);
    /// assert_eq!(10, foo.fetch_sub(10, Ordering::SeqCst));
    /// assert_eq!(0, foo.load(Ordering::SeqCst));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn fetch_sub(&self, val: usize, order: Ordering) -> usize {
        unsafe { atomic_sub(self.v.get(), val, order) }
    }

    /// Bitwise and with the current usize, returning the previous value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicUsize, Ordering};
    ///
    /// let foo = AtomicUsize::new(0b101101);
    /// assert_eq!(0b101101, foo.fetch_and(0b110011, Ordering::SeqCst));
    /// assert_eq!(0b100001, foo.load(Ordering::SeqCst));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn fetch_and(&self, val: usize, order: Ordering) -> usize {
        unsafe { atomic_and(self.v.get(), val, order) }
    }

    /// Bitwise or with the current usize, returning the previous value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicUsize, Ordering};
    ///
    /// let foo = AtomicUsize::new(0b101101);
    /// assert_eq!(0b101101, foo.fetch_or(0b110011, Ordering::SeqCst));
    /// assert_eq!(0b111111, foo.load(Ordering::SeqCst));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn fetch_or(&self, val: usize, order: Ordering) -> usize {
        unsafe { atomic_or(self.v.get(), val, order) }
    }

    /// Bitwise xor with the current usize, returning the previous value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicUsize, Ordering};
    ///
    /// let foo = AtomicUsize::new(0b101101);
    /// assert_eq!(0b101101, foo.fetch_xor(0b110011, Ordering::SeqCst));
    /// assert_eq!(0b011110, foo.load(Ordering::SeqCst));
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn fetch_xor(&self, val: usize, order: Ordering) -> usize {
        unsafe { atomic_xor(self.v.get(), val, order) }
    }
}

impl<T> AtomicPtr<T> {
    /// Creates a new `AtomicPtr`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::AtomicPtr;
    ///
    /// let ptr = &mut 5;
    /// let atomic_ptr = AtomicPtr::new(ptr);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(p: *mut T) -> AtomicPtr<T> {
        AtomicPtr { p: UnsafeCell::new(p as usize),
                    _marker: PhantomData }
    }

    /// Loads a value from the pointer.
    ///
    /// `load` takes an `Ordering` argument which describes the memory ordering of this operation.
    ///
    /// # Panics
    ///
    /// Panics if `order` is `Release` or `AcqRel`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicPtr, Ordering};
    ///
    /// let ptr = &mut 5;
    /// let some_ptr = AtomicPtr::new(ptr);
    ///
    /// let value = some_ptr.load(Ordering::Relaxed);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn load(&self, order: Ordering) -> *mut T {
        unsafe {
            atomic_load(self.p.get(), order) as *mut T
        }
    }

    /// Stores a value into the pointer.
    ///
    /// `store` takes an `Ordering` argument which describes the memory ordering of this operation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicPtr, Ordering};
    ///
    /// let ptr = &mut 5;
    /// let some_ptr = AtomicPtr::new(ptr);
    ///
    /// let other_ptr = &mut 10;
    ///
    /// some_ptr.store(other_ptr, Ordering::Relaxed);
    /// ```
    ///
    /// # Panics
    ///
    /// Panics if `order` is `Acquire` or `AcqRel`.
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn store(&self, ptr: *mut T, order: Ordering) {
        unsafe { atomic_store(self.p.get(), ptr as usize, order); }
    }

    /// Stores a value into the pointer, returning the old value.
    ///
    /// `swap` takes an `Ordering` argument which describes the memory ordering of this operation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicPtr, Ordering};
    ///
    /// let ptr = &mut 5;
    /// let some_ptr = AtomicPtr::new(ptr);
    ///
    /// let other_ptr = &mut 10;
    ///
    /// let value = some_ptr.swap(other_ptr, Ordering::Relaxed);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn swap(&self, ptr: *mut T, order: Ordering) -> *mut T {
        unsafe { atomic_swap(self.p.get(), ptr as usize, order) as *mut T }
    }

    /// Stores a value into the pointer if the current value is the same as the expected value.
    ///
    /// The return value is always the previous value. If it is equal to `old`, then the value was
    /// updated.
    ///
    /// `compare_and_swap` also takes an `Ordering` argument which describes the memory ordering of
    /// this operation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::{AtomicPtr, Ordering};
    ///
    /// let ptr = &mut 5;
    /// let some_ptr = AtomicPtr::new(ptr);
    ///
    /// let other_ptr = &mut 10;
    /// let another_ptr = &mut 10;
    ///
    /// let value = some_ptr.compare_and_swap(other_ptr, another_ptr, Ordering::Relaxed);
    /// ```
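    ///
    /// A common use is one-shot publication: threads race to install a
    /// pointer over null, so only one value ends up in the slot (a minimal
    /// sketch, using a borrowed local in place of a real allocation):
    ///
    /// ```
    /// use std::ptr;
    /// use std::sync::atomic::{AtomicPtr, Ordering};
    ///
    /// let slot: AtomicPtr<i32> = AtomicPtr::new(ptr::null_mut());
    ///
    /// let fresh = &mut 42;
    /// let prev = slot.compare_and_swap(ptr::null_mut(), fresh, Ordering::SeqCst);
    /// if prev.is_null() {
    ///     // The slot was empty, so `fresh` is now published.
    /// }
    /// ```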
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn compare_and_swap(&self, old: *mut T, new: *mut T, order: Ordering) -> *mut T {
        unsafe {
            atomic_compare_and_swap(self.p.get(), old as usize,
                                    new as usize, order) as *mut T
        }
    }
}

#[inline]
unsafe fn atomic_store<T>(dst: *mut T, val: T, order: Ordering) {
    match order {
        Release => intrinsics::atomic_store_rel(dst, val),
        Relaxed => intrinsics::atomic_store_relaxed(dst, val),
        SeqCst => intrinsics::atomic_store(dst, val),
        Acquire => panic!("there is no such thing as an acquire store"),
        AcqRel => panic!("there is no such thing as an acquire/release store"),
    }
}

#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
unsafe fn atomic_load<T>(dst: *const T, order: Ordering) -> T {
    match order {
        Acquire => intrinsics::atomic_load_acq(dst),
        Relaxed => intrinsics::atomic_load_relaxed(dst),
        SeqCst => intrinsics::atomic_load(dst),
        Release => panic!("there is no such thing as a release load"),
        AcqRel => panic!("there is no such thing as an acquire/release load"),
    }
}

#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
unsafe fn atomic_swap<T>(dst: *mut T, val: T, order: Ordering) -> T {
    match order {
        Acquire => intrinsics::atomic_xchg_acq(dst, val),
        Release => intrinsics::atomic_xchg_rel(dst, val),
        AcqRel => intrinsics::atomic_xchg_acqrel(dst, val),
        Relaxed => intrinsics::atomic_xchg_relaxed(dst, val),
        SeqCst => intrinsics::atomic_xchg(dst, val)
    }
}

/// Returns the old value (like __sync_fetch_and_add).
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
unsafe fn atomic_add<T>(dst: *mut T, val: T, order: Ordering) -> T {
    match order {
        Acquire => intrinsics::atomic_xadd_acq(dst, val),
        Release => intrinsics::atomic_xadd_rel(dst, val),
        AcqRel => intrinsics::atomic_xadd_acqrel(dst, val),
        Relaxed => intrinsics::atomic_xadd_relaxed(dst, val),
        SeqCst => intrinsics::atomic_xadd(dst, val)
    }
}

/// Returns the old value (like __sync_fetch_and_sub).
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
unsafe fn atomic_sub<T>(dst: *mut T, val: T, order: Ordering) -> T {
    match order {
        Acquire => intrinsics::atomic_xsub_acq(dst, val),
        Release => intrinsics::atomic_xsub_rel(dst, val),
        AcqRel => intrinsics::atomic_xsub_acqrel(dst, val),
        Relaxed => intrinsics::atomic_xsub_relaxed(dst, val),
        SeqCst => intrinsics::atomic_xsub(dst, val)
    }
}

#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
unsafe fn atomic_compare_and_swap<T>(dst: *mut T, old: T, new: T, order: Ordering) -> T {
    match order {
        Acquire => intrinsics::atomic_cxchg_acq(dst, old, new),
        Release => intrinsics::atomic_cxchg_rel(dst, old, new),
        AcqRel => intrinsics::atomic_cxchg_acqrel(dst, old, new),
        Relaxed => intrinsics::atomic_cxchg_relaxed(dst, old, new),
        SeqCst => intrinsics::atomic_cxchg(dst, old, new),
    }
}

#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
unsafe fn atomic_and<T>(dst: *mut T, val: T, order: Ordering) -> T {
    match order {
        Acquire => intrinsics::atomic_and_acq(dst, val),
        Release => intrinsics::atomic_and_rel(dst, val),
        AcqRel => intrinsics::atomic_and_acqrel(dst, val),
        Relaxed => intrinsics::atomic_and_relaxed(dst, val),
        SeqCst => intrinsics::atomic_and(dst, val)
    }
}

#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
unsafe fn atomic_nand<T>(dst: *mut T, val: T, order: Ordering) -> T {
    match order {
        Acquire => intrinsics::atomic_nand_acq(dst, val),
        Release => intrinsics::atomic_nand_rel(dst, val),
        AcqRel => intrinsics::atomic_nand_acqrel(dst, val),
        Relaxed => intrinsics::atomic_nand_relaxed(dst, val),
        SeqCst => intrinsics::atomic_nand(dst, val)
    }
}

#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
unsafe fn atomic_or<T>(dst: *mut T, val: T, order: Ordering) -> T {
    match order {
        Acquire => intrinsics::atomic_or_acq(dst, val),
        Release => intrinsics::atomic_or_rel(dst, val),
        AcqRel => intrinsics::atomic_or_acqrel(dst, val),
        Relaxed => intrinsics::atomic_or_relaxed(dst, val),
        SeqCst => intrinsics::atomic_or(dst, val)
    }
}

#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
unsafe fn atomic_xor<T>(dst: *mut T, val: T, order: Ordering) -> T {
    match order {
        Acquire => intrinsics::atomic_xor_acq(dst, val),
        Release => intrinsics::atomic_xor_rel(dst, val),
        AcqRel => intrinsics::atomic_xor_acqrel(dst, val),
        Relaxed => intrinsics::atomic_xor_relaxed(dst, val),
        SeqCst => intrinsics::atomic_xor(dst, val)
    }
}

/// An atomic fence.
///
/// A fence 'A' which has `Release` ordering semantics synchronizes with a
/// fence 'B' with (at least) `Acquire` semantics, if and only if there exist
/// atomic operations X and Y, both operating on some atomic object 'M', such
/// that A is sequenced before X, Y is sequenced before B and Y observes
/// the change to M. This provides a happens-before dependence between A and B.
///
/// Atomic operations with `Release` or `Acquire` semantics can also synchronize
/// with a fence.
///
/// A fence which has `SeqCst` ordering, in addition to having both `Acquire`
/// and `Release` semantics, participates in the global program order of the
/// other `SeqCst` operations and/or fences.
///
/// Accepts `Acquire`, `Release`, `AcqRel` and `SeqCst` orderings.
///
/// # Panics
///
/// Panics if `order` is `Relaxed`.
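///
/// # Examples
///
/// A minimal sketch of the pairing shape, with both fences in a single
/// thread for brevity (in real code they would sit in different threads):
///
/// ```
/// use std::sync::atomic::{AtomicUsize, Ordering, fence};
///
/// let flag = AtomicUsize::new(0);
///
/// fence(Ordering::Release);          // writes above cannot sink below this
/// flag.store(1, Ordering::Relaxed);
///
/// if flag.load(Ordering::Relaxed) == 1 {
///     fence(Ordering::Acquire);      // reads below cannot hoist above this
/// }
/// ```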
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn fence(order: Ordering) {
    unsafe {
        match order {
            Acquire => intrinsics::atomic_fence_acq(),
            Release => intrinsics::atomic_fence_rel(),
            AcqRel => intrinsics::atomic_fence_acqrel(),
            SeqCst => intrinsics::atomic_fence(),
            Relaxed => panic!("there is no such thing as a relaxed fence")
        }
    }
}