//! A hybrid strategy.
//!
//! This is based on debts ‒ an Arc may owe a reference, but it is marked in the debt. It is either
//! put back (by stopping using it), or, if the pointer is replaced, the writer bumps the reference
//! count and removes the debt.
//!
//! The strategy uses two different kinds of slots for the debts. The first ones are faster, but
//! fallible. If they fail (either because there's interference from a writer at the same time, or
//! because they are full), the secondary kind, which is slower but always succeeds, is used. In the
//! latter case, the reference is bumped and the secondary debt slot is released, so it is available
//! for further loads.
//!
//! See the [crate::debt] module for the actual slot manipulation. Here we just wrap them into the
//! strategy.
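//!
//! As an orientation, here is a minimal sketch of how this strategy is exercised through the
//! public API (assuming the default `ArcSwap` alias, which uses `HybridStrategy` with
//! `DefaultConfig`; marked `ignore` since this module is internal):
//!
//! ```ignore
//! use std::sync::Arc;
//! use arc_swap::ArcSwap;
//!
//! let shared = ArcSwap::from_pointee(42);
//! // A load grabs a debt slot instead of bumping the reference count.
//! let guard = shared.load();
//! assert_eq!(**guard, 42);
//! // A writer replacing the value pays off outstanding debts by bumping the
//! // count on behalf of the readers.
//! shared.store(Arc::new(43));
//! // Dropping the guard returns the debt (or releases the reference, if the
//! // debt was already paid for us).
//! drop(guard);
//! ```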

use std::borrow::Borrow;
use std::mem::{self, ManuallyDrop};
use std::ops::Deref;
use std::ptr;
use std::sync::atomic::AtomicPtr;
use std::sync::atomic::Ordering::*;

use super::sealed::{CaS, InnerStrategy, Protected};
use crate::debt::{Debt, LocalNode};
use crate::ref_cnt::RefCnt;

pub struct HybridProtection<T: RefCnt> {
    debt: Option<&'static Debt>,
    ptr: ManuallyDrop<T>,
}

impl<T: RefCnt> HybridProtection<T> {
    pub(super) unsafe fn new(ptr: *const T::Base, debt: Option<&'static Debt>) -> Self {
        Self {
            debt,
            ptr: ManuallyDrop::new(T::from_ptr(ptr)),
        }
    }

    /// Try getting a debt into a fast slot.
    fn attempt(node: &LocalNode, storage: &AtomicPtr<T::Base>) -> Option<Self> {
        // Relaxed is good enough here, see the Acquire below.
        let ptr = storage.load(Relaxed);
        // Try to get a debt slot. If not possible, fail.
        let debt = node.new_fast(ptr as usize)?;

        // Acquire to get the data.
        //
        // SeqCst to make sure the storage vs. the debt are well ordered.
        let confirm = storage.load(SeqCst);
        if ptr == confirm {
            // Successfully got a debt.
            Some(unsafe { Self::new(ptr, Some(debt)) })
        } else if debt.pay::<T>(ptr) {
            // It changed in the meantime; we return the debt (that is on the outdated pointer,
            // possibly destroyed) and fail.
            None
        } else {
            // It changed in the meantime, but the debt for the previous pointer was already paid
            // for by someone else, so we are fine using it.
            Some(unsafe { Self::new(ptr, None) })
        }
    }

    /// Get a debt slot using the slower, but always successful, mechanism.
    fn fallback(node: &LocalNode, storage: &AtomicPtr<T::Base>) -> Self {
        // First, we claim a debt slot and store the address of the atomic pointer there, so the
        // writer can optionally help us out with loading and protecting something.
        let gen = node.new_helping(storage as *const _ as usize);
        // We already synchronized the start of the sequence by the SeqCst in new_helping vs. the
        // swap on the pointer. We just need to make sure to bring the pointee in (this can be
        // newer than what we got in the Debt).
        let candidate = storage.load(Acquire);

        // Try to replace the debt with our candidate. If it works, we get the debt slot to use.
        // If not, we get a replacement value, already protected, and a debt to take care of.
        match node.confirm_helping(gen, candidate as usize) {
            Ok(debt) => {
                // The fast path -> we got the debt confirmed alright.
                Self::from_inner(unsafe { Self::new(candidate, Some(debt)).into_inner() })
            }
            Err((unused_debt, replacement)) => {
                // The debt is on the candidate we provided and it is unused, so we just pay it
                // back right away.
                if !unused_debt.pay::<T>(candidate) {
                    unsafe { T::dec(candidate) };
                }
                // We got a (possibly) different pointer out. But that one is already protected
                // and the slot is paid back.
                unsafe { Self::new(replacement as *mut _, None) }
            }
        }
    }

    fn as_ptr(&self) -> *const T::Base {
        T::as_ptr(self.ptr.deref())
    }
}

impl<T: RefCnt> Drop for HybridProtection<T> {
    fn drop(&mut self) {
        match self.debt.take() {
            // We have our own copy of the Arc, so we don't need a protection. Do nothing here
            // (but release the Arc below).
            None => (),
            // If we owed something, just return the debt. We don't have a pointer owned, so
            // there's nothing to release.
            Some(debt) => {
                let ptr = T::as_ptr(&self.ptr);
                if debt.pay::<T>(ptr) {
                    return;
                }
                // But if the debt was already paid for us, we need to release the pointer, as we
                // were effectively already in the Unprotected mode.
            }
        }
        // Equivalent to T::dec(ptr).
        unsafe { ManuallyDrop::drop(&mut self.ptr) };
    }
}

impl<T: RefCnt> Protected<T> for HybridProtection<T> {
    fn from_inner(ptr: T) -> Self {
        Self {
            debt: None,
            ptr: ManuallyDrop::new(ptr),
        }
    }

    fn into_inner(mut self) -> T {
        // Drop any debt and release any lock held by the given guard, returning a full-featured
        // value that can even outlive the ArcSwap it originated from.
        match self.debt.take() {
            None => (), // We have a fully loaded ref-counted pointer.
            Some(debt) => {
                let ptr = T::inc(&self.ptr);
                if !debt.pay::<T>(ptr) {
                    unsafe { T::dec(ptr) };
                }
            }
        }

        // The ptr::read & forget is something like a cheating move. We can't move the value out
        // directly, because we have a destructor and Rust doesn't allow us to do that.
        let inner = unsafe { ptr::read(self.ptr.deref()) };
        mem::forget(self);
        inner
    }
}

impl<T: RefCnt> Borrow<T> for HybridProtection<T> {
    fn borrow(&self) -> &T {
        &self.ptr
    }
}

pub trait Config {
    // Mostly for testing; a way to disable the fast slots.
    const USE_FAST: bool;
}

#[derive(Clone, Default)]
pub struct DefaultConfig;

impl Config for DefaultConfig {
    const USE_FAST: bool = true;
}
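
// A hedged sketch of an alternative configuration that forces every load through the fallback
// path, which is useful mostly for testing. The `FallbackOnly` name is hypothetical and not part
// of the crate's public API.
#[derive(Clone, Default)]
pub struct FallbackOnly;

impl Config for FallbackOnly {
    const USE_FAST: bool = false;
}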

#[derive(Clone, Default)]
pub struct HybridStrategy<Cfg> {
    pub(crate) _config: Cfg,
}

impl<T, Cfg> InnerStrategy<T> for HybridStrategy<Cfg>
where
    T: RefCnt,
    Cfg: Config,
{
    type Protected = HybridProtection<T>;

    unsafe fn load(&self, storage: &AtomicPtr<T::Base>) -> Self::Protected {
        LocalNode::with(|node| {
            let fast = if Cfg::USE_FAST {
                HybridProtection::attempt(node, storage)
            } else {
                None
            };
            fast.unwrap_or_else(|| HybridProtection::fallback(node, storage))
        })
    }

    unsafe fn wait_for_readers(&self, old: *const T::Base, storage: &AtomicPtr<T::Base>) {
        // The pay_all may need to provide fresh replacement values if someone else is loading
        // from this particular storage. We do so in the exact same way, by `load` ‒ it's OK, a
        // writer does not hold a slot and the reader doesn't recurse back into the writer, so we
        // won't run out of slots.
        let replacement = || self.load(storage).into_inner();
        Debt::pay_all::<T, _>(old, storage as *const _ as usize, replacement);
    }
}

impl<T: RefCnt, Cfg: Config> CaS<T> for HybridStrategy<Cfg> {
    unsafe fn compare_and_swap<C: crate::as_raw::AsRaw<T::Base>>(
        &self,
        storage: &AtomicPtr<T::Base>,
        current: C,
        new: T,
    ) -> Self::Protected {
        loop {
            let old = <Self as InnerStrategy<T>>::load(self, storage);
            // Observation of their inequality is enough to make a verdict.
            if old.as_ptr() != current.as_raw() {
                return old;
            }
            // If they are still equal, put the new one in.
            let new_raw = T::as_ptr(&new);
            if storage
                .compare_exchange_weak(current.as_raw(), new_raw, SeqCst, Relaxed)
                .is_ok()
            {
                // We successfully put the new value in. The ref count went in there too.
                mem::forget(new);
                <Self as InnerStrategy<T>>::wait_for_readers(self, old.as_ptr(), storage);
                // We just got one ref count out of the storage and we have one in old. We don't
                // need two.
                T::dec(old.as_ptr());
                return old;
            }
        }
    }
}