arc_swap/strategy/hybrid.rs

//! A hybrid strategy.
//!
//! This is based on debts ‒ an Arc may owe a reference, but the owed reference is recorded in a
//! debt slot. The debt is either paid back when the reader stops using the pointer, or, if the
//! pointer gets replaced in the meantime, the writer bumps the reference count and removes the
//! debt.
//!
//! The strategy uses two different kinds of slots for the debts. The first kind is faster, but
//! fallible. If it fails (either because there's interference from a writer at the same time, or
//! because the slots are full), a secondary kind ‒ slower, but always successful ‒ is used. In the
//! latter case, the reference count is bumped and the secondary debt slot is released right away,
//! so it is available for further loads.
//!
//! See the [crate::debt] module for the actual slot manipulation. Here we just wrap them into the
//! strategy.
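//!
//! A minimal usage sketch through the public `ArcSwap` type (which uses this strategy by
//! default):
//!
//! ```rust
//! use arc_swap::ArcSwap;
//!
//! let shared = ArcSwap::from_pointee(42);
//! // `load` claims a debt slot instead of incrementing the reference count.
//! let guard = shared.load();
//! assert_eq!(**guard, 42);
//! // Dropping the guard pays the debt back (or releases the extra reference,
//! // if a writer has already paid the debt for us).
//! drop(guard);
//! ```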

use core::borrow::Borrow;
use core::mem::{self, ManuallyDrop};
use core::ops::Deref;
use core::ptr;
use core::sync::atomic::AtomicPtr;
use core::sync::atomic::Ordering::*;

use super::sealed::{CaS, InnerStrategy, Protected};
use crate::debt::{Debt, LocalNode};
use crate::ref_cnt::RefCnt;
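
/// A protected pointer, the guard-like value produced by loads.
///
/// Invariant (as implied by the code below): when `debt` is `Some`, the reference count inside
/// `ptr` was not incremented on our behalf and the debt slot records that; when it is `None`, we
/// own a full reference of our own.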
pub struct HybridProtection<T: RefCnt> {
    debt: Option<&'static Debt>,
    ptr: ManuallyDrop<T>,
}

impl<T: RefCnt> HybridProtection<T> {
    pub(super) unsafe fn new(ptr: *const T::Base, debt: Option<&'static Debt>) -> Self {
        Self {
            debt,
            ptr: ManuallyDrop::new(T::from_ptr(ptr)),
        }
    }

    /// Try getting a debt into a fast slot.
    #[inline]
    fn attempt(node: &LocalNode, storage: &AtomicPtr<T::Base>) -> Option<Self> {
        // Relaxed is good enough here, see the Acquire below
        let ptr = storage.load(Relaxed);
        // Try to get a debt slot. If not possible, fail.
        let debt = node.new_fast(ptr as usize)?;

        // Acquire to get the data.
        //
        // SeqCst to make sure the storage vs. the debt are well ordered.
        let confirm = storage.load(SeqCst);
        if ptr == confirm {
            // Successfully got a debt.
            // NOTE: we *must* use `confirm` here instead of `ptr`. The addresses may compare
            // equal, but they could have different provenance if the original allocation was
            // freed and the address subsequently reused.
            Some(unsafe { Self::new(confirm, Some(debt)) })
        } else if debt.pay::<T>(ptr) {
            // It changed in the meantime; we return the debt (that is on the outdated pointer,
            // possibly destroyed) and fail.
            None
        } else {
            // It changed in the meantime, but the debt for the previous pointer was already paid
            // for by someone else, so we are fine using it.
            Some(unsafe { Self::new(ptr, None) })
        }
    }

    /// Get a debt slot using the slower but always successful mechanism.
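    ///
    /// Roughly, the protocol below (an informal summary of the code, not extra behavior):
    ///
    /// 1. Claim a helping slot and publish the storage address into it.
    /// 2. Load the current pointer.
    /// 3. Confirm the slot with the loaded pointer; a writer that raced with us may instead hand
    ///    over an already-protected replacement, in which case we pay our own, unused debt back
    ///    right away.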
    fn fallback(node: &LocalNode, storage: &AtomicPtr<T::Base>) -> Self {
        // First, we claim a debt slot and store the address of the atomic pointer there, so the
        // writer can optionally help us out with loading and protecting something.
        let gen = node.new_helping(storage as *const _ as usize);
        // We already synchronized the start of the sequence by SeqCst in the new_helping vs swap on
        // the pointer. We just need to make sure to bring the pointee in (this can be newer than
        // what we got in the Debt).
        let candidate = storage.load(Acquire);

        // Try to replace the debt with our candidate. If it works, we get the debt slot to use. If
        // not, we get a replacement value, already protected, and a debt to take care of.
        match node.confirm_helping(gen, candidate as usize) {
            Ok(debt) => {
                // The fast path -> we got the debt confirmed alright.
                Self::from_inner(unsafe { Self::new(candidate, Some(debt)).into_inner() })
            }
            Err((unused_debt, replacement)) => {
                // The debt is on the candidate we provided and it is unused, so we just pay it
                // back right away.
                if !unused_debt.pay::<T>(candidate) {
                    unsafe { T::dec(candidate) };
                }
                // We got a (possibly) different pointer out. But that one is already protected and
                // the slot is paid back.
                unsafe { Self::new(replacement as *mut _, None) }
            }
        }
    }

    #[inline]
    fn as_ptr(&self) -> *const T::Base {
        T::as_ptr(self.ptr.deref())
    }
}

impl<T: RefCnt> Drop for HybridProtection<T> {
    #[inline]
    fn drop(&mut self) {
        match self.debt.take() {
            // We have our own copy of Arc, so we don't need a protection. Do nothing (but release
            // the Arc below).
            None => (),
            // If we owed something, just return the debt. We don't have a pointer owned, so
            // nothing to release.
            Some(debt) => {
                let ptr = T::as_ptr(&self.ptr);
                if debt.pay::<T>(ptr) {
                    return;
                }
                // But if the debt was already paid for us, we need to release the pointer, as we
                // were effectively already in the Unprotected mode.
            }
        }
        // Equivalent to T::dec(ptr)
        unsafe { ManuallyDrop::drop(&mut self.ptr) };
    }
}

impl<T: RefCnt> Protected<T> for HybridProtection<T> {
    #[inline]
    fn from_inner(ptr: T) -> Self {
        Self {
            debt: None,
            ptr: ManuallyDrop::new(ptr),
        }
    }

    #[inline]
    fn into_inner(mut self) -> T {
        // Drop any debt and release any lock held by the given guard and return a
        // full-featured value that can even outlive the ArcSwap it originated from.
        match self.debt.take() {
            None => (), // We have a fully loaded ref-counted pointer.
            Some(debt) => {
                let ptr = T::inc(&self.ptr);
                if !debt.pay::<T>(ptr) {
                    unsafe { T::dec(ptr) };
                }
            }
        }

        // The ptr::read & forget is something like a cheating move. We can't move the value out
        // directly, because the type has a destructor and Rust doesn't allow that.
        let inner = unsafe { ptr::read(self.ptr.deref()) };
        mem::forget(self);
        inner
    }
}

impl<T: RefCnt> Borrow<T> for HybridProtection<T> {
    #[inline]
    fn borrow(&self) -> &T {
        &self.ptr
    }
}

pub trait Config {
    // Mostly for testing, a way to disable the fast slots.
    const USE_FAST: bool;
}

#[derive(Clone, Default)]
pub struct DefaultConfig;

impl Config for DefaultConfig {
    const USE_FAST: bool = true;
}
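
// As a sketch of how this knob is meant to be used: a hypothetical test-only
// configuration forcing every load onto the fallback path could look like the
// following (the name `FallbackOnly` is illustrative, not part of the crate):
//
// #[derive(Clone, Default)]
// struct FallbackOnly;
//
// impl Config for FallbackOnly {
//     const USE_FAST: bool = false;
// }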

#[derive(Clone, Default)]
pub struct HybridStrategy<Cfg> {
    pub(crate) _config: Cfg,
}

impl<T, Cfg> InnerStrategy<T> for HybridStrategy<Cfg>
where
    T: RefCnt,
    Cfg: Config,
{
    type Protected = HybridProtection<T>;
    unsafe fn load(&self, storage: &AtomicPtr<T::Base>) -> Self::Protected {
        LocalNode::with(|node| {
            let fast = if Cfg::USE_FAST {
                HybridProtection::attempt(node, storage)
            } else {
                None
            };
            fast.unwrap_or_else(|| HybridProtection::fallback(node, storage))
        })
    }
    unsafe fn wait_for_readers(&self, old: *const T::Base, storage: &AtomicPtr<T::Base>) {
        // The pay_all may need to provide fresh replacement values if someone else is loading from
        // this particular storage. We do so in the exact same way, by `load` ‒ it's OK, a writer
        // does not hold a slot and the reader doesn't recurse back into the writer, so we won't
        // run out of slots.
        let replacement = || self.load(storage).into_inner();
        Debt::pay_all::<T, _>(old, storage as *const _ as usize, replacement);
    }
}

impl<T: RefCnt, Cfg: Config> CaS<T> for HybridStrategy<Cfg> {
    unsafe fn compare_and_swap<C: crate::as_raw::AsRaw<T::Base>>(
        &self,
        storage: &AtomicPtr<T::Base>,
        current: C,
        new: T,
    ) -> Self::Protected {
        loop {
            let old = <Self as InnerStrategy<T>>::load(self, storage);
            // Observation of their inequality is enough to make a verdict
            if old.as_ptr() != current.as_raw() {
                return old;
            }
            // If they are still equal, put the new one in.
            let new_raw = T::as_ptr(&new);
            if storage
                .compare_exchange_weak(current.as_raw(), new_raw, SeqCst, Relaxed)
                .is_ok()
            {
                // We successfully put the new value in. The ref count went in there too.
                T::into_ptr(new);
                <Self as InnerStrategy<T>>::wait_for_readers(self, old.as_ptr(), storage);
                // We just got one ref count out of the storage and we have one in old. We don't
                // need two.
                T::dec(old.as_ptr());
                return old;
            }
        }
    }
}
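
// A minimal sketch of the semantics this provides, expressed through the
// public API (assuming the default hybrid strategy; the method calls are real
// `arc_swap` API, the variable names are illustrative):
//
// let shared = arc_swap::ArcSwap::from_pointee(1);
// let current = shared.load();
// let previous = shared.compare_and_swap(&*current, std::sync::Arc::new(2));
// // The swap succeeded iff `previous` points to the same object as `current`;
// // either way, `previous` is a protected guard to the pre-CaS value.
// assert!(std::sync::Arc::ptr_eq(&*previous, &*current));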