use std::marker::PhantomData;
use std::mem;
use std::ptr::drop_in_place;
use std::sync::atomic::{AtomicPtr, Ordering};
use std::sync::Arc;

use crate::collector::{GcData, InternalGcRef, COLLECTOR};
use crate::marker::{GcDeref, GcSafe};
use crate::{Finalize, Gc, Scan, Scanner};

/// An atomic `Gc<T>`, useful for concurrent algorithms
///
/// This has more overhead than an `AtomicPtr`, but it cleanly handles memory management. It is
/// also similar to `Gc<T>` in that it can be cloned, and is therefore easy to share.
///
/// A good analogy is the excellent `arc-swap` crate. However, we can be more performant, since
/// relying on the collector lets us avoid some synchronization.
///
/// `AtomicGc` should be fairly fast, but you may not assume it never blocks: while a garbage
/// collection operation is active, all operations on an `AtomicGc` will block. Otherwise they
/// should not block.
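///
/// # Example
///
/// A minimal sketch of typical usage; it assumes `u32` implements `Scan` and that `Gc::get`
/// provides access to the underlying value, as elsewhere in this crate:
///
/// ```ignore
/// use std::sync::atomic::Ordering;
///
/// let first = Gc::new(1u32);
/// let atomic = AtomicGc::new(&first);
///
/// // Publish a new allocation; concurrent readers observe either the old or the new `Gc`
/// let second = Gc::new(2u32);
/// atomic.store(&second, Ordering::Release);
///
/// let current: Gc<u32> = atomic.load(Ordering::Acquire);
/// assert_eq!(*current.get(), 2);
/// ```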
#[derive(Clone, Debug)]
pub struct AtomicGc<T: Scan> {
    // It is only safe to read the data here if a collection is not happening
    atomic_ptr: Arc<AtomicPtr<GcData>>,
    backing_handle: InternalGcRef,
    _mark: PhantomData<Gc<T>>,
}

impl<T: Scan> AtomicGc<T> {
    /// Create a new `AtomicGc`
    ///
    /// The created `AtomicGc` will point to the same data as `data`
    #[must_use]
    pub fn new(data: &Gc<T>) -> Self {
        // Ensure we don't create an atomic out of dead data...
        data.assert_live();

        // `data` is guaranteed to be pointing to the data we're about to contain, so we don't need to
        // worry about data getting cleaned up (and therefore we don't need to block the collector)

        // Carefully craft a ptr to store atomically
        let data_arc = data.internal_handle_ref().data();
        let data_ptr = Arc::as_ptr(data_arc);

        let atomic_ptr = Arc::new(AtomicPtr::new(data_ptr as _));

        Self {
            atomic_ptr: atomic_ptr.clone(),
            backing_handle: COLLECTOR.new_handle_for_atomic(atomic_ptr),
            _mark: PhantomData,
        }
    }

    pub(crate) fn internal_handle(&self) -> InternalGcRef {
        self.backing_handle.clone()
    }

    /// `load` the data from this `AtomicGc<T>`, getting back a `Gc<T>`
    ///
    /// The ordering/atomicity guarantees are identical to `AtomicPtr::load`
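    ///
    /// # Example
    ///
    /// A minimal sketch (assuming `u32: Scan`):
    ///
    /// ```ignore
    /// use std::sync::atomic::Ordering;
    ///
    /// let gc = Gc::new(7u32);
    /// let atomic = AtomicGc::new(&gc);
    /// // `load` returns a fresh `Gc` rooted at whatever the `AtomicGc` currently points to
    /// let snapshot: Gc<u32> = atomic.load(Ordering::Acquire);
    /// ```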
    #[must_use]
    pub fn load(&self, ordering: Ordering) -> Gc<T> {
        let ptr;
        let internal_handle;
        {
            let _collection_blocker = COLLECTOR.get_collection_blocker_spinlock();

            // Safe to manipulate this ptr only because we have the `_collection_blocker`
            // (And we know this `Arc` still has a pointer in the collector data structures,
            // otherwise someone would be accessing an `AtomicGc` pointing to freed data--which
            // is impossible in safe code.)
            let gc_data_ptr = self.atomic_ptr.load(ordering);
            let gc_data_temp = unsafe { Arc::from_raw(gc_data_ptr) };

            // Create a new `Arc` pointing to the same data, but don't invalidate the existing `Arc`
            // (which is effectively "behind" the pointer)
            let new_gc_data_ref = gc_data_temp.clone();
            mem::forget(gc_data_temp);

            ptr = new_gc_data_ref.scan_ptr().cast();
            internal_handle = COLLECTOR.handle_from_data(new_gc_data_ref);
        }

        Gc::new_raw(internal_handle, ptr)
    }

    /// `store` new data into this `AtomicGc`
    ///
    /// The ordering/atomicity guarantees are identical to `AtomicPtr::store`
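    ///
    /// # Example
    ///
    /// A minimal sketch (assuming `u32: Scan`):
    ///
    /// ```ignore
    /// use std::sync::atomic::Ordering;
    ///
    /// let initial = Gc::new(0u32);
    /// let atomic = AtomicGc::new(&initial);
    /// let replacement = Gc::new(1u32);
    /// // Later loads (with a suitable ordering) observe `replacement`'s allocation
    /// atomic.store(&replacement, Ordering::Release);
    /// ```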
    pub fn store(&self, v: &Gc<T>, ordering: Ordering) {
        // Ensure we're not storing dead data...
        v.assert_live();

        let data = v.internal_handle_ref().data();
        let raw_data_ptr = Arc::as_ptr(data);

        {
            let _collection_blocker = COLLECTOR.get_collection_blocker_spinlock();

            // Safe to manipulate this ptr only because we have the `_collection_blocker`
            // (And we know this `Arc` still has a pointer in the collector data structures,
            // otherwise someone would be accessing an `AtomicGc` pointing to freed data--which
            // is impossible in safe code.)
            self.atomic_ptr.store(raw_data_ptr as _, ordering);
        }
    }

    /// `swap` what data is stored in this `AtomicGc`, getting a `Gc` to the old data back
    ///
    /// The ordering/atomicity guarantees are identical to `AtomicPtr::swap`
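    ///
    /// # Example
    ///
    /// A minimal sketch (assuming `u32: Scan`):
    ///
    /// ```ignore
    /// use std::sync::atomic::Ordering;
    ///
    /// let old = Gc::new(1u32);
    /// let atomic = AtomicGc::new(&old);
    /// let new = Gc::new(2u32);
    /// // Atomically replace the pointer, recovering a `Gc` to the previous data
    /// let previous: Gc<u32> = atomic.swap(&new, Ordering::AcqRel);
    /// ```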
    #[must_use]
    pub fn swap(&self, v: &Gc<T>, ordering: Ordering) -> Gc<T> {
        // Ensure we're not storing dead data...
        v.assert_live();

        let data = v.internal_handle_ref().data();
        let raw_data_ptr = Arc::as_ptr(data);

        let ptr;
        let internal_handle;
        {
            let _collection_blocker = COLLECTOR.get_collection_blocker_spinlock();
            let old_data_ptr = self.atomic_ptr.swap(raw_data_ptr as _, ordering);

            // Safe to manipulate this ptr only because we have the `_collection_blocker`
            // (And we know this `Arc` still has a pointer in the collector data structures,
            // otherwise someone would be accessing an `AtomicGc` pointing to freed data--which
            // is impossible in safe code.)
            let old_data_arc = unsafe { Arc::from_raw(old_data_ptr) };
            let gc_data = old_data_arc.clone();
            mem::forget(old_data_arc);

            ptr = gc_data.scan_ptr().cast();
            internal_handle = COLLECTOR.handle_from_data(gc_data);
        }

        Gc::new_raw(internal_handle, ptr)
    }

    /// Do a CAS operation. If this `AtomicGc` points to the same data as `current`, then after
    /// this operation it will point to the same data as `new`. (And this happens atomically.)
    ///
    /// Data is compared for pointer equality, NOT `Eq` equality. (A swap will only happen if
    /// `current` and this `AtomicGc` point to the same underlying allocation.)
    ///
    /// The ordering/atomicity guarantees are identical to `AtomicPtr::compare_and_swap`. (Note
    /// that `AtomicPtr::compare_and_swap` is deprecated in favor of `compare_exchange`; prefer
    /// `AtomicGc::compare_exchange` where possible.)
    ///
    /// # Returns
    /// Returns `true` if the swap happened and this `AtomicGc` now points to `new`.
    /// Returns `false` if the swap failed, i.e. this `AtomicGc` was not pointing to `current`.
    #[allow(clippy::must_use_candidate)]
    #[allow(deprecated)]
    pub fn compare_and_swap(&self, current: &Gc<T>, new: &Gc<T>, ordering: Ordering) -> bool {
        // Ensure we're not storing dead data...
        new.assert_live();

        // Turn guess data into a raw ptr
        let guess_data = current.internal_handle_ref().data();
        let guess_data_raw = Arc::as_ptr(guess_data) as _;

        // Turn new data into a raw ptr
        let new_data = new.internal_handle_ref().data();
        let new_data_raw = Arc::as_ptr(new_data) as _;

        let compare_res;
        {
            let _collection_blocker = COLLECTOR.get_collection_blocker_spinlock();
            // Safe to manipulate this ptr only because we have the `_collection_blocker`
            // (And we know this `Arc` still has a pointer in the collector data structures,
            // otherwise someone would be accessing an `AtomicGc` pointing to freed data--which
            // is impossible in safe code.)
            compare_res = self
                .atomic_ptr
                .compare_and_swap(guess_data_raw, new_data_raw, ordering);
        }

        compare_res == guess_data_raw
    }

    /// Do a compare-exchange operation. If this `AtomicGc` points to the same data as `current`,
    /// then after this operation it will point to the same data as `new`. (And this happens
    /// atomically.)
    ///
    /// Data is compared for pointer equality, NOT `Eq` equality. (A swap will only happen if
    /// `current` and this `AtomicGc` point to the same underlying allocation.)
    ///
    /// The ordering/atomicity guarantees are identical to `AtomicPtr::compare_exchange`; refer to
    /// that documentation for details about the `success` and `failure` orderings.
    ///
    /// # Returns
    /// Returns `true` if the swap happened and this `AtomicGc` now points to `new`.
    /// Returns `false` if the swap failed, i.e. this `AtomicGc` was not pointing to `current`.
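    ///
    /// # Example
    ///
    /// A minimal sketch of a retry (CAS) loop; it assumes `u32: Scan` and that `Gc::get`
    /// provides access to the underlying value, as elsewhere in this crate:
    ///
    /// ```ignore
    /// use std::sync::atomic::Ordering;
    ///
    /// let start = Gc::new(0u32);
    /// let atomic = AtomicGc::new(&start);
    ///
    /// loop {
    ///     let current = atomic.load(Ordering::Acquire);
    ///     let desired = Gc::new(*current.get() + 1);
    ///     if atomic.compare_exchange(&current, &desired, Ordering::AcqRel, Ordering::Acquire) {
    ///         break;
    ///     }
    ///     // Another thread won the race; loop to retry against the new value
    /// }
    /// ```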
    #[allow(clippy::must_use_candidate)]
    pub fn compare_exchange(
        &self,
        current: &Gc<T>,
        new: &Gc<T>,
        success: Ordering,
        failure: Ordering,
    ) -> bool {
        // Ensure we're not storing dead data...
        new.assert_live();

        let guess_data = current.internal_handle_ref().data();
        let guess_data_raw = Arc::as_ptr(guess_data) as _;

        let new_data = new.internal_handle_ref().data();
        let new_data_raw = Arc::as_ptr(new_data) as _;

        let swap_result;
        {
            let _collection_blocker = COLLECTOR.get_collection_blocker_spinlock();
            // Safe to manipulate this ptr only because we have the `_collection_blocker`
            // (And we know this `Arc` still has a pointer in the collector data structures,
            // otherwise someone would be accessing an `AtomicGc` pointing to freed data--which
            // is impossible in safe code.)
            swap_result =
                self.atomic_ptr
                    .compare_exchange(guess_data_raw, new_data_raw, success, failure);
        }

        swap_result.is_ok()
    }

    // TODO: Compare and swap/compare and exchange that return the current value
}

unsafe impl<T: Scan> Scan for AtomicGc<T> {
    fn scan(&self, scanner: &mut Scanner<'_>) {
        scanner.add_internal_handle(self.internal_handle());
    }
}

unsafe impl<T: Scan> GcSafe for AtomicGc<T> {}
// unsafe impl<T: Scan> !GcDrop for AtomicGc<T> {}
unsafe impl<T: Scan + Send + Sync> GcDeref for AtomicGc<T> {}

unsafe impl<T: Scan> Finalize for AtomicGc<T> {
    unsafe fn finalize(&mut self) {
        drop_in_place(self)
    }
}

impl<T: Scan> Drop for AtomicGc<T> {
    fn drop(&mut self) {
        // Manually cleanup the backing handle...
        self.backing_handle.invalidate();
    }
}