1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159

use std::marker::{ Send, Sync, Unsize };
use std::ops::CoerceUnsized;
use std::ptr::Shared;
use std::cell::Cell;
use std::ops::{Deref};
use std::sync::atomic::{AtomicUsize, Ordering};


use std::fmt;

/// Types capable of reference counting by itself
/// Types capable of reference counting by itself
///
/// Implementors embed their own reference count; [`Irc`] drives the count
/// through these hooks on clone/drop.
///
/// # Safety
/// Acquire/release calls must stay balanced: every acquired reference must
/// be released exactly once, and no method may be called after the count
/// has reached zero (implementations in this file free the object at that
/// point, so `self` may be dangling afterwards).
pub trait IntrusiveReferenceCounted {
	/// Increment the reference count by one.
	unsafe fn acquire_ref(&self) { self.multi_acquire_ref(1) }
	/// Decrement the reference count by one.
	unsafe fn release_ref(&self) { self.multi_release_ref(1) }

	/// Increment the reference count by `refs`.
	unsafe fn multi_acquire_ref(&self, refs: usize);
	/// Decrement the reference count by `refs`; the wrapper impls below
	/// free the object when the count reaches zero.
	unsafe fn multi_release_ref(&self, refs: usize);
}

pub struct Irc<T: ?Sized + IntrusiveReferenceCounted>(Shared<T>);

impl<T: ?Sized + IntrusiveReferenceCounted, U: ?Sized + IntrusiveReferenceCounted> CoerceUnsized<Irc<U>> for Irc<T> where T: Unsize<U> {}
	
impl<T: IntrusiveReferenceCounted> Irc<T> {
	/// Wrap `ptr`, acquiring one new reference on the pointee.
	///
	/// # Safety
	/// `ptr` must be non-null (debug-asserted) and point to a live,
	/// correctly reference-counted object.
	pub unsafe fn from_pointer(ptr: Shared<T>) -> Self {
		debug_assert!(!ptr.is_null());
		Self::from_pointer_checked(ptr)
	}

	/// Wrap `ptr` without touching the count: the caller donates one
	/// already-acquired reference to the new `Irc`.
	///
	/// # Safety
	/// Same as [`Irc::from_pointer`], plus the caller must actually own a
	/// reference to hand over (e.g. a freshly allocated count of 1).
	pub unsafe fn from_pointer_preacquired(ptr: Shared<T>) -> Self {
		debug_assert!(!ptr.is_null());
		Self::from_pointer_preacquired_checked(ptr)
	}
}

impl<T: IntrusiveReferenceCounted + ?Sized> Irc<T> {
	/// Wrap `ptr`, acquiring one new reference. No null check here
	/// ("checked" = the caller has already validated the pointer).
	///
	/// # Safety
	/// `ptr` must be non-null and point to a live reference-counted object.
	pub unsafe fn from_pointer_checked(ptr: Shared<T>) -> Self {
		(**ptr).acquire_ref();
		Irc(ptr)
	}

	/// Wrap `ptr`, consuming one reference the caller already holds; the
	/// count is not modified.
	///
	/// # Safety
	/// As [`Irc::from_pointer_checked`], plus the caller must own the
	/// reference being transferred.
	pub unsafe fn from_pointer_preacquired_checked(ptr: Shared<T>) -> Self {
		Irc(ptr)
	}
	/// Expose the raw shared pointer. Unsafe because the caller can use it
	/// to outlive or double-free the managed reference.
	pub unsafe fn inner(&self) -> Shared<T> { self.0 }
}

impl<T: IntrusiveReferenceCounted + ?Sized> Clone for Irc<T> {
	// Cloning bumps the intrusive count; the pointee invariants make
	// `from_pointer_checked` sound here since `self` keeps it alive.
	fn clone(&self) -> Self { unsafe { Irc::from_pointer_checked(self.0) } }
}
impl<T: IntrusiveReferenceCounted + ?Sized> Drop for Irc<T> {
	// Releases this handle's reference; the pointee frees itself when the
	// last reference goes away (see the wrapper impls below).
	fn drop(&mut self) {
		unsafe { (**self.0).release_ref(); }
	}
}
impl<T: IntrusiveReferenceCounted + ?Sized> Deref for Irc<T> {
	type Target = T;
	// Sound while `self` exists: this `Irc` holds a reference keeping the
	// pointee alive.
	fn deref(&self) -> &T {
		unsafe { &**self.0 }
	}
}

// Thread-safety markers. `Irc` behaves like `Arc`: sending one to another
// thread both (a) lets that thread obtain `&T` via `Deref` while other
// clones still exist on the original thread — which requires `T: Sync` —
// and (b) may run the final release/destruction there — which requires
// `T: Send`. The previous `Send where T: Send` bound was unsound: e.g.
// `Irc<IrcWrapped<_>>` (non-atomic `Cell` refcount) satisfied it, and two
// clones dropped on different threads would race on the count. Both impls
// therefore mirror `Arc`'s `T: Send + Sync` requirement.
unsafe impl<T: IntrusiveReferenceCounted + ?Sized> Send for Irc<T> where T: Send + Sync {}
unsafe impl<T: IntrusiveReferenceCounted + ?Sized> Sync for Irc<T> where T: Send + Sync {}



impl<T: fmt::Debug + IntrusiveReferenceCounted + ?Sized> fmt::Debug for Irc<T> {
	/// Transparent forwarding: an `Irc<T>` debug-prints exactly as its
	/// pointee does.
	fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
		fmt::Debug::fmt(&**self, f)
	}
}

impl<T: fmt::Display + IntrusiveReferenceCounted + ?Sized> fmt::Display for Irc<T> {
	/// Transparent forwarding: an `Irc<T>` displays exactly as its
	/// pointee does.
	fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
		fmt::Display::fmt(&**self, f)
	}
}


// --------
// Wrappers

pub struct IrcWrapped<T: ?Sized>(Cell<usize>, T);

impl<T: ?Sized, U> CoerceUnsized<IrcWrapped<U>> for IrcWrapped<T> where T: CoerceUnsized<U> {}

impl<T> IrcWrapped<T> {
	/// Box `value` with an initial reference count of 1 and leak the raw
	/// pointer; ownership of that count passes to the caller.
	fn new_box(value: T) -> *mut Self { Box::into_raw(Box::new(IrcWrapped(Cell::new(1), value))) }
	/// Allocate `value` and hand back an `Irc` owning the initial
	/// reference (hence `preacquired`: the count is already 1).
	pub fn new_irc(value: T) -> Irc<Self> { unsafe { Irc::from_pointer_preacquired(Shared::new(Self::new_box(value))) } }
}
impl<T: ?Sized> IrcWrapped<T> {
	/// Free the allocation holding `self`. Must be called exactly once,
	/// when the count hits zero; `self` is dangling afterwards.
	unsafe fn final_release(&self) { Self::del_box(self as *const Self as *mut Self) }
	/// Reconstitute the owning `Box` and drop it immediately, running
	/// `T`'s destructor and releasing the heap memory.
	unsafe fn del_box(ptr: *mut Self) { Box::from_raw(ptr); }
}

impl<T: ?Sized> IntrusiveReferenceCounted for IrcWrapped<T> {
	// Plain Cell arithmetic: correct only while all clones stay on one
	// thread (enforced by Cell not being Sync).
	unsafe fn multi_acquire_ref(&self, refs: usize) {
		self.0.set(self.0.get() + refs);
	}
	unsafe fn multi_release_ref(&self, refs: usize) {
		if self.0.get() != refs {
			self.0.set(self.0.get() - refs)
		} else {
			// Releasing the last `refs` references: free the object
			// instead of writing a zero count back into freed memory.
			self.final_release();
		}
	}
}

impl<T: fmt::Debug + ?Sized> fmt::Debug for IrcWrapped<T> {
	/// Render as `[value, refcount]` for diagnostics.
	fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
		let count = self.0.get();
		let value = &self.1;
		write!(f, "[{:?}, {:?}]", value, count)
	}
}

impl<T: fmt::Display + ?Sized> fmt::Display for IrcWrapped<T> {
	/// Display only the wrapped value; the count is an implementation
	/// detail and is omitted here.
	fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
		fmt::Display::fmt(&self.1, f)
	}
}


pub struct AircWrapped<T: ?Sized>(AtomicUsize, T);

impl<T: ?Sized, U> CoerceUnsized<AircWrapped<U>> for AircWrapped<T> where T: CoerceUnsized<U> {}

impl<T> AircWrapped<T> {
	/// Box `value` with an initial reference count of 1 and leak the raw
	/// pointer; ownership of that count passes to the caller.
	fn new_box(value: T) -> *mut Self { Box::into_raw(Box::new(AircWrapped(AtomicUsize::new(1), value))) }
	/// Allocate `value` and hand back an `Irc` owning the initial
	/// reference (hence `preacquired`: the count is already 1).
	pub fn new_irc(value: T) -> Irc<Self> { unsafe { Irc::from_pointer_preacquired(Shared::new(Self::new_box(value))) } }
}
impl<T: ?Sized> AircWrapped<T> {
	/// Free the allocation holding `self`. Must be called exactly once,
	/// when the count hits zero; `self` is dangling afterwards.
	unsafe fn final_release(&self) { Self::del_box(self as *const Self as *mut Self) }
	/// Reconstitute the owning `Box` and drop it immediately, running
	/// `T`'s destructor and releasing the heap memory.
	unsafe fn del_box(ptr: *mut Self) { Box::from_raw(ptr); }
}

impl<T: ?Sized> IntrusiveReferenceCounted for AircWrapped<T> {
	unsafe fn multi_acquire_ref(&self, refs: usize) {
		// Relaxed suffices for increments: a new reference can only be
		// created from an existing one, which already keeps the object
		// alive (same reasoning std's `Arc::clone` documents).
		self.0.fetch_add(refs, Ordering::Relaxed);
	}
	unsafe fn multi_release_ref(&self, refs: usize) {
		// fetch_sub returns the PREVIOUS count, so equality with `refs`
		// means this call dropped the count to zero. AcqRel: Release
		// publishes our prior writes to whoever frees, Acquire makes
		// other threads' writes visible before we destroy the object.
		if refs == self.0.fetch_sub(refs, Ordering::AcqRel) {
			self.final_release();
		}
	}
}

impl<T: fmt::Debug + ?Sized> fmt::Debug for AircWrapped<T> {
	/// Render as `[value, refcount]` for diagnostics (count read with a
	/// relaxed load, so it is only a snapshot).
	fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
		let count = self.0.load(Ordering::Relaxed);
		let value = &self.1;
		write!(f, "[{:?}, {:?}]", value, count)
	}
}

impl<T: fmt::Display + ?Sized> fmt::Display for AircWrapped<T> {
	/// Display only the wrapped value; the count is an implementation
	/// detail and is omitted here.
	fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
		fmt::Display::fmt(&self.1, f)
	}
}