#![allow(dead_code)]
use std::any::{Any, TypeId};
use std::fmt;
use std::hash::{Hash, Hasher};
use std::mem::{ManuallyDrop, MaybeUninit};
use std::boxed::Box as Ptr;
use crate::bytes::*;
pub use crate::copy_value::*;
use crate::traits::*;
use crate::vtable::*;
/// Errors that can arise when constructing or assigning type-erased values.
#[derive(Debug)]
pub enum Error {
    /// The value's bytes do not fit into a single pointer-sized word.
    ValueTooLarge,
    /// An operation was attempted between two distinct erased types.
    MismatchedTypes { expected: TypeId, actual: TypeId },
}

impl fmt::Display for Error {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Each arm is an expression yielding the `fmt::Result` directly.
        match self {
            Error::ValueTooLarge => write!(
                f,
                "Value could not fit into a single pointer sized word.\nTry constructing a BoxValue instead."
            ),
            Error::MismatchedTypes { expected, actual } => writeln!(
                f,
                "Trying to assign a value of one type (with TypeId {:?}) to a value of another (with TypeId {:?}).",
                actual, expected
            ),
        }
    }
}

// Marker impl: `Debug + Display` above satisfy the trait's requirements.
impl std::error::Error for Error {}
// The `(DropFn, V)` tuple acts as a vtable that augments a drop-less vtable
// `V` with an explicit drop function: `HasDrop` reads the first field, and
// every other vtable accessor is forwarded to the wrapped `V`.

impl<T: Any> HasDrop for (DropFn, T) {
    #[inline]
    fn drop_fn(&self) -> &DropFn {
        let (drop_fn, _) = self;
        drop_fn
    }
}

impl<V: HasClone> HasClone for (DropFn, V) {
    #[inline]
    fn clone_fn(&self) -> &CloneFn {
        let (_, inner) = self;
        inner.clone_fn()
    }
    #[inline]
    fn clone_from_fn(&self) -> &CloneFromFn {
        let (_, inner) = self;
        inner.clone_from_fn()
    }
    #[inline]
    fn clone_into_raw_fn(&self) -> &CloneIntoRawFn {
        let (_, inner) = self;
        inner.clone_into_raw_fn()
    }
}

impl<V: HasHash> HasHash for (DropFn, V) {
    #[inline]
    fn hash_fn(&self) -> &HashFn {
        let (_, inner) = self;
        inner.hash_fn()
    }
}

impl<V: HasPartialEq> HasPartialEq for (DropFn, V) {
    #[inline]
    fn eq_fn(&self) -> &EqFn {
        let (_, inner) = self;
        inner.eq_fn()
    }
}

// `HasEq` is a pure marker trait; there is nothing to forward.
impl<V: HasEq> HasEq for (DropFn, V) {}

impl<V: HasDebug> HasDebug for (DropFn, V) {
    #[inline]
    fn fmt_fn(&self) -> &FmtFn {
        let (_, inner) = self;
        inner.fmt_fn()
    }
}
/// A type-erased value that owns its storage.
///
/// `B` is the underlying byte storage (a single word for `SmallValue`, a
/// heap-allocated byte slice for `BoxValue`), and `V` is the vtable type
/// recording how to drop — and optionally clone/hash/compare/format — the
/// erased value.
pub struct Value<B, V>
where
    B: GetBytesMut + DropAsAligned,
    V: ?Sized + HasDrop,
{
    // Wrapped in `ManuallyDrop` because the `Drop` impl below must run the
    // vtable's drop function over the bytes before releasing the storage.
    pub(crate) bytes: ManuallyDrop<B>,
    // `TypeId` of the erased value; validates downcasts and cross-value ops.
    pub(crate) type_id: TypeId,
    // Alignment of the original type; passed to `drop_as_aligned` when the
    // storage is released (see the `Drop` impl).
    pub(crate) alignment: usize,
    // Boxed vtable, freed manually in `Drop` after the value is destroyed.
    pub(crate) vtable: ManuallyDrop<Ptr<V>>,
}
/// A `Value` whose bytes fit in a single pointer-sized word.
pub type SmallValue<V> = Value<MaybeUninit<usize>, V>;
/// A `Value` whose bytes live in a heap-allocated byte slice.
pub type BoxValue<V> = Value<Box<[MaybeUninit<u8>]>, V>;
impl<V: HasDrop> SmallValue<V> {
    /// Attempts to build a word-sized type-erased value from `value`.
    ///
    /// Returns `None` when the bytes of `T` cannot be packed into a single
    /// pointer-sized word (see `try_into_usize`).
    ///
    /// NOTE(review): `value` is wrapped in `ManuallyDrop` up front, so if
    /// `try_into_usize` fails the original value appears to be leaked rather
    /// than dropped — confirm against `try_into_usize`'s contract.
    #[inline]
    pub fn try_new<T: Any + DropBytes>(value: T) -> Option<Value<MaybeUninit<usize>, V>>
    where
        V: VTable<T>,
    {
        // Prevent a double drop: on success the returned `Value` owns the
        // bytes and is responsible for destroying them via its vtable.
        let val = ManuallyDrop::new(value);
        val.try_into_usize().map(|usized_value| Value {
            bytes: ManuallyDrop::new(usized_value),
            type_id: TypeId::of::<T>(),
            alignment: std::mem::align_of::<T>(),
            vtable: ManuallyDrop::new(Ptr::new(V::build_vtable())),
        })
    }
    /// Builds a word-sized type-erased value from `value`.
    ///
    /// # Panics
    ///
    /// Panics if the value does not fit into a single pointer-sized word;
    /// use `try_new` to handle that case gracefully.
    #[inline]
    pub fn new<T: Any + DropBytes>(value: T) -> Value<MaybeUninit<usize>, V>
    where
        V: VTable<T>,
    {
        Self::try_new(value).unwrap()
    }
}
impl<V: ?Sized + HasDrop> SmallValue<V> {
    /// Reassembles a `SmallValue` from raw parts, e.g. those produced by
    /// `into_raw_parts`.
    ///
    /// # Safety
    ///
    /// `bytes` must contain a valid value of the type identified by `type_id`
    /// with the given `alignment`, and `vtable` must have been built for that
    /// same type.
    #[inline]
    pub(crate) unsafe fn from_raw_parts(
        bytes: MaybeUninit<usize>,
        type_id: TypeId,
        alignment: usize,
        vtable: Ptr<V>,
    ) -> Value<MaybeUninit<usize>, V> {
        Value {
            bytes: ManuallyDrop::new(bytes),
            type_id,
            alignment,
            vtable: ManuallyDrop::new(vtable),
        }
    }
    /// Converts the vtable type from `V` to `U` while leaving the stored
    /// bytes untouched.
    #[inline]
    pub fn upcast<U: HasDrop + From<V>>(self) -> SmallValue<U>
    where
        V: Clone,
    {
        // Inhibit `self`'s `Drop` impl: the output takes over ownership of
        // the stored bytes, and the old vtable is freed manually below.
        let mut md = ManuallyDrop::new(self);
        let output = Value {
            // `ManuallyDrop<MaybeUninit<usize>>` is `Copy`, so this just
            // copies the word out of `md`.
            bytes: md.bytes,
            type_id: md.type_id,
            alignment: md.alignment,
            // Clone the old vtable's contents into a freshly boxed `U`.
            vtable: ManuallyDrop::new(Ptr::new(U::from((**md.vtable).clone()))),
        };
        // Release the original vtable box; the bytes themselves must NOT be
        // dropped here since `output` now owns the value.
        unsafe {
            ManuallyDrop::drop(&mut md.vtable);
        }
        output
    }
    /// Decomposes this value into `(bytes, type_id, alignment, vtable)`
    /// without running any destructors; the caller becomes responsible for
    /// destroying the stored value.
    #[inline]
    pub fn into_raw_parts(self) -> (MaybeUninit<usize>, TypeId, usize, Ptr<V>) {
        let mut md = ManuallyDrop::new(self);
        // SAFETY: each field is taken exactly once and `md` suppresses the
        // normal `Drop` impl, so no double-drop can occur.
        let vtable = unsafe { ManuallyDrop::take(&mut md.vtable) };
        let bytes = unsafe { ManuallyDrop::take(&mut md.bytes) };
        (bytes, md.type_id, md.alignment, vtable)
    }
}
impl<V: HasDrop> BoxValue<V> {
    /// Builds a heap-allocated type-erased value from `value`.
    ///
    /// Unlike `SmallValue::new`, this cannot fail for large types: the value
    /// is boxed and its allocation reinterpreted as raw bytes.
    #[inline]
    pub fn new<T: Any + DropBytes>(value: T) -> Value<Box<[MaybeUninit<u8>]>, V>
    where
        V: VTable<T>,
    {
        Value {
            bytes: ManuallyDrop::new(Bytes::box_into_box_bytes(Box::new(value))),
            type_id: TypeId::of::<T>(),
            alignment: std::mem::align_of::<T>(),
            vtable: ManuallyDrop::new(Ptr::new(V::build_vtable())),
        }
    }
}
impl<V: ?Sized + HasDrop> BoxValue<V> {
    /// Reassembles a `BoxValue` from raw parts, e.g. those produced by
    /// `into_raw_parts`.
    ///
    /// # Safety
    ///
    /// `bytes` must contain a valid value of the type identified by `type_id`
    /// with the given `alignment`, and `vtable` must have been built for that
    /// same type.
    #[inline]
    pub(crate) unsafe fn from_raw_parts(
        bytes: Box<[MaybeUninit<u8>]>,
        type_id: TypeId,
        alignment: usize,
        vtable: Ptr<V>,
    ) -> Value<Box<[MaybeUninit<u8>]>, V> {
        Value {
            bytes: ManuallyDrop::new(bytes),
            type_id,
            alignment,
            vtable: ManuallyDrop::new(vtable),
        }
    }
    /// Converts the vtable type from `V` to `U` while leaving the stored
    /// bytes untouched.
    #[inline]
    pub fn upcast<U: HasDrop + From<V>>(self) -> BoxValue<U>
    where
        V: Clone,
    {
        // Inhibit `self`'s `Drop` impl: the output takes over ownership of
        // the boxed bytes, and the old vtable is freed manually below.
        let mut md = ManuallyDrop::new(self);
        let output = Value {
            // Move the boxed byte storage into the new value.
            bytes: ManuallyDrop::new(unsafe { ManuallyDrop::take(&mut md.bytes) }),
            type_id: md.type_id,
            alignment: md.alignment,
            // Clone the old vtable's contents into a freshly boxed `U`.
            vtable: ManuallyDrop::new(Ptr::new(U::from((**md.vtable).clone()))),
        };
        // Release the original vtable box; the stored value itself must NOT
        // be dropped here since `output` now owns it.
        unsafe {
            ManuallyDrop::drop(&mut md.vtable);
        }
        output
    }
    /// Decomposes this value into `(bytes, type_id, alignment, vtable)`
    /// without running any destructors; the caller becomes responsible for
    /// destroying the stored value.
    #[inline]
    pub fn into_raw_parts(self) -> (Box<[MaybeUninit<u8>]>, TypeId, usize, Ptr<V>) {
        let mut md = ManuallyDrop::new(self);
        // SAFETY: each field is taken exactly once and `md` suppresses the
        // normal `Drop` impl, so no double-drop can occur.
        let vtable = unsafe { ManuallyDrop::take(&mut md.vtable) };
        let bytes = unsafe { ManuallyDrop::take(&mut md.bytes) };
        (bytes, md.type_id, md.alignment, vtable)
    }
}
impl<B: GetBytesMut + DropAsAligned, V: ?Sized + HasDrop> Value<B, V> {
    /// Borrows this value as a type-erased shared reference.
    #[inline]
    pub fn as_ref(&self) -> ValueRef<V> {
        ValueRef {
            bytes: self.bytes.get_bytes_ref(),
            type_id: self.type_id,
            alignment: self.alignment,
            // Borrow the existing vtable instead of cloning it.
            vtable: VTableRef::Ref(&self.vtable),
        }
    }
    /// Borrows this value as a type-erased mutable reference.
    #[inline]
    pub fn as_mut(&mut self) -> ValueMut<V> {
        ValueMut {
            bytes: self.bytes.get_bytes_mut(),
            type_id: self.type_id,
            alignment: self.alignment,
            vtable: VTableRef::Ref(&self.vtable),
        }
    }
}
// SAFETY: the `HasSend` marker bound on the vtable is taken to certify that
// the erased type is `Send` — confirm this contract in the vtable module.
unsafe impl<B: GetBytesMut + DropAsAligned, V: ?Sized + HasDrop + HasSend> Send for Value<B, V> {}
// SAFETY: analogously, `HasSync` is taken to certify the erased type is `Sync`.
unsafe impl<B: GetBytesMut + DropAsAligned, V: ?Sized + HasDrop + HasSync> Sync for Value<B, V> {}
impl<B: GetBytesMut + DropAsAligned, V: ?Sized + HasDebug + HasDrop> fmt::Debug for Value<B, V> {
    /// Formats the erased value using the vtable's debug-format function.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // SAFETY: the stored vtable was built for the type behind `bytes`.
        unsafe { self.vtable.fmt_fn()(self.bytes.get_bytes_ref(), f) }
    }
}
impl<V: ?Sized + Clone + HasClone + HasDrop> Clone for Value<MaybeUninit<usize>, V> {
    /// Deep-clones the stored value into a new word-sized `Value` using the
    /// erased type's own clone function.
    #[inline]
    fn clone(&self) -> Value<MaybeUninit<usize>, V> {
        self.as_ref().clone_small_value()
    }
}
impl<V: ?Sized + Clone + HasClone + HasDrop> Clone for Value<Box<[MaybeUninit<u8>]>, V> {
    /// Deep-clones the stored value into a new heap-allocated `Value` using
    /// the erased type's own clone function.
    #[inline]
    fn clone(&self) -> Value<Box<[MaybeUninit<u8>]>, V> {
        self.as_ref().clone_value()
    }
}
impl<B: GetBytesMut + DropAsAligned, V: ?Sized + HasDrop> Drop for Value<B, V> {
    #[inline]
    fn drop(&mut self) {
        unsafe {
            // First destroy the stored value itself via its vtable...
            self.vtable.drop_fn()(self.bytes.get_bytes_mut());
            // ...then release the byte storage using the original alignment...
            self.bytes.drop_as_aligned(self.alignment);
            // ...and finally free the boxed vtable.
            ManuallyDrop::drop(&mut self.vtable);
        }
    }
}
impl<B: GetBytesMut + DropAsAligned, V: ?Sized + HasDrop + HasPartialEq> PartialEq for Value<B, V> {
    /// Two values are equal only when they erase the same type and the
    /// type's own equality function deems their bytes equal.
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        if self.type_id != other.type_id {
            return false;
        }
        // SAFETY: both operands hold values of the same type, checked above.
        unsafe { self.vtable.eq_fn()(self.bytes.get_bytes_ref(), other.bytes.get_bytes_ref()) }
    }
}
// NOTE(review): this `Eq` impl only requires `HasPartialEq`, whereas the
// reference wrappers generated by `impl_value_ref_traits!` require `HasEq`
// for `Eq` — confirm the asymmetry is intended.
impl<B: GetBytesMut + DropAsAligned, V: ?Sized + HasDrop + HasPartialEq> Eq for Value<B, V> {}
impl<B: GetBytesMut + DropAsAligned, V: ?Sized + HasDrop + HasHash> Hash for Value<B, V> {
    /// Hashes the stored bytes with the erased type's own hash function.
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        unsafe {
            self.vtable.hash_fn()(self.bytes.get_bytes_ref(), state);
        }
    }
}
impl<B: GetBytesMut + DropAsAligned, V: ?Sized + HasDrop> Value<B, V> {
    // Shared base methods used throughout this module (e.g. `is`,
    // `downcast_with`, `value_type_id`), generated by `impl_value_base!`.
    impl_value_base!();
}
impl<V: ?Sized + HasDrop> Value<Box<[MaybeUninit<u8>]>, V> {
    /// Downcasts this value into a `Box<T>`, or returns `None` when the
    /// stored value is not a `T`.
    ///
    /// On failure the value is dropped normally (running its vtable drop
    /// function and releasing its storage), so nothing is leaked.
    #[inline]
    pub fn downcast<T: 'static>(self) -> Option<Box<T>> {
        if self.is::<T>() {
            // Inhibit `Drop`: the boxed bytes are converted into a `Box<T>`
            // that takes over ownership of the stored value.
            let mut s = ManuallyDrop::new(self);
            // SAFETY: the `is` check above guarantees the bytes hold a valid `T`.
            let boxed = unsafe { Bytes::box_from_box_bytes(ManuallyDrop::take(&mut s.bytes)) };
            // Free the now-unused boxed vtable.
            unsafe {
                ManuallyDrop::drop(&mut s.vtable);
            }
            Some(boxed)
        } else {
            // Fix: the previous implementation suppressed `Drop` on the
            // mismatch path too, leaking the stored value and its storage.
            // Letting `self` fall out of scope here runs the full `Drop` impl.
            None
        }
    }
}
impl<V: ?Sized + HasDrop> Value<MaybeUninit<usize>, V> {
    /// Downcasts this word-sized value into a `T`, or returns `None` when
    /// the stored value is not a `T`.
    ///
    /// On failure the value is dropped normally (running its vtable drop
    /// function), so nothing is leaked.
    #[inline]
    pub fn downcast<T: 'static>(self) -> Option<T> {
        if self.is::<T>() {
            // Inhibit `Drop`: ownership of the stored value moves into the
            // returned `T`.
            let mut s = ManuallyDrop::new(self);
            // SAFETY: the `is` check above guarantees the word holds a valid
            // `T`, which was packed via `try_into_usize` on construction.
            let output = unsafe { Bytes::try_from_usize(ManuallyDrop::take(&mut s.bytes)) };
            // Free the now-unused boxed vtable.
            unsafe {
                ManuallyDrop::drop(&mut s.vtable);
            }
            output
        } else {
            // Fix: the previous implementation suppressed `Drop` on the
            // mismatch path too, leaking the stored value. Letting `self`
            // fall out of scope here runs the full `Drop` impl.
            None
        }
    }
}
// Generates `Send`/`Sync` plus forwarding `Hash`/`Debug`/`PartialEq`/`Eq`
// impls for a value-reference type whose fields are `bytes`, `type_id` and
// `vtable`. Optional marker bounds (e.g. `HasDrop`) may be appended after
// the colon; they are added to every generated impl's vtable parameter.
macro_rules! impl_value_ref_traits {
    ($value_ref:ident : $($maybe_drop:ident)*) => {
        // SAFETY: `HasSend`/`HasSync` on the vtable are taken to certify the
        // referenced type is `Send`/`Sync` (see the owned `Value` impls above).
        unsafe impl<'a, V: ?Sized + HasSend $( + $maybe_drop)*> Send for $value_ref<'a, V> {}
        unsafe impl<'a, V: ?Sized + HasSync $( + $maybe_drop)*> Sync for $value_ref<'a, V> {}
        impl<'a, V: ?Sized + HasHash $( + $maybe_drop)*> Hash for $value_ref<'a, V> {
            #[inline]
            fn hash<H: Hasher>(&self, state: &mut H) {
                unsafe {
                    self.vtable.as_ref().hash_fn()(self.bytes.get_bytes_ref(), state);
                }
            }
        }
        impl<'a, V: ?Sized + HasDebug $( + $maybe_drop)*> fmt::Debug for $value_ref<'a, V> {
            #[inline]
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                unsafe { self.vtable.as_ref().fmt_fn()(self.bytes.get_bytes_ref(), f) }
            }
        }
        impl<'a, V: ?Sized + HasPartialEq $( + $maybe_drop)*> PartialEq for $value_ref<'a, V> {
            #[inline]
            fn eq(&self, other: &Self) -> bool {
                // References to values of different erased types are never equal.
                if self.type_id != other.type_id {
                    return false;
                }
                unsafe {
                    self.vtable.as_ref().eq_fn()(self.bytes.get_bytes_ref(), other.bytes.get_bytes_ref())
                }
            }
        }
        impl<'a, V: ?Sized + HasEq $( + $maybe_drop)*> Eq for $value_ref<'a, V> {}
    }
}
// Copy-value references never drop their contents, so no `HasDrop` bound.
impl_value_ref_traits!(CopyValueMut:);
impl_value_ref_traits!(CopyValueRef:);
/// A type-erased shared reference to a value stored elsewhere.
///
/// Cloning a `ValueRef` clones the reference itself (including its vtable
/// handle), not the referenced value; use `clone_value` for a deep copy.
#[derive(Clone)]
pub struct ValueRef<'a, V>
where
    V: ?Sized + HasDrop,
{
    pub(crate) bytes: &'a [MaybeUninit<u8>],
    pub(crate) type_id: TypeId,
    pub(crate) alignment: usize,
    // Either borrows an existing vtable or owns a boxed one.
    pub(crate) vtable: VTableRef<'a, V>,
}
impl_value_ref_traits!(ValueRef: HasDrop);
impl<'a, V: HasDrop> ValueRef<'a, V> {
    /// Creates a type-erased reference to `typed`, building a fresh boxed
    /// vtable for `T`.
    #[inline]
    pub fn new<T: Any + DropBytes>(typed: &'a T) -> ValueRef<'a, V>
    where
        V: VTable<T>,
    {
        ValueRef {
            bytes: typed.as_bytes(),
            type_id: TypeId::of::<T>(),
            alignment: std::mem::align_of::<T>(),
            vtable: VTableRef::Box(Box::new(V::build_vtable())),
        }
    }
}
// NOTE(review): `B: GetBytesMut + DropAsAligned` is required by the `Value`
// type itself; the additional `GetBytesRef` bound in the where clause is
// what this impl actually exercises.
impl<'a, B: GetBytesMut + DropAsAligned, V> From<&'a Value<B, V>> for ValueRef<'a, V>
where
    B: GetBytesRef,
    V: ?Sized + Clone + HasDrop,
{
    /// Borrows an owned `Value` as a `ValueRef`, cloning its boxed vtable so
    /// the reference carries its own copy.
    fn from(val: &'a Value<B, V>) -> Self {
        ValueRef {
            bytes: val.bytes.get_bytes_ref(),
            type_id: val.type_id,
            alignment: val.alignment,
            vtable: Ptr::clone(&val.vtable).into(),
        }
    }
}
impl<'a, V: ?Sized + HasDrop> ValueRef<'a, V> {
    // Shared base methods (e.g. `is`, `downcast_with`, `value_type_id`),
    // generated by `impl_value_base!`.
    impl_value_base!();
    /// Builds a `ValueRef` directly from raw parts.
    ///
    /// # Safety
    ///
    /// `bytes` must represent a valid value of the type identified by
    /// `type_id` with the given `alignment`, and `vtable` must match that
    /// type.
    #[inline]
    pub(crate) unsafe fn from_raw_parts(
        bytes: &'a [MaybeUninit<u8>],
        type_id: TypeId,
        alignment: usize,
        vtable: impl Into<VTableRef<'a, V>>,
    ) -> ValueRef<'a, V> {
        ValueRef {
            bytes,
            type_id,
            alignment,
            vtable: vtable.into(),
        }
    }
    /// Clones the referenced value into a new heap-allocated `BoxValue`.
    #[inline]
    pub fn clone_value(&self) -> Value<Box<[MaybeUninit<u8>]>, V>
    where
        V: HasClone + Clone,
    {
        Value {
            // SAFETY: the vtable's clone function was built for the exact
            // type stored behind `bytes`.
            bytes: ManuallyDrop::new(unsafe { self.vtable.as_ref().clone_fn()(self.bytes) }),
            type_id: self.type_id,
            alignment: self.alignment,
            vtable: ManuallyDrop::new(Ptr::from(self.vtable.as_ref().clone())),
        }
    }
    /// Clones the referenced value into a new word-sized `SmallValue`.
    ///
    /// NOTE(review): assumes the referenced value fits in a single word, as
    /// for values built via `SmallValue::new` — confirm callers uphold this.
    #[inline]
    pub fn clone_small_value(&self) -> Value<MaybeUninit<usize>, V>
    where
        V: HasClone + Clone,
    {
        let mut bytes = MaybeUninit::uninit();
        // SAFETY: clones the referenced bytes into the uninitialized word
        // using the erased type's own raw clone function.
        unsafe {
            self.vtable.clone_into_raw_fn()(
                self.bytes.get_bytes_ref(),
                Bytes::as_bytes_mut(&mut bytes),
            );
        }
        Value {
            bytes: ManuallyDrop::new(bytes),
            type_id: self.type_id,
            alignment: self.alignment,
            vtable: ManuallyDrop::new(Ptr::from(self.vtable.as_ref().clone())),
        }
    }
    /// Downcasts this reference to a `&T`, or `None` on type mismatch.
    #[inline]
    pub fn downcast<T: 'static>(self) -> Option<&'a T> {
        // SAFETY: `downcast_with` only invokes the closure after the type
        // check succeeds.
        self.downcast_with::<T, _, _>(|b| unsafe { Bytes::from_bytes(b.bytes) })
    }
    /// Converts the vtable of this reference from `V` into `U`, consuming it.
    #[inline]
    pub fn upcast<U: ?Sized + HasDrop + From<V>>(self) -> ValueRef<'a, U>
    where
        V: Clone,
    {
        ValueRef {
            bytes: self.bytes,
            type_id: self.type_id,
            alignment: self.alignment,
            // `take` yields an owned `V` (presumably cloning when borrowed —
            // hence the `V: Clone` bound), converted into a boxed `U`.
            vtable: VTableRef::Box(Box::new(U::from(self.vtable.take()))),
        }
    }
    /// Like `upcast`, but borrows `self` and clones the vtable instead of
    /// consuming it.
    #[inline]
    pub fn upcast_ref<U: ?Sized + HasDrop + From<V>>(&self) -> ValueRef<U>
    where
        V: Clone,
    {
        let vtable = self.vtable.as_ref();
        ValueRef {
            bytes: self.bytes,
            type_id: self.type_id,
            alignment: self.alignment,
            vtable: VTableRef::Box(Box::new(U::from((*vtable).clone()))),
        }
    }
    /// Produces a shorter-lived copy of this reference borrowing the same
    /// bytes and vtable.
    #[inline]
    pub fn reborrow(&self) -> ValueRef<V> {
        ValueRef {
            bytes: &*self.bytes,
            type_id: self.type_id,
            alignment: self.alignment,
            vtable: VTableRef::Ref(self.vtable.as_ref()),
        }
    }
}
/// A type-erased mutable reference to a value stored elsewhere.
pub struct ValueMut<'a, V>
where
    V: ?Sized + HasDrop,
{
    pub(crate) bytes: &'a mut [MaybeUninit<u8>],
    pub(crate) type_id: TypeId,
    pub(crate) alignment: usize,
    // Either borrows an existing vtable or owns a boxed one.
    pub(crate) vtable: VTableRef<'a, V>,
}
impl_value_ref_traits!(ValueMut: HasDrop);
impl<'a, V: HasDrop> ValueMut<'a, V> {
    /// Creates a type-erased mutable reference to `typed`, building a fresh
    /// boxed vtable for `T`.
    #[inline]
    pub fn new<T: Any>(typed: &'a mut T) -> ValueMut<'a, V>
    where
        V: VTable<T>,
    {
        ValueMut {
            bytes: typed.as_bytes_mut(),
            type_id: TypeId::of::<T>(),
            alignment: std::mem::align_of::<T>(),
            vtable: VTableRef::Box(Box::new(V::build_vtable())),
        }
    }
}
impl<'a, V: ?Sized + HasDrop> ValueMut<'a, V> {
    // Shared base methods (e.g. `is`, `downcast_with`, `value_type_id`),
    // generated by `impl_value_base!`.
    impl_value_base!();
    /// Swaps the referenced bytes with `other`'s when both refer to values
    /// of the same erased type.
    ///
    /// NOTE(review): on a type mismatch this is a silent no-op — confirm
    /// that callers do not need an error reported here.
    #[inline]
    pub fn swap<'b>(&mut self, other: &mut ValueMut<'b, V>) {
        if self.value_type_id() == other.value_type_id() {
            self.bytes.swap_with_slice(other.bytes);
        }
    }
    /// Replaces the referenced value with `value` by swapping bytes; the old
    /// contents end up inside `value` and are destroyed when it drops.
    ///
    /// NOTE(review): if the types differ, `swap` does nothing and `value` is
    /// simply dropped — confirm this silent behavior is intended.
    pub fn assign<B: GetBytesMut + DropAsAligned>(&mut self, mut value: Value<B, V>) {
        self.swap(&mut value.as_mut())
    }
    /// Clones `other`'s value into `self` in place.
    ///
    /// Returns `Err(Error::MismatchedTypes)` when the two references erase
    /// different types.
    #[inline]
    pub fn clone_from_other<'b>(&mut self, other: impl Into<ValueRef<'b, V>>) -> Result<(), Error>
    where
        V: HasClone + 'b,
    {
        let other = other.into();
        if self.value_type_id() == other.value_type_id() {
            // SAFETY: both byte slices hold values of the same type, checked
            // just above.
            unsafe {
                self.vtable.as_ref().clone_from_fn()(self.bytes, other.bytes);
            }
            Ok(())
        } else {
            Err(Error::MismatchedTypes {
                expected: self.value_type_id(),
                actual: other.value_type_id(),
            })
        }
    }
    /// Clones the referenced value into a new heap-allocated `BoxValue`.
    #[inline]
    pub fn clone_value(&self) -> Value<Box<[MaybeUninit<u8>]>, V>
    where
        V: HasClone + Clone,
    {
        Value {
            // SAFETY: the vtable's clone function was built for the exact
            // type stored behind `bytes`.
            bytes: ManuallyDrop::new(unsafe { self.vtable.as_ref().clone_fn()(self.bytes) }),
            type_id: self.type_id,
            alignment: self.alignment,
            vtable: ManuallyDrop::new(Ptr::from(self.vtable.as_ref().clone())),
        }
    }
    /// Clones the referenced value into a new word-sized `SmallValue`.
    ///
    /// NOTE(review): assumes the referenced value fits in a single word, as
    /// for values built via `SmallValue::new` — confirm callers uphold this.
    #[inline]
    pub fn clone_small_value(&self) -> Value<MaybeUninit<usize>, V>
    where
        V: HasClone + Clone,
    {
        let mut bytes = MaybeUninit::uninit();
        // SAFETY: clones the referenced bytes into the uninitialized word
        // using the erased type's own raw clone function.
        unsafe {
            self.vtable.clone_into_raw_fn()(
                self.bytes.get_bytes_ref(),
                Bytes::as_bytes_mut(&mut bytes),
            );
        }
        Value {
            bytes: ManuallyDrop::new(bytes),
            type_id: self.type_id,
            alignment: self.alignment,
            vtable: ManuallyDrop::new(Ptr::from(self.vtable.as_ref().clone())),
        }
    }
    /// Builds a `ValueMut` directly from raw parts.
    ///
    /// # Safety
    ///
    /// `bytes` must represent a valid value of the type identified by
    /// `type_id` with the given `alignment`, and `vtable` must match that
    /// type.
    #[inline]
    pub(crate) unsafe fn from_raw_parts(
        bytes: &'a mut [MaybeUninit<u8>],
        type_id: TypeId,
        alignment: usize,
        vtable: impl Into<VTableRef<'a, V>>,
    ) -> ValueMut<'a, V> {
        ValueMut {
            bytes,
            type_id,
            alignment,
            vtable: vtable.into(),
        }
    }
    /// Downcasts this reference to a `&mut T`, or `None` on type mismatch.
    #[inline]
    pub fn downcast<T: 'static>(self) -> Option<&'a mut T> {
        // SAFETY: `downcast_with` only invokes the closure after the type
        // check succeeds.
        self.downcast_with::<T, _, _>(|b| unsafe { Bytes::from_bytes_mut(b.bytes) })
    }
    /// Converts the vtable of this reference from `V` into `U`, consuming it.
    #[inline]
    pub fn upcast<U: ?Sized + HasDrop + From<V>>(self) -> ValueMut<'a, U>
    where
        V: Clone,
    {
        ValueMut {
            bytes: self.bytes,
            type_id: self.type_id,
            alignment: self.alignment,
            // `take` yields an owned `V` (presumably cloning when borrowed —
            // hence the `V: Clone` bound), converted into a boxed `U`.
            vtable: VTableRef::Box(Box::new(U::from(self.vtable.take()))),
        }
    }
    /// Like `upcast`, but borrows `self` and clones the vtable instead of
    /// consuming it.
    #[inline]
    pub fn upcast_mut<U: ?Sized + HasDrop + From<V>>(&mut self) -> ValueMut<U>
    where
        V: Clone,
    {
        ValueMut {
            bytes: self.bytes,
            type_id: self.type_id,
            alignment: self.alignment,
            vtable: VTableRef::Box(Box::new(U::from((*self.vtable).clone()))),
        }
    }
    /// Reborrows this mutable reference as a shorter-lived shared reference.
    #[inline]
    pub fn reborrow(&self) -> ValueRef<V> {
        ValueRef {
            bytes: self.bytes,
            type_id: self.type_id,
            alignment: self.alignment,
            vtable: VTableRef::Ref(self.vtable.as_ref()),
        }
    }
    /// Reborrows this mutable reference as a shorter-lived mutable reference.
    #[inline]
    pub fn reborrow_mut(&mut self) -> ValueMut<V> {
        ValueMut {
            bytes: self.bytes,
            type_id: self.type_id,
            alignment: self.alignment,
            vtable: VTableRef::Ref(self.vtable.as_ref()),
        }
    }
}
impl<'a, V: HasDrop> From<ValueMut<'a, V>> for ValueRef<'a, V> {
#[inline]
fn from(v: ValueMut<'a, V>) -> ValueRef<'a, V> {
ValueRef {
bytes: v.bytes,
type_id: v.type_id,
alignment: v.alignment,
vtable: v.vtable,
}
}
}
// No-op drop function, used as the `DropFn` slot when adapting copy-value
// references (see the `From` impls below), since `Copy` data needs no
// destructor.
unsafe fn drop_copy(_: &mut [MaybeUninit<u8>]) {}
impl<'a, V: Any + Clone> From<CopyValueMut<'a, V>> for ValueMut<'a, (DropFn, V)> {
    /// Adapts a copy-value mutable reference into a droppable `ValueMut` by
    /// pairing its vtable with the no-op `drop_copy` function.
    #[inline]
    fn from(v: CopyValueMut<'a, V>) -> ValueMut<'a, (DropFn, V)> {
        // Build the augmented vtable first, then move the remaining fields.
        let vtable: (DropFn, V) = (drop_copy, v.vtable.take());
        ValueMut {
            bytes: v.bytes,
            type_id: v.type_id,
            alignment: v.alignment,
            vtable: VTableRef::Box(Box::new(vtable)),
        }
    }
}
impl<'a, V: Any + Clone> From<CopyValueRef<'a, V>> for ValueRef<'a, (DropFn, V)> {
    /// Adapts a copy-value shared reference into a `ValueRef` by pairing its
    /// vtable with the no-op `drop_copy` function.
    #[inline]
    fn from(v: CopyValueRef<'a, V>) -> ValueRef<'a, (DropFn, V)> {
        let vtable: (DropFn, V) = (drop_copy, v.vtable.take());
        ValueRef {
            bytes: v.bytes,
            type_id: v.type_id,
            alignment: v.alignment,
            vtable: VTableRef::Box(Box::new(vtable)),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::dync_trait;
    use std::rc::Rc;
    // Vtable covering clone/eq/hash/debug-capable types; used by most tests.
    #[dync_trait(dync_crate_name = "crate")]
    pub trait Val: Clone + PartialEq + Eq + std::hash::Hash + std::fmt::Debug + 'static {}
    impl<T> Val for T where T: Clone + PartialEq + Eq + std::hash::Hash + std::fmt::Debug + 'static {}
    // Vtable without `Eq`/`Hash`, suitable for float-containing types.
    #[dync_trait(dync_crate_name = "crate")]
    pub trait Float: Clone + PartialEq + std::fmt::Debug + 'static {}
    impl<T> Float for T where T: Clone + PartialEq + std::fmt::Debug + 'static {}
    // `Rc<&str>` and `Rc<String>` erase to different `TypeId`s, so equality
    // must fail even though the displayed contents look the same.
    #[test]
    fn compare_values_with_different_types() {
        let a = BoxValue::<ValVTable>::new(Rc::new("Hello"));
        let b = BoxValue::<ValVTable>::new(Rc::new(String::from("Hello")));
        assert_ne!(a, b);
    }
    // Same check, comparing through shared `ValueRef`s.
    #[test]
    fn compare_value_ref_with_different_types() {
        let a = BoxValue::<ValVTable>::new(Rc::new("Hello"));
        let b = BoxValue::<ValVTable>::new(Rc::new(String::from("Hello")));
        assert_ne!(a.as_ref(), b.as_ref());
    }
    // Same check, comparing through mutable `ValueMut`s.
    #[test]
    fn compare_value_mut_with_different_types() {
        let mut a = BoxValue::<ValVTable>::new(Rc::new("Hello"));
        let mut b = BoxValue::<ValVTable>::new(Rc::new(String::from("Hello")));
        assert_ne!(a.as_mut(), b.as_mut());
    }
    // Values of different erased types coexist in a `HashSet`; only the
    // element common to both sets (`a`) survives the intersection.
    #[test]
    fn values_with_different_types_in_a_hash_set() {
        use std::collections::HashSet;
        let mut set_a = HashSet::new();
        let mut set_b = HashSet::new();
        let a = BoxValue::<ValVTable>::new(Rc::new("Hello"));
        let b = BoxValue::<ValVTable>::new(String::from("Hello"));
        let c = BoxValue::<ValVTable>::new("Hello");
        set_a.insert(a.clone());
        set_a.insert(b);
        set_b.insert(a.clone());
        set_b.insert(c);
        let set_intersect = set_a.intersection(&set_b).collect::<HashSet<_>>();
        assert_eq!(set_intersect.len(), 1);
        let elem = set_intersect.into_iter().next().unwrap();
        assert_eq!(elem, &a);
    }
    // Equality follows the erased type's own `PartialEq`, including through
    // `clone` and a downcast-then-rewrap round trip.
    #[test]
    fn value_equality() {
        let a = Rc::new(String::from("Hello"));
        let b = Rc::new(String::from("Hello"));
        assert_eq!(&a, &b);
        let a = BoxValue::<ValVTable>::new(Rc::new(String::from("Hello")));
        let b = BoxValue::<ValVTable>::new(Rc::new(String::from("Hello")));
        let c = b.clone();
        let c_rc = b.clone().downcast::<Rc<String>>().unwrap();
        let d = BoxValue::<ValVTable>::new(Rc::clone(&*c_rc));
        assert_eq!(&a, &b);
        assert_eq!(&a, &c);
        assert_eq!(&a, &d);
    }
    // Round-trips a `[f32; 3]` (alignment smaller than a word) through
    // clone + downcast, exercising the aligned storage handling.
    #[test]
    fn unaligned_box_value() {
        let a = [0.0_f32; 3];
        let a_val = BoxValue::<FloatVTable>::new(a);
        let b = *a_val.clone().downcast::<[f32; 3]>().unwrap();
        assert_eq!(&a, &b);
    }
    // Deep clone of a heap-allocated value compares equal to the original.
    #[test]
    fn clone_test() {
        let val = BoxValue::<ValVTable>::new(Rc::new(1u8));
        assert_eq!(&val, &val.clone());
    }
    // Deep clone of a word-sized value compares equal to the original.
    #[test]
    fn clone_small_test() {
        let val = SmallValue::<ValVTable>::new(Rc::new(1u8));
        assert_eq!(&val, &val.clone());
    }
    // A `CopyValueRef` from a `VecCopy` converts into a `ValueRef` paired
    // with the no-op drop function, and still downcasts to its element.
    #[test]
    fn copy_value_to_value_convert() {
        let v = crate::vec_copy::VecCopy::from(vec![1u32, 2, 3]);
        let copy_val = v.get_ref(1);
        let val: ValueRef<(DropFn, ())> = copy_val.into();
        assert_eq!(val.downcast::<u32>().unwrap(), &2u32);
    }
}