tensorgraph_sys/device/cpu.rs

use std::{
    alloc::{AllocError, Allocator, Global, Layout},
    ops::DerefMut,
};

use crate::ptr::{NonNull, Ref};

use super::{DefaultDeviceAllocator, Device, DeviceAllocator, DevicePtr};

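/// The host CPU device. Its pointer type is a plain `*mut T`, and buffers
/// live in ordinary host memory.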
#[derive(Debug)]
pub struct Cpu;

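// Host-to-host transfers: both sides already live in CPU memory, so every
// copy is a plain slice copy.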
impl Device for Cpu {
    type Ptr<T: ?Sized> = *mut T;
    const IS_CPU: bool = true;

    fn copy_from_host<T: Copy>(from: &[T], to: &mut Ref<[T], Self>) {
        to.deref_mut().copy_from_slice(from);
    }

    fn copy_to_host<T: Copy>(from: &Ref<[T], Self>, to: &mut [T]) {
        to.copy_from_slice(&**from);
    }

    fn copy<T: Copy>(from: &Ref<[T], Self>, to: &mut Ref<[T], Self>) {
        to.deref_mut().copy_from_slice(&**from);
    }
}

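// CPU buffers default to `Global`, the standard library's global allocator.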
impl DefaultDeviceAllocator for Cpu {
    type Alloc = Global;
}

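// On the CPU a device pointer *is* a raw pointer, so the conversions are
// identity functions and `write` forwards to the inherent `pointer::write`.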
impl<T: ?Sized> DevicePtr<T> for *mut T {
    fn as_raw(self) -> *mut T {
        self
    }

    fn from_raw(ptr: *mut T) -> Self {
        ptr
    }

    unsafe fn write(self, val: T)
    where
        T: Sized,
    {
        self.write(val);
    }
}

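// The crate's `NonNull<T, Cpu>` and `std::ptr::NonNull<T>` are freely
// interchangeable; the allocator impl below relies on these conversions.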
impl<T: ?Sized> From<std::ptr::NonNull<T>> for NonNull<T, Cpu> {
    fn from(ptr: std::ptr::NonNull<T>) -> Self {
        unsafe { Self::new_unchecked(ptr.as_ptr()) }
    }
}

impl<T: ?Sized> From<NonNull<T, Cpu>> for std::ptr::NonNull<T> {
    fn from(ptr: NonNull<T, Cpu>) -> Self {
        unsafe { Self::new_unchecked(ptr.as_ptr()) }
    }
}

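// Blanket impl: every `std::alloc::Allocator` is a `DeviceAllocator` for the
// CPU device, forwarding each method and converting the returned pointers
// into the crate's `NonNull<_, Cpu>`.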
impl<A: Allocator> DeviceAllocator for A {
    type AllocError = AllocError;
    type Device = Cpu;

    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8], Cpu>, AllocError> {
        self.allocate(layout).map(NonNull::from)
    }

    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8], Cpu>, AllocError> {
        self.allocate_zeroed(layout).map(NonNull::from)
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8, Cpu>, layout: Layout) {
        self.deallocate(ptr.into(), layout);
    }

    unsafe fn grow(
        &self,
        ptr: NonNull<u8, Cpu>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8], Cpu>, AllocError> {
        self.grow(ptr.into(), old_layout, new_layout)
            .map(NonNull::from)
    }

    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8, Cpu>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8], Cpu>, AllocError> {
        self.grow_zeroed(ptr.into(), old_layout, new_layout)
            .map(NonNull::from)
    }

    unsafe fn shrink(
        &self,
        ptr: NonNull<u8, Cpu>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8], Cpu>, AllocError> {
        self.shrink(ptr.into(), old_layout, new_layout)
            .map(NonNull::from)
    }
}
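
// A minimal usage sketch (not part of the original file): exercising the
// blanket `DeviceAllocator` impl above through `Global`. Assumes the crate is
// built on nightly with `feature(allocator_api)`, which the
// `std::alloc::Allocator` import already requires.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn global_is_a_cpu_device_allocator() {
        let layout = Layout::array::<u8>(64).unwrap();

        // Fully qualified call so the `DeviceAllocator` method is selected
        // rather than `Allocator::allocate`.
        let buf: NonNull<[u8], Cpu> = DeviceAllocator::allocate(&Global, layout).unwrap();

        // Round-trip through `std::ptr::NonNull` (via the `From` impls above)
        // to thin the pointer before handing it back to `deallocate`.
        let thin: std::ptr::NonNull<u8> = std::ptr::NonNull::<[u8]>::from(buf).cast();
        unsafe { DeviceAllocator::deallocate(&Global, NonNull::from(thin), layout) };
    }
}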