generic_simd/vector/
mod.rs1pub mod width;
4
5use crate::arch::Token;
6use crate::scalar::Scalar;
7use core::ops::{
8 Add, AddAssign, Deref, DerefMut, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign,
9};
10
/// Selects the natural ("native") vector width for a scalar type on the
/// architecture identified by the `Token` type parameter.
///
/// NOTE(review): the type parameter `Token` shadows the `Token` trait
/// imported above — here it is a generic parameter, not the trait bound.
pub trait Native<Token> {
    /// The native vector width for this scalar/architecture pair.
    type Width: width::Width;
}
15
/// The native width of `Scalar` on the architecture selected by `Token`.
pub type NativeWidth<Scalar, Token> = <Scalar as Native<Token>>::Width;

/// The native-width vector of `Scalar` on the architecture selected by `Token`.
pub type NativeVector<Scalar, Token> = VectorOf<Scalar, NativeWidth<Scalar, Token>, Token>;

/// The vector of `Scalar` with the given `Width` on the architecture selected
/// by `Token`. `self::Scalar` disambiguates to the `crate::scalar::Scalar`
/// trait imported above (not a local type).
pub type VectorOf<Scalar, Width, Token> = <Scalar as self::Scalar<Token, Width>>::Vector;
24
25pub unsafe trait Vector: Copy {
31 type Scalar: Copy;
33
34 type Token: Token;
36
37 type Width: width::Width;
39
40 type Underlying: Copy;
42
43 #[inline]
45 fn width() -> usize {
46 <Self::Width as width::Width>::VALUE
47 }
48
49 #[inline]
51 fn to_token(self) -> Self::Token {
52 unsafe { Self::Token::new_unchecked() }
53 }
54
55 #[inline]
57 fn as_slice(&self) -> &[Self::Scalar] {
58 unsafe { core::slice::from_raw_parts(self as *const _ as *const _, Self::width()) }
59 }
60
61 #[inline]
63 fn as_slice_mut(&mut self) -> &mut [Self::Scalar] {
64 unsafe { core::slice::from_raw_parts_mut(self as *mut _ as *mut _, Self::width()) }
65 }
66
67 #[inline]
69 fn to_underlying(self) -> Self::Underlying {
70 assert_eq!(
71 (
72 core::mem::size_of::<Self::Underlying>(),
73 core::mem::align_of::<Self::Underlying>(),
74 ),
75 (core::mem::align_of::<Self>(), core::mem::size_of::<Self>(),)
76 );
77 unsafe { core::mem::transmute_copy(&self) }
78 }
79
80 #[inline]
82 fn from_underlying(
83 #[allow(unused_variables)] token: Self::Token,
84 underlying: Self::Underlying,
85 ) -> Self {
86 assert_eq!(
87 (
88 core::mem::size_of::<Self::Underlying>(),
89 core::mem::align_of::<Self::Underlying>(),
90 ),
91 (core::mem::align_of::<Self>(), core::mem::size_of::<Self>(),)
92 );
93 unsafe { core::mem::transmute_copy(&underlying) }
94 }
95
96 #[inline]
101 unsafe fn read_ptr(
102 #[allow(unused_variables)] token: Self::Token,
103 from: *const Self::Scalar,
104 ) -> Self {
105 (from as *const Self).read_unaligned()
106 }
107
108 #[inline]
114 unsafe fn read_aligned_ptr(
115 #[allow(unused_variables)] token: Self::Token,
116 from: *const Self::Scalar,
117 ) -> Self {
118 (from as *const Self).read()
119 }
120
121 #[inline]
128 unsafe fn read_unchecked(token: Self::Token, from: &[Self::Scalar]) -> Self {
129 Self::read_ptr(token, from.as_ptr())
130 }
131
132 #[inline]
137 fn read(token: Self::Token, from: &[Self::Scalar]) -> Self {
138 assert!(
139 from.len() >= Self::width(),
140 "source not larget enough to load vector"
141 );
142 unsafe { Self::read_unchecked(token, from) }
143 }
144
145 #[inline]
150 unsafe fn write_ptr(self, to: *mut Self::Scalar) {
151 (to as *mut Self).write_unaligned(self);
152 }
153
154 #[inline]
160 unsafe fn write_aligned_ptr(self, to: *mut Self::Scalar) {
161 (to as *mut Self).write(self);
162 }
163
164 #[inline]
169 unsafe fn write_unchecked(self, to: &mut [Self::Scalar]) {
170 self.write_ptr(to.as_mut_ptr());
171 }
172
173 #[inline]
178 fn write(self, to: &mut [Self::Scalar]) {
179 assert!(
180 to.len() >= Self::width(),
181 "destination not large enough to store vector"
182 );
183 unsafe { self.write_unchecked(to) };
184 }
185
186 fn zeroed(token: Self::Token) -> Self;
188
189 fn splat(token: Self::Token, from: Self::Scalar) -> Self;
191}
192
/// The operations a vector is expected to support: slice access via
/// `AsRef`/`AsMut`/`Deref`/`DerefMut`, plus `+`, `-`, `*`, `/` and their
/// assigning forms, each against both another vector and a single scalar.
pub trait Ops:
    Vector
    + AsRef<[<Self as Vector>::Scalar]>
    + AsMut<[<Self as Vector>::Scalar]>
    + Deref<Target = [<Self as Vector>::Scalar]>
    + DerefMut
    + Add<Self, Output = Self>
    + Add<<Self as Vector>::Scalar, Output = Self>
    + AddAssign<Self>
    + AddAssign<<Self as Vector>::Scalar>
    + Sub<Self, Output = Self>
    + Sub<<Self as Vector>::Scalar, Output = Self>
    + SubAssign<Self>
    + SubAssign<<Self as Vector>::Scalar>
    + Mul<Self, Output = Self>
    + Mul<<Self as Vector>::Scalar, Output = Self>
    + MulAssign<Self>
    + MulAssign<<Self as Vector>::Scalar>
    + Div<Self, Output = Self>
    + Div<<Self as Vector>::Scalar, Output = Self>
    + DivAssign<Self>
    + DivAssign<<Self as Vector>::Scalar>
{
}
// Blanket impl: any `Vector` providing all of the listed operator and
// slice-access impls automatically implements `Ops`; the bounds mirror the
// trait declaration above exactly.
impl<V> Ops for V where
    V: Vector
        + AsRef<[<V as Vector>::Scalar]>
        + AsMut<[<V as Vector>::Scalar]>
        + Deref<Target = [<V as Vector>::Scalar]>
        + DerefMut
        + Add<V, Output = V>
        + Add<<V as Vector>::Scalar, Output = V>
        + AddAssign<V>
        + AddAssign<<V as Vector>::Scalar>
        + Sub<V, Output = V>
        + Sub<<V as Vector>::Scalar, Output = V>
        + SubAssign<V>
        + SubAssign<<V as Vector>::Scalar>
        + Mul<V, Output = V>
        + Mul<<V as Vector>::Scalar, Output = V>
        + MulAssign<V>
        + MulAssign<<V as Vector>::Scalar>
        + Div<V, Output = V>
        + Div<<V as Vector>::Scalar, Output = V>
        + DivAssign<V>
        + DivAssign<<V as Vector>::Scalar>
{
}
242
/// Operations for vectors of signed scalars: everything in `Ops` plus
/// lane-wise negation.
pub trait Signed: Ops + Neg<Output = Self> {}
// Blanket impl: any `Ops` vector that supports negation is `Signed`.
impl<V> Signed for V where V: Ops + Neg<Output = V> {}
246
/// Operations for vectors of complex-valued lanes.
pub trait Complex: Signed {
    /// The real-number type underlying each complex lane
    /// (e.g. `f32` for a vector of `Complex<f32>` — TODO confirm against
    /// the implementing types).
    type RealScalar: Copy;

    /// Conjugates each lane.
    fn conj(self) -> Self;

    /// Multiplies each lane by the imaginary unit `i`.
    fn mul_i(self) -> Self;

    /// Multiplies each lane by `-i`.
    fn mul_neg_i(self) -> Self;
}