1use std::borrow::{Borrow, BorrowMut};
13use std::fmt::{Debug, Display, Formatter, Result};
14use std::mem;
15use std::ops::{Deref, DerefMut};
16use std::slice::{Iter, IterMut};
17use std::vec::Vec;
18
19#[cfg(feature = "parallel_proc")]
20use rayon::iter::{IntoParallelRefIterator, IntoParallelRefMutIterator};
21
22use crate::structs::allocator::Alloc64;
23use crate::Buffer;
24
/// A `Vec<T>` whose backing allocation is always 64-byte aligned, courtesy
/// of the custom [`Alloc64`] allocator (e.g. for SIMD / cache-line layouts).
///
/// `#[repr(transparent)]` guarantees it has exactly the layout of the inner
/// `Vec<T, Alloc64>`.
#[repr(transparent)]
pub struct Vec64<T>(pub Vec<T, Alloc64>);
60
61impl<T> Vec64<T> {
62 #[inline]
63 pub fn new() -> Self {
64 Self(Vec::new_in(Alloc64))
65 }
66
67 #[inline]
68 pub fn with_capacity(cap: usize) -> Self {
69 Self(Vec::with_capacity_in(cap, Alloc64))
70 }
71
72 pub unsafe fn from_vec64_u8(buf: Vec64<u8>) -> Vec64<T> {
81 let byte_len = buf.len();
82 let elem_size = mem::size_of::<T>();
83 assert!(byte_len % elem_size == 0, "Size mismatch in from_vec64_u8");
84
85 let ptr = buf.0.as_ptr() as *mut T;
86 let len = byte_len / elem_size;
87 let cap = buf.0.capacity() / elem_size;
88
89 let _ = mem::ManuallyDrop::new(buf.0);
91
92 let vec = unsafe { Vec::from_raw_parts_in(ptr, len, cap, Alloc64) };
93 Vec64(vec)
94 }
95
96 #[inline]
104 pub unsafe fn from_raw_parts(ptr: *mut T, len: usize, capacity: usize) -> Self {
105 debug_assert_eq!(
106 (ptr as usize) % 64,
107 0,
108 "Vec64::from_raw_parts: pointer is not 64-byte aligned"
109 );
110
111 let vec = unsafe { Vec::from_raw_parts_in(ptr, len, capacity, Alloc64) };
112 Self(vec)
113 }
114}
115
#[cfg(feature = "parallel_proc")]
impl<T: Sync + Send> Vec64<T> {
    /// Returns a rayon parallel iterator over shared references.
    #[inline]
    pub fn par_iter(&self) -> rayon::slice::Iter<'_, T> {
        self.0.par_iter()
    }

    /// Returns a rayon parallel iterator over mutable references.
    #[inline]
    pub fn par_iter_mut(&mut self) -> rayon::slice::IterMut<'_, T> {
        self.0.par_iter_mut()
    }
}
129
impl<T: Copy> Vec64<T> {
    /// Builds a `Vec64<T>` by bitwise-copying `slice` into a fresh,
    /// 64-byte-aligned allocation.
    #[inline]
    pub fn from_slice(slice: &[T]) -> Self {
        let mut v = Self::with_capacity(slice.len());
        unsafe {
            // SAFETY: `v` was just allocated with capacity `slice.len()`,
            // the source and destination buffers cannot overlap, and
            // `T: Copy` makes a raw byte copy a valid duplication.
            // `set_len` runs only after all `slice.len()` elements are
            // initialized.
            std::ptr::copy_nonoverlapping(slice.as_ptr(), v.0.as_mut_ptr(), slice.len());
            v.0.set_len(slice.len());
        }
        v
    }
}
143
144impl<T: Clone> Vec64<T> {
145 #[inline]
146 pub fn from_slice_clone(slice: &[T]) -> Self {
147 let mut v = Self::with_capacity(slice.len());
148 v.0.extend_from_slice(slice);
149 v
150 }
151}
152
153impl<T> Default for Vec64<T> {
154 fn default() -> Self {
155 Self::new()
156 }
157}
158
159impl<T> Deref for Vec64<T> {
160 type Target = Vec<T, Alloc64>;
161 #[inline]
162 fn deref(&self) -> &Self::Target {
163 &self.0
164 }
165}
166
167impl<T> DerefMut for Vec64<T> {
168 #[inline]
169 fn deref_mut(&mut self) -> &mut Self::Target {
170 &mut self.0
171 }
172}
173
174impl<T: Clone> Clone for Vec64<T> {
175 fn clone(&self) -> Self {
176 Self(self.0.clone())
177 }
178}
179
180impl<T: Debug> Debug for Vec64<T> {
181 fn fmt(&self, f: &mut Formatter<'_>) -> Result {
182 self.0.fmt(f)
183 }
184}
185
186impl<T: PartialEq> PartialEq for Vec64<T> {
187 fn eq(&self, other: &Self) -> bool {
188 self.0 == other.0
189 }
190}
191
impl<T: PartialEq> PartialEq<Buffer<T>> for Vec64<T> {
    /// Compares against a [`Buffer<T>`] through both types' `Deref`
    /// targets.
    // NOTE(review): relies on `Buffer<T>`'s Deref target being comparable
    // with `Vec<T, Alloc64>` (presumably a slice) — confirm against the
    // `Buffer` definition.
    #[inline]
    fn eq(&self, other: &Buffer<T>) -> bool {
        self.deref() == other.deref()
    }
}
198
199impl<T: Display> Display for Vec64<T> {
200 fn fmt(&self, f: &mut Formatter<'_>) -> Result {
201 write!(f, "[")?;
202 for (i, item) in self.iter().enumerate() {
203 if i > 0 {
204 write!(f, ", ")?;
205 }
206 write!(f, "{item}")?;
207 }
208 write!(f, "]")
209 }
210}
211
212impl<T> IntoIterator for Vec64<T> {
213 type Item = T;
214 type IntoIter = std::vec::IntoIter<T, Alloc64>;
215 #[inline]
216 fn into_iter(self) -> Self::IntoIter {
217 self.0.into_iter()
218 }
219}
220
221impl<'a, T> IntoIterator for &'a Vec64<T> {
222 type Item = &'a T;
223 type IntoIter = Iter<'a, T>;
224 #[inline]
225 fn into_iter(self) -> Self::IntoIter {
226 self.0.iter()
227 }
228}
229impl<'a, T> IntoIterator for &'a mut Vec64<T> {
230 type Item = &'a mut T;
231 type IntoIter = IterMut<'a, T>;
232 #[inline]
233 fn into_iter(self) -> Self::IntoIter {
234 self.0.iter_mut()
235 }
236}
237
238impl<T> Extend<T> for Vec64<T> {
239 #[inline]
240 fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
241 self.0.extend(iter)
242 }
243}
244
245impl<T> FromIterator<T> for Vec64<T> {
246 #[inline]
247 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
248 let iterator = iter.into_iter();
249 let mut v = if let Some(exact) = iterator.size_hint().1 {
250 Vec::with_capacity_in(exact, Alloc64)
251 } else {
252 Vec::with_capacity_in(iterator.size_hint().0, Alloc64)
253 };
254 v.extend(iterator);
255 Self(v)
256 }
257}
258
259impl<T> From<Vec<T, Alloc64>> for Vec64<T> {
260 #[inline]
261 fn from(v: Vec<T, Alloc64>) -> Self {
262 Self(v)
263 }
264}
265
266impl<T> From<Vec64<T>> for Vec<T, Alloc64> {
267 #[inline]
268 fn from(v: Vec64<T>) -> Self {
269 v.0
270 }
271}
272
273impl<T> From<Vec<T>> for Vec64<T> {
274 #[inline]
275 fn from(v: Vec<T>) -> Self {
276 let mut vec = Vec::with_capacity_in(v.len(), Alloc64);
277 vec.extend(v);
278 Self(vec)
279 }
280}
281
282impl<T> From<&[T]> for Vec64<T>
283where
284 T: Clone
285{
286 #[inline]
287 fn from(s: &[T]) -> Self {
288 let mut v = Vec::with_capacity_in(s.len(), Alloc64);
289 v.extend_from_slice(s);
290 Self(v)
291 }
292}
293
294impl<T> AsRef<[T]> for Vec64<T> {
295 #[inline]
296 fn as_ref(&self) -> &[T] {
297 self.0.as_ref()
298 }
299}
300impl<T> AsMut<[T]> for Vec64<T> {
301 #[inline]
302 fn as_mut(&mut self) -> &mut [T] {
303 self.0.as_mut()
304 }
305}
306
307impl<T> Borrow<[T]> for Vec64<T> {
308 #[inline]
309 fn borrow(&self) -> &[T] {
310 self.0.borrow()
311 }
312}
313impl<T> BorrowMut<[T]> for Vec64<T> {
314 #[inline]
315 fn borrow_mut(&mut self) -> &mut [T] {
316 self.0.borrow_mut()
317 }
318}
319
/// `vec![...]`-style constructor for [`Vec64`].
///
/// The extra `bool` forms build a packed bitmap (`Vec64<u8>`, one bit per
/// boolean): `vec64![bool E; n]` repeats one bit `n` times and
/// `vec64![bool a, b, c]` packs the listed bits via
/// `null_masking::set_bit`.
#[macro_export]
macro_rules! vec64 {
    // Packed-bitmap repeat form: `n` bits, each set to `$elem`.
    (bool $elem:expr; $n:expr) => {{
        let len = $n as usize;
        // ceil(len / 8) bytes hold `len` bits.
        let byte_len = (len + 7) / 8;
        let mut v = $crate::Vec64::<u8>::with_capacity(byte_len);

        let fill = if $elem { 0xFFu8 } else { 0u8 };
        v.0.resize(byte_len, fill);

        // When filling with ones, clear the unused high bits of the final
        // byte so padding bits stay zero.
        if $elem && (len & 7) != 0 {
            let mask = (1u8 << (len & 7)) - 1;
            let last = byte_len - 1;
            v.0[last] &= mask;
        }
        v
    }};

    // Packed-bitmap list form: one bit per boolean expression.
    (bool $($x:expr),+ $(,)?) => {{
        // Count the expressions without evaluating them here.
        let len: usize = 0 $(+ { let _ = &$x; 1 })*;
        let byte_len = (len + 7) / 8;
        let mut v = $crate::Vec64::<u8>::with_capacity(byte_len);
        // Start from an all-zero bitmap; only true bits are set below.
        v.0.resize(byte_len, 0);

        let mut _idx = 0usize;
        $(
            if $x {
                $crate::null_masking::set_bit(&mut v.0, _idx);
            }
            _idx += 1;
        )+
        v
    }};

    // Empty form: `vec64![]`.
    () => {
        $crate::Vec64::new()
    };

    // Repeat form: `vec64![elem; n]` (requires `elem: Clone` via `resize`).
    ($elem:expr; $n:expr) => {{
        let mut v = $crate::Vec64::with_capacity($n);
        v.0.resize($n, $elem);
        v
    }};

    // List form: `vec64![a, b, c]`; capacity is the counted element total.
    ($($x:expr),+ $(,)?) => {{
        let mut v = $crate::Vec64::with_capacity(0 $(+ { let _ = &$x; 1 })*);
        $(v.push($x);)+
        v
    }};
}
377
#[cfg(test)]
mod tests {
    use super::*;

    // This test was previously gated behind `feature = "parallel_proc"`,
    // apparently by accident: it exercises no rayon functionality. It now
    // runs unconditionally.
    #[test]
    fn test_new_and_default() {
        let v: Vec64<u32> = Vec64::new();
        assert_eq!(v.len(), 0);
        assert_eq!(v.capacity(), 0);

        let d: Vec64<u32> = Default::default();
        assert_eq!(d.len(), 0);
    }

    #[test]
    fn test_with_capacity_and_alignment() {
        let v: Vec64<u64> = Vec64::with_capacity(32);
        assert_eq!(v.len(), 0);
        assert!(v.capacity() >= 32);
        assert_eq!(v.0.as_ptr() as usize % 64, 0);
    }

    #[test]
    fn test_from_slice_and_from() {
        let data = [1, 2, 3, 4, 5];
        let v = Vec64::from_slice(&data);
        assert_eq!(v.len(), 5);
        assert_eq!(&v[..], &data);

        let v2: Vec64<_> = Vec64::from(&data[..]);
        assert_eq!(&v2[..], &data);
    }

    #[test]
    fn test_vec_macro() {
        let v = vec64![1, 2, 3, 4, 5];
        assert_eq!(&v[..], &[1, 2, 3, 4, 5]);

        let v2 = vec64![7u8; 4];
        assert_eq!(&v2[..], &[7u8; 4]);
    }

    #[test]
    fn test_extend_and_from_iter() {
        let mut v = Vec64::new();
        v.extend([10, 20, 30]);
        assert_eq!(&v[..], &[10, 20, 30]);

        let v2: Vec64<_> = [100, 200].into_iter().collect();
        assert_eq!(&v2[..], &[100, 200]);
    }

    #[test]
    fn test_push_and_index() {
        let mut v = Vec64::with_capacity(2);
        v.push(123);
        v.push(456);
        assert_eq!(v[0], 123);
        assert_eq!(v[1], 456);
    }

    #[test]
    fn test_as_ref_and_as_mut() {
        let mut v = Vec64::from_slice(&[1, 2, 3]);
        assert_eq!(v.as_ref(), &[1, 2, 3]);
        v.as_mut()[1] = 99;
        assert_eq!(v[1], 99);
    }

    #[test]
    fn test_borrow_traits() {
        use std::borrow::{Borrow, BorrowMut};
        let mut v = Vec64::from_slice(&[4, 5, 6]);
        let r: &[i32] = v.borrow();
        assert_eq!(r, &[4, 5, 6]);
        let r: &mut [i32] = v.borrow_mut();
        r[0] = 42;
        assert_eq!(v[0], 42);
    }

    #[test]
    fn test_clone_partial_eq_debug_display() {
        let v = vec64![1, 2, 3];
        let c = v.clone();
        assert_eq!(v, c);
        let s = format!("{:?}", v);
        assert!(s.contains("1"));
        let s2 = format!("{}", v);
        assert_eq!(s2, "[1, 2, 3]");
    }

    #[test]
    fn test_into_iterator() {
        let v = vec64![2, 4, 6];
        let mut out = Vec::new();
        for x in v {
            out.push(x);
        }
        assert_eq!(out, vec![2, 4, 6]);
    }

    #[test]
    fn test_iter_and_iter_mut() {
        let v = vec64![1, 2, 3];
        let sum: i32 = v.iter().copied().sum();
        assert_eq!(sum, 6);

        let mut v = vec64![0, 0, 0];
        for x in &mut v {
            *x = 7;
        }
        assert_eq!(v[..], [7, 7, 7]);
    }

    #[test]
    fn test_from_std_vec() {
        let std_v = vec![1, 2, 3, 4];
        let v: Vec64<_> = std_v.clone().into();
        assert_eq!(v[..], [1, 2, 3, 4]);
    }

    #[test]
    fn test_into_std_vec() {
        let v = vec64![7, 8, 9];
        // `to_vec` already copies into a global-allocator Vec; the extra
        // `clone()` that used to precede it allocated the data twice.
        let std_v: Vec<_> = v.0.to_vec();
        assert_eq!(std_v, vec![7, 8, 9]);
    }

    #[test]
    fn test_alignment_is_64() {
        let v: Vec64<u8> = Vec64::with_capacity(32);
        assert_eq!(v.0.as_ptr() as usize % 64, 0);
    }

    #[test]
    fn test_zero_sized_types() {
        let v: Vec64<()> = vec64![(); 10];
        assert_eq!(v.len(), 10);
    }

    #[test]
    #[should_panic]
    fn test_index_out_of_bounds() {
        let v: Vec64<i32> = Vec64::new();
        let _ = v[1];
    }

    /// Helper: asserts the vector's data pointer is 64-byte aligned.
    fn assert_aligned_64<T>(vec: &Vec64<T>) {
        let ptr = vec.as_ptr() as usize;
        assert_eq!(ptr % 64, 0, "Pointer {:p} not 64-byte aligned", vec.as_ptr());
    }

    #[test]
    fn test_vec64_new_alignment() {
        let v: Vec64<u32> = Vec64::new();
        // An empty Vec has a dangling pointer that need not be aligned.
        if v.capacity() > 0 {
            assert_aligned_64(&v);
        }
    }

    #[test]
    fn test_vec64_with_capacity_alignment() {
        for &n in &[1, 3, 7, 32, 1024, 4096] {
            let v: Vec64<u8> = Vec64::with_capacity(n);
            assert_aligned_64(&v);
        }
    }

    #[test]
    fn test_vec64_from_slice_alignment() {
        let data = [1u64, 2, 3, 4, 5, 6, 7, 8];
        let v = Vec64::from_slice(&data);
        assert_aligned_64(&v);
    }

    #[test]
    fn test_vec64_macro_alignment() {
        let v = vec64![0u32; 64];
        assert_aligned_64(&v);

        let v2 = vec64![1u16, 2, 3, 4, 5];
        assert_aligned_64(&v2);
    }

    #[test]
    fn test_vec64_grow_alignment() {
        let mut v: Vec64<u64> = Vec64::with_capacity(1);
        assert_aligned_64(&v);
        for i in 0..1000 {
            v.push(i);
            assert_aligned_64(&v);
        }
    }

    #[test]
    fn test_vec64_alignment_zst() {
        let v: Vec64<()> = Vec64::with_capacity(100);
        assert_eq!(v.capacity(), usize::MAX, "ZST Vec should have 'infinite' capacity");
    }
}
582
#[cfg(test)]
#[cfg(feature = "parallel_proc")]
mod parallel_tests {
    use rayon::iter::ParallelIterator;

    use super::*;

    /// Smoke test: a rayon parallel sum over a `Vec64` matches the
    /// sequential result.
    #[test]
    fn test_vec64_par_iter() {
        let v = Vec64::from_slice(&[1u32, 2, 3, 4, 5]);
        let sum: u32 = v.par_iter().sum();
        assert_eq!(sum, 15);
    }
}