1pub mod serde;
3
4use std::{alloc::{alloc, dealloc, realloc, Layout}, fmt, marker::PhantomData, mem, ptr::{self, NonNull}};
5use std::ops::Index;
6use std::fmt::Debug;
7
8use num_traits::Num;
/// Zero-suppressing sparse vector: a vector of logical length `len` that
/// physically stores only its non-zero elements, as index/value pairs kept
/// sorted by index in `buf`.
pub struct ZeroSpVec<N>
where N: Num
{
    // Raw parallel (u32 index, N value) buffers; only the first `nnz`
    // slots of each are initialized.
    buf: RawZeroSpVec<N>,
    // Logical length, counting the implicit zeros.
    len: usize,
    // Number of stored (non-zero) entries; always <= len and <= buf.cap.
    nnz: usize,
    // Cached zero value so `get` can hand out `&N` for unstored slots.
    zero: N,
}
23
/// Interface of a zero-suppressing sparse vector: logical length `len()`,
/// with only non-zero elements stored as (index, value) pairs sorted by index.
pub trait ZeroSpVecTrait<N>: Clone + Default + Index<usize, Output = N>
where N: Num
{
    /// Raw pointer to the sorted index buffer; only the first `nnz()` slots are initialized.
    unsafe fn ind_ptr(&self) -> *mut u32;
    /// Raw pointer to the value buffer, parallel to `ind_ptr`.
    unsafe fn val_ptr(&self) -> *mut N;
    /// Appends a stored entry without updating the logical length;
    /// caller must push indices in ascending order and fix `len` afterwards.
    unsafe fn raw_push(&mut self, index: usize, value: N);
    /// Binary search among stored indices, starting at stored position `cut_down`.
    /// `Ok(pos)` when `index` is stored, `Err(insertion_pos)` otherwise.
    fn ind_binary_search(&self, index: usize, cut_down: usize) -> Result<usize, usize>;
    /// Creates an empty vector without allocating.
    fn new() -> Self;
    /// Creates an empty vector with room for `cap` stored (non-zero) entries.
    fn with_capacity(cap: usize) -> Self;
    /// Ensures capacity for at least `nnz() + additional` stored entries.
    fn reserve(&mut self, additional: usize);
    /// Shrinks the allocation down to the number of stored entries.
    fn shrink_to_fit(&mut self);
    /// True when the logical length is 0.
    fn is_empty(&self) -> bool;
    /// Logical length (implicit zeros included).
    fn len(&self) -> usize;
    /// Sets the logical length directly; caller must keep all stored indices < `len`.
    unsafe fn set_len(&mut self, len: usize);
    /// Capacity in stored entries.
    fn capacity(&self) -> usize;
    /// Number of stored (non-zero) entries.
    fn nnz(&self) -> usize;
    /// Extends the logical length by `dim` implicit zeros.
    fn add_dim(&mut self, dim: usize);
    /// Removes every logical element.
    fn clear(&mut self);
    /// Appends one logical element; zeros only bump `len`, non-zeros are stored.
    fn push(&mut self, elem: N);
    /// Removes and returns the last logical element (which may be a zero).
    fn pop(&mut self) -> Option<N>;
    /// Value at logical position `index`; unstored slots yield a shared `&zero`.
    fn get(&self, index: usize) -> Option<&N>;
    /// Like `get`, but pairs the value with its logical index.
    fn raw_get(&self, index: usize) -> Option<ValueWithIndex<'_, N>>;
    /// `get` with a search hint: the lookup starts at stored position `cut_down`.
    fn get_with_cut_down(&self, index: usize, cut_down: usize) -> Option<&N>;
    /// `raw_get` with the same `cut_down` search hint.
    fn raw_get_with_cut_down(&self, index: usize, cut_down: usize) -> Option<ValueWithIndex<'_, N>>;
    /// Logical index of the stored entry at storage position `index` (< nnz).
    fn get_ind(&self, index: usize) -> Option<usize>;
    /// Removes the element at logical `index`, shifting later elements left by one.
    fn remove(&mut self, index: usize) -> N;
    /// Builds from a dense vector, suppressing zeros.
    fn from_vec(vec: Vec<N>) -> Self;
    /// Builds from (index, value) pairs; caller guarantees strictly ascending indices < `len`.
    unsafe fn from_raw_iter(iter: impl Iterator<Item = (usize, N)>, len: usize) -> Self;
    /// Like `from_raw_iter`, but additionally skips zero values.
    unsafe fn from_sparse_iter(iter: impl Iterator<Item = (usize, N)>, len: usize) -> Self;
    /// Iterator over every logical element, zeros included.
    fn iter(&self) -> ZeroSpVecIter<'_, N>;
    /// Iterator over stored entries only, yielding (logical index, &value).
    fn raw_iter(&self) -> ZeroSpVecRawIter<'_, N>;
}
56
/// A borrowed sparse-vector element paired with its logical index,
/// as returned by `raw_get` / `raw_get_with_cut_down`.
pub struct ValueWithIndex<'a, N>
where N: Num
{
    // Logical position of the value within the vector.
    pub index: usize,
    // Borrow of the stored value (or of the shared zero).
    pub value: &'a N,
}
63
64impl<N> ZeroSpVecTrait<N> for ZeroSpVec<N>
65where N: Num
66{
67 #[inline]
68 unsafe fn ind_ptr(&self) -> *mut u32 {
69 self.buf.ind_ptr.as_ptr()
70 }
71
72 #[inline]
73 unsafe fn val_ptr(&self) -> *mut N {
74 self.buf.val_ptr.as_ptr()
75 }
76
77 #[inline]
84 unsafe fn raw_push(&mut self, index: usize, value: N) {
85 if self.nnz == self.buf.cap {
86 self.buf.grow();
87 }
88 unsafe {
89 let val_ptr = self.val_ptr().add(self.nnz);
90 let ind_ptr = self.ind_ptr().add(self.nnz);
91 ptr::write(val_ptr, value);
92 debug_assert!(index <= u32::MAX as usize, "index overflow for u32 storage");
93 ptr::write(ind_ptr, index as u32);
94 }
95 self.nnz += 1;
96 }
97
98 #[inline]
99 fn ind_binary_search(&self, index: usize, cut_down: usize) -> Result<usize, usize> {
100 if self.nnz == 0 {
102 return Err(0);
103 }
104 let mut left = cut_down;
105 let mut right = self.nnz - 1;
106 while left < right {
107 let mid = left + (right - left) / 2;
108 let mid_index = unsafe { *self.ind_ptr().add(mid) as usize };
110 if mid_index == index {
111 return Ok(mid);
112 } else if mid_index < index {
113 left = mid + 1;
114 } else {
115 right = mid;
116 }
117 }
118
119 let final_index = unsafe { *self.ind_ptr().add(left) as usize };
120 if final_index == index {
121 Ok(left)
122 } else if final_index < index {
123 Err(left + 1)
124 } else {
125 Err(left)
126 }
127 }
128
129 #[inline]
130 fn new() -> Self {
131 ZeroSpVec {
132 buf: RawZeroSpVec::new(),
133 len: 0,
134 nnz: 0,
135 zero: N::zero(),
136 }
137 }
138
139 #[inline]
140 fn with_capacity(cap: usize) -> Self {
141 let mut buf = RawZeroSpVec::new();
142 buf.cap = cap;
143 buf.cap_set();
144 ZeroSpVec {
145 buf: buf,
146 len: 0,
147 nnz: 0,
148 zero: N::zero(),
149 }
150 }
151
152 #[inline]
154 fn reserve(&mut self, additional: usize) {
155 let new_cap = self.nnz + additional;
156 if new_cap > self.buf.cap {
157 self.buf.cap = new_cap;
158 self.buf.re_cap_set();
159 }
160 }
161
162 #[inline]
164 fn shrink_to_fit(&mut self) {
165 if self.len < self.buf.cap {
166 let new_cap = self.nnz;
167 self.buf.cap = new_cap;
168 self.buf.re_cap_set();
169 }
170 }
171
172 #[inline]
173 fn is_empty(&self) -> bool {
174 self.len == 0
175 }
176
177 #[inline]
178 fn len(&self) -> usize {
179 self.len
180 }
181
182 #[inline]
183 unsafe fn set_len(&mut self, len: usize) {
184 self.len = len;
185 }
186
187 #[inline]
189 fn capacity(&self) -> usize {
190 self.buf.cap
191 }
192
193 #[inline]
195 fn nnz(&self) -> usize {
196 self.nnz
197 }
198
199 #[inline]
201 fn add_dim(&mut self, dim: usize) {
202 self.len += dim;
203 }
204
205 #[inline]
208 fn clear(&mut self) {
209 while let Some(_) = self.pop() {
210 }
212 }
213
214 #[inline]
215 fn push(&mut self, elem: N) {
216 if self.nnz == self.buf.cap {
217 self.buf.grow();
218 }
219 if elem != N::zero() {
220 unsafe {
221 let val_ptr = self.val_ptr().add(self.nnz);
222 let ind_ptr = self.ind_ptr().add(self.nnz);
223 ptr::write(val_ptr, elem);
224 debug_assert!(self.len <= u32::MAX as usize, "index overflow for u32 storage");
225 ptr::write(ind_ptr, self.len as u32);
226 }
227 self.nnz += 1;
228 }
229 self.len += 1;
230 }
231
232 #[inline]
233 fn pop(&mut self) -> Option<N> {
234 if self.nnz == 0 {
235 return None;
236 }
237 let pop_element = if self.nnz == self.len {
238 self.nnz -= 1;
239 unsafe {
240 Some(ptr::read(self.val_ptr().add(self.nnz)))
241 }
242 } else {
243 Some(N::zero())
244 };
245 self.len -= 1;
246 pop_element
247 }
248
249 #[inline]
250 fn get(&self, index: usize) -> Option<&N> {
251 if index >= self.len {
252 return None;
253 }
254 match self.ind_binary_search(index, 0) {
255 Ok(idx) => {
256 unsafe {
257 Some(&*self.val_ptr().add(idx))
258 }
259 },
260 Err(_) => {
261 Some(&self.zero)
262 }
263 }
264 }
265
266 #[inline]
267 fn raw_get(&self, index: usize) -> Option<ValueWithIndex<'_, N>> {
268 if index >= self.len {
269 return None;
270 }
271 match self.ind_binary_search(index, 0) {
272 Ok(idx) => {
273 unsafe {
274 Some(ValueWithIndex {
275 index,
276 value: &*self.val_ptr().add(idx),
277 })
278 }
279 },
280 Err(_) => {
281 Some(ValueWithIndex {
282 index,
283 value: &self.zero,
284 })
285 }
286 }
287 }
288
289 #[inline]
290 fn get_with_cut_down(&self, index: usize, cut_down: usize) -> Option<&N> {
291 if index >= self.len {
292 return None;
293 }
294 match self.ind_binary_search(index, cut_down) {
295 Ok(idx) => {
296 unsafe {
297 Some(&*self.val_ptr().add(idx))
298 }
299 },
300 Err(_) => {
301 Some(&self.zero)
302 }
303 }
304 }
305
306 #[inline]
307 fn raw_get_with_cut_down(&self, index: usize, cut_down: usize) -> Option<ValueWithIndex<'_, N>> {
308 if index >= self.len || cut_down >= self.nnz {
309 return None;
310 }
311 match self.ind_binary_search(index, cut_down) {
312 Ok(idx) => {
313 unsafe {
314 Some(ValueWithIndex {
315 index,
316 value: &*self.val_ptr().add(idx),
317 })
318 }
319 },
320 Err(_) => {
321 Some(ValueWithIndex {
322 index,
323 value: &self.zero,
324 })
325 }
326 }
327 }
328
329 #[inline]
330 fn get_ind(&self, index: usize) -> Option<usize> {
331 if index >= self.nnz {
332 return None;
333 }
334 unsafe {
335 Some(ptr::read(self.ind_ptr().add(index)) as usize)
336 }
337 }
338
339
340
341 #[inline]
353 fn remove(&mut self, index: usize) -> N {
354 debug_assert!(index < self.len, "index out of bounds");
355
356 self.len -= 1;
358
359 match self.ind_binary_search(index, 0) {
360 Ok(i) => {
361 let removed_val = unsafe {
363 ptr::read(self.val_ptr().add(i))
364 };
365
366 let count = self.nnz - i - 1;
368 if count > 0 {
369 unsafe {
370 ptr::copy(
372 self.val_ptr().add(i + 1),
373 self.val_ptr().add(i),
374 count
375 );
376 ptr::copy(
378 self.ind_ptr().add(i + 1),
379 self.ind_ptr().add(i),
380 count
381 );
382 for offset in i..(self.nnz - 1) {
384 *self.ind_ptr().add(offset) -= 1;
385 }
386 }
387 }
388 self.nnz -= 1;
390
391 removed_val
393 }
394 Err(i) => {
395 if i < self.nnz {
399 unsafe {
400 for offset in i..self.nnz {
401 *self.ind_ptr().add(offset) -= 1;
402 }
403 }
404 }
405
406 N::zero()
408 }
409 }
410 }
411
412 #[inline]
414 fn from_vec(vec: Vec<N>) -> Self {
415 let mut zero_sp_vec = ZeroSpVec::with_capacity(vec.len());
416 for entry in vec {
417 zero_sp_vec.push(entry);
418 }
419 zero_sp_vec
420 }
421
422 #[inline]
425 unsafe fn from_raw_iter(iter: impl Iterator<Item = (usize, N)>, len: usize) -> Self {
426 let mut zero_sp_vec = ZeroSpVec::with_capacity(iter.size_hint().0);
427 for (index, value) in iter {
428 unsafe {
429 zero_sp_vec.raw_push(index, value);
430 }
431 }
432 zero_sp_vec.len = len;
433 zero_sp_vec.shrink_to_fit();
434 zero_sp_vec
435 }
436
437 #[inline]
440 unsafe fn from_sparse_iter(iter: impl Iterator<Item = (usize, N)>, len: usize) -> Self {
441 let mut zero_sp_vec = ZeroSpVec::with_capacity(iter.size_hint().0);
442 for (index, value) in iter {
443 if value != N::zero() {
444 unsafe {
445 zero_sp_vec.raw_push(index, value);
446 }
447 }
448 }
449 zero_sp_vec.len = len;
450 zero_sp_vec.shrink_to_fit();
451 zero_sp_vec
452 }
453
454 #[inline]
457 fn iter(&self) -> ZeroSpVecIter<'_, N> {
458 ZeroSpVecIter {
459 vec: self,
460 pos: 0,
461 }
462 }
463
464 #[inline]
467 fn raw_iter(&self) -> ZeroSpVecRawIter<'_, N> {
468 ZeroSpVecRawIter {
469 vec: self,
470 pos: 0,
471 }
472 }
473}
474
// SAFETY: ZeroSpVec exclusively owns its heap buffers (no aliasing, no
// interior mutability), so it may be sent/shared across threads whenever
// the element type N itself is Send/Sync.
unsafe impl <N: Num + Send> Send for ZeroSpVec<N> {}
unsafe impl <N: Num + Sync> Sync for ZeroSpVec<N> {}
477
impl<N> Clone for ZeroSpVec<N>
where N: Num
{
    /// Clones by duplicating the raw buffers and copying the counters.
    ///
    /// NOTE(review): `RawZeroSpVec::clone` bitwise-copies all `cap` slots,
    /// including the uninitialized tail beyond `nnz`, and never calls
    /// `N::clone`. This looks sound only for plain-old-data `N` — confirm
    /// before instantiating with an `N` that has a non-trivial Clone/Drop.
    #[inline]
    fn clone(&self) -> Self {
        ZeroSpVec {
            buf: self.buf.clone(),
            len: self.len,
            nnz: self.nnz,
            zero: N::zero(),
        }
    }
}
491
impl<N> Drop for ZeroSpVec<N>
where N: Num
{
    /// Intentionally empty: buffer memory is released by `RawZeroSpVec::drop`.
    ///
    /// NOTE(review): the stored `N` values themselves are never dropped here,
    /// which leaks resources if `N` has a non-trivial `Drop`. Fixing this
    /// naively would double-free against the bitwise `Clone` above — both
    /// sites assume plain-old-data `N`; confirm that assumption.
    #[inline]
    fn drop(&mut self) {
    }
}
500
501impl<N> Default for ZeroSpVec<N>
502where N: Num
503{
504 #[inline]
505 fn default() -> Self {
506 ZeroSpVec::new()
507 }
508}
509
510impl<N> Index<usize> for ZeroSpVec<N>
511where N: Num
512{
513 type Output = N;
514
515 #[inline]
516 fn index(&self, index: usize) -> &Self::Output {
517 self.get(index).expect("index out of bounds")
518 }
519}
520
521impl<N: Num + Debug> Debug for ZeroSpVec<N> {
522 #[inline]
523 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
524 if f.sign_plus() {
525 f.debug_struct("DefaultSparseVec")
526 .field("buf", &self.buf)
527 .field("nnz", &self.nnz)
528 .field("len", &self.len)
529 .field("zero", &self.zero)
530 .finish()
531 } else if f.alternate() {
532 write!(f, "ZeroSpVec({:?})", self.iter().collect::<Vec<&N>>())
533 } else {
534 f.debug_list().entries((0..self.len).map(|i| self.get(i).unwrap())).finish()
535 }
536 }
537}
538
/// Borrowing iterator over every logical element of a `ZeroSpVec`,
/// implicit zeros included.
#[derive(Clone, Copy)]
pub struct ZeroSpVecIter<'a, N>
where N: Num
{
    // Vector being iterated.
    vec: &'a ZeroSpVec<N>,
    // Next logical position to yield (0..vec.len).
    pos: usize,
}
546
547impl<'a, N> Iterator for ZeroSpVecIter<'a, N>
548where N: Num
549{
550 type Item = &'a N;
551
552 #[inline]
553 fn next(&mut self) -> Option<Self::Item> {
554 self.vec.get(self.pos).map(|val| {
555 self.pos += 1;
556 val
557 })
558 }
559}
560
/// Borrowing iterator over only the stored (non-zero) entries of a
/// `ZeroSpVec`, yielding `(logical_index, &value)` pairs.
#[derive(Clone, Copy)]
pub struct ZeroSpVecRawIter<'a, N>
where N: Num
{
    // Vector being iterated.
    vec: &'a ZeroSpVec<N>,
    // Next storage position to yield (0..vec.nnz).
    pos: usize,
}
568
impl<'a, N> Iterator for ZeroSpVecRawIter<'a, N>
where N: Num
{
    type Item = (usize, &'a N);

    /// Yields the next stored entry as (logical index, &value).
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.pos < self.vec.nnz() {
            // SAFETY: pos < nnz, so both parallel slots are initialized.
            let index = unsafe { *self.vec.ind_ptr().add(self.pos) as usize };
            let value = unsafe { &*self.vec.val_ptr().add(self.pos) };
            self.pos += 1;
            Some((index, value))
        } else {
            None
        }
    }

    /// O(1) skip: jumps `n` positions ahead, then yields (consuming n+1
    /// items total, per the Iterator::nth contract). On overflow or
    /// exhaustion the iterator is pinned to the end.
    #[inline]
    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        let nnz = self.vec.nnz();
        match self.pos.checked_add(n) {
            Some(new_pos) if new_pos < nnz => {
                self.pos = new_pos;
                self.next()
            }
            _ => {
                self.pos = nnz;
                None
            }
        }
    }

    /// Exact bounds: one item per remaining stored entry.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.vec.nnz().saturating_sub(self.pos);
        (remaining, Some(remaining))
    }

}
608
609impl<'a, N> ExactSizeIterator for ZeroSpVecRawIter<'a, N>
610where
611 N: Num,
612{
613 #[inline]
614 fn len(&self) -> usize {
615 self.vec.nnz().saturating_sub(self.pos)
616 }
617}
618
// Once `pos` reaches `nnz`, `next` returns `None` forever (`pos` never
// decreases), so the fused contract holds.
impl<'a, N> std::iter::FusedIterator for ZeroSpVecRawIter<'a, N>
where
    N: Num,
{
}
624
625impl<T> From<Vec<T>> for ZeroSpVec<T>
626where T: Num
627{
628 #[inline]
629 fn from(vec: Vec<T>) -> Self {
630 ZeroSpVec::from_vec(vec)
631 }
632}
633
634impl<'a, N> From<ZeroSpVecRawIter<'a, N>> for ZeroSpVec<N>
635where
636 N: Num + Copy,
637{
638 #[inline]
639 fn from(iter: ZeroSpVecRawIter<'a, N>) -> Self {
640 let mut vec = ZeroSpVec::new();
641 for (idx, val) in iter {
642 unsafe {
643 vec.raw_push(idx, *val);
644 }
645 vec.len += 1;
646 }
647 vec
648 }
649}
650
651
652
653
654
655
656
657
658
/// Raw parallel buffers backing `ZeroSpVec`: one heap block for the values
/// and one for the `u32` logical indices, both sized for `cap` elements.
#[derive(Debug)]
struct RawZeroSpVec<N>
where N: Num
{
    // Value buffer (dangling while cap == 0 / for zero-sized N).
    val_ptr: NonNull<N>,
    // Index buffer, parallel to val_ptr.
    ind_ptr: NonNull<u32>,
    // Capacity in elements; usize::MAX is the sentinel for zero-sized N.
    cap: usize,
    // Marks logical ownership of N values (drop-check / variance).
    _marker: PhantomData<N>,
}
673
674impl<N> RawZeroSpVec<N>
675where N: Num
676{
677 #[inline]
678 fn new() -> Self {
679 let cap = if mem::size_of::<N>() == 0 { std::usize::MAX } else { 0 };
681
682 RawZeroSpVec {
683 val_ptr: NonNull::dangling(),
685 ind_ptr: NonNull::dangling(),
687 cap: cap,
688 _marker: PhantomData,
689 }
690 }
691
692 #[inline]
693 fn grow(&mut self) {
694 unsafe {
695 let val_elem_size = mem::size_of::<N>();
696 let ind_elem_size = mem::size_of::<u32>();
697
698 debug_assert!(val_elem_size != 0, "capacity overflow");
701
702 let t_align = mem::align_of::<N>();
704 let usize_align = mem::align_of::<u32>();
705
706 let (new_cap, val_ptr, ind_ptr): (usize, *mut N, *mut u32) =
708 if self.cap == 0 {
709 let new_val_layout = Layout::from_size_align(val_elem_size, t_align).expect("Failed to create memory layout");
710 let new_ind_layout = Layout::from_size_align(ind_elem_size, usize_align).expect("Failed to create memory layout");
711 (
712 1,
713 alloc(new_val_layout) as *mut N,
714 alloc(new_ind_layout) as *mut u32,
715 )
716 } else {
717 let new_cap = self.cap * 2;
719 let new_val_layout = Layout::from_size_align(val_elem_size * self.cap, t_align).expect("Failed to create memory layout for reallocation");
720 let new_ind_layout = Layout::from_size_align(ind_elem_size * self.cap, usize_align).expect("Failed to create memory layout for reallocation");
721 (
722 new_cap,
723 realloc(self.val_ptr.as_ptr() as *mut u8, new_val_layout, val_elem_size * new_cap) as *mut N,
724 realloc(self.ind_ptr.as_ptr() as *mut u8, new_ind_layout, ind_elem_size * new_cap) as *mut u32,
725 )
726 };
727
728 if val_ptr.is_null() || ind_ptr.is_null() {
730 oom();
731 }
732
733 self.val_ptr = NonNull::new_unchecked(val_ptr);
735 self.ind_ptr = NonNull::new_unchecked(ind_ptr);
736 self.cap = new_cap;
737 }
738 }
739
740 #[inline]
741 fn cap_set(&mut self) {
742 unsafe {
743 let val_elem_size = mem::size_of::<N>();
744 let ind_elem_size = mem::size_of::<u32>();
745
746 let t_align = mem::align_of::<N>();
747 let usize_align = mem::align_of::<u32>();
748
749 let new_val_layout = Layout::from_size_align(val_elem_size * self.cap, t_align).expect("Failed to create memory layout");
750 let new_ind_layout = Layout::from_size_align(ind_elem_size * self.cap, usize_align).expect("Failed to create memory layout");
751 let new_val_ptr = alloc(new_val_layout) as *mut N;
752 let new_ind_ptr = alloc(new_ind_layout) as *mut u32;
753 if new_val_ptr.is_null() || new_ind_ptr.is_null() {
754 oom();
755 }
756 self.val_ptr = NonNull::new_unchecked(new_val_ptr);
757 self.ind_ptr = NonNull::new_unchecked(new_ind_ptr);
758 }
759 }
760
761 #[inline]
762 fn re_cap_set(&mut self) {
763 unsafe {
764 let val_elem_size = mem::size_of::<N>();
765 let ind_elem_size = mem::size_of::<u32>();
766
767 let t_align = mem::align_of::<N>();
768 let usize_align = mem::align_of::<u32>();
769
770 let new_val_layout = Layout::from_size_align(val_elem_size * self.cap, t_align).expect("Failed to create memory layout");
771 let new_ind_layout = Layout::from_size_align(ind_elem_size * self.cap, usize_align).expect("Failed to create memory layout");
772 let new_val_ptr = realloc(self.val_ptr.as_ptr() as *mut u8, new_val_layout, val_elem_size * self.cap) as *mut N;
773 let new_ind_ptr = realloc(self.ind_ptr.as_ptr() as *mut u8, new_ind_layout, ind_elem_size * self.cap) as *mut u32;
774 if new_val_ptr.is_null() || new_ind_ptr.is_null() {
775 oom();
776 }
777 self.val_ptr = NonNull::new_unchecked(new_val_ptr);
778 self.ind_ptr = NonNull::new_unchecked(new_ind_ptr);
779 }
780 }
781}
782
impl<N> Clone for RawZeroSpVec<N>
where N: Num
{
    /// Duplicates the buffers with a bitwise copy of all `cap` slots.
    ///
    /// NOTE(review): this copies the entire capacity, including slots beyond
    /// the owner's `nnz` that were never initialized, and never invokes
    /// `N::clone` — the raw buffer cannot know how many slots are live.
    /// Sound only for plain-old-data `N`; confirm before using non-trivial
    /// element types.
    #[inline]
    fn clone(&self) -> Self {
        unsafe {
            // Unallocated buffer (cap == 0) or ZST sentinel (usize::MAX):
            // nothing on the heap to copy.
            if self.cap == 0 || self.cap == usize::MAX {
                return RawZeroSpVec {
                    val_ptr: NonNull::dangling(),
                    ind_ptr: NonNull::dangling(),
                    cap: self.cap,
                    _marker: PhantomData,
                };
            }

            let val_elem_size = mem::size_of::<N>();
            let ind_elem_size = mem::size_of::<u32>();

            let t_align = mem::align_of::<N>();
            let usize_align = mem::align_of::<u32>();

            // Allocate fresh blocks of identical capacity, then memcpy.
            let new_val_layout = Layout::from_size_align(val_elem_size * self.cap, t_align).expect("Failed to create memory layout");
            let new_ind_layout = Layout::from_size_align(ind_elem_size * self.cap, usize_align).expect("Failed to create memory layout");
            let new_val_ptr = alloc(new_val_layout) as *mut N;
            let new_ind_ptr = alloc(new_ind_layout) as *mut u32;
            if new_val_ptr.is_null() || new_ind_ptr.is_null() {
                oom();
            }
            ptr::copy_nonoverlapping(self.val_ptr.as_ptr(), new_val_ptr, self.cap);
            ptr::copy_nonoverlapping(self.ind_ptr.as_ptr(), new_ind_ptr, self.cap);

            RawZeroSpVec {
                val_ptr: NonNull::new_unchecked(new_val_ptr),
                ind_ptr: NonNull::new_unchecked(new_ind_ptr),
                cap: self.cap,
                _marker: PhantomData,
            }
        }
    }
}
825
// SAFETY: the raw buffer is exclusively owned (no aliasing); thread-safety
// reduces to that of the element type N.
unsafe impl<N: Num + Send> Send for RawZeroSpVec<N> {}
unsafe impl<N: Num + Sync> Sync for RawZeroSpVec<N> {}
828
impl<N> Drop for RawZeroSpVec<N>
where N: Num
{
    /// Deallocates both parallel blocks using the layout implied by `cap`.
    /// Element values are NOT dropped here — the raw buffer does not know
    /// how many slots are initialized.
    #[inline]
    fn drop(&mut self) {
        unsafe {
            // cap == 0: never allocated; cap == usize::MAX: ZST sentinel.
            if self.cap == 0 || self.cap == usize::MAX {
                return;
            }

            let val_elem_size = mem::size_of::<N>();
            let ind_elem_size = mem::size_of::<u32>();

            let t_align = mem::align_of::<N>();
            let usize_align = mem::align_of::<u32>();

            // Rebuild the same layouts the blocks were allocated with
            // (size/align identical to the allocation-time layouts).
            let new_val_layout = Layout::from_size_align(val_elem_size * self.cap, t_align).expect("Failed to create memory layout");
            let new_ind_layout = Layout::from_size_align(ind_elem_size * self.cap, usize_align).expect("Failed to create memory layout");
            dealloc(self.val_ptr.as_ptr() as *mut u8, new_val_layout);
            dealloc(self.ind_ptr.as_ptr() as *mut u8, new_ind_layout);
        }
    }
}
853
/// Terminates the process on allocation failure.
///
/// FIX: declared `-> !` so the compiler knows control never returns —
/// callers previously appeared (to the type system) to fall through to
/// `NonNull::new_unchecked` on a null pointer after calling this.
#[cold]
fn oom() -> ! {
    ::std::process::exit(-9999)
}