1pub mod serde;
3
4use std::{alloc::{alloc, dealloc, realloc, Layout}, fmt, marker::PhantomData, mem, ptr::{self, NonNull}};
5use std::ops::Index;
6use std::fmt::Debug;
7
8use num::Num;
/// Sparse vector that stores only its non-zero elements.
///
/// Logically behaves like a `Vec<N>` of length `len`, but only the `nnz`
/// non-zero entries are physically kept, as parallel (u32 index, value)
/// arrays inside `buf`. Because indices are stored as `u32`, logical
/// positions must fit in `u32` (asserted in debug builds on push).
pub struct ZeroSpVec<N>
where N: Num
{
    // Parallel raw buffers; only the first `nnz` slots of each are initialized.
    buf: RawZeroSpVec<N>,
    // Logical number of elements, implicit zeros included.
    len: usize,
    // Number of explicitly stored (non-zero) entries.
    nnz: usize,
    // Cached zero so `get` can hand out `&N` for unstored positions.
    zero: N,
}
23
/// Interface of a zero-sparse vector: a logical `Vec<N>`-like container
/// that physically stores only non-zero entries as (u32 index, value) pairs.
pub trait ZeroSpVecTrait<N>: Clone + Default + Index<usize, Output = N>
where N: Num
{
    /// Raw pointer to the index buffer. Caller must stay within the first `nnz` slots.
    unsafe fn ind_ptr(&self) -> *mut u32;
    /// Raw pointer to the value buffer. Caller must stay within the first `nnz` slots.
    unsafe fn val_ptr(&self) -> *mut N;
    /// Appends an (index, value) pair to storage without updating the logical length.
    /// Indices must be pushed in ascending order; the caller fixes `len` afterwards.
    unsafe fn raw_push(&mut self, index: usize, value: N);
    /// Binary search over stored indices, restricted to storage positions `cut_down..`.
    /// `Ok(pos)` = found at storage position `pos`; `Err(pos)` = insertion point.
    fn ind_binary_search(&self, index: usize, cut_down: usize) -> Result<usize, usize>;
    /// Creates an empty vector without allocating.
    fn new() -> Self;
    /// Creates an empty vector with room for `cap` stored entries.
    fn with_capacity(cap: usize) -> Self;
    /// Ensures capacity for at least `nnz + additional` stored entries.
    fn reserve(&mut self, additional: usize);
    /// Shrinks the allocation toward the number of stored entries.
    fn shrink_to_fit(&mut self);
    /// True when the logical length is zero.
    fn is_empty(&self) -> bool;
    /// Logical length, implicit zeros included.
    fn len(&self) -> usize;
    /// Mutable access to the logical length; callers must keep stored indices `< len`.
    fn len_mut(&mut self) -> &mut usize;
    /// Number of stored entries the buffers can hold without reallocating.
    fn capacity(&self) -> usize;
    /// Number of explicitly stored (non-zero) entries.
    fn nnz(&self) -> usize;
    /// Extends the logical length by `dim` implicit zeros.
    fn add_dim(&mut self, dim: usize);
    /// Removes all elements.
    fn clear(&mut self);
    /// Appends one logical element; zero values only bump the length.
    fn push(&mut self, elem: N);
    /// Removes and returns the last logical element.
    fn pop(&mut self) -> Option<N>;
    /// Element at logical `index`; unstored positions yield a shared zero.
    fn get(&self, index: usize) -> Option<&N>;
    /// Like `get`, but pairs the value with its logical index.
    fn raw_get(&self, index: usize) -> Option<ValueWithIndex<N>>;
    /// `get` that skips storage positions before `cut_down` (ascending-scan hint).
    fn get_with_cut_down(&self, index: usize, cut_down: usize) -> Option<&N>;
    /// `raw_get` with the same `cut_down` search hint.
    fn raw_get_with_cut_down(&self, index: usize, cut_down: usize) -> Option<ValueWithIndex<N>>;
    /// Logical index of the `index`-th stored entry (storage order).
    fn get_ind(&self, index: usize) -> Option<usize>;
    /// Removes the element at logical `index`, shifting later elements down.
    fn remove(&mut self, index: usize) -> N;
    /// Builds a sparse vector from a dense `Vec`.
    fn from_vec(vec: Vec<N>) -> Self;
    /// Builds from (index, value) pairs; see the impl for the safety contract.
    unsafe fn from_raw_iter(iter: impl Iterator<Item = (usize, N)>, len: usize) -> Self;
    /// Like `from_raw_iter`, but zero values are skipped instead of stored.
    unsafe fn from_sparse_iter(iter: impl Iterator<Item = (usize, N)>, len: usize) -> Self;
    /// Iterator over all logical elements, zeros included.
    fn iter(&self) -> ZeroSpVecIter<N>;
    /// Iterator over stored (index, value) pairs only.
    fn raw_iter(&self) -> ZeroSpVecRawIter<N>;
}
56
/// A borrowed element paired with its logical index, as returned by the
/// `raw_get*` accessors. The borrow is tied to the owning vector's lifetime.
pub struct ValueWithIndex<'a, N>
where N: Num
{
    // Logical position in the vector (not the storage slot).
    pub index: usize,
    // Borrowed value; may point at the vector's shared zero for unstored positions.
    pub value: &'a N,
}
63
64impl<N> ZeroSpVecTrait<N> for ZeroSpVec<N>
65where N: Num
66{
67 #[inline]
68 unsafe fn ind_ptr(&self) -> *mut u32 {
69 self.buf.ind_ptr.as_ptr()
70 }
71
72 #[inline]
73 unsafe fn val_ptr(&self) -> *mut N {
74 self.buf.val_ptr.as_ptr()
75 }
76
77 #[inline]
84 unsafe fn raw_push(&mut self, index: usize, value: N) {
85 if self.nnz == self.buf.cap {
86 self.buf.grow();
87 }
88 unsafe {
89 let val_ptr = self.val_ptr().add(self.nnz);
90 let ind_ptr = self.ind_ptr().add(self.nnz);
91 ptr::write(val_ptr, value);
92 debug_assert!(index <= u32::MAX as usize, "index overflow for u32 storage");
93 ptr::write(ind_ptr, index as u32);
94 }
95 self.nnz += 1;
96 }
97
98 #[inline]
99 fn ind_binary_search(&self, index: usize, cut_down: usize) -> Result<usize, usize> {
100 if self.nnz == 0 {
102 return Err(0);
103 }
104
105 let mut left = cut_down;
106 let mut right = self.nnz - 1;
107 while left < right {
108 let mid = left + (right - left) / 2;
109 let mid_index = unsafe { ptr::read(self.ind_ptr().add(mid)) as usize };
110 if mid_index == index {
111 return Ok(mid);
112 } else if mid_index < index {
113 left = mid + 1;
114 } else {
115 right = mid;
116 }
117 }
118
119 let final_index = unsafe { ptr::read(self.ind_ptr().add(left)) as usize };
121 if final_index == index {
122 Ok(left)
123 } else if final_index < index {
124 Err(left + 1)
125 } else {
126 Err(left)
127 }
128 }
129
130 #[inline]
131 fn new() -> Self {
132 ZeroSpVec {
133 buf: RawZeroSpVec::new(),
134 len: 0,
135 nnz: 0,
136 zero: N::zero(),
137 }
138 }
139
140 #[inline]
141 fn with_capacity(cap: usize) -> Self {
142 let mut buf = RawZeroSpVec::new();
143 buf.cap = cap;
144 buf.cap_set();
145 ZeroSpVec {
146 buf: buf,
147 len: 0,
148 nnz: 0,
149 zero: N::zero(),
150 }
151 }
152
153 #[inline]
154 fn reserve(&mut self, additional: usize) {
155 let new_cap = self.nnz + additional;
156 if new_cap > self.buf.cap {
157 self.buf.cap = new_cap;
158 self.buf.re_cap_set();
159 }
160 }
161
162 #[inline]
163 fn shrink_to_fit(&mut self) {
164 if self.len < self.buf.cap {
165 let new_cap = self.nnz;
166 self.buf.cap = new_cap;
167 self.buf.re_cap_set();
168 }
169 }
170
171 #[inline]
172 fn is_empty(&self) -> bool {
173 self.len == 0
174 }
175
176 #[inline]
177 fn len(&self) -> usize {
178 self.len
179 }
180
181 #[inline]
182 fn len_mut(&mut self) -> &mut usize {
183 &mut self.len
184 }
185
186 #[inline]
187 fn capacity(&self) -> usize {
188 self.buf.cap
189 }
190
191 #[inline]
192 fn nnz(&self) -> usize {
193 self.nnz
194 }
195
196 #[inline]
197 fn add_dim(&mut self, dim: usize) {
198 self.len += dim;
199 }
200
201 #[inline]
202 fn clear(&mut self) {
203 while let Some(_) = self.pop() {
204 }
206 }
207
208 #[inline]
209 fn push(&mut self, elem: N) {
210 if self.nnz == self.buf.cap {
211 self.buf.grow();
212 }
213 if elem != N::zero() {
214 unsafe {
215 let val_ptr = self.val_ptr().add(self.nnz);
216 let ind_ptr = self.ind_ptr().add(self.nnz);
217 ptr::write(val_ptr, elem);
218 debug_assert!(self.len <= u32::MAX as usize, "index overflow for u32 storage");
219 ptr::write(ind_ptr, self.len as u32);
220 }
221 self.nnz += 1;
222 }
223 self.len += 1;
224 }
225
226 #[inline]
227 fn pop(&mut self) -> Option<N> {
228 if self.nnz == 0 {
229 return None;
230 }
231 let pop_element = if self.nnz == self.len {
232 self.nnz -= 1;
233 unsafe {
234 Some(ptr::read(self.val_ptr().add(self.nnz)))
235 }
236 } else {
237 Some(N::zero())
238 };
239 self.len -= 1;
240 pop_element
241 }
242
243 #[inline]
244 fn get(&self, index: usize) -> Option<&N> {
245 if index >= self.len {
246 return None;
247 }
248 match self.ind_binary_search(index, 0) {
249 Ok(idx) => {
250 unsafe {
251 Some(&*self.val_ptr().add(idx))
252 }
253 },
254 Err(_) => {
255 Some(&self.zero)
256 }
257 }
258 }
259
260 #[inline]
261 fn raw_get(&self, index: usize) -> Option<ValueWithIndex<N>> {
262 if index >= self.len {
263 return None;
264 }
265 match self.ind_binary_search(index, 0) {
266 Ok(idx) => {
267 unsafe {
268 Some(ValueWithIndex {
269 index,
270 value: &*self.val_ptr().add(idx),
271 })
272 }
273 },
274 Err(_) => {
275 Some(ValueWithIndex {
276 index,
277 value: &self.zero,
278 })
279 }
280 }
281 }
282
283 #[inline]
284 fn get_with_cut_down(&self, index: usize, cut_down: usize) -> Option<&N> {
285 if index >= self.len {
286 return None;
287 }
288 match self.ind_binary_search(index, cut_down) {
289 Ok(idx) => {
290 unsafe {
291 Some(&*self.val_ptr().add(idx))
292 }
293 },
294 Err(_) => {
295 Some(&self.zero)
296 }
297 }
298 }
299
300 #[inline]
301 fn raw_get_with_cut_down(&self, index: usize, cut_down: usize) -> Option<ValueWithIndex<N>> {
302 if index >= self.len {
303 return None;
304 }
305 match self.ind_binary_search(index, cut_down) {
306 Ok(idx) => {
307 unsafe {
308 Some(ValueWithIndex {
309 index,
310 value: &*self.val_ptr().add(idx),
311 })
312 }
313 },
314 Err(_) => {
315 Some(ValueWithIndex {
316 index,
317 value: &self.zero,
318 })
319 }
320 }
321 }
322
323 #[inline]
324 fn get_ind(&self, index: usize) -> Option<usize> {
325 if index >= self.nnz {
326 return None;
327 }
328 unsafe {
329 Some(ptr::read(self.ind_ptr().add(index)) as usize)
330 }
331 }
332
333
334
335 #[inline]
347 fn remove(&mut self, index: usize) -> N {
348 debug_assert!(index < self.len, "index out of bounds");
349
350 self.len -= 1;
352
353 match self.ind_binary_search(index, 0) {
354 Ok(i) => {
355 let removed_val = unsafe {
357 ptr::read(self.val_ptr().add(i))
358 };
359
360 let count = self.nnz - i - 1;
362 if count > 0 {
363 unsafe {
364 ptr::copy(
366 self.val_ptr().add(i + 1),
367 self.val_ptr().add(i),
368 count
369 );
370 ptr::copy(
372 self.ind_ptr().add(i + 1),
373 self.ind_ptr().add(i),
374 count
375 );
376 for offset in i..(self.nnz - 1) {
378 *self.ind_ptr().add(offset) -= 1;
379 }
380 }
381 }
382 self.nnz -= 1;
384
385 removed_val
387 }
388 Err(i) => {
389 if i < self.nnz {
393 unsafe {
394 for offset in i..self.nnz {
395 *self.ind_ptr().add(offset) -= 1;
396 }
397 }
398 }
399
400 N::zero()
402 }
403 }
404 }
405
406 #[inline]
407 fn from_vec(vec: Vec<N>) -> Self {
408 let mut zero_sp_vec = ZeroSpVec::with_capacity(vec.len());
409 for entry in vec {
410 zero_sp_vec.push(entry);
411 }
412 zero_sp_vec
413 }
414
415 #[inline]
417 unsafe fn from_raw_iter(iter: impl Iterator<Item = (usize, N)>, len: usize) -> Self {
418 let mut zero_sp_vec = ZeroSpVec::with_capacity(iter.size_hint().0);
419 for (index, value) in iter {
420 unsafe {
421 zero_sp_vec.raw_push(index, value);
422 }
423 }
424 zero_sp_vec.len = len;
425 zero_sp_vec
426 }
427
428 #[inline]
432 unsafe fn from_sparse_iter(iter: impl Iterator<Item = (usize, N)>, len: usize) -> Self {
433 let mut zero_sp_vec = ZeroSpVec::new();
434 for (index, value) in iter {
435 if value != N::zero() {
436 unsafe {
437 zero_sp_vec.raw_push(index, value);
438 }
439 }
440 }
441 zero_sp_vec.len = len;
442 zero_sp_vec
443 }
444
445 #[inline]
446 fn iter(&self) -> ZeroSpVecIter<N> {
447 ZeroSpVecIter {
448 vec: self,
449 pos: 0,
450 }
451 }
452
453 #[inline]
454 fn raw_iter(&self) -> ZeroSpVecRawIter<N> {
455 ZeroSpVecRawIter {
456 vec: self,
457 pos: 0,
458 }
459 }
460}
461
// SAFETY: ZeroSpVec exclusively owns the heap buffers behind its raw
// pointers (no aliasing, no interior mutability), so transferring or
// sharing it across threads is sound exactly when it is for the element
// type N — the same bounds `Vec<N>` receives automatically.
unsafe impl <N: Num + Send> Send for ZeroSpVec<N> {}
unsafe impl <N: Num + Sync> Sync for ZeroSpVec<N> {}
464
465impl<N> Clone for ZeroSpVec<N>
466where N: Num
467{
468 #[inline]
469 fn clone(&self) -> Self {
470 ZeroSpVec {
471 buf: self.buf.clone(),
472 len: self.len,
473 nnz: self.nnz,
474 zero: N::zero(),
475 }
476 }
477}
478
impl<N> Drop for ZeroSpVec<N>
where N: Num
{
    // Intentionally empty: buffer memory itself is released by
    // RawZeroSpVec's Drop.
    // NOTE(review): the stored `N` values are never dropped anywhere, which
    // leaks if `N` owns heap memory (e.g. a bignum type). Together with the
    // bitwise copy in RawZeroSpVec::clone, this code only appears sound
    // under an implicit "N is trivially copyable" assumption — confirm and
    // consider documenting/bounding that at the type level.
    #[inline]
    fn drop(&mut self) {
    }
}
487
488impl<N> Default for ZeroSpVec<N>
489where N: Num
490{
491 #[inline]
492 fn default() -> Self {
493 ZeroSpVec::new()
494 }
495}
496
497impl<N> Index<usize> for ZeroSpVec<N>
498where N: Num
499{
500 type Output = N;
501
502 #[inline]
503 fn index(&self, index: usize) -> &Self::Output {
504 self.get(index).expect("index out of bounds")
505 }
506}
507
508impl<N: Num + Debug> Debug for ZeroSpVec<N> {
509 #[inline]
510 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
511 if f.sign_plus() {
512 f.debug_struct("DefaultSparseVec")
513 .field("buf", &self.buf)
514 .field("nnz", &self.nnz)
515 .field("len", &self.len)
516 .field("zero", &self.zero)
517 .finish()
518 } else if f.alternate() {
519 write!(f, "ZeroSpVec({:?})", self.iter().collect::<Vec<&N>>())
520 } else {
521 f.debug_list().entries((0..self.len).map(|i| self.get(i).unwrap())).finish()
522 }
523 }
524}
525
/// Iterator over every logical element of a `ZeroSpVec`, including the
/// implicit zeros (yielded as references to the vector's shared zero).
pub struct ZeroSpVecIter<'a, N>
where N: Num
{
    // Borrowed source vector.
    vec: &'a ZeroSpVec<N>,
    // Next logical index to yield (0..len).
    pos: usize,
}
532
533impl<'a, N> Iterator for ZeroSpVecIter<'a, N>
534where N: Num
535{
536 type Item = &'a N;
537
538 #[inline]
539 fn next(&mut self) -> Option<Self::Item> {
540 self.vec.get(self.pos).map(|val| {
541 self.pos += 1;
542 val
543 })
544 }
545}
546
/// Iterator over only the stored (non-zero) entries of a `ZeroSpVec`,
/// yielding `(logical index, &value)` pairs in storage order.
pub struct ZeroSpVecRawIter<'a, N>
where N: Num
{
    // Borrowed source vector.
    vec: &'a ZeroSpVec<N>,
    // Next storage slot to yield (0..nnz).
    pos: usize,
}
553
554impl<'a, N> Iterator for ZeroSpVecRawIter<'a, N>
555where N: Num
556{
557 type Item = (usize, &'a N);
558
559 #[inline]
560 fn next(&mut self) -> Option<Self::Item> {
561 if self.pos < self.vec.nnz() {
562 let index = unsafe { *self.vec.ind_ptr().add(self.pos) as usize };
563 let value = unsafe { &*self.vec.val_ptr().add(self.pos) };
564 self.pos += 1;
565 Some((index, value))
566 } else {
567 None
568 }
569 }
570
571 #[inline]
572 fn size_hint(&self) -> (usize, Option<usize>) {
573 (self.vec.nnz().saturating_sub(self.pos), Some(self.vec.len()))
574 }
575}
576
577impl<T> From<Vec<T>> for ZeroSpVec<T>
578where T: Num
579{
580 #[inline]
581 fn from(vec: Vec<T>) -> Self {
582 ZeroSpVec::from_vec(vec)
583 }
584}
585
586impl<'a, N> From<ZeroSpVecRawIter<'a, N>> for ZeroSpVec<N>
587where
588 N: Num + Copy,
589{
590 #[inline]
591 fn from(iter: ZeroSpVecRawIter<'a, N>) -> Self {
592 let mut vec = ZeroSpVec::new();
593 for (idx, val) in iter {
594 unsafe {
595 vec.raw_push(idx, *val);
596 }
597 vec.len += 1;
598 }
599 vec
600 }
601}
602
603
604
605
606
607
608
609
610
/// Raw storage: two parallel heap buffers, one for values and one for
/// their `u32` logical indices.
///
/// `cap == 0` means "nothing allocated" (both pointers dangle) and
/// `cap == usize::MAX` is the zero-sized-type sentinel installed by
/// `new()`; `Clone` and `Drop` skip real work in both cases.
#[derive(Debug)]
struct RawZeroSpVec<N>
where N: Num
{
    val_ptr: NonNull<N>,
    ind_ptr: NonNull<u32>,
    cap: usize,
    // NOTE(review): redundant — `NonNull<N>` already makes this struct
    // covariant in N; kept for explicitness.
    _marker: PhantomData<N>,
}
625
impl<N> RawZeroSpVec<N>
where N: Num
{
    /// Unallocated state. For zero-sized `N` the capacity is pinned to
    /// `usize::MAX` as a "never allocate" sentinel.
    #[inline]
    fn new() -> Self {
        let cap = if mem::size_of::<N>() == 0 { std::usize::MAX } else { 0 };

        RawZeroSpVec {
            val_ptr: NonNull::dangling(),
            ind_ptr: NonNull::dangling(),
            cap: cap,
            _marker: PhantomData,
        }
    }

    /// Doubles the capacity of both buffers (first call: cap 0 -> 1).
    ///
    /// NOTE(review): `self.cap * 2` and the byte sizes are unchecked and
    /// can overflow for very large capacities — confirm upstream bounds.
    #[inline]
    fn grow(&mut self) {
        unsafe {
            let val_elem_size = mem::size_of::<N>();
            let ind_elem_size = mem::size_of::<u32>();

            // ZSTs must never reach here (their cap sentinel is usize::MAX,
            // so the nnz == cap trigger cannot fire in practice).
            debug_assert!(val_elem_size != 0, "capacity overflow");

            let t_align = mem::align_of::<N>();
            let usize_align = mem::align_of::<u32>();

            let (new_cap, val_ptr, ind_ptr): (usize, *mut N, *mut u32) =
                if self.cap == 0 {
                    // First allocation: room for exactly one element per buffer.
                    let new_val_layout = Layout::from_size_align(val_elem_size, t_align).expect("Failed to create memory layout");
                    let new_ind_layout = Layout::from_size_align(ind_elem_size, usize_align).expect("Failed to create memory layout");
                    (
                        1,
                        alloc(new_val_layout) as *mut N,
                        alloc(new_ind_layout) as *mut u32,
                    )
                } else {
                    let new_cap = self.cap * 2;
                    // Correct realloc usage: the layout describes the EXISTING
                    // block (old cap); the new byte size is passed separately.
                    let new_val_layout = Layout::from_size_align(val_elem_size * self.cap, t_align).expect("Failed to create memory layout for reallocation");
                    let new_ind_layout = Layout::from_size_align(ind_elem_size * self.cap, usize_align).expect("Failed to create memory layout for reallocation");
                    (
                        new_cap,
                        realloc(self.val_ptr.as_ptr() as *mut u8, new_val_layout, val_elem_size * new_cap) as *mut N,
                        realloc(self.ind_ptr.as_ptr() as *mut u8, new_ind_layout, ind_elem_size * new_cap) as *mut u32,
                    )
                };

            if val_ptr.is_null() || ind_ptr.is_null() {
                oom();
            }

            self.val_ptr = NonNull::new_unchecked(val_ptr);
            self.ind_ptr = NonNull::new_unchecked(ind_ptr);
            self.cap = new_cap;
        }
    }

    /// Allocates fresh buffers sized for the current `cap` field.
    ///
    /// NOTE(review): callers must guarantee `cap > 0` and that `N` is not
    /// zero-sized — a zero-sized `alloc` is undefined behavior. Also, any
    /// previously held allocation is overwritten without being freed.
    #[inline]
    fn cap_set(&mut self) {
        unsafe {
            let val_elem_size = mem::size_of::<N>();
            let ind_elem_size = mem::size_of::<u32>();

            let t_align = mem::align_of::<N>();
            let usize_align = mem::align_of::<u32>();

            let new_val_layout = Layout::from_size_align(val_elem_size * self.cap, t_align).expect("Failed to create memory layout");
            let new_ind_layout = Layout::from_size_align(ind_elem_size * self.cap, usize_align).expect("Failed to create memory layout");
            let new_val_ptr = alloc(new_val_layout) as *mut N;
            let new_ind_ptr = alloc(new_ind_layout) as *mut u32;
            if new_val_ptr.is_null() || new_ind_ptr.is_null() {
                oom();
            }
            self.val_ptr = NonNull::new_unchecked(new_val_ptr);
            self.ind_ptr = NonNull::new_unchecked(new_ind_ptr);
        }
    }

    /// Reallocates the buffers to the current `cap` field.
    ///
    /// NOTE(review): unsound as written — `realloc` must receive the layout
    /// of the EXISTING allocation, but callers update `cap` *before*
    /// calling, so the layout below is computed from the new capacity. It
    /// also reallocs a dangling pointer when nothing was allocated yet
    /// (old cap 0), and `cap == 0` requests an illegal zero-sized realloc.
    /// A real fix needs the old capacity passed in; flagged rather than
    /// changed here because the signature is relied on by callers.
    #[inline]
    fn re_cap_set(&mut self) {
        unsafe {
            let val_elem_size = mem::size_of::<N>();
            let ind_elem_size = mem::size_of::<u32>();

            let t_align = mem::align_of::<N>();
            let usize_align = mem::align_of::<u32>();

            let new_val_layout = Layout::from_size_align(val_elem_size * self.cap, t_align).expect("Failed to create memory layout");
            let new_ind_layout = Layout::from_size_align(ind_elem_size * self.cap, usize_align).expect("Failed to create memory layout");
            let new_val_ptr = realloc(self.val_ptr.as_ptr() as *mut u8, new_val_layout, val_elem_size * self.cap) as *mut N;
            let new_ind_ptr = realloc(self.ind_ptr.as_ptr() as *mut u8, new_ind_layout, ind_elem_size * self.cap) as *mut u32;
            if new_val_ptr.is_null() || new_ind_ptr.is_null() {
                oom();
            }
            self.val_ptr = NonNull::new_unchecked(new_val_ptr);
            self.ind_ptr = NonNull::new_unchecked(new_ind_ptr);
        }
    }
}
734
impl<N> Clone for RawZeroSpVec<N>
where N: Num
{
    /// Deep-copies both buffers with a bitwise copy of all `cap` slots.
    ///
    /// NOTE(review): `ptr::copy_nonoverlapping` duplicates the `N` values
    /// bit-for-bit, which is only sound if `N` is trivially copyable — a
    /// heap-owning `N` would end up owned twice. It also copies the
    /// uninitialized tail beyond the initialized prefix (this type does not
    /// know how many slots are live) — confirm intended bounds on `N`.
    #[inline]
    fn clone(&self) -> Self {
        unsafe {
            // cap == 0: nothing allocated; cap == usize::MAX: ZST sentinel.
            // Either way, reproducing the dangling pointers is enough.
            if self.cap == 0 || self.cap == usize::MAX {
                return RawZeroSpVec {
                    val_ptr: NonNull::dangling(),
                    ind_ptr: NonNull::dangling(),
                    cap: self.cap,
                    _marker: PhantomData,
                };
            }

            let val_elem_size = mem::size_of::<N>();
            let ind_elem_size = mem::size_of::<u32>();

            let t_align = mem::align_of::<N>();
            let usize_align = mem::align_of::<u32>();

            let new_val_layout = Layout::from_size_align(val_elem_size * self.cap, t_align).expect("Failed to create memory layout");
            let new_ind_layout = Layout::from_size_align(ind_elem_size * self.cap, usize_align).expect("Failed to create memory layout");
            let new_val_ptr = alloc(new_val_layout) as *mut N;
            let new_ind_ptr = alloc(new_ind_layout) as *mut u32;
            if new_val_ptr.is_null() || new_ind_ptr.is_null() {
                oom();
            }
            // Bitwise duplication of both buffers, full capacity.
            ptr::copy_nonoverlapping(self.val_ptr.as_ptr(), new_val_ptr, self.cap);
            ptr::copy_nonoverlapping(self.ind_ptr.as_ptr(), new_ind_ptr, self.cap);

            RawZeroSpVec {
                val_ptr: NonNull::new_unchecked(new_val_ptr),
                ind_ptr: NonNull::new_unchecked(new_ind_ptr),
                cap: self.cap,
                _marker: PhantomData,
            }
        }
    }
}
777
// SAFETY: the raw pointers are uniquely owned by this struct (no aliasing),
// so thread transfer/sharing is sound whenever it is for N itself.
unsafe impl<N: Num + Send> Send for RawZeroSpVec<N> {}
unsafe impl<N: Num + Sync> Sync for RawZeroSpVec<N> {}
780
impl<N> Drop for RawZeroSpVec<N>
where N: Num
{
    /// Frees both buffers. Element destructors are NOT run here — this
    /// type does not know how many slots are initialized (see the note on
    /// `ZeroSpVec`'s Drop).
    #[inline]
    fn drop(&mut self) {
        unsafe {
            // Nothing to free: never allocated (0) or ZST sentinel (MAX).
            if self.cap == 0 || self.cap == usize::MAX {
                return;
            }

            let val_elem_size = mem::size_of::<N>();
            let ind_elem_size = mem::size_of::<u32>();

            let t_align = mem::align_of::<N>();
            let usize_align = mem::align_of::<u32>();

            // Layouts must match the ones the buffers were allocated with
            // (full current capacity for each parallel buffer).
            let new_val_layout = Layout::from_size_align(val_elem_size * self.cap, t_align).expect("Failed to create memory layout");
            let new_ind_layout = Layout::from_size_align(ind_elem_size * self.cap, usize_align).expect("Failed to create memory layout");
            dealloc(self.val_ptr.as_ptr() as *mut u8, new_val_layout);
            dealloc(self.ind_ptr.as_ptr() as *mut u8, new_ind_layout);
        }
    }
}
805
/// Allocation-failure handler.
///
/// Aborts immediately instead of calling `exit(-9999)`: `exit` runs libc
/// atexit handlers and stream flushing, which may themselves allocate —
/// not something to rely on once the allocator has already failed. The
/// `!` return type lets callers treat this as diverging.
#[cold]
fn oom() -> ! {
    ::std::process::abort();
}