use std::{alloc::{alloc, dealloc, realloc, Layout}, collections::HashMap, fmt::{self, Debug}, marker::PhantomData, mem, ops::{Index, IndexMut}, ptr::{self, NonNull}};

use num::Num;

use super::{normal_vec_trait::NormalVecMethods, vec_trait::Math};

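/// A sparse vector in a COO-style layout: two parallel raw buffers hold the
/// explicitly stored (non-default) values and their logical indices, with the
/// index buffer kept sorted in ascending order. Elements equal to
/// `T::default()` are implicit, so the logical length `len` can exceed the
/// number of stored entries `raw_len`.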
pub struct DefaultSparseVec<T: Default + PartialEq + Clone> {
    buf: RawDefaultSparseVec<T>,
    /// Number of explicitly stored (non-default) entries.
    raw_len: usize,
    /// Logical length of the vector.
    len: usize,
    default: T,
}

impl<T: Default + PartialEq + Clone> DefaultSparseVec<T> {
    #[inline(always)]
    fn val_ptr(&self) -> *mut T { self.buf.val_ptr.as_ptr() }

    #[inline(always)]
    fn ind_ptr(&self) -> *mut usize { self.buf.ind_ptr.as_ptr() }

    #[inline(always)]
    fn cap(&self) -> usize { self.buf.cap }

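    /// Binary search over the sorted index buffer. Returns `Ok(pos)` when
    /// logical `index` is explicitly stored at buffer position `pos`, and
    /// `Err(pos)` with the insertion point that keeps the buffer sorted
    /// otherwise.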
    #[inline(always)]
    fn ind_binary_search(&self, index: &usize) -> Result<usize, usize> {
        if self.raw_len == 0 {
            return Err(0);
        }

        let mut left = 0;
        let mut right = self.raw_len - 1;
        while left < right {
            let mid = left + (right - left) / 2;
            let mid_index = unsafe { ptr::read(self.ind_ptr().add(mid)) };
            if mid_index == *index {
                return Ok(mid);
            } else if mid_index < *index {
                left = mid + 1;
            } else {
                right = mid;
            }
        }

        let final_index = unsafe { ptr::read(self.ind_ptr().add(left)) };
        if final_index == *index {
            Ok(left)
        } else if final_index < *index {
            Err(left + 1)
        } else {
            Err(left)
        }
    }

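    /// Creates an empty sparse vector without allocating.
    ///
    /// A minimal usage sketch (marked `ignore` since this module's crate
    /// path is not fixed here):
    ///
    /// ```ignore
    /// let v: DefaultSparseVec<i32> = DefaultSparseVec::new();
    /// assert!(v.is_empty());
    /// assert_eq!(v.capacity(), 0);
    /// ```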
    #[inline(always)]
    pub fn new() -> Self {
        DefaultSparseVec {
            buf: RawDefaultSparseVec::new(),
            raw_len: 0,
            len: 0,
            default: Default::default(),
        }
    }

    #[inline(always)]
    pub fn with_capacity(cap: usize) -> Self {
        let mut vec = Self::new();
        // Allocating a zero-sized buffer (cap == 0 or zero-sized T) is
        // undefined behaviour with the raw `alloc` API, so skip it.
        if cap > 0 && mem::size_of::<T>() != 0 {
            vec.buf.cap = cap;
            vec.buf.cap_set();
        }
        vec
    }

    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    #[inline(always)]
    pub fn capacity(&self) -> usize {
        self.cap()
    }

    #[inline(always)]
    pub fn reserve(&mut self, additional: usize) {
        let new_cap = self.raw_len + additional;
        if new_cap > self.cap() {
            let old_cap = self.buf.cap;
            self.buf.cap = new_cap;
            self.buf.re_cap_set(old_cap);
        }
    }

    #[inline(always)]
    pub fn shrink_to_fit(&mut self) {
        // Shrinking to a zero-sized buffer would require deallocating
        // rather than reallocating, so only shrink while entries remain.
        if self.raw_len > 0 && self.raw_len < self.cap() {
            let old_cap = self.buf.cap;
            self.buf.cap = self.raw_len;
            self.buf.re_cap_set(old_cap);
        }
    }

    /// Number of explicitly stored (non-default) entries.
    #[inline(always)]
    pub fn nnz(&self) -> usize {
        self.raw_len
    }

    #[inline(always)]
    pub fn len(&self) -> usize {
        self.len
    }

    #[inline(always)]
    pub fn clear(&mut self) {
        while self.pop().is_some() {}
    }

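    /// Appends `elem` at the logical end. A value equal to the default only
    /// advances `len`; a non-default value is also stored physically.
    ///
    /// A minimal usage sketch (marked `ignore` since this module's crate
    /// path is not fixed here):
    ///
    /// ```ignore
    /// let mut v: DefaultSparseVec<i32> = DefaultSparseVec::new();
    /// v.push(0); // default: logical only
    /// v.push(7); // non-default: stored
    /// assert_eq!((v.len(), v.nnz()), (2, 1));
    /// assert_eq!(v.pop(), Some(7));
    /// ```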
    #[inline(always)]
    pub fn push(&mut self, elem: T) {
        if self.raw_len == self.cap() {
            self.buf.grow();
        }
        if self.default != elem {
            unsafe {
                ptr::write(self.val_ptr().add(self.raw_len), elem);
                ptr::write(self.ind_ptr().add(self.raw_len), self.len);
            }
            self.raw_len += 1;
        }
        self.len += 1;
    }

    #[inline(always)]
    pub fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            return None;
        }
        self.len -= 1;
        // The popped element is non-default exactly when the last stored
        // entry sits at the (old) last logical index.
        if self.raw_len > 0 {
            let last_ind = unsafe { ptr::read(self.ind_ptr().add(self.raw_len - 1)) };
            if last_ind == self.len {
                self.raw_len -= 1;
                return Some(unsafe { ptr::read(self.val_ptr().add(self.raw_len)) });
            }
        }
        Some(self.default.clone())
    }

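    /// Returns a reference to the logical element at `index`, or `None` when
    /// `index >= len`. Indices without a stored entry resolve to a reference
    /// to the shared default value.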
    #[inline(always)]
    pub fn get(&self, index: usize) -> Option<&T> {
        if index >= self.len {
            return None;
        }
        match self.ind_binary_search(&index) {
            Ok(i) => {
                let val = unsafe { &*self.val_ptr().add(i) };
                Some(val)
            }
            Err(_) => Some(&self.default),
        }
    }

    #[deprecated(note = "Avoid this method: to return a mutable reference it must materialize an explicit entry for the sparse part, wastefully storing a default value. It is fine when a non-default value will be assigned.")]
    #[inline(always)]
    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
        if index >= self.len {
            return None;
        }
        match self.ind_binary_search(&index) {
            Ok(i) => {
                let val = unsafe { &mut *self.val_ptr().add(i) };
                Some(val)
            }
            Err(i) => {
                // Not stored yet: open a gap at buffer position `i` and
                // fill it with an explicit default entry for `index`.
                if self.raw_len == self.cap() {
                    self.buf.grow();
                }
                unsafe {
                    let count = self.raw_len - i;
                    ptr::copy(
                        self.val_ptr().add(i),
                        self.val_ptr().add(i + 1),
                        count,
                    );
                    ptr::copy(
                        self.ind_ptr().add(i),
                        self.ind_ptr().add(i + 1),
                        count,
                    );
                    ptr::write(self.val_ptr().add(i), self.default.clone());
                    ptr::write(self.ind_ptr().add(i), index);
                }
                self.raw_len += 1;
                let val = unsafe { &mut *self.val_ptr().add(i) };
                Some(val)
            }
        }
    }

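    /// Inserts `elem` at logical position `index`, shifting every element at
    /// or after it one position to the right.
    ///
    /// Sketch of both cases (marked `ignore`, as above):
    ///
    /// ```ignore
    /// let mut v = DefaultSparseVec::from(vec![1, 2, 3]);
    /// v.insert(1, 9); // stored entry: [1, 9, 2, 3]
    /// v.insert(0, 0); // i32 default: shifts indices only
    /// assert_eq!(v.len(), 5);
    /// assert_eq!(v.nnz(), 4);
    /// ```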
    #[inline(always)]
    pub fn insert(&mut self, index: usize, elem: T) {
        assert!(index <= self.len, "index out of bounds");

        self.len += 1;

        // Position of the first stored entry at or after `index`.
        let i = match self.ind_binary_search(&index) {
            Ok(pos) => pos,
            Err(pos) => pos,
        };

        if elem != self.default {
            if self.raw_len == self.cap() {
                self.buf.grow();
            }
            unsafe {
                let count = self.raw_len - i;

                // Shift the stored values and their indices one slot right.
                ptr::copy(
                    self.val_ptr().add(i),
                    self.val_ptr().add(i + 1),
                    count,
                );
                ptr::copy(
                    self.ind_ptr().add(i),
                    self.ind_ptr().add(i + 1),
                    count,
                );

                // The shifted entries moved up one logical position.
                for offset in (i + 1)..(self.raw_len + 1) {
                    *self.ind_ptr().add(offset) += 1;
                }

                ptr::write(self.val_ptr().add(i), elem);
                ptr::write(self.ind_ptr().add(i), index);
            }
            self.raw_len += 1;
        } else {
            // Inserting the default stores nothing; only the logical
            // indices of the entries at or after `index` move up.
            unsafe {
                for offset in i..self.raw_len {
                    *self.ind_ptr().add(offset) += 1;
                }
            }
        }
    }

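    /// Removes and returns the element at logical position `index`, shifting
    /// every element after it one position to the left. Panics when
    /// `index >= len`.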
    #[inline(always)]
    pub fn remove(&mut self, index: usize) -> T {
        assert!(index < self.len, "index out of bounds");

        self.len -= 1;

        match self.ind_binary_search(&index) {
            Ok(i) => {
                // Explicitly stored: read it out, close the gap, and pull
                // the logical indices of the later entries down by one.
                let removed_val = unsafe { ptr::read(self.val_ptr().add(i)) };

                let count = self.raw_len - i - 1;
                if count > 0 {
                    unsafe {
                        ptr::copy(
                            self.val_ptr().add(i + 1),
                            self.val_ptr().add(i),
                            count,
                        );
                        ptr::copy(
                            self.ind_ptr().add(i + 1),
                            self.ind_ptr().add(i),
                            count,
                        );
                        for offset in i..(self.raw_len - 1) {
                            *self.ind_ptr().add(offset) -= 1;
                        }
                    }
                }
                self.raw_len -= 1;

                removed_val
            }
            Err(i) => {
                // Not stored: only the logical indices after the removal
                // point change.
                if i < self.raw_len {
                    unsafe {
                        for offset in i..self.raw_len {
                            *self.ind_ptr().add(offset) -= 1;
                        }
                    }
                }

                self.default.clone()
            }
        }
    }

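    /// Moves every element of `other` onto the end of `self`. Both vectors
    /// must agree on the default value.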
    #[inline(always)]
    pub fn append(&mut self, mut other: Self) {
        let other_len = other.len();
        let other_raw_len = other.nnz();

        if other_len == 0 {
            return;
        }

        assert!(
            self.default == other.default,
            "default value mismatch"
        );

        let offset = self.len;

        self.len += other_len;

        if self.raw_len + other_raw_len > self.cap() {
            // `reserve` takes the number of *additional* stored entries.
            self.reserve(other_raw_len);
        }

        if other_raw_len > 0 {
            unsafe {
                ptr::copy_nonoverlapping(
                    other.ind_ptr(),
                    self.ind_ptr().add(self.raw_len),
                    other_raw_len,
                );
                ptr::copy_nonoverlapping(
                    other.val_ptr(),
                    self.val_ptr().add(self.raw_len),
                    other_raw_len,
                );

                // Re-base the copied indices onto the end of `self`.
                for i in 0..other_raw_len {
                    *self.ind_ptr().add(self.raw_len + i) += offset;
                }
            }

            self.raw_len += other_raw_len;
        }

        // The stored values were moved out bitwise; empty `other` so its
        // destructor does not drop them a second time.
        other.raw_len = 0;
        other.len = 0;
    }

    pub fn extend<I>(&mut self, iter: I)
    where
        I: IntoIterator<Item = T>,
    {
        for elem in iter {
            self.push(elem);
        }
    }

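    /// Iterates over the explicitly stored entries only, yielding
    /// `(&logical_index, &value)` pairs in ascending index order; implicit
    /// default elements are skipped.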
    #[inline(always)]
    pub fn iter(&self) -> impl Iterator<Item = (&usize, &T)> {
        (0..self.raw_len).map(move |i| {
            let val: &T = unsafe { &*self.val_ptr().add(i) };
            let ind: &usize = unsafe { &*self.ind_ptr().add(i) };
            (ind, val)
        })
    }

    #[inline(always)]
    pub fn iter_mut(&mut self) -> impl Iterator<Item = (&mut usize, &mut T)> {
        (0..self.raw_len).map(move |i| {
            let val: &mut T = unsafe { &mut *self.val_ptr().add(i) };
            let ind: &mut usize = unsafe { &mut *self.ind_ptr().add(i) };
            (ind, val)
        })
    }

    #[inline(always)]
    pub fn as_slice_val(&self) -> &[T] {
        unsafe { std::slice::from_raw_parts(self.val_ptr(), self.raw_len) }
    }

    #[inline(always)]
    pub fn as_slice_ind(&self) -> &[usize] {
        unsafe { std::slice::from_raw_parts(self.ind_ptr(), self.raw_len) }
    }

    #[inline(always)]
    pub fn as_mut_slice_val(&mut self) -> &mut [T] {
        unsafe { std::slice::from_raw_parts_mut(self.val_ptr(), self.raw_len) }
    }

    #[inline(always)]
    pub fn as_mut_slice_ind(&mut self) -> &mut [usize] {
        unsafe { std::slice::from_raw_parts_mut(self.ind_ptr(), self.raw_len) }
    }
}

unsafe impl<T: Send + Default + PartialEq + Clone> Send for DefaultSparseVec<T> {}
unsafe impl<T: Sync + Default + PartialEq + Clone> Sync for DefaultSparseVec<T> {}

impl<T: Default + PartialEq + Clone> Clone for DefaultSparseVec<T> {
    fn clone(&self) -> Self {
        let new_buf = self.buf.deep_clone(self.raw_len);

        DefaultSparseVec {
            buf: new_buf,
            raw_len: self.raw_len,
            len: self.len,
            default: self.default.clone(),
        }
    }
}

impl<T: Default + PartialEq + Clone> Drop for DefaultSparseVec<T> {
    #[inline(always)]
    fn drop(&mut self) {
        // Popping everything drops each stored value; the buffers
        // themselves are freed by `RawDefaultSparseVec`'s destructor.
        while self.pop().is_some() {}
    }
}

impl<T: Default + PartialEq + Clone + Debug> Debug for DefaultSparseVec<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if f.sign_plus() {
            // `{:+?}` dumps the internal representation.
            f.debug_struct("DefaultSparseVec")
                .field("buf", &self.buf)
                .field("raw_len", &self.raw_len)
                .field("len", &self.len)
                .field("default", &self.default)
                .finish()
        } else if f.alternate() {
            // `{:#?}` shows only the stored (index, value) pairs.
            write!(f, "DefaultSparseVec({:?})", self.iter().collect::<Vec<_>>())
        } else {
            // `{:?}` renders the dense logical view.
            f.debug_list().entries((0..self.len).map(|i| self.get(i).unwrap())).finish()
        }
    }
}

impl<T: Default + PartialEq + Clone> Index<usize> for DefaultSparseVec<T> {
    type Output = T;

    #[inline(always)]
    fn index(&self, index: usize) -> &Self::Output {
        self.get(index).expect("index out of bounds")
    }
}

impl<T: Default + PartialEq + Clone> IndexMut<usize> for DefaultSparseVec<T> {
    // `get_mut` is deprecated above; mutable indexing is the one sanctioned
    // way to use it, so silence the lint here.
    #[allow(deprecated)]
    #[inline(always)]
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        self.get_mut(index).expect("index out of bounds")
    }
}

impl<T: Default + PartialEq + Clone> Default for DefaultSparseVec<T> {
    #[inline(always)]
    fn default() -> Self {
        Self::new()
    }
}

impl<T: Default + PartialEq + Clone> From<Vec<T>> for DefaultSparseVec<T> {
    #[inline(always)]
    fn from(vec: Vec<T>) -> Self {
        let mut svec = DefaultSparseVec::new();
        vec.into_iter().for_each(|elem| svec.push(elem));
        svec.shrink_to_fit();
        svec
    }
}

impl<T: Default + PartialEq + Clone> From<HashMap<usize, T>> for DefaultSparseVec<T> {
    #[inline(always)]
    fn from(map: HashMap<usize, T>) -> Self {
        // HashMap iteration order is arbitrary, while `insert` requires
        // `index <= len`, so insert in ascending key order and pad the
        // gaps with default values.
        let mut entries: Vec<(usize, T)> = map.into_iter().collect();
        entries.sort_by_key(|&(index, _)| index);
        let mut svec = DefaultSparseVec::new();
        for (index, elem) in entries {
            while svec.len() < index {
                svec.push(Default::default());
            }
            svec.push(elem);
        }
        svec.shrink_to_fit();
        svec
    }
}

impl<T: Default + PartialEq + Clone> From<DefaultSparseVec<T>> for Vec<T> {
    #[inline(always)]
    fn from(svec: DefaultSparseVec<T>) -> Vec<T> {
        let mut vec = Vec::with_capacity(svec.len());
        (0..svec.len()).for_each(|i| vec.push(svec.get(i).unwrap().clone()));
        vec
    }
}

impl<T: Default + PartialEq + Clone> From<DefaultSparseVec<T>> for HashMap<usize, T> {
    #[inline(always)]
    fn from(svec: DefaultSparseVec<T>) -> HashMap<usize, T> {
        let mut map = HashMap::new();
        svec.iter().for_each(|(index, elem)| {
            map.insert(*index, elem.clone());
        });
        map
    }
}

impl<T: Default + PartialEq + Clone> NormalVecMethods<T> for DefaultSparseVec<T> {
    // Delegate to the inherent methods so the trait and inherent
    // behaviours cannot drift apart.
    #[inline(always)]
    fn n_push(&mut self, elem: T) {
        self.push(elem);
    }

    #[inline(always)]
    fn n_pop(&mut self) -> Option<T> {
        self.pop()
    }

    #[inline(always)]
    fn n_insert(&mut self, index: usize, elem: T) {
        self.insert(index, elem);
    }
}

impl<T> Math<T> for DefaultSparseVec<T>
where
    T: Num + Default + PartialEq + Clone + std::ops::AddAssign + std::ops::Mul<Output = T> + Into<u64>,
{
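    /// Sparse dot product accumulated in `u64`. Both `iter()`s yield entries
    /// in ascending index order, so a single merge pass suffices; positions
    /// stored in only one operand contribute nothing, which assumes the
    /// default value multiplies like zero.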
    #[inline(always)]
    fn u64_dot(&self, other: &Self) -> u64 {
        let mut sum: u64 = 0;
        let mut self_iter = self.iter();
        let mut other_iter = other.iter();
        let mut self_current = self_iter.next();
        let mut other_current = other_iter.next();

        // Merge the two sorted entry streams, multiplying where the
        // indices coincide.
        while let (Some((self_ind, self_val)), Some((other_ind, other_val))) =
            (self_current, other_current)
        {
            if self_ind < other_ind {
                self_current = self_iter.next();
            } else if self_ind > other_ind {
                other_current = other_iter.next();
            } else {
                sum += (self_val.clone() * other_val.clone()).into();
                self_current = self_iter.next();
                other_current = other_iter.next();
            }
        }
        sum
    }
}

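/// Memory manager for the two parallel allocations (values and logical
/// indices) sharing one capacity. Modeled on the RawVec pattern: it only
/// allocates, reallocates, and frees; it never reads, writes, or drops `T`s.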
#[derive(Debug)]
struct RawDefaultSparseVec<T> {
    val_ptr: NonNull<T>,
    ind_ptr: NonNull<usize>,
    cap: usize,
    _marker: PhantomData<T>,
}

impl<T> RawDefaultSparseVec<T> {
    #[inline(always)]
    fn new() -> Self {
        // Zero-sized types never allocate, so start them at "infinite"
        // capacity; everything else starts unallocated.
        let cap = if mem::size_of::<T>() == 0 { usize::MAX } else { 0 };

        RawDefaultSparseVec {
            val_ptr: NonNull::dangling(),
            ind_ptr: NonNull::dangling(),
            cap,
            _marker: PhantomData,
        }
    }

    #[inline(always)]
    fn grow(&mut self) {
        unsafe {
            let val_elem_size = mem::size_of::<T>();
            let ind_elem_size = mem::size_of::<usize>();

            // A zero-sized T starts with cap == usize::MAX, so reaching
            // `grow` means the capacity truly overflowed.
            assert!(val_elem_size != 0, "capacity overflow");

            let t_align = mem::align_of::<T>();
            let usize_align = mem::align_of::<usize>();

            let (new_cap, val_ptr, ind_ptr): (usize, *mut T, *mut usize) =
                if self.cap == 0 {
                    // First allocation: room for exactly one element.
                    let new_val_layout = Layout::from_size_align(val_elem_size, t_align).expect("Failed to create memory layout");
                    let new_ind_layout = Layout::from_size_align(ind_elem_size, usize_align).expect("Failed to create memory layout");
                    (
                        1,
                        alloc(new_val_layout) as *mut T,
                        alloc(new_ind_layout) as *mut usize,
                    )
                } else {
                    // Double the capacity. `realloc` takes the layout of
                    // the *existing* allocation plus the new size in bytes.
                    let new_cap = self.cap * 2;
                    let old_val_layout = Layout::from_size_align(val_elem_size * self.cap, t_align).expect("Failed to create memory layout for reallocation");
                    let old_ind_layout = Layout::from_size_align(ind_elem_size * self.cap, usize_align).expect("Failed to create memory layout for reallocation");
                    (
                        new_cap,
                        realloc(self.val_ptr.as_ptr() as *mut u8, old_val_layout, val_elem_size * new_cap) as *mut T,
                        realloc(self.ind_ptr.as_ptr() as *mut u8, old_ind_layout, ind_elem_size * new_cap) as *mut usize,
                    )
                };

            if val_ptr.is_null() || ind_ptr.is_null() {
                oom();
            }

            self.val_ptr = NonNull::new_unchecked(val_ptr);
            self.ind_ptr = NonNull::new_unchecked(ind_ptr);
            self.cap = new_cap;
        }
    }

    /// Allocates fresh buffers for the current `cap`. The caller must
    /// ensure `cap > 0`, `T` is not zero-sized, and no buffers were
    /// previously allocated (they would leak).
    #[inline(always)]
    fn cap_set(&mut self) {
        unsafe {
            let val_elem_size = mem::size_of::<T>();
            let ind_elem_size = mem::size_of::<usize>();

            let t_align = mem::align_of::<T>();
            let usize_align = mem::align_of::<usize>();

            let new_val_layout = Layout::from_size_align(val_elem_size * self.cap, t_align).expect("Failed to create memory layout");
            let new_ind_layout = Layout::from_size_align(ind_elem_size * self.cap, usize_align).expect("Failed to create memory layout");
            let new_val_ptr = alloc(new_val_layout) as *mut T;
            let new_ind_ptr = alloc(new_ind_layout) as *mut usize;
            if new_val_ptr.is_null() || new_ind_ptr.is_null() {
                oom();
            }
            self.val_ptr = NonNull::new_unchecked(new_val_ptr);
            self.ind_ptr = NonNull::new_unchecked(new_ind_ptr);
        }
    }

    /// Reallocates the buffers from `old_cap` elements to the current
    /// `cap`. `realloc` must be given the layout of the *existing*
    /// allocation, so the previous capacity has to be passed in.
    #[inline(always)]
    fn re_cap_set(&mut self, old_cap: usize) {
        if old_cap == 0 {
            // Nothing to reallocate yet; fall back to a fresh allocation.
            self.cap_set();
            return;
        }
        unsafe {
            let val_elem_size = mem::size_of::<T>();
            let ind_elem_size = mem::size_of::<usize>();

            let t_align = mem::align_of::<T>();
            let usize_align = mem::align_of::<usize>();

            let old_val_layout = Layout::from_size_align(val_elem_size * old_cap, t_align).expect("Failed to create memory layout");
            let old_ind_layout = Layout::from_size_align(ind_elem_size * old_cap, usize_align).expect("Failed to create memory layout");
            let new_val_ptr = realloc(self.val_ptr.as_ptr() as *mut u8, old_val_layout, val_elem_size * self.cap) as *mut T;
            let new_ind_ptr = realloc(self.ind_ptr.as_ptr() as *mut u8, old_ind_layout, ind_elem_size * self.cap) as *mut usize;
            if new_val_ptr.is_null() || new_ind_ptr.is_null() {
                oom();
            }
            self.val_ptr = NonNull::new_unchecked(new_val_ptr);
            self.ind_ptr = NonNull::new_unchecked(new_ind_ptr);
        }
    }

    /// Allocates new buffers with the same capacity and clones the first
    /// `raw_len` entries into them.
    fn deep_clone(&self, raw_len: usize) -> Self
    where
        T: Clone,
    {
        // With no allocation (cap == 0) or a zero-sized T there is nothing
        // to copy; a fresh raw vec is equivalent.
        if self.cap == 0 || mem::size_of::<T>() == 0 {
            return Self::new();
        }
        unsafe {
            let val_elem_size = mem::size_of::<T>();
            let ind_elem_size = mem::size_of::<usize>();
            let t_align = mem::align_of::<T>();
            let usize_align = mem::align_of::<usize>();
            let val_layout = Layout::from_size_align(val_elem_size * self.cap, t_align).unwrap();
            let ind_layout = Layout::from_size_align(ind_elem_size * self.cap, usize_align).unwrap();

            let new_val_ptr = alloc(val_layout) as *mut T;
            let new_ind_ptr = alloc(ind_layout) as *mut usize;
            if new_val_ptr.is_null() || new_ind_ptr.is_null() {
                oom();
            }

            // Indices are plain usizes, so a bitwise copy is fine.
            ptr::copy_nonoverlapping(self.ind_ptr.as_ptr(), new_ind_ptr, raw_len);
            // Values must be cloned one by one; a bitwise copy would leave
            // two owners of the same heap data and double-drop later.
            for i in 0..raw_len {
                ptr::write(new_val_ptr.add(i), (*self.val_ptr.as_ptr().add(i)).clone());
            }

            RawDefaultSparseVec {
                val_ptr: NonNull::new_unchecked(new_val_ptr),
                ind_ptr: NonNull::new_unchecked(new_ind_ptr),
                cap: self.cap,
                _marker: PhantomData,
            }
        }
    }
}

unsafe impl<T: Send> Send for RawDefaultSparseVec<T> {}
unsafe impl<T: Sync> Sync for RawDefaultSparseVec<T> {}

impl<T> Drop for RawDefaultSparseVec<T> {
    #[inline(always)]
    fn drop(&mut self) {
        // Only the allocations are released here; the stored values were
        // already dropped by `DefaultSparseVec::drop`.
        let val_elem_size = mem::size_of::<T>();
        let ind_elem_size = mem::size_of::<usize>();
        if self.cap != 0 && val_elem_size != 0 {
            let t_align = mem::align_of::<T>();
            let usize_align = mem::align_of::<usize>();
            unsafe {
                let val_layout = Layout::from_size_align(val_elem_size * self.cap, t_align).expect("Failed to create memory layout");
                let ind_layout = Layout::from_size_align(ind_elem_size * self.cap, usize_align).expect("Failed to create memory layout");
                dealloc(self.val_ptr.as_ptr() as *mut u8, val_layout);
                dealloc(self.ind_ptr.as_ptr() as *mut u8, ind_layout);
            }
        }
    }
}

fn oom() {
    // Abort rather than `exit`: exit runs atexit handlers, which may
    // themselves allocate and fail again under memory pressure.
    std::process::abort();
}
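
// Hypothetical smoke tests sketching the public API (not part of the
// original file); the assertions mirror the documented behaviour above.
#[cfg(test)]
mod tests {
    use super::DefaultSparseVec;

    #[test]
    fn push_get_pop_roundtrip() {
        let mut v: DefaultSparseVec<i32> = DefaultSparseVec::new();
        v.push(0); // default value: counted logically, not stored
        v.push(7); // non-default: stored physically
        v.push(0);
        assert_eq!((v.len(), v.nnz()), (3, 1));
        assert_eq!(v[1], 7);
        assert_eq!(v[2], 0);
        assert_eq!(v.pop(), Some(0));
        assert_eq!(v.pop(), Some(7));
        assert_eq!(v.pop(), Some(0));
        assert_eq!(v.pop(), None);
    }

    #[test]
    fn insert_remove_keeps_indices_sorted() {
        let mut v = DefaultSparseVec::from(vec![1, 2, 3]);
        v.insert(1, 9); // logical view: [1, 9, 2, 3]
        assert_eq!(v.remove(1), 9);
        assert_eq!(Vec::from(v), vec![1, 2, 3]);
    }
}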