rotated_vec/lib.rs
1//! A dynamic array based on a 2-level rotated array.
2//!
3//! See <a href="https://github.com/senderista/rotated-array-set/blob/master/README.md">the `rotated-array-set` README</a> for a detailed discussion of the performance
4//! benefits and drawbacks of an equivalent data structure.
5
6#![doc(html_root_url = "https://docs.rs/rotated-vec/0.1.0/rotated_vec/")]
7#![doc(html_logo_url = "https://raw.githubusercontent.com/senderista/rotated-array-set/master/img/cells.png")]
8
9use std::mem;
10use std::cmp::{min, Ordering};
11use std::fmt::Debug;
12use std::hash::{Hash, Hasher};
13use std::iter::{DoubleEndedIterator, ExactSizeIterator, FromIterator, FusedIterator};
14use std::ops::{Index, IndexMut};
15
16/// A dynamic array based on a 2-level rotated array.
17///
18/// This is roughly a drop-in replacement for `Vec`, except that there is no
19/// deref to a slice, so underlying slice methods are unavailable. Many of
20/// the most useful slice methods have been ported.
21///
22/// # Examples
23///
24/// ```
25/// use rotated_vec::RotatedVec;
26///
27/// // Type inference lets us omit an explicit type signature (which
28/// // would be `RotatedVec<i32>` in this example).
29/// let mut vec = RotatedVec::new();
30///
31/// // Push some integers onto the vector.
32/// vec.push(-1);
33/// vec.push(6);
34/// vec.push(1729);
35/// vec.push(24);
36///
37/// // Pop an integer from the vector.
38/// vec.pop();
39///
40/// // Insert an integer at a given index.
41/// vec.insert(1, 0);
42///
43/// // Remove an integer at a given index.
44/// vec.remove(1);
45///
46/// // Change an integer at a given index.
47/// vec[1] = 0;
48///
49/// // Iterate over everything.
50/// for int in &vec {
51/// println!("{}", int);
52/// }
53/// ```
#[derive(Debug, Clone)]
pub struct RotatedVec<T> {
    // Backing storage: the concatenation of subarrays 0, 1, 2, ..., where
    // subarray k occupies k + 1 consecutive slots and may be stored rotated.
    data: Vec<T>,
    // start_indexes[k] is the offset within subarray k of its logically first
    // element (the rotation pivot); 0 means the subarray is in logical order.
    start_indexes: Vec<usize>,
}
59
/// An iterator over the items of a `RotatedVec`.
///
/// This `struct` is created by the [`iter`] method on [`RotatedVec`][`RotatedVec`].
/// See its documentation for more.
///
/// [`RotatedVec`]: struct.RotatedVec.html
/// [`iter`]: struct.RotatedVec.html#method.iter
#[derive(Debug, Copy, Clone)]
pub struct Iter<'a, T: 'a> {
    // the `RotatedVec` being iterated over
    container: &'a RotatedVec<T>,
    // logical index of the next element for forward iteration
    next_index: usize,
    // logical index of the next element for reverse iteration
    next_rev_index: usize,
}
73
74impl<'a, T> Iter<'a, T>
75where
76 T: Copy + Default + Debug,
77{
78 #[inline(always)]
79 fn assert_invariants(&self) -> bool {
80 assert!(self.next_index <= self.container.len());
81 assert!(self.next_rev_index <= self.container.len());
82 if self.next_rev_index < self.next_index {
83 assert!(self.next_index - self.next_rev_index == 1);
84 }
85 true
86 }
87}
88
/// A mutable iterator over the items of a `RotatedVec`.
///
/// This `struct` is created by the [`iter_mut`] method on [`RotatedVec`][`RotatedVec`].
/// See its documentation for more.
///
/// [`RotatedVec`]: struct.RotatedVec.html
/// [`iter_mut`]: struct.RotatedVec.html#method.iter_mut
#[derive(Debug)]
pub struct IterMut<'a, T: 'a> {
    // the `RotatedVec` being iterated over (held mutably)
    container: &'a mut RotatedVec<T>,
    // logical index of the next element for forward iteration
    next_index: usize,
    // logical index of the next element for reverse iteration
    next_rev_index: usize,
}
102
103impl<'a, T> IterMut<'a, T>
104where
105 T: Copy + Default + Debug,
106{
107 #[inline(always)]
108 fn assert_invariants(&self) -> bool {
109 assert!(self.next_index <= self.container.len());
110 assert!(self.next_rev_index <= self.container.len());
111 if self.next_rev_index < self.next_index {
112 assert!(self.next_index - self.next_rev_index == 1);
113 }
114 true
115 }
116}
117
/// An owning iterator over the items of a `RotatedVec`.
///
/// This `struct` is created by the [`into_iter`] method on [`RotatedVec`][`RotatedVec`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// [`RotatedVec`]: struct.RotatedVec.html
/// [`into_iter`]: struct.RotatedVec.html#method.into_iter
#[derive(Debug, Clone)]
pub struct IntoIter<T> {
    // owned elements; presumably placed in logical order by the
    // `IntoIterator` impl — confirm against that impl (not in this chunk)
    vec: Vec<T>,
    // index of the next element to yield
    next_index: usize,
}
130
131impl<T> RotatedVec<T>
132where
133 T: Copy + Default + Debug,
134{
135 /// Makes a new `RotatedVec` without any heap allocations.
136 ///
137 /// This is a constant-time operation.
138 ///
139 /// # Examples
140 ///
141 /// ```
142 /// #![allow(unused_mut)]
143 /// use rotated_vec::RotatedVec;
144 ///
145 /// let mut vec: RotatedVec<i32> = RotatedVec::new();
146 /// ```
147 pub fn new() -> Self {
148 RotatedVec {
149 data: Vec::new(),
150 start_indexes: Vec::new(),
151 }
152 }
153
154 /// Constructs a new, empty `RotatedVec<T>` with the specified capacity.
155 ///
156 /// The vector will be able to hold exactly `capacity` elements without
157 /// reallocating. If `capacity` is 0, the vector will not allocate.
158 ///
159 /// It is important to note that although the returned vector has the
160 /// *capacity* specified, the vector will have a zero *length*.
161 ///
162 /// # Examples
163 ///
164 /// ```
165 /// use rotated_vec::RotatedVec;
166 ///
167 /// let mut vec = RotatedVec::with_capacity(10);
168 ///
169 /// // The vector contains no items, even though it has capacity for more
170 /// assert_eq!(vec.len(), 0);
171 ///
172 /// // These are all done without reallocating...
173 /// for i in 0..10 {
174 /// vec.push(i);
175 /// }
176 ///
177 /// // ...but this may make the vector reallocate
178 /// vec.push(11);
179 /// ```
180 pub fn with_capacity(capacity: usize) -> RotatedVec<T> {
181 let start_indexes_capacity = if capacity > 0 {
182 Self::get_subarray_idx_from_array_idx(capacity - 1) + 1
183 } else {
184 0
185 };
186 RotatedVec {
187 data: Vec::with_capacity(capacity),
188 start_indexes: Vec::with_capacity(start_indexes_capacity),
189 }
190 }
191
192
193 /// Returns a reference to the value in the array, if any, at the given index.
194 ///
195 /// This is a constant-time operation.
196 ///
197 /// # Examples
198 ///
199 /// ```
200 /// use rotated_vec::RotatedVec;
201 ///
202 /// let vec: RotatedVec<_> = vec![1, 2, 3].into();
203 /// assert_eq!(vec.get(0), Some(&1));
204 /// assert_eq!(vec.get(3), None);
205 /// ```
206 pub fn get(&self, index: usize) -> Option<&T> {
207 if index >= self.data.len() {
208 return None;
209 }
210 let real_idx = self.get_real_index(index);
211 Some(&self.data[real_idx])
212 }
213
214 /// Returns a mutable reference to the value in the array, if any, at the given index.
215 ///
216 /// This is a constant-time operation.
217 ///
218 /// # Examples
219 ///
220 /// ```
221 /// use rotated_vec::RotatedVec;
222 ///
223 /// let mut vec: RotatedVec<_> = vec![1, 2, 3].into();
224 /// assert_eq!(vec.get_mut(0), Some(&mut 1));
225 /// assert_eq!(vec.get_mut(3), None);
226 /// ```
227 pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
228 if index >= self.data.len() {
229 return None;
230 }
231 let real_idx = self.get_real_index(index);
232 Some(&mut self.data[real_idx])
233 }
234
235 /// Swaps two elements in the vector.
236 ///
237 /// This is a constant-time operation.
238 ///
239 /// # Arguments
240 ///
241 /// * a - The index of the first element
242 /// * b - The index of the second element
243 ///
244 /// # Panics
245 ///
246 /// Panics if `a` or `b` are out of bounds.
247 ///
248 /// # Examples
249 ///
250 /// ```
251 /// use rotated_vec::RotatedVec;
252 ///
253 /// let mut vec: RotatedVec<_> = vec!["a", "b", "c", "d"].into();
254 /// vec.swap(1, 3);
255 /// assert_eq!(vec, vec!["a", "d", "c", "b"].into());
256 /// ```
257 pub fn swap(&mut self, a: usize, b: usize) {
258 self.data.swap(a, b);
259 }
260
    /// Returns the number of elements the vector can hold without
    /// reallocating.
    ///
    /// # Examples
    ///
    /// ```
    /// use rotated_vec::RotatedVec;
    ///
    /// let vec: RotatedVec<i32> = RotatedVec::with_capacity(10);
    /// assert_eq!(vec.capacity(), 10);
    /// ```
    pub fn capacity(&self) -> usize {
        // reports the element storage only; `start_indexes` capacity is internal
        self.data.capacity()
    }
275
    /// Reserves the minimum capacity for exactly `additional` more elements to
    /// be inserted in the given `RotatedVec<T>`. After calling `reserve_exact`,
    /// capacity will be greater than or equal to `self.len() + additional`.
    /// Does nothing if the capacity is already sufficient.
    ///
    /// Note that the allocator may give the collection more space than it
    /// requests. Therefore, capacity can not be relied upon to be precisely
    /// minimal. Prefer `reserve` if future insertions are expected.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows `usize`.
    ///
    /// # Examples
    ///
    /// ```
    /// use rotated_vec::RotatedVec;
    ///
    /// let mut vec: RotatedVec<_> = vec![1].into();
    /// vec.reserve_exact(10);
    /// assert!(vec.capacity() >= 11);
    /// ```
    pub fn reserve_exact(&mut self, additional: usize) {
        // only the element storage is reserved; `start_indexes` grows on demand
        self.data.reserve_exact(additional);
    }
301
    /// Reserves capacity for at least `additional` more elements to be inserted
    /// in the given `RotatedVec<T>`. The collection may reserve more space to avoid
    /// frequent reallocations. After calling `reserve`, capacity will be
    /// greater than or equal to `self.len() + additional`. Does nothing if
    /// capacity is already sufficient.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows `usize`.
    ///
    /// # Examples
    ///
    /// ```
    /// use rotated_vec::RotatedVec;
    ///
    /// let mut vec: RotatedVec<_> = vec![1].into();
    /// vec.reserve(10);
    /// assert!(vec.capacity() >= 11);
    /// ```
    pub fn reserve(&mut self, additional: usize) {
        // only the element storage is reserved; `start_indexes` grows on demand
        self.data.reserve(additional);
    }
324
    /// Shrinks the capacity of the vector as much as possible.
    ///
    /// It will drop down as close as possible to the length but the allocator
    /// may still inform the vector that there is space for a few more elements.
    ///
    /// # Examples
    ///
    /// ```
    /// use rotated_vec::RotatedVec;
    ///
    /// let mut vec = RotatedVec::with_capacity(10);
    /// vec.extend([1, 2, 3].iter().cloned());
    /// assert_eq!(vec.capacity(), 10);
    /// vec.shrink_to_fit();
    /// assert!(vec.capacity() >= 3);
    /// ```
    pub fn shrink_to_fit(&mut self) {
        // only the element storage is shrunk; `start_indexes` keeps its capacity
        self.data.shrink_to_fit();
    }
344
345 /// Shortens the vector, keeping the first `len` elements and dropping
346 /// the rest.
347 ///
348 /// This is an `O(√n)` operation.
349 ///
350 /// If `len` is greater than the vector's current length, this has no
351 /// effect.
352 ///
353 /// Note that this method has no effect on the allocated capacity
354 /// of the vector.
355 ///
356 /// # Examples
357 ///
358 /// Truncating a five element vector to two elements:
359 ///
360 /// ```
361 /// use rotated_vec::RotatedVec;
362 ///
363 /// let mut vec: RotatedVec<_> = vec![1, 2, 3, 4, 5].into();
364 /// vec.truncate(2);
365 /// assert_eq!(vec, vec![1, 2].into());
366 /// ```
367 ///
368 /// No truncation occurs when `len` is greater than the vector's current
369 /// length:
370 ///
371 /// ```
372 /// use rotated_vec::RotatedVec;
373 ///
374 /// let mut vec: RotatedVec<_> = vec![1, 2, 3].into();
375 /// vec.truncate(8);
376 /// assert_eq!(vec, vec![1, 2, 3].into());
377 /// ```
378 ///
379 /// Truncating when `len == 0` is equivalent to calling the [`clear`]
380 /// method.
381 ///
382 /// ```
383 /// use rotated_vec::RotatedVec;
384 ///
385 /// let mut vec: RotatedVec<_> = vec![1, 2, 3].into();
386 /// vec.truncate(0);
387 /// assert_eq!(vec, vec![].into());
388 /// ```
389 ///
390 pub fn truncate(&mut self, len: usize) {
391 if len >= self.len() {
392 return
393 }
394 // conceptually, we drop all subarrays after the truncated length,
395 // then un-rotate the new last subarray, then drop any remaining elements.
396 self.unrotate_last_subarray();
397 // drop subarrays after truncated length
398 let last_subarray_idx = Self::get_subarray_idx_from_array_idx(self.len() - 1);
399 self.start_indexes.truncate(last_subarray_idx + 1);
400 // truncate data array
401 self.data.truncate(len);
402 }
403
404 /// Gets an iterator that visits the values in the `RotatedVec` in order.
405 ///
406 /// # Examples
407 ///
408 /// ```
409 /// use rotated_vec::RotatedVec;
410 ///
411 /// let vec: RotatedVec<usize> = vec![1, 2, 3].into();
412 /// let mut iter = vec.iter();
413 /// assert_eq!(iter.next(), Some(&1));
414 /// assert_eq!(iter.next(), Some(&2));
415 /// assert_eq!(iter.next(), Some(&3));
416 /// assert_eq!(iter.next(), None);
417 /// ```
418 pub fn iter(&self) -> Iter<T> {
419 Iter {
420 container: self,
421 next_index: 0,
422 next_rev_index: if self.len() == 0 { 0 } else { self.len() - 1 },
423 }
424 }
425
426 /// Gets a mutable iterator that visits the values in the `RotatedVec` in order.
427 ///
428 /// # Examples
429 ///
430 /// ```
431 /// use rotated_vec::RotatedVec;
432 ///
433 /// let mut vec: RotatedVec<usize> = vec![1, 2, 3].into();
434 /// let mut iter = vec.iter_mut();
435 /// let mut current_elem = None;
436 /// current_elem = iter.next();
437 /// assert_eq!(current_elem, Some(&mut 1));
438 /// *current_elem.unwrap() = 2;
439 /// current_elem = iter.next();
440 /// assert_eq!(current_elem, Some(&mut 2));
441 /// *current_elem.unwrap() = 3;
442 /// current_elem = iter.next();
443 /// assert_eq!(current_elem, Some(&mut 3));
444 /// *current_elem.unwrap() = 4;
445 /// assert_eq!(iter.next(), None);
446 /// assert_eq!(vec, vec![2, 3, 4].into());
447 /// ```
448 pub fn iter_mut(&mut self) -> IterMut<T> {
449 let len = self.len();
450 IterMut {
451 container: self,
452 next_index: 0,
453 next_rev_index: len - 1,
454 }
455 }
456
    /// Returns the number of elements in the vector.
    ///
    /// This is a constant-time operation.
    ///
    /// # Examples
    ///
    /// ```
    /// use rotated_vec::RotatedVec;
    ///
    /// let mut vec = RotatedVec::new();
    /// assert_eq!(vec.len(), 0);
    /// vec.push(1);
    /// assert_eq!(vec.len(), 1);
    /// ```
    pub fn len(&self) -> usize {
        self.data.len()
    }
474
    /// Returns `true` if the vector contains no elements.
    ///
    /// This is a constant-time operation.
    ///
    /// # Examples
    ///
    /// ```
    /// use rotated_vec::RotatedVec;
    ///
    /// let mut vec = RotatedVec::new();
    /// assert!(vec.is_empty());
    /// vec.push(1);
    /// assert!(!vec.is_empty());
    /// ```
    pub fn is_empty(&self) -> bool {
        self.data.is_empty()
    }
492
    /// Clears the vector, removing all values.
    ///
    /// This is a constant-time operation.
    ///
    /// # Examples
    ///
    /// ```
    /// use rotated_vec::RotatedVec;
    ///
    /// let mut vec = RotatedVec::new();
    /// vec.push(1);
    /// vec.clear();
    /// assert!(vec.is_empty());
    /// ```
    pub fn clear(&mut self) {
        // both the elements and the per-subarray pivots are discarded together
        self.data.clear();
        self.start_indexes.clear();
    }
511
    /// Returns `true` if the `RotatedVec` contains an element equal to the
    /// given value.
    ///
    /// # Examples
    ///
    /// ```
    /// use rotated_vec::RotatedVec;
    ///
    /// let mut vec = RotatedVec::new();
    ///
    /// vec.push(0);
    /// vec.push(1);
    ///
    /// assert_eq!(vec.contains(&1), true);
    /// assert_eq!(vec.contains(&10), false);
    /// ```
    pub fn contains(&self, x: &T) -> bool
    where T: PartialEq<T>
    {
        // membership is order-independent, so scanning the raw (rotated)
        // backing array is correct
        self.data.contains(x)
    }
533
    /// Appends an element to the back of a collection.
    ///
    /// This is an amortized constant-time operation (appending at the end
    /// takes the fast path of `insert`, which only grows the backing `Vec`).
    ///
    /// # Panics
    ///
    /// Panics if the number of elements in the vector overflows a `usize`.
    ///
    /// # Examples
    ///
    /// ```
    /// use rotated_vec::RotatedVec;
    ///
    /// let mut vec: RotatedVec<_> = vec![1, 2].into();
    /// vec.push(3);
    /// assert_eq!(vec, vec![1, 2, 3].into());
    /// ```
    pub fn push(&mut self, value: T) {
        self.insert(self.len(), value);
    }
554
555 /// Removes the last element from a vector and returns it, or [`None`] if it
556 /// is empty.
557 ///
558 /// This is a constant-time operation.
559 ///
560 /// # Examples
561 ///
562 /// ```
563 /// use rotated_vec::RotatedVec;
564 ///
565 /// let mut vec: RotatedVec<_> = vec![1, 2, 3].into();
566 /// assert_eq!(vec.pop(), Some(3));
567 /// assert_eq!(vec, vec![1, 2].into());
568 /// ```
569 pub fn pop(&mut self) -> Option<T> {
570 if self.is_empty() {
571 None
572 } else {
573 Some(self.remove(self.len() - 1))
574 }
575 }
576
    /// Inserts an element at position `index` within the vector.
    ///
    /// This is an `O(√n)` operation.
    ///
    /// # Panics
    ///
    /// Panics if `index > len`.
    ///
    /// # Examples
    ///
    /// ```
    /// use rotated_vec::RotatedVec;
    ///
    /// let mut vec: RotatedVec<_> = vec![1, 2, 3].into();
    /// vec.insert(1, 4);
    /// assert_eq!(vec, vec![1, 4, 2, 3].into());
    /// vec.insert(4, 5);
    /// assert_eq!(vec, vec![1, 4, 2, 3, 5].into());
    /// ```
    pub fn insert(&mut self, index: usize, element: T) {
        assert!(index <= self.len());
        // map the logical index to its physical position; inserting at the
        // very end has no physical counterpart yet, so use len() directly
        let insert_idx = if index < self.len() {
            self.get_real_index(index)
        } else {
            self.len()
        };
        // find subarray containing this insertion point
        let subarray_idx = Self::get_subarray_idx_from_array_idx(insert_idx);
        // inserted element could be in a new subarray
        debug_assert!(subarray_idx <= self.start_indexes.len());
        // create a new subarray if necessary
        if subarray_idx == self.start_indexes.len() {
            self.start_indexes.push(0);
        }
        let subarray_offset = Self::get_array_idx_from_subarray_idx(subarray_idx);
        // if insertion point is in last subarray and last subarray isn't full, just insert the new element
        if subarray_idx == self.start_indexes.len() - 1 && !self.is_last_subarray_full() {
            // Since we always insert into a partially full subarray in order,
            // there is no need to update the pivot location.
            debug_assert!(self.start_indexes[subarray_idx] == 0);
            self.data.insert(insert_idx, element);
            debug_assert!(self.assert_invariants());
            return;
        }
        // From now on, we can assume that the subarray we're inserting into is always full.
        let next_subarray_offset = Self::get_array_idx_from_subarray_idx(subarray_idx + 1);
        let subarray = &mut self.data[subarray_offset..next_subarray_offset];
        // `pivot_offset` is the physical offset of the subarray's logically
        // first element; `end_offset` is the offset of its logically last one
        let pivot_offset = self.start_indexes[subarray_idx];
        let insert_offset = insert_idx - subarray_offset;
        let end_offset = if pivot_offset == 0 {
            subarray.len() - 1
        } else {
            pivot_offset - 1
        };
        // the logically last element is displaced into the next subarray below
        let mut prev_end_elem = subarray[end_offset];
        // this logic is best understood with a diagram of a rotated array, e.g.:
        //
        // ------------------------------------------------------------------------
        // | 12 | 13 | 14 | 15 | 16 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 |
        // ------------------------------------------------------------------------
        //
        if end_offset < pivot_offset && insert_offset >= pivot_offset {
            // insertion point lies in the "high" (pre-pivot-wrap) region:
            // slide the run left onto the old end slot and rotate the pivot back
            subarray.copy_within(pivot_offset..insert_offset, end_offset);
            subarray[insert_offset - 1] = element;
            self.start_indexes[subarray_idx] = end_offset;
        } else {
            // insertion point and end are on the same side: shift right one slot
            subarray.copy_within(insert_offset..end_offset, insert_offset + 1);
            subarray[insert_offset] = element;
        }
        debug_assert!(self.assert_invariants());
        let max_subarray_idx = self.start_indexes.len() - 1;
        let next_subarray_idx = subarray_idx + 1;
        let last_subarray_full = self.is_last_subarray_full();
        // now loop over all remaining subarrays, setting the first (pivot) of each to the last of its predecessor
        for (i, pivot_offset_ref) in self.start_indexes[next_subarray_idx..].iter_mut().enumerate() {
            let cur_subarray_idx = next_subarray_idx + i;
            // if the last subarray isn't full, skip it
            if cur_subarray_idx == max_subarray_idx && !last_subarray_full {
                break;
            }
            // a full subarray k holds k + 1 elements, so its last offset is k
            let end_offset = if *pivot_offset_ref == 0 {
                cur_subarray_idx
            } else {
                *pivot_offset_ref - 1
            };
            let end_idx = end_offset + Self::get_array_idx_from_subarray_idx(cur_subarray_idx);
            // overwrite this subarray's logical end with the element displaced
            // from the previous subarray, and rotate the pivot back one slot
            let next_end_elem = self.data[end_idx];
            self.data[end_idx] = prev_end_elem;
            *pivot_offset_ref = end_offset;
            prev_end_elem = next_end_elem;
        }
        // if the last subarray was full, append current last element to a new subarray, otherwise insert last element in rotated order
        if last_subarray_full {
            self.data.push(prev_end_elem);
            self.start_indexes.push(0);
        } else {
            let max_subarray_offset = Self::get_array_idx_from_subarray_idx(max_subarray_idx);
            // the displaced element logically precedes everything in the
            // partial last subarray, which is unrotated (pivot 0), so it is
            // inserted at the subarray's first slot
            self.data.insert(max_subarray_offset, prev_end_elem);
        }
        debug_assert!(self.assert_invariants());
    }
681
    /// Removes and returns the element at position `index` within the vector.
    ///
    /// This is an `O(√n)` operation.
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of bounds.
    ///
    /// # Examples
    ///
    /// ```
    /// use rotated_vec::RotatedVec;
    ///
    /// let mut vec: RotatedVec<_> = vec![1, 2, 3].into();
    /// assert_eq!(vec.remove(1), 2);
    /// assert_eq!(vec, vec![1, 3].into());
    /// ```
    pub fn remove(&mut self, index: usize) -> T {
        assert!(index < self.len());
        let old_len = self.len();
        // physical position of the element being removed
        let mut remove_idx = self.get_real_index(index);
        let element = self.data[remove_idx];
        let max_subarray_idx = self.start_indexes.len() - 1;
        let max_subarray_offset = Self::get_array_idx_from_subarray_idx(max_subarray_idx);
        // find subarray containing the element to remove
        let subarray_idx = Self::get_subarray_idx_from_array_idx(remove_idx);
        debug_assert!(subarray_idx <= max_subarray_idx);
        let subarray_offset = Self::get_array_idx_from_subarray_idx(subarray_idx);
        // if we're not removing an element in the last subarray, then we end up deleting its first element,
        // which is always at the first offset since it's in order
        let mut max_subarray_remove_idx = if subarray_idx == max_subarray_idx {
            remove_idx
        } else {
            max_subarray_offset
        };
        // if the last subarray was rotated, un-rotate it to maintain insert invariant
        if self.is_last_subarray_full() {
            let last_start_offset = self.start_indexes[max_subarray_idx];
            // rotate left by the start offset
            self.data[max_subarray_offset..].rotate_left(last_start_offset);
            self.start_indexes[max_subarray_idx] = 0;
            // the remove index might change after un-rotating the last subarray
            if subarray_idx == max_subarray_idx {
                remove_idx = self.get_real_index(index);
                max_subarray_remove_idx = remove_idx;
            }
        }
        // if insertion point is not in last subarray, perform a "hard exchange"
        if subarray_idx < max_subarray_idx {
            // From now on, we can assume that the subarray we're removing from is full.
            let next_subarray_offset = Self::get_array_idx_from_subarray_idx(subarray_idx + 1);
            let subarray = &mut self.data[subarray_offset..next_subarray_offset];
            // `pivot_offset` is the physical offset of the subarray's logically
            // first element; `end_offset` that of its logically last one
            let pivot_offset = self.start_indexes[subarray_idx];
            let remove_offset = remove_idx - subarray_offset;
            let end_offset = if pivot_offset == 0 {
                subarray.len() - 1
            } else {
                pivot_offset - 1
            };
            // this logic is best understood with a diagram of a rotated array, e.g.:
            //
            // ------------------------------------------------------------------------
            // | 12 | 13 | 14 | 15 | 16 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 |
            // ------------------------------------------------------------------------
            //
            // after closing the gap, `prev_end_offset` is the physical offset of
            // the hole left at this subarray's logical end
            let mut prev_end_offset = if end_offset < pivot_offset && remove_offset >= pivot_offset
            {
                // removal point lies in the "high" region: shift the pivot run
                // right one slot and advance the pivot (with wrap-around)
                subarray.copy_within(pivot_offset..remove_offset, pivot_offset + 1);
                let new_pivot_offset = if pivot_offset == subarray.len() - 1 {
                    0
                } else {
                    pivot_offset + 1
                };
                self.start_indexes[subarray_idx] = new_pivot_offset;
                pivot_offset
            } else {
                // removal point and end are on the same side: shift left one slot
                subarray.copy_within(remove_offset + 1..=end_offset, remove_offset);
                end_offset
            };
            let next_subarray_idx = min(max_subarray_idx, subarray_idx + 1);
            // now perform an "easy exchange" in all remaining subarrays except the last,
            // setting the last element of each to the first element of its successor.
            for (i, pivot_offset_ref) in self.start_indexes[next_subarray_idx..max_subarray_idx]
                .iter_mut()
                .enumerate()
            {
                let cur_subarray_idx = next_subarray_idx + i;
                let cur_subarray_offset = Self::get_array_idx_from_subarray_idx(cur_subarray_idx);
                let prev_end_idx =
                    prev_end_offset + Self::get_array_idx_from_subarray_idx(cur_subarray_idx - 1);
                // promote this subarray's logical first element into the hole
                // of the previous subarray, then advance the pivot (with wrap)
                self.data[prev_end_idx] = self.data[cur_subarray_offset + *pivot_offset_ref];
                prev_end_offset = *pivot_offset_ref;
                let new_start_offset = if *pivot_offset_ref == cur_subarray_idx {
                    0
                } else {
                    *pivot_offset_ref + 1
                };
                *pivot_offset_ref = new_start_offset;
            }
            // now we fix up the last subarray. if it was initially full, we need to un-rotate it to maintain the insert invariant.
            // if the removed element is in the last subarray, we just un-rotate and remove() on the vec, updating auxiliary arrays.
            // otherwise, we copy the first element to the last position of the previous subarray, then remove it and fix up
            // auxiliary arrays.
            let prev_end_idx =
                prev_end_offset + Self::get_array_idx_from_subarray_idx(max_subarray_idx - 1);
            // since the last subarray is always in order, its first element is always on the first offset
            self.data[prev_end_idx] = self.data[max_subarray_offset];
        }
        self.data.remove(max_subarray_remove_idx);
        // if last subarray is now empty, trim start_indexes
        if max_subarray_offset == self.data.len() {
            self.start_indexes.pop();
        }
        debug_assert!(self.len() == old_len - 1);
        debug_assert!(self.assert_invariants());
        element
    }
799
800 /// Moves all the elements of `other` into `self`, leaving `other` empty.
801 ///
802 /// # Panics
803 ///
804 /// Panics if the number of elements in the array overflows a `usize`.
805 ///
806 /// # Examples
807 ///
808 /// ```
809 /// use rotated_vec::RotatedVec;
810 ///
811 /// let mut vec: RotatedVec<_> = vec![1, 2, 3].into();
812 /// let mut vec2: RotatedVec<_> = vec![4, 5, 6].into();
813 /// vec.append(&mut vec2);
814 /// assert_eq!(vec, vec![1, 2, 3, 4, 5, 6].into());
815 /// assert_eq!(vec2, vec![].into());
816 /// ```
817 pub fn append(&mut self, other: &mut Self) {
818 // if the last subarray is partially full, un-rotate it so we can append directly
819 if !self.is_last_subarray_full() {
820 self.unrotate_last_subarray();
821 }
822 // append data directly to backing array
823 self.data.append(&mut other.data);
824 // fix up start indexes
825 let last_subarray_idx = Self::get_subarray_idx_from_array_idx(self.data.len() - 1);
826 self.start_indexes.resize(last_subarray_idx + 1, 0);
827 // clear all data in `other`
828 other.clear();
829 }
830
831 /// Sorts the vector.
832 ///
833 /// This sort is stable (i.e., does not reorder equal elements) and `O(n log n)` worst-case.
834 ///
835 /// When applicable, unstable sorting is preferred because it is generally faster than stable
836 /// sorting and it doesn't allocate auxiliary memory.
837 /// See [`sort_unstable`](#method.sort_unstable).
838 ///
839 /// # Current implementation
840 ///
841 /// The current algorithm is an adaptive, iterative merge sort inspired by
842 /// [timsort](https://en.wikipedia.org/wiki/Timsort).
843 /// It is designed to be very fast in cases where the vector is nearly sorted, or consists of
844 /// two or more sorted sequences concatenated one after another.
845 ///
846 /// Also, it allocates temporary storage half the size of `self`, but for short vectors a
847 /// non-allocating insertion sort is used instead.
848 ///
849 /// # Examples
850 ///
851 /// ```
852 /// use is_sorted::IsSorted;
853 /// use rotated_vec::RotatedVec;
854 ///
855 /// let mut vec: RotatedVec<_> = vec![-5, 4, 1, -3, 2].into();
856 ///
857 /// vec.sort();
858 /// assert!(IsSorted::is_sorted(&mut vec.iter()));
859 /// ```
860 pub fn sort(&mut self)
861 where T: Ord
862 {
863 self.data.sort();
864 // TODO: we really want slice.fill() here when it becomes available
865 for idx in self.start_indexes.as_mut_slice() {
866 *idx = 0;
867 }
868 }
869
870 /// Sorts the vector, but may not preserve the order of equal elements.
871 ///
872 /// This sort is unstable (i.e., may reorder equal elements), in-place
873 /// (i.e., does not allocate), and `O(n log n)` worst-case.
874 ///
875 /// # Current implementation
876 ///
877 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
878 /// which combines the fast average case of randomized quicksort with the fast worst case of
879 /// heapsort, while achieving linear time on vectors with certain patterns. It uses some
880 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
881 /// deterministic behavior.
882 ///
883 /// It is typically faster than stable sorting, except in a few special cases, e.g., when the
884 /// vector consists of several concatenated sorted sequences.
885 ///
886 /// # Examples
887 ///
888 /// ```
889 /// use is_sorted::IsSorted;
890 /// use rotated_vec::RotatedVec;
891 ///
892 /// let mut vec: RotatedVec<_> = vec![-5, 4, 1, -3, 2].into();
893 ///
894 /// vec.sort_unstable();
895 /// assert!(IsSorted::is_sorted(&mut vec.iter()));
896 /// ```
897 ///
898 /// [pdqsort]: https://github.com/orlp/pdqsort
899 pub fn sort_unstable(&mut self)
900 where T: Ord
901 {
902 self.data.sort_unstable();
903 // TODO: we really want slice.fill() here when it becomes available
904 for idx in self.start_indexes.as_mut_slice() {
905 *idx = 0;
906 }
907 }
908
909 // this returns the index in the backing array of the given logical index
910 fn get_real_index(&self, index: usize) -> usize {
911 debug_assert!(index < self.data.len());
912 let subarray_idx = Self::get_subarray_idx_from_array_idx(index);
913 let subarray_start_idx = Self::get_array_idx_from_subarray_idx(subarray_idx);
914 let subarray_len = if subarray_idx == self.start_indexes.len() - 1 {
915 self.data.len() - subarray_start_idx
916 } else {
917 subarray_idx + 1
918 };
919 debug_assert!(index >= subarray_start_idx);
920 let idx_offset = index - subarray_start_idx;
921 let pivot_offset = self.start_indexes[subarray_idx];
922 let rotated_offset = (pivot_offset + idx_offset) % subarray_len;
923 debug_assert!(rotated_offset < subarray_len);
924 let real_idx = subarray_start_idx + rotated_offset;
925 real_idx
926 }
927
928 fn integer_sum(n: usize) -> usize {
929 // I learned this from a 10-year-old named Gauss
930 (n * (n + 1)) / 2
931 }
932
933 fn integer_sum_inverse(n: usize) -> usize {
934 // y = (x * (x + 1)) / 2
935 // x = (sqrt(8 * y + 1) - 1) / 2
936 ((f64::from((n * 8 + 1) as u32).sqrt() as usize) - 1) / 2
937 }
938
939 fn get_subarray_idx_from_array_idx(idx: usize) -> usize {
940 if idx == 0 {
941 0
942 } else {
943 Self::integer_sum_inverse(idx)
944 }
945 }
946
947 fn get_array_idx_from_subarray_idx(idx: usize) -> usize {
948 if idx == 0 {
949 0
950 } else {
951 Self::integer_sum(idx)
952 }
953 }
954
    // The last subarray is full exactly when the data length reaches the start
    // offset of the (hypothetical) next subarray. Note this is vacuously true
    // for an empty vector.
    fn is_last_subarray_full(&self) -> bool {
        self.data.len() == Self::get_array_idx_from_subarray_idx(self.start_indexes.len())
    }
958
959 fn unrotate_last_subarray(&mut self) {
960 let last_subarray_idx = Self::get_subarray_idx_from_array_idx(self.len() - 1);
961 let last_subarray_start_idx = Self::get_array_idx_from_subarray_idx(last_subarray_idx);
962 let last_subarray_len = if last_subarray_idx == self.start_indexes.len() - 1 {
963 self.len() - last_subarray_start_idx
964 } else {
965 last_subarray_idx + 1
966 };
967 let last_subarray_end_idx = last_subarray_start_idx + last_subarray_len;
968 let last_subarray = &mut self.data[last_subarray_start_idx..last_subarray_end_idx];
969 // un-rotate subarray in-place
970 let pivot_offset = self.start_indexes[last_subarray_idx];
971 last_subarray.rotate_left(pivot_offset);
972 self.start_indexes[last_subarray_idx] = 0;
973 }
974
975 #[inline(always)]
976 fn assert_invariants(&self) -> bool {
977 // assert offset array has proper length
978 let expected_start_indexes_len = if self.is_empty() {
979 0
980 } else {
981 Self::get_subarray_idx_from_array_idx(self.len() - 1) + 1
982 };
983 assert_eq!(self.start_indexes.len(), expected_start_indexes_len);
984 // assert index of each subarray's first element lies within the subarray
985 assert!(self
986 .start_indexes
987 .iter()
988 .enumerate()
989 .all(|(idx, &offset)| offset <= idx));
990 true
991 }
992
993 // given data array, initialize offset array
994 fn init(&mut self) {
995 debug_assert!(self.start_indexes.is_empty());
996 if !self.data.is_empty() {
997 let last_subarray_idx = Self::get_subarray_idx_from_array_idx(self.data.len() - 1);
998 self.start_indexes = vec![0; last_subarray_idx + 1];
999 }
1000 }
1001}
1002
1003impl<T> PartialEq for RotatedVec<T>
1004where
1005 T: Copy + Default + Debug + PartialEq,
1006{
1007 fn eq(&self, other: &Self) -> bool {
1008 if self.len() != other.len() {
1009 return false;
1010 }
1011 for i in 0..self.len() {
1012 if self.get(i).unwrap() != other.get(i).unwrap() {
1013 return false;
1014 }
1015 }
1016 true
1017 }
1018}
1019
// Marker impl: `eq` is an equivalence relation whenever `T`'s is.
//
// NOTE(review): the bound here is only `T: PartialEq`, so a type that is
// `PartialEq` but not `Eq` (e.g. `f64`, where `NaN != NaN` breaks
// reflexivity) still lets `RotatedVec<f64>` claim `Eq`. Consider requiring
// `T: Eq`; left unchanged here because tightening a public trait bound is a
// breaking change for downstream users.
impl<T> Eq for RotatedVec<T>
where
    T: Copy + Default + Debug + PartialEq
{}
1024
1025impl<T> PartialOrd for RotatedVec<T>
1026where
1027 T: Copy + Default + Debug + PartialOrd
1028{
1029 fn partial_cmp(&self, other: &RotatedVec<T>) -> Option<Ordering> {
1030 self.iter().partial_cmp(other.iter())
1031 }
1032}
1033
1034impl<T> Ord for RotatedVec<T>
1035where
1036 T: Copy + Default + Debug + Ord
1037{
1038 fn cmp(&self, other: &RotatedVec<T>) -> Ordering {
1039 self.iter().cmp(other.iter())
1040 }
1041}
1042
1043impl<T> Hash for RotatedVec<T>
1044where
1045 T: Copy + Default + Debug + PartialEq + Hash
1046{
1047 fn hash<H: Hasher>(&self, state: &mut H) {
1048 for i in 0..self.len() {
1049 self.get(i).hash(state);
1050 }
1051 }
1052}
1053
1054impl<T> Index<usize> for RotatedVec<T>
1055where
1056 T: Copy + Default + Debug,
1057{
1058 type Output = T;
1059
1060 #[inline]
1061 fn index(&self, index: usize) -> &T {
1062 self.get(index).expect("Out of bounds access")
1063 }
1064}
1065
1066impl<T> IndexMut<usize> for RotatedVec<T>
1067where
1068 T: Copy + Default + Debug,
1069{
1070 #[inline]
1071 fn index_mut(&mut self, index: usize) -> &mut T {
1072 self.get_mut(index).expect("Out of bounds access")
1073 }
1074}
1075
1076impl<T> Extend<T> for RotatedVec<T>
1077where
1078 T: Copy + Default + Debug,
1079{
1080 fn extend<I>(&mut self, iter: I)
1081 where
1082 I: IntoIterator<Item = T>,
1083 {
1084 // if the last subarray is partially full, un-rotate it so we can append directly
1085 if !self.is_last_subarray_full() {
1086 self.unrotate_last_subarray();
1087 }
1088 // append data directly to backing array
1089 self.data.extend(iter);
1090 // fix up start indexes
1091 let last_subarray_idx = Self::get_subarray_idx_from_array_idx(self.data.len() - 1);
1092 self.start_indexes.resize(last_subarray_idx + 1, 0);
1093 }
1094}
1095
1096impl<'a, T> Iterator for Iter<'a, T>
1097where
1098 T: Copy + Default + Debug,
1099{
1100 type Item = &'a T;
1101
1102 fn next(&mut self) -> Option<Self::Item> {
1103 if self.len() == 0 || self.next_index > self.next_rev_index {
1104 None
1105 } else {
1106 let current = self.container.get(self.next_index);
1107 self.next_index += 1;
1108 debug_assert!(self.assert_invariants());
1109 current
1110 }
1111 }
1112
1113 fn nth(&mut self, n: usize) -> Option<Self::Item> {
1114 self.next_index = min(self.next_index + n, self.len());
1115 let ret = if self.len() == 0 || self.next_index > self.next_rev_index {
1116 None
1117 } else {
1118 let nth = self.container.get(self.next_index);
1119 self.next_index += 1;
1120 nth
1121 };
1122 debug_assert!(self.assert_invariants());
1123 ret
1124 }
1125
1126 fn count(self) -> usize {
1127 self.len() - self.next_index
1128 }
1129
1130 fn last(self) -> Option<Self::Item> {
1131 if self.len() == 0 {
1132 None
1133 } else {
1134 self.container.get(self.len() - 1)
1135 }
1136 }
1137
1138 fn max(self) -> Option<Self::Item> {
1139 if self.len() == 0 {
1140 None
1141 } else {
1142 self.container.get(self.len() - 1)
1143 }
1144 }
1145
1146 fn min(self) -> Option<Self::Item> {
1147 self.container.get(0)
1148 }
1149
1150 fn size_hint(&self) -> (usize, Option<usize>) {
1151 let remaining_count = self.len() - self.next_index;
1152 (remaining_count, Some(remaining_count))
1153 }
1154}
1155
1156impl<'a, T> DoubleEndedIterator for Iter<'a, T>
1157where
1158 T: Copy + Default + Debug,
1159{
1160 fn next_back(&mut self) -> Option<Self::Item> {
1161 if self.len() == 0 || self.next_rev_index < self.next_index {
1162 None
1163 } else {
1164 let current = self.container.get(self.next_rev_index);
1165 // We can't decrement next_rev_index past 0, so we cheat and move next_index
1166 // ahead instead. That works since next() must return None once next_rev_index
1167 // has crossed next_index.
1168 if self.next_rev_index == 0 {
1169 self.next_index += 1;
1170 } else {
1171 self.next_rev_index -= 1;
1172 }
1173 debug_assert!(self.assert_invariants());
1174 current
1175 }
1176 }
1177
1178 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
1179 self.next_rev_index = self.next_rev_index.saturating_sub(n);
1180 let ret = if self.len() == 0 || self.next_rev_index < self.next_index {
1181 None
1182 } else {
1183 let nth = self.container.get(self.next_rev_index);
1184 // We can't decrement next_rev_index past 0, so we cheat and move next_index
1185 // ahead instead. That works since next() must return None once next_rev_index
1186 // has crossed next_index.
1187 if self.next_rev_index == 0 {
1188 self.next_index += 1;
1189 } else {
1190 self.next_rev_index -= 1;
1191 }
1192 nth
1193 };
1194 debug_assert!(self.assert_invariants());
1195 ret
1196 }
1197}
1198
// NOTE(review): `ExactSizeIterator::len` is documented to return the number
// of elements *remaining*, but this returns the total length of the
// underlying container regardless of how much has been consumed. The
// `Iterator` methods above call `self.len()` expecting exactly that
// container-length meaning (e.g. `nth` clamps `next_index` with it), so the
// contract violation cannot be fixed in isolation — both impls would need to
// change together.
impl<T> ExactSizeIterator for Iter<'_, T>
where
    T: Copy + Default + Debug,
{
    fn len(&self) -> usize {
        self.container.len()
    }
}
1207
// `Iter` is fused: `next` returns `None` exactly when the front cursor has
// passed the back cursor, and no method moves the cursors back toward each
// other, so it keeps returning `None` forever.
impl<T> FusedIterator for Iter<'_, T> where T: Copy + Default + Debug {}
1209
1210impl<'a, T> Iterator for IterMut<'a, T>
1211where
1212 T: Copy + Default + Debug,
1213{
1214 type Item = &'a mut T;
1215
1216 // unsafe code required, see:
1217 // https://www.reddit.com/r/rust/comments/6ffrbs/implementing_a_safe_mutable_iterator/
1218 // https://stackoverflow.com/questions/25730586/how-can-i-create-my-own-data-structure-with-an-iterator-that-returns-mutable-ref
1219 // https://stackoverflow.com/questions/27118398/simple-as-possible-example-of-returning-a-mutable-reference-from-your-own-iterat
1220 fn next(&mut self) -> Option<Self::Item> {
1221 let ret = if self.len() == 0 || self.next_index > self.next_rev_index {
1222 None
1223 } else {
1224 let current = self.container.get_mut(self.next_index);
1225 self.next_index += 1;
1226 // see MutItems example at https://docs.rs/strided/0.2.9/src/strided/base.rs.html
1227 // per above links, rustc cannot understand that we never return two mutable references to the same object,
1228 // so we have to use unsafe code to coerce the return value to the desired lifetime
1229 unsafe { mem::transmute(current) }
1230 };
1231 debug_assert!(self.assert_invariants());
1232 ret
1233 }
1234
1235 fn nth(&mut self, n: usize) -> Option<Self::Item> {
1236 self.next_index = min(self.next_index + n, self.len());
1237 let ret = if self.len() == 0 || self.next_index > self.next_rev_index {
1238 None
1239 } else {
1240 let nth = self.container.get_mut(self.next_index);
1241 self.next_index += 1;
1242 // per above links, rustc cannot understand that we never return two mutable references to the same object,
1243 // so we have to use unsafe code to coerce the return value to the desired lifetime
1244 unsafe { mem::transmute(nth) }
1245 };
1246 debug_assert!(self.assert_invariants());
1247 ret
1248 }
1249
1250 fn count(self) -> usize {
1251 self.len() - self.next_index
1252 }
1253
1254 fn last(self) -> Option<Self::Item> {
1255 if self.len() == 0 {
1256 None
1257 } else {
1258 self.container.get_mut(self.len() - 1)
1259 }
1260 }
1261
1262 fn size_hint(&self) -> (usize, Option<usize>) {
1263 let remaining_count = self.len() - self.next_index;
1264 (remaining_count, Some(remaining_count))
1265 }
1266}
1267
1268impl<'a, T> DoubleEndedIterator for IterMut<'a, T>
1269where
1270 T: Copy + Default + Debug,
1271{
1272 fn next_back(&mut self) -> Option<Self::Item> {
1273 if self.len() == 0 || self.next_rev_index < self.next_index {
1274 None
1275 } else {
1276 let current = self.container.get(self.next_rev_index);
1277 // We can't decrement next_rev_index past 0, so we cheat and move next_index
1278 // ahead instead. That works since next() must return None once next_rev_index
1279 // has crossed next_index.
1280 if self.next_rev_index == 0 {
1281 self.next_index += 1;
1282 } else {
1283 self.next_rev_index -= 1;
1284 }
1285 debug_assert!(self.assert_invariants());
1286 // per above links, rustc cannot understand that we never return two mutable references to the same object,
1287 // so we have to use unsafe code to coerce the return value to the desired lifetime
1288 unsafe { mem::transmute(current) }
1289 }
1290 }
1291
1292 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
1293 self.next_rev_index = self.next_rev_index.saturating_sub(n);
1294 let ret = if self.len() == 0 || self.next_rev_index < self.next_index {
1295 None
1296 } else {
1297 let nth = self.container.get(self.next_rev_index);
1298 // We can't decrement next_rev_index past 0, so we cheat and move next_index
1299 // ahead instead. That works since next() must return None once next_rev_index
1300 // has crossed next_index.
1301 if self.next_rev_index == 0 {
1302 self.next_index += 1;
1303 } else {
1304 self.next_rev_index -= 1;
1305 }
1306 // per above links, rustc cannot understand that we never return two mutable references to the same object,
1307 // so we have to use unsafe code to coerce the return value to the desired lifetime
1308 unsafe { mem::transmute(nth) }
1309 };
1310 debug_assert!(self.assert_invariants());
1311 ret
1312 }
1313}
1314
// NOTE(review): as with `Iter`, this returns the total container length, not
// the number of elements remaining as `ExactSizeIterator::len` is documented
// to; the `Iterator` methods above rely on this container-length meaning, so
// the two impls would need to be fixed together.
impl<T> ExactSizeIterator for IterMut<'_, T>
where
    T: Copy + Default + Debug,
{
    fn len(&self) -> usize {
        self.container.len()
    }
}
1323
// `IterMut` is fused: once the front cursor passes the back cursor, no
// method moves them back toward each other, so `next` returns `None` forever.
impl<T> FusedIterator for IterMut<'_, T> where T: Copy + Default + Debug {}
1325
1326impl<'a, T> IntoIterator for &'a RotatedVec<T>
1327where
1328 T: Copy + Default + Debug,
1329{
1330 type Item = &'a T;
1331 type IntoIter = Iter<'a, T>;
1332
1333 fn into_iter(self) -> Self::IntoIter {
1334 self.iter()
1335 }
1336}
1337
1338impl<'a, T> IntoIterator for &'a mut RotatedVec<T>
1339where
1340 T: Copy + Default + Debug,
1341{
1342 type Item = &'a mut T;
1343 type IntoIter = IterMut<'a, T>;
1344
1345 fn into_iter(self) -> Self::IntoIter {
1346 self.iter_mut()
1347 }
1348}
1349
impl<T> IntoIterator for RotatedVec<T>
where
    T: Copy + Default + Debug,
{
    type Item = T;
    type IntoIter = IntoIter<T>;

    /// Consumes the `RotatedVec`, producing a by-value iterator that starts
    /// at the first logical element.
    fn into_iter(self) -> Self::IntoIter {
        IntoIter {
            // `self.into()` converts to whatever type `IntoIter::vec` is
            // declared as (not visible here) — presumably the un-rotating
            // `Into<Vec<T>>` conversion; verify against the struct definition.
            vec: self.into(),
            next_index: 0,
        }
    }
}
1364
1365impl<'a, T> Iterator for IntoIter<T>
1366where
1367 T: Copy + Default + Debug,
1368{
1369 type Item = T;
1370
1371 fn next(&mut self) -> Option<Self::Item> {
1372 if self.next_index == self.vec.len() {
1373 None
1374 } else {
1375 let current = self.vec[self.next_index];
1376 self.next_index += 1;
1377 debug_assert!(self.next_index <= self.vec.len());
1378 Some(current)
1379 }
1380 }
1381}
1382
1383impl<'a, T> From<&'a [T]> for RotatedVec<T>
1384where
1385 T: Copy + Default + Debug,
1386{
1387 fn from(slice: &'a [T]) -> Self {
1388 let mut this = RotatedVec {
1389 data: slice.to_vec(),
1390 start_indexes: Vec::new(),
1391 };
1392 this.init();
1393 this
1394 }
1395}
1396
1397impl<T> From<Vec<T>> for RotatedVec<T>
1398where
1399 T: Copy + Default + Debug,
1400{
1401 fn from(vec: Vec<T>) -> Self {
1402 let mut this = RotatedVec {
1403 data: vec,
1404 start_indexes: Vec::new(),
1405 };
1406 this.init();
1407 this
1408 }
1409}
1410
1411impl<T> Into<Vec<T>> for RotatedVec<T>
1412where
1413 T: Copy + Default + Debug,
1414{
1415 fn into(mut self) -> Vec<T> {
1416 // un-rotate the data array in-place and steal it from self
1417 for (i, &pivot_offset) in self.start_indexes.iter().enumerate() {
1418 let subarray_start_idx = Self::get_array_idx_from_subarray_idx(i);
1419 let subarray_len = if i == self.start_indexes.len() - 1 {
1420 self.data.len() - subarray_start_idx
1421 } else {
1422 i + 1
1423 };
1424 let subarray_end_idx = subarray_start_idx + subarray_len;
1425 let subarray = &mut self.data[subarray_start_idx..subarray_end_idx];
1426 // un-rotate subarray in-place
1427 subarray.rotate_left(pivot_offset);
1428 }
1429 // steal data array
1430 self.data
1431 }
1432}
1433
1434impl<T> FromIterator<T> for RotatedVec<T>
1435where
1436 T: Copy + Default + Debug,
1437{
1438 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
1439 let mut this = RotatedVec {
1440 data: Vec::from_iter(iter.into_iter()),
1441 start_indexes: Vec::new(),
1442 };
1443 this.init();
1444 this
1445 }
1446}
1447
1448impl<T> Default for RotatedVec<T>
1449where
1450 T: Copy + Default + Debug,
1451{
1452 #[inline]
1453 fn default() -> RotatedVec<T> {
1454 RotatedVec::new()
1455 }
1456}