1use std::cmp::Ordering;
2use std::hint::unreachable_unchecked;
3
4use rle::Searchable;
5
6use super::*;
7use std::ops::AddAssign;
8
9impl<E: ContentTraits, I: TreeMetrics<E>, const IE: usize, const LE: usize> UnsafeCursor<E, I, IE, LE> {
11 pub(crate) fn new(node: NonNull<NodeLeaf<E, I, IE, LE>>, idx: usize, offset: usize) -> Self {
12 UnsafeCursor { node, idx, offset }
13 }
14
    /// Get a mutable reference to the cursor's leaf node.
    ///
    /// # Safety
    /// This mints a `&mut` out of a `&self` (hence the clippy allow). The
    /// caller must guarantee no other live reference to this node exists for
    /// the duration of the returned borrow, or this is UB.
    #[allow(clippy::mut_from_ref)] pub(crate) unsafe fn get_node_mut(&self) -> &mut NodeLeaf<E, I, IE, LE> {
        &mut *self.node.as_ptr()
    }
19
20 pub fn traverse_forward(&mut self) -> bool {
31 let node = unsafe { self.node.as_ref() };
32 if let Some(n) = node.next {
33 self.node = n;
34 self.idx = 0;
35 self.offset = 0;
36 true
37 } else { false }
38 }
39
40 pub fn traverse_backwards(&mut self) -> bool {
41 let node = unsafe { self.node.as_ref() };
42 if let Some(n) = node.prev_leaf() {
43 let node_ref = unsafe { n.as_ref() };
44 self.node = n;
45 self.idx = node_ref.len_entries() - 1;
46 self.offset = node_ref.data[self.idx].len();
47 true
48 } else { false }
49 }
50
51 pub fn prev_entry_marker(&mut self, marker: Option<&mut I::Update>) -> bool {
54 if self.idx > 0 {
55 self.idx -= 1;
56 self.offset = self.get_raw_entry().len();
57 true
59 } else {
60 if let Some(marker) = marker {
61 unsafe { self.node.as_mut() }.flush_metric_update(marker);
62 }
63 self.traverse_backwards()
64 }
65 }
66
67 pub fn prev_entry(&mut self) -> bool {
68 self.prev_entry_marker(None)
69 }
70
71 #[inline(always)]
74 pub fn next_entry_marker(&mut self, marker: Option<&mut I::Update>) -> bool {
75 unsafe {
77 if self.idx + 1 < self.node.as_ref().num_entries as usize {
78 self.idx += 1;
79 self.offset = 0;
80 true
81 } else {
82 if let Some(marker) = marker {
83 self.node.as_mut().flush_metric_update(marker);
84 }
85 self.traverse_forward()
86 }
87 }
88 }
89
90 #[inline(always)]
91 pub fn next_entry(&mut self) -> bool {
92 self.next_entry_marker(None)
93 }
94
    /// Compute the cursor's position from the start of the tree, measured in
    /// whatever unit the three supplied projection functions produce.
    ///
    /// - `offset_to_num`: converts an internal node's aggregated metric into `Out`.
    /// - `entry_len`: measures a whole leaf entry.
    /// - `entry_len_at`: measures the first `offset` items of an entry.
    ///
    /// # Safety
    /// The cursor must point into a valid, live tree with `self.idx` in bounds
    /// for the leaf - that bound is only "checked" via `unreachable_unchecked`,
    /// so a malformed cursor is immediate UB.
    pub unsafe fn count_pos_raw<Out, F, G, H>(&self, offset_to_num: F, entry_len: G, entry_len_at: H) -> Out
        where Out: AddAssign + Default, F: Fn(I::Value) -> Out, G: Fn(&E) -> Out, H: Fn(&E, usize) -> Out
    {
        // offset == usize::MAX is the sentinel for a cursor into an empty tree
        // (see check()); its position is trivially zero.
        if self.offset == usize::MAX { return Out::default(); }

        let node = self.node.as_ref();
        let mut pos = Out::default();

        // Promise the optimizer idx is in bounds so the slice loops below can
        // drop their bounds checks.
        if self.idx >= node.data.len() { unreachable_unchecked(); }

        // Sum every whole entry before the cursor's entry within this leaf.
        for e in &node.data[0..self.idx] {
            pos += entry_len(e);
        }

        // Add the partially-consumed entry under the cursor, if any.
        if self.offset != 0 {
            let e = &node.data[self.idx];
            pos += if self.offset < e.len() {
                // Cursor sits mid-entry: measure only the first `offset` items.
                entry_len_at(e, self.offset)
            } else {
                // offset == len: the entire entry is behind the cursor.
                entry_len(e)
            };
        }

        // Walk up to the root, accumulating the metrics of every sibling
        // subtree to the left of the path we arrived on.
        let mut parent = node.parent;
        let mut node_ptr = NodePtr::Leaf(self.node);
        loop {
            match parent {
                // Reached the root - pos is complete.
                ParentPtr::Root(_) => { break; },
                ParentPtr::Internal(n) => {
                    let node_ref = n.as_ref();
                    let idx = node_ref.find_child(node_ptr).unwrap();

                    for c in &node_ref.metrics[0..idx] {
                        pos += offset_to_num(*c);
                    }

                    node_ptr = NodePtr::Internal(n);
                    parent = node_ref.parent;
                }
            }
        }

        pos
    }
147
148 pub unsafe fn count_pos(&self) -> I::Value {
152 self.count_pos_raw(|v| v, |e| {
153 let mut v = I::Value::default();
154 I::increment_offset(&mut v, e);
155 v
156 }, |e, offset| {
157 let mut e = *e;
159 e.truncate(offset);
160 let mut v = I::Value::default();
161 I::increment_offset(&mut v, &e);
162 v
163 })
164 }
165
166 pub fn get_raw_entry(&self) -> &E {
169 assert_ne!(self.offset, usize::MAX, "Cannot get entry for a cursor to an empty list");
170 let node = unsafe { self.node.as_ref() };
171 &node.data[self.idx]
172 }
173
174 pub fn try_get_raw_entry(&self) -> Option<E> {
175 let node = unsafe { self.node.as_ref() };
176 if self.idx < node.len_entries() {
177 Some(node.data[self.idx])
178 } else { None }
179 }
180
181 pub fn get_raw_entry_mut(&mut self) -> &mut E {
182 assert_ne!(self.offset, usize::MAX, "Cannot get entry for a cursor to an empty list");
183 let node = unsafe { self.node.as_mut() };
184 debug_assert!(self.idx < node.len_entries());
185 &mut node.data[self.idx]
186 }
187
188 pub(crate) fn roll_to_next_entry_internal<F: FnOnce(&mut Self)>(&mut self, f: F) -> bool {
193 unsafe {
194 if self.offset == usize::MAX {
195 false
197 } else {
198 let node = self.node.as_ref();
199 debug_assert!(self.idx < node.len_entries());
200 let seq_len = node.data[self.idx].len();
201
202 debug_assert!(self.offset <= seq_len);
203
204 if self.offset < seq_len { return true; }
205 f(self);
206 self.next_entry()
207 }
208 }
209 }
210
211 pub fn roll_to_next_entry(&mut self) -> bool {
212 self.roll_to_next_entry_internal(|_| {})
213 }
214
215 pub fn roll_to_next_entry_marker(&mut self, marker: &mut I::Update) -> bool {
217 self.roll_to_next_entry_internal(|cursor| {
218 unsafe {
219 cursor.node.as_mut().flush_metric_update(marker);
220 }
221 })
222 }
223
224 pub fn next_item(&mut self) -> bool {
243 if !self.roll_to_next_entry() {
244 return false;
245 }
246 self.offset += 1;
247 true
248 }
249
250 pub fn move_forward_by_offset(&mut self, mut amt: usize, mut marker: Option<&mut I::Update>) {
251 loop {
252 let len_here = self.get_raw_entry().len();
253 if self.offset + amt <= len_here {
254 self.offset += amt;
255 break;
256 }
257 amt -= len_here - self.offset;
258 if !self.next_entry_marker(marker.take()) {
259 panic!("Cannot move back before the start of the tree");
260 }
261 }
262 }
263
264 pub fn move_back_by_offset(&mut self, mut amt: usize, mut marker: Option<&mut I::Update>) {
266 while self.offset < amt {
267 amt -= self.offset;
268 self.offset = 0;
269 if !self.prev_entry_marker(marker.take()) {
270 panic!("Cannot move back before the start of the tree");
271 }
272 }
273 self.offset -= amt;
274 }
275
    /// Merge adjacent mergeable entries in this leaf, compacting the entry
    /// array in place. If the cursor's own entry gets appended onto its
    /// predecessor, the cursor's idx/offset are fixed up to keep pointing at
    /// the same logical item.
    pub fn compress_node(&mut self) {
        // Cursor index already at/past leaf capacity - nothing to compress.
        if self.idx >= LE { return; }
        let node = unsafe { self.node.as_mut() };

        if self.idx >= node.len_entries() {
            // Cursor is past the populated entries (eg an empty leaf).
            return;
        }

        // Count of entries merged away so far; later entries shift left by this.
        let mut merged = 0;

        // Start at max(idx, 1) so `i - 1 - merged` below can never underflow.
        for i in self.idx.max(1)..node.num_entries as usize {
            // Tell the optimizer all these indices are in bounds for a
            // well-formed leaf, so the data[] accesses skip bounds checks.
            // UB if the node is corrupt.
            if i >= LE || i - 1 - merged >= LE || i - merged >= LE {
                unsafe { unreachable_unchecked(); }
            }

            // Compacted position of the previous entry - the merge target.
            let dest_idx = i - 1 - merged;

            if node.data[dest_idx].can_append(&node.data[i]) {
                if i == self.idx {
                    // The cursor's entry is being absorbed into dest_idx: move
                    // the cursor there and push its offset past dest's old content.
                    self.offset += node.data[dest_idx].len();
                    self.idx = dest_idx;
                }

                node.data[dest_idx].append(node.data[i]);
                merged += 1;
            } else if merged > 0 {
                // No merge, but earlier merges opened a gap - slide this entry left.
                node.data[i - merged] = node.data[i];
            }
        }
        node.num_entries -= merged as u8;
    }
313
314 pub fn check(&self) {
315 let node = unsafe { self.node.as_ref() };
316
317 if node.num_entries == 0 {
318 assert_eq!(self.idx, 0);
319 assert_eq!(self.offset, usize::MAX);
320 } else {
321 assert!(self.idx < node.len_entries());
322 assert!(self.offset <= node.data[self.idx].len());
323 }
324 }
325
    /// Compare the tree positions of two cursors.
    ///
    /// # Safety
    /// Both cursors must point into the *same* tree. NOTE(review): the lockstep
    /// ascent below appears to rely on both leaves sitting at the same depth
    /// (a balanced-tree invariant); with cursors from different trees the
    /// unwraps would fire - confirm against the tree's construction code.
    pub unsafe fn unsafe_cmp(&self, other: &Self) -> Ordering {
        if self.node == other.node {
            // Same leaf: order by entry index, then by offset within the entry.
            if self.idx == other.idx { self.offset.cmp(&other.offset) }
            else { self.idx.cmp(&other.idx) }
        } else {
            // Different leaves: walk both paths towards the root in lockstep
            // until they share a parent, then order by child index there.
            let mut n1 = NodePtr::Leaf(self.node);
            let mut n2 = NodePtr::Leaf(other.node);
            loop {
                let p1 = n1.get_parent().unwrap_internal();
                let p2 = n2.get_parent().unwrap_internal();

                if p1 == p2 {
                    let node = p1.as_ref();
                    let idx1 = node.find_child(n1).unwrap();
                    let idx2 = node.find_child(n2).unwrap();
                    return idx1.cmp(&idx2);
                }

                // No common ancestor yet - step both paths up one level.
                n1 = NodePtr::Internal(p1);
                n2 = NodePtr::Internal(p2);
            }
        }
    }
353}
354
355impl<E: ContentTraits, I: TreeMetrics<E>, const IE: usize, const LE: usize> PartialEq for UnsafeCursor<E, I, IE, LE> {
357 fn eq(&self, other: &Self) -> bool {
358 self.node == other.node && self.idx == other.idx && self.offset == other.offset
359 }
360}
361
// Equality is purely structural (pointer + two usizes), so it is reflexive and total.
impl<E: ContentTraits, I: TreeMetrics<E>, const IE: usize, const LE: usize> Eq for UnsafeCursor<E, I, IE, LE> {}
363
364
365impl<E: ContentTraits + Searchable, I: TreeMetrics<E>, const IE: usize, const LE: usize> UnsafeCursor<E, I, IE, LE> {
366 pub unsafe fn unsafe_get_item(&self) -> Option<E::Item> {
367 let mut cursor = self.clone();
369 if cursor.roll_to_next_entry() {
370 Some(cursor.get_raw_entry().at_offset(cursor.offset))
371 } else { None }
372 }
373}
374
impl<E: ContentTraits + ContentLength, I: FindContent<E>, const IE: usize, const LE: usize> UnsafeCursor<E, I, IE, LE> {
    /// Count the cursor's position measured in content units, using the
    /// index's content projections.
    ///
    /// # Safety
    /// Same contract as `count_pos_raw`: the cursor must be valid.
    pub unsafe fn unsafe_count_content_pos(&self) -> usize {
        self.count_pos_raw(I::index_to_content, E::content_len, E::content_len_at_offset)
    }

    /// Move the cursor forward by `_amt` content units. Not implemented yet.
    pub fn move_forward_by_content(&mut self, _amt: usize) {
        todo!();
    }

    /// Move the cursor backward by `_amt` content units. Not implemented yet.
    pub fn move_back_by_content(&mut self, _amt: usize) {
        todo!()
    }
}
426
427impl<E: ContentTraits, I: FindOffset<E>, const IE: usize, const LE: usize> UnsafeCursor<E, I, IE, LE> {
428 pub unsafe fn unsafe_count_offset_pos(&self) -> usize {
429 self.count_pos_raw(I::index_to_offset, E::len, |_, off| off)
430
431 }
433}
434
#[cfg(test)]
mod tests {
    use super::*;
    use crate::testrange::TestRange;

    #[test]
    fn compare_cursors() {
        let mut tree = ContentTreeRaw::<TestRange, RawPositionMetricsU32, DEFAULT_IE, DEFAULT_LE>::new();

        // Even in an empty tree, a cursor compares equal to itself.
        let cursor = tree.unsafe_cursor_at_start();
        assert_eq!(cursor, cursor);

        tree.insert_at_start_notify(TestRange { id: 0, len: 1, is_activated: true }, null_notify);

        // With one item, start < end within a single leaf.
        let start = tree.unsafe_cursor_at_start();
        let end = tree.unsafe_cursor_at_end();
        assert_eq!(unsafe { start.unsafe_cmp(&end) }, Ordering::Less);

        // Grow the tree enough to force internal nodes, then compare across levels.
        for id in 0..1000 {
            tree.insert_at_start_notify(TestRange { id, len: 1, is_activated: true }, null_notify);
        }

        let start = tree.unsafe_cursor_at_start();
        let end = tree.unsafe_cursor_at_end();
        assert_eq!(unsafe { start.unsafe_cmp(&end) }, Ordering::Less);
    }

    #[test]
    fn empty_tree_has_empty_iter() {
        // The raw iterator over a fresh tree must yield nothing.
        let tree = ContentTreeRaw::<TestRange, RawPositionMetricsU32, DEFAULT_IE, DEFAULT_LE>::new();
        for _item in tree.raw_iter() {
            panic!("Found spurious item");
        }
    }
}
471}