1use super::types::{
6 DirtyRegion, InvalidatedRange, LineDiff, ReparseRequest, SourceEdit, TextChange,
7};
8
/// Levenshtein edit distance between `a` and `b`, measured in `char`s.
///
/// Counts the minimum number of single-character insertions, deletions and
/// substitutions needed to turn `a` into `b`. Uses a rolling two-row dynamic
/// program, so memory is O(|b|) instead of O(|a|·|b|).
///
/// Returns 0 for identical strings and `max(len)` when one side is empty.
pub fn edit_distance(a: &str, b: &str) -> usize {
    let a: Vec<char> = a.chars().collect();
    let b: Vec<char> = b.chars().collect();
    // `prev[j]` = distance between a[..i] and b[..j] for the previous row i.
    // Row 0 is the cost of building b[..j] from the empty string: j inserts.
    let mut prev: Vec<usize> = (0..=b.len()).collect();
    let mut curr = vec![0usize; b.len() + 1];
    for (i, &ca) in a.iter().enumerate() {
        // First column: deleting all of a[..=i] costs i + 1 edits.
        curr[0] = i + 1;
        for (j, &cb) in b.iter().enumerate() {
            curr[j + 1] = if ca == cb {
                prev[j] // characters match: no extra edit
            } else {
                // substitution, deletion, insertion — cheapest + 1
                1 + prev[j].min(prev[j + 1]).min(curr[j])
            };
        }
        std::mem::swap(&mut prev, &mut curr);
    }
    prev[b.len()]
}
34#[allow(missing_docs)]
36pub fn diff_lines(old: &str, new: &str) -> Vec<TextChange> {
37 let old_lines: Vec<&str> = old.lines().collect();
38 let new_lines: Vec<&str> = new.lines().collect();
39 let mut changes = Vec::new();
40 let min_len = old_lines.len().min(new_lines.len());
41 let first_diff = (0..min_len).find(|&i| old_lines[i] != new_lines[i]);
42 if let Some(idx) = first_diff {
43 let old_start: usize = old_lines[..idx].iter().map(|l| l.len() + 1).sum();
44 let old_end: usize = old_lines[..old_lines.len()]
45 .iter()
46 .map(|l| l.len() + 1)
47 .sum();
48 let new_text: String = new_lines[idx..].join("\n");
49 changes.push(TextChange::replacement(old_start, old_end, new_text));
50 } else if old_lines.len() > new_lines.len() {
51 let del_start: usize = old_lines[..new_lines.len()]
52 .iter()
53 .map(|l| l.len() + 1)
54 .sum();
55 let del_end = old.len();
56 changes.push(TextChange::deletion(del_start, del_end));
57 } else if new_lines.len() > old_lines.len() {
58 let ins_at = old.len();
59 let extra: String = "\n".to_string() + &new_lines[old_lines.len()..].join("\n");
60 changes.push(TextChange::insertion(ins_at, extra));
61 }
62 changes
63}
#[cfg(test)]
mod tests {
    // Unit tests for the incremental-parsing building blocks. Most types
    // exercised here (TextChange, IncrementalParser, VersionedSource, ...)
    // are declared elsewhere in crate::incremental; the expected values
    // below document their observable contracts.
    use super::*;
    use crate::incremental::*;

    // --- TextChange: applying edits and classifying them -------------------

    #[test]
    fn test_text_change_apply_insertion() {
        let change = TextChange::insertion(5, " world");
        let result = change.apply("hello");
        assert_eq!(result, "hello world");
    }
    #[test]
    fn test_text_change_apply_deletion() {
        // Deletion range is half-open in bytes: [5, 11).
        let change = TextChange::deletion(5, 11);
        let result = change.apply("hello world");
        assert_eq!(result, "hello");
    }
    #[test]
    fn test_text_change_apply_replacement() {
        let change = TextChange::replacement(6, 11, "Rust");
        let result = change.apply("hello world");
        assert_eq!(result, "hello Rust");
    }
    #[test]
    fn test_text_change_delta() {
        // delta() is the signed change in text length produced by the edit.
        let ins = TextChange::insertion(0, "abc");
        assert_eq!(ins.delta(), 3);
        let del = TextChange::deletion(0, 5);
        assert_eq!(del.delta(), -5);
        let rep = TextChange::replacement(0, 5, "hi");
        assert_eq!(rep.delta(), -3);
    }
    #[test]
    fn test_text_change_is_insertion() {
        let ins = TextChange::insertion(0, "x");
        assert!(ins.is_insertion());
        assert!(!ins.is_deletion());
        assert!(!ins.is_replacement());
    }
    #[test]
    fn test_text_change_is_deletion() {
        let del = TextChange::deletion(0, 3);
        assert!(del.is_deletion());
    }
    #[test]
    fn test_text_change_is_replacement() {
        let rep = TextChange::replacement(0, 5, "hi");
        assert!(rep.is_replacement());
    }

    // --- IncrementalParser: versioning and dirty tracking ------------------

    #[test]
    fn test_incremental_parser_new() {
        let src = "def foo : Nat := 0\ntheorem bar : True := trivial\n";
        let parser = IncrementalParser::new(src);
        assert_eq!(parser.version(), 0);
        assert!(parser.cache_size() > 0);
        assert_eq!(parser.dirty_count(), 0);
    }
    #[test]
    fn test_apply_change_marks_dirty() {
        let src = "def foo : Nat := 0\n";
        let mut parser = IncrementalParser::new(src);
        let initial_version = parser.version();
        parser.apply_change(TextChange::replacement(4, 7, "bar"));
        // Each applied change bumps the version and rewrites the source.
        assert_eq!(parser.version(), initial_version + 1);
        assert!(parser.source().contains("bar"));
    }
    #[test]
    fn test_split_declarations() {
        // split_declarations returns (offset?, text) pairs — index .1 is the
        // declaration text. TODO confirm the meaning of field .0.
        let src = "def foo : Nat := 0\ntheorem bar : True := trivial\naxiom baz : False\n";
        let decls = IncrementalParser::split_declarations(src);
        assert_eq!(decls.len(), 3);
        assert!(decls[0].1.starts_with("def "));
        assert!(decls[1].1.starts_with("theorem "));
        assert!(decls[2].1.starts_with("axiom "));
    }

    // --- VersionedSource: offset/position conversions ----------------------

    #[test]
    fn test_versioned_source_new() {
        let vs = VersionedSource::new("file:///foo.oxilean", "def x := 1");
        assert_eq!(vs.uri, "file:///foo.oxilean");
        assert_eq!(vs.version, 0);
        assert_eq!(vs.content, "def x := 1");
        assert!(!vs.is_empty());
        assert_eq!(vs.len(), 10);
    }
    #[test]
    fn test_offset_to_position() {
        // Positions are zero-based (line, column) pairs.
        let vs = VersionedSource::new("u", "hello\nworld\n");
        assert_eq!(vs.offset_to_position(0), (0, 0));
        assert_eq!(vs.offset_to_position(5), (0, 5));
        assert_eq!(vs.offset_to_position(6), (1, 0));
        assert_eq!(vs.offset_to_position(11), (1, 5));
    }
    #[test]
    fn test_position_to_offset() {
        let vs = VersionedSource::new("u", "hello\nworld\n");
        assert_eq!(vs.position_to_offset(0, 0), Some(0));
        assert_eq!(vs.position_to_offset(0, 5), Some(5));
        assert_eq!(vs.position_to_offset(1, 0), Some(6));
        assert_eq!(vs.position_to_offset(1, 5), Some(11));
        // Out-of-range lines yield None rather than panicking.
        assert_eq!(vs.position_to_offset(99, 0), None);
    }

    // --- DependencyGraph ----------------------------------------------------

    #[test]
    fn test_dependency_graph_add_and_dependents() {
        // add_edge(dependent, dependency); dependents_of is transitive:
        // baz -> bar -> foo, so both appear for "foo".
        let mut g = DependencyGraph::new();
        g.add_edge("bar", "foo");
        g.add_edge("baz", "bar");
        let deps = g.dependents_of("foo");
        assert!(deps.contains(&"bar".to_string()));
        assert!(deps.contains(&"baz".to_string()));
    }
    #[test]
    fn test_dependency_graph_direct_dependencies() {
        let mut g = DependencyGraph::new();
        g.add_edge("bar", "foo");
        g.add_edge("bar", "qux");
        let direct = g.direct_dependencies("bar");
        assert!(direct.contains(&"foo".to_string()));
        assert!(direct.contains(&"qux".to_string()));
    }
    #[test]
    fn test_dependency_graph_remove_node() {
        let mut g = DependencyGraph::new();
        g.add_edge("bar", "foo");
        g.remove_node("foo");
        assert!(g.direct_dependencies("bar").is_empty());
    }

    // --- edit_distance / diff_lines (defined in this file) ------------------

    #[test]
    fn test_edit_distance_same() {
        assert_eq!(edit_distance("hello", "hello"), 0);
    }
    #[test]
    fn test_edit_distance_empty() {
        assert_eq!(edit_distance("", "abc"), 3);
        assert_eq!(edit_distance("abc", ""), 3);
    }
    #[test]
    fn test_edit_distance_diff() {
        // Classic Levenshtein example: kitten -> sitting needs 3 edits.
        assert_eq!(edit_distance("kitten", "sitting"), 3);
    }
    #[test]
    fn test_diff_lines_insertion() {
        let old = "a\n";
        let new = "a\nb\n";
        let changes = diff_lines(old, new);
        assert!(!changes.is_empty());
    }
    #[test]
    fn test_diff_lines_no_change() {
        let src = "def foo := 0\n";
        let changes = diff_lines(src, src);
        assert!(changes.is_empty());
    }

    // --- TokenFingerprint ---------------------------------------------------

    #[test]
    fn test_token_fingerprint_same() {
        // Fingerprints are deterministic for identical token streams.
        let fp1 = TokenFingerprint::compute(&["def", "foo", ":=", "0"]);
        let fp2 = TokenFingerprint::compute(&["def", "foo", ":=", "0"]);
        assert_eq!(fp1, fp2);
    }
    #[test]
    fn test_token_fingerprint_diff() {
        let fp1 = TokenFingerprint::compute(&["def", "foo"]);
        let fp2 = TokenFingerprint::compute(&["def", "bar"]);
        assert_ne!(fp1, fp2);
    }

    // --- Green/red syntax trees ---------------------------------------------

    #[test]
    fn test_green_node_leaf() {
        let node = GreenNode::leaf(SyntaxKind::Ident, "foo");
        assert!(node.is_leaf());
        assert_eq!(node.width, 3);
        assert_eq!(node.to_text(), "foo");
    }
    #[test]
    fn test_green_node_interior() {
        let a = GreenNode::leaf(SyntaxKind::Token("def".to_string()), "def");
        let b = GreenNode::leaf(SyntaxKind::Ident, "foo");
        let node = GreenNode::interior(SyntaxKind::Def, vec![a, b]);
        assert!(!node.is_leaf());
        // Interior width is the sum of the children's widths (3 + 3).
        assert_eq!(node.width, 6);
        assert_eq!(node.to_text(), "deffoo");
    }
    #[test]
    fn test_red_node_range() {
        // Red nodes layer absolute offsets on top of the green tree.
        let green = GreenNode::leaf(SyntaxKind::Ident, "hello");
        let red = RedNode::new(&green, 10);
        assert_eq!(red.range(), 10..15);
    }
    #[test]
    fn test_red_node_children() {
        // Children receive consecutive absolute ranges from the parent offset.
        let a = GreenNode::leaf(SyntaxKind::Ident, "ab");
        let b = GreenNode::leaf(SyntaxKind::Ident, "cd");
        let parent = GreenNode::interior(SyntaxKind::Root, vec![a, b]);
        let red = RedNode::new(&parent, 0);
        let children = red.children();
        assert_eq!(children.len(), 2);
        assert_eq!(children[0].range(), 0..2);
        assert_eq!(children[1].range(), 2..4);
    }

    // --- PersistentVec: structural sharing ----------------------------------

    #[test]
    fn test_persistent_vec_push() {
        // push returns a new vector; earlier versions stay observable.
        let v0: PersistentVec<i32> = PersistentVec::new();
        let v1 = v0.push(1);
        let v2 = v1.push(2);
        assert_eq!(v0.len(), 0);
        assert_eq!(v1.len(), 1);
        assert_eq!(v2.len(), 2);
        assert_eq!(v2.get(0), Some(&1));
        assert_eq!(v2.get(1), Some(&2));
    }
    #[test]
    fn test_persistent_vec_set() {
        // set is non-destructive: the original vector keeps its value.
        let v: PersistentVec<i32> = PersistentVec::new().push(10).push(20);
        let v2 = v.set(0, 99).expect("test operation should succeed");
        assert_eq!(v.get(0), Some(&10));
        assert_eq!(v2.get(0), Some(&99));
    }

    // --- Transaction / undo-redo --------------------------------------------

    #[test]
    fn test_transaction_commit() {
        let src = "hello world";
        let mut tx = Transaction::begin(src);
        tx.add(TextChange::replacement(6, 11, "Rust"));
        let result = tx.commit(src);
        assert_eq!(result, "hello Rust");
    }
    #[test]
    fn test_transaction_rollback() {
        // rollback discards pending changes and returns the original text.
        let src = "hello";
        let mut tx = Transaction::begin(src);
        tx.add(TextChange::insertion(5, " world"));
        let original = tx.rollback().expect("test operation should succeed");
        assert_eq!(original, "hello");
    }
    #[test]
    fn test_undo_redo_stack() {
        let mut stack = UndoRedoStack::new("v0");
        stack.push("v1");
        stack.push("v2");
        assert_eq!(stack.current(), "v2");
        assert!(stack.can_undo());
        stack.undo();
        assert_eq!(stack.current(), "v1");
        assert!(stack.can_redo());
        stack.redo();
        assert_eq!(stack.current(), "v2");
    }
    #[test]
    fn test_undo_redo_clears_redo_on_push() {
        // Pushing a new state invalidates the redo history.
        let mut stack = UndoRedoStack::new("v0");
        stack.push("v1");
        stack.undo();
        assert!(stack.can_redo());
        stack.push("v2");
        assert!(!stack.can_redo());
    }

    // --- IncrementalLexer ---------------------------------------------------

    #[test]
    fn test_incremental_lexer_basic() {
        // Second argument is the set of dirty line indices to (re)lex.
        let mut lexer = IncrementalLexer::new();
        let tokens = lexer.lex("def foo := 0\ndef bar := 1", &[0, 1]);
        assert!(tokens.contains(&"def".to_string()));
        assert!(tokens.contains(&"foo".to_string()));
    }
    #[test]
    fn test_incremental_lexer_caches() {
        let mut lexer = IncrementalLexer::new();
        let t1 = lexer.lex("def foo := 0\ndef bar := 1", &[0, 1]);
        // Lexing again with no dirty lines should serve cached tokens.
        let t2 = lexer.lex("def foo := 0\ndef bar := 1", &[]);
        assert_eq!(t1, t2);
    }
    #[test]
    fn test_invalidate_by_name() {
        let src = "def foo : Nat := 0\ndef bar : Nat := 1\n";
        let mut parser = IncrementalParser::new(src);
        parser.invalidate_by_name("foo");
        let invalid: Vec<_> = parser.invalid_declarations();
        assert!(invalid.iter().any(|d| d.name.as_deref() == Some("foo")));
    }
}
339#[allow(dead_code)]
341#[allow(missing_docs)]
342pub fn apply_edits(source: &str, edits: &[SourceEdit]) -> String {
343 let mut result = source.to_string();
344 let mut sorted = edits.to_vec();
345 sorted.sort_by(|a, b| b.start.cmp(&a.start));
346 for edit in sorted {
347 let end = edit.end.min(result.len());
348 let start = edit.start.min(end);
349 result.replace_range(start..end, &edit.new_text);
350 }
351 result
352}
353#[allow(dead_code)]
355#[allow(missing_docs)]
356pub fn compute_invalidated_range(edit: &SourceEdit, context_bytes: usize) -> InvalidatedRange {
357 let start = edit.start.saturating_sub(context_bytes);
358 let end = edit.end + edit.new_text.len() + context_bytes;
359 InvalidatedRange::new(start, end)
360}
361#[allow(dead_code)]
363#[allow(missing_docs)]
364pub fn compute_dirty_region(source: &str, edit: &SourceEdit) -> DirtyRegion {
365 let start_line = source[..edit.start.min(source.len())]
366 .lines()
367 .count()
368 .saturating_sub(1);
369 let end_byte = (edit.end + edit.new_text.len()).min(source.len());
370 let end_line = source[..end_byte].lines().count().saturating_sub(1);
371 DirtyRegion::new(start_line, end_line, edit.start, end_byte)
372}
#[cfg(test)]
mod extended_incremental_tests {
    // Tests for the SourceEdit-based utilities in this file and for the
    // caching/statistics types declared elsewhere in crate::incremental.
    use super::*;
    use crate::incremental::*;

    #[test]
    fn test_source_edit_kinds() {
        let ins = SourceEdit::insert(5, "hello");
        assert!(ins.is_insert());
        let del = SourceEdit::delete(2, 8);
        assert!(del.is_delete());
        let rep = SourceEdit::replace(0, 3, "xyz");
        assert!(rep.is_replace());
    }
    #[test]
    fn test_source_edit_delta() {
        // delta() = inserted bytes minus removed bytes (signed).
        let rep = SourceEdit::replace(0, 5, "abc");
        assert_eq!(rep.delta(), -2);
        let ins = SourceEdit::insert(3, "hello");
        assert_eq!(ins.delta(), 5);
    }
    #[test]
    fn test_apply_edits() {
        let source = "hello world";
        let edit = SourceEdit::replace(6, 11, "Rust");
        let result = apply_edits(source, &[edit]);
        assert_eq!(result, "hello Rust");
    }
    #[test]
    fn test_apply_edits_insert() {
        let source = "helo";
        let edit = SourceEdit::insert(3, "l");
        let result = apply_edits(source, &[edit]);
        assert_eq!(result, "hello");
    }
    #[test]
    fn test_edit_history_undo_redo() {
        let mut hist = EditHistory::new(10);
        let e = SourceEdit::insert(0, "x");
        hist.push(e.clone());
        assert_eq!(hist.history_len(), 1);
        // undo moves the edit from the history onto the undo stack...
        let undone = hist.undo();
        assert!(undone.is_some());
        assert_eq!(hist.history_len(), 0);
        assert_eq!(hist.undo_count(), 1);
        // ...and redo moves it back.
        let redone = hist.redo();
        assert!(redone.is_some());
        assert_eq!(hist.history_len(), 1);
    }
    #[test]
    fn test_invalidated_range() {
        let r = InvalidatedRange::new(10, 20);
        assert!(r.contains(15));
        assert!(!r.contains(5));
        assert_eq!(r.len(), 10);
        let r2 = InvalidatedRange::new(18, 30);
        assert!(r.overlaps(&r2));
        // merge produces the covering range of both inputs.
        let merged = r.merge(&r2);
        assert_eq!(merged.start, 10);
        assert_eq!(merged.end, 30);
    }
    #[test]
    fn test_token_validity() {
        let mut tv = TokenValidity::new();
        tv.mark_valid(0, 10);
        tv.mark_valid(20, 30);
        assert!(tv.is_valid_at(5));
        assert!(!tv.is_valid_at(15));
        // Invalidating the first span leaves only [20, 30) valid.
        tv.invalidate(&InvalidatedRange::new(0, 10));
        assert!(!tv.is_valid_at(5));
        assert_eq!(tv.valid_count(), 1);
    }
    #[test]
    fn test_parse_version() {
        let mut v = ParseVersion::new();
        v.increment();
        v.increment();
        v.mark_full_parse();
        v.increment();
        // Only edits after the last full parse count toward a reparse.
        assert_eq!(v.edits_since_full_parse(), 1);
        assert!(!v.needs_full_reparse(5));
    }
    #[test]
    fn test_node_range_cache() {
        let mut cache = NodeRangeCache::new();
        cache.insert(0, 10, 1);
        cache.insert(10, 20, 2);
        assert_eq!(cache.lookup(0, 10), Some(1));
        // Invalidation evicts every entry overlapping the range — here both.
        cache.invalidate_range(&InvalidatedRange::new(5, 15));
        assert_eq!(cache.lookup(0, 10), None);
        assert_eq!(cache.lookup(10, 20), None);
        assert_eq!(cache.size(), 0);
    }
    #[test]
    fn test_incremental_lexer() {
        let mut lex = IncrementalLexerExt::new("hello world");
        assert_eq!(lex.source(), "hello world");
        // Applying an edit rewrites the source and bumps the version.
        lex.apply_edit(SourceEdit::replace(6, 11, "Rust"));
        assert_eq!(lex.source(), "hello Rust");
        assert_eq!(lex.version(), 1);
    }
    #[test]
    fn test_simple_rope() {
        let mut rope = SimpleRope::new("hello");
        rope.insert(5, " world");
        assert_eq!(rope.to_string(), "hello world");
        rope.delete(5, 11);
        assert_eq!(rope.to_string(), "hello");
        assert_eq!(rope.len(), 5);
    }
    #[test]
    fn test_decl_dependency_tracker() {
        let mut tracker = DeclDependencyTracker::new();
        tracker.register_decl("foo", 0, 50);
        tracker.register_decl("bar", 50, 100);
        // The edit straddles the boundary at 50, so both decls are affected.
        let edit = SourceEdit::replace(40, 60, "x");
        let affected = tracker.affected_by_edit(&edit);
        assert!(affected.contains(&"foo"));
        assert!(affected.contains(&"bar"));
        assert_eq!(tracker.decl_count(), 2);
    }
    #[test]
    fn test_incr_parse_stats() {
        let mut stats = IncrParseStats::new();
        stats.total_edits = 10;
        stats.tokens_reused = 80;
        stats.tokens_relexed = 20;
        stats.nodes_reused = 70;
        stats.nodes_rebuilt = 30;
        // Fractions are reused / (reused + redone); compare with tolerance.
        assert!((stats.reuse_fraction_tokens() - 0.8).abs() < 1e-9);
        assert!((stats.reuse_fraction_nodes() - 0.7).abs() < 1e-9);
        let s = stats.summary();
        assert!(s.contains("token_reuse=80.0%"));
    }
    #[test]
    fn test_incremental_parse_cache() {
        let mut cache = IncrementalParseCache::new(10);
        let entry = IncrParseEntry {
            region_hash: 42,
            result_repr: "expr".into(),
            version: 1,
        };
        cache.store(entry);
        // Lookup is keyed by region_hash.
        assert!(cache.lookup(42).is_some());
        assert!(cache.lookup(99).is_none());
        assert_eq!(cache.stats(), (1, 1));
    }
    #[test]
    fn test_compute_dirty_region() {
        // Editing "foo" -> "baz" stays within line 0 of the source.
        let source = "def foo := 1\ndef bar := 2\n";
        let edit = SourceEdit::replace(4, 7, "baz");
        let dirty = compute_dirty_region(source, &edit);
        assert!(dirty.byte_count() > 0);
        assert!(dirty.is_single_line());
    }
}
528#[allow(dead_code)]
530#[allow(missing_docs)]
531pub fn line_diff_source(old: &str, new: &str) -> LineDiff {
532 let old_lines: Vec<_> = old.lines().collect();
533 let new_lines: Vec<_> = new.lines().collect();
534 let mut diff = LineDiff::new();
535 let max = old_lines.len().max(new_lines.len());
536 for i in 0..max {
537 match (old_lines.get(i), new_lines.get(i)) {
538 (Some(o), Some(n)) if o != n => diff.add_change(i, *o, *n),
539 (Some(o), None) => diff.add_change(i, *o, ""),
540 (None, Some(n)) => diff.add_change(i, "", *n),
541 _ => {}
542 }
543 }
544 diff
545}
546#[allow(dead_code)]
548#[allow(missing_docs)]
549pub fn merge_reparse_requests(requests: &mut Vec<ReparseRequest>) {
550 requests.sort_by_key(|r| r.start_byte);
551 let mut merged: Vec<ReparseRequest> = Vec::new();
552 for req in requests.drain(..) {
553 if let Some(last) = merged.last_mut() {
554 if req.start_byte <= last.end_byte {
555 last.end_byte = last.end_byte.max(req.end_byte);
556 if req.priority > last.priority {
557 last.priority = req.priority;
558 }
559 continue;
560 }
561 }
562 merged.push(req);
563 }
564 *requests = merged;
565}
#[cfg(test)]
mod extended_incremental_tests_2 {
    // Tests for line diffs, reparse scheduling, offset maps, checksums and
    // parse snapshots.
    use super::*;
    use crate::incremental::*;

    #[test]
    fn test_line_diff_source() {
        // Only line 1 ("b" vs "X") differs.
        let old = "a\nb\nc";
        let new = "a\nX\nc";
        let diff = line_diff_source(old, new);
        assert_eq!(diff.count(), 1);
        assert_eq!(diff.affected_lines(), vec![1]);
    }
    #[test]
    fn test_reparse_request() {
        let req = ReparseRequest::new(0, 100, 1).with_priority(ReparsePriority::High);
        assert_eq!(req.byte_span(), 100);
        assert_eq!(req.priority, ReparsePriority::High);
    }
    #[test]
    fn test_reparse_queue() {
        let mut q = ReparseQueue::new();
        q.push(ReparseRequest::new(0, 50, 1).with_priority(ReparsePriority::Low));
        q.push(ReparseRequest::new(0, 50, 1).with_priority(ReparsePriority::Urgent));
        assert!(q.has_urgent());
        // Higher-priority requests pop first regardless of push order.
        let top = q.pop().expect("collection should not be empty");
        assert_eq!(top.priority, ReparsePriority::Urgent);
    }
    #[test]
    fn test_merge_reparse_requests() {
        // [0,30) and [20,60) overlap, so they collapse into a single [0,60).
        let mut reqs = vec![
            ReparseRequest::new(0, 30, 1),
            ReparseRequest::new(20, 60, 1),
        ];
        merge_reparse_requests(&mut reqs);
        assert_eq!(reqs.len(), 1);
        assert_eq!(reqs[0].end_byte, 60);
    }
    #[test]
    fn test_offset_to_token_map() {
        let mut map = OffsetToTokenMap::new();
        map.insert(0, 1);
        map.insert(10, 2);
        map.insert(20, 3);
        // token_at resolves an offset to the token starting at or before it.
        assert_eq!(map.token_at(5), Some(1));
        assert_eq!(map.token_at(15), Some(2));
        // invalidate_from(10) drops entries at offsets >= 10.
        map.invalidate_from(10);
        assert_eq!(map.count(), 1);
    }
    #[test]
    fn test_offset_map_shift() {
        let mut map = OffsetToTokenMap::new();
        map.insert(10, 1);
        map.insert(20, 2);
        // Shifting from offset 10 by +5 moves both entries to 15 and 25.
        map.shift(10, 5);
        assert_eq!(map.token_at(15), Some(1));
        assert_eq!(map.token_at(25), Some(2));
    }
    #[test]
    fn test_incremental_parse_result() {
        // Constructor args look like (success, reused, rebuilt, elapsed?) —
        // verify against the type's declaration.
        let mut r = IncrementalParseResult::new(true, 80, 20, 500);
        assert!((r.reuse_ratio() - 0.8).abs() < 1e-9);
        assert!(!r.has_errors());
        r.add_error("parse error");
        assert!(r.has_errors());
    }
    #[test]
    fn test_change_detector() {
        let source = "hello world";
        let mut det = ChangeDetector::new();
        det.record(source, 0, 5);
        assert!(!det.has_changed(source, 0, 5));
        assert!(det.has_changed("HELLO world", 0, 5));
    }
    #[test]
    fn test_incremental_checksum() {
        let source = "abcdef";
        let cs = IncrementalChecksum::build(source);
        let total = cs.total();
        assert!(total > 0);
        // Adjacent range sums partition the total checksum.
        let r1 = cs.range_sum(0, 3);
        let r2 = cs.range_sum(3, 6);
        assert_eq!(r1 + r2, total);
    }
    #[test]
    fn test_atomic_version() {
        // increment returns the post-increment value.
        let av = AtomicVersion::new();
        let v1 = av.increment();
        let v2 = av.increment();
        assert_eq!(v1, 1);
        assert_eq!(v2, 2);
        av.reset();
        assert_eq!(av.load(), 0);
    }
    #[test]
    fn test_snapshot_manager() {
        let mut mgr = SnapshotManager::new(3);
        mgr.save(ParseSnapshot::capture("src1", 1, 10, 3));
        mgr.save(ParseSnapshot::capture("src2", 2, 10, 1));
        mgr.save(ParseSnapshot::capture("src3", 3, 10, 5));
        assert_eq!(mgr.count(), 3);
        // best() picks the snapshot with the fewest errors...
        let best = mgr.best().expect("test operation should succeed");
        assert_eq!(best.error_count, 1);
        // ...while latest() picks the highest version.
        let latest = mgr.latest().expect("test operation should succeed");
        assert_eq!(latest.version, 3);
    }
    #[test]
    fn test_parse_snapshot_cleaner_than() {
        // "Cleaner" = fewer errors (2 vs 5).
        let a = ParseSnapshot::capture("a", 1, 5, 2);
        let b = ParseSnapshot::capture("b", 2, 5, 5);
        assert!(a.is_cleaner_than(&b));
        assert!(!b.is_cleaner_than(&a));
    }
    #[test]
    fn test_line_diff_default() {
        let diff = LineDiff::default();
        assert_eq!(diff.count(), 0);
    }
}
#[cfg(test)]
mod extended_incremental_tests_3 {
    // Tests for scope tracking, error maps, edit buffering and parse fibers.
    use super::*;
    use crate::incremental::*;

    #[test]
    fn test_incr_scope_stack() {
        let mut stack = IncrScopeStack::new();
        stack.push(IncrScopeEntry::new(0, ScopeKind2::Paren, 1));
        stack.push(IncrScopeEntry::new(5, ScopeKind2::Bracket, 2));
        assert_eq!(stack.depth(), 2);
        assert_eq!(stack.current_scope(), Some(ScopeKind2::Bracket));
        // pop returns the most recently pushed entry (LIFO order).
        let popped = stack.pop().expect("collection should not be empty");
        assert_eq!(popped.kind, ScopeKind2::Bracket);
    }
    #[test]
    fn test_incremental_error_map() {
        let mut map = IncrementalErrorMap::new();
        map.add_error(10, "unexpected token");
        map.add_error(20, "expected ')'");
        map.add_error(15, "ambiguous");
        assert_eq!(map.total_error_count(), 3);
        // errors_in_range appears half-open: the error at 20 is excluded —
        // confirm against the type's declaration.
        let errs = map.errors_in_range(10, 20);
        assert_eq!(errs.len(), 2);
        map.clear_range(10, 20);
        assert_eq!(map.total_error_count(), 1);
    }
    #[test]
    fn test_edit_buffer() {
        let mut buf = EditBuffer::new(3);
        assert!(buf.add(SourceEdit::insert(0, "x")));
        assert!(buf.add(SourceEdit::delete(5, 10)));
        assert_eq!(buf.pending_count(), 2);
        // flush drains every pending edit and empties the buffer.
        let flushed = buf.flush();
        assert_eq!(flushed.len(), 2);
        assert!(buf.is_empty());
    }
    #[test]
    fn test_edit_buffer_overflow() {
        // add returns false once the capacity (here 2) is reached.
        let mut buf = EditBuffer::new(2);
        buf.add(SourceEdit::insert(0, "a"));
        buf.add(SourceEdit::insert(1, "b"));
        assert!(!buf.add(SourceEdit::insert(2, "c")));
    }
    #[test]
    fn test_token_reachability() {
        let mut r = TokenReachability::new();
        r.mark_reachable(10);
        r.mark_reachable(20);
        r.mark_reachable(30);
        assert!(r.is_reachable(10));
        assert!(!r.is_reachable(15));
        assert_eq!(r.reachable_count(), 3);
        // 3 reachable out of 10 -> 0.3; compare with a float tolerance.
        assert!((r.coverage_fraction(10) - 0.3).abs() < 1e-9);
    }
    #[test]
    fn test_fiber_pool() {
        let mut pool = FiberPool::new();
        let id1 = pool.spawn(0, 0, "start");
        let id2 = pool.spawn(10, 1, "mid");
        assert_eq!(pool.active_count(), 2);
        let f = pool.get(id1).expect("key should exist");
        assert!(f.is_at_root());
        pool.remove(id2);
        assert_eq!(pool.active_count(), 1);
    }
    #[test]
    fn test_incremental_session() {
        let mut sess = IncrementalSession::new("hello world");
        assert_eq!(sess.current_version(), 0);
        // Each applied edit rewrites the source and bumps the version.
        sess.apply_edit(SourceEdit::replace(6, 11, "Rust"));
        assert_eq!(sess.source_text(), "hello Rust");
        assert_eq!(sess.current_version(), 1);
        assert!(!sess.has_errors());
    }
    #[test]
    fn test_parse_fiber() {
        // A fiber at offset 0 / depth 0 is considered to be at the root.
        let f = ParseFiber::new(1, 0, 0, "state");
        assert!(f.is_at_root());
        let f2 = ParseFiber::new(2, 5, 3, "deeper");
        assert!(!f2.is_at_root());
    }
}