use std::collections::HashMap;
use std::path::PathBuf;

use compact_str::CompactString;
use smallvec::SmallVec;

use super::types::{Edge, RefKind, Reference, Scope, Symbol, SymbolKind, Token};

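/// Append-only string interner backed by a flat byte buffer.
///
/// Each interned string is stored once, NUL-terminated, in `data`; the returned
/// `u32` is the byte offset of the string's first character. `lookup` maps
/// already-interned strings to their offsets so repeated interning is cheap.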
#[derive(Debug, Clone, Default)]
pub struct StringTable {
    data: Vec<u8>,
    lookup: HashMap<CompactString, u32>,
}

impl StringTable {
    pub fn new() -> Self {
        Self::default()
    }

    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            data: Vec::with_capacity(capacity),
            lookup: HashMap::with_capacity(capacity / 16),
        }
    }

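    /// Interns `s` and returns the byte offset where it is stored.
    ///
    /// If the string was interned before, the existing offset is returned and
    /// no new bytes are written.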
    pub fn intern(&mut self, s: &str) -> u32 {
        let compact = CompactString::new(s);
        if let Some(&offset) = self.lookup.get(&compact) {
            return offset;
        }

        let offset = self.data.len() as u32;
        self.data.extend_from_slice(s.as_bytes());
        // NUL terminator lets `get` find the end of the string.
        self.data.push(0);
        self.lookup.insert(compact, offset);
        offset
    }

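    /// Returns the string stored at `offset`, or `None` if the offset is out of
    /// bounds or the bytes up to the NUL terminator are not valid UTF-8.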
    pub fn get(&self, offset: u32) -> Option<&str> {
        let start = offset as usize;
        if start >= self.data.len() {
            return None;
        }

        let end = self.data[start..]
            .iter()
            .position(|&b| b == 0)
            .map(|pos| start + pos)?;

        std::str::from_utf8(&self.data[start..end]).ok()
    }

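    /// Returns the raw backing buffer, e.g. for serialization.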
    pub fn as_bytes(&self) -> &[u8] {
        &self.data
    }

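    /// Reconstructs a table from a previously serialized buffer, rebuilding the
    /// lookup map by scanning for NUL-terminated strings.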
    pub fn from_bytes(data: Vec<u8>) -> Self {
        let mut lookup = HashMap::new();
        let mut offset = 0usize;
        while offset < data.len() {
            let end = data[offset..]
                .iter()
                .position(|&b| b == 0)
                .map(|pos| offset + pos);

            if let Some(end) = end {
                if let Ok(s) = std::str::from_utf8(&data[offset..end]) {
                    lookup.insert(CompactString::new(s), offset as u32);
                }
                offset = end + 1;
            } else {
                break;
            }
        }

        Self { data, lookup }
    }

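    /// Number of unique interned strings (not the byte length of the buffer).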
    pub fn len(&self) -> usize {
        self.lookup.len()
    }

    pub fn is_empty(&self) -> bool {
        self.lookup.is_empty()
    }

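    /// Size of the backing buffer in bytes, including NUL terminators.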
    pub fn byte_size(&self) -> usize {
        self.data.len()
    }
}

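/// In-memory semantic index over a set of source files.
///
/// The primary storage is the dense, id-indexed vectors (`symbols`, `tokens`,
/// `references`, `scopes`, `edges`). Everything else is a derived lookup
/// structure that can be recomputed with
/// [`rebuild_lookups`](SemanticIndex::rebuild_lookups).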
#[derive(Debug)]
pub struct SemanticIndex {
    /// All symbols, indexed by symbol id.
    pub symbols: Vec<Symbol>,

    /// All tokens, indexed by token id.
    pub tokens: Vec<Token>,

    /// All token-to-symbol references.
    pub references: Vec<Reference>,

    /// All scopes, indexed by scope id.
    pub scopes: Vec<Scope>,

    /// Directed symbol-to-symbol edges, used for the caller/callee graph.
    pub edges: Vec<Edge>,

    /// Symbol ids grouped by symbol name.
    pub symbol_by_name: HashMap<CompactString, SmallVec<[u32; 4]>>,

    /// Token ids grouped by token name.
    pub token_by_name: HashMap<CompactString, Vec<u32>>,

    /// For each symbol id, the symbols with edges into it (its callers).
    pub incoming_edges: Vec<SmallVec<[u32; 8]>>,

    /// For each symbol id, the symbols it has edges to (its callees).
    pub outgoing_edges: Vec<SmallVec<[u32; 8]>>,

    /// For each symbol id, the token ids of references to that symbol.
    pub refs_to_symbol: Vec<Vec<u32>>,

    /// File paths, indexed by `u16` file id.
    pub files: Vec<PathBuf>,

    /// Interned symbol and token names.
    pub strings: StringTable,

    /// Symbol ids flagged as entry points.
    pub entry_points: Vec<u32>,
}

impl SemanticIndex {
    pub fn new() -> Self {
        Self {
            symbols: Vec::new(),
            tokens: Vec::new(),
            references: Vec::new(),
            scopes: Vec::new(),
            edges: Vec::new(),
            symbol_by_name: HashMap::new(),
            token_by_name: HashMap::new(),
            incoming_edges: Vec::new(),
            outgoing_edges: Vec::new(),
            refs_to_symbol: Vec::new(),
            files: Vec::new(),
            strings: StringTable::new(),
            entry_points: Vec::new(),
        }
    }

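    /// Creates an index with containers pre-sized for the expected element counts.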
    pub fn with_capacity(
        symbols: usize,
        tokens: usize,
        references: usize,
        scopes: usize,
        edges: usize,
        files: usize,
    ) -> Self {
        Self {
            symbols: Vec::with_capacity(symbols),
            tokens: Vec::with_capacity(tokens),
            references: Vec::with_capacity(references),
            scopes: Vec::with_capacity(scopes),
            edges: Vec::with_capacity(edges),
            symbol_by_name: HashMap::with_capacity(symbols),
            token_by_name: HashMap::with_capacity(tokens / 4),
            incoming_edges: Vec::with_capacity(symbols),
            outgoing_edges: Vec::with_capacity(symbols),
            refs_to_symbol: Vec::with_capacity(symbols),
            files: Vec::with_capacity(files),
            strings: StringTable::with_capacity(symbols * 20),
            entry_points: Vec::with_capacity(files),
        }
    }

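    /// Registers a file path and returns its `u16` file id.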
    pub fn add_file(&mut self, path: PathBuf) -> u16 {
        let id = self.files.len() as u16;
        self.files.push(path);
        id
    }

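    /// Inserts `symbol` at its id slot, growing the dense tables with
    /// placeholders if necessary, and updates the name, entry-point, and
    /// adjacency lookups.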
    pub fn add_symbol(&mut self, symbol: Symbol, name: &str) {
        let id = symbol.id as usize;

        // Grow the dense symbol table with placeholder entries up to `id`.
        if id >= self.symbols.len() {
            self.symbols.resize(
                id + 1,
                Symbol::new(0, 0, 0, SymbolKind::Unknown, Default::default(), 0, 0),
            );
        }
        self.symbols[id] = symbol;

        let compact_name = CompactString::new(name);
        self.symbol_by_name
            .entry(compact_name)
            .or_insert_with(SmallVec::new)
            .push(symbol.id);

        if symbol.is_entry_point() {
            self.entry_points.push(symbol.id);
        }

        // Keep the adjacency and reference tables sized in lockstep with the symbols.
        while self.incoming_edges.len() <= id {
            self.incoming_edges.push(SmallVec::new());
        }
        while self.outgoing_edges.len() <= id {
            self.outgoing_edges.push(SmallVec::new());
        }
        while self.refs_to_symbol.len() <= id {
            self.refs_to_symbol.push(Vec::new());
        }
    }

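    /// Inserts `token` at its id slot and records it in the name lookup.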
    pub fn add_token(&mut self, token: Token, name: &str) {
        let id = token.id as usize;

        if id >= self.tokens.len() {
            self.tokens.resize(
                id + 1,
                Token::new(0, 0, 0, 0, 0, super::types::TokenKind::Unknown, 0),
            );
        }
        self.tokens[id] = token;

        let compact_name = CompactString::new(name);
        self.token_by_name
            .entry(compact_name)
            .or_insert_with(Vec::new)
            .push(token.id);
    }

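    /// Records a reference and indexes it under the referenced symbol.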
    pub fn add_reference(&mut self, reference: Reference) {
        self.references.push(reference);

        let sym_id = reference.symbol_id as usize;
        if sym_id < self.refs_to_symbol.len() {
            self.refs_to_symbol[sym_id].push(reference.token_id);
        }
    }

    pub fn add_scope(&mut self, scope: Scope) {
        let id = scope.id as usize;
        if id >= self.scopes.len() {
            self.scopes.resize(id + 1, Scope::file_scope(0, 0, 0));
        }
        self.scopes[id] = scope;
    }

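    /// Records a symbol-to-symbol edge and updates both adjacency lists.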
    pub fn add_edge(&mut self, edge: Edge) {
        self.edges.push(edge);

        let from = edge.from_symbol as usize;
        let to = edge.to_symbol as usize;

        if from < self.outgoing_edges.len() {
            self.outgoing_edges[from].push(edge.to_symbol);
        }
        if to < self.incoming_edges.len() {
            self.incoming_edges[to].push(edge.from_symbol);
        }
    }

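    /// Rebuilds every derived lookup (name maps, adjacency lists, reference
    /// index, entry points) from the primary vectors, e.g. after loading a
    /// serialized index.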
    pub fn rebuild_lookups(&mut self) {
        self.symbol_by_name.clear();
        self.token_by_name.clear();
        self.incoming_edges.clear();
        self.outgoing_edges.clear();
        self.refs_to_symbol.clear();
        self.entry_points.clear();

        self.incoming_edges
            .resize(self.symbols.len(), SmallVec::new());
        self.outgoing_edges
            .resize(self.symbols.len(), SmallVec::new());
        self.refs_to_symbol.resize(self.symbols.len(), Vec::new());

        for symbol in &self.symbols {
            if let Some(name) = self.strings.get(symbol.name_offset) {
                self.symbol_by_name
                    .entry(CompactString::new(name))
                    .or_insert_with(SmallVec::new)
                    .push(symbol.id);
            }
            if symbol.is_entry_point() {
                self.entry_points.push(symbol.id);
            }
        }

        for token in &self.tokens {
            if let Some(name) = self.strings.get(token.name_offset) {
                self.token_by_name
                    .entry(CompactString::new(name))
                    .or_insert_with(Vec::new)
                    .push(token.id);
            }
        }

        for edge in &self.edges {
            let from = edge.from_symbol as usize;
            let to = edge.to_symbol as usize;
            if from < self.outgoing_edges.len() {
                self.outgoing_edges[from].push(edge.to_symbol);
            }
            if to < self.incoming_edges.len() {
                self.incoming_edges[to].push(edge.from_symbol);
            }
        }

        for reference in &self.references {
            let sym_id = reference.symbol_id as usize;
            if sym_id < self.refs_to_symbol.len() {
                self.refs_to_symbol[sym_id].push(reference.token_id);
            }
        }
    }

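    /// Looks up symbol ids by exact name.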
    pub fn symbols_by_name(&self, name: &str) -> Option<&SmallVec<[u32; 4]>> {
        self.symbol_by_name.get(&CompactString::new(name))
    }

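    /// Returns the ids of all symbols whose name contains `pattern`, case-insensitively.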
    pub fn symbols_matching(&self, pattern: &str) -> Vec<u32> {
        let pattern_lower = pattern.to_lowercase();
        self.symbol_by_name
            .iter()
            .filter(|(name, _)| name.to_lowercase().contains(&pattern_lower))
            .flat_map(|(_, ids)| ids.iter().copied())
            .collect()
    }

    pub fn tokens_by_name(&self, name: &str) -> Option<&Vec<u32>> {
        self.token_by_name.get(&CompactString::new(name))
    }

    pub fn tokens_matching(&self, pattern: &str) -> Vec<u32> {
        let pattern_lower = pattern.to_lowercase();
        self.token_by_name
            .iter()
            .filter(|(name, _)| name.to_lowercase().contains(&pattern_lower))
            .flat_map(|(_, ids)| ids.iter().copied())
            .collect()
    }

    #[inline]
    pub fn symbol(&self, id: u32) -> Option<&Symbol> {
        self.symbols.get(id as usize)
    }

    #[inline]
    pub fn token(&self, id: u32) -> Option<&Token> {
        self.tokens.get(id as usize)
    }

    #[inline]
    pub fn scope(&self, id: u32) -> Option<&Scope> {
        self.scopes.get(id as usize)
    }

    pub fn symbol_name(&self, symbol: &Symbol) -> Option<&str> {
        self.strings.get(symbol.name_offset)
    }

    pub fn token_name(&self, token: &Token) -> Option<&str> {
        self.strings.get(token.name_offset)
    }

    pub fn file_path(&self, file_id: u16) -> Option<&PathBuf> {
        self.files.get(file_id as usize)
    }

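    /// Symbol ids with edges into `symbol_id` (its callers).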
    pub fn callers(&self, symbol_id: u32) -> &[u32] {
        self.incoming_edges
            .get(symbol_id as usize)
            .map(|v| v.as_slice())
            .unwrap_or(&[])
    }

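    /// Symbol ids that `symbol_id` has edges to (its callees).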
    pub fn callees(&self, symbol_id: u32) -> &[u32] {
        self.outgoing_edges
            .get(symbol_id as usize)
            .map(|v| v.as_slice())
            .unwrap_or(&[])
    }

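    /// Iterates over all references to `symbol_id`.
    ///
    /// Each referenced token id is resolved with a linear scan of `references`,
    /// so this is best suited to occasional queries.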
    pub fn references_to(&self, symbol_id: u32) -> impl Iterator<Item = &Reference> + '_ {
        self.refs_to_symbol
            .get(symbol_id as usize)
            .into_iter()
            .flat_map(move |token_ids| {
                let sid = symbol_id;
                token_ids.iter().filter_map(move |&tid| {
                    self.references
                        .iter()
                        .find(|r| r.token_id == tid && r.symbol_id == sid)
                })
            })
    }

    pub fn references_of_kind(
        &self,
        symbol_id: u32,
        kind: RefKind,
    ) -> impl Iterator<Item = &Reference> {
        self.references_to(symbol_id)
            .filter(move |r| r.ref_kind() == kind)
    }

    pub fn call_references(&self, symbol_id: u32) -> impl Iterator<Item = &Reference> {
        self.references_of_kind(symbol_id, RefKind::Call)
    }

    pub fn entry_point_symbols(&self) -> impl Iterator<Item = &Symbol> {
        self.entry_points.iter().filter_map(|&id| self.symbol(id))
    }

    pub fn symbols_in_file(&self, file_id: u16) -> impl Iterator<Item = &Symbol> {
        self.symbols.iter().filter(move |s| s.file_id == file_id)
    }

    pub fn tokens_in_file(&self, file_id: u16) -> impl Iterator<Item = &Token> {
        self.tokens.iter().filter(move |t| t.file_id == file_id)
    }

    pub fn stats(&self) -> IndexStats {
        IndexStats {
            symbols: self.symbols.len(),
            tokens: self.tokens.len(),
            references: self.references.len(),
            scopes: self.scopes.len(),
            edges: self.edges.len(),
            files: self.files.len(),
            entry_points: self.entry_points.len(),
            string_bytes: self.strings.byte_size(),
            unique_names: self.strings.len(),
        }
    }

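    /// Finds the id of a previously added file by a linear scan over `files`.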
    pub fn file_id_for_path(&self, path: &std::path::Path) -> Option<u16> {
        self.files.iter().position(|p| p == path).map(|i| i as u16)
    }

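    /// Removes all data belonging to `file_id` from the lookup structures and
    /// returns the number of symbols affected.
    ///
    /// The dense `symbols` and `tokens` slots are left in place (they are only
    /// unreferenced), and the file's scopes are neutralized rather than removed.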
    pub fn remove_file_data(&mut self, file_id: u16) -> usize {
        // Collect the ids of every symbol defined in the file.
        let symbols_to_remove: Vec<u32> = self
            .symbols
            .iter()
            .filter(|s| s.file_id == file_id)
            .map(|s| s.id)
            .collect();

        if symbols_to_remove.is_empty() {
            return 0;
        }

        let removed_count = symbols_to_remove.len();

        let symbol_set: std::collections::HashSet<u32> =
            symbols_to_remove.iter().copied().collect();

        // Drop the removed symbols from the name lookup.
        for symbol_id in &symbols_to_remove {
            if let Some(symbol) = self.symbols.get(*symbol_id as usize) {
                if let Some(name) = self.strings.get(symbol.name_offset) {
                    let compact_name = CompactString::new(name);
                    if let Some(ids) = self.symbol_by_name.get_mut(&compact_name) {
                        ids.retain(|id| *id != *symbol_id);
                        if ids.is_empty() {
                            self.symbol_by_name.remove(&compact_name);
                        }
                    }
                }
            }
        }

        self.entry_points.retain(|&id| !symbol_set.contains(&id));

        // Clear the adjacency and reference rows owned by the removed symbols.
        for &symbol_id in &symbols_to_remove {
            let idx = symbol_id as usize;
            if idx < self.incoming_edges.len() {
                self.incoming_edges[idx].clear();
            }
            if idx < self.outgoing_edges.len() {
                self.outgoing_edges[idx].clear();
            }
            if idx < self.refs_to_symbol.len() {
                self.refs_to_symbol[idx].clear();
            }
        }

        // Drop edges that touch a removed symbol, and purge removed ids from
        // the remaining adjacency rows.
        self.edges
            .retain(|e| !symbol_set.contains(&e.from_symbol) && !symbol_set.contains(&e.to_symbol));

        for adj in &mut self.incoming_edges {
            adj.retain(|id| !symbol_set.contains(id));
        }
        for adj in &mut self.outgoing_edges {
            adj.retain(|id| !symbol_set.contains(id));
        }

        // Do the same for the file's tokens and the references that use them.
        let tokens_to_remove: Vec<u32> = self
            .tokens
            .iter()
            .filter(|t| t.file_id == file_id)
            .map(|t| t.id)
            .collect();

        let token_set: std::collections::HashSet<u32> = tokens_to_remove.iter().copied().collect();

        for token_id in &tokens_to_remove {
            if let Some(token) = self.tokens.get(*token_id as usize) {
                if let Some(name) = self.strings.get(token.name_offset) {
                    let compact_name = CompactString::new(name);
                    if let Some(ids) = self.token_by_name.get_mut(&compact_name) {
                        ids.retain(|&id| id != *token_id);
                        if ids.is_empty() {
                            self.token_by_name.remove(&compact_name);
                        }
                    }
                }
            }
        }

        self.references
            .retain(|r| !token_set.contains(&r.token_id) && !symbol_set.contains(&r.symbol_id));

        for refs in &mut self.refs_to_symbol {
            refs.retain(|&tid| !token_set.contains(&tid));
        }

        // Neutralize the file's scopes in place; keeping the slots means other
        // scope ids remain valid.
        for scope in &mut self.scopes {
            if scope.file_id == file_id {
                scope.kind = super::types::ScopeKind::Unknown as u8;
                scope.start_line = 0;
                scope.end_line = 0;
            }
        }

        removed_count
    }

    pub fn next_symbol_id(&self) -> u32 {
        self.symbols.len() as u32
    }

    pub fn next_token_id(&self) -> u32 {
        self.tokens.len() as u32
    }

    pub fn next_scope_id(&self) -> u32 {
        self.scopes.len() as u32
    }

    pub fn has_file(&self, path: &std::path::Path) -> bool {
        self.file_id_for_path(path).is_some()
    }
}

impl Default for SemanticIndex {
    fn default() -> Self {
        Self::new()
    }
}

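/// Summary counts for a [`SemanticIndex`], as produced by [`SemanticIndex::stats`].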
#[derive(Debug, Clone, Copy)]
pub struct IndexStats {
    pub symbols: usize,
    pub tokens: usize,
    pub references: usize,
    pub scopes: usize,
    pub edges: usize,
    pub files: usize,
    pub entry_points: usize,
    pub string_bytes: usize,
    pub unique_names: usize,
}

impl std::fmt::Display for IndexStats {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        writeln!(f, "Semantic Index Statistics:")?;
        writeln!(f, " Symbols: {:>8}", self.symbols)?;
        writeln!(f, " Tokens: {:>8}", self.tokens)?;
        writeln!(f, " References: {:>8}", self.references)?;
        writeln!(f, " Scopes: {:>8}", self.scopes)?;
        writeln!(f, " Edges: {:>8}", self.edges)?;
        writeln!(f, " Files: {:>8}", self.files)?;
        writeln!(f, " Entry Points: {:>8}", self.entry_points)?;
        writeln!(f, " String Bytes: {:>8}", self.string_bytes)?;
        writeln!(f, " Unique Names: {:>8}", self.unique_names)?;
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::trace::types::{SymbolFlags, TokenKind};

    #[test]
    fn test_string_table_interning() {
        let mut table = StringTable::new();

        let offset1 = table.intern("foo");
        let offset2 = table.intern("bar");
        let offset3 = table.intern("foo");

        assert_eq!(offset1, offset3);
        assert_ne!(offset1, offset2);
        assert_eq!(table.get(offset1), Some("foo"));
        assert_eq!(table.get(offset2), Some("bar"));
        assert_eq!(table.len(), 2);
    }

    #[test]
    fn test_semantic_index_basic() {
        let mut index = SemanticIndex::new();

        let file_id = index.add_file(PathBuf::from("test.ts"));
        assert_eq!(file_id, 0);

        let name_offset = index.strings.intern("myFunction");
        let symbol = Symbol::new(
            0,
            name_offset,
            file_id,
            SymbolKind::Function,
            SymbolFlags::IS_ENTRY_POINT | SymbolFlags::IS_EXPORTED,
            10,
            20,
        );
        index.add_symbol(symbol, "myFunction");

        assert_eq!(index.symbols.len(), 1);
        assert!(index.symbols_by_name("myFunction").is_some());
        assert_eq!(index.entry_points.len(), 1);
    }

    #[test]
    fn test_call_graph() {
        let mut index = SemanticIndex::new();
        let file_id = index.add_file(PathBuf::from("test.ts"));

        let name1 = index.strings.intern("caller");
        let name2 = index.strings.intern("callee");

        let sym1 = Symbol::new(
            0,
            name1,
            file_id,
            SymbolKind::Function,
            SymbolFlags::empty(),
            1,
            10,
        );
        let sym2 = Symbol::new(
            1,
            name2,
            file_id,
            SymbolKind::Function,
            SymbolFlags::empty(),
            15,
            25,
        );

        index.add_symbol(sym1, "caller");
        index.add_symbol(sym2, "callee");

        index.add_edge(Edge::new(0, 1, 5));

        assert_eq!(index.callers(1), &[0]);
        assert_eq!(index.callees(0), &[1]);
    }

    #[test]
    fn test_references() {
        let mut index = SemanticIndex::new();
        let file_id = index.add_file(PathBuf::from("test.ts"));

        let name = index.strings.intern("myVar");
        let symbol = Symbol::new(
            0,
            name,
            file_id,
            SymbolKind::Variable,
            SymbolFlags::empty(),
            1,
            1,
        );
        index.add_symbol(symbol, "myVar");

        let token = Token::new(0, name, file_id, 5, 10, TokenKind::Identifier, 0);
        index.add_token(token, "myVar");
        index.add_reference(Reference::new(0, 0, RefKind::Read));

        let refs: Vec<_> = index.references_to(0).collect();
        assert_eq!(refs.len(), 1);
        assert!(refs[0].is_read());
    }

    #[test]
    fn test_file_id_for_path() {
        let mut index = SemanticIndex::new();

        let file1_id = index.add_file(PathBuf::from("src/main.ts"));
        let file2_id = index.add_file(PathBuf::from("src/utils.ts"));

        assert_eq!(
            index.file_id_for_path(std::path::Path::new("src/main.ts")),
            Some(file1_id)
        );
        assert_eq!(
            index.file_id_for_path(std::path::Path::new("src/utils.ts")),
            Some(file2_id)
        );
        assert_eq!(
            index.file_id_for_path(std::path::Path::new("src/other.ts")),
            None
        );
    }

    #[test]
    fn test_remove_file_data() {
        let mut index = SemanticIndex::new();

        let file1_id = index.add_file(PathBuf::from("file1.ts"));
        let file2_id = index.add_file(PathBuf::from("file2.ts"));

        let name1 = index.strings.intern("func1");
        let name2 = index.strings.intern("func2");

        let sym1 = Symbol::new(
            0,
            name1,
            file1_id,
            SymbolKind::Function,
            SymbolFlags::IS_ENTRY_POINT,
            1,
            10,
        );
        let sym2 = Symbol::new(
            1,
            name2,
            file2_id,
            SymbolKind::Function,
            SymbolFlags::empty(),
            1,
            10,
        );

        index.add_symbol(sym1, "func1");
        index.add_symbol(sym2, "func2");

        index.add_edge(Edge::new(0, 1, 5));

        assert!(index.symbols_by_name("func1").is_some());
        assert!(index.symbols_by_name("func2").is_some());
        assert_eq!(index.entry_points.len(), 1);
        assert_eq!(index.edges.len(), 1);

        let removed = index.remove_file_data(file1_id);
        assert_eq!(removed, 1, "Should remove 1 symbol");

        assert!(
            index
                .symbols_by_name("func1")
                .map(|s| s.is_empty())
                .unwrap_or(true),
            "func1 should be removed from lookup"
        );

        assert!(
            index
                .symbols_by_name("func2")
                .map(|s| !s.is_empty())
                .unwrap_or(false),
            "func2 should still be in lookup"
        );

        assert!(
            index.entry_points.is_empty(),
            "Entry points should be empty"
        );

        assert!(index.edges.is_empty(), "Edges should be removed");
    }

    #[test]
    fn test_next_id_methods() {
        let mut index = SemanticIndex::new();
        let file_id = index.add_file(PathBuf::from("test.ts"));

        assert_eq!(index.next_symbol_id(), 0);
        assert_eq!(index.next_token_id(), 0);
        assert_eq!(index.next_scope_id(), 0);

        let name = index.strings.intern("test");
        let symbol = Symbol::new(
            0,
            name,
            file_id,
            SymbolKind::Function,
            SymbolFlags::empty(),
            1,
            10,
        );
        index.add_symbol(symbol, "test");

        assert_eq!(index.next_symbol_id(), 1);

        let token = Token::new(0, name, file_id, 1, 0, TokenKind::Identifier, 0);
        index.add_token(token, "test");

        assert_eq!(index.next_token_id(), 1);
    }

    #[test]
    fn test_has_file() {
        let mut index = SemanticIndex::new();

        assert!(!index.has_file(std::path::Path::new("test.ts")));

        index.add_file(PathBuf::from("test.ts"));

        assert!(index.has_file(std::path::Path::new("test.ts")));
        assert!(!index.has_file(std::path::Path::new("other.ts")));
    }
}