1use std::collections::HashMap;
8use std::fmt::Write as _;
9use std::path::{Path, PathBuf};
10
11use rkyv::{Archive, Deserialize as RkyvDeserialize, Serialize as RkyvSerialize};
12use streaming_iterator::StreamingIterator;
13use tree_sitter::{Parser, Query, QueryCursor};
14
15use crate::languages;
16use crate::walk;
17
/// Precomputed call-graph index for a repository: per-file and per-definition
/// PageRank scores plus trimmed caller/callee neighbor lists.
///
/// Serialized with `rkyv`, so field order is part of the archived layout.
#[derive(Debug, Clone, Archive, RkyvSerialize, RkyvDeserialize)]
pub struct RepoGraph {
    /// One node per indexed source file; positions are the file ids used below.
    pub files: Vec<FileNode>,
    /// File-level call edges `(src_file, dst_file, weight)`, aggregated from
    /// resolved definition calls (same-file calls are excluded).
    pub edges: Vec<(u32, u32, u32)>,
    /// Per-file rank: the sum of `def_ranks` over that file's definitions.
    pub base_ranks: Vec<f32>,
    /// Per file: up to `MAX_NEIGHBORS` files that call into it, strongest first.
    pub callers: Vec<Vec<u32>>,
    /// Per file: up to `MAX_NEIGHBORS` files it calls, strongest first.
    pub callees: Vec<Vec<u32>>,
    /// Definition-level call edges `(caller, callee, call count)`.
    pub def_edges: Vec<(DefId, DefId, u32)>,
    /// PageRank over the flattened definition graph (flattening via `def_offsets`).
    pub def_ranks: Vec<f32>,
    /// Per definition (flat index): up to `MAX_NEIGHBORS` calling definitions.
    pub def_callers: Vec<Vec<DefId>>,
    /// Per definition (flat index): up to `MAX_NEIGHBORS` called definitions.
    pub def_callees: Vec<Vec<DefId>>,
    /// Prefix sums: `def_offsets[i]` is the flat index of file `i`'s first
    /// definition; the final entry is the total definition count.
    pub def_offsets: Vec<usize>,
    /// Blend factor derived from graph density (`0.5 + 0.3 * min(density, 1)`).
    /// NOTE(review): consumed outside this file — confirm intended semantics there.
    pub alpha: f32,
}
46
/// A single indexed source file.
#[derive(Debug, Clone, Archive, RkyvSerialize, RkyvDeserialize)]
pub struct FileNode {
    /// Path relative to the repository root (display form).
    pub path: String,
    /// Definitions extracted by the language's tree-sitter definition query.
    pub defs: Vec<Definition>,
    /// Import statements found in the file, possibly resolved to other files.
    pub imports: Vec<ImportRef>,
}
57
/// A named definition (function, struct, class, …) inside a file.
#[derive(Debug, Clone, Archive, RkyvSerialize, RkyvDeserialize)]
pub struct Definition {
    /// Identifier captured as `@name` by the definition query.
    pub name: String,
    /// Tree-sitter node kind, e.g. `function_item`.
    pub kind: String,
    /// 1-based first line of the definition.
    pub start_line: u32,
    /// 1-based last line of the definition.
    pub end_line: u32,
    /// Enclosing scope chain (from `chunk::build_scope_chain`); may be empty.
    pub scope: String,
    /// Signature text when the language extractor can produce one.
    pub signature: Option<String>,
    /// Byte offset where the definition's node starts in the source file.
    pub start_byte: u32,
    /// Exclusive byte offset where the definition's node ends.
    pub end_byte: u32,
    /// Call sites whose byte offset falls inside this definition's span.
    pub calls: Vec<CallRef>,
}
80
/// One import statement found in a file.
#[derive(Debug, Clone, Archive, RkyvSerialize, RkyvDeserialize)]
pub struct ImportRef {
    /// Raw import text, or just the module path when the query captures it.
    pub raw_path: String,
    /// Index into `RepoGraph::files` when the import resolves inside the repo.
    pub resolved_idx: Option<u32>,
}
89
/// Definition id: `(file index, definition index within that file)`.
pub type DefId = (u32, u16);
92
/// A call site recorded inside a definition.
#[derive(Debug, Clone, Archive, RkyvSerialize, RkyvDeserialize)]
pub struct CallRef {
    /// Callee identifier text as written at the call site.
    pub name: String,
    /// Byte offset of the callee token in the source file.
    pub byte_offset: u32,
    /// Target definition, filled in by `resolve_calls` when a match is found.
    pub resolved: Option<DefId>,
}
103
/// PageRank damping factor: probability of following an edge vs. teleporting.
const DAMPING: f32 = 0.85;

/// L1 convergence threshold for the PageRank iteration.
const EPSILON: f32 = 1e-6;

/// Hard cap on PageRank iterations when convergence is not reached.
const MAX_ITERATIONS: usize = 100;

/// Maximum caller/callee neighbors kept per node in the trimmed lists.
const MAX_NEIGHBORS: usize = 5;

/// Rough chars-per-token heuristic used for render budgeting.
const CHARS_PER_TOKEN: usize = 4;
120
/// Tree-sitter language and import query for a file extension, if supported.
///
/// Each query exposes up to two captures: `@import` (the whole import node)
/// and, where the grammar allows it, `@import_path` (just the quoted module
/// path). Returns `None` for unsupported extensions, or if the query fails to
/// compile against the linked grammar.
fn import_query_for_extension(ext: &str) -> Option<(tree_sitter::Language, Query)> {
    let (lang, query_str): (tree_sitter::Language, &str) = match ext {
        "rs" => (
            tree_sitter_rust::LANGUAGE.into(),
            "(use_declaration) @import",
        ),
        "py" => (
            tree_sitter_python::LANGUAGE.into(),
            concat!(
                "(import_statement) @import\n",
                "(import_from_statement) @import",
            ),
        ),
        "js" | "jsx" => (
            tree_sitter_javascript::LANGUAGE.into(),
            "(import_statement source: (string) @import_path) @import",
        ),
        "ts" => (
            tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into(),
            "(import_statement source: (string) @import_path) @import",
        ),
        "tsx" => (
            tree_sitter_typescript::LANGUAGE_TSX.into(),
            "(import_statement source: (string) @import_path) @import",
        ),
        "go" => (
            tree_sitter_go::LANGUAGE.into(),
            "(import_spec path: (interpreted_string_literal) @import_path) @import",
        ),
        // Ruby has no import statement; match `require "..."` calls instead.
        "rb" => (
            tree_sitter_ruby::LANGUAGE.into(),
            "(call method: (identifier) @_method arguments: (argument_list (string (string_content) @import_path)) (#eq? @_method \"require\")) @import",
        ),
        _ => return None,
    };
    let query = match Query::new(&lang, query_str) {
        Ok(q) => q,
        Err(e) => {
            // A grammar/query mismatch is survivable: imports for this
            // language simply won't be indexed.
            tracing::warn!(ext, %e, "import query compilation failed — language may be ABI-incompatible");
            return None;
        }
    };
    Some((lang, query))
}
171
172fn extract_imports(
174 source: &str,
175 lang: &tree_sitter::Language,
176 import_query: &Query,
177) -> Vec<String> {
178 let mut parser = Parser::new();
179 if parser.set_language(lang).is_err() {
180 return vec![];
181 }
182 let Some(tree) = parser.parse(source, None) else {
183 return vec![];
184 };
185
186 let mut cursor = QueryCursor::new();
187 let mut imports = Vec::new();
188 let mut matches = cursor.matches(import_query, tree.root_node(), source.as_bytes());
189
190 while let Some(m) = matches.next() {
191 let mut import_path_text = None;
193 let mut import_text = None;
194
195 for cap in m.captures {
196 let cap_name = &import_query.capture_names()[cap.index as usize];
197 let text = &source[cap.node.start_byte()..cap.node.end_byte()];
198 if *cap_name == "import_path" {
199 import_path_text = Some(text.trim_matches(|c| c == '"' || c == '\''));
200 } else if *cap_name == "import" {
201 import_text = Some(text);
202 }
203 }
204
205 if let Some(path) = import_path_text {
206 imports.push(path.to_string());
207 } else if let Some(text) = import_text {
208 imports.push(text.to_string());
209 }
210 }
211
212 imports
213}
214
/// Resolve a Rust `use` declaration to a file index, if it targets this repo.
///
/// Only `crate::`, `self::`, and `super::` paths are considered; anything else
/// (std and external crates) yields `None`. Path segments are probed longest
/// prefix first, as both `<seg>.rs` and `<seg>/mod.rs`, so `use crate::a::b::Item`
/// can resolve to `a/b.rs`, `a/mod.rs`, or `a.rs`.
fn resolve_rust_import(
    raw: &str,
    file_path: &Path,
    root: &Path,
    file_index: &HashMap<PathBuf, usize>,
) -> Option<usize> {
    // Strip an optional visibility modifier plus the `use` keyword and the
    // trailing `;`. Searching for "use " (rather than prefix-matching it)
    // also handles `pub use ...` re-exports, which a plain
    // `trim_start_matches("use ")` silently dropped.
    let trimmed = raw.trim();
    let trimmed = trimmed
        .find("use ")
        .map_or(trimmed, |i| trimmed[i + 4..].trim_start());
    let trimmed = trimmed.trim_end_matches(';').trim();

    let segments: Vec<&str> = trimmed.split("::").collect();
    if segments.is_empty() {
        return None;
    }

    let (base, skip) = match segments[0] {
        "crate" => {
            // Walk up from the file until a Cargo.toml marks the crate root;
            // fall back to `<root>/src` when none is found on disk.
            let mut dir = file_path.parent();
            let crate_root = loop {
                match dir {
                    Some(d) if d.join("Cargo.toml").exists() => break d.join("src"),
                    Some(d) => dir = d.parent(),
                    None => break root.join("src"),
                }
            };
            (crate_root, 1)
        }
        "self" => {
            let dir = file_path.parent()?;
            (dir.to_path_buf(), 1)
        }
        "super" => {
            let dir = file_path.parent()?.parent()?;
            (dir.to_path_buf(), 1)
        }
        // std / external crates cannot map to files inside this repo.
        _ => return None,
    };

    // Try progressively shorter prefixes: the deepest segment may be an item
    // (fn, struct, …) rather than a module, so `a::b::Item` should still
    // match `a/b.rs`.
    let path_segments = &segments[skip..];
    for end in (1..=path_segments.len()).rev() {
        let mut candidate = base.clone();
        for seg in &path_segments[..end] {
            // Drop brace groups (`use crate::a::{b, c}`) and stray whitespace.
            let clean = seg.split('{').next().unwrap_or(seg).trim();
            if !clean.is_empty() {
                candidate.push(clean);
            }
        }

        let as_file = candidate.with_extension("rs");
        if let Some(&idx) = file_index.get(&as_file) {
            return Some(idx);
        }

        let as_mod = candidate.join("mod.rs");
        if let Some(&idx) = file_index.get(&as_mod) {
            return Some(idx);
        }
    }

    None
}
296
297fn resolve_import(
299 raw: &str,
300 ext: &str,
301 file_path: &Path,
302 root: &Path,
303 file_index: &HashMap<PathBuf, usize>,
304) -> Option<usize> {
305 match ext {
306 "rs" => resolve_rust_import(raw, file_path, root, file_index),
307 "py" => resolve_python_import(raw, root, file_index),
308 "js" | "jsx" | "ts" | "tsx" => resolve_js_import(raw, file_path, file_index),
309 _ => None,
311 }
312}
313
/// Resolve a Python import statement to a file index under `root`.
///
/// Handles `import a.b` and `from a.b import c`; the dotted module path is
/// tried first as `a/b.py`, then as a package `a/b/__init__.py`.
fn resolve_python_import(
    raw: &str,
    root: &Path,
    file_index: &HashMap<PathBuf, usize>,
) -> Option<usize> {
    // Pull the dotted module path out of either statement form.
    let stripped = raw
        .strip_prefix("from ")
        .or_else(|| raw.strip_prefix("import "))?;
    let module = stripped.split_whitespace().next()?;

    let rel: PathBuf = module.split('.').collect();

    let module_file = root.join(&rel).with_extension("py");
    if let Some(&idx) = file_index.get(&module_file) {
        Some(idx)
    } else {
        let package_init = root.join(&rel).join("__init__.py");
        file_index.get(&package_init).copied()
    }
}
339
/// Resolve a relative JS/TS import specifier to a file index.
///
/// Only relative specifiers (`./`, `../`) are considered. Resolution order
/// mirrors Node-style lookup: the exact path (specifier already carries an
/// extension), then the path with each known extension appended, then a
/// directory `index.<ext>` file.
fn resolve_js_import(
    raw: &str,
    file_path: &Path,
    file_index: &HashMap<PathBuf, usize>,
) -> Option<usize> {
    if !raw.starts_with('.') {
        return None;
    }

    let dir = file_path.parent()?;
    let candidate = dir.join(raw);

    // Exact match first: `import "./foo.js"` names the file directly.
    if let Some(&idx) = file_index.get(&candidate) {
        return Some(idx);
    }

    const EXTS: [&str; 4] = ["js", "jsx", "ts", "tsx"];

    // Append the extension rather than using `with_extension`, which would
    // clobber a dot inside the final segment ("./config.dev" must become
    // "config.dev.js", not "config.js").
    for ext in EXTS {
        let mut with_ext = candidate.as_os_str().to_os_string();
        with_ext.push(".");
        with_ext.push(ext);
        if let Some(&idx) = file_index.get(Path::new(&with_ext)) {
            return Some(idx);
        }
    }

    // Directory import: fall back to `<dir>/index.<ext>`.
    for ext in EXTS {
        let index_file = candidate.join(format!("index.{ext}"));
        if let Some(&idx) = file_index.get(&index_file) {
            return Some(idx);
        }
    }

    None
}
371
372fn extract_definitions(source: &str, config: &languages::LangConfig) -> Vec<Definition> {
376 let mut parser = Parser::new();
377 if parser.set_language(&config.language).is_err() {
378 return vec![];
379 }
380 let Some(tree) = parser.parse(source, None) else {
381 return vec![];
382 };
383
384 let mut cursor = QueryCursor::new();
385 let mut defs = Vec::new();
386 let mut matches = cursor.matches(&config.query, tree.root_node(), source.as_bytes());
387
388 while let Some(m) = matches.next() {
389 let mut name = String::new();
390 let mut def_node = None;
391
392 for cap in m.captures {
393 let cap_name = &config.query.capture_names()[cap.index as usize];
394 if *cap_name == "name" {
395 name = source[cap.node.start_byte()..cap.node.end_byte()].to_string();
396 } else if *cap_name == "def" {
397 def_node = Some(cap.node);
398 }
399 }
400
401 if let Some(node) = def_node {
402 let scope = crate::chunk::build_scope_chain(node, source);
403 let signature = crate::chunk::extract_signature(node, source);
404 #[expect(clippy::cast_possible_truncation, reason = "line numbers fit in u32")]
405 let start_line = node.start_position().row as u32 + 1;
406 #[expect(clippy::cast_possible_truncation, reason = "line numbers fit in u32")]
407 let end_line = node.end_position().row as u32 + 1;
408 #[expect(clippy::cast_possible_truncation, reason = "byte offsets fit in u32")]
409 let start_byte = node.start_byte() as u32;
410 #[expect(clippy::cast_possible_truncation, reason = "byte offsets fit in u32")]
411 let end_byte = node.end_byte() as u32;
412 defs.push(Definition {
413 name,
414 kind: node.kind().to_string(),
415 start_line,
416 end_line,
417 scope,
418 signature,
419 start_byte,
420 end_byte,
421 calls: vec![],
422 });
423 }
424 }
425
426 defs
427}
428
429fn extract_calls(source: &str, call_config: &languages::CallConfig, defs: &mut [Definition]) {
437 let mut parser = Parser::new();
438 if parser.set_language(&call_config.language).is_err() {
439 return;
440 }
441 let Some(tree) = parser.parse(source, None) else {
442 return;
443 };
444
445 let mut cursor = QueryCursor::new();
446 let mut matches = cursor.matches(&call_config.query, tree.root_node(), source.as_bytes());
447
448 while let Some(m) = matches.next() {
449 let mut callee_name = None;
450 let mut call_byte = 0u32;
451
452 for cap in m.captures {
453 let cap_name = &call_config.query.capture_names()[cap.index as usize];
454 if *cap_name == "callee" {
455 callee_name = Some(source[cap.node.start_byte()..cap.node.end_byte()].to_string());
456 #[expect(clippy::cast_possible_truncation, reason = "byte offsets fit in u32")]
457 {
458 call_byte = cap.node.start_byte() as u32;
459 }
460 }
461 }
462
463 if let Some(name) = callee_name {
464 if let Some(def) = defs
466 .iter_mut()
467 .find(|d| d.start_byte <= call_byte && call_byte < d.end_byte)
468 {
469 if def.name != name {
471 def.calls.push(CallRef {
472 name,
473 byte_offset: call_byte,
474 resolved: None,
475 });
476 }
477 }
478 }
480 }
481}
482
483fn build_def_index(files: &[FileNode]) -> HashMap<String, Vec<DefId>> {
485 let mut index: HashMap<String, Vec<DefId>> = HashMap::new();
486 for (file_idx, file) in files.iter().enumerate() {
487 for (def_idx, def) in file.defs.iter().enumerate() {
488 #[expect(clippy::cast_possible_truncation)]
489 let did: DefId = (file_idx as u32, def_idx as u16);
490 index.entry(def.name.clone()).or_default().push(did);
491 }
492 }
493 index
494}
495
496fn resolve_calls(files: &mut [FileNode], def_index: &HashMap<String, Vec<DefId>>) {
503 let imported_files: Vec<std::collections::HashSet<u32>> = files
505 .iter()
506 .map(|f| {
507 f.imports
508 .iter()
509 .filter_map(|imp| imp.resolved_idx)
510 .collect()
511 })
512 .collect();
513
514 for file_idx in 0..files.len() {
515 for def_idx in 0..files[file_idx].defs.len() {
516 for call_idx in 0..files[file_idx].defs[def_idx].calls.len() {
517 let call_name = files[file_idx].defs[def_idx].calls[call_idx].name.clone();
518
519 let Some(candidates) = def_index.get(&call_name) else {
520 continue;
521 };
522
523 #[expect(clippy::cast_possible_truncation)]
525 let file_idx_u32 = file_idx as u32;
526 if let Some(&did) = candidates.iter().find(|(f, _)| *f == file_idx_u32) {
527 files[file_idx].defs[def_idx].calls[call_idx].resolved = Some(did);
528 continue;
529 }
530
531 if let Some(&did) = candidates
533 .iter()
534 .find(|(f, _)| imported_files[file_idx].contains(f))
535 {
536 files[file_idx].defs[def_idx].calls[call_idx].resolved = Some(did);
537 }
538 }
540 }
541 }
542}
543
544fn def_offsets(files: &[FileNode]) -> Vec<usize> {
546 let mut offsets = Vec::with_capacity(files.len() + 1);
547 offsets.push(0);
548 for file in files {
549 offsets.push(offsets.last().unwrap() + file.defs.len());
550 }
551 offsets
552}
553
554fn flatten_def_id(offsets: &[usize], did: DefId) -> usize {
556 offsets[did.0 as usize] + did.1 as usize
557}
558
559fn build_def_neighbor_lists(
561 n: usize,
562 edges: &[(u32, u32, u32)],
563 offsets: &[usize],
564) -> (Vec<Vec<DefId>>, Vec<Vec<DefId>>) {
565 let mut incoming: Vec<Vec<(u32, u32)>> = vec![vec![]; n];
566 let mut outgoing: Vec<Vec<(u32, u32)>> = vec![vec![]; n];
567
568 for &(src, dst, w) in edges {
569 let (s, d) = (src as usize, dst as usize);
570 if s < n && d < n {
571 incoming[d].push((src, w));
572 outgoing[s].push((dst, w));
573 }
574 }
575
576 let to_def_id = |flat: u32| -> DefId {
578 let flat_usize = flat as usize;
579 let file_idx = offsets.partition_point(|&o| o <= flat_usize) - 1;
580 let def_idx = flat_usize - offsets[file_idx];
581 #[expect(clippy::cast_possible_truncation)]
582 (file_idx as u32, def_idx as u16)
583 };
584
585 let callers = incoming
586 .into_iter()
587 .map(|mut v| {
588 v.sort_by(|a, b| b.1.cmp(&a.1));
589 v.truncate(MAX_NEIGHBORS);
590 v.into_iter().map(|(idx, _)| to_def_id(idx)).collect()
591 })
592 .collect();
593
594 let callees = outgoing
595 .into_iter()
596 .map(|mut v| {
597 v.sort_by(|a, b| b.1.cmp(&a.1));
598 v.truncate(MAX_NEIGHBORS);
599 v.into_iter().map(|(idx, _)| to_def_id(idx)).collect()
600 })
601 .collect();
602
603 (callers, callees)
604}
605
/// Weighted PageRank over `n` nodes, optionally biased toward a focus node.
///
/// `edges` are `(src, dst, weight)` triples; endpoints outside `0..n` are
/// ignored. With `focus = Some(i)` the teleport vector puts ~70% of its mass
/// on node `i` (topic-sensitive PageRank); otherwise teleportation is
/// uniform. Rank mass from dangling nodes (no out-edges) is redistributed
/// along the same teleport vector each iteration. Iterates until the L1
/// delta drops below `EPSILON` or `MAX_ITERATIONS` is reached; the resulting
/// ranks sum to ~1.0.
#[expect(
    clippy::cast_precision_loss,
    reason = "node count fits comfortably in f32"
)]
fn pagerank(n: usize, edges: &[(u32, u32, u32)], focus: Option<usize>) -> Vec<f32> {
    if n == 0 {
        return vec![];
    }

    // Adjacency lists plus each node's total outgoing weight (for normalizing
    // the share of rank sent along each edge).
    let mut out_edges: Vec<Vec<(usize, f32)>> = vec![vec![]; n];
    let mut out_weight: Vec<f32> = vec![0.0; n];

    for &(src, dst, w) in edges {
        let (s, d) = (src as usize, dst as usize);
        if s < n && d < n {
            #[expect(clippy::cast_possible_truncation, reason = "edge weights are small")]
            let wf = f64::from(w) as f32;
            out_edges[s].push((d, wf));
            out_weight[s] += wf;
        }
    }

    // Teleport distribution: ~70/30 split toward the focus node, or uniform.
    let bias: Vec<f32> = if let Some(idx) = focus {
        let uniform = 1.0 / n as f32;
        let mut b = vec![0.3 * uniform; n];
        if idx < n {
            b[idx] += 0.7;
        }
        // Renormalize so the teleport vector sums to exactly 1.
        let sum: f32 = b.iter().sum();
        for v in &mut b {
            *v /= sum;
        }
        b
    } else {
        vec![1.0 / n as f32; n]
    };

    let mut rank = vec![1.0 / n as f32; n];
    let mut next_rank = vec![0.0_f32; n];

    for _ in 0..MAX_ITERATIONS {
        // Rank currently held by dangling nodes; re-seeded via the bias vector.
        let dangling: f32 = rank
            .iter()
            .enumerate()
            .filter(|&(i, _)| out_edges[i].is_empty())
            .map(|(_, &r)| r)
            .sum();

        // Teleport term plus the dangling-mass redistribution.
        for (i, nr) in next_rank.iter_mut().enumerate() {
            *nr = (1.0 - DAMPING).mul_add(bias[i], DAMPING * dangling * bias[i]);
        }

        // Propagate rank along out-edges proportionally to edge weight.
        for (src, edges_list) in out_edges.iter().enumerate() {
            if edges_list.is_empty() {
                continue;
            }
            let src_rank = rank[src];
            let total_w = out_weight[src];
            for &(dst, w) in edges_list {
                next_rank[dst] += DAMPING * src_rank * (w / total_w);
            }
        }

        // L1 distance between successive iterations decides convergence.
        let diff: f32 = rank
            .iter()
            .zip(next_rank.iter())
            .map(|(a, b)| (a - b).abs())
            .sum();

        std::mem::swap(&mut rank, &mut next_rank);

        if diff < EPSILON {
            break;
        }
    }

    rank
}
700
/// Intermediate results of the definition-graph computation, consumed by
/// `build_graph` when assembling the final `RepoGraph`.
struct DefGraphData {
    /// Definition-level call edges `(caller, callee, call count)`.
    def_edges: Vec<(DefId, DefId, u32)>,
    /// PageRank over the flattened definition graph.
    def_ranks: Vec<f32>,
    /// Top calling definitions per definition (flat index).
    def_callers: Vec<Vec<DefId>>,
    /// Top called definitions per definition (flat index).
    def_callees: Vec<Vec<DefId>>,
    /// Prefix sums mapping file index to first flat definition index.
    offsets: Vec<usize>,
    /// Per-file rank: sum of the file's definition ranks.
    base_ranks: Vec<f32>,
    /// Aggregated cross-file edges `(src_file, dst_file, weight)`.
    file_edges: Vec<(u32, u32, u32)>,
}
713
/// Build the definition-level call graph and everything derived from it.
///
/// Aggregates resolved calls into weighted def→def edges, runs PageRank over
/// the flattened definitions, folds definition ranks into per-file ranks,
/// and collapses cross-file def edges into the file-level edge list.
fn compute_def_graph(files: &[FileNode]) -> DefGraphData {
    // Count calls per (caller, callee) pair; the count becomes the edge weight.
    let mut def_edge_map: HashMap<(DefId, DefId), u32> = HashMap::new();
    for (file_idx, file) in files.iter().enumerate() {
        for (def_idx, def) in file.defs.iter().enumerate() {
            #[expect(clippy::cast_possible_truncation)]
            let caller_id: DefId = (file_idx as u32, def_idx as u16);
            for call in &def.calls {
                if let Some(callee_id) = call.resolved {
                    *def_edge_map.entry((caller_id, callee_id)).or_insert(0) += 1;
                }
            }
        }
    }
    let def_edges: Vec<(DefId, DefId, u32)> = def_edge_map
        .into_iter()
        .map(|((src, dst), w)| (src, dst, w))
        .collect();

    let offsets = def_offsets(files);
    let n_defs = *offsets.last().unwrap_or(&0);

    // PageRank operates on flat indices, so flatten the (file, def) pairs.
    let flat_def_edges: Vec<(u32, u32, u32)> = def_edges
        .iter()
        .map(|(src, dst, w)| {
            #[expect(clippy::cast_possible_truncation)]
            (
                flatten_def_id(&offsets, *src) as u32,
                flatten_def_id(&offsets, *dst) as u32,
                *w,
            )
        })
        .collect();

    let def_ranks = pagerank(n_defs, &flat_def_edges, None);

    // A file's rank is the total rank of its definitions.
    let base_ranks: Vec<f32> = files
        .iter()
        .enumerate()
        .map(|(i, file)| {
            let start = offsets[i];
            let end = start + file.defs.len();
            def_ranks[start..end].iter().sum()
        })
        .collect();

    // Collapse def edges into file edges, dropping same-file calls.
    let mut file_edge_map: HashMap<(u32, u32), u32> = HashMap::new();
    for &(src, dst, w) in &def_edges {
        let src_file = src.0;
        let dst_file = dst.0;
        if src_file != dst_file {
            *file_edge_map.entry((src_file, dst_file)).or_insert(0) += w;
        }
    }
    let file_edges: Vec<(u32, u32, u32)> = file_edge_map
        .into_iter()
        .map(|((src, dst), w)| (src, dst, w))
        .collect();

    let (def_callers, def_callees) = build_def_neighbor_lists(n_defs, &flat_def_edges, &offsets);

    DefGraphData {
        def_edges,
        def_ranks,
        def_callers,
        def_callees,
        offsets,
        base_ranks,
        file_edges,
    }
}
790
/// Walk `root`, parse every supported source file, and assemble a [`RepoGraph`].
///
/// Pipeline: collect files → extract definitions and imports → extract call
/// sites → resolve calls by name (same file first, then imported files) →
/// compute definition/file PageRank and neighbor lists.
///
/// # Errors
///
/// Returns an error if `root` cannot be canonicalized. Unreadable or
/// unsupported files are skipped silently.
pub fn build_graph(root: &Path) -> crate::Result<RepoGraph> {
    let root = root.canonicalize().map_err(|e| crate::Error::Io {
        path: root.display().to_string(),
        source: e,
    })?;

    let all_files = walk::collect_files(&root, None);

    // Pass 1: register every parseable file so imports can resolve by index
    // regardless of walk order.
    let mut file_index: HashMap<PathBuf, usize> = HashMap::new();
    let mut files: Vec<FileNode> = Vec::new();
    let mut raw_sources: Vec<(usize, String, String)> = Vec::new();
    for path in &all_files {
        let ext = path
            .extension()
            .and_then(|e| e.to_str())
            .unwrap_or_default()
            .to_string();

        // Skip extensions we can neither define-parse nor import-parse.
        if languages::config_for_extension(&ext).is_none()
            && import_query_for_extension(&ext).is_none()
        {
            continue;
        }

        let Ok(source) = std::fs::read_to_string(path) else {
            continue;
        };

        let rel_path = path
            .strip_prefix(&root)
            .unwrap_or(path)
            .display()
            .to_string();

        let idx = files.len();
        file_index.insert(path.clone(), idx);
        files.push(FileNode {
            path: rel_path,
            defs: vec![],
            imports: vec![],
        });
        raw_sources.push((idx, ext, source));
    }

    // Pass 2: definitions and imports (imports need the complete file_index).
    for (idx, ext, source) in &raw_sources {
        if let Some(config) = languages::config_for_extension(ext) {
            files[*idx].defs = extract_definitions(source, &config);
        }

        if let Some((lang, import_query)) = import_query_for_extension(ext) {
            let raw_imports = extract_imports(source, &lang, &import_query);
            let file_path = root.join(&files[*idx].path);

            files[*idx].imports = raw_imports
                .into_iter()
                .map(|raw| {
                    let resolved_idx = resolve_import(&raw, ext, &file_path, &root, &file_index)
                        .and_then(|i| u32::try_from(i).ok());
                    ImportRef {
                        raw_path: raw,
                        resolved_idx,
                    }
                })
                .collect();
        }
    }

    // Pass 3: call sites, attributed to the definitions extracted in pass 2.
    for (idx, ext, source) in &raw_sources {
        if let Some(call_config) = languages::call_query_for_extension(ext) {
            extract_calls(source, &call_config, &mut files[*idx].defs);
        }
    }

    let def_index = build_def_index(&files);
    resolve_calls(&mut files, &def_index);

    let graph_data = compute_def_graph(&files);

    let n = files.len();
    let (callers, callees) = build_neighbor_lists(n, &graph_data.file_edges);

    // Denser graphs get a larger alpha; density = edges / possible edges.
    #[expect(clippy::cast_precision_loss, reason = "graph sizes fit in f32")]
    let density = if n > 1 {
        graph_data.file_edges.len() as f32 / (n as f32 * (n as f32 - 1.0))
    } else {
        0.0
    };
    let alpha = 0.3f32.mul_add(density.min(1.0), 0.5);

    Ok(RepoGraph {
        files,
        edges: graph_data.file_edges,
        base_ranks: graph_data.base_ranks,
        callers,
        callees,
        def_edges: graph_data.def_edges,
        def_ranks: graph_data.def_ranks,
        def_callers: graph_data.def_callers,
        def_callees: graph_data.def_callees,
        def_offsets: graph_data.offsets,
        alpha,
    })
}
914
915impl RepoGraph {
916 #[must_use]
918 pub fn def_rank(&self, did: DefId) -> f32 {
919 let flat = self.def_offsets[did.0 as usize] + did.1 as usize;
920 self.def_ranks.get(flat).copied().unwrap_or(0.0)
921 }
922
923 #[must_use]
925 pub fn find_def(&self, file_path: &str, def_name: &str) -> Option<DefId> {
926 for (file_idx, file) in self.files.iter().enumerate() {
927 if file.path == file_path {
928 for (def_idx, def) in file.defs.iter().enumerate() {
929 if def.name == def_name {
930 #[expect(clippy::cast_possible_truncation)]
931 return Some((file_idx as u32, def_idx as u16));
932 }
933 }
934 }
935 }
936 None
937 }
938}
939
940fn build_neighbor_lists(n: usize, edges: &[(u32, u32, u32)]) -> (Vec<Vec<u32>>, Vec<Vec<u32>>) {
942 let mut incoming: Vec<Vec<(u32, u32)>> = vec![vec![]; n];
943 let mut outgoing: Vec<Vec<(u32, u32)>> = vec![vec![]; n];
944
945 for &(src, dst, w) in edges {
946 let (s, d) = (src as usize, dst as usize);
947 if s < n && d < n {
948 incoming[d].push((src, w));
949 outgoing[s].push((dst, w));
950 }
951 }
952
953 let trim = |lists: &mut [Vec<(u32, u32)>]| -> Vec<Vec<u32>> {
955 lists
956 .iter_mut()
957 .map(|list| {
958 list.sort_by(|a, b| b.1.cmp(&a.1));
959 list.iter()
960 .take(MAX_NEIGHBORS)
961 .map(|(idx, _)| *idx)
962 .collect()
963 })
964 .collect()
965 };
966
967 (trim(&mut incoming), trim(&mut outgoing))
968}
969
970#[must_use]
985pub fn render(graph: &RepoGraph, max_tokens: usize, focus: Option<usize>) -> String {
986 let n = graph.files.len();
987 if n == 0 {
988 return String::new();
989 }
990
991 let ranks = if focus.is_some() {
993 pagerank(n, &graph.edges, focus)
994 } else {
995 graph.base_ranks.clone()
996 };
997
998 let mut sorted: Vec<usize> = (0..n).collect();
1000 sorted.sort_by(|&a, &b| ranks[b].total_cmp(&ranks[a]));
1001
1002 let mut output = String::new();
1003 let mut used_tokens = 0;
1004 let max_chars = max_tokens * CHARS_PER_TOKEN;
1005
1006 for (rank_pos, &file_idx) in sorted.iter().enumerate() {
1007 if used_tokens >= max_tokens {
1008 break;
1009 }
1010
1011 let file = &graph.files[file_idx];
1012 let score = ranks[file_idx];
1013 #[expect(clippy::cast_precision_loss, reason = "file counts fit in f32")]
1014 let percentile = (rank_pos as f32) / (n as f32);
1015
1016 let section = if percentile < 0.1 {
1017 render_tier0(graph, file_idx, file, score)
1018 } else if percentile < 0.3 {
1019 render_tier1(file, score)
1020 } else if percentile < 0.7 {
1021 render_tier2(file, score)
1022 } else {
1023 render_tier3(file)
1024 };
1025
1026 let section_chars = section.len();
1027 if used_tokens > 0 && used_tokens + section_chars / CHARS_PER_TOKEN > max_tokens {
1028 let path_line = format!("{}\n", file.path);
1030 let path_tokens = path_line.len() / CHARS_PER_TOKEN;
1031 if used_tokens + path_tokens <= max_tokens {
1032 output.push_str(&path_line);
1033 }
1034 break;
1035 }
1036
1037 output.push_str(§ion);
1038 used_tokens = output.len().min(max_chars) / CHARS_PER_TOKEN;
1039 }
1040
1041 output
1042}
1043
1044fn render_tier0(graph: &RepoGraph, file_idx: usize, file: &FileNode, score: f32) -> String {
1046 let mut out = format!("## {} (rank: {score:.4})\n", file.path);
1047
1048 if file_idx < graph.callers.len() && !graph.callers[file_idx].is_empty() {
1050 let _ = write!(out, " called by: ");
1051 let names: Vec<&str> = graph.callers[file_idx]
1052 .iter()
1053 .filter_map(|&idx| graph.files.get(idx as usize).map(|f| f.path.as_str()))
1054 .collect();
1055 let _ = writeln!(out, "{}", names.join(", "));
1056 }
1057
1058 if file_idx < graph.callees.len() && !graph.callees[file_idx].is_empty() {
1060 let _ = write!(out, " calls: ");
1061 let names: Vec<&str> = graph.callees[file_idx]
1062 .iter()
1063 .filter_map(|&idx| graph.files.get(idx as usize).map(|f| f.path.as_str()))
1064 .collect();
1065 let _ = writeln!(out, "{}", names.join(", "));
1066 }
1067
1068 for def in &file.defs {
1070 let scope_prefix = if def.scope.is_empty() {
1071 String::new()
1072 } else {
1073 format!("{} > ", def.scope)
1074 };
1075 if let Some(sig) = &def.signature {
1076 let _ = writeln!(out, " {scope_prefix}{} {sig}", def.kind);
1077 } else {
1078 let _ = writeln!(out, " {scope_prefix}{} {}", def.kind, def.name);
1079 }
1080 }
1081 let _ = writeln!(out);
1082 out
1083}
1084
1085fn render_tier1(file: &FileNode, score: f32) -> String {
1087 let mut out = format!("## {} (rank: {score:.4})\n", file.path);
1088 for def in &file.defs {
1089 if let Some(sig) = &def.signature {
1090 let _ = writeln!(out, " {sig}");
1091 } else {
1092 let _ = writeln!(out, " {} {}", def.kind, def.name);
1093 }
1094 }
1095 let _ = writeln!(out);
1096 out
1097}
1098
1099fn render_tier2(file: &FileNode, score: f32) -> String {
1101 let mut out = format!("{} (rank: {score:.4})", file.path);
1102 if !file.defs.is_empty() {
1103 let names: Vec<String> = file
1104 .defs
1105 .iter()
1106 .map(|d| format!("{}:{}", d.kind, d.name))
1107 .collect();
1108 let _ = write!(out, " -- {}", names.join(", "));
1109 }
1110 let _ = writeln!(out);
1111 out
1112}
1113
1114fn render_tier3(file: &FileNode) -> String {
1116 format!("{}\n", file.path)
1117}
1118
#[cfg(test)]
mod tests {
    use super::*;

    /// A 3-cycle is symmetric, so every node should converge to ~1/3.
    #[test]
    fn test_pagerank_simple() {
        let edges = vec![(0, 1, 1), (1, 2, 1), (2, 0, 1)];
        let ranks = pagerank(3, &edges, None);

        assert_eq!(ranks.len(), 3);
        let sum: f32 = ranks.iter().sum();
        assert!(
            (sum - 1.0).abs() < 0.01,
            "ranks should sum to ~1.0, got {sum}"
        );

        let expected = 1.0 / 3.0;
        for (i, &r) in ranks.iter().enumerate() {
            assert!(
                (r - expected).abs() < 0.05,
                "rank[{i}] = {r}, expected ~{expected}"
            );
        }
    }

    /// The hub of a star graph should collect the highest rank.
    #[test]
    fn test_pagerank_star() {
        let edges = vec![(0, 3, 1), (1, 3, 1), (2, 3, 1)];
        let ranks = pagerank(4, &edges, None);

        assert_eq!(ranks.len(), 4);
        let max_idx = ranks
            .iter()
            .enumerate()
            .max_by(|a, b| a.1.total_cmp(b.1))
            .unwrap()
            .0;
        assert_eq!(max_idx, 3, "node 3 should have highest rank");
        assert!(
            ranks[3] > ranks[0],
            "rank[3]={} should be > rank[0]={}",
            ranks[3],
            ranks[0]
        );
    }

    /// Biasing the teleport vector should boost the focused node's rank.
    #[test]
    fn test_pagerank_topic_sensitive() {
        let edges = vec![(0, 1, 1), (1, 2, 1)];
        let uniform_ranks = pagerank(3, &edges, None);
        let biased_ranks = pagerank(3, &edges, Some(0));

        assert!(
            biased_ranks[0] > uniform_ranks[0],
            "focused rank[0]={} should be > uniform rank[0]={}",
            biased_ranks[0],
            uniform_ranks[0]
        );
    }

    /// Zero nodes yields an empty rank vector, not a panic.
    #[test]
    fn test_pagerank_empty() {
        let ranks = pagerank(0, &[], None);
        assert!(ranks.is_empty());
    }

    /// Exercises tiered rendering and the token-budget cutoff.
    #[test]
    fn test_render_tiers() {
        // Ten files, one signed definition each; every edge points at file 0,
        // so file 0 should land in tier 0.
        let files: Vec<FileNode> = (0..10)
            .map(|i| FileNode {
                path: format!("src/file_{i}.rs"),
                defs: vec![Definition {
                    name: format!("func_{i}"),
                    kind: "function_item".to_string(),
                    start_line: 1,
                    end_line: 5,
                    scope: String::new(),
                    signature: Some(format!("func_{i}(x: i32) -> i32")),
                    start_byte: 0,
                    end_byte: 0,
                    calls: vec![],
                }],
                imports: vec![],
            })
            .collect();

        let edges: Vec<(u32, u32, u32)> = (1..10).map(|i| (i, 0, 1)).collect();
        let base_ranks = pagerank(10, &edges, None);
        let (top_callers, top_callees) = build_neighbor_lists(10, &edges);

        let graph = RepoGraph {
            files,
            edges,
            base_ranks,
            callers: top_callers,
            callees: top_callees,
            def_edges: vec![],
            def_ranks: vec![],
            def_callers: vec![],
            def_callees: vec![],
            def_offsets: vec![0],
            alpha: 0.5,
        };

        let full = render(&graph, 10_000, None);
        assert!(
            full.contains("file_0"),
            "output should contain the top-ranked file"
        );
        assert!(
            full.contains("## src/file_0.rs"),
            "top file should have tier 0 heading"
        );

        let small = render(&graph, 10, None);
        assert!(
            !small.is_empty(),
            "even tiny budget should produce some output"
        );
        let full_lines = full.lines().count();
        let small_lines = small.lines().count();
        assert!(
            small_lines < full_lines,
            "small budget ({small_lines} lines) should have fewer lines than full ({full_lines})"
        );
    }

    /// An empty graph renders to an empty string.
    #[test]
    fn test_render_empty_graph() {
        let graph = RepoGraph {
            files: vec![],
            edges: vec![],
            base_ranks: vec![],
            callers: vec![],
            callees: vec![],
            def_edges: vec![],
            def_ranks: vec![],
            def_callers: vec![],
            def_callees: vec![],
            def_offsets: vec![0],
            alpha: 0.5,
        };
        let output = render(&graph, 1000, None);
        assert!(output.is_empty(), "empty graph should render empty string");
    }

    /// End-to-end build over the shared test fixtures directory.
    #[test]
    fn test_build_graph_on_fixtures() {
        let fixtures = Path::new(env!("CARGO_MANIFEST_DIR"))
            .parent()
            .unwrap()
            .parent()
            .unwrap()
            .join("tests")
            .join("fixtures");

        let graph = build_graph(&fixtures).expect("build_graph should succeed on fixtures");

        assert!(
            !graph.files.is_empty(),
            "graph should contain files from fixtures"
        );

        let rs_file = graph.files.iter().find(|f| f.path.ends_with("sample.rs"));
        assert!(rs_file.is_some(), "should find sample.rs");
        let rs_file = rs_file.unwrap();
        assert!(
            !rs_file.defs.is_empty(),
            "sample.rs should have definitions"
        );
        assert!(
            rs_file.defs.iter().any(|d| d.name == "hello"),
            "should find 'hello' function in sample.rs"
        );

        let py_file = graph.files.iter().find(|f| f.path.ends_with("sample.py"));
        assert!(py_file.is_some(), "should find sample.py");
        let py_file = py_file.unwrap();
        assert!(
            !py_file.defs.is_empty(),
            "sample.py should have definitions"
        );
        assert!(
            py_file.defs.iter().any(|d| d.name == "greet"),
            "should find 'greet' function in sample.py"
        );

        assert_eq!(graph.base_ranks.len(), graph.files.len());
        let sum: f32 = graph.base_ranks.iter().sum();
        assert!(
            (sum - 1.0).abs() < 0.01,
            "PageRank scores should sum to ~1.0, got {sum}"
        );
    }

    /// Both `use` statements should be captured verbatim.
    #[test]
    fn test_extract_imports_rust() {
        let source = "use crate::foo::bar;\nuse std::collections::HashMap;\n";
        let (lang, query) = import_query_for_extension("rs").unwrap();
        let imports = extract_imports(source, &lang, &query);
        assert_eq!(imports.len(), 2);
        assert!(imports[0].contains("crate::foo::bar"));
    }

    /// `crate::` paths resolve against the crate's `src/` directory.
    #[test]
    fn test_resolve_rust_crate_import() {
        let root = PathBuf::from("/project");
        let file_path = PathBuf::from("/project/src/main.rs");
        let mut file_index = HashMap::new();
        file_index.insert(PathBuf::from("/project/src/foo/bar.rs"), 1);
        file_index.insert(PathBuf::from("/project/src/main.rs"), 0);

        let result = resolve_rust_import("use crate::foo::bar;", &file_path, &root, &file_index);
        assert_eq!(result, Some(1));
    }

    /// std / external-crate imports must never resolve to repo files.
    #[test]
    fn test_resolve_rust_external_crate_dropped() {
        let root = PathBuf::from("/project");
        let file_path = PathBuf::from("/project/src/main.rs");
        let file_index = HashMap::new();

        let result = resolve_rust_import(
            "use std::collections::HashMap;",
            &file_path,
            &root,
            &file_index,
        );
        assert_eq!(result, None, "external crate imports should be dropped");
    }

    /// Incoming/outgoing neighbor lists should reflect the edge set.
    #[test]
    fn test_neighbor_lists() {
        let edges = vec![(0, 1, 1), (0, 2, 1), (1, 2, 1)];
        let (incoming, outgoing) = build_neighbor_lists(3, &edges);

        assert!(incoming[2].contains(&0));
        assert!(incoming[2].contains(&1));

        assert!(outgoing[0].contains(&1));
        assert!(outgoing[0].contains(&2));
    }

    /// Smoke/perf harness over the whole workspace; ignored by default.
    #[test]
    #[ignore = "runs on full ripvec codebase; use --nocapture to see output"]
    fn test_full_repo_map() {
        use std::time::Instant;

        let root = Path::new(env!("CARGO_MANIFEST_DIR"))
            .parent()
            .unwrap()
            .parent()
            .unwrap();

        let t0 = Instant::now();
        let graph = build_graph(root).expect("build_graph on ripvec root");
        let build_ms = t0.elapsed().as_secs_f64() * 1000.0;

        let t1 = Instant::now();
        let rendered = render(&graph, 2000, None);
        let render_ms = t1.elapsed().as_secs_f64() * 1000.0;

        let t2 = Instant::now();
        let focus_idx = graph
            .base_ranks
            .iter()
            .enumerate()
            .max_by(|a, b| a.1.total_cmp(b.1))
            .map(|(i, _)| i);
        let focused = render(&graph, 2000, focus_idx);
        let focus_ms = t2.elapsed().as_secs_f64() * 1000.0;

        eprintln!("\n=== Repo Map Performance ===");
        eprintln!(
            "Files: {}, Edges: {}, Defs: {}",
            graph.files.len(),
            graph.edges.len(),
            graph.files.iter().map(|f| f.defs.len()).sum::<usize>()
        );
        eprintln!("build_graph: {build_ms:.1}ms (walk + parse + resolve + PageRank)");
        eprintln!(
            "render(default): {render_ms:.3}ms ({} chars, ~{} tokens)",
            rendered.len(),
            rendered.len() / 4
        );
        eprintln!(
            "render(focused): {focus_ms:.3}ms ({} chars, ~{} tokens)",
            focused.len(),
            focused.len() / 4
        );

        eprintln!("\nTop 5 by PageRank:");
        let mut ranked: Vec<(usize, f32)> = graph.base_ranks.iter().copied().enumerate().collect();
        ranked.sort_by(|a, b| b.1.total_cmp(&a.1));
        for (i, rank) in ranked.iter().take(5) {
            eprintln!(" {:.4} {}", rank, graph.files[*i].path);
        }

        eprintln!("\n=== Default Render ===\n{rendered}");
        eprintln!(
            "\n=== Focused Render (on {}) ===\n{focused}",
            focus_idx
                .map(|i| graph.files[i].path.as_str())
                .unwrap_or("none")
        );
    }
}