// ripvec_core/repo_map.rs

//! `PageRank`-weighted structural overview of a codebase.
//!
//! Builds a dependency graph from tree-sitter definition and import extraction,
//! ranks files by importance using `PageRank` (standard or topic-sensitive), and
//! renders a budget-constrained overview with tiered detail levels.

use std::collections::HashMap;
use std::fmt::Write as _;
use std::path::{Component, Path, PathBuf};

use rkyv::{Archive, Deserialize as RkyvDeserialize, Serialize as RkyvSerialize};
use streaming_iterator::StreamingIterator;
use tree_sitter::{Parser, Query, QueryCursor};

use crate::languages;
use crate::walk;
17
18// ── Data Structures ──────────────────────────────────────────────────
19
20/// Persisted dependency graph with `PageRank` scores.
21#[derive(Debug, Clone, Archive, RkyvSerialize, RkyvDeserialize)]
22pub struct RepoGraph {
23    /// Files in the repository with definitions, imports, and calls.
24    pub files: Vec<FileNode>,
25    /// File-level edges (derived from def-level call edges).
26    pub edges: Vec<(u32, u32, u32)>,
27    /// File-level `PageRank` scores (aggregated from def-level).
28    pub base_ranks: Vec<f32>,
29    /// File-level callers (indices into `files`).
30    pub callers: Vec<Vec<u32>>,
31    /// File-level callees (indices into `files`).
32    pub callees: Vec<Vec<u32>>,
33    /// Definition-level call edges: `(caller_def, callee_def, weight)`.
34    pub def_edges: Vec<(DefId, DefId, u32)>,
35    /// Definition-level `PageRank` scores (flattened: `offsets[file_idx] + def_idx`).
36    pub def_ranks: Vec<f32>,
37    /// Definition-level callers (flattened, parallel to `def_ranks`).
38    pub def_callers: Vec<Vec<DefId>>,
39    /// Definition-level callees (flattened, parallel to `def_ranks`).
40    pub def_callees: Vec<Vec<DefId>>,
41    /// Prefix-sum offsets for flattening `DefId` to linear index.
42    pub def_offsets: Vec<usize>,
43    /// Auto-tuned alpha for search boost.
44    pub alpha: f32,
45}
46
47/// A file in the repository with its definitions and imports.
48#[derive(Debug, Clone, Archive, RkyvSerialize, RkyvDeserialize)]
49pub struct FileNode {
50    /// Relative path from the repository root.
51    pub path: String,
52    /// Definitions (functions, structs, classes, etc.) extracted from this file.
53    pub defs: Vec<Definition>,
54    /// Import references extracted from this file.
55    pub imports: Vec<ImportRef>,
56}
57
58/// A definition extracted from a source file.
59#[derive(Debug, Clone, Archive, RkyvSerialize, RkyvDeserialize)]
60pub struct Definition {
61    /// Name of the definition (e.g., function name, class name).
62    pub name: String,
63    /// Kind of syntax node (e.g., `function_item`, `class_definition`).
64    pub kind: String,
65    /// 1-based start line number.
66    pub start_line: u32,
67    /// 1-based end line number.
68    pub end_line: u32,
69    /// Scope chain (e.g., `"impl_item Foo > fn bar"`).
70    pub scope: String,
71    /// Function/method signature, if available.
72    pub signature: Option<String>,
73    /// Byte offset of this definition's start in the source file.
74    pub start_byte: u32,
75    /// Byte offset of this definition's end in the source file.
76    pub end_byte: u32,
77    /// Call sites within this definition's body.
78    pub calls: Vec<CallRef>,
79}
80
81/// An import reference extracted from a source file.
82#[derive(Debug, Clone, Archive, RkyvSerialize, RkyvDeserialize)]
83pub struct ImportRef {
84    /// Raw import path as written in source (e.g., `crate::foo::bar`).
85    pub raw_path: String,
86    /// Resolved file index in [`RepoGraph::files`], if resolution succeeded.
87    pub resolved_idx: Option<u32>,
88}
89
90/// Unique identifier for a definition: (file index, definition index within file).
91pub type DefId = (u32, u16);
92
93/// A call site extracted from a definition body.
94#[derive(Debug, Clone, Archive, RkyvSerialize, RkyvDeserialize)]
95pub struct CallRef {
96    /// Callee function/method name.
97    pub name: String,
98    /// Byte offset of the call in the source file (for scoping to definitions).
99    pub byte_offset: u32,
100    /// Resolved target definition, if resolution succeeded.
101    pub resolved: Option<DefId>,
102}
103
104// ── Constants ────────────────────────────────────────────────────────
105
106/// `PageRank` damping factor.
107const DAMPING: f32 = 0.85;
108
109/// `PageRank` convergence threshold.
110const EPSILON: f32 = 1e-6;
111
112/// Maximum `PageRank` iterations.
113const MAX_ITERATIONS: usize = 100;
114
115/// Maximum callers/callees stored per file.
116const MAX_NEIGHBORS: usize = 5;
117
118/// Approximate characters per token for budget estimation.
119const CHARS_PER_TOKEN: usize = 4;
120
121// ── Import Queries ───────────────────────────────────────────────────
122
123/// Compile a tree-sitter import query for the given extension.
124///
125/// Returns `None` for unsupported extensions.
126fn import_query_for_extension(ext: &str) -> Option<(tree_sitter::Language, Query)> {
127    let (lang, query_str): (tree_sitter::Language, &str) = match ext {
128        "rs" => (
129            tree_sitter_rust::LANGUAGE.into(),
130            "(use_declaration) @import",
131        ),
132        "py" => (
133            tree_sitter_python::LANGUAGE.into(),
134            concat!(
135                "(import_statement) @import\n",
136                "(import_from_statement) @import",
137            ),
138        ),
139        "js" | "jsx" => (
140            tree_sitter_javascript::LANGUAGE.into(),
141            "(import_statement source: (string) @import_path) @import",
142        ),
143        "ts" => (
144            tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into(),
145            "(import_statement source: (string) @import_path) @import",
146        ),
147        "tsx" => (
148            tree_sitter_typescript::LANGUAGE_TSX.into(),
149            "(import_statement source: (string) @import_path) @import",
150        ),
151        "go" => (
152            tree_sitter_go::LANGUAGE.into(),
153            "(import_spec path: (interpreted_string_literal) @import_path) @import",
154        ),
155        // Ruby: require statements.
156        "rb" => (
157            tree_sitter_ruby::LANGUAGE.into(),
158            "(call method: (identifier) @_method arguments: (argument_list (string (string_content) @import_path)) (#eq? @_method \"require\")) @import",
159        ),
160        _ => return None,
161    };
162    let query = match Query::new(&lang, query_str) {
163        Ok(q) => q,
164        Err(e) => {
165            tracing::warn!(ext, %e, "import query compilation failed — language may be ABI-incompatible");
166            return None;
167        }
168    };
169    Some((lang, query))
170}
171
172/// Extract import paths from source using tree-sitter.
173fn extract_imports(
174    source: &str,
175    lang: &tree_sitter::Language,
176    import_query: &Query,
177) -> Vec<String> {
178    let mut parser = Parser::new();
179    if parser.set_language(lang).is_err() {
180        return vec![];
181    }
182    let Some(tree) = parser.parse(source, None) else {
183        return vec![];
184    };
185
186    let mut cursor = QueryCursor::new();
187    let mut imports = Vec::new();
188    let mut matches = cursor.matches(import_query, tree.root_node(), source.as_bytes());
189
190    while let Some(m) = matches.next() {
191        // Prefer @import_path capture (JS/TS/Go), fall back to full @import text
192        let mut import_path_text = None;
193        let mut import_text = None;
194
195        for cap in m.captures {
196            let cap_name = &import_query.capture_names()[cap.index as usize];
197            let text = &source[cap.node.start_byte()..cap.node.end_byte()];
198            if *cap_name == "import_path" {
199                import_path_text = Some(text.trim_matches(|c| c == '"' || c == '\''));
200            } else if *cap_name == "import" {
201                import_text = Some(text);
202            }
203        }
204
205        if let Some(path) = import_path_text {
206            imports.push(path.to_string());
207        } else if let Some(text) = import_text {
208            imports.push(text.to_string());
209        }
210    }
211
212    imports
213}
214
215// ── Import Resolution ────────────────────────────────────────────────
216
217/// Resolve a Rust `use` path to a file index in the file map.
218///
219/// Handles `crate::`, `self::`, and `super::` prefixes. External crate
220/// imports are dropped (returns `None`).
221fn resolve_rust_import(
222    raw: &str,
223    file_path: &Path,
224    root: &Path,
225    file_index: &HashMap<PathBuf, usize>,
226) -> Option<usize> {
227    // Extract the module path from `use crate::foo::bar;` or `use crate::foo::bar::Baz;`
228    let trimmed = raw
229        .trim()
230        .trim_start_matches("use ")
231        .trim_end_matches(';')
232        .trim();
233
234    let segments: Vec<&str> = trimmed.split("::").collect();
235    if segments.is_empty() {
236        return None;
237    }
238
239    // Determine the base directory and skip prefix segments
240    let (base, skip) = match segments[0] {
241        "crate" => {
242            // Find the nearest Cargo.toml ancestor to determine the crate root.
243            // In a workspace, `crate::foo` resolves relative to the crate's src/,
244            // not the workspace root.
245            let mut dir = file_path.parent();
246            let crate_root = loop {
247                match dir {
248                    Some(d) if d.join("Cargo.toml").exists() => break d.join("src"),
249                    Some(d) => dir = d.parent(),
250                    None => break root.join("src"), // fallback
251                }
252            };
253            (crate_root, 1)
254        }
255        "self" => {
256            let dir = file_path.parent()?;
257            (dir.to_path_buf(), 1)
258        }
259        "super" => {
260            let dir = file_path.parent()?.parent()?;
261            (dir.to_path_buf(), 1)
262        }
263        // External crate — drop
264        _ => return None,
265    };
266
267    // Build candidate paths from the remaining segments.
268    // Try progressively shorter prefixes since the last segments
269    // may be items (struct, fn) rather than modules.
270    let path_segments = &segments[skip..];
271    for end in (1..=path_segments.len()).rev() {
272        let mut candidate = base.clone();
273        for seg in &path_segments[..end] {
274            // Strip glob patterns like `{Foo, Bar}`
275            let clean = seg.split('{').next().unwrap_or(seg).trim();
276            if !clean.is_empty() {
277                candidate.push(clean);
278            }
279        }
280
281        // Try file.rs
282        let as_file = candidate.with_extension("rs");
283        if let Some(&idx) = file_index.get(&as_file) {
284            return Some(idx);
285        }
286
287        // Try dir/mod.rs
288        let as_mod = candidate.join("mod.rs");
289        if let Some(&idx) = file_index.get(&as_mod) {
290            return Some(idx);
291        }
292    }
293
294    None
295}
296
297/// Resolve an import path to a file index based on file extension.
298fn resolve_import(
299    raw: &str,
300    ext: &str,
301    file_path: &Path,
302    root: &Path,
303    file_index: &HashMap<PathBuf, usize>,
304) -> Option<usize> {
305    match ext {
306        "rs" => resolve_rust_import(raw, file_path, root, file_index),
307        "py" => resolve_python_import(raw, root, file_index),
308        "js" | "jsx" | "ts" | "tsx" => resolve_js_import(raw, file_path, file_index),
309        // Go imports use full package paths — skip local resolution
310        _ => None,
311    }
312}
313
314/// Resolve a Python import to a file index.
315///
316/// Handles `import foo.bar` and `from foo.bar import baz` patterns.
317fn resolve_python_import(
318    raw: &str,
319    root: &Path,
320    file_index: &HashMap<PathBuf, usize>,
321) -> Option<usize> {
322    let module_path = if let Some(rest) = raw.strip_prefix("from ") {
323        rest.split_whitespace().next()?
324    } else if let Some(rest) = raw.strip_prefix("import ") {
325        rest.split_whitespace().next()?
326    } else {
327        return None;
328    };
329
330    let rel_path: PathBuf = module_path.split('.').collect();
331    let as_file = root.join(&rel_path).with_extension("py");
332    if let Some(&idx) = file_index.get(&as_file) {
333        return Some(idx);
334    }
335
336    let as_init = root.join(&rel_path).join("__init__.py");
337    file_index.get(&as_init).copied()
338}
339
340/// Resolve a JS/TS import to a file index.
341///
342/// Handles relative paths like `./foo` or `../bar`.
343fn resolve_js_import(
344    raw: &str,
345    file_path: &Path,
346    file_index: &HashMap<PathBuf, usize>,
347) -> Option<usize> {
348    if !raw.starts_with('.') {
349        return None;
350    }
351
352    let dir = file_path.parent()?;
353    let candidate = dir.join(raw);
354
355    for ext in &["js", "jsx", "ts", "tsx"] {
356        let with_ext = candidate.with_extension(ext);
357        if let Some(&idx) = file_index.get(&with_ext) {
358            return Some(idx);
359        }
360    }
361
362    for ext in &["js", "jsx", "ts", "tsx"] {
363        let index_file = candidate.join("index").with_extension(ext);
364        if let Some(&idx) = file_index.get(&index_file) {
365            return Some(idx);
366        }
367    }
368
369    None
370}
371
372// ── Extraction ───────────────────────────────────────────────────────
373
374/// Extract definitions from a source file using tree-sitter.
375fn extract_definitions(source: &str, config: &languages::LangConfig) -> Vec<Definition> {
376    let mut parser = Parser::new();
377    if parser.set_language(&config.language).is_err() {
378        return vec![];
379    }
380    let Some(tree) = parser.parse(source, None) else {
381        return vec![];
382    };
383
384    let mut cursor = QueryCursor::new();
385    let mut defs = Vec::new();
386    let mut matches = cursor.matches(&config.query, tree.root_node(), source.as_bytes());
387
388    while let Some(m) = matches.next() {
389        let mut name = String::new();
390        let mut def_node = None;
391
392        for cap in m.captures {
393            let cap_name = &config.query.capture_names()[cap.index as usize];
394            if *cap_name == "name" {
395                name = source[cap.node.start_byte()..cap.node.end_byte()].to_string();
396            } else if *cap_name == "def" {
397                def_node = Some(cap.node);
398            }
399        }
400
401        if let Some(node) = def_node {
402            let scope = crate::chunk::build_scope_chain(node, source);
403            let signature = crate::chunk::extract_signature(node, source);
404            #[expect(clippy::cast_possible_truncation, reason = "line numbers fit in u32")]
405            let start_line = node.start_position().row as u32 + 1;
406            #[expect(clippy::cast_possible_truncation, reason = "line numbers fit in u32")]
407            let end_line = node.end_position().row as u32 + 1;
408            #[expect(clippy::cast_possible_truncation, reason = "byte offsets fit in u32")]
409            let start_byte = node.start_byte() as u32;
410            #[expect(clippy::cast_possible_truncation, reason = "byte offsets fit in u32")]
411            let end_byte = node.end_byte() as u32;
412            defs.push(Definition {
413                name,
414                kind: node.kind().to_string(),
415                start_line,
416                end_line,
417                scope,
418                signature,
419                start_byte,
420                end_byte,
421                calls: vec![],
422            });
423        }
424    }
425
426    defs
427}
428
429// ── Call Extraction & Resolution ────────────────────────────────────
430
431/// Extract call sites from a source file and assign them to definitions.
432///
433/// Uses the language's call query to find all call expressions, then
434/// assigns each call to the definition whose byte range contains it.
435/// Calls outside any definition body (module-level) are ignored.
436fn extract_calls(source: &str, call_config: &languages::CallConfig, defs: &mut [Definition]) {
437    let mut parser = Parser::new();
438    if parser.set_language(&call_config.language).is_err() {
439        return;
440    }
441    let Some(tree) = parser.parse(source, None) else {
442        return;
443    };
444
445    let mut cursor = QueryCursor::new();
446    let mut matches = cursor.matches(&call_config.query, tree.root_node(), source.as_bytes());
447
448    while let Some(m) = matches.next() {
449        let mut callee_name = None;
450        let mut call_byte = 0u32;
451
452        for cap in m.captures {
453            let cap_name = &call_config.query.capture_names()[cap.index as usize];
454            if *cap_name == "callee" {
455                callee_name = Some(source[cap.node.start_byte()..cap.node.end_byte()].to_string());
456                #[expect(clippy::cast_possible_truncation, reason = "byte offsets fit in u32")]
457                {
458                    call_byte = cap.node.start_byte() as u32;
459                }
460            }
461        }
462
463        if let Some(name) = callee_name {
464            // Assign to the enclosing definition by byte range
465            if let Some(def) = defs
466                .iter_mut()
467                .find(|d| d.start_byte <= call_byte && call_byte < d.end_byte)
468            {
469                // Skip self-recursive calls
470                if def.name != name {
471                    def.calls.push(CallRef {
472                        name,
473                        byte_offset: call_byte,
474                        resolved: None,
475                    });
476                }
477            }
478            // Calls outside any definition are ignored (module-level init)
479        }
480    }
481}
482
483/// Build an index from definition name to list of `DefId`s.
484fn build_def_index(files: &[FileNode]) -> HashMap<String, Vec<DefId>> {
485    let mut index: HashMap<String, Vec<DefId>> = HashMap::new();
486    for (file_idx, file) in files.iter().enumerate() {
487        for (def_idx, def) in file.defs.iter().enumerate() {
488            #[expect(clippy::cast_possible_truncation)]
489            let did: DefId = (file_idx as u32, def_idx as u16);
490            index.entry(def.name.clone()).or_default().push(did);
491        }
492    }
493    index
494}
495
496/// Resolve call references to target definitions.
497///
498/// Strategy:
499/// 1. Same-file: prefer definitions in the caller's own file.
500/// 2. Imported-file: check definitions in files this file imports.
501/// 3. Unresolved: leave `resolved` as `None`.
502fn resolve_calls(files: &mut [FileNode], def_index: &HashMap<String, Vec<DefId>>) {
503    // Pre-compute imported file sets for each file
504    let imported_files: Vec<std::collections::HashSet<u32>> = files
505        .iter()
506        .map(|f| {
507            f.imports
508                .iter()
509                .filter_map(|imp| imp.resolved_idx)
510                .collect()
511        })
512        .collect();
513
514    for file_idx in 0..files.len() {
515        for def_idx in 0..files[file_idx].defs.len() {
516            for call_idx in 0..files[file_idx].defs[def_idx].calls.len() {
517                let call_name = files[file_idx].defs[def_idx].calls[call_idx].name.clone();
518
519                let Some(candidates) = def_index.get(&call_name) else {
520                    continue;
521                };
522
523                // Priority 1: same file
524                #[expect(clippy::cast_possible_truncation)]
525                let file_idx_u32 = file_idx as u32;
526                if let Some(&did) = candidates.iter().find(|(f, _)| *f == file_idx_u32) {
527                    files[file_idx].defs[def_idx].calls[call_idx].resolved = Some(did);
528                    continue;
529                }
530
531                // Priority 2: imported file
532                if let Some(&did) = candidates
533                    .iter()
534                    .find(|(f, _)| imported_files[file_idx].contains(f))
535                {
536                    files[file_idx].defs[def_idx].calls[call_idx].resolved = Some(did);
537                }
538                // Priority 3: unresolved — leave as None
539            }
540        }
541    }
542}
543
544/// Compute a prefix-sum offset table for flattening `DefId`s to linear indices.
545fn def_offsets(files: &[FileNode]) -> Vec<usize> {
546    let mut offsets = Vec::with_capacity(files.len() + 1);
547    offsets.push(0);
548    for file in files {
549        offsets.push(offsets.last().unwrap() + file.defs.len());
550    }
551    offsets
552}
553
554/// Flatten a `DefId` to a linear index using the offset table.
555fn flatten_def_id(offsets: &[usize], did: DefId) -> usize {
556    offsets[did.0 as usize] + did.1 as usize
557}
558
559/// Build top-N caller and callee lists for each definition (flattened).
560fn build_def_neighbor_lists(
561    n: usize,
562    edges: &[(u32, u32, u32)],
563    offsets: &[usize],
564) -> (Vec<Vec<DefId>>, Vec<Vec<DefId>>) {
565    let mut incoming: Vec<Vec<(u32, u32)>> = vec![vec![]; n];
566    let mut outgoing: Vec<Vec<(u32, u32)>> = vec![vec![]; n];
567
568    for &(src, dst, w) in edges {
569        let (s, d) = (src as usize, dst as usize);
570        if s < n && d < n {
571            incoming[d].push((src, w));
572            outgoing[s].push((dst, w));
573        }
574    }
575
576    // Convert flat index back to DefId
577    let to_def_id = |flat: u32| -> DefId {
578        let flat_usize = flat as usize;
579        let file_idx = offsets.partition_point(|&o| o <= flat_usize) - 1;
580        let def_idx = flat_usize - offsets[file_idx];
581        #[expect(clippy::cast_possible_truncation)]
582        (file_idx as u32, def_idx as u16)
583    };
584
585    let callers = incoming
586        .into_iter()
587        .map(|mut v| {
588            v.sort_by(|a, b| b.1.cmp(&a.1));
589            v.truncate(MAX_NEIGHBORS);
590            v.into_iter().map(|(idx, _)| to_def_id(idx)).collect()
591        })
592        .collect();
593
594    let callees = outgoing
595        .into_iter()
596        .map(|mut v| {
597            v.sort_by(|a, b| b.1.cmp(&a.1));
598            v.truncate(MAX_NEIGHBORS);
599            v.into_iter().map(|(idx, _)| to_def_id(idx)).collect()
600        })
601        .collect();
602
603    (callers, callees)
604}
605
606// ── PageRank ─────────────────────────────────────────────────────────
607
608/// Compute `PageRank` scores for a graph.
609///
610/// If `focus` is `Some(idx)`, computes topic-sensitive `PageRank` biased
611/// toward file `idx`. Otherwise computes standard (uniform) `PageRank`.
612///
613/// Returns one score per node, summing to 1.0.
614#[expect(
615    clippy::cast_precision_loss,
616    reason = "node count fits comfortably in f32"
617)]
618fn pagerank(n: usize, edges: &[(u32, u32, u32)], focus: Option<usize>) -> Vec<f32> {
619    if n == 0 {
620        return vec![];
621    }
622
623    // Build adjacency: out_edges[src] = [(dst, weight)]
624    let mut out_edges: Vec<Vec<(usize, f32)>> = vec![vec![]; n];
625    let mut out_weight: Vec<f32> = vec![0.0; n];
626
627    for &(src, dst, w) in edges {
628        let (s, d) = (src as usize, dst as usize);
629        if s < n && d < n {
630            #[expect(clippy::cast_possible_truncation, reason = "edge weights are small")]
631            let wf = f64::from(w) as f32;
632            out_edges[s].push((d, wf));
633            out_weight[s] += wf;
634        }
635    }
636
637    // Personalization vector: for topic-sensitive PageRank, blend
638    // 70% focus on the target file with 30% uniform. Pure focus
639    // (100%) starves unreachable nodes to rank=0 in sparse graphs.
640    let bias: Vec<f32> = if let Some(idx) = focus {
641        let uniform = 1.0 / n as f32;
642        let mut b = vec![0.3 * uniform; n];
643        if idx < n {
644            b[idx] += 0.7;
645        }
646        // Normalize to sum=1
647        let sum: f32 = b.iter().sum();
648        for v in &mut b {
649            *v /= sum;
650        }
651        b
652    } else {
653        vec![1.0 / n as f32; n]
654    };
655
656    let mut rank = vec![1.0 / n as f32; n];
657    let mut next_rank = vec![0.0_f32; n];
658
659    for _ in 0..MAX_ITERATIONS {
660        // Collect dangling mass (nodes with no outgoing edges)
661        let dangling: f32 = rank
662            .iter()
663            .enumerate()
664            .filter(|&(i, _)| out_edges[i].is_empty())
665            .map(|(_, &r)| r)
666            .sum();
667
668        // Distribute rank
669        for (i, nr) in next_rank.iter_mut().enumerate() {
670            *nr = (1.0 - DAMPING).mul_add(bias[i], DAMPING * dangling * bias[i]);
671        }
672
673        for (src, edges_list) in out_edges.iter().enumerate() {
674            if edges_list.is_empty() {
675                continue;
676            }
677            let src_rank = rank[src];
678            let total_w = out_weight[src];
679            for &(dst, w) in edges_list {
680                next_rank[dst] += DAMPING * src_rank * (w / total_w);
681            }
682        }
683
684        // Check convergence
685        let diff: f32 = rank
686            .iter()
687            .zip(next_rank.iter())
688            .map(|(a, b)| (a - b).abs())
689            .sum();
690
691        std::mem::swap(&mut rank, &mut next_rank);
692
693        if diff < EPSILON {
694            break;
695        }
696    }
697
698    rank
699}
700
701// ── Graph Building ───────────────────────────────────────────────────
702
703/// Intermediate result from definition-level graph computation.
704struct DefGraphData {
705    def_edges: Vec<(DefId, DefId, u32)>,
706    def_ranks: Vec<f32>,
707    def_callers: Vec<Vec<DefId>>,
708    def_callees: Vec<Vec<DefId>>,
709    offsets: Vec<usize>,
710    base_ranks: Vec<f32>,
711    file_edges: Vec<(u32, u32, u32)>,
712}
713
714/// Build definition-level edges, compute `PageRank`, and derive file-level data.
715fn compute_def_graph(files: &[FileNode]) -> DefGraphData {
716    // Build definition-level edge list from resolved calls
717    let mut def_edge_map: HashMap<(DefId, DefId), u32> = HashMap::new();
718    for (file_idx, file) in files.iter().enumerate() {
719        for (def_idx, def) in file.defs.iter().enumerate() {
720            #[expect(clippy::cast_possible_truncation)]
721            let caller_id: DefId = (file_idx as u32, def_idx as u16);
722            for call in &def.calls {
723                if let Some(callee_id) = call.resolved {
724                    *def_edge_map.entry((caller_id, callee_id)).or_insert(0) += 1;
725                }
726            }
727        }
728    }
729    let def_edges: Vec<(DefId, DefId, u32)> = def_edge_map
730        .into_iter()
731        .map(|((src, dst), w)| (src, dst, w))
732        .collect();
733
734    // Compute def-level PageRank
735    let offsets = def_offsets(files);
736    let n_defs = *offsets.last().unwrap_or(&0);
737
738    let flat_def_edges: Vec<(u32, u32, u32)> = def_edges
739        .iter()
740        .map(|(src, dst, w)| {
741            #[expect(clippy::cast_possible_truncation)]
742            (
743                flatten_def_id(&offsets, *src) as u32,
744                flatten_def_id(&offsets, *dst) as u32,
745                *w,
746            )
747        })
748        .collect();
749
750    let def_ranks = pagerank(n_defs, &flat_def_edges, None);
751
752    // Aggregate def ranks to file level
753    let base_ranks: Vec<f32> = files
754        .iter()
755        .enumerate()
756        .map(|(i, file)| {
757            let start = offsets[i];
758            let end = start + file.defs.len();
759            def_ranks[start..end].iter().sum()
760        })
761        .collect();
762
763    // Derive file-level edges from def-level call edges
764    let mut file_edge_map: HashMap<(u32, u32), u32> = HashMap::new();
765    for &(src, dst, w) in &def_edges {
766        let src_file = src.0;
767        let dst_file = dst.0;
768        if src_file != dst_file {
769            *file_edge_map.entry((src_file, dst_file)).or_insert(0) += w;
770        }
771    }
772    let file_edges: Vec<(u32, u32, u32)> = file_edge_map
773        .into_iter()
774        .map(|((src, dst), w)| (src, dst, w))
775        .collect();
776
777    // Build def-level caller/callee lists
778    let (def_callers, def_callees) = build_def_neighbor_lists(n_defs, &flat_def_edges, &offsets);
779
780    DefGraphData {
781        def_edges,
782        def_ranks,
783        def_callers,
784        def_callees,
785        offsets,
786        base_ranks,
787        file_edges,
788    }
789}
790
791/// Build a dependency graph from a repository root.
792///
793/// Walks the directory tree, parses each supported file with tree-sitter,
794/// extracts definitions and imports, resolves import paths to files, runs
795/// `PageRank`, and builds caller/callee lists.
796///
797/// # Errors
798///
799/// Returns an error if file walking or reading fails.
800pub fn build_graph(root: &Path) -> crate::Result<RepoGraph> {
801    let root = root.canonicalize().map_err(|e| crate::Error::Io {
802        path: root.display().to_string(),
803        source: e,
804    })?;
805
806    let all_files = walk::collect_files(&root, None);
807
808    // Build file index mapping canonical paths to indices
809    let mut file_index: HashMap<PathBuf, usize> = HashMap::new();
810    let mut files: Vec<FileNode> = Vec::new();
811    let mut raw_sources: Vec<(usize, String, String)> = Vec::new(); // (idx, ext, source)
812
813    for path in &all_files {
814        let ext = path
815            .extension()
816            .and_then(|e| e.to_str())
817            .unwrap_or_default()
818            .to_string();
819
820        // Only process files with known language support
821        if languages::config_for_extension(&ext).is_none()
822            && import_query_for_extension(&ext).is_none()
823        {
824            continue;
825        }
826
827        let Ok(source) = std::fs::read_to_string(path) else {
828            continue; // Skip binary/unreadable files
829        };
830
831        let rel_path = path
832            .strip_prefix(&root)
833            .unwrap_or(path)
834            .display()
835            .to_string();
836
837        let idx = files.len();
838        file_index.insert(path.clone(), idx);
839        files.push(FileNode {
840            path: rel_path,
841            defs: vec![],
842            imports: vec![],
843        });
844        raw_sources.push((idx, ext, source));
845    }
846
847    // Extract definitions and imports
848    for (idx, ext, source) in &raw_sources {
849        // Extract definitions
850        if let Some(config) = languages::config_for_extension(ext) {
851            files[*idx].defs = extract_definitions(source, &config);
852        }
853
854        // Extract imports
855        if let Some((lang, import_query)) = import_query_for_extension(ext) {
856            let raw_imports = extract_imports(source, &lang, &import_query);
857            let file_path = root.join(&files[*idx].path);
858
859            files[*idx].imports = raw_imports
860                .into_iter()
861                .map(|raw| {
862                    let resolved_idx = resolve_import(&raw, ext, &file_path, &root, &file_index)
863                        .and_then(|i| u32::try_from(i).ok());
864                    ImportRef {
865                        raw_path: raw,
866                        resolved_idx,
867                    }
868                })
869                .collect();
870        }
871    }
872
873    // Extract calls within definitions
874    for (idx, ext, source) in &raw_sources {
875        if let Some(call_config) = languages::call_query_for_extension(ext) {
876            extract_calls(source, &call_config, &mut files[*idx].defs);
877        }
878    }
879
880    // Resolve call references to target definitions
881    let def_index = build_def_index(&files);
882    resolve_calls(&mut files, &def_index);
883
884    // Build def-level graph, compute PageRank, and derive file-level data
885    let graph_data = compute_def_graph(&files);
886
887    // Build file-level caller/callee lists
888    let n = files.len();
889    let (callers, callees) = build_neighbor_lists(n, &graph_data.file_edges);
890
891    // Auto-tune alpha based on graph density
892    #[expect(clippy::cast_precision_loss, reason = "graph sizes fit in f32")]
893    let density = if n > 1 {
894        graph_data.file_edges.len() as f32 / (n as f32 * (n as f32 - 1.0))
895    } else {
896        0.0
897    };
898    let alpha = 0.3f32.mul_add(density.min(1.0), 0.5);
899
900    Ok(RepoGraph {
901        files,
902        edges: graph_data.file_edges,
903        base_ranks: graph_data.base_ranks,
904        callers,
905        callees,
906        def_edges: graph_data.def_edges,
907        def_ranks: graph_data.def_ranks,
908        def_callers: graph_data.def_callers,
909        def_callees: graph_data.def_callees,
910        def_offsets: graph_data.offsets,
911        alpha,
912    })
913}
914
915impl RepoGraph {
916    /// Get the `PageRank` score for a specific definition.
917    #[must_use]
918    pub fn def_rank(&self, did: DefId) -> f32 {
919        let flat = self.def_offsets[did.0 as usize] + did.1 as usize;
920        self.def_ranks.get(flat).copied().unwrap_or(0.0)
921    }
922
923    /// Look up a definition by file path and name. Returns the first match.
924    #[must_use]
925    pub fn find_def(&self, file_path: &str, def_name: &str) -> Option<DefId> {
926        for (file_idx, file) in self.files.iter().enumerate() {
927            if file.path == file_path {
928                for (def_idx, def) in file.defs.iter().enumerate() {
929                    if def.name == def_name {
930                        #[expect(clippy::cast_possible_truncation)]
931                        return Some((file_idx as u32, def_idx as u16));
932                    }
933                }
934            }
935        }
936        None
937    }
938}
939
940/// Build top-N caller and callee lists for each file.
941fn build_neighbor_lists(n: usize, edges: &[(u32, u32, u32)]) -> (Vec<Vec<u32>>, Vec<Vec<u32>>) {
942    let mut incoming: Vec<Vec<(u32, u32)>> = vec![vec![]; n];
943    let mut outgoing: Vec<Vec<(u32, u32)>> = vec![vec![]; n];
944
945    for &(src, dst, w) in edges {
946        let (s, d) = (src as usize, dst as usize);
947        if s < n && d < n {
948            incoming[d].push((src, w));
949            outgoing[s].push((dst, w));
950        }
951    }
952
953    // Sort by weight descending, keep top N
954    let trim = |lists: &mut [Vec<(u32, u32)>]| -> Vec<Vec<u32>> {
955        lists
956            .iter_mut()
957            .map(|list| {
958                list.sort_by(|a, b| b.1.cmp(&a.1));
959                list.iter()
960                    .take(MAX_NEIGHBORS)
961                    .map(|(idx, _)| *idx)
962                    .collect()
963            })
964            .collect()
965    };
966
967    (trim(&mut incoming), trim(&mut outgoing))
968}
969
970// ── Rendering ────────────────────────────────────────────────────────
971
972/// Render a budget-constrained overview of the repository.
973///
974/// Files are sorted by `PageRank` (or topic-sensitive rank if `focus` is
975/// `Some`). Output uses four tiers of decreasing detail:
976///
977/// - **Tier 0** (top 10%): full path, rank, callers/callees, signatures with scopes
978/// - **Tier 1** (next 20%): full path, rank, signatures
979/// - **Tier 2** (next 40%): full path, rank, definition names and kinds
980/// - **Tier 3** (bottom 30%): file path only
981///
982/// Stops accumulating output when the estimated token count exceeds
983/// `max_tokens`.
984#[must_use]
985pub fn render(graph: &RepoGraph, max_tokens: usize, focus: Option<usize>) -> String {
986    let n = graph.files.len();
987    if n == 0 {
988        return String::new();
989    }
990
991    // Compute ranks (recompute topic-sensitive if focus is given)
992    let ranks = if focus.is_some() {
993        pagerank(n, &graph.edges, focus)
994    } else {
995        graph.base_ranks.clone()
996    };
997
998    // Sort file indices by rank descending
999    let mut sorted: Vec<usize> = (0..n).collect();
1000    sorted.sort_by(|&a, &b| ranks[b].total_cmp(&ranks[a]));
1001
1002    let mut output = String::new();
1003    let mut used_tokens = 0;
1004    let max_chars = max_tokens * CHARS_PER_TOKEN;
1005
1006    for (rank_pos, &file_idx) in sorted.iter().enumerate() {
1007        if used_tokens >= max_tokens {
1008            break;
1009        }
1010
1011        let file = &graph.files[file_idx];
1012        let score = ranks[file_idx];
1013        #[expect(clippy::cast_precision_loss, reason = "file counts fit in f32")]
1014        let percentile = (rank_pos as f32) / (n as f32);
1015
1016        let section = if percentile < 0.1 {
1017            render_tier0(graph, file_idx, file, score)
1018        } else if percentile < 0.3 {
1019            render_tier1(file, score)
1020        } else if percentile < 0.7 {
1021            render_tier2(file, score)
1022        } else {
1023            render_tier3(file)
1024        };
1025
1026        let section_chars = section.len();
1027        if used_tokens > 0 && used_tokens + section_chars / CHARS_PER_TOKEN > max_tokens {
1028            // Would exceed budget — try to fit at least the path
1029            let path_line = format!("{}\n", file.path);
1030            let path_tokens = path_line.len() / CHARS_PER_TOKEN;
1031            if used_tokens + path_tokens <= max_tokens {
1032                output.push_str(&path_line);
1033            }
1034            break;
1035        }
1036
1037        output.push_str(&section);
1038        used_tokens = output.len().min(max_chars) / CHARS_PER_TOKEN;
1039    }
1040
1041    output
1042}
1043
1044/// Render tier 0: full detail with callers, callees, and signatures.
1045fn render_tier0(graph: &RepoGraph, file_idx: usize, file: &FileNode, score: f32) -> String {
1046    let mut out = format!("## {} (rank: {score:.4})\n", file.path);
1047
1048    // Callers
1049    if file_idx < graph.callers.len() && !graph.callers[file_idx].is_empty() {
1050        let _ = write!(out, "  called by: ");
1051        let names: Vec<&str> = graph.callers[file_idx]
1052            .iter()
1053            .filter_map(|&idx| graph.files.get(idx as usize).map(|f| f.path.as_str()))
1054            .collect();
1055        let _ = writeln!(out, "{}", names.join(", "));
1056    }
1057
1058    // Callees
1059    if file_idx < graph.callees.len() && !graph.callees[file_idx].is_empty() {
1060        let _ = write!(out, "  calls: ");
1061        let names: Vec<&str> = graph.callees[file_idx]
1062            .iter()
1063            .filter_map(|&idx| graph.files.get(idx as usize).map(|f| f.path.as_str()))
1064            .collect();
1065        let _ = writeln!(out, "{}", names.join(", "));
1066    }
1067
1068    // Definitions with scope and signature
1069    for def in &file.defs {
1070        let scope_prefix = if def.scope.is_empty() {
1071            String::new()
1072        } else {
1073            format!("{} > ", def.scope)
1074        };
1075        if let Some(sig) = &def.signature {
1076            let _ = writeln!(out, "  {scope_prefix}{} {sig}", def.kind);
1077        } else {
1078            let _ = writeln!(out, "  {scope_prefix}{} {}", def.kind, def.name);
1079        }
1080    }
1081    let _ = writeln!(out);
1082    out
1083}
1084
1085/// Render tier 1: file path, rank, and signatures.
1086fn render_tier1(file: &FileNode, score: f32) -> String {
1087    let mut out = format!("## {} (rank: {score:.4})\n", file.path);
1088    for def in &file.defs {
1089        if let Some(sig) = &def.signature {
1090            let _ = writeln!(out, "  {sig}");
1091        } else {
1092            let _ = writeln!(out, "  {} {}", def.kind, def.name);
1093        }
1094    }
1095    let _ = writeln!(out);
1096    out
1097}
1098
1099/// Render tier 2: file path, rank, and definition names/kinds.
1100fn render_tier2(file: &FileNode, score: f32) -> String {
1101    let mut out = format!("{} (rank: {score:.4})", file.path);
1102    if !file.defs.is_empty() {
1103        let names: Vec<String> = file
1104            .defs
1105            .iter()
1106            .map(|d| format!("{}:{}", d.kind, d.name))
1107            .collect();
1108        let _ = write!(out, " -- {}", names.join(", "));
1109    }
1110    let _ = writeln!(out);
1111    out
1112}
1113
1114/// Render tier 3: file path only.
1115fn render_tier3(file: &FileNode) -> String {
1116    format!("{}\n", file.path)
1117}
1118
1119// ── Tests ────────────────────────────────────────────────────────────
1120
// Unit tests for PageRank, tiered rendering, neighbor lists, import
// resolution, and end-to-end graph construction on the fixture tree.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_pagerank_simple() {
        // 3-node graph: 0 -> 1 -> 2, 2 -> 0 (cycle)
        let edges = vec![(0, 1, 1), (1, 2, 1), (2, 0, 1)];
        let ranks = pagerank(3, &edges, None);

        // All nodes in a symmetric cycle should have equal rank
        assert_eq!(ranks.len(), 3);
        let sum: f32 = ranks.iter().sum();
        assert!(
            (sum - 1.0).abs() < 0.01,
            "ranks should sum to ~1.0, got {sum}"
        );

        // In a perfect cycle, all ranks should be approximately equal
        let expected = 1.0 / 3.0;
        for (i, &r) in ranks.iter().enumerate() {
            assert!(
                (r - expected).abs() < 0.05,
                "rank[{i}] = {r}, expected ~{expected}"
            );
        }
    }

    #[test]
    fn test_pagerank_star() {
        // Star graph: 0,1,2 all point to 3
        let edges = vec![(0, 3, 1), (1, 3, 1), (2, 3, 1)];
        let ranks = pagerank(4, &edges, None);

        assert_eq!(ranks.len(), 4);
        // Node 3 should have the highest rank
        let max_idx = ranks
            .iter()
            .enumerate()
            .max_by(|a, b| a.1.total_cmp(b.1))
            .unwrap()
            .0;
        assert_eq!(max_idx, 3, "node 3 should have highest rank");
        assert!(
            ranks[3] > ranks[0],
            "rank[3]={} should be > rank[0]={}",
            ranks[3],
            ranks[0]
        );
    }

    #[test]
    fn test_pagerank_topic_sensitive() {
        // 3-node chain: 0 -> 1 -> 2
        let edges = vec![(0, 1, 1), (1, 2, 1)];
        let uniform_ranks = pagerank(3, &edges, None);
        let biased_ranks = pagerank(3, &edges, Some(0));

        // With focus on node 0, it should get a higher rank than uniform
        assert!(
            biased_ranks[0] > uniform_ranks[0],
            "focused rank[0]={} should be > uniform rank[0]={}",
            biased_ranks[0],
            uniform_ranks[0]
        );
    }

    // Degenerate case: no nodes, no edges — must not panic.
    #[test]
    fn test_pagerank_empty() {
        let ranks = pagerank(0, &[], None);
        assert!(ranks.is_empty());
    }

    #[test]
    fn test_render_tiers() {
        // Build a small graph with 10 files to exercise all tiers
        let files: Vec<FileNode> = (0..10)
            .map(|i| FileNode {
                path: format!("src/file_{i}.rs"),
                defs: vec![Definition {
                    name: format!("func_{i}"),
                    kind: "function_item".to_string(),
                    start_line: 1,
                    end_line: 5,
                    scope: String::new(),
                    signature: Some(format!("func_{i}(x: i32) -> i32")),
                    start_byte: 0,
                    end_byte: 0,
                    calls: vec![],
                }],
                imports: vec![],
            })
            .collect();

        // Create a star graph: files 1-9 all import from file 0
        let edges: Vec<(u32, u32, u32)> = (1..10).map(|i| (i, 0, 1)).collect();
        let base_ranks = pagerank(10, &edges, None);
        let (top_callers, top_callees) = build_neighbor_lists(10, &edges);

        let graph = RepoGraph {
            files,
            edges,
            base_ranks,
            callers: top_callers,
            callees: top_callees,
            def_edges: vec![],
            def_ranks: vec![],
            def_callers: vec![],
            def_callees: vec![],
            def_offsets: vec![0],
            alpha: 0.5,
        };

        // Large budget: should include all files
        let full = render(&graph, 10_000, None);
        assert!(
            full.contains("file_0"),
            "output should contain the top-ranked file"
        );
        // file_0 should appear as tier 0 (highest rank)
        assert!(
            full.contains("## src/file_0.rs"),
            "top file should have tier 0 heading"
        );

        // Tiny budget: should only fit a few files
        let small = render(&graph, 10, None);
        assert!(
            !small.is_empty(),
            "even tiny budget should produce some output"
        );
        // Should have fewer entries than full render
        let full_lines = full.lines().count();
        let small_lines = small.lines().count();
        assert!(
            small_lines < full_lines,
            "small budget ({small_lines} lines) should have fewer lines than full ({full_lines})"
        );
    }

    // An empty graph must render to an empty string, not panic or emit headers.
    #[test]
    fn test_render_empty_graph() {
        let graph = RepoGraph {
            files: vec![],
            edges: vec![],
            base_ranks: vec![],
            callers: vec![],
            callees: vec![],
            def_edges: vec![],
            def_ranks: vec![],
            def_callers: vec![],
            def_callees: vec![],
            def_offsets: vec![0],
            alpha: 0.5,
        };
        let output = render(&graph, 1000, None);
        assert!(output.is_empty(), "empty graph should render empty string");
    }

    // End-to-end: walk the shared tests/fixtures tree two levels above this
    // crate and check definitions and ranks come out of real parsing.
    #[test]
    fn test_build_graph_on_fixtures() {
        let fixtures = Path::new(env!("CARGO_MANIFEST_DIR"))
            .parent()
            .unwrap()
            .parent()
            .unwrap()
            .join("tests")
            .join("fixtures");

        let graph = build_graph(&fixtures).expect("build_graph should succeed on fixtures");

        // Should find at least the 3 fixture files
        assert!(
            !graph.files.is_empty(),
            "graph should contain files from fixtures"
        );

        // Should find definitions in the Rust fixture
        let rs_file = graph.files.iter().find(|f| f.path.ends_with("sample.rs"));
        assert!(rs_file.is_some(), "should find sample.rs");
        let rs_file = rs_file.unwrap();
        assert!(
            !rs_file.defs.is_empty(),
            "sample.rs should have definitions"
        );
        assert!(
            rs_file.defs.iter().any(|d| d.name == "hello"),
            "should find 'hello' function in sample.rs"
        );

        // Should find definitions in the Python fixture
        let py_file = graph.files.iter().find(|f| f.path.ends_with("sample.py"));
        assert!(py_file.is_some(), "should find sample.py");
        let py_file = py_file.unwrap();
        assert!(
            !py_file.defs.is_empty(),
            "sample.py should have definitions"
        );
        assert!(
            py_file.defs.iter().any(|d| d.name == "greet"),
            "should find 'greet' function in sample.py"
        );

        // PageRank scores should be computed
        assert_eq!(graph.base_ranks.len(), graph.files.len());
        let sum: f32 = graph.base_ranks.iter().sum();
        assert!(
            (sum - 1.0).abs() < 0.01,
            "PageRank scores should sum to ~1.0, got {sum}"
        );
    }

    // Tree-sitter import extraction should see both `use` lines in order.
    #[test]
    fn test_extract_imports_rust() {
        let source = "use crate::foo::bar;\nuse std::collections::HashMap;\n";
        let (lang, query) = import_query_for_extension("rs").unwrap();
        let imports = extract_imports(source, &lang, &query);
        assert_eq!(imports.len(), 2);
        assert!(imports[0].contains("crate::foo::bar"));
    }

    // A `use crate::…` path should resolve to the matching in-repo file index.
    #[test]
    fn test_resolve_rust_crate_import() {
        let root = PathBuf::from("/project");
        let file_path = PathBuf::from("/project/src/main.rs");
        let mut file_index = HashMap::new();
        file_index.insert(PathBuf::from("/project/src/foo/bar.rs"), 1);
        file_index.insert(PathBuf::from("/project/src/main.rs"), 0);

        let result = resolve_rust_import("use crate::foo::bar;", &file_path, &root, &file_index);
        assert_eq!(result, Some(1));
    }

    // Imports from external crates (std here) have no in-repo target.
    #[test]
    fn test_resolve_rust_external_crate_dropped() {
        let root = PathBuf::from("/project");
        let file_path = PathBuf::from("/project/src/main.rs");
        let file_index = HashMap::new();

        let result = resolve_rust_import(
            "use std::collections::HashMap;",
            &file_path,
            &root,
            &file_index,
        );
        assert_eq!(result, None, "external crate imports should be dropped");
    }

    #[test]
    fn test_neighbor_lists() {
        // 0 -> 1, 0 -> 2, 1 -> 2
        let edges = vec![(0, 1, 1), (0, 2, 1), (1, 2, 1)];
        let (incoming, outgoing) = build_neighbor_lists(3, &edges);

        // Node 2 should be called by 0 and 1
        assert!(incoming[2].contains(&0));
        assert!(incoming[2].contains(&1));

        // Node 0 should call 1 and 2
        assert!(outgoing[0].contains(&1));
        assert!(outgoing[0].contains(&2));
    }

    #[test]
    #[ignore = "runs on full ripvec codebase; use --nocapture to see output"]
    fn test_full_repo_map() {
        use std::time::Instant;

        let root = Path::new(env!("CARGO_MANIFEST_DIR"))
            .parent()
            .unwrap()
            .parent()
            .unwrap();

        // Phase 1: build_graph (walk + parse + import resolve + PageRank)
        let t0 = Instant::now();
        let graph = build_graph(root).expect("build_graph on ripvec root");
        let build_ms = t0.elapsed().as_secs_f64() * 1000.0;

        // Phase 2: render (default, no focus)
        let t1 = Instant::now();
        let rendered = render(&graph, 2000, None);
        let render_ms = t1.elapsed().as_secs_f64() * 1000.0;

        // Phase 3: render (topic-sensitive, focused on highest-ranked file)
        let t2 = Instant::now();
        let focus_idx = graph
            .base_ranks
            .iter()
            .enumerate()
            .max_by(|a, b| a.1.total_cmp(b.1))
            .map(|(i, _)| i);
        let focused = render(&graph, 2000, focus_idx);
        let focus_ms = t2.elapsed().as_secs_f64() * 1000.0;

        eprintln!("\n=== Repo Map Performance ===");
        eprintln!(
            "Files: {}, Edges: {}, Defs: {}",
            graph.files.len(),
            graph.edges.len(),
            graph.files.iter().map(|f| f.defs.len()).sum::<usize>()
        );
        eprintln!("build_graph:     {build_ms:.1}ms (walk + parse + resolve + PageRank)");
        eprintln!(
            "render(default): {render_ms:.3}ms ({} chars, ~{} tokens)",
            rendered.len(),
            rendered.len() / 4
        );
        eprintln!(
            "render(focused): {focus_ms:.3}ms ({} chars, ~{} tokens)",
            focused.len(),
            focused.len() / 4
        );

        eprintln!("\nTop 5 by PageRank:");
        let mut ranked: Vec<(usize, f32)> = graph.base_ranks.iter().copied().enumerate().collect();
        ranked.sort_by(|a, b| b.1.total_cmp(&a.1));
        for (i, rank) in ranked.iter().take(5) {
            eprintln!("  {:.4} {}", rank, graph.files[*i].path);
        }

        eprintln!("\n=== Default Render ===\n{rendered}");
        eprintln!(
            "\n=== Focused Render (on {}) ===\n{focused}",
            focus_idx
                .map(|i| graph.files[i].path.as_str())
                .unwrap_or("none")
        );
    }
}