//! routa_server/api/codebases.rs — codebase management API endpoints.

1use axum::{
2    extract::State,
3    routing::{get, patch, post},
4    Json, Router,
5};
6use serde::{Deserialize, Serialize};
7
8use crate::api::repo_context::{
9    normalize_local_repo_path, validate_local_git_repo_path, validate_repo_path,
10};
11use crate::error::ServerError;
12use crate::models::codebase::{Codebase, CodebaseSourceType};
13use crate::state::AppState;
14
/// Derives a human-readable label from a repository path.
///
/// Returns the final path component (e.g. "repo" for "/home/user/repo");
/// falls back to the whole input when no UTF-8 file name can be extracted.
fn repo_label_from_path(repo_path: &str) -> String {
    let path = std::path::Path::new(repo_path);
    match path.file_name().and_then(std::ffi::OsStr::to_str) {
        Some(name) => name.to_string(),
        None => repo_path.to_string(),
    }
}
22
/// Builds the codebase API router.
///
/// Workspace-scoped routes handle listing/adding codebases, aggregated git
/// change reporting, reposlide and wiki generation, and nest the git router
/// under each codebase. Codebase-scoped routes handle update/delete and
/// marking a codebase as the workspace default.
pub fn router() -> Router<AppState> {
    Router::new()
        .route(
            "/workspaces/{workspace_id}/codebases",
            get(list_codebases).post(add_codebase),
        )
        .route(
            "/workspaces/{workspace_id}/codebases/changes",
            get(list_codebase_changes),
        )
        .route(
            "/workspaces/{workspace_id}/codebases/{codebase_id}/reposlide",
            get(get_reposlide),
        )
        .route(
            "/workspaces/{workspace_id}/codebases/{codebase_id}/wiki",
            get(get_wiki),
        )
        // Per-codebase git operations live in their own sub-router.
        .nest(
            "/workspaces/{workspace_id}/codebases/{codebase_id}/git",
            crate::api::git::router(),
        )
        .route(
            "/codebases/{id}",
            patch(update_codebase).delete(delete_codebase),
        )
        .route("/codebases/{id}/default", post(set_default_codebase))
}
51
52async fn list_codebases(
53    State(state): State<AppState>,
54    axum::extract::Path(workspace_id): axum::extract::Path<String>,
55) -> Result<Json<serde_json::Value>, ServerError> {
56    let codebases = state
57        .codebase_store
58        .list_by_workspace(&workspace_id)
59        .await?;
60    Ok(Json(serde_json::json!({ "codebases": codebases })))
61}
62
63async fn list_codebase_changes(
64    State(state): State<AppState>,
65    axum::extract::Path(workspace_id): axum::extract::Path<String>,
66) -> Result<Json<serde_json::Value>, ServerError> {
67    let codebases = state
68        .codebase_store
69        .list_by_workspace(&workspace_id)
70        .await?;
71
72    let repos = codebases
73        .into_iter()
74        .map(|codebase| {
75            let label = codebase
76                .label
77                .clone()
78                .unwrap_or_else(|| repo_label_from_path(&codebase.repo_path));
79
80            if codebase.repo_path.is_empty() {
81                return serde_json::json!({
82                    "codebaseId": codebase.id,
83                    "repoPath": codebase.repo_path,
84                    "label": label,
85                    "branch": codebase.branch.unwrap_or_else(|| "unknown".to_string()),
86                    "status": { "clean": true, "ahead": 0, "behind": 0, "modified": 0, "untracked": 0 },
87                    "files": [],
88                    "error": "Missing repository path",
89                });
90            }
91
92            if !crate::git::is_git_repository(&codebase.repo_path) {
93                return serde_json::json!({
94                    "codebaseId": codebase.id,
95                    "repoPath": codebase.repo_path,
96                    "label": label,
97                    "branch": codebase.branch.unwrap_or_else(|| "unknown".to_string()),
98                    "status": { "clean": true, "ahead": 0, "behind": 0, "modified": 0, "untracked": 0 },
99                    "files": [],
100                    "error": "Repository is missing or not a git repository",
101                });
102            }
103
104            let changes = crate::git::get_repo_changes(&codebase.repo_path);
105            serde_json::json!({
106                "codebaseId": codebase.id,
107                "repoPath": codebase.repo_path,
108                "label": label,
109                "branch": changes.branch,
110                "status": changes.status,
111                "files": changes.files,
112            })
113        })
114        .collect::<Vec<_>>();
115
116    Ok(Json(serde_json::json!({
117        "workspaceId": workspace_id,
118        "repos": repos,
119    })))
120}
121
/// Request body for POST /workspaces/{workspace_id}/codebases.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct AddCodebaseRequest {
    /// Repository path; normalized and validated according to the source type.
    repo_path: String,
    /// Optional branch name to associate with the codebase.
    branch: Option<String>,
    /// Optional display label; handlers fall back to the repo directory name.
    label: Option<String>,
    /// Source kind; defaults to Local when omitted.
    source_type: Option<CodebaseSourceType>,
    /// Optional source URL, stored as-is on the codebase.
    source_url: Option<String>,
    /// Whether this codebase is the workspace default; defaults to false.
    #[serde(default)]
    is_default: bool,
}
133
134async fn add_codebase(
135    State(state): State<AppState>,
136    axum::extract::Path(workspace_id): axum::extract::Path<String>,
137    Json(body): Json<AddCodebaseRequest>,
138) -> Result<Json<serde_json::Value>, ServerError> {
139    let source_type = body.source_type.unwrap_or(CodebaseSourceType::Local);
140    let repo_path = normalize_local_repo_path(&body.repo_path);
141    match source_type {
142        CodebaseSourceType::Local => validate_local_git_repo_path(&repo_path)?,
143        CodebaseSourceType::Github => validate_repo_path(&repo_path, "Path ")?,
144    }
145    let repo_path = repo_path.to_string_lossy().to_string();
146
147    // Check for duplicate repo_path within the workspace
148    if let Some(_existing) = state
149        .codebase_store
150        .find_by_repo_path(&workspace_id, &repo_path)
151        .await?
152    {
153        return Err(ServerError::Conflict(format!(
154            "Codebase with repo_path '{}' already exists in workspace {}",
155            repo_path, workspace_id
156        )));
157    }
158
159    let codebase = Codebase::new(
160        uuid::Uuid::new_v4().to_string(),
161        workspace_id,
162        repo_path,
163        body.branch,
164        body.label,
165        body.is_default,
166        Some(source_type),
167        body.source_url,
168    );
169
170    state.codebase_store.save(&codebase).await?;
171    Ok(Json(serde_json::json!({ "codebase": codebase })))
172}
173
/// Request body for PATCH /codebases/{id}. All fields are optional;
/// omitted fields leave the stored value unchanged.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct UpdateCodebaseRequest {
    /// New branch name, if changing.
    branch: Option<String>,
    /// New display label, if changing.
    label: Option<String>,
    /// New repository path, if changing; re-validated before persisting.
    repo_path: Option<String>,
    /// New source kind, if changing; also drives repo_path validation.
    source_type: Option<CodebaseSourceType>,
    /// New source URL, if changing.
    source_url: Option<String>,
}
183
184async fn update_codebase(
185    State(state): State<AppState>,
186    axum::extract::Path(id): axum::extract::Path<String>,
187    Json(body): Json<UpdateCodebaseRequest>,
188) -> Result<Json<serde_json::Value>, ServerError> {
189    let existing = state
190        .codebase_store
191        .get(&id)
192        .await?
193        .ok_or_else(|| ServerError::NotFound(format!("Codebase {} not found", id)))?;
194    let requested_source_type = body
195        .source_type
196        .clone()
197        .or_else(|| existing.source_type.clone())
198        .unwrap_or(CodebaseSourceType::Local);
199
200    let repo_path = if let Some(repo_path) = body.repo_path.as_deref() {
201        let normalized = normalize_local_repo_path(repo_path);
202        match requested_source_type {
203            CodebaseSourceType::Local => validate_local_git_repo_path(&normalized)?,
204            CodebaseSourceType::Github => validate_repo_path(&normalized, "Path ")?,
205        }
206        let normalized = normalized.to_string_lossy().to_string();
207
208        if let Some(duplicate) = state
209            .codebase_store
210            .find_by_repo_path(&existing.workspace_id, &normalized)
211            .await?
212        {
213            if duplicate.id != id {
214                return Err(ServerError::Conflict(format!(
215                    "Codebase with repo_path '{}' already exists in workspace {}",
216                    normalized, existing.workspace_id
217                )));
218            }
219        }
220
221        Some(normalized)
222    } else {
223        None
224    };
225
226    state
227        .codebase_store
228        .update(
229            &id,
230            body.branch.as_deref(),
231            body.label.as_deref(),
232            repo_path.as_deref(),
233            body.source_type.as_ref().map(CodebaseSourceType::as_str),
234            body.source_url.as_deref(),
235        )
236        .await?;
237
238    let codebase = state
239        .codebase_store
240        .get(&id)
241        .await?
242        .ok_or_else(|| ServerError::NotFound(format!("Codebase {} not found", id)))?;
243
244    Ok(Json(serde_json::json!({ "codebase": codebase })))
245}
246
/// DELETE /codebases/{id} — deletes a codebase, cleaning up its worktrees.
///
/// Worktree removal is best-effort: individual failures are logged and
/// skipped. Listing the worktrees, however, must succeed or the request
/// fails with 500 before anything is deleted.
async fn delete_codebase(
    State(state): State<AppState>,
    axum::extract::Path(id): axum::extract::Path<String>,
) -> Result<Json<serde_json::Value>, ServerError> {
    // Clean up worktrees on disk before deleting the codebase
    // (a failed lookup skips cleanup and proceeds straight to the delete).
    if let Ok(Some(codebase)) = state.codebase_store.get(&id).await {
        let repo_path = &codebase.repo_path;

        // Acquire repo lock to prevent races with concurrent worktree operations
        let lock = {
            let mut locks = crate::api::worktrees::get_repo_locks().lock().await;
            locks
                .entry(repo_path.to_string())
                .or_insert_with(|| std::sync::Arc::new(tokio::sync::Mutex::new(())))
                .clone()
        };
        // Hold the per-repo guard for the remainder of the cleanup.
        let _guard = lock.lock().await;

        let worktrees = state
            .worktree_store
            .list_by_codebase(&id)
            .await
            .map_err(|e| ServerError::Internal(format!("Failed to list worktrees: {}", e)))?;
        // Force-remove each worktree; log and continue on failure.
        for wt in &worktrees {
            if let Err(e) = crate::git::worktree_remove(repo_path, &wt.worktree_path, true) {
                tracing::warn!(
                    "[Codebase DELETE] Failed to remove worktree {}: {}",
                    wt.id,
                    e
                );
            }
        }
        // Prune stale worktree metadata once, only if anything was listed.
        if !worktrees.is_empty() {
            let _ = crate::git::worktree_prune(repo_path);
        }
    }

    state.codebase_store.delete(&id).await?;
    Ok(Json(serde_json::json!({ "deleted": true })))
}
287
288async fn set_default_codebase(
289    State(state): State<AppState>,
290    axum::extract::Path(id): axum::extract::Path<String>,
291) -> Result<Json<serde_json::Value>, ServerError> {
292    let codebase = state
293        .codebase_store
294        .get(&id)
295        .await?
296        .ok_or_else(|| ServerError::NotFound(format!("Codebase {} not found", id)))?;
297
298    state
299        .codebase_store
300        .set_default(&codebase.workspace_id, &id)
301        .await?;
302
303    let updated = state
304        .codebase_store
305        .get(&id)
306        .await?
307        .ok_or_else(|| ServerError::NotFound(format!("Codebase {} not found", id)))?;
308
309    Ok(Json(serde_json::json!({ "codebase": updated })))
310}
311
// ─── RepoSlide ──────────────────────────────────────────────────

// Directory names skipped entirely during repository scanning.
const IGNORE_DIRS: &[&str] = &[
    "node_modules",
    ".git",
    ".next",
    "dist",
    "build",
    "target",
    ".routa",
    ".worktrees",
    "__pycache__",
    ".tox",
    ".venv",
    "venv",
    ".cache",
];

// Maximum recursion depth for the repo tree scan.
const MAX_DEPTH: usize = 4;
// Maximum number of children emitted per scanned directory.
const MAX_CHILDREN: usize = 50;
// Maximum number of directory focus areas surfaced for slides.
const MAX_DIR_FOCUS_SLIDES: usize = 6;
// Maximum number of top-level modules listed in the RepoWiki output.
const MAX_REPOWIKI_MODULES: usize = 8;

// Root files treated as project entry points by detect_entry_points.
const ENTRY_POINT_FILES: &[&str] = &[
    "README.md",
    "AGENTS.md",
    "package.json",
    "Cargo.toml",
    "go.mod",
    "pyproject.toml",
    "setup.py",
    "pom.xml",
    "build.gradle",
    "Makefile",
    "Dockerfile",
    "docker-compose.yml",
    "tsconfig.json",
];

// Directory paths treated as architecture anchors by detect_entry_points.
const ANCHOR_DIRS: &[&str] = &[
    "src/app",
    "src/core",
    "src/client",
    "crates",
    "apps",
    "lib",
    "pkg",
    "cmd",
    "internal",
    "api",
];

// Root files surfaced as "key files" by detect_key_files.
const KEY_FILE_NAMES: &[&str] = &[
    "README.md",
    "AGENTS.md",
    "ARCHITECTURE.md",
    "CONTRIBUTING.md",
    "LICENSE",
    "CHANGELOG.md",
];

// Root file anchors for RepoWiki; matched loosely by name, stem, or
// "stem." prefix (see matches_root_file_anchor).
const REPOWIKI_ROOT_FILE_ANCHORS: &[&str] = &[
    "README.md",
    "README",
    "AGENTS.md",
    "package.json",
    "Cargo.toml",
    "pyproject.toml",
    "go.mod",
];

// Nested documentation files treated as RepoWiki anchors when present.
const REPOWIKI_NESTED_FILE_ANCHORS: &[&str] = &["docs/ARCHITECTURE.md", "docs/adr/README.md"];

// Directory paths treated as RepoWiki architecture anchors.
const REPOWIKI_DIRECTORY_ANCHORS: &[&str] = &[
    "src/app",
    "src/core",
    "src/client",
    "crates",
    "docs",
    "apps",
    "api",
];

// Root files highlighted as key files in the storyline context.
const REPOWIKI_STORYLINE_KEY_FILES: &[&str] = &[
    "README.md",
    "AGENTS.md",
    "ARCHITECTURE.md",
    "CONTRIBUTING.md",
    "Cargo.toml",
    "package.json",
];
403
/// One node in the scanned repository tree.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct RepoTreeNode {
    /// Entry name (file/directory base name; repo directory name at the root).
    name: String,
    /// Path relative to the repo root ("." for the root node).
    path: String,
    /// "directory" or "file"; serialized under the key "type".
    #[serde(rename = "type")]
    node_type: String,
    /// Child nodes — Some for directories, None for files.
    #[serde(skip_serializing_if = "Option::is_none")]
    children: Option<Vec<RepoTreeNode>>,
    /// Recursive file count — Some for directories, None for files.
    #[serde(skip_serializing_if = "Option::is_none")]
    file_count: Option<u64>,
}
416
/// Aggregate statistics for a scanned repository tree.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct RepoSummary {
    /// Total files counted in the (depth/children-bounded) scan.
    total_files: u64,
    /// Total directories counted, including the root.
    total_directories: u64,
    /// Names of the root node's directory children.
    top_level_folders: Vec<String>,
    /// Codebase source type as a string.
    source_type: String,
    /// Branch name, when known.
    #[serde(skip_serializing_if = "Option::is_none")]
    branch: Option<String>,
}
427
428fn scan_repo_tree(repo_path: &str) -> RepoTreeNode {
429    let root_name = std::path::Path::new(repo_path)
430        .file_name()
431        .and_then(|n| n.to_str())
432        .unwrap_or(repo_path)
433        .to_string();
434    scan_dir(repo_path, &root_name, ".", 0)
435}
436
/// Recursively scans `abs_path` into a `RepoTreeNode`.
///
/// `name` is the display name for this directory, `rel_path` its path
/// relative to the repo root ("." for the root itself), and `depth` the
/// current recursion depth. Traversal is bounded by MAX_DEPTH and
/// MAX_CHILDREN, and entries named in IGNORE_DIRS are skipped entirely.
fn scan_dir(abs_path: &str, name: &str, rel_path: &str, depth: usize) -> RepoTreeNode {
    let mut node = RepoTreeNode {
        name: name.to_string(),
        path: rel_path.to_string(),
        node_type: "directory".to_string(),
        children: Some(Vec::new()),
        file_count: Some(0),
    };

    // Depth cap: return an empty directory node without descending.
    if depth >= MAX_DEPTH {
        return node;
    }

    // Unreadable directories degrade to an empty node rather than erroring.
    let mut entries: Vec<std::fs::DirEntry> = match std::fs::read_dir(abs_path) {
        Ok(rd) => rd.filter_map(|e| e.ok()).collect(),
        Err(_) => return node,
    };

    // Directories first, then files; ties broken by name.
    entries.sort_by(|a, b| {
        let a_dir = a.file_type().map(|ft| ft.is_dir()).unwrap_or(false);
        let b_dir = b.file_type().map(|ft| ft.is_dir()).unwrap_or(false);
        match (a_dir, b_dir) {
            (true, false) => std::cmp::Ordering::Less,
            (false, true) => std::cmp::Ordering::Greater,
            _ => a.file_name().cmp(&b.file_name()),
        }
    });

    let children = node.children.as_mut().unwrap();
    // Recursive file total for this subtree (not just direct children).
    let mut file_count: u64 = 0;
    // Entries processed so far; traversal stops at MAX_CHILDREN.
    let mut child_count = 0;

    for entry in entries {
        if child_count >= MAX_CHILDREN {
            break;
        }
        let entry_name = entry.file_name().to_string_lossy().to_string();
        // Ignored names are skipped without counting toward the child cap.
        if IGNORE_DIRS.contains(&entry_name.as_str()) {
            continue;
        }
        let ft = match entry.file_type() {
            Ok(ft) => ft,
            Err(_) => continue,
        };
        let child_rel = if rel_path == "." {
            entry_name.clone()
        } else {
            format!("{}/{}", rel_path, entry_name)
        };
        let child_abs = format!("{}/{}", abs_path, entry_name);

        if ft.is_dir() {
            let child = scan_dir(&child_abs, &entry_name, &child_rel, depth + 1);
            // Roll the subtree's file total up into this node's count.
            file_count += child.file_count.unwrap_or(0);
            children.push(child);
        } else if ft.is_file() {
            children.push(RepoTreeNode {
                name: entry_name,
                path: child_rel,
                node_type: "file".to_string(),
                children: None,
                file_count: None,
            });
            file_count += 1;
        }
        // Note: non-dir, non-file entries (e.g. symlinks) emit no node but
        // still count toward the MAX_CHILDREN cap below.

        child_count += 1;
    }

    node.file_count = Some(file_count);
    node
}
509
510fn compute_summary(tree: &RepoTreeNode, source_type: &str, branch: Option<&str>) -> RepoSummary {
511    let (files, dirs) = count_tree(tree);
512    let top_level_folders = tree
513        .children
514        .as_ref()
515        .map(|c| {
516            c.iter()
517                .filter(|n| n.node_type == "directory")
518                .map(|n| n.name.clone())
519                .collect()
520        })
521        .unwrap_or_default();
522
523    RepoSummary {
524        total_files: files,
525        total_directories: dirs,
526        top_level_folders,
527        source_type: source_type.to_string(),
528        branch: branch.map(str::to_string),
529    }
530}
531
532fn count_tree(node: &RepoTreeNode) -> (u64, u64) {
533    if node.node_type == "file" {
534        return (1, 0);
535    }
536    let mut files = 0u64;
537    let mut dirs = 1u64;
538    for child in node.children.as_deref().unwrap_or(&[]) {
539        let (f, d) = count_tree(child);
540        files += f;
541        dirs += d;
542    }
543    (files, dirs)
544}
545
546fn detect_entry_points(tree: &RepoTreeNode) -> Vec<serde_json::Value> {
547    let mut found = Vec::new();
548
549    for child in tree.children.as_deref().unwrap_or(&[]) {
550        if child.node_type == "file" && ENTRY_POINT_FILES.contains(&child.name.as_str()) {
551            found.push(serde_json::json!({
552                "name": child.name,
553                "path": child.path,
554                "reason": format!("Project entry point ({})", child.name),
555            }));
556        }
557    }
558
559    for anchor in ANCHOR_DIRS {
560        if let Some(node) = find_node_by_path(tree, anchor) {
561            found.push(serde_json::json!({
562                "name": *anchor,
563                "path": node.path,
564                "reason": "Architecture anchor directory",
565            }));
566        }
567    }
568
569    found
570}
571
572fn detect_key_files(tree: &RepoTreeNode) -> Vec<serde_json::Value> {
573    tree.children
574        .as_deref()
575        .unwrap_or(&[])
576        .iter()
577        .filter(|c| c.node_type == "file" && KEY_FILE_NAMES.contains(&c.name.as_str()))
578        .map(|c| {
579            serde_json::json!({
580                "name": c.name,
581                "path": c.path,
582            })
583        })
584        .collect()
585}
586
587fn extract_architecture_anchors(tree: &RepoTreeNode) -> Vec<serde_json::Value> {
588    let mut anchors = Vec::new();
589
590    for child in tree.children.as_deref().unwrap_or(&[]) {
591        if child.node_type != "file" {
592            continue;
593        }
594
595        if REPOWIKI_ROOT_FILE_ANCHORS
596            .iter()
597            .any(|anchor| matches_root_file_anchor(&child.name, anchor))
598        {
599            anchors.push(serde_json::json!({
600                "kind": "file",
601                "path": child.path,
602                "reason": format!("Architecture/documentation anchor ({})", child.name),
603            }));
604        }
605    }
606
607    for anchor in REPOWIKI_DIRECTORY_ANCHORS {
608        if let Some(node) = find_node_by_path(tree, anchor) {
609            anchors.push(serde_json::json!({
610                "kind": "directory",
611                "path": node.path,
612                "reason": "Architecture anchor directory",
613            }));
614        }
615    }
616
617    for anchor in REPOWIKI_NESTED_FILE_ANCHORS {
618        if let Some(node) = find_node_by_path(tree, anchor) {
619            if node.node_type == "file" {
620                anchors.push(serde_json::json!({
621                    "kind": "file",
622                    "path": node.path,
623                    "reason": format!("Architecture/documentation anchor ({})", node.name),
624                }));
625            }
626        }
627    }
628
629    anchors
630}
631
/// Loosely matches a root file name against an anchor: exact match, match on
/// the anchor's stem (text before the first '.'), or any "stem." prefix
/// (so "README.rst" matches the anchor "README.md").
fn matches_root_file_anchor(file_name: &str, anchor: &str) -> bool {
    let stem = match anchor.split_once('.') {
        Some((head, _)) => head,
        None => anchor,
    };
    if file_name == anchor || file_name == stem {
        return true;
    }
    let mut dotted_prefix = String::from(stem);
    dotted_prefix.push('.');
    file_name.starts_with(&dotted_prefix)
}
636
637fn build_repowiki_modules(tree: &RepoTreeNode) -> Vec<serde_json::Value> {
638    let mut modules: Vec<&RepoTreeNode> = tree
639        .children
640        .as_deref()
641        .unwrap_or(&[])
642        .iter()
643        .filter(|child| child.node_type == "directory")
644        .collect();
645    modules.sort_by(|left, right| {
646        right
647            .file_count
648            .unwrap_or(0)
649            .cmp(&left.file_count.unwrap_or(0))
650    });
651
652    modules
653        .into_iter()
654        .take(MAX_REPOWIKI_MODULES)
655        .map(|child| {
656            serde_json::json!({
657                "name": child.name,
658                "path": child.path,
659                "fileCount": child.file_count.unwrap_or(0),
660                "role": infer_module_role(&child.name),
661            })
662        })
663        .collect()
664}
665
/// Maps a well-known top-level directory name to a one-line role description;
/// unknown names get a generic fallback.
fn infer_module_role(name: &str) -> &'static str {
    const ROLES: &[(&str, &str)] = &[
        ("src", "Primary application source code."),
        ("docs", "Documentation, architecture notes, and operational guides."),
        ("crates", "Rust service/runtime modules."),
        ("apps", "Application entrypoints and package surfaces."),
        ("app", "User-facing application layer."),
    ];
    ROLES
        .iter()
        .find(|(key, _)| *key == name)
        .map(|(_, role)| *role)
        .unwrap_or("Core repository module area.")
}
676
/// Produces a one-sentence summary of how the repository is organized,
/// naming at most the first four top-level folders.
fn build_repository_role_summary(top_level_folders: &[String]) -> String {
    match top_level_folders {
        [] => "Repository is compact and mostly root-file driven.".to_string(),
        folders => {
            let highlighted: Vec<&str> = folders.iter().take(4).map(String::as_str).collect();
            format!("Repository is organized around {}.", highlighted.join(", "))
        }
    }
}
692
/// Describes runtime boundaries implied by well-known top-level folders.
/// Output order is fixed: src, crates, apps, docs.
fn build_runtime_boundaries(top_level_folders: &[String]) -> Vec<String> {
    const BOUNDARY_RULES: &[(&str, &str)] = &[
        ("src", "Source runtime boundary under src/"),
        ("crates", "Rust/Axum backend boundary under crates/"),
        ("apps", "Multi-app boundary under apps/"),
        ("docs", "Documentation and architecture boundary under docs/"),
    ];

    BOUNDARY_RULES
        .iter()
        .filter(|(folder, _)| top_level_folders.iter().any(|f| f == folder))
        .map(|(_, description)| description.to_string())
        .collect()
}
711
/// Infers a single cross-layer relationship sentence from the top-level
/// folders: src+crates takes precedence over src+docs, with a generic
/// fallback otherwise.
fn build_cross_layer_relationships(top_level_folders: &[String]) -> Vec<String> {
    let has = |name: &str| top_level_folders.iter().any(|folder| folder == name);

    let message = if has("src") && has("crates") {
        "Next.js app layer in src/ coordinates with Rust services in crates/."
    } else if has("src") && has("docs") {
        "Implementation in src/ is guided by architecture and ADR documents in docs/."
    } else {
        "Cross-layer relationships require deeper file-level inspection."
    };

    vec![message.to_string()]
}
732
733fn build_repowiki_workflows(top_level_folders: &[String]) -> Vec<serde_json::Value> {
734    let top_level_paths = top_level_folders
735        .iter()
736        .map(|folder| format!("{folder}/"))
737        .collect::<Vec<_>>();
738    let repo_orientation_paths = [
739        vec!["README.md".to_string(), "AGENTS.md".to_string()],
740        top_level_paths.clone(),
741    ]
742    .concat();
743
744    vec![
745        serde_json::json!({
746            "name": "Repo orientation",
747            "description": "Start from README/AGENTS and map top-level modules before detailed tracing.",
748            "relatedPaths": repo_orientation_paths,
749        }),
750        serde_json::json!({
751            "name": "Architecture walkthrough",
752            "description": "Trace runtime boundaries and handoffs between major layers.",
753            "relatedPaths": top_level_paths,
754        }),
755    ]
756}
757
758fn build_repowiki_glossary(top_level_folders: &[String]) -> Vec<serde_json::Value> {
759    let mut glossary = vec![
760        serde_json::json!({
761            "term": "RepoWiki",
762            "meaning": "Intermediate architecture-aware repository knowledge artifact."
763        }),
764        serde_json::json!({
765            "term": "Storyline context",
766            "meaning": "Slide-ready narrative hints generated from repository evidence."
767        }),
768    ];
769
770    if top_level_folders.iter().any(|folder| folder == "crates") {
771        glossary.push(serde_json::json!({
772            "term": "crates",
773            "meaning": "Rust package/workspace area.",
774            "sourcePath": "crates/",
775        }));
776    }
777
778    if top_level_folders.iter().any(|folder| folder == "src") {
779        glossary.push(serde_json::json!({
780            "term": "src",
781            "meaning": "Application source root.",
782            "sourcePath": "src/",
783        }));
784    }
785
786    glossary
787}
788
/// Assembles slide-ready storyline hints from the scanned tree and the
/// previously extracted architecture anchors.
///
/// Focus areas are the largest top-level directories (by recursive file
/// count, capped at MAX_DIR_FOCUS_SLIDES); entry points come from file-kind
/// anchors; key files are root files in REPOWIKI_STORYLINE_KEY_FILES.
fn build_repowiki_storyline_context(
    tree: &RepoTreeNode,
    anchors: &[serde_json::Value],
) -> serde_json::Value {
    let mut focus_areas: Vec<&RepoTreeNode> = tree
        .children
        .as_deref()
        .unwrap_or(&[])
        .iter()
        .filter(|child| child.node_type == "directory")
        .collect();
    // Largest directories first.
    focus_areas.sort_by(|left, right| {
        right
            .file_count
            .unwrap_or(0)
            .cmp(&left.file_count.unwrap_or(0))
    });

    let focus_areas = focus_areas
        .into_iter()
        .take(MAX_DIR_FOCUS_SLIDES)
        .map(|directory| {
            serde_json::json!({
                "path": directory.path,
                "fileCount": directory.file_count.unwrap_or(0),
            })
        })
        .collect::<Vec<_>>();

    // Anchors without a "kind" default to "file" here.
    let entry_points = anchors
        .iter()
        .filter(|anchor| {
            anchor
                .get("kind")
                .and_then(|value| value.as_str())
                .unwrap_or("file")
                == "file"
        })
        .filter_map(|anchor| anchor.get("path").and_then(|value| value.as_str()))
        .map(str::to_string)
        .collect::<Vec<_>>();

    let key_files = tree
        .children
        .as_deref()
        .unwrap_or(&[])
        .iter()
        .filter(|child| {
            child.node_type == "file" && REPOWIKI_STORYLINE_KEY_FILES.contains(&child.name.as_str())
        })
        .map(|child| child.path.clone())
        .collect::<Vec<_>>();

    // Largest focus area names the lead module for the narrative hint.
    let primary_module = focus_areas
        .first()
        .and_then(|area| area.get("path"))
        .and_then(|value| value.as_str())
        .unwrap_or("the primary module");

    serde_json::json!({
        "suggestedSections": [
            "Repository overview",
            "Top-level architecture",
            "Runtime boundaries",
            "Important modules and responsibilities",
            "Key files and why they matter",
            "Main workflows / narratives",
            "Slide-ready storyline hints",
        ],
        "entryPoints": entry_points,
        "keyFiles": key_files,
        "focusAreas": focus_areas,
        "narrativeHints": [
            format!("Start from docs/README and then explain {}.", primary_module),
            "Call out cross-layer boundaries between app/core/client or equivalent runtime layers.",
            "Label inferred conclusions explicitly when source files do not state intent directly.",
        ],
    })
}
868
869fn build_focus_directories(tree: &RepoTreeNode) -> Vec<serde_json::Value> {
870    let mut focus_dirs: Vec<&RepoTreeNode> = tree
871        .children
872        .as_deref()
873        .unwrap_or(&[])
874        .iter()
875        .filter(|c| c.node_type == "directory")
876        .collect();
877    focus_dirs.sort_by(|a, b| b.file_count.unwrap_or(0).cmp(&a.file_count.unwrap_or(0)));
878
879    focus_dirs
880        .into_iter()
881        .take(MAX_DIR_FOCUS_SLIDES)
882        .map(|dir| {
883            let children: Vec<serde_json::Value> = dir
884                .children
885                .as_deref()
886                .unwrap_or(&[])
887                .iter()
888                .map(|child| {
889                    let mut value = serde_json::json!({
890                        "name": child.name,
891                        "type": child.node_type,
892                    });
893                    if child.node_type == "directory" {
894                        value["fileCount"] = serde_json::json!(child.file_count.unwrap_or(0));
895                    }
896                    value
897                })
898                .collect();
899
900            serde_json::json!({
901                "name": dir.name,
902                "path": dir.path,
903                "fileCount": dir.file_count.unwrap_or(0),
904                "children": children,
905            })
906        })
907        .collect()
908}
909
910fn build_reposlide_prompt(
911    codebase: &Codebase,
912    summary: &RepoSummary,
913    root_files: &[String],
914    entry_points: &[serde_json::Value],
915    key_files: &[serde_json::Value],
916    focus_directories: &[serde_json::Value],
917) -> String {
918    let repo_label = codebase
919        .label
920        .clone()
921        .unwrap_or_else(|| repo_label_from_path(&codebase.repo_path));
922    let mut lines = vec![
923        format!(
924            "Create a presentation slide deck for the repository \"{}\".",
925            repo_label
926        ),
927        String::new(),
928        "Goal:".to_string(),
929        "- Explain what this repository is, how it is structured, and how an engineer should orient themselves quickly.".to_string(),
930        "- Keep the deck concise: target 6-8 slides.".to_string(),
931        "- Use evidence from the local repository only. If a conclusion is inferred, label it as an inference.".to_string(),
932        String::new(),
933        "Required coverage:".to_string(),
934        "- Repository purpose and audience.".to_string(),
935        "- Runtime or architecture overview.".to_string(),
936        "- Top-level structure and major subsystems.".to_string(),
937        "- Important entry points, docs, and operational files.".to_string(),
938        "- Notable risks, TODOs, or ambiguities if they materially affect understanding.".to_string(),
939        String::new(),
940        "Before drafting slides, inspect these first if they exist:".to_string(),
941        "- AGENTS.md".to_string(),
942        "- README.md".to_string(),
943        "- docs/ARCHITECTURE.md".to_string(),
944        "- docs/adr/README.md".to_string(),
945        "- package.json / Cargo.toml / pyproject.toml / go.mod".to_string(),
946        String::new(),
947        "Output:".to_string(),
948        "- Build the deck with slide-skill and save the final artifact as a PPTX.".to_string(),
949        "- In the final response, report the PPTX path and summarize the slide outline.".to_string(),
950        String::new(),
951        "Repository context:".to_string(),
952        format!("- Repo path: {}", codebase.repo_path),
953        format!(
954            "- Branch: {}",
955            codebase.branch.as_deref().unwrap_or("unknown")
956        ),
957        format!("- Source type: {}", summary.source_type),
958        format!("- Total files scanned: {}", summary.total_files),
959        format!("- Total directories scanned: {}", summary.total_directories),
960        format!(
961            "- Top-level folders: {}",
962            if summary.top_level_folders.is_empty() {
963                "(none detected)".to_string()
964            } else {
965                summary.top_level_folders.join(", ")
966            }
967        ),
968        format!(
969            "- Root files: {}",
970            if root_files.is_empty() {
971                "(none detected)".to_string()
972            } else {
973                root_files.join(", ")
974            }
975        ),
976    ];
977
978    if !entry_points.is_empty() {
979        lines.push(String::new());
980        lines.push("Entry points and architecture anchors:".to_string());
981        for item in entry_points {
982            let path = item
983                .get("path")
984                .and_then(|value| value.as_str())
985                .unwrap_or("(unknown)");
986            let reason = item
987                .get("reason")
988                .and_then(|value| value.as_str())
989                .unwrap_or("(no reason)");
990            lines.push(format!("- {}: {}", path, reason));
991        }
992    }
993
994    if !key_files.is_empty() {
995        lines.push(String::new());
996        lines.push("Key files worth reading:".to_string());
997        for item in key_files {
998            let path = item
999                .get("path")
1000                .and_then(|value| value.as_str())
1001                .unwrap_or("(unknown)");
1002            lines.push(format!("- {}", path));
1003        }
1004    }
1005
1006    if !focus_directories.is_empty() {
1007        lines.push(String::new());
1008        lines.push("Largest top-level areas:".to_string());
1009        for item in focus_directories {
1010            let dir_path = item
1011                .get("path")
1012                .and_then(|value| value.as_str())
1013                .unwrap_or("(unknown)");
1014            let file_count = item
1015                .get("fileCount")
1016                .and_then(|value| value.as_u64())
1017                .unwrap_or(0);
1018            let preview = item
1019                .get("children")
1020                .and_then(|value| value.as_array())
1021                .map(|children| {
1022                    children
1023                        .iter()
1024                        .take(8)
1025                        .map(|child| {
1026                            let name = child
1027                                .get("name")
1028                                .and_then(|value| value.as_str())
1029                                .unwrap_or("(unknown)");
1030                            let node_type = child
1031                                .get("type")
1032                                .and_then(|value| value.as_str())
1033                                .unwrap_or("file");
1034                            if node_type == "directory" {
1035                                let nested_count = child
1036                                    .get("fileCount")
1037                                    .and_then(|value| value.as_u64())
1038                                    .unwrap_or(0);
1039                                format!("{}/ ({} files)", name, nested_count)
1040                            } else {
1041                                name.to_string()
1042                            }
1043                        })
1044                        .collect::<Vec<_>>()
1045                        .join(", ")
1046                })
1047                .unwrap_or_default();
1048            lines.push(format!(
1049                "- {} ({} files): {}",
1050                dir_path,
1051                file_count,
1052                if preview.is_empty() {
1053                    "no immediate children scanned".to_string()
1054                } else {
1055                    preview
1056                }
1057            ));
1058        }
1059    }
1060
1061    lines.push(String::new());
1062    lines.push(
1063        "Work in the repository itself as the primary context. Do not generate application code for Routa; generate the slide deck artifact about this repo.".to_string(),
1064    );
1065
1066    lines.join("\n")
1067}
1068
/// Locate the bundled slide-skill repository relative to the server's
/// current working directory.
///
/// Returns the `tools/office-skills` path as a string when its
/// `.agents/skills/slide/SKILL.md` marker file exists; returns `None`
/// when the working directory is unavailable or the marker is missing.
fn resolve_reposlide_skill_repo_path() -> Option<String> {
    let skill_repo = std::env::current_dir()
        .ok()?
        .join("tools")
        .join("office-skills");
    let marker = skill_repo
        .join(".agents")
        .join("skills")
        .join("slide")
        .join("SKILL.md");

    // Only advertise the skill repo when the marker file actually exists.
    marker
        .is_file()
        .then(|| skill_repo.to_string_lossy().into_owned())
}
1084
1085fn find_node_by_path<'a>(tree: &'a RepoTreeNode, target: &str) -> Option<&'a RepoTreeNode> {
1086    let segments: Vec<&str> = target.split('/').collect();
1087    let mut current = tree;
1088    for seg in &segments {
1089        current = current.children.as_ref()?.iter().find(|c| c.name == *seg)?;
1090    }
1091    Some(current)
1092}
1093
1094async fn get_reposlide(
1095    State(state): State<AppState>,
1096    axum::extract::Path((workspace_id, codebase_id)): axum::extract::Path<(String, String)>,
1097) -> Result<Json<serde_json::Value>, ServerError> {
1098    let codebase = state
1099        .codebase_store
1100        .get(&codebase_id)
1101        .await?
1102        .ok_or_else(|| ServerError::NotFound(format!("Codebase {} not found", codebase_id)))?;
1103
1104    if codebase.workspace_id != workspace_id {
1105        return Err(ServerError::NotFound(format!(
1106            "Codebase {} not found in workspace {}",
1107            codebase_id, workspace_id
1108        )));
1109    }
1110
1111    if codebase.repo_path.is_empty() {
1112        return Err(ServerError::BadRequest(
1113            "Codebase has no repository path".to_string(),
1114        ));
1115    }
1116
1117    let tree = scan_repo_tree(&codebase.repo_path);
1118    let source_type = codebase
1119        .source_type
1120        .as_ref()
1121        .map(CodebaseSourceType::as_str)
1122        .unwrap_or("local");
1123    let summary = compute_summary(&tree, source_type, codebase.branch.as_deref());
1124    let root_files: Vec<String> = tree
1125        .children
1126        .as_deref()
1127        .unwrap_or(&[])
1128        .iter()
1129        .filter(|c| c.node_type == "file")
1130        .map(|c| c.name.clone())
1131        .collect();
1132    let entry_points = detect_entry_points(&tree);
1133    let key_files = detect_key_files(&tree);
1134    let focus_directories = build_focus_directories(&tree);
1135    let skill_repo_path = resolve_reposlide_skill_repo_path();
1136    let skill_available = skill_repo_path.is_some();
1137    let prompt = build_reposlide_prompt(
1138        &codebase,
1139        &summary,
1140        &root_files,
1141        &entry_points,
1142        &key_files,
1143        &focus_directories,
1144    );
1145
1146    Ok(Json(serde_json::json!({
1147        "codebase": {
1148            "id": codebase.id,
1149            "label": codebase.label,
1150            "repoPath": codebase.repo_path,
1151            "sourceType": source_type,
1152            "sourceUrl": codebase.source_url,
1153            "branch": codebase.branch,
1154        },
1155        "summary": summary,
1156        "context": {
1157            "rootFiles": root_files,
1158            "entryPoints": entry_points,
1159            "keyFiles": key_files,
1160            "focusDirectories": focus_directories,
1161        },
1162        "launch": {
1163            "skillName": "slide-skill",
1164            "skillRepoPath": skill_repo_path,
1165            "skillAvailable": skill_available,
1166            "unavailableReason": if skill_available {
1167                serde_json::Value::Null
1168            } else {
1169                serde_json::Value::String("slide-skill could not be found relative to the current Routa installation.".to_string())
1170            },
1171            "prompt": prompt,
1172        },
1173    })))
1174}
1175
1176async fn get_wiki(
1177    State(state): State<AppState>,
1178    axum::extract::Path((workspace_id, codebase_id)): axum::extract::Path<(String, String)>,
1179) -> Result<Json<serde_json::Value>, ServerError> {
1180    let codebase = state
1181        .codebase_store
1182        .get(&codebase_id)
1183        .await?
1184        .ok_or_else(|| ServerError::NotFound(format!("Codebase {} not found", codebase_id)))?;
1185
1186    if codebase.workspace_id != workspace_id {
1187        return Err(ServerError::NotFound(format!(
1188            "Codebase {} not found in workspace {}",
1189            codebase_id, workspace_id
1190        )));
1191    }
1192
1193    if codebase.repo_path.is_empty() {
1194        return Err(ServerError::BadRequest(
1195            "Codebase has no repository path".to_string(),
1196        ));
1197    }
1198
1199    let tree = scan_repo_tree(&codebase.repo_path);
1200    let source_type = codebase
1201        .source_type
1202        .as_ref()
1203        .map(CodebaseSourceType::as_str)
1204        .unwrap_or("local");
1205    let summary = compute_summary(&tree, source_type, codebase.branch.as_deref());
1206    let anchors = extract_architecture_anchors(&tree);
1207    let modules = build_repowiki_modules(&tree);
1208
1209    let source_links = anchors
1210        .iter()
1211        .filter_map(|anchor| {
1212            anchor.get("path").map(|path| {
1213                serde_json::json!({
1214                    "label": path,
1215                    "path": path,
1216                })
1217            })
1218        })
1219        .chain(modules.iter().map(|module| {
1220            serde_json::json!({
1221                "label": module
1222                    .get("name")
1223                    .and_then(|value| value.as_str())
1224                    .unwrap_or("."),
1225                "path": module
1226                    .get("path")
1227                    .and_then(|value| value.as_str())
1228                    .unwrap_or("."),
1229            })
1230        }))
1231        .collect::<Vec<_>>();
1232
1233    let top_level_folders = summary.top_level_folders.clone();
1234    let storyline_context = build_repowiki_storyline_context(&tree, &anchors);
1235
1236    Ok(Json(serde_json::json!({
1237        "codebase": {
1238            "id": codebase.id,
1239            "workspaceId": codebase.workspace_id,
1240            "label": codebase.label,
1241            "repoPath": codebase.repo_path,
1242            "sourceType": source_type,
1243            "sourceUrl": codebase.source_url,
1244            "branch": codebase.branch,
1245        },
1246        "summary": {
1247            "totalFiles": summary.total_files,
1248            "totalDirectories": summary.total_directories,
1249            "topLevelFolders": summary.top_level_folders,
1250            "sourceType": summary.source_type,
1251            "branch": summary.branch,
1252            "repositoryRoleSummary": build_repository_role_summary(&top_level_folders),
1253        },
1254        "anchors": anchors,
1255        "modules": modules,
1256        "architecture": {
1257            "runtimeBoundaries": build_runtime_boundaries(&top_level_folders),
1258            "crossLayerRelationships": build_cross_layer_relationships(&top_level_folders),
1259        },
1260        "workflows": build_repowiki_workflows(&top_level_folders),
1261        "glossary": build_repowiki_glossary(&top_level_folders),
1262        "sourceLinks": source_links,
1263        "storylineContext": storyline_context,
1264    })))
1265}
1266
#[cfg(test)]
mod tests {
    use super::*;

    /// Build a leaf (file) node for fixtures.
    fn make_file(name: &str, path: &str) -> RepoTreeNode {
        RepoTreeNode {
            name: name.to_string(),
            path: path.to_string(),
            node_type: "file".to_string(),
            children: None,
            file_count: None,
        }
    }

    /// Build a directory node with the given children for fixtures.
    fn make_dir(
        name: &str,
        path: &str,
        file_count: u64,
        children: Vec<RepoTreeNode>,
    ) -> RepoTreeNode {
        RepoTreeNode {
            name: name.to_string(),
            path: path.to_string(),
            node_type: "directory".to_string(),
            children: Some(children),
            file_count: Some(file_count),
        }
    }

    /// Small repository fixture: nested docs, a false-positive file name
    /// (`packagefoo.js`), and `app` both at top level and under `src/`.
    fn sample_tree() -> RepoTreeNode {
        let docs = make_dir(
            "docs",
            "docs",
            2,
            vec![
                make_file("ARCHITECTURE.md", "docs/ARCHITECTURE.md"),
                make_dir(
                    "adr",
                    "docs/adr",
                    1,
                    vec![make_file("README.md", "docs/adr/README.md")],
                ),
            ],
        );
        let src = make_dir(
            "src",
            "src",
            2,
            vec![make_dir(
                "app",
                "src/app",
                1,
                vec![make_file("page.tsx", "src/app/page.tsx")],
            )],
        );
        make_dir(
            "repo",
            ".",
            6,
            vec![
                make_file("README.md", "README.md"),
                make_file("packagefoo.js", "packagefoo.js"),
                make_dir("app", "app", 1, vec![make_file("page.tsx", "app/page.tsx")]),
                docs,
                src,
            ],
        )
    }

    #[test]
    fn repowiki_extract_architecture_anchors_includes_nested_docs_and_skips_false_positives() {
        let anchors = extract_architecture_anchors(&sample_tree());

        let paths: Vec<&str> = anchors
            .iter()
            .filter_map(|anchor| anchor.get("path").and_then(serde_json::Value::as_str))
            .collect();

        // Doc anchors are picked up at any nesting depth…
        for expected in ["README.md", "docs/ARCHITECTURE.md", "docs/adr/README.md"] {
            assert!(paths.contains(&expected));
        }
        // …while name lookalikes such as packagefoo.js are ignored.
        assert!(!paths.contains(&"packagefoo.js"));
    }

    #[test]
    fn repowiki_storyline_and_modules_match_expected_semantics() {
        let tree = sample_tree();
        let anchors = extract_architecture_anchors(&tree);
        let modules = build_repowiki_modules(&tree);
        let storyline = build_repowiki_storyline_context(&tree, &anchors);

        // The top-level `app` module is classified as the UI layer.
        let app_module = modules
            .iter()
            .find(|module| {
                module.get("path").and_then(serde_json::Value::as_str) == Some("app")
            })
            .expect("expected app module");
        let role = app_module
            .get("role")
            .and_then(serde_json::Value::as_str)
            .expect("expected role");
        assert_eq!(role, "User-facing application layer.");

        // Storyline entry points list documentation files, never directories.
        let entry_paths: Vec<&str> = storyline
            .get("entryPoints")
            .and_then(serde_json::Value::as_array)
            .expect("expected entry points")
            .iter()
            .filter_map(serde_json::Value::as_str)
            .collect();
        assert!(entry_paths.contains(&"README.md"));
        assert!(entry_paths.contains(&"docs/ARCHITECTURE.md"));
        assert!(!entry_paths.contains(&"docs"));
    }
}