ai_workbench_lib/modules/job_processor/markdown.rs

//! Markdown generation utilities for job processor output

use std::collections::HashMap;
use std::fmt::Write;

use crate::JobConfig;

use super::types::FileChunkResult;

/// A builder for creating structured markdown documents.
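///
/// # Example
///
/// A minimal usage sketch (marked `ignore` so it is not compiled as a doctest;
/// the exact import path depends on how this module is re-exported, and the
/// metadata value is illustrative):
///
/// ```ignore
/// let mut builder = MarkdownBuilder::new();
/// builder
///     .heading(1, "Report")
///     .metadata("Job ID", "job-123") // illustrative value
///     .paragraph("Body text.");
/// let markdown = builder.build();
/// assert!(markdown.starts_with("# Report"));
/// ```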
pub struct MarkdownBuilder {
    content: String,
}

impl MarkdownBuilder {
    pub fn new() -> Self {
        Self {
            // Preallocate to reduce reallocations for typical report sizes
            content: String::with_capacity(16384),
        }
    }

    /// Appends a heading of the given level (e.g. `## text` for level 2).
    pub fn heading(&mut self, level: u8, text: &str) -> &mut Self {
        let prefix = "#".repeat(level as usize);
        writeln!(self.content, "{} {}\n", prefix, text).unwrap();
        self
    }

    /// Appends a paragraph followed by a blank line.
    pub fn paragraph(&mut self, text: &str) -> &mut Self {
        writeln!(self.content, "{}\n", text).unwrap();
        self
    }

    /// Appends a fenced code block.
    #[allow(dead_code)]
    pub fn code_block(&mut self, text: &str) -> &mut Self {
        writeln!(self.content, "```\n{}\n```\n", text).unwrap();
        self
    }

    /// Appends a bolded `**key:** value` metadata line.
    pub fn metadata(&mut self, key: &str, value: &str) -> &mut Self {
        writeln!(self.content, "**{}:** {}\n", key, value).unwrap();
        self
    }

    /// Appends a horizontal rule.
    pub fn horizontal_rule(&mut self) -> &mut Self {
        writeln!(self.content, "---\n").unwrap();
        self
    }

    /// Appends a heading prefixed with an emoji.
    pub fn emoji_heading(&mut self, level: u8, emoji: &str, text: &str) -> &mut Self {
        let prefix = "#".repeat(level as usize);
        writeln!(self.content, "{} {} {}\n", prefix, emoji, text).unwrap();
        self
    }

    /// Consumes the builder and returns the accumulated markdown.
    pub fn build(self) -> String {
        self.content
    }
}

impl Default for MarkdownBuilder {
    fn default() -> Self {
        Self::new()
    }
}

/// Utilities for formatting job processor output as markdown
pub struct MarkdownFormatter;

impl MarkdownFormatter {
    /// Formats the analysis results as well-structured Markdown using the builder.
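    ///
    /// The generated document has roughly this shape (illustrative outline
    /// derived from the calls below; `...` marks content that varies per job):
    ///
    /// ```text
    /// # File Analysis Results
    /// **Job ID:** ...
    /// **Model:** ...
    ///
    /// ## Processing Summary
    /// ...
    /// ---
    ///
    /// ## 📁 File: `path/to/file`
    /// **Type:** ...
    /// **Chunks:** ...
    /// ...
    /// ---
    ///
    /// # 📊 Final Analysis Summary
    /// ...
    /// ---
    ///
    /// *Analysis completed: Unix timestamp ...*
    /// ```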
    pub fn format_as_markdown(
        config: &JobConfig,
        files_summary: &str,
        file_groups: &HashMap<String, Vec<&FileChunkResult>>,
        final_summary: &str,
    ) -> String {
        let mut builder = MarkdownBuilder::new();

        // Header with job metadata
        builder
            .heading(1, "File Analysis Results")
            .metadata("Job ID", &config.job_id)
            .metadata("Model", &config.model_id)
            .paragraph("");

        // Processing summary
        let clean_summary = MarkdownFormatter::clean_processing_summary(files_summary);
        builder
            .heading(2, "Processing Summary")
            .paragraph(&clean_summary)
            .horizontal_rule();

        // Individual file analyses
        MarkdownFormatter::add_file_analyses(&mut builder, file_groups);

        // Final summary
        if !final_summary.trim().is_empty() {
            builder
                .emoji_heading(1, "📊", "Final Analysis Summary")
                .paragraph(final_summary);
        }

        // Footer
        let timestamp = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs();

        builder
            .horizontal_rule()
            .paragraph(&format!("*Analysis completed: Unix timestamp {}*", timestamp));

        builder.build()
    }

    /// Formats the individual file analyses section in markdown.
    pub fn add_file_analyses(
        builder: &mut MarkdownBuilder,
        file_groups: &HashMap<String, Vec<&FileChunkResult>>,
    ) {
        // Sort files for consistent output
        let mut sorted_files: Vec<_> = file_groups.iter().collect();
        sorted_files.sort_by_key(|(path, _)| path.as_str());

        for (file_path, file_results) in sorted_files {
            if file_results.is_empty() {
                continue;
            }

            let file_type = &file_results[0].file_type;
            builder
                .emoji_heading(2, "📁", &format!("File: `{}`", file_path))
                .metadata("Type", file_type)
                .metadata("Chunks", &file_results.len().to_string())
                .paragraph("");

            for result in file_results {
                // Only label chunks when a file was split into more than one
                if file_results.len() > 1 {
                    builder.heading(3, &format!(
                        "Chunk {} (Tokens: {})",
                        result.chunk_id + 1,
                        result.tokens_used.as_ref().map(|t| t.total_tokens).unwrap_or(0)
                    ));
                }

                let cleaned_output = Self::clean_ai_output(&result.output);
                builder.paragraph(&cleaned_output);
            }

            builder.horizontal_rule();
        }
    }

    /// Cleans up AI output for better markdown formatting.
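    ///
    /// # Example
    ///
    /// A sketch of the normalization (marked `ignore` so it is not compiled as
    /// a doctest; the exact import path depends on how this module is exposed):
    ///
    /// ```ignore
    /// let cleaned = MarkdownFormatter::clean_ai_output("=== Overview\nSome text ---");
    /// assert_eq!(cleaned, "### Overview\nSome text \n---");
    /// ```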
    pub fn clean_ai_output(output: &str) -> String {
        output
            // Promote "=== " pseudo-headings to markdown level-3 headings
            .replace("=== ", "### ")
            // Give horizontal rules their own line
            .replace("---", "\n---\n")
            .trim()
            .to_string()
    }

    /// Cleans the processing summary text.
    pub fn clean_processing_summary(files_summary: &str) -> String {
        files_summary
            .replace("Processing Summary:", "")
            .trim()
            .to_string()
    }
}