1use anyhow::Result;
11use deadpool_postgres::{Config, ManagerConfig, RecyclingMethod, Runtime};
12use fraiseql_core::{
13 compiler::fact_table::{DatabaseIntrospector, FactTableDetector, FactTableMetadata},
14 db::PostgresIntrospector,
15};
16use serde_json::json;
17use tokio_postgres::NoTls;
18
19use crate::output::OutputFormatter;
20
/// Output format for the generated fact-table suggestions.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[non_exhaustive]
pub enum OutputFormat {
    /// Python `@fraiseql.fact_table` decorator snippets.
    Python,
    /// Machine-readable JSON metadata.
    Json,
}

impl OutputFormat {
    /// Parses a user-supplied format name (case-insensitive).
    ///
    /// Accepts `"python"` / `"py"` for [`OutputFormat::Python`] and `"json"`
    /// for [`OutputFormat::Json`].
    ///
    /// # Errors
    ///
    /// Returns a human-readable message listing the valid formats when `s`
    /// is not recognized.
    pub fn parse(s: &str) -> std::result::Result<Self, String> {
        match s.to_lowercase().as_str() {
            "python" | "py" => Ok(Self::Python),
            "json" => Ok(Self::Json),
            _ => Err(format!("Invalid format '{s}', expected: python, json")),
        }
    }
}

impl std::str::FromStr for OutputFormat {
    type Err = String;

    /// Delegates to [`OutputFormat::parse`] so `"json".parse::<OutputFormat>()`
    /// and clap-style `value_parser` integration work as well.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Self::parse(s)
    }
}
45
46async fn create_introspector(database_url: &str) -> Result<PostgresIntrospector> {
48 let mut cfg = Config::new();
49 cfg.url = Some(database_url.to_string());
50 cfg.manager = Some(ManagerConfig {
51 recycling_method: RecyclingMethod::Fast,
52 });
53 cfg.pool = Some(deadpool_postgres::PoolConfig::new(2));
54
55 let pool = cfg
56 .create_pool(Some(Runtime::Tokio1), NoTls)
57 .map_err(|e| anyhow::anyhow!("Failed to create database pool: {e}"))?;
58
59 let _client = pool
61 .get()
62 .await
63 .map_err(|e| anyhow::anyhow!("Failed to connect to database: {e}"))?;
64
65 Ok(PostgresIntrospector::new(pool))
66}
67
68pub async fn run(
86 database_url: &str,
87 format: OutputFormat,
88 formatter: &OutputFormatter,
89) -> Result<()> {
90 formatter.progress("Introspecting database for fact tables...");
91 formatter.progress(&format!(" Database: {database_url}"));
92
93 let introspector = create_introspector(database_url).await?;
95
96 let fact_tables = introspector
98 .list_fact_tables()
99 .await
100 .map_err(|e| anyhow::anyhow!("Failed to list fact tables: {e}"))?;
101
102 if fact_tables.is_empty() {
103 formatter.progress("\nwarn: No fact tables found (tables starting with 'tf_')");
104 formatter.progress(" Fact tables should be named like: tf_sales, tf_events, tf_orders");
105 return Ok(());
106 }
107
108 formatter.progress(&format!("\nFound {} fact table(s):", fact_tables.len()));
109 for table in &fact_tables {
110 formatter.progress(&format!(" - {table}"));
111 }
112 formatter.progress("");
113
114 let mut metadata_list: Vec<FactTableMetadata> = Vec::new();
116 let mut errors: Vec<(String, String)> = Vec::new();
117
118 for table_name in &fact_tables {
119 match FactTableDetector::introspect(&introspector, table_name).await {
120 Ok(metadata) => {
121 metadata_list.push(metadata);
122 },
123 Err(e) => {
124 errors.push((table_name.clone(), e.to_string()));
125 },
126 }
127 }
128
129 if !errors.is_empty() {
131 formatter.progress(&format!("warn: Failed to introspect {} table(s):", errors.len()));
132 for (table, error) in &errors {
133 formatter.progress(&format!(" - {table}: {error}"));
134 }
135 formatter.progress("");
136 }
137
138 match format {
140 OutputFormat::Python => {
141 println!("\n# Suggested fact table decorators:");
142 println!("# (Copy and paste into your Python schema)");
143 println!("# Generated by: fraiseql introspect facts");
144 println!();
145 println!("import fraiseql");
146 println!();
147
148 for metadata in &metadata_list {
149 println!("{}", format_as_python(metadata));
150 println!();
151 }
152 },
153 OutputFormat::Json => {
154 let output: serde_json::Value = metadata_list
155 .iter()
156 .map(|m| {
157 (
158 m.table_name.clone(),
159 json!({
160 "table_name": m.table_name,
161 "measures": m.measures.iter().map(|measure| {
162 json!({
163 "name": measure.name,
164 "sql_type": format!("{:?}", measure.sql_type),
165 "nullable": measure.nullable
166 })
167 }).collect::<Vec<_>>(),
168 "dimensions": {
169 "name": m.dimensions.name,
170 "paths": m.dimensions.paths.iter().map(|p| {
171 json!({
172 "name": p.name,
173 "json_path": p.json_path,
174 "data_type": p.data_type
175 })
176 }).collect::<Vec<_>>()
177 },
178 "denormalized_filters": m.denormalized_filters.iter().map(|f| {
179 json!({
180 "name": f.name,
181 "sql_type": format!("{:?}", f.sql_type),
182 "indexed": f.indexed
183 })
184 }).collect::<Vec<_>>(),
185 "calendar_dimensions": m.calendar_dimensions.iter().map(|c| {
186 json!({
187 "source_column": c.source_column,
188 "granularities": c.granularities.iter().map(|g| {
189 json!({
190 "column_name": g.column_name,
191 "buckets": g.buckets.iter().map(|b| {
192 json!({
193 "json_key": b.json_key,
194 "bucket_type": format!("{:?}", b.bucket_type)
195 })
196 }).collect::<Vec<_>>()
197 })
198 }).collect::<Vec<_>>()
199 })
200 }).collect::<Vec<_>>()
201 }),
202 )
203 })
204 .collect::<serde_json::Map<String, serde_json::Value>>()
205 .into();
206
207 println!("{}", serde_json::to_string_pretty(&output)?);
208 },
209 }
210
211 formatter.progress("\nok: Introspection complete");
212 formatter.progress(&format!(" {} table(s) introspected successfully", metadata_list.len()));
213 if !errors.is_empty() {
214 formatter.progress(&format!(" {} table(s) failed", errors.len()));
215 }
216
217 Ok(())
218}
219
220fn format_as_python(metadata: &FactTableMetadata) -> String {
222 let mut output = String::new();
223
224 let measures: Vec<String> = metadata.measures.iter().map(|m| format!("'{}'", m.name)).collect();
226
227 let filters: Vec<String> =
229 metadata.denormalized_filters.iter().map(|f| format!("'{}'", f.name)).collect();
230
231 let class_name = metadata
233 .table_name
234 .strip_prefix("tf_")
235 .unwrap_or(&metadata.table_name)
236 .split('_')
237 .map(|s| {
238 let mut c = s.chars();
239 match c.next() {
240 None => String::new(),
241 Some(f) => f.to_uppercase().collect::<String>() + c.as_str(),
242 }
243 })
244 .collect::<String>();
245
246 output.push_str(&format!("# Fact table: {}\n", metadata.table_name));
248 output.push_str("@fraiseql.fact_table(\n");
249 output.push_str(&format!(" measures=[{}],\n", measures.join(", ")));
250 output.push_str(&format!(" dimensions='{}',\n", metadata.dimensions.name));
251
252 if !filters.is_empty() {
253 output.push_str(&format!(" filters=[{}],\n", filters.join(", ")));
254 }
255
256 if !metadata.calendar_dimensions.is_empty() {
258 let calendar_cols: Vec<String> = metadata
259 .calendar_dimensions
260 .iter()
261 .map(|c| format!("'{}'", c.source_column))
262 .collect();
263 output.push_str(&format!(" calendar_columns=[{}],\n", calendar_cols.join(", ")));
264 }
265
266 output.push_str(")\n");
267 output.push_str(&format!("class {class_name}:\n"));
268 output.push_str(&format!(
269 " \"\"\"Fact table: {} ({} measures, {} filters)\"\"\"\n",
270 metadata.table_name,
271 metadata.measures.len(),
272 metadata.denormalized_filters.len()
273 ));
274 output.push_str(" pass");
275
276 output
277}
278
#[cfg(test)]
mod tests {
    use super::*;

    /// Accepted spellings map to their variants; unknown names are rejected.
    #[test]
    fn test_output_format_from_str() {
        assert!(matches!(OutputFormat::parse("python"), Ok(OutputFormat::Python)));
        assert!(matches!(OutputFormat::parse("json"), Ok(OutputFormat::Json)));
        assert!(
            OutputFormat::parse("invalid").is_err(),
            "expected Err for unknown output format 'invalid'"
        );
    }

    /// The rendered Python stub mentions every measure and filter, and the
    /// class name `Sales` is derived from the table name `tf_sales`.
    #[test]
    fn test_format_as_python() {
        use fraiseql_core::compiler::fact_table::{
            DimensionColumn, FilterColumn, MeasureColumn, SqlType,
        };

        let revenue = MeasureColumn {
            name: "revenue".to_string(),
            sql_type: SqlType::Decimal,
            nullable: false,
        };
        let quantity = MeasureColumn {
            name: "quantity".to_string(),
            sql_type: SqlType::Int,
            nullable: false,
        };
        let customer = FilterColumn {
            name: "customer_id".to_string(),
            sql_type: SqlType::Uuid,
            indexed: true,
        };
        let metadata = FactTableMetadata {
            table_name: "tf_sales".to_string(),
            measures: vec![revenue, quantity],
            dimensions: DimensionColumn {
                name: "data".to_string(),
                paths: vec![],
            },
            denormalized_filters: vec![customer],
            calendar_dimensions: vec![],
        };

        let rendered = format_as_python(&metadata);
        assert!(rendered.contains("@fraiseql.fact_table"));
        assert!(rendered.contains("'revenue'"));
        assert!(rendered.contains("'quantity'"));
        assert!(rendered.contains("'customer_id'"));
        assert!(rendered.contains("class Sales:"));
    }
}