fraiseql_cli/commands/compile.rs

//! Schema compilation command
//!
//! Compiles schema.json (from Python/TypeScript/etc.) into optimized schema.compiled.json
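//!
//! A typical invocation (see [`run`] for the full list of supported workflows):
//!
//! ```text
//! fraiseql compile fraiseql.toml
//! ```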

use std::{fs, path::Path};

use anyhow::{Context, Result};
use fraiseql_core::schema::CompiledSchema;
use tracing::{info, warn};

use crate::{
    config::FraiseQLConfig,
    schema::{IntermediateSchema, SchemaConverter, SchemaOptimizer, SchemaValidator},
};

/// Run the compile command
///
/// # Arguments
///
/// * `input` - Path to fraiseql.toml (TOML) or schema.json (legacy)
/// * `types` - Optional path to types.json (when using TOML workflow)
/// * `schema_dir` - Optional directory for auto-discovery of schema files
/// * `type_files` - Optional vector of explicit type file paths
/// * `query_files` - Optional vector of explicit query file paths
/// * `mutation_files` - Optional vector of explicit mutation file paths
/// * `output` - Path to write schema.compiled.json
/// * `check` - If true, validate only without writing output
/// * `database` - Optional database URL for indexed column validation
///
/// # Workflows
///
/// 1. TOML-only: `fraiseql compile fraiseql.toml`
/// 2. Language + TOML: `fraiseql compile fraiseql.toml --types types.json`
/// 3. Multi-file auto-discovery: `fraiseql compile fraiseql.toml --schema-dir schema/`
/// 4. Multi-file explicit: `fraiseql compile fraiseql.toml --type-file a.json --type-file b.json`
/// 5. Legacy JSON: `fraiseql compile schema.json`
///
/// # Errors
///
/// Returns an error if:
/// - Input file doesn't exist or can't be read
/// - JSON/TOML parsing fails
/// - Schema validation fails
/// - Output file can't be written
/// - Database connection fails (when database URL is provided)
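///
/// # Example
///
/// A minimal sketch of driving the compile step programmatically rather than via
/// the CLI; the module path below mirrors this file's location and the file names
/// are placeholders, so treat it as illustrative:
///
/// ```ignore
/// # async fn example() -> anyhow::Result<()> {
/// // Workflow 1: TOML-only compile, writing schema.compiled.json.
/// fraiseql_cli::commands::compile::run(
///     "fraiseql.toml",        // input
///     None,                   // types
///     None,                   // schema_dir
///     Vec::new(),             // type_files
///     Vec::new(),             // query_files
///     Vec::new(),             // mutation_files
///     "schema.compiled.json", // output
///     false,                  // check: also write the output file
///     None,                   // database: skip indexed column validation
/// )
/// .await?;
/// # Ok(())
/// # }
/// ```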
#[allow(clippy::too_many_arguments)]
pub async fn run(
    input: &str,
    types: Option<&str>,
    schema_dir: Option<&str>,
    type_files: Vec<String>,
    query_files: Vec<String>,
    mutation_files: Vec<String>,
    output: &str,
    check: bool,
    database: Option<&str>,
) -> Result<()> {
    info!("Compiling schema: {input}");

    // 1. Determine workflow based on input file and options
    let input_path = Path::new(input);
    if !input_path.exists() {
        anyhow::bail!("Input file not found: {input}");
    }

    // Load schema based on file type and options
    let is_toml = input_path
        .extension()
        .and_then(|ext| ext.to_str())
        .is_some_and(|ext| ext.eq_ignore_ascii_case("toml"));
    let mut intermediate: IntermediateSchema = if is_toml {
        // TOML workflow (new)
        info!("Using TOML-based workflow");

        // Determine mode based on precedence:
        // 1. Explicit file lists (highest priority)
        // 2. --schema-dir auto-discovery
        // 3. Domain discovery (from TOML config)
        // 4. TOML includes (if configured)
        // 5. --types single file
        // 6. TOML-only (no external files)

        if !type_files.is_empty() || !query_files.is_empty() || !mutation_files.is_empty() {
            // Mode 1: Explicit file lists
            info!("Mode: Explicit file lists");
            crate::schema::SchemaMerger::merge_explicit_files(
                input,
                &type_files,
                &query_files,
                &mutation_files,
            )
            .context("Failed to load explicit schema files")?
        } else if let Some(dir) = schema_dir {
            // Mode 2: Auto-discovery directory
            info!("Mode: Auto-discovery from directory: {}", dir);
            crate::schema::SchemaMerger::merge_from_directory(input, dir)
                .context("Failed to load schema from directory")?
        } else if let Some(types_path) = types {
            // Mode 3: Single types.json file (backward compatible)
            info!("Mode: Language + TOML (types.json + fraiseql.toml)");
            crate::schema::SchemaMerger::merge_files(types_path, input)
                .context("Failed to merge types.json with TOML")?
        } else {
            // Try modes in order: domain discovery → includes → toml-only
            info!("Mode: TOML-based (checking for domain discovery...)");
            if let Ok(schema) = crate::schema::SchemaMerger::merge_from_domains(input) {
                schema
            } else {
                info!("No domains configured, checking for TOML includes...");
                if let Ok(schema) = crate::schema::SchemaMerger::merge_with_includes(input) {
                    schema
                } else {
                    info!("No includes configured, using TOML-only definitions");
                    crate::schema::SchemaMerger::merge_toml_only(input)
                        .context("Failed to load schema from TOML")?
                }
            }
        }
    } else {
        // Legacy JSON workflow
        info!("Using legacy JSON workflow");
        let schema_json = fs::read_to_string(input_path).context("Failed to read schema.json")?;

        // 2. Parse JSON into IntermediateSchema (language-agnostic format)
        info!("Parsing intermediate schema...");
        serde_json::from_str(&schema_json).context("Failed to parse schema.json")?
    };

    // 2a. Load and apply security configuration from fraiseql.toml if it exists
    if Path::new("fraiseql.toml").exists() {
        info!("Loading security configuration from fraiseql.toml...");
        match FraiseQLConfig::from_file("fraiseql.toml") {
            Ok(config) => {
                info!("Validating security configuration...");
                config.validate()?;

                info!("Applying security configuration to schema...");
                // Merge security config into intermediate schema
                let security_json = config.fraiseql.security.to_json();
                intermediate.security = Some(security_json);

                info!("Security configuration applied successfully");
            },
            Err(e) => {
                warn!("Failed to load fraiseql.toml: {e}");
                warn!("Continuing with default security configuration");
            },
        }
    } else {
        info!("No fraiseql.toml found, using default security configuration");
    }

    // 3. Validate intermediate schema
    info!("Validating schema structure...");
    let validation_report =
        SchemaValidator::validate(&intermediate).context("Failed to validate schema")?;

    if !validation_report.is_valid() {
        validation_report.print();
        anyhow::bail!("Schema validation failed with {} error(s)", validation_report.error_count());
    }

    // Print warnings if any
    if validation_report.warning_count() > 0 {
        validation_report.print();
    }

    // 4. Convert to CompiledSchema (validates and normalizes)
    info!("Converting to compiled format...");
    let mut schema = SchemaConverter::convert(intermediate)
        .context("Failed to convert schema to compiled format")?;

    // 5. Optimize schema and generate SQL hints
    info!("Analyzing schema for optimization opportunities...");
    let optimization_report =
        SchemaOptimizer::optimize(&mut schema).context("Failed to optimize schema")?;

    // 5b. Optional: Validate indexed columns against database
    if let Some(db_url) = database {
        info!("Validating indexed columns against database...");
        validate_indexed_columns(&schema, db_url).await?;
    }

    // 6. If check-only mode, stop here
    if check {
        println!("✓ Schema is valid");
        println!("  Types: {}", schema.types.len());
        println!("  Queries: {}", schema.queries.len());
        println!("  Mutations: {}", schema.mutations.len());

        // Print optimization suggestions
        optimization_report.print();

        return Ok(());
    }

    // 7. Write compiled schema
    info!("Writing compiled schema to: {output}");
    let output_json =
        serde_json::to_string_pretty(&schema).context("Failed to serialize compiled schema")?;

    fs::write(output, output_json).context("Failed to write compiled schema")?;

    // 8. Success message
    println!("✓ Schema compiled successfully");
    println!("  Input:  {input}");
    println!("  Output: {output}");
    println!("  Types: {}", schema.types.len());
    println!("  Queries: {}", schema.queries.len());
    println!("  Mutations: {}", schema.mutations.len());

    // Print optimization suggestions
    optimization_report.print();

    Ok(())
}

/// Validate indexed columns against database views.
///
/// Connects to the database and introspects view columns to verify that
/// any indexed column naming conventions are properly set up.
///
/// # Arguments
///
/// * `schema` - The compiled schema to validate
/// * `db_url` - Database connection URL
///
/// # Errors
///
/// Returns an error if the database connection fails. Warnings are printed for
/// missing indexed columns but do not cause validation to fail.
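///
/// # Example output
///
/// A sketch of the summary printed after the checks (counts are illustrative):
///
/// ```text
/// ✓ Indexed column validation complete
///   Views checked: 3
///   Indexed columns found: 5
/// ```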
async fn validate_indexed_columns(schema: &CompiledSchema, db_url: &str) -> Result<()> {
    use deadpool_postgres::{Config, ManagerConfig, RecyclingMethod, Runtime};
    use fraiseql_core::db::postgres::PostgresIntrospector;
    use tokio_postgres::NoTls;

    // Create pool for introspection
    let mut cfg = Config::new();
    cfg.url = Some(db_url.to_string());
    cfg.manager = Some(ManagerConfig {
        recycling_method: RecyclingMethod::Fast,
    });
    cfg.pool = Some(deadpool_postgres::PoolConfig::new(2));

    let pool = cfg
        .create_pool(Some(Runtime::Tokio1), NoTls)
        .context("Failed to create connection pool for indexed column validation")?;

    let introspector = PostgresIntrospector::new(pool);

    let mut total_indexed = 0;
    let mut total_views = 0;

    // Check each query's sql_source (view)
    for query in &schema.queries {
        if let Some(view_name) = &query.sql_source {
            total_views += 1;

            // Get indexed columns for this view
            match introspector.get_indexed_nested_columns(view_name).await {
                Ok(indexed_cols) => {
                    if !indexed_cols.is_empty() {
                        info!(
                            "View '{}': found {} indexed column(s): {:?}",
                            view_name,
                            indexed_cols.len(),
                            indexed_cols
                        );
                        total_indexed += indexed_cols.len();
                    }
                },
                Err(e) => {
                    warn!(
                        "Could not introspect view '{}': {}. Skipping indexed column check.",
                        view_name, e
                    );
                },
            }
        }
    }

    println!("✓ Indexed column validation complete");
    println!("  Views checked: {total_views}");
    println!("  Indexed columns found: {total_indexed}");

    Ok(())
}

#[cfg(test)]
mod tests {
    use std::collections::HashMap;

    use fraiseql_core::schema::{
        AutoParams, CompiledSchema, FieldDefinition, FieldType, QueryDefinition, TypeDefinition,
    };
    use fraiseql_core::validation::CustomTypeRegistry;

    #[test]
    fn test_validate_schema_success() {
        let schema = CompiledSchema {
            types:         vec![TypeDefinition {
                name:                "User".to_string(),
                fields:              vec![
                    FieldDefinition {
                        name:           "id".to_string(),
                        field_type:     FieldType::Int,
                        nullable:       false,
                        default_value:  None,
                        description:    None,
                        vector_config:  None,
                        alias:          None,
                        deprecation:    None,
                        requires_scope: None,
                    },
                    FieldDefinition {
                        name:           "name".to_string(),
                        field_type:     FieldType::String,
                        nullable:       false,
                        default_value:  None,
                        description:    None,
                        vector_config:  None,
                        alias:          None,
                        deprecation:    None,
                        requires_scope: None,
                    },
                ],
                description:         Some("User type".to_string()),
                sql_source:          String::new(),
                jsonb_column:        String::new(),
                sql_projection_hint: None,
                implements:          vec![],
            }],
            queries:       vec![QueryDefinition {
                name:         "users".to_string(),
                return_type:  "User".to_string(),
                returns_list: true,
                nullable:     false,
                arguments:    vec![],
                sql_source:   Some("v_user".to_string()),
                description:  Some("Get users".to_string()),
                auto_params:  AutoParams::default(),
                deprecation:  None,
                jsonb_column: "data".to_string(),
            }],
            enums:         vec![],
            input_types:   vec![],
            interfaces:    vec![],
            unions:        vec![],
            mutations:     vec![],
            subscriptions: vec![],
            directives:    vec![],
            observers:     Vec::new(),
            fact_tables:   HashMap::default(),
            federation:    None,
            security:      None,
            schema_sdl:    None,
            custom_scalars: CustomTypeRegistry::default(),
        };

        // Validation is done inside SchemaConverter::convert, not exposed separately
        // This test just verifies we can build a valid schema structure
        assert_eq!(schema.types.len(), 1);
        assert_eq!(schema.queries.len(), 1);
    }

    #[test]
    fn test_validate_schema_unknown_type() {
        let schema = CompiledSchema {
            types:         vec![],
            enums:         vec![],
            input_types:   vec![],
            interfaces:    vec![],
            unions:        vec![],
            queries:       vec![QueryDefinition {
                name:         "users".to_string(),
                return_type:  "UnknownType".to_string(),
                returns_list: true,
                nullable:     false,
                arguments:    vec![],
                sql_source:   Some("v_user".to_string()),
                description:  Some("Get users".to_string()),
                auto_params:  AutoParams::default(),
                deprecation:  None,
                jsonb_column: "data".to_string(),
            }],
            mutations:     vec![],
            subscriptions: vec![],
            directives:    vec![],
            observers:     Vec::new(),
            fact_tables:   HashMap::default(),
            federation:    None,
            security:      None,
            schema_sdl:    None,
            custom_scalars: CustomTypeRegistry::default(),
        };

        // Note: Validation is private to SchemaConverter
        // This test demonstrates the schema structure with an invalid type
        assert_eq!(schema.types.len(), 0);
        assert_eq!(schema.queries[0].return_type, "UnknownType");
    }
}