// fraiseql_cli/commands/compile.rs

1//! Schema compilation command
2//!
3//! Compiles schema.json (from Python/TypeScript/etc.) into optimized schema.compiled.json
4
5use std::{fs, path::Path};
6
7use anyhow::{Context, Result};
8use fraiseql_core::schema::{CURRENT_SCHEMA_FORMAT_VERSION, CompiledSchema};
9use tracing::{info, warn};
10
11use crate::{
12    config::TomlProjectConfig,
13    schema::{
14        IntermediateSchema, OptimizationReport, SchemaConverter, SchemaOptimizer, SchemaValidator,
15    },
16};
17
18/// Input source configuration for schema compilation.
/// Input source configuration for schema compilation.
///
/// Built either directly (CLI entry point) or via the `new`/`with_*`
/// builder methods. `Default` yields an empty input path and no
/// optional sources configured.
#[derive(Debug, Default)]
pub struct CompileOptions<'a> {
    /// Path to `fraiseql.toml` (TOML workflow) or `schema.json` (legacy).
    pub input:          &'a str,
    /// Optional path to `types.json` (TOML workflow, backward compat).
    pub types:          Option<&'a str>,
    /// Optional directory for schema file auto-discovery.
    pub schema_dir:     Option<&'a str>,
    /// Explicit type file paths (highest priority).
    pub type_files:     Vec<String>,
    /// Explicit query file paths.
    pub query_files:    Vec<String>,
    /// Explicit mutation file paths.
    pub mutation_files: Vec<String>,
    /// Optional database URL for indexed column validation.
    pub database:       Option<&'a str>,
}
36
37impl<'a> CompileOptions<'a> {
38    /// Create new compile options with just the input path.
39    #[must_use]
40    pub fn new(input: &'a str) -> Self {
41        Self {
42            input,
43            ..Default::default()
44        }
45    }
46
47    /// Set the types path.
48    #[must_use]
49    pub fn with_types(mut self, types: &'a str) -> Self {
50        self.types = Some(types);
51        self
52    }
53
54    /// Set the schema directory for auto-discovery.
55    #[must_use]
56    pub fn with_schema_dir(mut self, schema_dir: &'a str) -> Self {
57        self.schema_dir = Some(schema_dir);
58        self
59    }
60
61    /// Set the database URL for validation.
62    #[must_use]
63    pub fn with_database(mut self, database: &'a str) -> Self {
64        self.database = Some(database);
65        self
66    }
67}
68
69/// Select and execute the appropriate schema-loading strategy for TOML-based workflows.
70///
71/// Tries strategies in priority order:
72/// 1. Explicit file lists (highest priority)
73/// 2. Directory auto-discovery
74/// 3. Single types file (backward-compatible)
75/// 4. Domain discovery → TOML includes → TOML-only (fallback sequence)
76#[allow(clippy::cognitive_complexity)] // Reason: multi-strategy schema discovery with fallback chain
77fn load_intermediate_schema(
78    toml_path: &str,
79    type_files: &[String],
80    query_files: &[String],
81    mutation_files: &[String],
82    schema_dir: Option<&str>,
83    types_path: Option<&str>,
84) -> Result<IntermediateSchema> {
85    if !type_files.is_empty() || !query_files.is_empty() || !mutation_files.is_empty() {
86        info!("Mode: Explicit file lists");
87        return crate::schema::SchemaMerger::merge_explicit_files(
88            toml_path,
89            type_files,
90            query_files,
91            mutation_files,
92        )
93        .context("Failed to load explicit schema files");
94    }
95    if let Some(dir) = schema_dir {
96        info!("Mode: Auto-discovery from directory: {}", dir);
97        return crate::schema::SchemaMerger::merge_from_directory(toml_path, dir)
98            .context("Failed to load schema from directory");
99    }
100    if let Some(types) = types_path {
101        info!("Mode: Language + TOML (types.json + fraiseql.toml)");
102        return crate::schema::SchemaMerger::merge_files(types, toml_path)
103            .context("Failed to merge types.json with TOML");
104    }
105    info!("Mode: TOML-based (checking for domain discovery...)");
106    if let Ok(schema) = crate::schema::SchemaMerger::merge_from_domains(toml_path) {
107        return Ok(schema);
108    }
109    info!("No domains configured, checking for TOML includes...");
110    if let Ok(schema) = crate::schema::SchemaMerger::merge_with_includes(toml_path) {
111        return Ok(schema);
112    }
113    info!("No includes configured, using TOML-only definitions");
114    crate::schema::SchemaMerger::merge_toml_only(toml_path)
115        .context("Failed to load schema from TOML")
116}
117
118/// Compile a schema to `CompiledSchema` without writing to disk.
119///
120/// This is the core compilation logic, shared between `compile` (which writes to disk)
121/// and `run` (which serves in-memory without any file artifacts).
122///
123/// # Arguments
124///
125/// * `opts` - Compilation options including input paths and configuration
126///
127/// # Errors
128///
129/// Returns error if input is missing, parsing fails, validation fails, or the database
130/// connection fails (when `database` is provided).
131#[allow(clippy::cognitive_complexity)] // Reason: end-to-end compilation pipeline with validation, introspection, and output stages
132pub async fn compile_to_schema(
133    opts: CompileOptions<'_>,
134) -> Result<(CompiledSchema, OptimizationReport)> {
135    info!("Compiling schema: {}", opts.input);
136
137    // 1. Determine workflow based on input file and options
138    let input_path = Path::new(opts.input);
139    if !input_path.exists() {
140        anyhow::bail!("Input file not found: {}", opts.input);
141    }
142
143    // Load schema based on file type and options
144    let is_toml = input_path
145        .extension()
146        .and_then(|ext| ext.to_str())
147        .is_some_and(|ext| ext.eq_ignore_ascii_case("toml"));
148    let mut intermediate: IntermediateSchema = if is_toml {
149        info!("Using TOML-based workflow");
150        load_intermediate_schema(
151            opts.input,
152            &opts.type_files,
153            &opts.query_files,
154            &opts.mutation_files,
155            opts.schema_dir,
156            opts.types,
157        )?
158    } else {
159        // Legacy JSON workflow
160        info!("Using legacy JSON workflow");
161        let schema_json = fs::read_to_string(input_path).context("Failed to read schema.json")?;
162
163        // 2. Parse JSON into IntermediateSchema (language-agnostic format)
164        info!("Parsing intermediate schema...");
165        serde_json::from_str(&schema_json).context("Failed to parse schema.json")?
166    };
167
168    // 2a. Load and apply security configuration from fraiseql.toml if it exists.
169    // Skip when the input itself is a TomlSchema file: in that case the security
170    // settings are embedded in the TomlSchema, and the CWD fraiseql.toml uses a
171    // different TOML format (TomlSchema vs TomlProjectConfig) that is not compatible.
172    if !is_toml && Path::new("fraiseql.toml").exists() {
173        info!("Loading security configuration from fraiseql.toml...");
174        match TomlProjectConfig::from_file("fraiseql.toml") {
175            Ok(config) => {
176                info!("Validating security configuration...");
177                config.validate()?;
178
179                info!("Applying security configuration to schema...");
180                // Merge security config into intermediate schema
181                let security_json = config.fraiseql.security.to_json();
182                intermediate.security = Some(security_json);
183
184                info!("Security configuration applied successfully");
185            },
186            Err(e) => {
187                anyhow::bail!(
188                    "Failed to parse fraiseql.toml: {e}\n\
189                     Fix the configuration file or remove it to use defaults."
190                );
191            },
192        }
193    } else {
194        info!("No fraiseql.toml found, using default security configuration");
195    }
196
197    // 3. Validate intermediate schema
198    info!("Validating schema structure...");
199    let validation_report =
200        SchemaValidator::validate(&intermediate).context("Failed to validate schema")?;
201
202    if !validation_report.is_valid() {
203        validation_report.print();
204        anyhow::bail!("Schema validation failed with {} error(s)", validation_report.error_count());
205    }
206
207    // Print warnings if any
208    if validation_report.warning_count() > 0 {
209        validation_report.print();
210    }
211
212    // 4. Convert to CompiledSchema (validates and normalizes)
213    info!("Converting to compiled format...");
214    let mut schema = SchemaConverter::convert(intermediate)
215        .context("Failed to convert schema to compiled format")?;
216
217    // 5. Optimize schema and generate SQL hints (mutates schema in place, report for display)
218    info!("Analyzing schema for optimization opportunities...");
219    let report = SchemaOptimizer::optimize(&mut schema).context("Failed to optimize schema")?;
220
221    // 5a. Stamp schema format version for runtime compatibility checks.
222    schema.schema_format_version = Some(CURRENT_SCHEMA_FORMAT_VERSION);
223
224    // 5b. Optional: Validate indexed columns against database
225    if let Some(db_url) = opts.database {
226        info!("Validating indexed columns against database...");
227        validate_indexed_columns(&schema, db_url).await?;
228    }
229
230    // 5c. Warn when SQLite is the target but the schema uses features SQLite doesn't support.
231    check_sqlite_compatibility_warnings(&schema, opts.input, is_toml, opts.database);
232
233    Ok((schema, report))
234}
235
236/// Run the compile command
237///
238/// # Arguments
239///
240/// * `input` - Path to fraiseql.toml (TOML) or schema.json (legacy)
241/// * `types` - Optional path to types.json (when using TOML workflow)
242/// * `schema_dir` - Optional directory for auto-discovery of schema files
243/// * `type_files` - Optional vector of explicit type file paths
244/// * `query_files` - Optional vector of explicit query file paths
245/// * `mutation_files` - Optional vector of explicit mutation file paths
246/// * `output` - Path to write schema.compiled.json
247/// * `check` - If true, validate only without writing output
248/// * `database` - Optional database URL for indexed column validation
249///
250/// # Workflows
251///
252/// 1. TOML-only: `fraiseql compile fraiseql.toml`
253/// 2. Language + TOML: `fraiseql compile fraiseql.toml --types types.json`
254/// 3. Multi-file auto-discovery: `fraiseql compile fraiseql.toml --schema-dir schema/`
255/// 4. Multi-file explicit: `fraiseql compile fraiseql.toml --type-file a.json --type-file b.json`
256/// 5. Legacy JSON: `fraiseql compile schema.json`
257///
258/// # Errors
259///
260/// Returns error if:
261/// - Input file doesn't exist or can't be read
262/// - JSON/TOML parsing fails
263/// - Schema validation fails
264/// - Output file can't be written
265/// - Database connection fails (when database URL is provided)
266#[allow(clippy::too_many_arguments)] // Reason: run() is the CLI entry point that receives individual args from clap; keeping them separate for clarity
267pub async fn run(
268    input: &str,
269    types: Option<&str>,
270    schema_dir: Option<&str>,
271    type_files: Vec<String>,
272    query_files: Vec<String>,
273    mutation_files: Vec<String>,
274    output: &str,
275    check: bool,
276    database: Option<&str>,
277) -> Result<()> {
278    let opts = CompileOptions {
279        input,
280        types,
281        schema_dir,
282        type_files,
283        query_files,
284        mutation_files,
285        database,
286    };
287    let (schema, optimization_report) = compile_to_schema(opts).await?;
288
289    // If check-only mode, stop here
290    if check {
291        println!("✓ Schema is valid");
292        println!("  Types: {}", schema.types.len());
293        println!("  Queries: {}", schema.queries.len());
294        println!("  Mutations: {}", schema.mutations.len());
295        optimization_report.print();
296        return Ok(());
297    }
298
299    // Write compiled schema
300    info!("Writing compiled schema to: {output}");
301    let output_json =
302        serde_json::to_string_pretty(&schema).context("Failed to serialize compiled schema")?;
303    fs::write(output, output_json).context("Failed to write compiled schema")?;
304
305    // Success message
306    println!("✓ Schema compiled successfully");
307    println!("  Input:  {input}");
308    println!("  Output: {output}");
309    println!("  Types: {}", schema.types.len());
310    println!("  Queries: {}", schema.queries.len());
311    println!("  Mutations: {}", schema.mutations.len());
312    optimization_report.print();
313
314    Ok(())
315}
316
317/// Emit warnings when schema uses features that SQLite does not support.
318///
319/// SQLite lacks stored procedures (mutations) and relay/subscription support.
320/// A compile-time warning helps catch this before runtime failures.
321fn check_sqlite_compatibility_warnings(
322    schema: &CompiledSchema,
323    input_path: &str,
324    is_toml: bool,
325    database_url: Option<&str>,
326) {
327    let target_is_sqlite = database_url
328        .is_some_and(|url| url.to_ascii_lowercase().starts_with("sqlite://"))
329        || is_toml && detect_sqlite_target_in_toml(input_path);
330
331    if !target_is_sqlite {
332        return;
333    }
334
335    let mutation_count = schema.mutations.len();
336    let relay_count = schema.queries.iter().filter(|q| q.relay).count();
337    let subscription_count = schema.subscriptions.len();
338
339    if mutation_count > 0 {
340        warn!(
341            "Schema contains {} mutation(s) but target database is SQLite. \
342             Mutations are not supported on SQLite. \
343             See: https://fraiseql.dev/docs/database-compatibility",
344            mutation_count,
345        );
346    }
347    if relay_count > 0 {
348        warn!(
349            "Schema contains {} relay query/queries but target database is SQLite. \
350             Relay (keyset pagination) is not supported on SQLite. \
351             See: https://fraiseql.dev/docs/database-compatibility",
352            relay_count,
353        );
354    }
355    if subscription_count > 0 {
356        warn!(
357            "Schema contains {} subscription(s) but target database is SQLite. \
358             Subscriptions are not supported on SQLite. \
359             See: https://fraiseql.dev/docs/database-compatibility",
360            subscription_count,
361        );
362    }
363}
364
365/// Check if the TOML schema file specifies `database_target = "sqlite"`.
366///
367/// Reads and parses the TOML to extract the schema metadata. Returns `false`
368/// on any parse error (non-fatal — warning detection is best-effort).
369fn detect_sqlite_target_in_toml(toml_path: &str) -> bool {
370    let Ok(content) = fs::read_to_string(toml_path) else {
371        return false;
372    };
373    let Ok(toml_schema) = toml::from_str::<crate::config::toml_schema::TomlSchema>(&content) else {
374        return false;
375    };
376    toml_schema.schema.database_target.to_ascii_lowercase().contains("sqlite")
377}
378
379/// Validate indexed columns against database views.
380///
381/// Connects to the database and introspects view columns to verify that
382/// any indexed column naming conventions are properly set up.
383///
384/// # Arguments
385///
386/// * `schema` - The compiled schema to validate
387/// * `db_url` - Database connection URL
388///
389/// # Errors
390///
391/// Returns error if database connection fails. Warnings are printed for
392/// missing indexed columns but don't cause validation to fail.
393async fn validate_indexed_columns(schema: &CompiledSchema, db_url: &str) -> Result<()> {
394    use deadpool_postgres::{Config, ManagerConfig, RecyclingMethod, Runtime};
395    use fraiseql_core::db::postgres::PostgresIntrospector;
396    use tokio_postgres::NoTls;
397
398    // Create pool for introspection
399    let mut cfg = Config::new();
400    cfg.url = Some(db_url.to_string());
401    cfg.manager = Some(ManagerConfig {
402        recycling_method: RecyclingMethod::Fast,
403    });
404    cfg.pool = Some(deadpool_postgres::PoolConfig::new(2));
405
406    let pool = cfg
407        .create_pool(Some(Runtime::Tokio1), NoTls)
408        .context("Failed to create connection pool for indexed column validation")?;
409
410    let introspector = PostgresIntrospector::new(pool);
411
412    let mut total_indexed = 0;
413    let mut total_views = 0;
414
415    // Check each query's sql_source (view)
416    for query in &schema.queries {
417        if let Some(view_name) = &query.sql_source {
418            total_views += 1;
419
420            // Get indexed columns for this view
421            match introspector.get_indexed_nested_columns(view_name).await {
422                Ok(indexed_cols) => {
423                    if !indexed_cols.is_empty() {
424                        info!(
425                            "View '{}': found {} indexed column(s): {:?}",
426                            view_name,
427                            indexed_cols.len(),
428                            indexed_cols
429                        );
430                        total_indexed += indexed_cols.len();
431                    }
432                },
433                Err(e) => {
434                    warn!(
435                        "Could not introspect view '{}': {}. Skipping indexed column check.",
436                        view_name, e
437                    );
438                },
439            }
440        }
441    }
442
443    println!("✓ Indexed column validation complete");
444    println!("  Views checked: {total_views}");
445    println!("  Indexed columns found: {total_indexed}");
446
447    Ok(())
448}
449
#[cfg(test)]
mod tests {
    //! Schema-structure construction tests.
    //!
    //! Validation itself is private to `SchemaConverter`, so these tests only
    //! verify that representative `CompiledSchema` values can be built by hand
    //! with the expected field shapes.

    use std::collections::HashMap;

    use fraiseql_core::{
        schema::{
            AutoParams, CompiledSchema, CursorType, FieldDefinition, FieldDenyPolicy, FieldType,
            QueryDefinition, TypeDefinition,
        },
        validation::CustomTypeRegistry,
    };
    use indexmap::IndexMap;

    // Builds a minimal well-formed schema: one `User` type with two scalar
    // fields, and one list query returning it.
    #[test]
    fn test_validate_schema_success() {
        let schema = CompiledSchema {
            types: vec![TypeDefinition {
                name:                "User".into(),
                fields:              vec![
                    FieldDefinition {
                        name:           "id".into(),
                        field_type:     FieldType::Int,
                        nullable:       false,
                        default_value:  None,
                        description:    None,
                        vector_config:  None,
                        alias:          None,
                        deprecation:    None,
                        requires_scope: None,
                        on_deny:        FieldDenyPolicy::default(),
                        encryption:     None,
                    },
                    FieldDefinition {
                        name:           "name".into(),
                        field_type:     FieldType::String,
                        nullable:       false,
                        default_value:  None,
                        description:    None,
                        vector_config:  None,
                        alias:          None,
                        deprecation:    None,
                        requires_scope: None,
                        on_deny:        FieldDenyPolicy::default(),
                        encryption:     None,
                    },
                ],
                description:         Some("User type".to_string()),
                sql_source:          String::new().into(),
                jsonb_column:        String::new(),
                sql_projection_hint: None,
                implements:          vec![],
                requires_role:       None,
                is_error:            false,
                relay:               false,
                relationships:       Vec::new(),
            }],
            queries: vec![QueryDefinition {
                name:                "users".to_string(),
                return_type:         "User".to_string(),
                returns_list:        true,
                nullable:            false,
                arguments:           vec![],
                sql_source:          Some("v_user".to_string()),
                description:         Some("Get users".to_string()),
                auto_params:         AutoParams::default(),
                deprecation:         None,
                jsonb_column:        "data".to_string(),
                relay:               false,
                relay_cursor_column: None,
                relay_cursor_type:   CursorType::default(),
                inject_params:       IndexMap::default(),
                cache_ttl_seconds:   None,
                additional_views:    vec![],
                requires_role:       None,
                rest_path:           None,
                rest_method:         None,
            }],
            enums: vec![],
            input_types: vec![],
            interfaces: vec![],
            unions: vec![],
            mutations: vec![],
            subscriptions: vec![],
            directives: vec![],
            observers: Vec::new(),
            fact_tables: HashMap::default(),
            federation: None,
            security: None,
            observers_config: None,
            subscriptions_config: None,
            validation_config: None,
            debug_config: None,
            mcp_config: None,
            schema_sdl: None,
            // None is intentional here: this struct is used only for in-process
            // validation assertions and is never serialised to disk. The real
            // compile path stamps the version in `compile_to_schema` (step 5a,
            // `CURRENT_SCHEMA_FORMAT_VERSION`).
            schema_format_version: None,
            custom_scalars: CustomTypeRegistry::default(),
            ..Default::default()
        };

        // Validation is done inside SchemaConverter::convert, not exposed separately
        // This test just verifies we can build a valid schema structure
        assert_eq!(schema.types.len(), 1);
        assert_eq!(schema.queries.len(), 1);
    }

    // Builds a schema whose query references a type that is not declared;
    // demonstrates the shape of an invalid schema (no validation is run here).
    #[test]
    fn test_validate_schema_unknown_type() {
        let schema = CompiledSchema {
            types: vec![],
            enums: vec![],
            input_types: vec![],
            interfaces: vec![],
            unions: vec![],
            queries: vec![QueryDefinition {
                name:                "users".to_string(),
                return_type:         "UnknownType".to_string(),
                returns_list:        true,
                nullable:            false,
                arguments:           vec![],
                sql_source:          Some("v_user".to_string()),
                description:         Some("Get users".to_string()),
                auto_params:         AutoParams::default(),
                deprecation:         None,
                jsonb_column:        "data".to_string(),
                relay:               false,
                relay_cursor_column: None,
                relay_cursor_type:   CursorType::default(),
                inject_params:       IndexMap::default(),
                cache_ttl_seconds:   None,
                additional_views:    vec![],
                requires_role:       None,
                rest_path:           None,
                rest_method:         None,
            }],
            mutations: vec![],
            subscriptions: vec![],
            directives: vec![],
            observers: Vec::new(),
            fact_tables: HashMap::default(),
            federation: None,
            security: None,
            observers_config: None,
            subscriptions_config: None,
            validation_config: None,
            debug_config: None,
            mcp_config: None,
            schema_sdl: None,
            // None is intentional here: this struct is used only for in-process
            // validation assertions and is never serialised to disk. The real
            // compile path stamps the version in `compile_to_schema` (step 5a,
            // `CURRENT_SCHEMA_FORMAT_VERSION`).
            schema_format_version: None,
            custom_scalars: CustomTypeRegistry::default(),
            ..Default::default()
        };

        // Note: Validation is private to SchemaConverter
        // This test demonstrates the schema structure with an invalid type
        assert_eq!(schema.types.len(), 0);
        assert_eq!(schema.queries[0].return_type, "UnknownType");
    }
}