use std::{fs, path::Path};
use anyhow::{Context, Result};
use fraiseql_core::schema::CompiledSchema;
use tracing::{info, warn};
use crate::{
config::FraiseQLConfig,
schema::{
IntermediateSchema, OptimizationReport, SchemaConverter, SchemaOptimizer, SchemaValidator,
},
};
/// Inputs controlling a single schema-compilation run.
///
/// Construct directly (as the CLI entry point does) or fluently via
/// [`CompileOptions::new`] plus the `with_*` builder methods.
#[derive(Debug, Default)]
pub struct CompileOptions<'a> {
    /// Path to the primary schema input (`.toml` or legacy `schema.json`).
    pub input: &'a str,
    /// Optional path to a `types.json` produced by a language frontend.
    pub types: Option<&'a str>,
    /// Optional directory to auto-discover schema fragments from.
    pub schema_dir: Option<&'a str>,
    /// Explicit type-definition files (take precedence when non-empty).
    pub type_files: Vec<String>,
    /// Explicit query-definition files.
    pub query_files: Vec<String>,
    /// Explicit mutation-definition files.
    pub mutation_files: Vec<String>,
    /// Optional database URL enabling post-compile indexed-column validation.
    pub database: Option<&'a str>,
}

impl<'a> CompileOptions<'a> {
    /// Creates options for `input`, leaving every other field at its default.
    #[must_use]
    pub fn new(input: &'a str) -> Self {
        Self {
            input,
            ..Self::default()
        }
    }

    /// Sets the `types.json` path and returns the updated options.
    #[must_use]
    pub fn with_types(mut self, types: &'a str) -> Self {
        self.types = Some(types);
        self
    }

    /// Sets the auto-discovery directory and returns the updated options.
    #[must_use]
    pub fn with_schema_dir(mut self, schema_dir: &'a str) -> Self {
        self.schema_dir = Some(schema_dir);
        self
    }

    /// Sets the database URL and returns the updated options.
    #[must_use]
    pub fn with_database(mut self, database: &'a str) -> Self {
        self.database = Some(database);
        self
    }
}
/// Resolves an [`IntermediateSchema`] from one of several input modes.
///
/// Precedence (first match wins):
/// 1. Explicit type/query/mutation file lists
/// 2. Auto-discovery from `schema_dir`
/// 3. `types_path` (`types.json`) merged with the TOML
/// 4. Pure TOML: domain discovery, then includes, then TOML-only definitions
///
/// # Errors
///
/// Returns an error when the selected mode fails to load or merge its inputs.
fn load_intermediate_schema(
    toml_path: &str,
    type_files: &[String],
    query_files: &[String],
    mutation_files: &[String],
    schema_dir: Option<&str>,
    types_path: Option<&str>,
) -> Result<IntermediateSchema> {
    let explicit = !(type_files.is_empty() && query_files.is_empty() && mutation_files.is_empty());
    if explicit {
        info!("Mode: Explicit file lists");
        return crate::schema::SchemaMerger::merge_explicit_files(
            toml_path,
            type_files,
            query_files,
            mutation_files,
        )
        .context("Failed to load explicit schema files");
    }

    if let Some(dir) = schema_dir {
        info!("Mode: Auto-discovery from directory: {}", dir);
        return crate::schema::SchemaMerger::merge_from_directory(toml_path, dir)
            .context("Failed to load schema from directory");
    }

    if let Some(types) = types_path {
        info!("Mode: Language + TOML (types.json + fraiseql.toml)");
        return crate::schema::SchemaMerger::merge_files(types, toml_path)
            .context("Failed to merge types.json with TOML");
    }

    // Pure-TOML fall-through: domains -> includes -> plain TOML definitions.
    // Failures of the first two stages are deliberately treated as "not
    // configured" rather than hard errors, matching the original best-effort
    // discovery behavior.
    info!("Mode: TOML-based (checking for domain discovery...)");
    match crate::schema::SchemaMerger::merge_from_domains(toml_path) {
        Ok(schema) => return Ok(schema),
        Err(_) => info!("No domains configured, checking for TOML includes..."),
    }
    match crate::schema::SchemaMerger::merge_with_includes(toml_path) {
        Ok(schema) => Ok(schema),
        Err(_) => {
            info!("No includes configured, using TOML-only definitions");
            crate::schema::SchemaMerger::merge_toml_only(toml_path)
                .context("Failed to load schema from TOML")
        },
    }
}
/// Compiles `opts.input` into a [`CompiledSchema`] plus an [`OptimizationReport`].
///
/// Supports both the TOML workflow (with its multiple discovery modes, see
/// `load_intermediate_schema`) and the legacy `schema.json` workflow. In the
/// legacy workflow only, a sibling `fraiseql.toml` (when present) supplies the
/// security configuration.
///
/// # Errors
///
/// Returns an error if the input file is missing or cannot be parsed, if a
/// present `fraiseql.toml` is invalid, if validation or conversion fails, or —
/// when `opts.database` is set — if the validation connection pool cannot be
/// created.
pub async fn compile_to_schema(
    opts: CompileOptions<'_>,
) -> Result<(CompiledSchema, OptimizationReport)> {
    info!("Compiling schema: {}", opts.input);
    let input_path = Path::new(opts.input);
    if !input_path.exists() {
        anyhow::bail!("Input file not found: {}", opts.input);
    }

    // Workflow selection is driven purely by the input file's extension.
    let is_toml = input_path
        .extension()
        .and_then(|ext| ext.to_str())
        .is_some_and(|ext| ext.eq_ignore_ascii_case("toml"));

    let mut intermediate: IntermediateSchema = if is_toml {
        info!("Using TOML-based workflow");
        load_intermediate_schema(
            opts.input,
            &opts.type_files,
            &opts.query_files,
            &opts.mutation_files,
            opts.schema_dir,
            opts.types,
        )?
    } else {
        info!("Using legacy JSON workflow");
        let schema_json = fs::read_to_string(input_path).context("Failed to read schema.json")?;
        info!("Parsing intermediate schema...");
        serde_json::from_str(&schema_json).context("Failed to parse schema.json")?
    };

    // Security configuration is layered on only in the legacy JSON workflow;
    // the TOML workflow carries its own configuration. Fix: previously the
    // final `else` also fired for TOML inputs, logging "No fraiseql.toml
    // found" even when the file existed.
    if is_toml {
        // TOML input is itself the configuration source — nothing extra to load.
    } else if Path::new("fraiseql.toml").exists() {
        info!("Loading security configuration from fraiseql.toml...");
        match FraiseQLConfig::from_file("fraiseql.toml") {
            Ok(config) => {
                info!("Validating security configuration...");
                config.validate()?;
                info!("Applying security configuration to schema...");
                let security_json = config.fraiseql.security.to_json();
                intermediate.security = Some(security_json);
                info!("Security configuration applied successfully");
            },
            Err(e) => {
                // A present-but-broken config is a hard error: silently falling
                // back to defaults would run with settings the operator did not
                // intend.
                anyhow::bail!(
                    "Failed to parse fraiseql.toml: {e}\n\
                     Fix the configuration file or remove it to use defaults."
                );
            },
        }
    } else {
        info!("No fraiseql.toml found, using default security configuration");
    }

    info!("Validating schema structure...");
    let validation_report =
        SchemaValidator::validate(&intermediate).context("Failed to validate schema")?;
    if !validation_report.is_valid() {
        validation_report.print();
        anyhow::bail!("Schema validation failed with {} error(s)", validation_report.error_count());
    }
    if validation_report.warning_count() > 0 {
        // Warnings are non-fatal but still surfaced to the user.
        validation_report.print();
    }

    info!("Converting to compiled format...");
    let mut schema = SchemaConverter::convert(intermediate)
        .context("Failed to convert schema to compiled format")?;

    info!("Analyzing schema for optimization opportunities...");
    let report = SchemaOptimizer::optimize(&mut schema).context("Failed to optimize schema")?;

    // Optional post-compile pass: requires a live database connection.
    if let Some(db_url) = opts.database {
        info!("Validating indexed columns against database...");
        validate_indexed_columns(&schema, db_url).await?;
    }

    Ok((schema, report))
}
/// CLI entry point for `compile`: bundles the flags into [`CompileOptions`],
/// compiles, and then either reports validity (`check == true`) or writes the
/// compiled schema to `output`.
///
/// # Errors
///
/// Propagates any compilation failure, plus serialization/write errors for the
/// output file.
#[allow(clippy::too_many_arguments)] // Mirrors the CLI flag surface; bundled into CompileOptions immediately.
pub async fn run(
    input: &str,
    types: Option<&str>,
    schema_dir: Option<&str>,
    type_files: Vec<String>,
    query_files: Vec<String>,
    mutation_files: Vec<String>,
    output: &str,
    check: bool,
    database: Option<&str>,
) -> Result<()> {
    let opts = CompileOptions {
        input,
        types,
        schema_dir,
        type_files,
        query_files,
        mutation_files,
        database,
    };
    let (schema, optimization_report) = compile_to_schema(opts).await?;

    if check {
        // Dry run: report validity without touching the filesystem.
        println!("✓ Schema is valid");
    } else {
        info!("Writing compiled schema to: {output}");
        let output_json =
            serde_json::to_string_pretty(&schema).context("Failed to serialize compiled schema")?;
        fs::write(output, output_json).context("Failed to write compiled schema")?;
        println!("✓ Schema compiled successfully");
        println!(" Input: {input}");
        println!(" Output: {output}");
    }

    // Summary shared by both modes.
    println!(" Types: {}", schema.types.len());
    println!(" Queries: {}", schema.queries.len());
    println!(" Mutations: {}", schema.mutations.len());
    optimization_report.print();
    Ok(())
}
/// Best-effort check of indexed nested columns for every view-backed query.
///
/// Connects to `db_url` with a small dedicated pool, introspects each view
/// referenced by a query's `sql_source`, and prints a summary. Introspection
/// failures on individual views are logged as warnings and skipped rather than
/// failing the whole run.
///
/// # Errors
///
/// Returns an error only when the connection pool itself cannot be created.
async fn validate_indexed_columns(schema: &CompiledSchema, db_url: &str) -> Result<()> {
    use deadpool_postgres::{Config, ManagerConfig, RecyclingMethod, Runtime};
    use fraiseql_core::db::postgres::PostgresIntrospector;
    use tokio_postgres::NoTls;

    // Tiny throwaway pool: this is a one-shot validation pass, not a server.
    let mut cfg = Config::new();
    cfg.url = Some(db_url.to_string());
    cfg.manager = Some(ManagerConfig {
        recycling_method: RecyclingMethod::Fast,
    });
    cfg.pool = Some(deadpool_postgres::PoolConfig::new(2));
    let pool = cfg
        .create_pool(Some(Runtime::Tokio1), NoTls)
        .context("Failed to create connection pool for indexed column validation")?;
    let introspector = PostgresIntrospector::new(pool);

    let mut total_indexed = 0;
    let mut total_views = 0;
    // Only queries backed by a SQL view participate in the check.
    for view_name in schema.queries.iter().filter_map(|q| q.sql_source.as_ref()) {
        total_views += 1;
        match introspector.get_indexed_nested_columns(view_name).await {
            // No indexed columns is unremarkable — nothing to report.
            Ok(indexed_cols) if indexed_cols.is_empty() => {},
            Ok(indexed_cols) => {
                info!(
                    "View '{}': found {} indexed column(s): {:?}",
                    view_name,
                    indexed_cols.len(),
                    indexed_cols
                );
                total_indexed += indexed_cols.len();
            },
            Err(e) => {
                warn!(
                    "Could not introspect view '{}': {}. Skipping indexed column check.",
                    view_name, e
                );
            },
        }
    }

    println!("✓ Indexed column validation complete");
    println!(" Views checked: {total_views}");
    println!(" Indexed columns found: {total_indexed}");
    Ok(())
}
#[cfg(test)]
mod tests {
    use std::collections::HashMap;
    use fraiseql_core::{
        schema::{
            AutoParams, CompiledSchema, FieldDefinition, FieldType, QueryDefinition, TypeDefinition,
        },
        validation::CustomTypeRegistry,
    };

    /// Builds a non-nullable scalar field with every optional knob unset.
    fn scalar_field(name: &str, field_type: FieldType) -> FieldDefinition {
        FieldDefinition {
            name: name.to_string(),
            field_type,
            nullable: false,
            default_value: None,
            description: None,
            vector_config: None,
            alias: None,
            deprecation: None,
            requires_scope: None,
            encryption: None,
        }
    }

    /// Builds the `users` list query against `v_user` with the given return type.
    fn users_query(return_type: &str) -> QueryDefinition {
        QueryDefinition {
            name: "users".to_string(),
            return_type: return_type.to_string(),
            returns_list: true,
            nullable: false,
            arguments: vec![],
            sql_source: Some("v_user".to_string()),
            description: Some("Get users".to_string()),
            auto_params: AutoParams::default(),
            deprecation: None,
            jsonb_column: "data".to_string(),
        }
    }

    /// Wraps `types` and `queries` in an otherwise-empty compiled schema.
    fn schema_with(types: Vec<TypeDefinition>, queries: Vec<QueryDefinition>) -> CompiledSchema {
        CompiledSchema {
            types,
            queries,
            enums: vec![],
            input_types: vec![],
            interfaces: vec![],
            unions: vec![],
            mutations: vec![],
            subscriptions: vec![],
            directives: vec![],
            observers: Vec::new(),
            fact_tables: HashMap::default(),
            federation: None,
            security: None,
            observers_config: None,
            schema_sdl: None,
            custom_scalars: CustomTypeRegistry::default(),
        }
    }

    #[test]
    fn test_validate_schema_success() {
        let user_type = TypeDefinition {
            name: "User".to_string(),
            fields: vec![
                scalar_field("id", FieldType::Int),
                scalar_field("name", FieldType::String),
            ],
            description: Some("User type".to_string()),
            sql_source: String::new(),
            jsonb_column: String::new(),
            sql_projection_hint: None,
            implements: vec![],
            is_error: false,
        };
        let schema = schema_with(vec![user_type], vec![users_query("User")]);
        assert_eq!(schema.types.len(), 1);
        assert_eq!(schema.queries.len(), 1);
    }

    #[test]
    fn test_validate_schema_unknown_type() {
        // A query may reference a type that is absent from `types`; the
        // constructed value simply records that reference.
        let schema = schema_with(vec![], vec![users_query("UnknownType")]);
        assert_eq!(schema.types.len(), 0);
        assert_eq!(schema.queries[0].return_type, "UnknownType");
    }
}