use std::fs;
use anyhow::{Context, Result};
use serde_json::{Value, json};
use crate::{config::TomlSchema, schema::IntermediateSchema};
/// Merges JSON schema inputs (types/queries/mutations) with a TOML schema
/// file into a single `IntermediateSchema`. All functionality is exposed as
/// associated functions; the struct itself carries no state.
pub struct SchemaMerger;
impl SchemaMerger {
pub fn merge_files(types_path: &str, toml_path: &str) -> Result<IntermediateSchema> {
let types_json = fs::read_to_string(types_path)
.context(format!("Failed to read types.json from {types_path}"))?;
let types_value: Value =
serde_json::from_str(&types_json).context("Failed to parse types.json")?;
let toml_schema = TomlSchema::from_file(toml_path)
.context(format!("Failed to load TOML from {toml_path}"))?;
Self::merge_values(&types_value, &toml_schema)
}
pub fn merge_toml_only(toml_path: &str) -> Result<IntermediateSchema> {
let toml_schema = TomlSchema::from_file(toml_path)
.context(format!("Failed to load TOML from {toml_path}"))?;
toml_schema.validate()?;
let types_value = toml_schema.to_intermediate_schema();
Self::merge_values(&types_value, &toml_schema)
}
pub fn merge_from_directory(toml_path: &str, schema_dir: &str) -> Result<IntermediateSchema> {
let toml_schema = TomlSchema::from_file(toml_path)
.context(format!("Failed to load TOML from {toml_path}"))?;
toml_schema.validate()?;
let types_value = crate::schema::MultiFileLoader::load_from_directory(schema_dir)
.context(format!("Failed to load schema from directory {schema_dir}"))?;
Self::merge_values(&types_value, &toml_schema)
}
fn load_section(files: &[String], key: &str) -> Result<Option<serde_json::Value>> {
if files.is_empty() {
return Ok(None);
}
let paths: Vec<std::path::PathBuf> = files.iter().map(std::path::PathBuf::from).collect();
let loaded = crate::schema::MultiFileLoader::load_from_paths(&paths)
.with_context(|| format!("Failed to load {key} files"))?;
Ok(loaded.get(key).cloned())
}
fn extend_from_json_file(
path: &std::path::Path,
all_types: &mut Vec<Value>,
all_queries: &mut Vec<Value>,
all_mutations: &mut Vec<Value>,
) -> Result<()> {
let content = fs::read_to_string(path)
.with_context(|| format!("Failed to read {}", path.display()))?;
let value: Value = serde_json::from_str(&content)
.with_context(|| format!("Failed to parse {}", path.display()))?;
for (vec, key) in [
(all_types as &mut Vec<Value>, "types"),
(all_queries, "queries"),
(all_mutations, "mutations"),
] {
if let Some(Value::Array(items)) = value.get(key) {
vec.extend(items.iter().cloned());
}
}
Ok(())
}
fn enrich_type_from_toml(enriched_type: &mut Value, toml_type: &crate::config::toml_schema::TypeDefinition) {
enriched_type["sql_source"] = json!(toml_type.sql_source);
if let Some(desc) = &toml_type.description {
enriched_type["description"] = json!(desc);
}
}
pub fn merge_explicit_files(
toml_path: &str,
type_files: &[String],
query_files: &[String],
mutation_files: &[String],
) -> Result<IntermediateSchema> {
let toml_schema = TomlSchema::from_file(toml_path)
.context(format!("Failed to load TOML from {toml_path}"))?;
toml_schema.validate()?;
let mut types_value = serde_json::json!({
"types": [],
"queries": [],
"mutations": []
});
if let Some(v) = Self::load_section(type_files, "types")? {
types_value["types"] = v;
}
if let Some(v) = Self::load_section(query_files, "queries")? {
types_value["queries"] = v;
}
if let Some(v) = Self::load_section(mutation_files, "mutations")? {
types_value["mutations"] = v;
}
Self::merge_values(&types_value, &toml_schema)
}
pub fn merge_from_domains(toml_path: &str) -> Result<IntermediateSchema> {
let toml_schema = TomlSchema::from_file(toml_path)
.context(format!("Failed to load TOML from {toml_path}"))?;
toml_schema.validate()?;
let domains = toml_schema
.domain_discovery
.resolve_domains()
.context("Failed to discover domains")?;
if domains.is_empty() {
let empty_value = serde_json::json!({
"types": [],
"queries": [],
"mutations": []
});
return Self::merge_values(&empty_value, &toml_schema);
}
let mut all_types = Vec::new();
let mut all_queries = Vec::new();
let mut all_mutations = Vec::new();
for domain in domains {
for filename in ["types.json", "queries.json", "mutations.json"] {
let path = domain.path.join(filename);
if path.exists() {
Self::extend_from_json_file(
&path,
&mut all_types,
&mut all_queries,
&mut all_mutations,
)?;
}
}
}
let types_value = serde_json::json!({
"types": all_types,
"queries": all_queries,
"mutations": all_mutations,
});
Self::merge_values(&types_value, &toml_schema)
}
pub fn merge_with_includes(toml_path: &str) -> Result<IntermediateSchema> {
let toml_schema = TomlSchema::from_file(toml_path)
.context(format!("Failed to load TOML from {toml_path}"))?;
toml_schema.validate()?;
let types_value = if toml_schema.includes.is_empty() {
serde_json::json!({
"types": [],
"queries": [],
"mutations": []
})
} else {
let resolved = toml_schema
.includes
.resolve_globs()
.context("Failed to resolve glob patterns in schema.includes")?;
let type_files: Vec<std::path::PathBuf> = resolved.types;
let mut merged_types = if type_files.is_empty() {
serde_json::json!({
"types": [],
"queries": [],
"mutations": []
})
} else {
crate::schema::MultiFileLoader::load_from_paths(&type_files)
.context("Failed to load type files")?
};
if !resolved.queries.is_empty() {
let loaded = crate::schema::MultiFileLoader::load_from_paths(&resolved.queries)
.context("Failed to load query files")?;
let new_items =
loaded.get("queries").and_then(Value::as_array).cloned().unwrap_or_default();
if let Some(Value::Array(existing)) = merged_types.get_mut("queries") {
existing.extend(new_items);
}
}
if !resolved.mutations.is_empty() {
let loaded = crate::schema::MultiFileLoader::load_from_paths(&resolved.mutations)
.context("Failed to load mutation files")?;
let new_items = loaded
.get("mutations")
.and_then(Value::as_array)
.cloned()
.unwrap_or_default();
if let Some(Value::Array(existing)) = merged_types.get_mut("mutations") {
existing.extend(new_items);
}
}
merged_types
};
Self::merge_values(&types_value, &toml_schema)
}
fn merge_values(types_value: &Value, toml_schema: &TomlSchema) -> Result<IntermediateSchema> {
let mut types_array: Vec<Value> = Vec::new();
let mut queries_array: Vec<Value> = Vec::new();
let mut mutations_array: Vec<Value> = Vec::new();
if let Some(types_obj) = types_value.get("types") {
match types_obj {
Value::Array(types_list) => {
for type_item in types_list {
if let Some(type_name) = type_item.get("name").and_then(|v| v.as_str()) {
let mut enriched_type = type_item.clone();
if let Some(toml_type) = toml_schema.types.get(type_name) {
Self::enrich_type_from_toml(&mut enriched_type, toml_type);
}
types_array.push(enriched_type);
}
}
},
Value::Object(types_map) => {
for (type_name, type_value) in types_map {
let mut enriched_type = type_value.clone();
enriched_type["name"] = json!(type_name);
if let Some(Value::Object(fields_map)) = enriched_type.get("fields") {
let fields_array: Vec<Value> = fields_map
.iter()
.map(|(field_name, field_value)| {
let mut field = field_value.clone();
field["name"] = json!(field_name);
field
})
.collect();
enriched_type["fields"] = json!(fields_array);
}
if let Some(toml_type) = toml_schema.types.get(type_name) {
Self::enrich_type_from_toml(&mut enriched_type, toml_type);
}
types_array.push(enriched_type);
}
},
_ => {},
}
}
let existing_type_names: std::collections::HashSet<_> = types_array
.iter()
.filter_map(|t| {
t.get("name").and_then(|v| v.as_str()).map(str::to_string)
})
.collect();
for (type_name, toml_type) in &toml_schema.types {
if !existing_type_names.contains(type_name) {
types_array.push(json!({
"name": type_name,
"sql_source": toml_type.sql_source,
"description": toml_type.description,
"fields": toml_type.fields.iter().map(|(fname, fdef)| json!({
"name": fname,
"type": fdef.field_type,
"nullable": fdef.nullable,
"description": fdef.description,
})).collect::<Vec<_>>(),
}));
}
}
if let Some(Value::Array(queries_list)) = types_value.get("queries") {
queries_array.clone_from(queries_list);
}
for (query_name, toml_query) in &toml_schema.queries {
queries_array.push(json!({
"name": query_name,
"return_type": toml_query.return_type,
"return_array": toml_query.return_array,
"sql_source": toml_query.sql_source,
"description": toml_query.description,
"args": toml_query.args.iter().map(|arg| json!({
"name": arg.name,
"type": arg.arg_type,
"required": arg.required,
"default": arg.default,
"description": arg.description,
})).collect::<Vec<_>>(),
}));
}
if let Some(Value::Array(mutations_list)) = types_value.get("mutations") {
mutations_array.clone_from(mutations_list);
}
for (mutation_name, toml_mutation) in &toml_schema.mutations {
mutations_array.push(json!({
"name": mutation_name,
"return_type": toml_mutation.return_type,
"sql_source": toml_mutation.sql_source,
"operation": toml_mutation.operation,
"description": toml_mutation.description,
"args": toml_mutation.args.iter().map(|arg| json!({
"name": arg.name,
"type": arg.arg_type,
"required": arg.required,
"default": arg.default,
"description": arg.description,
})).collect::<Vec<_>>(),
}));
}
let mut merged = serde_json::json!({
"version": "2.0.0",
"types": types_array,
"queries": queries_array,
"mutations": mutations_array,
});
merged["security"] = json!({
"default_policy": toml_schema.security.default_policy,
"rules": toml_schema.security.rules.iter().map(|r| json!({
"name": r.name,
"rule": r.rule,
"description": r.description,
"cacheable": r.cacheable,
"cache_ttl_seconds": r.cache_ttl_seconds,
})).collect::<Vec<_>>(),
"policies": toml_schema.security.policies.iter().map(|p| json!({
"name": p.name,
"type": p.policy_type,
"rule": p.rule,
"roles": p.roles,
"strategy": p.strategy,
"attributes": p.attributes,
"description": p.description,
"cache_ttl_seconds": p.cache_ttl_seconds,
})).collect::<Vec<_>>(),
"field_auth": toml_schema.security.field_auth.iter().map(|fa| json!({
"type_name": fa.type_name,
"field_name": fa.field_name,
"policy": fa.policy,
})).collect::<Vec<_>>(),
"enterprise": json!({
"rate_limiting_enabled": toml_schema.security.enterprise.rate_limiting_enabled,
"auth_endpoint_max_requests": toml_schema.security.enterprise.auth_endpoint_max_requests,
"auth_endpoint_window_seconds": toml_schema.security.enterprise.auth_endpoint_window_seconds,
"audit_logging_enabled": toml_schema.security.enterprise.audit_logging_enabled,
"audit_log_backend": toml_schema.security.enterprise.audit_log_backend,
"audit_retention_days": toml_schema.security.enterprise.audit_retention_days,
"error_sanitization": toml_schema.security.enterprise.error_sanitization,
"hide_implementation_details": toml_schema.security.enterprise.hide_implementation_details,
"constant_time_comparison": toml_schema.security.enterprise.constant_time_comparison,
"pkce_enabled": toml_schema.security.enterprise.pkce_enabled,
}),
});
if toml_schema.observers.enabled
|| toml_schema.observers.redis_url.is_some()
|| toml_schema.observers.nats_url.is_some()
{
if toml_schema.observers.backend == "nats" && toml_schema.observers.nats_url.is_none() {
tracing::warn!(
"observers.backend is \"nats\" but observers.nats_url is not set; \
the runtime will require FRAISEQL_NATS_URL to be configured"
);
}
merged["observers_config"] = json!({
"enabled": toml_schema.observers.enabled,
"backend": toml_schema.observers.backend,
"redis_url": toml_schema.observers.redis_url,
"nats_url": toml_schema.observers.nats_url,
"handlers": toml_schema.observers.handlers.iter().map(|h| json!({
"name": h.name,
"event": h.event,
"action": h.action,
"webhook_url": h.webhook_url,
"retry_strategy": h.retry_strategy,
"max_retries": h.max_retries,
"description": h.description,
})).collect::<Vec<_>>(),
});
}
if toml_schema.federation.enabled {
merged["federation_config"] = serde_json::to_value(&toml_schema.federation)
.unwrap_or_default();
}
serde_json::from_value::<IntermediateSchema>(merged)
.context("Failed to convert merged schema to IntermediateSchema")
}
}
#[cfg(test)]
mod tests {
    use std::fs;
    use tempfile::TempDir;
    use super::*;

    /// A minimal valid TOML schema should merge successfully on its own.
    #[test]
    fn test_merge_toml_only() -> Result<()> {
        let toml_content = r#"
[schema]
name = "test"
version = "1.0.0"
database_target = "postgresql"
[database]
url = "postgresql://localhost/test"
[types.User]
sql_source = "v_user"
[types.User.fields.id]
type = "ID"
[types.User.fields.name]
type = "String"
[queries.users]
return_type = "User"
return_array = true
sql_source = "v_user"
"#;
        // Use a unique temp dir instead of a fixed /tmp path so parallel test
        // runs cannot race on the same file, and cleanup happens even when an
        // assertion fails.
        let temp_dir = TempDir::new()?;
        let toml_path = temp_dir.path().join("fraiseql.toml");
        fs::write(&toml_path, toml_content)?;
        let result = SchemaMerger::merge_toml_only(toml_path.to_str().unwrap());
        assert!(result.is_ok());
        Ok(())
    }

    /// Glob includes should pick up every matching JSON file.
    #[test]
    fn test_merge_with_includes() -> Result<()> {
        let temp_dir = TempDir::new()?;
        let user_types = serde_json::json!({
            "types": [{"name": "User", "fields": []}],
            "queries": [],
            "mutations": []
        });
        fs::write(temp_dir.path().join("user.json"), user_types.to_string())?;
        let post_types = serde_json::json!({
            "types": [{"name": "Post", "fields": []}],
            "queries": [],
            "mutations": []
        });
        fs::write(temp_dir.path().join("post.json"), post_types.to_string())?;
        let toml_content = format!(
            r#"
[schema]
name = "test"
version = "1.0.0"
database_target = "postgresql"
[database]
url = "postgresql://localhost/test"
[includes]
types = ["{}/*.json"]
queries = []
mutations = []
"#,
            temp_dir.path().to_string_lossy()
        );
        let toml_path = temp_dir.path().join("fraiseql.toml");
        fs::write(&toml_path, toml_content)?;
        let result = SchemaMerger::merge_with_includes(toml_path.to_str().unwrap());
        assert!(result.is_ok());
        let schema = result?;
        assert_eq!(schema.types.len(), 2);
        Ok(())
    }

    /// Globs that match nothing should yield an empty (but valid) schema.
    #[test]
    fn test_merge_with_includes_missing_files() -> Result<()> {
        let temp_dir = TempDir::new()?;
        let toml_content = r#"
[schema]
name = "test"
version = "1.0.0"
database_target = "postgresql"
[database]
url = "postgresql://localhost/test"
[includes]
types = ["/nonexistent/path/*.json"]
queries = []
mutations = []
"#;
        let toml_path = temp_dir.path().join("fraiseql.toml");
        fs::write(&toml_path, toml_content)?;
        let result = SchemaMerger::merge_with_includes(toml_path.to_str().unwrap());
        assert!(result.is_ok());
        let schema = result?;
        assert_eq!(schema.types.len(), 0);
        Ok(())
    }

    /// Domain discovery should collect types and queries from every domain dir.
    #[test]
    fn test_merge_from_domains() -> Result<()> {
        let temp_dir = TempDir::new()?;
        let schema_dir = temp_dir.path().join("schema");
        fs::create_dir(&schema_dir)?;
        fs::create_dir(schema_dir.join("auth"))?;
        fs::create_dir(schema_dir.join("products"))?;
        let auth_types = serde_json::json!({
            "types": [{"name": "User", "fields": []}],
            "queries": [{"name": "getUser", "return_type": "User"}],
            "mutations": []
        });
        fs::write(schema_dir.join("auth/types.json"), auth_types.to_string())?;
        let product_types = serde_json::json!({
            "types": [{"name": "Product", "fields": []}],
            "queries": [{"name": "getProduct", "return_type": "Product"}],
            "mutations": []
        });
        fs::write(schema_dir.join("products/types.json"), product_types.to_string())?;
        let schema_dir_str = schema_dir.to_string_lossy().to_string();
        let toml_content = format!(
            r#"
[schema]
name = "test"
version = "1.0.0"
database_target = "postgresql"
[database]
url = "postgresql://localhost/test"
[domain_discovery]
enabled = true
root_dir = "{schema_dir_str}"
"#
        );
        let toml_path = temp_dir.path().join("fraiseql.toml");
        fs::write(&toml_path, toml_content)?;
        let result = SchemaMerger::merge_from_domains(toml_path.to_str().unwrap());
        assert!(result.is_ok());
        let schema = result?;
        assert_eq!(schema.types.len(), 2);
        assert_eq!(schema.queries.len(), 2);
        Ok(())
    }

    /// Domains should be processed in alphabetical order regardless of the
    /// order the directories were created in.
    #[test]
    fn test_merge_from_domains_alphabetical_order() -> Result<()> {
        let temp_dir = TempDir::new()?;
        let schema_dir = temp_dir.path().join("schema");
        fs::create_dir(&schema_dir)?;
        fs::create_dir(schema_dir.join("zebra"))?;
        fs::create_dir(schema_dir.join("alpha"))?;
        fs::create_dir(schema_dir.join("middle"))?;
        for domain in &["zebra", "alpha", "middle"] {
            let types = serde_json::json!({
                "types": [{"name": domain.to_uppercase(), "fields": []}],
                "queries": [],
                "mutations": []
            });
            fs::write(schema_dir.join(format!("{domain}/types.json")), types.to_string())?;
        }
        let schema_dir_str = schema_dir.to_string_lossy().to_string();
        let toml_content = format!(
            r#"
[schema]
name = "test"
version = "1.0.0"
database_target = "postgresql"
[database]
url = "postgresql://localhost/test"
[domain_discovery]
enabled = true
root_dir = "{schema_dir_str}"
"#
        );
        let toml_path = temp_dir.path().join("fraiseql.toml");
        fs::write(&toml_path, toml_content)?;
        let result = SchemaMerger::merge_from_domains(toml_path.to_str().unwrap());
        assert!(result.is_ok());
        let schema = result?;
        let type_names: Vec<String> = schema.types.iter().map(|t| t.name.clone()).collect();
        assert_eq!(type_names[0], "ALPHA");
        assert_eq!(type_names[1], "MIDDLE");
        assert_eq!(type_names[2], "ZEBRA");
        Ok(())
    }
}