// fraiseql_cli/schema/database_validator.rs
//! Compile-time database validation for schema definitions.
//!
//! Validates a compiled schema against a live database at three levels:
//! - **L1**: `sql_source` relation exists in the database
//! - **L2**: Columns and JSON column types match
//! - **L3**: JSONB keys exist in sampled rows (best-effort)
//!
//! All diagnostics are warnings — compilation never fails due to validation.

use std::{
    collections::{HashMap, HashSet},
    fmt,
};

use fraiseql_core::{
    db::{
        DatabaseType,
        introspector::{DatabaseIntrospector, RelationInfo},
    },
    schema::CompiledSchema,
};
22
/// Report containing all database validation warnings and discovered metadata.
///
/// Produced by [`validate_schema_against_database`]. Validation is advisory:
/// consumers surface `warnings` but never fail compilation because of them.
pub struct DatabaseValidationReport {
    /// All warnings emitted during validation (L1/L2/L3 checks).
    pub warnings: Vec<DatabaseWarning>,
    /// Native columns discovered per query during L2 validation.
    ///
    /// Key: query name. Value: map of argument name → PostgreSQL type string
    /// (e.g. `"uuid"`, `"integer"`, `"text"`).
    ///
    /// Only contains entries for queries that have at least one direct argument
    /// with a matching native column on their `sql_source`.
    pub native_columns: HashMap<String, HashMap<String, String>>,
}
36
/// A single database validation warning.
///
/// Each variant carries enough context (query name, relation, column/key) to
/// render a self-contained, single-line message via the `Display` impl.
#[derive(Debug)]
pub enum DatabaseWarning {
    /// L1: `sql_source` relation does not exist.
    MissingRelation {
        /// Name of the query or mutation.
        query_name: String,
        /// The `sql_source` value that was not found.
        sql_source: String,
    },
    /// L1: `additional_view` does not exist.
    MissingAdditionalView {
        /// Name of the query.
        query_name: String,
        /// The view name that was not found.
        view_name:  String,
    },
    /// L2: `jsonb_column` does not exist on the relation.
    MissingJsonColumn {
        /// Name of the query.
        query_name:  String,
        /// The `sql_source` relation.
        sql_source:  String,
        /// The missing column name.
        column_name: String,
    },
    /// L2: `jsonb_column` exists but is not a JSON/JSONB type.
    WrongJsonColumnType {
        /// Name of the query.
        query_name:  String,
        /// The `sql_source` relation.
        sql_source:  String,
        /// The column name.
        column_name: String,
        /// The actual SQL data type.
        actual_type: String,
    },
    /// L2: `relay_cursor_column` does not exist on the relation.
    MissingCursorColumn {
        /// Name of the query.
        query_name:  String,
        /// The `sql_source` relation.
        sql_source:  String,
        /// The missing cursor column name.
        column_name: String,
    },
    /// L3: a JSON key path is declared but not found in sampled data.
    MissingJsonKey {
        /// Name of the query.
        query_name:  String,
        /// The `sql_source` relation.
        sql_source:  String,
        /// The JSON column being sampled.
        json_column: String,
        /// The GraphQL field name.
        field_name:  String,
        /// The snake_case key looked up in the JSON.
        json_key:    String,
    },
    /// L2: a direct query argument has no matching native column — will fall back to JSONB extraction.
    ///
    /// For best performance, consider adding a native column with the same name
    /// and an index on the `sql_source` table/view.
    NativeColumnFallback {
        /// Name of the query.
        query_name: String,
        /// The `sql_source` relation.
        sql_source: String,
        /// The argument name that has no matching native column.
        arg_name:   String,
    },
}
109
110impl fmt::Display for DatabaseWarning {
111    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
112        match self {
113            Self::MissingRelation {
114                query_name,
115                sql_source,
116            } => {
117                write!(
118                    f,
119                    "query `{query_name}`: sql_source `{sql_source}` does not exist in database"
120                )
121            },
122            Self::MissingAdditionalView {
123                query_name,
124                view_name,
125            } => {
126                write!(
127                    f,
128                    "query `{query_name}`: additional_view `{view_name}` does not exist in database"
129                )
130            },
131            Self::MissingJsonColumn {
132                query_name,
133                sql_source,
134                column_name,
135            } => {
136                write!(
137                    f,
138                    "query `{query_name}`: column `{column_name}` not found on `{sql_source}`"
139                )
140            },
141            Self::WrongJsonColumnType {
142                query_name,
143                sql_source,
144                column_name,
145                actual_type,
146            } => {
147                write!(
148                    f,
149                    "query `{query_name}`: column `{column_name}` on `{sql_source}` is `{actual_type}`, expected json/jsonb"
150                )
151            },
152            Self::MissingCursorColumn {
153                query_name,
154                sql_source,
155                column_name,
156            } => {
157                write!(
158                    f,
159                    "query `{query_name}`: relay cursor column `{column_name}` not found on `{sql_source}`"
160                )
161            },
162            Self::MissingJsonKey {
163                query_name,
164                sql_source,
165                json_column,
166                field_name,
167                json_key,
168            } => {
169                write!(
170                    f,
171                    "query `{query_name}`: field `{field_name}` (key `{json_key}`) not found in `{sql_source}.{json_column}` sample data"
172                )
173            },
174            Self::NativeColumnFallback {
175                query_name,
176                sql_source,
177                arg_name,
178            } => {
179                write!(
180                    f,
181                    "query `{query_name}`: argument `{arg_name}` will use JSONB extraction \
182                     (`{sql_source}.data->>''{arg_name}''`) — no native column `{arg_name}` found on \
183                     `{sql_source}`. Add a native column with an index for O(log n) lookup."
184                )
185            },
186        }
187    }
188}
189
190/// Check if a SQL data type represents a JSON column for the given database.
191fn is_json_type(data_type: &str, db_type: DatabaseType) -> bool {
192    let lower = data_type.to_lowercase();
193    match db_type {
194        DatabaseType::PostgreSQL => lower == "jsonb" || lower == "json",
195        DatabaseType::MySQL => lower == "json",
196        DatabaseType::SQLite => lower.contains("json"),
197        // SQL Server has no native JSON type — always attempt JSON
198        // validation for the configured jsonb_column
199        DatabaseType::SQLServer => true,
200    }
201}
202
/// Split a potentially schema-qualified name into (optional_schema, name).
///
/// Splits on the first `.` only, so `"a.b.c"` yields `(Some("a"), "b.c")`.
fn split_schema_qualified(sql_source: &str) -> (Option<&str>, &str) {
    sql_source
        .split_once('.')
        .map_or((None, sql_source), |(schema, table)| (Some(schema), table))
}
210
211/// Check if a relation exists in the relation lookup maps.
212fn relation_exists(
213    schema_qualified: &HashMap<(String, String), RelationInfo>,
214    unqualified: &HashMap<String, Vec<String>>,
215    sql_source: &str,
216) -> bool {
217    let (schema, name) = split_schema_qualified(sql_source);
218    if let Some(s) = schema {
219        schema_qualified.contains_key(&(s.to_string(), name.to_string()))
220    } else {
221        unqualified.contains_key(name)
222    }
223}
224
/// Convert a `camelCase` or `PascalCase` field name to `snake_case`.
///
/// This matches the convention used by FraiseQL for JSONB key extraction.
/// Each uppercase letter becomes `_` + its lowercase form, except at the
/// very start of the name (so `PascalCase` → `pascal_case`, not `_pascal_case`).
fn to_snake_case(name: &str) -> String {
    let mut snake = String::with_capacity(name.len() + 4);
    for (idx, c) in name.char_indices() {
        if !c.is_uppercase() {
            snake.push(c);
            continue;
        }
        if idx > 0 {
            snake.push('_');
        }
        // Push only the first char of the lowercase mapping, preserving the
        // original's single-char behavior for multi-char Unicode lowercasings.
        snake.push(c.to_lowercase().next().unwrap_or(c));
    }
    snake
}
242
/// Validate a compiled schema against a live database.
///
/// Performs three levels of validation:
/// - **L1**: Checks that `sql_source` relations exist
/// - **L2**: Checks column existence and JSON column types
/// - **L3**: Checks JSONB key existence via sampling
///
/// All diagnostics are warnings — the report never causes compilation to fail.
/// Mutations only get the L1 existence check; queries get all three levels.
///
/// # Errors
///
/// Returns `FraiseQLError` if database introspection queries fail.
pub async fn validate_schema_against_database(
    schema: &CompiledSchema,
    introspector: &impl DatabaseIntrospector,
) -> fraiseql_core::Result<DatabaseValidationReport> {
    // Auto-wired argument names excluded from direct-arg native column detection.
    // Must stay in sync with AUTO_PARAM_NAMES in fraiseql-core/runtime/executor/query.rs.
    const AUTO_PARAM_NAMES: &[&str] =
        &["where", "limit", "offset", "orderBy", "first", "last", "after", "before"];

    let mut warnings = Vec::new();
    let mut native_columns: HashMap<String, HashMap<String, String>> = HashMap::new();
    let db_type = introspector.database_type();

    // L1: Build relation lookup maps (one introspection round-trip, reused for
    // every query/mutation below).
    let relations = introspector.list_relations().await?;
    let (schema_qualified, unqualified) = build_relation_maps(&relations);

    // Validate queries (queries without a sql_source are skipped entirely)
    for query in &schema.queries {
        if let Some(ref source) = query.sql_source {
            // L1: Check relation exists
            if !relation_exists(&schema_qualified, &unqualified, source) {
                warnings.push(DatabaseWarning::MissingRelation {
                    query_name: query.name.clone(),
                    sql_source: source.clone(),
                });
                continue; // Skip L2/L3 if relation doesn't exist
            }

            // L2: Get columns for the relation.
            // Pass the full source (possibly schema-qualified like "benchmark.tv_post") so
            // the introspector can use the explicit schema when present.
            let columns = introspector.get_columns(source).await?;
            let column_map: HashMap<String, String> =
                columns.into_iter().map(|(name, dtype, _)| (name, dtype)).collect();

            // L2: Check jsonb_column (an empty name means no JSONB column is configured)
            let jsonb_col = &query.jsonb_column;
            if !jsonb_col.is_empty() {
                if let Some(actual_type) = column_map.get(jsonb_col) {
                    if !is_json_type(actual_type, db_type) {
                        warnings.push(DatabaseWarning::WrongJsonColumnType {
                            query_name:  query.name.clone(),
                            sql_source:  source.clone(),
                            column_name: jsonb_col.clone(),
                            actual_type: actual_type.clone(),
                        });
                    }
                } else {
                    warnings.push(DatabaseWarning::MissingJsonColumn {
                        query_name:  query.name.clone(),
                        sql_source:  source.clone(),
                        column_name: jsonb_col.clone(),
                    });
                }
            }

            // L2: Check relay_cursor_column (only meaningful for relay queries)
            if query.relay {
                if let Some(ref cursor_col) = query.relay_cursor_column {
                    if !column_map.contains_key(cursor_col) {
                        warnings.push(DatabaseWarning::MissingCursorColumn {
                            query_name:  query.name.clone(),
                            sql_source:  source.clone(),
                            column_name: cursor_col.clone(),
                        });
                    }
                }
            }

            // L3: Sample JSON keys only if the jsonb_column exists AND has a
            // valid JSON type — otherwise L2 already warned above.
            if !jsonb_col.is_empty() {
                let json_type_ok =
                    column_map.get(jsonb_col).is_some_and(|t| is_json_type(t, db_type));

                if json_type_ok {
                    validate_json_keys(
                        schema,
                        query,
                        source,
                        jsonb_col,
                        introspector,
                        source, // pass full schema-qualified source for sample queries
                        &mut warnings,
                    )
                    .await?;
                }
            }

            // L2: Detect native columns for direct (non-auto-param) arguments.
            let direct_args: Vec<&str> = query
                .arguments
                .iter()
                .filter(|a| !AUTO_PARAM_NAMES.contains(&a.name.as_str()))
                .map(|a| a.name.as_str())
                .collect();

            if !direct_args.is_empty() {
                let mut query_native: HashMap<String, String> = HashMap::new();
                for arg_name in &direct_args {
                    if let Some(col_type) = column_map.get(*arg_name) {
                        // Matching native column found — record its SQL type so the
                        // compiler can emit a direct column comparison.
                        query_native.insert((*arg_name).to_string(), col_type.clone());
                    } else {
                        warnings.push(DatabaseWarning::NativeColumnFallback {
                            query_name: query.name.clone(),
                            sql_source: source.clone(),
                            arg_name:   (*arg_name).to_string(),
                        });
                    }
                }
                if !query_native.is_empty() {
                    native_columns.insert(query.name.clone(), query_native);
                }
            }

            // L1: Check additional_views
            for view in &query.additional_views {
                if !relation_exists(&schema_qualified, &unqualified, view) {
                    warnings.push(DatabaseWarning::MissingAdditionalView {
                        query_name: query.name.clone(),
                        view_name:  view.clone(),
                    });
                }
            }
        }
    }

    // Validate mutations (L1 only)
    for mutation in &schema.mutations {
        if let Some(ref source) = mutation.sql_source {
            if !relation_exists(&schema_qualified, &unqualified, source) {
                warnings.push(DatabaseWarning::MissingRelation {
                    query_name: mutation.name.clone(),
                    sql_source: source.clone(),
                });
            }
        }
    }

    Ok(DatabaseValidationReport { warnings, native_columns })
}
396
397/// Build lookup maps from the list of relations.
398fn build_relation_maps(
399    relations: &[RelationInfo],
400) -> (HashMap<(String, String), RelationInfo>, HashMap<String, Vec<String>>) {
401    let mut schema_qualified = HashMap::new();
402    let mut unqualified: HashMap<String, Vec<String>> = HashMap::new();
403
404    for rel in relations {
405        schema_qualified.insert((rel.schema.clone(), rel.name.clone()), rel.clone());
406        unqualified.entry(rel.name.clone()).or_default().push(rel.schema.clone());
407    }
408
409    (schema_qualified, unqualified)
410}
411
412/// Validate JSON keys in sampled data for L3 checking.
413async fn validate_json_keys(
414    schema: &CompiledSchema,
415    query: &fraiseql_core::schema::QueryDefinition,
416    source: &str,
417    jsonb_col: &str,
418    introspector: &impl DatabaseIntrospector,
419    table_name: &str,
420    warnings: &mut Vec<DatabaseWarning>,
421) -> fraiseql_core::Result<()> {
422    let samples = introspector.get_sample_json_rows(table_name, jsonb_col, 5).await?;
423
424    if samples.is_empty() {
425        return Ok(());
426    }
427
428    // Merge all top-level keys from sampled rows
429    let mut all_keys = HashSet::new();
430    for sample in &samples {
431        if let serde_json::Value::Object(map) = sample {
432            for key in map.keys() {
433                all_keys.insert(key.clone());
434            }
435        }
436    }
437
438    if all_keys.is_empty() {
439        return Ok(());
440    }
441
442    // Find the type definition for this query's return type
443    let type_def = schema.types.iter().find(|t| t.name.as_str() == query.return_type);
444
445    if let Some(type_def) = type_def {
446        for field in &type_def.fields {
447            let field_str = field.name.as_str();
448            let json_key = to_snake_case(field_str);
449            // Skip fields that are top-level columns (not from JSONB)
450            // Convention: fields like "id", "pk_*", "fk_*" are columns, not JSON keys
451            if field_str == "id" || field_str.starts_with("pk_") || field_str.starts_with("fk_") {
452                continue;
453            }
454            if !all_keys.contains(&json_key) && !all_keys.contains(field_str) {
455                warnings.push(DatabaseWarning::MissingJsonKey {
456                    query_name: query.name.clone(),
457                    sql_source: source.to_string(),
458                    json_column: jsonb_col.to_string(),
459                    field_name: field_str.to_string(),
460                    json_key,
461                });
462            }
463        }
464    }
465
466    Ok(())
467}
468
/// Enum dispatch for database introspectors.
///
/// Uses enum dispatch instead of `Box<dyn DatabaseIntrospector>` because the
/// trait uses `async_fn_in_trait` and cannot be object-safe.
///
/// Non-PostgreSQL variants only exist when the corresponding cargo feature
/// is enabled; PostgreSQL support is always compiled in.
pub enum AnyIntrospector {
    /// PostgreSQL introspector.
    Postgres(fraiseql_core::db::PostgresIntrospector),
    #[cfg(feature = "mysql")]
    /// MySQL introspector.
    MySql(fraiseql_core::db::MySqlIntrospector),
    #[cfg(feature = "sqlite")]
    /// SQLite introspector.
    Sqlite(fraiseql_core::db::SqliteIntrospector),
    #[cfg(feature = "sqlserver")]
    /// SQL Server introspector.
    SqlServer(fraiseql_core::db::SqlServerIntrospector),
}
486
487impl DatabaseIntrospector for AnyIntrospector {
488    async fn list_fact_tables(&self) -> fraiseql_core::Result<Vec<String>> {
489        match self {
490            Self::Postgres(i) => i.list_fact_tables().await,
491            #[cfg(feature = "mysql")]
492            Self::MySql(i) => i.list_fact_tables().await,
493            #[cfg(feature = "sqlite")]
494            Self::Sqlite(i) => i.list_fact_tables().await,
495            #[cfg(feature = "sqlserver")]
496            Self::SqlServer(i) => i.list_fact_tables().await,
497        }
498    }
499
500    async fn get_columns(
501        &self,
502        table_name: &str,
503    ) -> fraiseql_core::Result<Vec<(String, String, bool)>> {
504        match self {
505            Self::Postgres(i) => i.get_columns(table_name).await,
506            #[cfg(feature = "mysql")]
507            Self::MySql(i) => i.get_columns(table_name).await,
508            #[cfg(feature = "sqlite")]
509            Self::Sqlite(i) => i.get_columns(table_name).await,
510            #[cfg(feature = "sqlserver")]
511            Self::SqlServer(i) => i.get_columns(table_name).await,
512        }
513    }
514
515    async fn get_indexed_columns(&self, table_name: &str) -> fraiseql_core::Result<Vec<String>> {
516        match self {
517            Self::Postgres(i) => i.get_indexed_columns(table_name).await,
518            #[cfg(feature = "mysql")]
519            Self::MySql(i) => i.get_indexed_columns(table_name).await,
520            #[cfg(feature = "sqlite")]
521            Self::Sqlite(i) => i.get_indexed_columns(table_name).await,
522            #[cfg(feature = "sqlserver")]
523            Self::SqlServer(i) => i.get_indexed_columns(table_name).await,
524        }
525    }
526
527    fn database_type(&self) -> DatabaseType {
528        match self {
529            Self::Postgres(i) => i.database_type(),
530            #[cfg(feature = "mysql")]
531            Self::MySql(i) => i.database_type(),
532            #[cfg(feature = "sqlite")]
533            Self::Sqlite(i) => i.database_type(),
534            #[cfg(feature = "sqlserver")]
535            Self::SqlServer(i) => i.database_type(),
536        }
537    }
538
539    async fn get_sample_jsonb(
540        &self,
541        table_name: &str,
542        column_name: &str,
543    ) -> fraiseql_core::Result<Option<serde_json::Value>> {
544        match self {
545            Self::Postgres(i) => i.get_sample_jsonb(table_name, column_name).await,
546            #[cfg(feature = "mysql")]
547            Self::MySql(i) => i.get_sample_jsonb(table_name, column_name).await,
548            #[cfg(feature = "sqlite")]
549            Self::Sqlite(i) => i.get_sample_jsonb(table_name, column_name).await,
550            #[cfg(feature = "sqlserver")]
551            Self::SqlServer(i) => i.get_sample_jsonb(table_name, column_name).await,
552        }
553    }
554
555    async fn list_relations(&self) -> fraiseql_core::Result<Vec<fraiseql_core::db::RelationInfo>> {
556        match self {
557            Self::Postgres(i) => i.list_relations().await,
558            #[cfg(feature = "mysql")]
559            Self::MySql(i) => i.list_relations().await,
560            #[cfg(feature = "sqlite")]
561            Self::Sqlite(i) => i.list_relations().await,
562            #[cfg(feature = "sqlserver")]
563            Self::SqlServer(i) => i.list_relations().await,
564        }
565    }
566
567    async fn get_sample_json_rows(
568        &self,
569        table_name: &str,
570        column_name: &str,
571        limit: usize,
572    ) -> fraiseql_core::Result<Vec<serde_json::Value>> {
573        match self {
574            Self::Postgres(i) => i.get_sample_json_rows(table_name, column_name, limit).await,
575            #[cfg(feature = "mysql")]
576            Self::MySql(i) => i.get_sample_json_rows(table_name, column_name, limit).await,
577            #[cfg(feature = "sqlite")]
578            Self::Sqlite(i) => i.get_sample_json_rows(table_name, column_name, limit).await,
579            #[cfg(feature = "sqlserver")]
580            Self::SqlServer(i) => i.get_sample_json_rows(table_name, column_name, limit).await,
581        }
582    }
583}
584
/// Create an introspector from a database URL.
///
/// Detects the database type from the URL scheme and creates the appropriate
/// introspector with a connection pool. Backends other than PostgreSQL are
/// feature-gated; requesting one that was not compiled in returns an error
/// naming the required cargo feature.
///
/// # Errors
///
/// Returns error if the URL scheme is unrecognized or the connection pool
/// cannot be created.
#[allow(clippy::unused_async)] // Reason: callers always .await this; feature-gated branches do use await
pub async fn create_introspector(db_url: &str) -> anyhow::Result<AnyIntrospector> {
    if db_url.starts_with("postgres") {
        use deadpool_postgres::{Config, ManagerConfig, RecyclingMethod, Runtime};
        use tokio_postgres::NoTls;

        let mut cfg = Config::new();
        cfg.url = Some(db_url.to_string());
        cfg.manager = Some(ManagerConfig {
            recycling_method: RecyclingMethod::Fast,
        });
        // Small pool: introspection only needs a couple of connections.
        cfg.pool = Some(deadpool_postgres::PoolConfig::new(2));

        let pool = cfg
            .create_pool(Some(Runtime::Tokio1), NoTls)
            .map_err(|e| anyhow::anyhow!("Failed to create PostgreSQL pool: {e}"))?;

        Ok(AnyIntrospector::Postgres(fraiseql_core::db::PostgresIntrospector::new(pool)))
    } else if db_url.starts_with("mysql") || db_url.starts_with("mariadb") {
        #[cfg(feature = "mysql")]
        {
            use sqlx::mysql::MySqlPool;

            let pool = MySqlPool::connect(db_url)
                .await
                .map_err(|e| anyhow::anyhow!("Failed to create MySQL pool: {e}"))?;

            Ok(AnyIntrospector::MySql(fraiseql_core::db::MySqlIntrospector::new(pool)))
        }
        #[cfg(not(feature = "mysql"))]
        {
            anyhow::bail!("MySQL support not compiled in. Rebuild with `--features mysql`.")
        }
    } else if db_url.starts_with("sqlite")
        // Bare file paths ending in .db/.sqlite are also treated as SQLite.
        || std::path::Path::new(db_url)
            .extension()
            .is_some_and(|ext| ext.eq_ignore_ascii_case("db") || ext.eq_ignore_ascii_case("sqlite"))
    {
        #[cfg(feature = "sqlite")]
        {
            use sqlx::sqlite::SqlitePool;

            let pool = SqlitePool::connect(db_url)
                .await
                .map_err(|e| anyhow::anyhow!("Failed to create SQLite pool: {e}"))?;

            Ok(AnyIntrospector::Sqlite(fraiseql_core::db::SqliteIntrospector::new(pool)))
        }
        #[cfg(not(feature = "sqlite"))]
        {
            anyhow::bail!("SQLite support not compiled in. Rebuild with `--features sqlite`.")
        }
    } else if db_url.starts_with("mssql") || db_url.starts_with("server=") {
        #[cfg(feature = "sqlserver")]
        {
            use bb8::Pool;
            use bb8_tiberius::ConnectionManager;
            use tiberius::Config;

            let config = Config::from_ado_string(db_url).map_err(|e| {
                anyhow::anyhow!("Failed to parse SQL Server connection string: {e}")
            })?;
            let mgr = ConnectionManager::build(config).map_err(|e| {
                anyhow::anyhow!("Failed to build SQL Server connection manager: {e}")
            })?;
            let pool = Pool::builder()
                .max_size(2)
                .build(mgr)
                .await
                .map_err(|e| anyhow::anyhow!("Failed to create SQL Server pool: {e}"))?;

            Ok(AnyIntrospector::SqlServer(fraiseql_core::db::SqlServerIntrospector::new(pool)))
        }
        #[cfg(not(feature = "sqlserver"))]
        {
            anyhow::bail!(
                "SQL Server support not compiled in. Rebuild with `--features sqlserver`."
            )
        }
    } else {
        anyhow::bail!("Unrecognized database URL scheme: {db_url}")
    }
}
677
678#[cfg(test)]
679#[allow(clippy::unwrap_used)] // Reason: test code
680mod tests {
681    use std::collections::HashMap;
682
683    use fraiseql_core::{
684        schema::{
685            AutoParams, CompiledSchema, CursorType, FieldDefinition, FieldType, MutationDefinition,
686            QueryDefinition, TypeDefinition,
687        },
688        validation::CustomTypeRegistry,
689    };
690    use indexmap::IndexMap;
691
692    use super::*;
693
    /// Mock introspector for unit tests.
    ///
    /// Relations, columns, and JSON samples are injected via the `with_*`
    /// builder methods; anything not registered yields empty results.
    struct MockIntrospector {
        relations:    Vec<RelationInfo>,
        columns:      HashMap<String, Vec<(String, String, bool)>>,
        // Keyed by (table, column) to mirror get_sample_json_rows arguments.
        json_samples: HashMap<(String, String), Vec<serde_json::Value>>,
        db_type:      DatabaseType,
    }
701
    impl MockIntrospector {
        /// Create an empty mock for the given database type.
        fn new(db_type: DatabaseType) -> Self {
            Self {
                relations: Vec::new(),
                columns: HashMap::new(),
                json_samples: HashMap::new(),
                db_type,
            }
        }

        /// Register a relation visible to `list_relations`.
        fn with_relation(
            mut self,
            schema: &str,
            name: &str,
            kind: fraiseql_core::db::RelationKind,
        ) -> Self {
            self.relations.push(RelationInfo {
                schema: schema.to_string(),
                name: name.to_string(),
                kind,
            });
            self
        }

        /// Register `(name, type, nullable)` columns returned by `get_columns`.
        fn with_columns(mut self, table: &str, cols: Vec<(&str, &str, bool)>) -> Self {
            self.columns.insert(
                table.to_string(),
                cols.into_iter()
                    .map(|(n, t, nullable)| (n.to_string(), t.to_string(), nullable))
                    .collect(),
            );
            self
        }

        /// Register sample JSON rows returned by `get_sample_json_rows`.
        fn with_json_samples(
            mut self,
            table: &str,
            column: &str,
            samples: Vec<serde_json::Value>,
        ) -> Self {
            self.json_samples.insert((table.to_string(), column.to_string()), samples);
            self
        }
    }
746
    // Fixture-backed trait implementation: every lookup falls back to an
    // empty result when no fixture was registered.
    impl DatabaseIntrospector for MockIntrospector {
        async fn list_fact_tables(&self) -> fraiseql_core::Result<Vec<String>> {
            Ok(Vec::new())
        }

        async fn get_columns(
            &self,
            table_name: &str,
        ) -> fraiseql_core::Result<Vec<(String, String, bool)>> {
            Ok(self.columns.get(table_name).cloned().unwrap_or_default())
        }

        async fn get_indexed_columns(
            &self,
            _table_name: &str,
        ) -> fraiseql_core::Result<Vec<String>> {
            Ok(Vec::new())
        }

        fn database_type(&self) -> DatabaseType {
            self.db_type
        }

        async fn list_relations(&self) -> fraiseql_core::Result<Vec<RelationInfo>> {
            Ok(self.relations.clone())
        }

        async fn get_sample_json_rows(
            &self,
            table_name: &str,
            column_name: &str,
            _limit: usize, // fixtures are returned whole regardless of limit
        ) -> fraiseql_core::Result<Vec<serde_json::Value>> {
            Ok(self
                .json_samples
                .get(&(table_name.to_string(), column_name.to_string()))
                .cloned()
                .unwrap_or_default())
        }
    }
787
    /// Build a minimal list-returning query over `sql_source` with
    /// `jsonb_column = "data"`, no arguments, and relay disabled.
    fn make_query(name: &str, return_type: &str, sql_source: &str) -> QueryDefinition {
        QueryDefinition {
            name:                name.to_string(),
            return_type:         return_type.to_string(),
            returns_list:        true,
            nullable:            false,
            arguments:           vec![],
            sql_source:          Some(sql_source.to_string()),
            description:         None,
            auto_params:         AutoParams::default(),
            deprecation:         None,
            jsonb_column:        "data".to_string(),
            relay:               false,
            relay_cursor_column: None,
            relay_cursor_type:   CursorType::default(),
            inject_params:       IndexMap::default(),
            cache_ttl_seconds:   None,
            additional_views:    vec![],
            requires_role:       None,
            rest_path:           None,
            rest_method:         None,
            native_columns:      HashMap::new(),
        }
    }
812
    /// Build a type definition with the given `(name, type)` fields,
    /// `jsonb_column = "data"`, and no relationships or interfaces.
    fn make_type(name: &str, fields: Vec<(&str, FieldType)>) -> TypeDefinition {
        TypeDefinition {
            name:                name.into(),
            fields:              fields
                .into_iter()
                .map(|(n, ft)| FieldDefinition::new(n, ft))
                .collect(),
            description:         None,
            sql_source:          "".into(),
            jsonb_column:        "data".to_string(),
            sql_projection_hint: None,
            implements:          vec![],
            requires_role:       None,
            is_error:            false,
            relay:               false,
            relationships:       Vec::new(),
        }
    }
831
832    fn make_schema(types: Vec<TypeDefinition>, queries: Vec<QueryDefinition>) -> CompiledSchema {
833        CompiledSchema {
834            types,
835            queries,
836            enums: vec![],
837            input_types: vec![],
838            interfaces: vec![],
839            unions: vec![],
840            mutations: vec![],
841            subscriptions: vec![],
842            directives: vec![],
843            observers: Vec::new(),
844            fact_tables: HashMap::default(),
845            federation: None,
846            security: None,
847            observers_config: None,
848            subscriptions_config: None,
849            validation_config: None,
850            debug_config: None,
851            mcp_config: None,
852            schema_sdl: None,
853            schema_format_version: None,
854            custom_scalars: CustomTypeRegistry::default(),
855            ..Default::default()
856        }
857    }
858
859    #[tokio::test]
860    async fn test_valid_schema_no_warnings() {
861        let introspector = MockIntrospector::new(DatabaseType::PostgreSQL)
862            .with_relation("public", "v_user", fraiseql_core::db::RelationKind::View)
863            .with_columns("v_user", vec![("data", "jsonb", false), ("pk_user", "bigint", false)])
864            .with_json_samples(
865                "v_user",
866                "data",
867                vec![serde_json::json!({"name": "Alice", "email": "alice@example.com"})],
868            );
869
870        let schema = make_schema(
871            vec![make_type(
872                "User",
873                vec![("name", FieldType::String), ("email", FieldType::String)],
874            )],
875            vec![make_query("users", "User", "v_user")],
876        );
877
878        let report = validate_schema_against_database(&schema, &introspector).await.unwrap();
879        assert!(
880            report.warnings.is_empty(),
881            "Expected no warnings, got: {:?}",
882            report.warnings.len()
883        );
884    }
885
886    #[tokio::test]
887    async fn test_missing_relation() {
888        let introspector = MockIntrospector::new(DatabaseType::PostgreSQL);
889        let schema = make_schema(vec![], vec![make_query("users", "User", "v_user")]);
890
891        let report = validate_schema_against_database(&schema, &introspector).await.unwrap();
892        assert_eq!(report.warnings.len(), 1);
893        assert!(
894            matches!(&report.warnings[0], DatabaseWarning::MissingRelation { sql_source, .. } if sql_source == "v_user")
895        );
896    }
897
898    #[tokio::test]
899    async fn test_missing_additional_view() {
900        let introspector = MockIntrospector::new(DatabaseType::PostgreSQL)
901            .with_relation("public", "v_user", fraiseql_core::db::RelationKind::View)
902            .with_columns("v_user", vec![("data", "jsonb", false)]);
903
904        let mut query = make_query("users", "User", "v_user");
905        query.additional_views = vec!["v_missing".to_string()];
906
907        let schema = make_schema(vec![], vec![query]);
908
909        let report = validate_schema_against_database(&schema, &introspector).await.unwrap();
910        assert_eq!(report.warnings.len(), 1);
911        assert!(
912            matches!(&report.warnings[0], DatabaseWarning::MissingAdditionalView { view_name, .. } if view_name == "v_missing")
913        );
914    }
915
916    #[tokio::test]
917    async fn test_missing_jsonb_column() {
918        let introspector = MockIntrospector::new(DatabaseType::PostgreSQL)
919            .with_relation("public", "v_user", fraiseql_core::db::RelationKind::View)
920            .with_columns("v_user", vec![("pk_user", "bigint", false)]);
921
922        let schema = make_schema(vec![], vec![make_query("users", "User", "v_user")]);
923
924        let report = validate_schema_against_database(&schema, &introspector).await.unwrap();
925        assert_eq!(report.warnings.len(), 1);
926        assert!(
927            matches!(&report.warnings[0], DatabaseWarning::MissingJsonColumn { column_name, .. } if column_name == "data")
928        );
929    }
930
931    #[tokio::test]
932    async fn test_wrong_json_column_type() {
933        let introspector = MockIntrospector::new(DatabaseType::PostgreSQL)
934            .with_relation("public", "v_user", fraiseql_core::db::RelationKind::View)
935            .with_columns("v_user", vec![("data", "text", false)]);
936
937        let schema = make_schema(vec![], vec![make_query("users", "User", "v_user")]);
938
939        let report = validate_schema_against_database(&schema, &introspector).await.unwrap();
940        assert_eq!(report.warnings.len(), 1);
941        assert!(
942            matches!(&report.warnings[0], DatabaseWarning::WrongJsonColumnType { actual_type, .. } if actual_type == "text")
943        );
944    }
945
946    #[tokio::test]
947    async fn test_sqlserver_nvarchar_no_warning() {
948        let introspector = MockIntrospector::new(DatabaseType::SQLServer)
949            .with_relation("dbo", "v_user", fraiseql_core::db::RelationKind::View)
950            .with_columns("v_user", vec![("data", "nvarchar", false)]);
951
952        let schema = make_schema(vec![], vec![make_query("users", "User", "v_user")]);
953
954        let report = validate_schema_against_database(&schema, &introspector).await.unwrap();
955        // SQL Server: nvarchar is always accepted for JSON columns
956        assert!(
957            !report
958                .warnings
959                .iter()
960                .any(|w| matches!(w, DatabaseWarning::WrongJsonColumnType { .. }))
961        );
962    }
963
964    #[tokio::test]
965    async fn test_missing_cursor_column() {
966        let introspector = MockIntrospector::new(DatabaseType::PostgreSQL)
967            .with_relation("public", "v_user", fraiseql_core::db::RelationKind::View)
968            .with_columns("v_user", vec![("data", "jsonb", false)]);
969
970        let mut query = make_query("users", "User", "v_user");
971        query.relay = true;
972        query.relay_cursor_column = Some("pk_user".to_string());
973
974        let schema = make_schema(vec![], vec![query]);
975
976        let report = validate_schema_against_database(&schema, &introspector).await.unwrap();
977        assert!(report.warnings.iter().any(|w| matches!(w, DatabaseWarning::MissingCursorColumn { column_name, .. } if column_name == "pk_user")));
978    }
979
980    #[tokio::test]
981    async fn test_missing_json_key() {
982        let introspector = MockIntrospector::new(DatabaseType::PostgreSQL)
983            .with_relation("public", "v_user", fraiseql_core::db::RelationKind::View)
984            .with_columns("v_user", vec![("data", "jsonb", false)])
985            .with_json_samples("v_user", "data", vec![serde_json::json!({"name": "Alice"})]);
986
987        let schema = make_schema(
988            vec![make_type(
989                "User",
990                vec![("name", FieldType::String), ("email", FieldType::String)],
991            )],
992            vec![make_query("users", "User", "v_user")],
993        );
994
995        let report = validate_schema_against_database(&schema, &introspector).await.unwrap();
996        assert!(report.warnings.iter().any(|w| matches!(w, DatabaseWarning::MissingJsonKey { field_name, .. } if field_name == "email")));
997    }
998
999    #[tokio::test]
1000    async fn test_empty_json_sample_no_l3_warnings() {
1001        let introspector = MockIntrospector::new(DatabaseType::PostgreSQL)
1002            .with_relation("public", "v_user", fraiseql_core::db::RelationKind::View)
1003            .with_columns("v_user", vec![("data", "jsonb", false)]);
1004
1005        let schema = make_schema(
1006            vec![make_type("User", vec![("name", FieldType::String)])],
1007            vec![make_query("users", "User", "v_user")],
1008        );
1009
1010        let report = validate_schema_against_database(&schema, &introspector).await.unwrap();
1011        // No L3 warnings because no sample data
1012        assert!(
1013            !report
1014                .warnings
1015                .iter()
1016                .any(|w| matches!(w, DatabaseWarning::MissingJsonKey { .. }))
1017        );
1018    }
1019
1020    #[tokio::test]
1021    async fn test_schema_qualified_match() {
1022        let introspector = MockIntrospector::new(DatabaseType::PostgreSQL)
1023            .with_relation("etl_log", "v_foo", fraiseql_core::db::RelationKind::View)
1024            .with_columns("v_foo", vec![("data", "jsonb", false)]);
1025
1026        let schema = make_schema(vec![], vec![make_query("foos", "Foo", "etl_log.v_foo")]);
1027
1028        let report = validate_schema_against_database(&schema, &introspector).await.unwrap();
1029        // Should match
1030        assert!(
1031            !report
1032                .warnings
1033                .iter()
1034                .any(|w| matches!(w, DatabaseWarning::MissingRelation { .. }))
1035        );
1036    }
1037
1038    #[tokio::test]
1039    async fn test_schema_qualified_wrong_schema() {
1040        let introspector = MockIntrospector::new(DatabaseType::PostgreSQL).with_relation(
1041            "public",
1042            "v_foo",
1043            fraiseql_core::db::RelationKind::View,
1044        );
1045
1046        let schema = make_schema(vec![], vec![make_query("foos", "Foo", "etl_log.v_foo")]);
1047
1048        let report = validate_schema_against_database(&schema, &introspector).await.unwrap();
1049        assert_eq!(report.warnings.len(), 1);
1050        assert!(
1051            matches!(&report.warnings[0], DatabaseWarning::MissingRelation { sql_source, .. } if sql_source == "etl_log.v_foo")
1052        );
1053    }
1054
1055    #[tokio::test]
1056    async fn test_mutation_missing_sql_source() {
1057        let introspector = MockIntrospector::new(DatabaseType::PostgreSQL);
1058
1059        let mut schema = make_schema(vec![], vec![]);
1060        schema.mutations.push(MutationDefinition {
1061            name: "createUser".to_string(),
1062            sql_source: Some("fn_create_user".to_string()),
1063            ..MutationDefinition::new("createUser", "User")
1064        });
1065
1066        let report = validate_schema_against_database(&schema, &introspector).await.unwrap();
1067        assert_eq!(report.warnings.len(), 1);
1068        assert!(
1069            matches!(&report.warnings[0], DatabaseWarning::MissingRelation { sql_source, .. } if sql_source == "fn_create_user")
1070        );
1071    }
1072
1073    #[tokio::test]
1074    async fn test_query_no_sql_source_skipped() {
1075        let introspector = MockIntrospector::new(DatabaseType::PostgreSQL);
1076
1077        let mut query = make_query("users", "User", "v_user");
1078        query.sql_source = None;
1079
1080        let schema = make_schema(vec![], vec![query]);
1081
1082        let report = validate_schema_against_database(&schema, &introspector).await.unwrap();
1083        assert!(report.warnings.is_empty());
1084    }
1085
1086    #[tokio::test]
1087    async fn test_multiple_samples_merge_keys() {
1088        let introspector = MockIntrospector::new(DatabaseType::PostgreSQL)
1089            .with_relation("public", "v_user", fraiseql_core::db::RelationKind::View)
1090            .with_columns("v_user", vec![("data", "jsonb", false)])
1091            .with_json_samples(
1092                "v_user",
1093                "data",
1094                vec![
1095                    serde_json::json!({"name": "Alice", "email": "alice@example.com"}),
1096                    serde_json::json!({"email": "bob@example.com", "age": 30}),
1097                ],
1098            );
1099
1100        let schema = make_schema(
1101            vec![make_type(
1102                "User",
1103                vec![
1104                    ("name", FieldType::String),
1105                    ("email", FieldType::String),
1106                    ("age", FieldType::Int),
1107                ],
1108            )],
1109            vec![make_query("users", "User", "v_user")],
1110        );
1111
1112        let report = validate_schema_against_database(&schema, &introspector).await.unwrap();
1113        // All keys present across both samples
1114        assert!(
1115            !report
1116                .warnings
1117                .iter()
1118                .any(|w| matches!(w, DatabaseWarning::MissingJsonKey { .. }))
1119        );
1120    }
1121
1122    #[test]
1123    fn test_to_snake_case() {
1124        assert_eq!(to_snake_case("firstName"), "first_name");
1125        assert_eq!(to_snake_case("name"), "name");
1126        assert_eq!(to_snake_case("HTMLParser"), "h_t_m_l_parser");
1127        assert_eq!(to_snake_case("already_snake"), "already_snake");
1128    }
1129
1130    #[test]
1131    fn test_is_json_type_postgres() {
1132        assert!(is_json_type("jsonb", DatabaseType::PostgreSQL));
1133        assert!(is_json_type("json", DatabaseType::PostgreSQL));
1134        assert!(!is_json_type("text", DatabaseType::PostgreSQL));
1135    }
1136
1137    #[test]
1138    fn test_is_json_type_mysql() {
1139        assert!(is_json_type("json", DatabaseType::MySQL));
1140        assert!(!is_json_type("varchar", DatabaseType::MySQL));
1141    }
1142
1143    #[test]
1144    fn test_is_json_type_sqlite() {
1145        assert!(is_json_type("json", DatabaseType::SQLite));
1146        assert!(is_json_type("JSON", DatabaseType::SQLite));
1147        assert!(!is_json_type("text", DatabaseType::SQLite));
1148    }
1149
1150    #[test]
1151    fn test_is_json_type_sqlserver() {
1152        // SQL Server always returns true
1153        assert!(is_json_type("nvarchar", DatabaseType::SQLServer));
1154        assert!(is_json_type("varchar", DatabaseType::SQLServer));
1155    }
1156
1157    #[test]
1158    fn test_display_warnings() {
1159        let warning = DatabaseWarning::MissingRelation {
1160            query_name: "users".to_string(),
1161            sql_source: "v_user".to_string(),
1162        };
1163        assert_eq!(
1164            warning.to_string(),
1165            "query `users`: sql_source `v_user` does not exist in database"
1166        );
1167    }
1168}