// panproto_protocols/database/sql.rs
1//! SQL protocol definition.
2//!
3//! SQL uses a constrained hypergraph schema theory
4//! (`colimit(ThHypergraph, ThConstraint)`) and a set-valued functor
5//! instance theory (`ThFunctor`).
6//!
7//! Tables are modeled as hyper-edges connecting column vertices.
8//! Foreign keys are hyper-edges connecting source columns to target columns.
9
10use std::collections::HashMap;
11
12use panproto_gat::{Sort, Theory, colimit};
13use panproto_schema::{EdgeRule, Protocol, Schema, SchemaBuilder};
14
15use crate::error::ProtocolError;
16use crate::theories;
17
18/// Returns the SQL protocol definition.
19///
20/// Schema theory: `colimit(ThHypergraph, ThConstraint)`.
21/// Instance theory: `ThFunctor`.
22#[must_use]
23pub fn protocol() -> Protocol {
24    Protocol {
25        name: "sql".into(),
26        schema_theory: "ThSQLSchema".into(),
27        instance_theory: "ThSQLInstance".into(),
28        edge_rules: edge_rules(),
29        obj_kinds: vec![
30            "table".into(),
31            "integer".into(),
32            "string".into(),
33            "boolean".into(),
34            "number".into(),
35            "bytes".into(),
36            "timestamp".into(),
37            "date".into(),
38            "uuid".into(),
39            "json".into(),
40        ],
41        constraint_sorts: vec![
42            "NOT NULL".into(),
43            "UNIQUE".into(),
44            "CHECK".into(),
45            "PRIMARY KEY".into(),
46            "DEFAULT".into(),
47            "FOREIGN KEY".into(),
48        ],
49        has_order: true,
50        nominal_identity: true,
51        ..Protocol::default()
52    }
53}
54
55/// Register the component GATs for SQL with a theory registry.
56///
57/// Registers `ThHypergraph`, `ThConstraint`, `ThFunctor`, and the
58/// composed schema/instance theories.
59pub fn register_theories<S: ::std::hash::BuildHasher>(registry: &mut HashMap<String, Theory, S>) {
60    let th_hypergraph = theories::th_hypergraph();
61    let th_constraint = theories::th_constraint();
62    let th_functor = theories::th_functor();
63
64    registry.insert("ThHypergraph".into(), th_hypergraph.clone());
65    registry.insert("ThConstraint".into(), th_constraint.clone());
66    registry.insert("ThFunctor".into(), th_functor.clone());
67
68    // Compose schema theory: colimit(ThHypergraph, ThConstraint) over shared Vertex.
69    let shared_vertex = Theory::new("ThVertex", vec![Sort::simple("Vertex")], vec![], vec![]);
70
71    if let Ok(mut schema_theory) = colimit(&th_hypergraph, &th_constraint, &shared_vertex) {
72        schema_theory.name = "ThSQLSchema".into();
73        registry.insert("ThSQLSchema".into(), schema_theory);
74    }
75
76    // Instance theory is just ThFunctor (no colimit needed).
77    let mut inst = th_functor;
78    inst.name = "ThSQLInstance".into();
79    registry.insert("ThSQLInstance".into(), inst);
80}
81
82/// Parse a SQL DDL string into a [`Schema`].
83///
84/// Supports `CREATE TABLE`, `ALTER TABLE`, and `DROP TABLE` statements with
85/// column definitions, primary keys, foreign keys, `NOT NULL`, `UNIQUE`,
86/// `CHECK`, and `DEFAULT` constraints.
87///
88/// # Errors
89///
90/// Returns [`ProtocolError`] if the DDL cannot be parsed or
91/// schema construction fails.
92pub fn parse_ddl(ddl: &str) -> Result<Schema, ProtocolError> {
93    let proto = protocol();
94    let mut builder = SchemaBuilder::new(&proto);
95    let mut hyper_edge_counter: usize = 0;
96    let mut dropped_tables: std::collections::HashSet<String> = std::collections::HashSet::new();
97
98    // Simple line-based DDL parser.
99    let statements = split_statements(ddl);
100
101    // First pass: identify dropped tables.
102    for stmt in &statements {
103        let trimmed = stmt.trim();
104        let upper = trimmed.to_uppercase();
105        if upper.starts_with("DROP TABLE") {
106            if let Ok(name) = extract_drop_table_name(trimmed) {
107                dropped_tables.insert(name);
108            }
109        }
110    }
111
112    // Track tables and their columns for ALTER TABLE support.
113    let mut table_columns: HashMap<String, HashMap<String, String>> = HashMap::new();
114
115    for stmt in &statements {
116        let trimmed = stmt.trim();
117        let upper = trimmed.to_uppercase();
118
119        if upper.starts_with("CREATE TABLE") {
120            let table_name = extract_table_name(trimmed)?;
121            if dropped_tables.contains(&table_name) {
122                continue;
123            }
124            let (new_builder, cols) =
125                parse_create_table(builder, trimmed, &mut hyper_edge_counter)?;
126            builder = new_builder;
127            table_columns.insert(table_name, cols);
128        } else if upper.starts_with("ALTER TABLE") {
129            builder = parse_alter_table(builder, trimmed, &mut table_columns)?;
130        }
131        // DROP TABLE already handled via dropped_tables set.
132    }
133
134    let schema = builder.build()?;
135    Ok(schema)
136}
137
/// Split DDL text into individual statements at semicolons.
///
/// Semicolons inside single-quoted SQL string literals (for example a
/// `DEFAULT ';'` value or a `CHECK` expression) do not terminate a
/// statement. Empty statements are discarded.
fn split_statements(ddl: &str) -> Vec<String> {
    let mut statements = Vec::new();
    let mut current = String::new();
    let mut in_quote = false;

    for ch in ddl.chars() {
        match ch {
            '\'' => {
                // Toggle literal state; an escaped '' simply toggles twice.
                in_quote = !in_quote;
                current.push(ch);
            }
            ';' if !in_quote => {
                let stmt = current.trim();
                if !stmt.is_empty() {
                    statements.push(stmt.to_string());
                }
                current.clear();
            }
            _ => current.push(ch),
        }
    }

    // Trailing statement without a terminating semicolon.
    let tail = current.trim();
    if !tail.is_empty() {
        statements.push(tail.to_string());
    }
    statements
}
145
146/// Parse a single CREATE TABLE statement.
147///
148/// Returns the updated builder and a map of column names to vertex IDs.
149fn parse_create_table(
150    mut builder: SchemaBuilder,
151    stmt: &str,
152    hyper_edge_counter: &mut usize,
153) -> Result<(SchemaBuilder, HashMap<String, String>), ProtocolError> {
154    // Extract table name.
155    let table_name = extract_table_name(stmt)?;
156
157    // Create a vertex for the table.
158    builder = builder.vertex(&table_name, "table", None)?;
159
160    // Extract column block (content between outer parentheses).
161    let columns_block = extract_parenthesized(stmt)?;
162
163    // Parse each column definition.
164    let column_defs = split_column_defs(&columns_block);
165
166    let mut sig = HashMap::new();
167
168    for col_def in &column_defs {
169        let trimmed = col_def.trim();
170        if trimmed.is_empty() {
171            continue;
172        }
173
174        let upper = trimmed.to_uppercase();
175
176        // Handle table-level constraints.
177        if upper.starts_with("PRIMARY KEY") {
178            // PRIMARY KEY(col1, col2)
179            if let Some(cols) = extract_constraint_columns(trimmed) {
180                let constraint_val = cols.join(",");
181                builder = builder.constraint(&table_name, "PRIMARY KEY", &constraint_val);
182            }
183            continue;
184        }
185        if upper.starts_with("FOREIGN KEY") {
186            // FOREIGN KEY(col) REFERENCES other_table(col)
187            builder = parse_table_foreign_key(builder, trimmed, &table_name, &sig);
188            continue;
189        }
190        if upper.starts_with("UNIQUE") {
191            if let Some(cols) = extract_constraint_columns(trimmed) {
192                let constraint_val = cols.join(",");
193                builder = builder.constraint(&table_name, "UNIQUE", &constraint_val);
194            }
195            continue;
196        }
197        if upper.starts_with("CHECK") {
198            // Extract the expression inside parentheses.
199            if let Ok(expr) = extract_parenthesized(trimmed) {
200                builder = builder.constraint(&table_name, "CHECK", &expr);
201            }
202            continue;
203        }
204        if upper.starts_with("CONSTRAINT") {
205            // Named constraint: CONSTRAINT name PRIMARY KEY(...) / FOREIGN KEY(...) / etc.
206            // Parse the inner constraint type.
207            if upper.contains("PRIMARY KEY") {
208                if let Some(cols) = extract_constraint_columns(trimmed) {
209                    let constraint_val = cols.join(",");
210                    builder = builder.constraint(&table_name, "PRIMARY KEY", &constraint_val);
211                }
212            } else if upper.contains("FOREIGN KEY") {
213                builder = parse_table_foreign_key(builder, trimmed, &table_name, &sig);
214            } else if upper.contains("UNIQUE") {
215                if let Some(cols) = extract_constraint_columns(trimmed) {
216                    let constraint_val = cols.join(",");
217                    builder = builder.constraint(&table_name, "UNIQUE", &constraint_val);
218                }
219            } else if upper.contains("CHECK") {
220                if let Ok(expr) = extract_parenthesized(trimmed) {
221                    builder = builder.constraint(&table_name, "CHECK", &expr);
222                }
223            }
224            continue;
225        }
226
227        // Parse column: name type [constraints...]
228        let parts: Vec<&str> = trimmed.split_whitespace().collect();
229        if parts.len() < 2 {
230            continue;
231        }
232
233        let col_name = parts[0].trim_matches('"').trim_matches('`');
234        let col_type = parts[1];
235        let col_id = format!("{table_name}.{col_name}");
236
237        // Determine vertex kind from SQL type.
238        let kind = sql_type_to_kind(col_type);
239        builder = builder.vertex(&col_id, &kind, None)?;
240
241        // Parse inline constraints.
242        let rest = parts[2..].join(" ").to_uppercase();
243        if rest.contains("NOT NULL") {
244            builder = builder.constraint(&col_id, "NOT NULL", "true");
245        }
246        if rest.contains("PRIMARY KEY") {
247            builder = builder.constraint(&col_id, "PRIMARY KEY", "true");
248        }
249        if rest.contains("UNIQUE") {
250            builder = builder.constraint(&col_id, "UNIQUE", "true");
251        }
252        if let Some(default_val) = extract_default(&rest) {
253            builder = builder.constraint(&col_id, "DEFAULT", &default_val);
254        }
255
256        // Handle inline REFERENCES.
257        if let Some(ref_idx) = rest.find("REFERENCES") {
258            let ref_rest = &rest[ref_idx + "REFERENCES".len()..].trim().to_string();
259            let ref_table = ref_rest
260                .split(|c: char| c == '(' || c.is_whitespace())
261                .next()
262                .unwrap_or("")
263                .trim();
264            if !ref_table.is_empty() {
265                builder =
266                    builder.constraint(&col_id, "FOREIGN KEY", &format!("{ref_table}.{col_name}"));
267            }
268        }
269
270        // Add a prop edge from table to column.
271        builder = builder.edge(&table_name, &col_id, "prop", Some(col_name))?;
272
273        sig.insert(col_name.to_string(), col_id);
274    }
275
276    // Create a hyper-edge for the table (connecting all columns).
277    if !sig.is_empty() {
278        let he_id = format!("he_{hyper_edge_counter}");
279        *hyper_edge_counter += 1;
280        builder = builder.hyper_edge(&he_id, "table", sig.clone(), &table_name)?;
281    }
282
283    Ok((builder, sig))
284}
285
286/// Parse a table-level FOREIGN KEY constraint.
287fn parse_table_foreign_key(
288    mut builder: SchemaBuilder,
289    constraint_str: &str,
290    table_name: &str,
291    sig: &HashMap<String, String>,
292) -> SchemaBuilder {
293    let upper = constraint_str.to_uppercase();
294
295    // Extract the column(s) in the FOREIGN KEY clause.
296    let fk_cols = extract_constraint_columns_at(&upper, "FOREIGN KEY");
297
298    // Extract REFERENCES target.
299    if let Some(ref_idx) = upper.find("REFERENCES") {
300        let ref_rest = &constraint_str[ref_idx + "REFERENCES".len()..]
301            .trim()
302            .to_string();
303        let ref_table = ref_rest
304            .split(|c: char| c == '(' || c.is_whitespace())
305            .next()
306            .unwrap_or("")
307            .trim()
308            .to_string();
309
310        if !ref_table.is_empty() {
311            if let Some(fk_cols) = fk_cols {
312                for col in &fk_cols {
313                    let col_lower = col.to_lowercase();
314                    if let Some(col_id) = sig.get(&col_lower) {
315                        builder = builder.constraint(
316                            col_id,
317                            "FOREIGN KEY",
318                            &format!("{ref_table}.{col_lower}"),
319                        );
320                    } else {
321                        // Column may not exist yet; add constraint to table.
322                        builder = builder.constraint(
323                            table_name,
324                            "FOREIGN KEY",
325                            &format!("{col_lower}->{ref_table}"),
326                        );
327                    }
328                }
329            }
330        }
331    }
332
333    builder
334}
335
336/// Parse an ALTER TABLE statement.
337fn parse_alter_table(
338    mut builder: SchemaBuilder,
339    stmt: &str,
340    table_columns: &mut HashMap<String, HashMap<String, String>>,
341) -> Result<SchemaBuilder, ProtocolError> {
342    let upper = stmt.to_uppercase();
343
344    // Extract table name after ALTER TABLE.
345    let after_alter = upper
346        .find("ALTER TABLE")
347        .map(|i| i + "ALTER TABLE".len())
348        .ok_or_else(|| ProtocolError::Parse("no ALTER TABLE keyword found".into()))?;
349
350    let remainder = stmt[after_alter..].trim();
351    let table_end = remainder
352        .find(|c: char| c.is_whitespace())
353        .unwrap_or(remainder.len());
354    let table_name = remainder[..table_end]
355        .trim()
356        .trim_matches('"')
357        .trim_matches('`')
358        .to_string();
359
360    let after_table = remainder[table_end..].trim();
361    let after_table_upper = after_table.to_uppercase();
362
363    if after_table_upper.starts_with("ADD COLUMN") || after_table_upper.starts_with("ADD ") {
364        // ADD [COLUMN] name type [constraints...]
365        let col_def = if after_table_upper.starts_with("ADD COLUMN") {
366            after_table["ADD COLUMN".len()..].trim()
367        } else {
368            after_table["ADD".len()..].trim()
369        };
370
371        let parts: Vec<&str> = col_def.split_whitespace().collect();
372        if parts.len() >= 2 {
373            let col_name = parts[0].trim_matches('"').trim_matches('`');
374            let col_type = parts[1];
375            let col_id = format!("{table_name}.{col_name}");
376            let kind = sql_type_to_kind(col_type);
377            builder = builder.vertex(&col_id, &kind, None)?;
378            builder = builder.edge(&table_name, &col_id, "prop", Some(col_name))?;
379
380            let rest = parts[2..].join(" ").to_uppercase();
381            if rest.contains("NOT NULL") {
382                builder = builder.constraint(&col_id, "NOT NULL", "true");
383            }
384
385            if let Some(cols) = table_columns.get_mut(&table_name) {
386                cols.insert(col_name.to_string(), col_id);
387            }
388        }
389    } else if after_table_upper.starts_with("DROP COLUMN") || after_table_upper.starts_with("DROP ")
390    {
391        // DROP [COLUMN] name - we acknowledge but the column vertex remains.
392        // Full removal would require schema diffing, which is out of scope.
393    } else if after_table_upper.starts_with("MODIFY")
394        || after_table_upper.starts_with("ALTER COLUMN")
395    {
396        // MODIFY/ALTER COLUMN name type - acknowledged but column vertex already exists.
397        // Constraints could be updated, but column identity doesn't change.
398    }
399
400    Ok(builder)
401}
402
403/// Extract the table name from a CREATE TABLE statement.
404fn extract_table_name(stmt: &str) -> Result<String, ProtocolError> {
405    // "CREATE TABLE [IF NOT EXISTS] name (...)"
406    let upper = stmt.to_uppercase();
407    let start = if upper.contains("IF NOT EXISTS") {
408        upper
409            .find("IF NOT EXISTS")
410            .map(|i| i + "IF NOT EXISTS".len())
411    } else {
412        upper.find("TABLE").map(|i| i + "TABLE".len())
413    };
414
415    let start = start.ok_or_else(|| ProtocolError::Parse("no TABLE keyword found".into()))?;
416    let remainder = stmt[start..].trim();
417    let name_end = remainder
418        .find(|c: char| c == '(' || c.is_whitespace())
419        .unwrap_or(remainder.len());
420
421    let name = remainder[..name_end]
422        .trim()
423        .trim_matches('"')
424        .trim_matches('`')
425        .to_string();
426
427    if name.is_empty() {
428        return Err(ProtocolError::Parse("empty table name".into()));
429    }
430
431    Ok(name)
432}
433
434/// Extract the table name from a DROP TABLE statement.
435fn extract_drop_table_name(stmt: &str) -> Result<String, ProtocolError> {
436    let upper = stmt.to_uppercase();
437    let start = if upper.contains("IF EXISTS") {
438        upper.find("IF EXISTS").map(|i| i + "IF EXISTS".len())
439    } else {
440        upper.find("TABLE").map(|i| i + "TABLE".len())
441    };
442
443    let start = start.ok_or_else(|| ProtocolError::Parse("no TABLE keyword found".into()))?;
444    let remainder = stmt[start..].trim();
445    let name_end = remainder
446        .find(|c: char| c.is_whitespace() || c == ';')
447        .unwrap_or(remainder.len());
448
449    let name = remainder[..name_end]
450        .trim()
451        .trim_matches('"')
452        .trim_matches('`')
453        .to_string();
454
455    if name.is_empty() {
456        return Err(ProtocolError::Parse("empty table name".into()));
457    }
458
459    Ok(name)
460}
461
462/// Extract the parenthesized block from a statement.
463fn extract_parenthesized(stmt: &str) -> Result<String, ProtocolError> {
464    let open = stmt
465        .find('(')
466        .ok_or_else(|| ProtocolError::Parse("no opening parenthesis".into()))?;
467    let close = stmt
468        .rfind(')')
469        .ok_or_else(|| ProtocolError::Parse("no closing parenthesis".into()))?;
470    if close <= open {
471        return Err(ProtocolError::Parse("mismatched parentheses".into()));
472    }
473    Ok(stmt[open + 1..close].to_string())
474}
475
/// Split a column-definition block on top-level commas.
///
/// Commas nested inside parentheses (e.g. `VARCHAR(10)` or a composite
/// `PRIMARY KEY(a, b)`) do not split; each definition is trimmed, and a
/// trailing empty fragment is discarded.
fn split_column_defs(block: &str) -> Vec<String> {
    let mut defs: Vec<String> = Vec::new();
    let mut current = String::new();
    let mut depth: i32 = 0;

    for ch in block.chars() {
        if ch == ',' && depth == 0 {
            defs.push(current.trim().to_string());
            current.clear();
        } else {
            if ch == '(' {
                depth += 1;
            } else if ch == ')' {
                depth -= 1;
            }
            current.push(ch);
        }
    }

    let last = current.trim();
    if !last.is_empty() {
        defs.push(last.to_string());
    }
    defs
}
504
/// Map a SQL type name to a vertex kind.
///
/// Matching is case-insensitive and prefix-based, so parameterized types
/// such as `VARCHAR(255)` or `DECIMAL(10,2)` resolve correctly.
/// Unrecognized types default to `string`.
fn sql_type_to_kind(sql_type: &str) -> String {
    // Prefix -> kind table; the first matching prefix wins.
    // "JSON" also covers "JSONB", so no separate JSONB entry is needed
    // (the original second check was unreachable).
    const PREFIX_KINDS: &[(&str, &str)] = &[
        ("INT", "integer"),
        ("BIGINT", "integer"),
        ("SMALLINT", "integer"),
        ("TINYINT", "integer"),
        ("SERIAL", "integer"),
        ("VARCHAR", "string"),
        ("TEXT", "string"),
        ("CHAR", "string"),
        ("BOOL", "boolean"),
        ("FLOAT", "number"),
        ("DOUBLE", "number"),
        ("DECIMAL", "number"),
        ("NUMERIC", "number"),
        ("REAL", "number"),
        ("BYTEA", "bytes"),
        ("BLOB", "bytes"),
        ("TIMESTAMP", "timestamp"),
        ("DATE", "date"),
        ("UUID", "uuid"),
        ("JSON", "json"),
    ];

    let upper = sql_type.to_uppercase();
    for (prefix, kind) in PREFIX_KINDS {
        if upper.starts_with(prefix) {
            return (*kind).to_string();
        }
    }
    "string".to_string()
}
541
/// Extract a DEFAULT value from a constraint string.
///
/// The value is the first token following the `DEFAULT` keyword, or, for
/// single-quoted literals, the entire quoted value (quotes included) so
/// defaults containing whitespace survive intact instead of being
/// truncated at the first space.
fn extract_default(constraint_str: &str) -> Option<String> {
    let idx = constraint_str.find("DEFAULT")?;
    let rest = constraint_str[idx + "DEFAULT".len()..].trim();

    // Quoted literal: capture through the closing quote, keeping quotes.
    if let Some(body) = rest.strip_prefix('\'') {
        let close = body.find('\'')?;
        return Some(format!("'{}'", &body[..close]));
    }

    // Bare token: cut at the first whitespace or comma.
    let end = rest
        .find(|c: char| c.is_whitespace() || c == ',')
        .unwrap_or(rest.len());
    let val = rest[..end].trim();
    if val.is_empty() {
        None
    } else {
        Some(val.to_string())
    }
}
553
/// Extract column names from a constraint like `PRIMARY KEY(col1, col2)`.
///
/// Names are trimmed and stripped of double-quote/backtick delimiters.
/// Returns `None` when there is no parenthesized list or the list is
/// empty.
fn extract_constraint_columns(constraint_str: &str) -> Option<Vec<String>> {
    let open = constraint_str.find('(')?;
    let close = open + constraint_str[open..].find(')')?;

    let cols: Vec<String> = constraint_str[open + 1..close]
        .split(',')
        .filter_map(|raw| {
            let name = raw.trim().trim_matches('"').trim_matches('`');
            if name.is_empty() {
                None
            } else {
                Some(name.to_string())
            }
        })
        .collect();

    if cols.is_empty() { None } else { Some(cols) }
}
566
/// Extract column names from the parenthesized list following `keyword`
/// in `upper_str`.
///
/// Names are trimmed and stripped of double-quote/backtick delimiters,
/// matching [`extract_constraint_columns`] (quoted names were previously
/// kept verbatim here, inconsistently with the table-level parser).
/// Returns `None` if the keyword, the list, or any column is missing.
fn extract_constraint_columns_at(upper_str: &str, keyword: &str) -> Option<Vec<String>> {
    let idx = upper_str.find(keyword)?;
    let after = &upper_str[idx + keyword.len()..];
    let open = after.find('(')?;
    let close = open + after[open..].find(')')?;

    let cols: Vec<String> = after[open + 1..close]
        .split(',')
        .map(|s| s.trim().trim_matches('"').trim_matches('`').to_string())
        .filter(|s| !s.is_empty())
        .collect();

    if cols.is_empty() { None } else { Some(cols) }
}
581
/// Map a vertex kind back to a SQL type name.
///
/// Rough inverse of `sql_type_to_kind`, up to the single canonical SQL
/// type chosen for each kind. `string` and unknown kinds emit `TEXT`.
fn kind_to_sql_type(kind: &str) -> &'static str {
    const KIND_TYPES: &[(&str, &str)] = &[
        ("integer", "INTEGER"),
        ("boolean", "BOOLEAN"),
        ("number", "FLOAT"),
        ("bytes", "BYTEA"),
        ("timestamp", "TIMESTAMP"),
        ("date", "DATE"),
        ("uuid", "UUID"),
        ("json", "JSONB"),
    ];

    KIND_TYPES
        .iter()
        .find(|&&(k, _)| k == kind)
        .map_or("TEXT", |&(_, sql)| sql)
}
596
597/// Emit a [`Schema`] as SQL DDL `CREATE TABLE` statements.
598///
599/// Reconstructs table definitions from the schema graph, including
600/// column types and constraints (`NOT NULL`, `PRIMARY KEY`, `UNIQUE`,
601/// `DEFAULT`).
602///
603/// # Errors
604///
605/// Returns [`ProtocolError::Emit`] if the schema cannot be serialized.
606pub fn emit_ddl(schema: &Schema) -> Result<String, ProtocolError> {
607    use std::fmt::Write;
608
609    use crate::emit::{children_by_edge, vertex_constraints};
610
611    let mut output = String::new();
612
613    // Find all table vertices.
614    let mut tables: Vec<&panproto_schema::Vertex> = schema
615        .vertices
616        .values()
617        .filter(|v| v.kind == "table")
618        .collect();
619    tables.sort_by(|a, b| a.id.cmp(&b.id));
620
621    for table in &tables {
622        let _ = writeln!(output, "CREATE TABLE {} (", table.id);
623
624        let columns = children_by_edge(schema, &table.id, "prop");
625        let col_count = columns.len();
626        for (i, (edge, col_vertex)) in columns.iter().enumerate() {
627            let col_name = edge.name.as_deref().unwrap_or(&col_vertex.id);
628            let sql_type = kind_to_sql_type(&col_vertex.kind);
629
630            let mut constraints_str = String::new();
631            let constraints = vertex_constraints(schema, &col_vertex.id);
632            for c in &constraints {
633                match c.sort.as_str() {
634                    "PRIMARY KEY" if c.value == "true" => {
635                        constraints_str.push_str(" PRIMARY KEY");
636                    }
637                    "NOT NULL" if c.value == "true" => {
638                        constraints_str.push_str(" NOT NULL");
639                    }
640                    "UNIQUE" if c.value == "true" => {
641                        constraints_str.push_str(" UNIQUE");
642                    }
643                    "DEFAULT" => {
644                        let _ = write!(constraints_str, " DEFAULT {}", c.value);
645                    }
646                    _ => {}
647                }
648            }
649
650            let comma = if i + 1 < col_count { "," } else { "" };
651            let _ = writeln!(output, "  {col_name} {sql_type}{constraints_str}{comma}");
652        }
653
654        output.push_str(");\n\n");
655    }
656
657    Ok(output)
658}
659
660/// Well-formedness rules for SQL edges.
661fn edge_rules() -> Vec<EdgeRule> {
662    vec![
663        EdgeRule {
664            edge_kind: "prop".into(),
665            src_kinds: vec!["table".into()],
666            tgt_kinds: vec![],
667        },
668        EdgeRule {
669            edge_kind: "foreign-key".into(),
670            src_kinds: vec![],
671            tgt_kinds: vec![],
672        },
673    ]
674}
675
#[cfg(test)]
#[allow(clippy::expect_used, clippy::unwrap_used)]
mod tests {
    use super::*;

    #[test]
    fn protocol_creates_valid_definition() {
        let p = protocol();
        assert_eq!(p.name, "sql");
        assert_eq!(p.schema_theory, "ThSQLSchema");
        assert_eq!(p.instance_theory, "ThSQLInstance");
        assert!(p.find_edge_rule("prop").is_some());
    }

    #[test]
    fn register_theories_adds_correct_theories() {
        let mut registry = HashMap::new();
        register_theories(&mut registry);

        // All component and composed theories must be present.
        for name in [
            "ThHypergraph",
            "ThConstraint",
            "ThFunctor",
            "ThSQLSchema",
            "ThSQLInstance",
        ] {
            assert!(registry.contains_key(name), "missing theory: {name}");
        }

        // The composed schema theory carries sorts from both components.
        let schema_t = &registry["ThSQLSchema"];
        assert!(schema_t.find_sort("Vertex").is_some());
        assert!(schema_t.find_sort("HyperEdge").is_some());
        assert!(schema_t.find_sort("Constraint").is_some());
    }

    #[test]
    fn parse_simple_create_table() {
        let ddl = r"
            CREATE TABLE users (
                id INTEGER PRIMARY KEY NOT NULL,
                name VARCHAR(255) NOT NULL,
                email TEXT UNIQUE,
                active BOOLEAN DEFAULT true
            );
        ";

        // `expect` fails with a direct message instead of threading Option
        // through `.ok()`/`.as_ref()`/`is_some_and` chains.
        let schema = parse_ddl(ddl).expect("parse_ddl should succeed");
        for vertex in [
            "users",
            "users.id",
            "users.name",
            "users.email",
            "users.active",
        ] {
            assert!(schema.has_vertex(vertex), "missing vertex: {vertex}");
        }
    }

    #[test]
    fn parse_multiple_tables() {
        let ddl = r"
            CREATE TABLE posts (
                id INTEGER PRIMARY KEY,
                title TEXT NOT NULL,
                author_id INTEGER
            );
            CREATE TABLE comments (
                id INTEGER PRIMARY KEY,
                body TEXT,
                post_id INTEGER
            );
        ";

        let schema = parse_ddl(ddl).expect("parse_ddl should succeed");
        for vertex in ["posts", "comments", "posts.title", "comments.body"] {
            assert!(schema.has_vertex(vertex), "missing vertex: {vertex}");
        }
    }

    #[test]
    fn parse_empty_ddl() {
        let result = parse_ddl("");
        // Empty DDL produces no vertices, which SchemaBuilder rejects.
        assert!(result.is_err(), "empty DDL should fail with EmptySchema");
    }

    #[test]
    fn parse_timestamp_and_uuid_types() {
        let ddl = r"
            CREATE TABLE events (
                id UUID PRIMARY KEY,
                created_at TIMESTAMP NOT NULL,
                event_date DATE,
                payload JSONB
            );
        ";
        let schema = parse_ddl(ddl).expect("should parse");

        // Each column vertex should carry the kind mapped from its SQL type.
        let kind_of =
            |id: &str| schema.vertices.get(id).expect("vertex exists").kind.clone();
        assert_eq!(kind_of("events.id"), "uuid");
        assert_eq!(kind_of("events.created_at"), "timestamp");
        assert_eq!(kind_of("events.event_date"), "date");
        assert_eq!(kind_of("events.payload"), "json");
    }

    #[test]
    fn parse_float_double_types() {
        let ddl = r"
            CREATE TABLE measurements (
                temp FLOAT,
                pressure DOUBLE
            );
        ";
        let schema = parse_ddl(ddl).expect("should parse");
        let kind_of =
            |id: &str| schema.vertices.get(id).expect("vertex exists").kind.clone();
        assert_eq!(kind_of("measurements.temp"), "number");
        assert_eq!(kind_of("measurements.pressure"), "number");
    }

    #[test]
    fn parse_drop_table() {
        let ddl = r"
            CREATE TABLE temp (id INTEGER);
            DROP TABLE temp;
        ";
        // The table was dropped, so no vertices are created and the empty
        // schema is rejected by the builder.
        assert!(
            parse_ddl(ddl).is_err(),
            "dropped table should produce empty schema"
        );
    }

    #[test]
    fn parse_table_level_primary_key() {
        let ddl = r"
            CREATE TABLE orders (
                order_id INTEGER NOT NULL,
                product_id INTEGER NOT NULL,
                PRIMARY KEY(order_id, product_id)
            );
        ";
        let schema = parse_ddl(ddl).expect("should parse");
        let constraints = schema
            .constraints
            .get("orders")
            .expect("table-level constraints should exist");
        assert!(constraints.iter().any(|c| c.sort == "PRIMARY KEY"));
    }

    #[test]
    fn emit_ddl_roundtrip() {
        let ddl = r"
            CREATE TABLE users (
                id INTEGER PRIMARY KEY NOT NULL,
                name TEXT NOT NULL,
                active BOOLEAN DEFAULT true
            );
        ";

        let schema1 = parse_ddl(ddl).expect("first parse should succeed");
        let emitted = emit_ddl(&schema1).expect("emit should succeed");
        let schema2 = parse_ddl(&emitted).expect("re-parse should succeed");

        assert_eq!(
            schema1.vertex_count(),
            schema2.vertex_count(),
            "vertex counts should match after round-trip"
        );
        assert_eq!(
            schema1.edge_count(),
            schema2.edge_count(),
            "edge counts should match after round-trip"
        );
    }

    #[test]
    fn parse_alter_table_add_column() {
        let ddl = r"
            CREATE TABLE users (
                id INTEGER PRIMARY KEY
            );
            ALTER TABLE users ADD COLUMN name TEXT NOT NULL;
        ";
        let schema = parse_ddl(ddl).expect("should parse");
        assert!(schema.has_vertex("users.name"));
    }
}