1use std::collections::{BTreeMap, BTreeSet};
2
3#[cfg(feature = "postgres")]
4use sqlx::PgPool;
5use sqlx::SqlitePool;
6
/// A single column in a table schema, either declared by a model or
/// introspected from a live database.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SchemaColumn {
    /// Column name.
    pub name: String,
    /// Raw SQL type as written (e.g. `TEXT`, `INTEGER`); diffing compares
    /// it through `normalize_sql_type` so dialect spellings still match.
    pub sql_type: String,
    /// Whether the column accepts NULL.
    pub nullable: bool,
    /// Whether the column is (part of) the primary key.
    pub primary_key: bool,
}
14
15impl SchemaColumn {
16 fn normalized_type(&self) -> String {
17 normalize_sql_type(&self.sql_type)
18 }
19}
20
/// A (non-primary-key) index on a table.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SchemaIndex {
    /// Index name; may be empty, in which case SQL generation derives one.
    pub name: String,
    /// Indexed columns, in index-definition order.
    pub columns: Vec<String>,
    /// Whether the index enforces uniqueness.
    pub unique: bool,
}
27
/// A single-column foreign key constraint.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SchemaForeignKey {
    /// Local column holding the reference.
    pub column: String,
    /// Referenced table name.
    pub ref_table: String,
    /// Referenced column name.
    pub ref_column: String,
}
34
/// Full description of one table: columns, indexes and foreign keys.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SchemaTable {
    /// Table name.
    pub name: String,
    /// Columns in declaration order.
    pub columns: Vec<SchemaColumn>,
    /// Secondary indexes (primary-key indexes are excluded by introspection).
    pub indexes: Vec<SchemaIndex>,
    /// Foreign keys declared on this table.
    pub foreign_keys: Vec<SchemaForeignKey>,
    /// Optional hand-written CREATE statement; when set, `to_create_sql`
    /// returns it verbatim instead of assembling one from `columns`.
    pub create_sql: Option<String>,
}
43
44impl SchemaTable {
45 pub fn column(&self, name: &str) -> Option<&SchemaColumn> {
46 self.columns.iter().find(|c| c.name == name)
47 }
48
49 pub fn to_create_sql(&self) -> String {
50 if let Some(sql) = &self.create_sql {
51 return sql.clone();
52 }
53
54 let mut cols = Vec::new();
55 for col in &self.columns {
56 if col.primary_key {
57 cols.push(format!("{} INTEGER PRIMARY KEY", col.name));
58 continue;
59 }
60 let mut def = format!("{} {}", col.name, col.sql_type);
61 if !col.nullable {
62 def.push_str(" NOT NULL");
63 }
64 cols.push(def);
65 }
66
67 format!(
68 "CREATE TABLE IF NOT EXISTS {} ({})",
69 self.name,
70 cols.join(", ")
71 )
72 }
73}
74
/// Implemented by model types that can describe their own table schema.
pub trait ModelSchema {
    /// Returns the expected table schema for this model.
    fn schema() -> SchemaTable;
}
78
/// Collects the schemas of the listed model types into a `Vec<SchemaTable>`.
///
/// Each listed type must implement [`ModelSchema`]; a trailing comma is
/// accepted, e.g. `schema_models!(User, Post,)`.
#[macro_export]
macro_rules! schema_models {
    ($($model:ty),+ $(,)?) => {
        vec![$(<$model as $crate::schema::ModelSchema>::schema()),+]
    };
}
85
/// A column present in one schema but absent from the other.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ColumnDiff {
    /// Owning table name.
    pub table: String,
    /// Missing/extra column name.
    pub column: String,
}
91
/// A column whose normalized SQL type differs between schemas.
/// `expected`/`actual` carry the raw, un-normalized type spellings.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ColumnTypeDiff {
    /// Owning table name.
    pub table: String,
    /// Column name.
    pub column: String,
    /// Type declared by the model.
    pub expected: String,
    /// Type found in the database.
    pub actual: String,
}
99
/// A column whose nullability differs between schemas.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ColumnNullabilityDiff {
    /// Owning table name.
    pub table: String,
    /// Column name.
    pub column: String,
    /// Nullability declared by the model.
    pub expected_nullable: bool,
    /// Nullability found in the database.
    pub actual_nullable: bool,
}
107
/// A column whose primary-key status differs between schemas.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ColumnPrimaryKeyDiff {
    /// Owning table name.
    pub table: String,
    /// Column name.
    pub column: String,
    /// Primary-key status declared by the model.
    pub expected_primary_key: bool,
    /// Primary-key status found in the database.
    pub actual_primary_key: bool,
}
115
/// Accumulated differences between an expected and an actual schema.
///
/// "missing" = declared by the models but absent from the database;
/// "extra" = present in the database but not declared by the models.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct SchemaDiff {
    pub missing_tables: Vec<String>,
    pub extra_tables: Vec<String>,
    pub missing_columns: Vec<ColumnDiff>,
    pub extra_columns: Vec<ColumnDiff>,
    pub type_mismatches: Vec<ColumnTypeDiff>,
    pub nullability_mismatches: Vec<ColumnNullabilityDiff>,
    pub primary_key_mismatches: Vec<ColumnPrimaryKeyDiff>,
    // Index/FK entries are (table name, item) pairs.
    pub missing_indexes: Vec<(String, SchemaIndex)>,
    pub extra_indexes: Vec<(String, SchemaIndex)>,
    pub missing_foreign_keys: Vec<(String, SchemaForeignKey)>,
    pub extra_foreign_keys: Vec<(String, SchemaForeignKey)>,
}
130
131impl SchemaDiff {
132 pub fn is_empty(&self) -> bool {
133 self.missing_tables.is_empty()
134 && self.extra_tables.is_empty()
135 && self.missing_columns.is_empty()
136 && self.extra_columns.is_empty()
137 && self.type_mismatches.is_empty()
138 && self.nullability_mismatches.is_empty()
139 && self.primary_key_mismatches.is_empty()
140 && self.missing_indexes.is_empty()
141 && self.extra_indexes.is_empty()
142 && self.missing_foreign_keys.is_empty()
143 && self.extra_foreign_keys.is_empty()
144 }
145}
146
147pub fn format_schema_diff_summary(diff: &SchemaDiff) -> String {
148 if diff.is_empty() {
149 return "Schema diff: no changes".to_string();
150 }
151
152 let mut lines = Vec::new();
153 lines.push("Schema diff summary:".to_string());
154 lines.push(format!(" missing tables: {}", diff.missing_tables.len()));
155 lines.push(format!(" extra tables: {}", diff.extra_tables.len()));
156 lines.push(format!(" missing columns: {}", diff.missing_columns.len()));
157 lines.push(format!(" extra columns: {}", diff.extra_columns.len()));
158 lines.push(format!(" type mismatches: {}", diff.type_mismatches.len()));
159 lines.push(format!(
160 " nullability mismatches: {}",
161 diff.nullability_mismatches.len()
162 ));
163 lines.push(format!(
164 " primary key mismatches: {}",
165 diff.primary_key_mismatches.len()
166 ));
167 lines.push(format!(" missing indexes: {}", diff.missing_indexes.len()));
168 lines.push(format!(" extra indexes: {}", diff.extra_indexes.len()));
169 lines.push(format!(
170 " missing foreign keys: {}",
171 diff.missing_foreign_keys.len()
172 ));
173 lines.push(format!(
174 " extra foreign keys: {}",
175 diff.extra_foreign_keys.len()
176 ));
177
178 if !diff.missing_tables.is_empty() {
179 lines.push(format!(
180 " missing tables list: {}",
181 diff.missing_tables.join(", ")
182 ));
183 }
184 if !diff.extra_tables.is_empty() {
185 lines.push(format!(
186 " extra tables list: {}",
187 diff.extra_tables.join(", ")
188 ));
189 }
190
191 lines.join("\n")
192}
193
/// Reads the full table schema from a live SQLite database.
///
/// SQLite-internal tables (`sqlite_*`) and the migration bookkeeping table
/// `_premix_migrations` are skipped. Tables come back in name order.
pub async fn introspect_sqlite_schema(pool: &SqlitePool) -> Result<Vec<SchemaTable>, sqlx::Error> {
    let table_names: Vec<String> = sqlx::query_scalar(
        "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' AND name != '_premix_migrations' ORDER BY name",
    )
    .fetch_all(pool)
    .await?;

    let mut tables = Vec::new();
    for name in table_names {
        // PRAGMA does not accept bound parameters, hence the format!. The
        // names come straight from sqlite_master, not from user input.
        let pragma_sql = format!("PRAGMA table_info({})", name);
        // table_info row layout: (cid, name, type, notnull, dflt_value, pk).
        let rows: Vec<(i64, String, String, i64, Option<String>, i64)> =
            sqlx::query_as(&pragma_sql).fetch_all(pool).await?;

        if rows.is_empty() {
            continue;
        }

        let columns = rows
            .into_iter()
            .map(|(_cid, col_name, col_type, notnull, _default, pk)| {
                // pk is the 1-based position within the primary key (0 = not pk).
                let is_pk = pk > 0;
                SchemaColumn {
                    name: col_name,
                    sql_type: col_type,
                    // Primary-key columns are always treated as NOT NULL,
                    // even when SQLite reports notnull = 0 for them.
                    nullable: !is_pk && notnull == 0,
                    primary_key: is_pk,
                }
            })
            .collect();

        let indexes = introspect_sqlite_indexes(pool, &name).await?;
        let foreign_keys = introspect_sqlite_foreign_keys(pool, &name).await?;

        tables.push(SchemaTable {
            name,
            columns,
            indexes,
            foreign_keys,
            create_sql: None,
        });
    }

    Ok(tables)
}
238
/// Reads the full table schema from a live Postgres database (public schema
/// only), skipping the migration bookkeeping table `_premix_migrations`.
/// Tables come back in name order.
#[cfg(feature = "postgres")]
pub async fn introspect_postgres_schema(pool: &PgPool) -> Result<Vec<SchemaTable>, sqlx::Error> {
    let table_names: Vec<String> = sqlx::query_scalar(
        "SELECT table_name FROM information_schema.tables WHERE table_schema='public' AND table_type='BASE TABLE' AND table_name != '_premix_migrations' ORDER BY table_name",
    )
    .fetch_all(pool)
    .await?;

    let mut tables = Vec::new();
    for name in table_names {
        // Primary-key column names, via pg_index.indisprimary.
        let pk_cols: Vec<String> = sqlx::query_scalar(
            "SELECT a.attname FROM pg_index i JOIN pg_attribute a ON a.attrelid=i.indrelid AND a.attnum = ANY(i.indkey) WHERE i.indrelid=$1::regclass AND i.indisprimary",
        )
        .bind(&name)
        .fetch_all(pool)
        .await?;
        let pk_set: BTreeSet<String> = pk_cols.into_iter().collect();

        // (column_name, data_type, udt_name, is_nullable) in ordinal order.
        let rows: Vec<(String, String, String, String)> = sqlx::query_as(
            "SELECT column_name, data_type, udt_name, is_nullable FROM information_schema.columns WHERE table_schema='public' AND table_name=$1 ORDER BY ordinal_position",
        )
        .bind(&name)
        .fetch_all(pool)
        .await?;

        if rows.is_empty() {
            continue;
        }

        let columns = rows
            .into_iter()
            .map(|(col_name, data_type, udt_name, is_nullable)| {
                let is_pk = pk_set.contains(&col_name);
                // information_schema reports arrays as data_type='ARRAY' with
                // udt_name='_<base>'; rebuild the familiar '<base>[]' spelling.
                let sql_type = if data_type.eq_ignore_ascii_case("ARRAY") {
                    let base = udt_name.trim_start_matches('_');
                    format!("{}[]", base)
                } else if data_type.eq_ignore_ascii_case("USER-DEFINED") {
                    // Enums / custom types: the udt name is the real type name.
                    udt_name
                } else {
                    data_type
                };
                SchemaColumn {
                    name: col_name,
                    sql_type,
                    // Primary-key columns are treated as NOT NULL regardless
                    // of what information_schema reports.
                    nullable: !is_pk && is_nullable.eq_ignore_ascii_case("YES"),
                    primary_key: is_pk,
                }
            })
            .collect();

        let indexes = introspect_postgres_indexes(pool, &name).await?;
        let foreign_keys = introspect_postgres_foreign_keys(pool, &name).await?;

        tables.push(SchemaTable {
            name,
            columns,
            indexes,
            foreign_keys,
            create_sql: None,
        });
    }

    Ok(tables)
}
303
304pub async fn diff_sqlite_schema(
305 pool: &SqlitePool,
306 expected: &[SchemaTable],
307) -> Result<SchemaDiff, sqlx::Error> {
308 let actual = introspect_sqlite_schema(pool).await?;
309 Ok(diff_schema(expected, &actual))
310}
311
/// Diffs the expected (model-derived) schema against the live Postgres schema.
#[cfg(feature = "postgres")]
pub async fn diff_postgres_schema(
    pool: &PgPool,
    expected: &[SchemaTable],
) -> Result<SchemaDiff, sqlx::Error> {
    introspect_postgres_schema(pool)
        .await
        .map(|actual| diff_schema(expected, &actual))
}
320
321pub fn diff_schema(expected: &[SchemaTable], actual: &[SchemaTable]) -> SchemaDiff {
322 let mut diff = SchemaDiff::default();
323
324 let expected_map: BTreeMap<_, _> = expected.iter().map(|t| (&t.name, t)).collect();
325 let actual_map: BTreeMap<_, _> = actual.iter().map(|t| (&t.name, t)).collect();
326
327 for name in expected_map.keys() {
328 if !actual_map.contains_key(name) {
329 diff.missing_tables.push((*name).to_string());
330 }
331 }
332 for name in actual_map.keys() {
333 if !expected_map.contains_key(name) {
334 diff.extra_tables.push((*name).to_string());
335 }
336 }
337
338 for (name, expected_table) in &expected_map {
339 let Some(actual_table) = actual_map.get(name) else {
340 continue;
341 };
342
343 let expected_cols: BTreeMap<_, _> = expected_table
344 .columns
345 .iter()
346 .map(|c| (&c.name, c))
347 .collect();
348 let actual_cols: BTreeMap<_, _> =
349 actual_table.columns.iter().map(|c| (&c.name, c)).collect();
350
351 for col in expected_cols.keys() {
352 if !actual_cols.contains_key(col) {
353 diff.missing_columns.push(ColumnDiff {
354 table: (*name).to_string(),
355 column: (*col).to_string(),
356 });
357 }
358 }
359 for col in actual_cols.keys() {
360 if !expected_cols.contains_key(col) {
361 diff.extra_columns.push(ColumnDiff {
362 table: (*name).to_string(),
363 column: (*col).to_string(),
364 });
365 }
366 }
367
368 for (col_name, expected_col) in &expected_cols {
369 let Some(actual_col) = actual_cols.get(col_name) else {
370 continue;
371 };
372
373 let expected_type = expected_col.normalized_type();
374 let actual_type = actual_col.normalized_type();
375 if expected_type != actual_type {
376 diff.type_mismatches.push(ColumnTypeDiff {
377 table: (*name).to_string(),
378 column: (*col_name).to_string(),
379 expected: expected_col.sql_type.clone(),
380 actual: actual_col.sql_type.clone(),
381 });
382 }
383
384 if expected_col.nullable != actual_col.nullable {
385 diff.nullability_mismatches.push(ColumnNullabilityDiff {
386 table: (*name).to_string(),
387 column: (*col_name).to_string(),
388 expected_nullable: expected_col.nullable,
389 actual_nullable: actual_col.nullable,
390 });
391 }
392
393 if expected_col.primary_key != actual_col.primary_key {
394 diff.primary_key_mismatches.push(ColumnPrimaryKeyDiff {
395 table: (*name).to_string(),
396 column: (*col_name).to_string(),
397 expected_primary_key: expected_col.primary_key,
398 actual_primary_key: actual_col.primary_key,
399 });
400 }
401 }
402
403 let expected_indexes = index_map(&expected_table.indexes);
404 let actual_indexes = index_map(&actual_table.indexes);
405 for key in expected_indexes.keys() {
406 if !actual_indexes.contains_key(key) {
407 if let Some(index) = expected_indexes.get(key) {
408 diff.missing_indexes
409 .push(((*name).to_string(), (*index).clone()));
410 }
411 }
412 }
413 for key in actual_indexes.keys() {
414 if !expected_indexes.contains_key(key) {
415 if let Some(index) = actual_indexes.get(key) {
416 diff.extra_indexes
417 .push(((*name).to_string(), (*index).clone()));
418 }
419 }
420 }
421
422 let expected_fks = foreign_key_map(&expected_table.foreign_keys);
423 let actual_fks = foreign_key_map(&actual_table.foreign_keys);
424 for key in expected_fks.keys() {
425 if !actual_fks.contains_key(key) {
426 if let Some(fk) = expected_fks.get(key) {
427 diff.missing_foreign_keys
428 .push(((*name).to_string(), (*fk).clone()));
429 }
430 }
431 }
432 for key in actual_fks.keys() {
433 if !expected_fks.contains_key(key) {
434 if let Some(fk) = actual_fks.get(key) {
435 diff.extra_foreign_keys
436 .push(((*name).to_string(), (*fk).clone()));
437 }
438 }
439 }
440 }
441
442 diff.missing_tables.sort();
443 diff.extra_tables.sort();
444
445 diff
446}
447
/// Generates SQLite statements — plus `-- TODO` comment placeholders for
/// changes SQLite cannot express with simple DDL — that would reconcile
/// `diff` toward the expected schema.
///
/// Emission order: CREATE TABLE (+ indexes) for missing tables, ALTER TABLE
/// ADD COLUMN for missing columns, mismatch placeholders, CREATE INDEX for
/// missing indexes, then placeholders for everything needing a table rebuild.
pub fn sqlite_migration_sql(expected: &[SchemaTable], diff: &SchemaDiff) -> Vec<String> {
    let expected_map: BTreeMap<String, &SchemaTable> =
        expected.iter().map(|t| (t.name.clone(), t)).collect();
    let mut statements = Vec::new();

    // Missing tables: create the table plus all of its declared indexes.
    for table in &diff.missing_tables {
        if let Some(schema) = expected_map.get(table) {
            statements.push(schema.to_create_sql());
            for index in &schema.indexes {
                statements.push(sqlite_create_index_sql(&schema.name, index));
            }
        } else {
            statements.push(format!("-- Missing schema for table {}", table));
        }
    }

    // Group missing columns by table; BTreeMap/BTreeSet keep output deterministic.
    let mut missing_by_table: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
    for col in &diff.missing_columns {
        missing_by_table
            .entry(col.table.clone())
            .or_default()
            .insert(col.column.clone());
    }

    for (table, columns) in missing_by_table {
        let Some(schema) = expected_map.get(&table) else {
            continue;
        };
        for col_name in columns {
            let Some(col) = schema.column(&col_name) else {
                continue;
            };
            // ADD COLUMN cannot extend an existing primary key.
            if col.primary_key {
                statements.push(format!(
                    "-- TODO: add primary key column {}.{} manually",
                    table, col_name
                ));
                continue;
            }
            let mut stmt = format!(
                "ALTER TABLE {} ADD COLUMN {} {}",
                table, col.name, col.sql_type
            );
            if !col.nullable {
                // NOTE(review): SQLite rejects ADD COLUMN ... NOT NULL without
                // a DEFAULT when the table already contains rows — confirm
                // this is only run on empty tables, or add a default value.
                stmt.push_str(" NOT NULL");
            }
            statements.push(stmt);
        }
    }

    // The mismatches below require a table rebuild in SQLite, so only
    // comment markers are emitted for them.
    for mismatch in &diff.type_mismatches {
        statements.push(format!(
            "-- TODO: column type mismatch {}.{} (expected {}, actual {})",
            mismatch.table, mismatch.column, mismatch.expected, mismatch.actual
        ));
    }
    for mismatch in &diff.nullability_mismatches {
        statements.push(format!(
            "-- TODO: column nullability mismatch {}.{} (expected nullable {}, actual nullable {})",
            mismatch.table, mismatch.column, mismatch.expected_nullable, mismatch.actual_nullable
        ));
    }
    for mismatch in &diff.primary_key_mismatches {
        statements.push(format!(
            "-- TODO: column primary key mismatch {}.{} (expected pk {}, actual pk {})",
            mismatch.table,
            mismatch.column,
            mismatch.expected_primary_key,
            mismatch.actual_primary_key
        ));
    }
    for (table, index) in &diff.missing_indexes {
        statements.push(sqlite_create_index_sql(table, index));
    }
    for (table, index) in &diff.extra_indexes {
        statements.push(format!(
            "-- TODO: extra index {}.{} ({})",
            table,
            index.name,
            index.columns.join(", ")
        ));
    }
    for (table, fk) in &diff.missing_foreign_keys {
        statements.push(format!(
            "-- TODO: add foreign key {}.{} -> {}({}) (requires table rebuild)",
            table, fk.column, fk.ref_table, fk.ref_column
        ));
    }
    for (table, fk) in &diff.extra_foreign_keys {
        statements.push(format!(
            "-- TODO: extra foreign key {}.{} -> {}({})",
            table, fk.column, fk.ref_table, fk.ref_column
        ));
    }
    for extra in &diff.extra_columns {
        statements.push(format!(
            "-- TODO: extra column {}.{} not in models",
            extra.table, extra.column
        ));
    }
    for table in &diff.extra_tables {
        statements.push(format!("-- TODO: extra table {} not in models", table));
    }

    statements
}
554
/// Folds a raw SQL type name into a coarse canonical family ("integer",
/// "text", "real", "boolean") so equivalent types from different dialects
/// compare equal; unrecognized types fall through as their lowercased form.
fn normalize_sql_type(sql_type: &str) -> String {
    let t = sql_type.trim().to_lowercase();
    if t.is_empty() {
        return t;
    }
    let has = |needle: &str| t.contains(needle);
    let family = if has("int") || has("serial") {
        // INTEGER, BIGINT, SMALLINT, SERIAL, BIGSERIAL, ...
        "integer"
    } else if has("char") || has("text") || has("clob") {
        // CHAR, VARCHAR, NCHAR, TEXT, CLOB, ...
        "text"
    } else if has("real") || has("floa") || has("doub") || has("numeric") || has("decimal") {
        // REAL, FLOAT, DOUBLE (PRECISION), NUMERIC, DECIMAL
        "real"
    } else if has("bool") {
        "boolean"
    } else if has("time") || has("date") || has("uuid") || has("json") {
        // Temporal / uuid / json values map onto the TEXT family here.
        "text"
    } else {
        return t;
    };
    family.to_string()
}
582
/// Generates Postgres statements — plus `-- TODO` comment placeholders for
/// changes that need manual review — that would reconcile `diff` toward the
/// expected schema. Mirrors `sqlite_migration_sql`, except missing foreign
/// keys become real `ALTER TABLE ... ADD CONSTRAINT` statements, which
/// Postgres (unlike SQLite) supports directly.
#[cfg(feature = "postgres")]
pub fn postgres_migration_sql(expected: &[SchemaTable], diff: &SchemaDiff) -> Vec<String> {
    let expected_map: BTreeMap<String, &SchemaTable> =
        expected.iter().map(|t| (t.name.clone(), t)).collect();
    let mut statements = Vec::new();

    // Missing tables: create the table plus all of its declared indexes.
    for table in &diff.missing_tables {
        if let Some(schema) = expected_map.get(table) {
            statements.push(schema.to_create_sql());
            for index in &schema.indexes {
                statements.push(postgres_create_index_sql(&schema.name, index));
            }
        } else {
            statements.push(format!("-- Missing schema for table {}", table));
        }
    }

    // Group missing columns by table; BTreeMap/BTreeSet keep output deterministic.
    let mut missing_by_table: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
    for col in &diff.missing_columns {
        missing_by_table
            .entry(col.table.clone())
            .or_default()
            .insert(col.column.clone());
    }

    for (table, columns) in missing_by_table {
        let Some(schema) = expected_map.get(&table) else {
            continue;
        };
        for col_name in columns {
            let Some(col) = schema.column(&col_name) else {
                continue;
            };
            // Adding a column into an existing primary key needs manual work.
            if col.primary_key {
                statements.push(format!(
                    "-- TODO: add primary key column {}.{} manually",
                    table, col_name
                ));
                continue;
            }
            let mut stmt = format!(
                "ALTER TABLE {} ADD COLUMN {} {}",
                table, col.name, col.sql_type
            );
            if !col.nullable {
                // NOTE(review): adding NOT NULL without a DEFAULT fails on
                // tables that already contain rows — confirm usage.
                stmt.push_str(" NOT NULL");
            }
            statements.push(stmt);
        }
    }

    // Mismatches are surfaced as comment markers for manual resolution.
    for mismatch in &diff.type_mismatches {
        statements.push(format!(
            "-- TODO: column type mismatch {}.{} (expected {}, actual {})",
            mismatch.table, mismatch.column, mismatch.expected, mismatch.actual
        ));
    }
    for mismatch in &diff.nullability_mismatches {
        statements.push(format!(
            "-- TODO: column nullability mismatch {}.{} (expected nullable {}, actual nullable {})",
            mismatch.table, mismatch.column, mismatch.expected_nullable, mismatch.actual_nullable
        ));
    }
    for mismatch in &diff.primary_key_mismatches {
        statements.push(format!(
            "-- TODO: column primary key mismatch {}.{} (expected pk {}, actual pk {})",
            mismatch.table,
            mismatch.column,
            mismatch.expected_primary_key,
            mismatch.actual_primary_key
        ));
    }
    for (table, index) in &diff.missing_indexes {
        statements.push(postgres_create_index_sql(table, index));
    }
    for (table, index) in &diff.extra_indexes {
        statements.push(format!(
            "-- TODO: extra index {}.{} ({})",
            table,
            index.name,
            index.columns.join(", ")
        ));
    }
    for (table, fk) in &diff.missing_foreign_keys {
        // Constraint name follows the fk_<table>_<column> convention.
        let fk_name = format!("fk_{}_{}", table, fk.column);
        statements.push(format!(
            "ALTER TABLE {} ADD CONSTRAINT {} FOREIGN KEY ({}) REFERENCES {}({})",
            table, fk_name, fk.column, fk.ref_table, fk.ref_column
        ));
    }
    for (table, fk) in &diff.extra_foreign_keys {
        statements.push(format!(
            "-- TODO: extra foreign key {}.{} -> {}({})",
            table, fk.column, fk.ref_table, fk.ref_column
        ));
    }
    for extra in &diff.extra_columns {
        statements.push(format!(
            "-- TODO: extra column {}.{} not in models",
            extra.table, extra.column
        ));
    }
    for table in &diff.extra_tables {
        statements.push(format!("-- TODO: extra table {} not in models", table));
    }

    statements
}
691
692fn index_key(index: &SchemaIndex) -> (String, String, bool) {
693 let name = index.name.clone();
694 let cols = index.columns.join(",");
695 (name, cols, index.unique)
696}
697
698fn index_map(indexes: &[SchemaIndex]) -> BTreeMap<(String, String, bool), &SchemaIndex> {
699 indexes.iter().map(|i| (index_key(i), i)).collect()
700}
701
702fn foreign_key_key(fk: &SchemaForeignKey) -> (String, String, String) {
703 (
704 fk.column.clone(),
705 fk.ref_table.clone(),
706 fk.ref_column.clone(),
707 )
708}
709
710fn foreign_key_map(
711 fks: &[SchemaForeignKey],
712) -> BTreeMap<(String, String, String), &SchemaForeignKey> {
713 fks.iter().map(|f| (foreign_key_key(f), f)).collect()
714}
715
/// Reads the secondary indexes of `table` via `PRAGMA index_list` /
/// `PRAGMA index_info`, skipping primary-key and auto-generated indexes.
async fn introspect_sqlite_indexes(
    pool: &SqlitePool,
    table: &str,
) -> Result<Vec<SchemaIndex>, sqlx::Error> {
    // index_list row layout: (seq, name, unique, origin, partial).
    let sql = format!("PRAGMA index_list({})", table);
    let rows: Vec<(i64, String, i64, String, i64)> = sqlx::query_as(&sql).fetch_all(pool).await?;

    let mut indexes = Vec::new();
    for (_seq, name, unique, origin, _partial) in rows {
        // origin == "pk" means the primary-key index; sqlite_autoindex_* are
        // implicit UNIQUE-constraint indexes. Neither is model-declared.
        if origin == "pk" || name.starts_with("sqlite_autoindex") {
            continue;
        }
        // index_info row layout: (seqno, cid, name); seqno order is the
        // column order within the index.
        let info_sql = format!("PRAGMA index_info({})", name);
        let info_rows: Vec<(i64, i64, String)> = sqlx::query_as(&info_sql).fetch_all(pool).await?;
        let columns = info_rows.into_iter().map(|(_seq, _cid, col)| col).collect();
        indexes.push(SchemaIndex {
            name,
            columns,
            unique: unique != 0,
        });
    }
    Ok(indexes)
}
739
740async fn introspect_sqlite_foreign_keys(
741 pool: &SqlitePool,
742 table: &str,
743) -> Result<Vec<SchemaForeignKey>, sqlx::Error> {
744 let sql = format!("PRAGMA foreign_key_list({})", table);
745 #[allow(clippy::type_complexity)]
746 let rows: Vec<(i64, i64, String, String, String, String, String, String)> =
747 sqlx::query_as(&sql).fetch_all(pool).await?;
748
749 let mut fks = Vec::new();
750 for (_id, _seq, ref_table, from, to, _on_update, _on_delete, _match) in rows {
751 fks.push(SchemaForeignKey {
752 column: from,
753 ref_table,
754 ref_column: to,
755 });
756 }
757 Ok(fks)
758}
759
760fn sqlite_create_index_sql(table: &str, index: &SchemaIndex) -> String {
761 let unique = if index.unique { "UNIQUE " } else { "" };
762 let name = if index.name.is_empty() {
763 format!("idx_{}_{}", table, index.columns.join("_"))
764 } else {
765 index.name.clone()
766 };
767 format!(
768 "CREATE {}INDEX IF NOT EXISTS {} ON {} ({})",
769 unique,
770 name,
771 table,
772 index.columns.join(", ")
773 )
774}
775
/// Renders a `CREATE [UNIQUE] INDEX IF NOT EXISTS` statement for Postgres.
/// An index with an empty name gets a generated `idx_<table>_<cols>` name.
#[cfg(feature = "postgres")]
fn postgres_create_index_sql(table: &str, index: &SchemaIndex) -> String {
    let mut name = index.name.clone();
    if name.is_empty() {
        name = format!("idx_{}_{}", table, index.columns.join("_"));
    }
    let keyword = if index.unique { "UNIQUE INDEX" } else { "INDEX" };
    format!(
        "CREATE {} IF NOT EXISTS {} ON {} ({})",
        keyword,
        name,
        table,
        index.columns.join(", ")
    )
}
792
/// Reads the non-primary-key indexes of `table` from the Postgres system
/// catalogs, with columns in index-definition order.
#[cfg(feature = "postgres")]
async fn introspect_postgres_indexes(
    pool: &PgPool,
    table: &str,
) -> Result<Vec<SchemaIndex>, sqlx::Error> {
    // unnest(indkey) WITH ORDINALITY preserves the column order inside the
    // index; primary-key backing indexes are excluded to mirror the SQLite
    // introspection.
    let rows: Vec<(String, bool, Vec<String>)> = sqlx::query_as(
        "SELECT i.relname AS index_name, ix.indisunique, array_agg(a.attname ORDER BY x.n) AS columns
         FROM pg_class t
         JOIN pg_index ix ON t.oid = ix.indrelid
         JOIN pg_class i ON i.oid = ix.indexrelid
         JOIN LATERAL unnest(ix.indkey) WITH ORDINALITY AS x(attnum, n) ON true
         JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = x.attnum
         WHERE t.relname = $1 AND t.relkind = 'r' AND NOT ix.indisprimary
         GROUP BY i.relname, ix.indisunique
         ORDER BY i.relname",
    )
    .bind(table)
    .fetch_all(pool)
    .await?;

    let indexes = rows
        .into_iter()
        .map(|(name, unique, columns)| SchemaIndex {
            name,
            columns,
            unique,
        })
        .collect();
    Ok(indexes)
}
823
/// Reads the foreign keys of `table` (public schema) from information_schema.
///
/// NOTE(review): for multi-column foreign keys this join can fan out to one
/// row per (local column x referenced column) pairing — confirm composite
/// foreign keys are out of scope before relying on this.
#[cfg(feature = "postgres")]
async fn introspect_postgres_foreign_keys(
    pool: &PgPool,
    table: &str,
) -> Result<Vec<SchemaForeignKey>, sqlx::Error> {
    let rows: Vec<(String, String, String)> = sqlx::query_as(
        "SELECT kcu.column_name, ccu.table_name, ccu.column_name
         FROM information_schema.table_constraints tc
         JOIN information_schema.key_column_usage kcu
           ON tc.constraint_name = kcu.constraint_name AND tc.table_schema = kcu.table_schema
         JOIN information_schema.constraint_column_usage ccu
           ON ccu.constraint_name = tc.constraint_name AND ccu.table_schema = tc.table_schema
         WHERE tc.constraint_type = 'FOREIGN KEY'
           AND tc.table_schema = 'public'
           AND tc.table_name = $1
         ORDER BY kcu.ordinal_position",
    )
    .bind(table)
    .fetch_all(pool)
    .await?;

    let fks = rows
        .into_iter()
        .map(|(column, ref_table, ref_column)| SchemaForeignKey {
            column,
            ref_table,
            ref_column,
        })
        .collect();

    Ok(fks)
}
856
#[cfg(test)]
mod tests {
    use super::*;

    // Round-trip: a table created in SQLite should introspect into exactly
    // the expected schema, yielding an empty diff.
    #[tokio::test]
    async fn sqlite_introspect_and_diff_empty() {
        let pool = SqlitePool::connect("sqlite::memory:").await.unwrap();
        sqlx::query(
            "CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT NOT NULL, deleted_at TEXT)",
        )
        .execute(&pool)
        .await
        .unwrap();

        let expected = vec![SchemaTable {
            name: "users".to_string(),
            columns: vec![
                SchemaColumn {
                    name: "id".to_string(),
                    sql_type: "INTEGER".to_string(),
                    nullable: false,
                    primary_key: true,
                },
                SchemaColumn {
                    name: "name".to_string(),
                    sql_type: "TEXT".to_string(),
                    nullable: false,
                    primary_key: false,
                },
                SchemaColumn {
                    name: "deleted_at".to_string(),
                    sql_type: "TEXT".to_string(),
                    nullable: true,
                    primary_key: false,
                },
            ],
            indexes: Vec::new(),
            foreign_keys: Vec::new(),
            create_sql: None,
        }];

        let actual = introspect_sqlite_schema(&pool).await.unwrap();
        let diff = diff_schema(&expected, &actual);
        assert!(diff.is_empty());
    }

    // A column declared by the model but absent from the database must be
    // reported as missing and produce an ALTER TABLE ADD COLUMN statement.
    #[tokio::test]
    async fn sqlite_diff_reports_missing_column() {
        let pool = SqlitePool::connect("sqlite::memory:").await.unwrap();
        sqlx::query("CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT)")
            .execute(&pool)
            .await
            .unwrap();

        let expected = vec![SchemaTable {
            name: "users".to_string(),
            columns: vec![
                SchemaColumn {
                    name: "id".to_string(),
                    sql_type: "INTEGER".to_string(),
                    nullable: false,
                    primary_key: true,
                },
                SchemaColumn {
                    name: "name".to_string(),
                    sql_type: "TEXT".to_string(),
                    nullable: false,
                    primary_key: false,
                },
                // "status" is intentionally absent from the live table.
                SchemaColumn {
                    name: "status".to_string(),
                    sql_type: "TEXT".to_string(),
                    nullable: true,
                    primary_key: false,
                },
            ],
            indexes: Vec::new(),
            foreign_keys: Vec::new(),
            create_sql: None,
        }];

        let actual = introspect_sqlite_schema(&pool).await.unwrap();
        let diff = diff_schema(&expected, &actual);
        assert_eq!(diff.missing_columns.len(), 1);

        let summary = format_schema_diff_summary(&diff);
        assert!(summary.contains("missing columns: 1"));

        let sql = sqlite_migration_sql(&expected, &diff);
        assert!(
            sql.iter()
                .any(|stmt| stmt.contains("ALTER TABLE users ADD COLUMN status"))
        );
    }

    // An index declared by the model but absent from the database must be
    // reported as missing and produce a CREATE INDEX statement.
    #[tokio::test]
    async fn sqlite_diff_reports_missing_index() {
        let pool = SqlitePool::connect("sqlite::memory:").await.unwrap();
        sqlx::query("CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT)")
            .execute(&pool)
            .await
            .unwrap();

        let expected = vec![SchemaTable {
            name: "users".to_string(),
            columns: vec![
                SchemaColumn {
                    name: "id".to_string(),
                    sql_type: "INTEGER".to_string(),
                    nullable: false,
                    primary_key: true,
                },
                SchemaColumn {
                    name: "name".to_string(),
                    sql_type: "TEXT".to_string(),
                    nullable: false,
                    primary_key: false,
                },
            ],
            // Declared index that the live database does not have.
            indexes: vec![SchemaIndex {
                name: "idx_users_name".to_string(),
                columns: vec!["name".to_string()],
                unique: false,
            }],
            foreign_keys: Vec::new(),
            create_sql: None,
        }];

        let actual = introspect_sqlite_schema(&pool).await.unwrap();
        let diff = diff_schema(&expected, &actual);
        assert_eq!(diff.missing_indexes.len(), 1);

        let sql = sqlite_migration_sql(&expected, &diff);
        assert!(
            sql.iter()
                .any(|stmt| stmt.contains("CREATE INDEX IF NOT EXISTS idx_users_name"))
        );
    }

    // Connection string for the Postgres integration test; DATABASE_URL
    // overrides the local development default.
    #[cfg(feature = "postgres")]
    fn pg_url() -> String {
        std::env::var("DATABASE_URL").unwrap_or_else(|_| {
            "postgres://postgres:admin123@localhost:5432/premix_bench".to_string()
        })
    }

    // Postgres round-trip over two tables with an index and a foreign key.
    // Skips silently (early return) when no Postgres server is reachable.
    #[cfg(feature = "postgres")]
    #[tokio::test]
    async fn postgres_introspect_and_diff() {
        let url = pg_url();
        let pool = match PgPool::connect(&url).await {
            Ok(pool) => pool,
            Err(_) => return,
        };

        // Drop children before parents to satisfy the FK dependency.
        sqlx::query("DROP TABLE IF EXISTS schema_posts")
            .execute(&pool)
            .await
            .unwrap();
        sqlx::query("DROP TABLE IF EXISTS schema_users")
            .execute(&pool)
            .await
            .unwrap();

        sqlx::query("CREATE TABLE schema_users (id SERIAL PRIMARY KEY, name TEXT NOT NULL)")
            .execute(&pool)
            .await
            .unwrap();
        sqlx::query(
            "CREATE TABLE schema_posts (id SERIAL PRIMARY KEY, user_id INTEGER NOT NULL, title TEXT NOT NULL, CONSTRAINT fk_schema_posts_user_id FOREIGN KEY (user_id) REFERENCES schema_users(id))",
        )
        .execute(&pool)
        .await
        .unwrap();
        sqlx::query("CREATE INDEX idx_schema_posts_user_id ON schema_posts (user_id)")
            .execute(&pool)
            .await
            .unwrap();

        let expected = vec![
            SchemaTable {
                name: "schema_posts".to_string(),
                columns: vec![
                    SchemaColumn {
                        name: "id".to_string(),
                        sql_type: "INTEGER".to_string(),
                        nullable: false,
                        primary_key: true,
                    },
                    SchemaColumn {
                        name: "user_id".to_string(),
                        sql_type: "INTEGER".to_string(),
                        nullable: false,
                        primary_key: false,
                    },
                    SchemaColumn {
                        name: "title".to_string(),
                        sql_type: "TEXT".to_string(),
                        nullable: false,
                        primary_key: false,
                    },
                ],
                indexes: vec![SchemaIndex {
                    name: "idx_schema_posts_user_id".to_string(),
                    columns: vec!["user_id".to_string()],
                    unique: false,
                }],
                foreign_keys: vec![SchemaForeignKey {
                    column: "user_id".to_string(),
                    ref_table: "schema_users".to_string(),
                    ref_column: "id".to_string(),
                }],
                create_sql: None,
            },
            SchemaTable {
                name: "schema_users".to_string(),
                columns: vec![
                    SchemaColumn {
                        name: "id".to_string(),
                        sql_type: "INTEGER".to_string(),
                        nullable: false,
                        primary_key: true,
                    },
                    SchemaColumn {
                        name: "name".to_string(),
                        sql_type: "TEXT".to_string(),
                        nullable: false,
                        primary_key: false,
                    },
                ],
                indexes: Vec::new(),
                foreign_keys: Vec::new(),
                create_sql: None,
            },
        ];

        let actual = introspect_postgres_schema(&pool).await.unwrap();
        // The shared database may hold unrelated tables; only compare the
        // tables this test created.
        let expected_names: BTreeSet<String> =
            expected.iter().map(|table| table.name.clone()).collect();
        let actual = actual
            .into_iter()
            .filter(|table| expected_names.contains(&table.name))
            .collect::<Vec<_>>();
        let diff = diff_schema(&expected, &actual);
        assert!(diff.is_empty(), "postgres schema diff: {diff:?}");

        // Clean up so repeated runs start from a blank slate.
        sqlx::query("DROP TABLE IF EXISTS schema_posts")
            .execute(&pool)
            .await
            .unwrap();
        sqlx::query("DROP TABLE IF EXISTS schema_users")
            .execute(&pool)
            .await
            .unwrap();
    }
}