mod common;
use common::*;
/// Cross-file FK combined with a column type migration (VARCHAR -> TEXT on
/// both sides of the key). Verifies the planner emits every `AlterColumn`
/// BEFORE the `AddForeignKey`, and that the generated SQL applies cleanly
/// against a live database, leaving the FK constraint in place.
#[tokio::test]
async fn cross_file_fk_with_column_type_migration() {
    let (_container, url) = setup_postgres().await;
    let connection = PgConnection::new(&url).await.unwrap();

    // Seed: both key columns are VARCHAR(50); no FK exists yet.
    let initial_sql = r#"
CREATE TABLE "public"."Parent" (
"id" VARCHAR(50) NOT NULL,
"name" TEXT,
CONSTRAINT "Parent_pkey" PRIMARY KEY ("id")
);
CREATE TABLE "public"."Child" (
"id" VARCHAR(50) NOT NULL,
"parentId" VARCHAR(50) NOT NULL,
CONSTRAINT "Child_pkey" PRIMARY KEY ("id")
);
"#;
    for stmt in initial_sql.split(';').filter(|s| !s.trim().is_empty()) {
        sqlx::query(stmt).execute(connection.pool()).await.unwrap();
    }

    // Target: both columns become TEXT and the FK is added.
    let target_schema = parse_sql_string(
        r#"
CREATE TABLE "public"."Parent" (
"id" TEXT NOT NULL,
"name" TEXT,
CONSTRAINT "Parent_pkey" PRIMARY KEY ("id")
);
CREATE TABLE "public"."Child" (
"id" TEXT NOT NULL,
"parentId" TEXT NOT NULL,
CONSTRAINT "Child_pkey" PRIMARY KEY ("id")
);
ALTER TABLE "public"."Child"
ADD CONSTRAINT "Child_parentId_fkey"
FOREIGN KEY ("parentId") REFERENCES "public"."Parent"("id")
ON DELETE CASCADE ON UPDATE CASCADE;
"#,
    )
    .unwrap();

    let current_schema = introspect_schema(&connection, &["public".to_string()], false)
        .await
        .unwrap();
    // Fix: `&current_schema` had been mangled to `¤t_schema` (the
    // "&curren" prefix was decoded as the HTML entity for '¤'), which
    // does not compile.
    let ops = compute_diff(&current_schema, &target_schema);
    let planned = plan_migration(ops);

    // Ordering check: no AlterColumn may appear after an AddForeignKey.
    let mut found_alter_columns = false;
    let mut found_add_fk = false;
    let mut alter_after_fk = false;
    for op in &planned {
        match op {
            MigrationOp::AlterColumn { .. } => {
                found_alter_columns = true;
                if found_add_fk {
                    alter_after_fk = true;
                }
            }
            MigrationOp::AddForeignKey { .. } => {
                found_add_fk = true;
            }
            _ => {}
        }
    }
    assert!(
        found_alter_columns,
        "Should have AlterColumn operations for VARCHAR->TEXT conversion"
    );
    assert!(found_add_fk, "Should have AddForeignKey operation");
    assert!(
        !alter_after_fk,
        "AlterColumn operations should come BEFORE AddForeignKey"
    );

    // Apply the generated SQL; include the driver error in the panic
    // message for consistency with the other tests in this file.
    let sql = generate_sql(&planned);
    for stmt in &sql {
        sqlx::query(stmt)
            .execute(connection.pool())
            .await
            .unwrap_or_else(|e| panic!("Failed to execute: {stmt}: {e}"));
    }

    // The FK constraint must exist after the migration ran.
    let fk_count: (i64,) = sqlx::query_as(
        r#"
SELECT COUNT(*) FROM information_schema.table_constraints
WHERE constraint_type = 'FOREIGN KEY'
AND table_name = 'Child'
AND constraint_name = 'Child_parentId_fkey'
"#,
    )
    .fetch_one(connection.pool())
    .await
    .unwrap();
    assert_eq!(fk_count.0, 1, "FK constraint should exist");
}
/// TEXT -> UUID migration on FK-linked columns with existing rows.
/// The generated `ALTER COLUMN ... TYPE` must carry a `USING` clause
/// (Postgres has no implicit TEXT->UUID cast), the migration must apply
/// cleanly, and pre-existing data must be preserved.
#[tokio::test]
async fn cross_file_fk_text_to_uuid_migration() {
    let (_container, url) = setup_postgres().await;
    let connection = PgConnection::new(&url).await.unwrap();

    // Seed: TEXT key columns, no FK, then insert UUID-shaped string data.
    let initial_sql = r#"
CREATE TABLE "public"."Parent" (
"id" TEXT NOT NULL,
"name" TEXT,
CONSTRAINT "Parent_pkey" PRIMARY KEY ("id")
);
CREATE TABLE "public"."Child" (
"id" TEXT NOT NULL,
"parentId" TEXT NOT NULL,
CONSTRAINT "Child_pkey" PRIMARY KEY ("id")
);
"#;
    for stmt in initial_sql.split(';').filter(|s| !s.trim().is_empty()) {
        sqlx::query(stmt).execute(connection.pool()).await.unwrap();
    }
    sqlx::query("INSERT INTO \"public\".\"Parent\" (\"id\", \"name\") VALUES ('550e8400-e29b-41d4-a716-446655440000', 'Parent 1')")
        .execute(connection.pool())
        .await
        .unwrap();
    sqlx::query("INSERT INTO \"public\".\"Child\" (\"id\", \"parentId\") VALUES ('660e8400-e29b-41d4-a716-446655440001', '550e8400-e29b-41d4-a716-446655440000')")
        .execute(connection.pool())
        .await
        .unwrap();

    // Target: all key columns become UUID and the FK is added.
    let target_schema = parse_sql_string(
        r#"
CREATE TABLE "public"."Parent" (
"id" UUID NOT NULL,
"name" TEXT,
CONSTRAINT "Parent_pkey" PRIMARY KEY ("id")
);
CREATE TABLE "public"."Child" (
"id" UUID NOT NULL,
"parentId" UUID NOT NULL,
CONSTRAINT "Child_pkey" PRIMARY KEY ("id")
);
ALTER TABLE "public"."Child"
ADD CONSTRAINT "Child_parentId_fkey"
FOREIGN KEY ("parentId") REFERENCES "public"."Parent"("id")
ON DELETE CASCADE ON UPDATE CASCADE;
"#,
    )
    .unwrap();

    let current_schema = introspect_schema(&connection, &["public".to_string()], false)
        .await
        .unwrap();
    // Fix: `&current_schema` had been mangled to `¤t_schema` (the
    // "&curren" prefix was decoded as the HTML entity for '¤'), which
    // does not compile.
    let ops = compute_diff(&current_schema, &target_schema);
    let planned = plan_migration(ops);
    let sql = generate_sql(&planned);

    // Postgres cannot implicitly cast TEXT to UUID, so the ALTER must
    // include a USING clause to convert the existing values.
    let has_using_clause = sql.iter().any(|s| s.contains("USING"));
    assert!(
        has_using_clause,
        "ALTER COLUMN TYPE should include USING clause for TEXT->UUID conversion"
    );
    for stmt in &sql {
        sqlx::query(stmt)
            .execute(connection.pool())
            .await
            .unwrap_or_else(|e| panic!("Failed to execute: {stmt}: {e}"));
    }

    // Both columns should now report `uuid` in information_schema.
    let parent_col_type: (String,) = sqlx::query_as(
        r#"
SELECT data_type FROM information_schema.columns
WHERE table_schema = 'public' AND table_name = 'Parent' AND column_name = 'id'
"#,
    )
    .fetch_one(connection.pool())
    .await
    .unwrap();
    assert_eq!(parent_col_type.0, "uuid", "Parent.id should be UUID type");
    let child_col_type: (String,) = sqlx::query_as(
        r#"
SELECT data_type FROM information_schema.columns
WHERE table_schema = 'public' AND table_name = 'Child' AND column_name = 'parentId'
"#,
    )
    .fetch_one(connection.pool())
    .await
    .unwrap();
    assert_eq!(
        child_col_type.0, "uuid",
        "Child.parentId should be UUID type"
    );

    // The FK constraint must exist after the migration.
    let fk_count: (i64,) = sqlx::query_as(
        r#"
SELECT COUNT(*) FROM information_schema.table_constraints
WHERE constraint_type = 'FOREIGN KEY'
AND table_name = 'Child'
AND constraint_name = 'Child_parentId_fkey'
"#,
    )
    .fetch_one(connection.pool())
    .await
    .unwrap();
    assert_eq!(fk_count.0, 1, "FK constraint should exist");

    // Data must survive the in-place type conversion.
    let parent_count: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM \"public\".\"Parent\"")
        .fetch_one(connection.pool())
        .await
        .unwrap();
    assert_eq!(parent_count.0, 1, "Parent data should be preserved");
    let child_count: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM \"public\".\"Child\"")
        .fetch_one(connection.pool())
        .await
        .unwrap();
    assert_eq!(child_count.0, 1, "Child data should be preserved");
}
/// Same TEXT -> UUID scenario, but the target schema is loaded from
/// multiple SQL files via a glob (`load_schema_sources`) instead of a
/// single string — the FK lives in a different file from the table it
/// references. Checks only the planned operation ordering.
#[tokio::test]
async fn cross_file_fk_text_to_uuid_multifile() {
    let (_container, url) = setup_postgres().await;
    let connection = PgConnection::new(&url).await.unwrap();

    // Seed a non-default schema with TEXT key columns and no FK.
    let initial_sql = r#"
CREATE TABLE "myschema"."Parent" (
"id" TEXT NOT NULL,
"name" TEXT,
CONSTRAINT "Parent_pkey" PRIMARY KEY ("id")
);
CREATE TABLE "myschema"."Child" (
"id" TEXT NOT NULL,
"parentId" TEXT NOT NULL,
CONSTRAINT "Child_pkey" PRIMARY KEY ("id")
);
"#;
    sqlx::query("CREATE SCHEMA IF NOT EXISTS myschema")
        .execute(connection.pool())
        .await
        .unwrap();
    for stmt in initial_sql.split(';').filter(|s| !s.trim().is_empty()) {
        sqlx::query(stmt).execute(connection.pool()).await.unwrap();
    }

    // Target schema split across two files: tables in one, the FK that
    // crosses the file boundary in the other.
    let temp_dir = tempfile::tempdir().unwrap();
    let parent_file = temp_dir.path().join("00_tables.sql");
    std::fs::write(
        &parent_file,
        r#"
CREATE TABLE IF NOT EXISTS "myschema"."Parent" (
"id" UUID NOT NULL,
"name" TEXT,
CONSTRAINT "Parent_pkey" PRIMARY KEY ("id")
);
"#,
    )
    .unwrap();
    let child_file = temp_dir.path().join("child_table.sql");
    std::fs::write(
        &child_file,
        r#"
CREATE TABLE IF NOT EXISTS "myschema"."Child" (
"id" UUID NOT NULL,
"parentId" UUID NOT NULL,
CONSTRAINT "Child_pkey" PRIMARY KEY ("id")
);
ALTER TABLE "myschema"."Child"
ADD CONSTRAINT "Child_parentId_fkey"
FOREIGN KEY ("parentId") REFERENCES "myschema"."Parent"("id")
ON DELETE CASCADE ON UPDATE CASCADE;
"#,
    )
    .unwrap();
    let sources = vec![format!("{}/*.sql", temp_dir.path().display())];
    let target_schema = load_schema_sources(&sources).unwrap();

    let current_schema = introspect_schema(&connection, &["myschema".to_string()], false)
        .await
        .unwrap();
    // Fix: `&current_schema` had been mangled to `¤t_schema` (the
    // "&curren" prefix was decoded as the HTML entity for '¤'), which
    // does not compile.
    let ops = compute_diff(&current_schema, &target_schema);
    let planned = plan_migration(ops);

    // Ordering check: no AlterColumn may appear after an AddForeignKey.
    let mut found_alter_columns = false;
    let mut found_add_fk = false;
    let mut alter_after_fk = false;
    for op in &planned {
        match op {
            MigrationOp::AlterColumn { .. } => {
                found_alter_columns = true;
                if found_add_fk {
                    alter_after_fk = true;
                }
            }
            MigrationOp::AddForeignKey { .. } => {
                found_add_fk = true;
            }
            _ => {}
        }
    }
    assert!(
        found_alter_columns,
        "Should have AlterColumn operations for TEXT->UUID conversion"
    );
    assert!(found_add_fk, "Should have AddForeignKey operation");
    assert!(
        !alter_after_fk,
        "AlterColumn operations should come BEFORE AddForeignKey"
    );
}
/// Type change on columns joined by an FK that ALREADY exists in the
/// database. The planner must sandwich the type change:
/// DropForeignKey -> AlterColumn -> AddForeignKey. Also applies the SQL
/// end-to-end and checks the restored constraint and converted types.
#[tokio::test]
async fn fk_type_change_with_existing_fk_in_database() {
    let (_container, url) = setup_postgres().await;
    let connection = PgConnection::new(&url).await.unwrap();
    sqlx::query("CREATE SCHEMA IF NOT EXISTS mrv")
        .execute(connection.pool())
        .await
        .unwrap();

    // Seed: TEXT key columns WITH the FK already in place, plus one row
    // on each side so the conversion has data to carry over.
    let initial_sql = r#"
CREATE TABLE "mrv"."CompoundUnit" (
"id" TEXT NOT NULL,
CONSTRAINT "CompoundUnit_pkey" PRIMARY KEY ("id")
);
CREATE TABLE "mrv"."FertilizerApplication" (
"id" TEXT NOT NULL,
"compoundUnitId" TEXT,
CONSTRAINT "FertilizerApplication_pkey" PRIMARY KEY ("id")
);
ALTER TABLE "mrv"."FertilizerApplication"
ADD CONSTRAINT "FertilizerApplication_compoundUnitId_fkey"
FOREIGN KEY ("compoundUnitId") REFERENCES "mrv"."CompoundUnit"("id");
"#;
    for stmt in initial_sql.split(';').filter(|s| !s.trim().is_empty()) {
        sqlx::query(stmt).execute(connection.pool()).await.unwrap();
    }
    sqlx::query(
        "INSERT INTO \"mrv\".\"CompoundUnit\" (\"id\") VALUES ('550e8400-e29b-41d4-a716-446655440000')",
    )
    .execute(connection.pool())
    .await
    .unwrap();
    sqlx::query(
        "INSERT INTO \"mrv\".\"FertilizerApplication\" (\"id\", \"compoundUnitId\") VALUES ('660e8400-e29b-41d4-a716-446655440001', '550e8400-e29b-41d4-a716-446655440000')",
    )
    .execute(connection.pool())
    .await
    .unwrap();

    // Target: same shape, all key columns UUID, FK retained.
    let target_schema = parse_sql_string(
        r#"
CREATE SCHEMA IF NOT EXISTS "mrv";
CREATE TABLE "mrv"."CompoundUnit" (
"id" UUID NOT NULL,
CONSTRAINT "CompoundUnit_pkey" PRIMARY KEY ("id")
);
CREATE TABLE "mrv"."FertilizerApplication" (
"id" UUID NOT NULL,
"compoundUnitId" UUID,
CONSTRAINT "FertilizerApplication_pkey" PRIMARY KEY ("id")
);
ALTER TABLE "mrv"."FertilizerApplication"
ADD CONSTRAINT "FertilizerApplication_compoundUnitId_fkey"
FOREIGN KEY ("compoundUnitId") REFERENCES "mrv"."CompoundUnit"("id");
"#,
    )
    .unwrap();

    let current_schema = introspect_schema(&connection, &["mrv".to_string()], false)
        .await
        .unwrap();
    // Fix: `&current_schema` had been mangled to `¤t_schema` (the
    // "&curren" prefix was decoded as the HTML entity for '¤'), which
    // does not compile.
    let ops = compute_diff(&current_schema, &target_schema);
    let planned = plan_migration(ops);

    // Ordering checks: Drop before Alter, Alter before Add.
    let mut found_drop_fk = false;
    let mut found_alter_columns = false;
    let mut found_add_fk = false;
    let mut alter_before_drop = false;
    let mut add_before_alter = false;
    for op in &planned {
        match op {
            MigrationOp::DropForeignKey { .. } => {
                found_drop_fk = true;
                if found_alter_columns {
                    alter_before_drop = true;
                }
            }
            MigrationOp::AlterColumn { .. } => {
                found_alter_columns = true;
                if found_add_fk {
                    add_before_alter = true;
                }
            }
            MigrationOp::AddForeignKey { .. } => {
                found_add_fk = true;
            }
            _ => {}
        }
    }
    assert!(
        found_drop_fk,
        "Should have DropForeignKey operation for FK affected by type change"
    );
    assert!(
        found_alter_columns,
        "Should have AlterColumn operations for TEXT->UUID conversion"
    );
    assert!(
        found_add_fk,
        "Should have AddForeignKey operation to restore FK after type change"
    );
    assert!(
        !alter_before_drop,
        "DropForeignKey must come BEFORE AlterColumn"
    );
    assert!(
        !add_before_alter,
        "AlterColumn must come BEFORE AddForeignKey"
    );

    // Apply the migration end-to-end.
    let sql = generate_sql(&planned);
    for stmt in &sql {
        sqlx::query(stmt)
            .execute(connection.pool())
            .await
            .unwrap_or_else(|e| panic!("Failed to execute: {stmt}: {e}"));
    }

    // Verify the converted type and the restored FK constraint.
    let compound_col_type: (String,) = sqlx::query_as(
        r#"
SELECT data_type FROM information_schema.columns
WHERE table_schema = 'mrv' AND table_name = 'CompoundUnit' AND column_name = 'id'
"#,
    )
    .fetch_one(connection.pool())
    .await
    .unwrap();
    assert_eq!(
        compound_col_type.0, "uuid",
        "CompoundUnit.id should be UUID type"
    );
    let fk_count: (i64,) = sqlx::query_as(
        r#"
SELECT COUNT(*) FROM information_schema.table_constraints
WHERE constraint_type = 'FOREIGN KEY'
AND table_schema = 'mrv'
AND table_name = 'FertilizerApplication'
AND constraint_name = 'FertilizerApplication_compoundUnitId_fkey'
"#,
    )
    .fetch_one(connection.pool())
    .await
    .unwrap();
    assert_eq!(fk_count.0, 1, "FK constraint should exist after migration");
}
/// Type change ONLY on the referenced (parent) side of an existing FK.
/// Even though the referencing column type is also updated in the target,
/// the planner must still drop the FK before altering the referenced
/// column — Postgres rejects altering a column under an active FK.
#[tokio::test]
async fn fk_type_change_referenced_column_only() {
    let (_container, url) = setup_postgres().await;
    let connection = PgConnection::new(&url).await.unwrap();
    sqlx::query("CREATE SCHEMA IF NOT EXISTS mrv")
        .execute(connection.pool())
        .await
        .unwrap();

    // Seed: TEXT key columns with the FK already present.
    let initial_sql = r#"
CREATE TABLE "mrv"."Parent" (
"id" TEXT NOT NULL,
CONSTRAINT "Parent_pkey" PRIMARY KEY ("id")
);
CREATE TABLE "mrv"."Child" (
"id" TEXT NOT NULL,
"parentId" TEXT,
CONSTRAINT "Child_pkey" PRIMARY KEY ("id")
);
ALTER TABLE "mrv"."Child"
ADD CONSTRAINT "Child_parentId_fkey"
FOREIGN KEY ("parentId") REFERENCES "mrv"."Parent"("id");
"#;
    for stmt in initial_sql.split(';').filter(|s| !s.trim().is_empty()) {
        sqlx::query(stmt).execute(connection.pool()).await.unwrap();
    }

    // Target: Parent.id and Child.parentId become UUID; Child.id stays TEXT.
    let target_schema = parse_sql_string(
        r#"
CREATE SCHEMA IF NOT EXISTS "mrv";
CREATE TABLE "mrv"."Parent" (
"id" UUID NOT NULL,
CONSTRAINT "Parent_pkey" PRIMARY KEY ("id")
);
CREATE TABLE "mrv"."Child" (
"id" TEXT NOT NULL,
"parentId" UUID,
CONSTRAINT "Child_pkey" PRIMARY KEY ("id")
);
ALTER TABLE "mrv"."Child"
ADD CONSTRAINT "Child_parentId_fkey"
FOREIGN KEY ("parentId") REFERENCES "mrv"."Parent"("id");
"#,
    )
    .unwrap();

    let current_schema = introspect_schema(&connection, &["mrv".to_string()], false)
        .await
        .unwrap();
    // Fix: `&current_schema` had been mangled to `¤t_schema` (the
    // "&curren" prefix was decoded as the HTML entity for '¤'), which
    // does not compile.
    let ops = compute_diff(&current_schema, &target_schema);
    let planned = plan_migration(ops);

    // The FK must be dropped, and the drop must precede the first
    // column alteration.
    let drop_fk_ops: Vec<_> = planned
        .iter()
        .filter(|op| matches!(op, MigrationOp::DropForeignKey { .. }))
        .collect();
    assert!(
        !drop_fk_ops.is_empty(),
        "Should generate DropForeignKey when referenced column type changes"
    );
    let drop_fk_pos = planned
        .iter()
        .position(|op| matches!(op, MigrationOp::DropForeignKey { .. }))
        .unwrap();
    let alter_pos = planned
        .iter()
        .position(|op| matches!(op, MigrationOp::AlterColumn { .. }))
        .unwrap();
    assert!(
        drop_fk_pos < alter_pos,
        "DropForeignKey (pos {drop_fk_pos}) must come before AlterColumn (pos {alter_pos})"
    );
}