1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
use crate::Database;
use diesel::{
migration::{Migration, MigrationSource},
query_dsl::RunQueryDsl,
sql_query,
sql_types::Text,
};
use diesel_migrations::{FileBasedMigrations, MigrationHarness};
use serde::{Deserialize, Serialize};
use super::{CreateRustAppMigration, MigrationStatus};
/// Row shape for the `json_agg` wrapper query used by `query_db`: a single
/// text column named `json` holding the whole aggregated result set.
#[derive(Debug, Deserialize, QueryableByName)]
pub struct MyQueryResult {
    // Raw JSON array produced by `json_agg` (the entire result set as one string).
    #[diesel(sql_type=Text)]
    pub json: String,
}
/// Request body for `query_db`: an arbitrary SQL statement supplied by the caller.
/// NOTE(review): the statement is executed verbatim — confirm only trusted
/// (developer/admin) users can reach the endpoint that deserializes this.
#[derive(Serialize, Deserialize)]
pub struct MySqlQuery {
    // SQL text to execute; interpolated directly into the wrapper query.
    pub query: String,
}
/// JSON payload for health-check responses.
#[derive(Serialize, Deserialize)]
pub struct HealthCheckResponse {
    // Human-readable status message.
    pub message: String,
}
/// Execute an arbitrary SQL query and return the result set as a JSON string
/// (the query is wrapped in `json_agg` so all rows come back as one JSON array).
///
/// NOTE(review): `body.query` is interpolated directly into the SQL text —
/// this is SQL injection by design and must only be reachable by trusted
/// developer/admin users; confirm access is gated at the route layer.
/// NOTE(review): `json_agg` is a PostgreSQL function — this likely fails under
/// the sqlite backend; confirm feature gating at the caller.
///
/// # Errors
/// Propagates any `diesel::result::Error` from executing the query.
///
/// # Panics
/// Panics if a pooled connection cannot be obtained (`.unwrap()` on `pool.get()`).
pub fn query_db(db: &Database, body: &MySqlQuery) -> Result<String, diesel::result::Error> {
    // Wrap the caller's query so the whole result set aggregates into one JSON value.
    let q = format!("SELECT json_agg(q) as json FROM ({}) q;", body.query);
    let mut db = db.pool.get().unwrap();
    Ok(sql_query(q.as_str())
        .get_result::<MyQueryResult>(&mut db)?
        .json)
}
/// Report whether the database is reachable: obtains a pooled connection and
/// runs a trivial probe query (`SELECT 1;`).
///
/// Fix: the original returned `is_err()`, i.e. `true` when the probe FAILED —
/// inverted relative to the function's name. It now returns `is_ok()`.
///
/// # Panics
/// Panics if the pool cannot hand out a connection (`.unwrap()`), matching the
/// other helpers in this module.
pub fn is_connected(db: &Database) -> bool {
    // `Pool::get` takes `&self`; the previous `pool.clone()` was redundant.
    let mut conn = db.pool.get().unwrap();
    sql_query("SELECT 1;").execute(&mut conn).is_ok()
}
pub fn get_migrations(db: &Database) -> Vec<CreateRustAppMigration> {
let mut db = db.pool.clone().get().unwrap();
let source = FileBasedMigrations::find_migrations_directory().unwrap();
#[cfg(feature = "database_sqlite")]
let file_migrations =
MigrationSource::<crate::database::DieselBackend>::migrations(&source).unwrap();
#[cfg(feature = "database_postgres")]
let file_migrations =
MigrationSource::<crate::database::DieselBackend>::migrations(&source).unwrap();
let db_migrations = MigrationHarness::applied_migrations(&mut db).unwrap();
let pending_migrations = MigrationHarness::pending_migrations(&mut db, source).unwrap();
let mut all_migrations = vec![];
file_migrations.iter().for_each(|fm| {
all_migrations.push(CreateRustAppMigration {
name: fm.name().to_string(),
version: fm.name().version().to_string(),
status: MigrationStatus::Unknown,
})
});
pending_migrations.iter().for_each(|pm| {
if let Some(existing) = all_migrations.iter_mut().find(|m| {
m.version
.eq_ignore_ascii_case(&pm.name().version().to_string())
}) {
existing.status = MigrationStatus::Pending;
}
});
db_migrations.iter().for_each(|dm| {
match all_migrations
.iter_mut()
.find(|m| m.version.eq_ignore_ascii_case(&dm.to_string()))
{
Some(existing) => {
existing.status = MigrationStatus::Applied;
}
None => all_migrations.push(CreateRustAppMigration {
name: format!("{dm}_?"),
version: dm.to_string(),
status: MigrationStatus::AppliedButMissingLocally,
}),
}
});
all_migrations
}
/// Return `true` when at least one file-based migration has not yet been
/// applied to the database.
///
/// # Panics
/// Panics if the pool, the migrations-directory lookup, or the harness check
/// fails — consistent with the other helpers in this module.
pub fn needs_migration(db: &Database) -> bool {
    // `Pool::get` takes `&self`; the previous `pool.clone()` was redundant.
    let mut conn = db.pool.get().unwrap();
    let source = FileBasedMigrations::find_migrations_directory().unwrap();
    MigrationHarness::has_pending_migration(&mut conn, source).unwrap()
}
/// Run all pending migrations.
///
/// Returns `(true, None)` on success — including the no-op case where nothing
/// is pending — and `(false, Some(message))` when the harness reports an error.
///
/// # Panics
/// Panics if the pool, the migrations-directory lookup, or the pending-check
/// fails — consistent with the other helpers in this module.
pub fn migrate_db(db: &Database) -> (bool, Option<String>) {
    // `Pool::get` takes `&self`; the previous `pool.clone()` was redundant.
    let mut conn = db.pool.get().unwrap();
    let source = FileBasedMigrations::find_migrations_directory().unwrap();

    // `source` is consumed by `run_pending_migrations`, so clone it for the check.
    let has_pending =
        MigrationHarness::has_pending_migration(&mut conn, source.clone()).unwrap();
    if !has_pending {
        return (true, None);
    }

    match MigrationHarness::run_pending_migrations(&mut conn, source) {
        Ok(_) => (true, None),
        Err(err) => {
            // Fix: diagnostics belong on stderr, not stdout.
            eprintln!("{err:#?}");
            (false, Some(err.to_string()))
        }
    }
}
pub fn health() {}