use crate::store::DB_MAP_CLIENT_NAMES;
use mongodb::{
bson, bson::document::Document, options::UpdateModifications, sync::Client, sync::Collection,
sync::Cursor, sync::Database,
};
use regex::Regex;
use serde::{Deserialize, Serialize};
/// Snapshot of a Model's schema state as persisted in the service
/// (`mango_tech__*`) database, "models" collection.
///
/// `refresh()` flips `status` to `false` for every stored state; `migrat()`
/// re-activates states of Models still present in code; `napalm()` drops
/// the collections of states left inactive.
// `Debug`/`Clone`/`PartialEq` added: all fields are std types, and public
// data types should be debuggable/comparable (diagnostics, tests).
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
pub struct ModelState {
    /// Name of the database holding the Model's collection.
    pub database: String,
    /// Name of the Model's collection.
    pub collection: String,
    /// Field names of the Model (technical/ignored fields excluded).
    pub fields: Vec<String>,
    /// `false` = Model no longer exists in code; candidate for deletion.
    pub status: bool,
}
/// Migration monitor: validates the project keyword and keeps the service
/// (`mango_tech__*`) database in sync with the Models defined in code.
// NOTE(review): `crate::models::Meta` is defined elsewhere; the fields this
// file reads from it are db_client_name, database_name, collection_name,
// fields_name, ignore_fields, map_default_values, service_name, model_name.
pub struct Monitor<'a> {
// Project keyword; valid characters `_ a-z A-Z 0-9`, length 6-48
// (enforced by `mango_tech_name()`).
pub keyword: &'a str,
// Metadata of every Model registered for migration.
pub models: Vec<crate::models::Meta>,
}
impl<'a> Monitor<'a> {
/// Build the technical name of the service database:
/// `mango_tech__<keyword>`.
///
/// # Panics
/// Panics when `self.keyword` does not match `^[_a-zA-Z\d]{6,48}$`
/// (valid characters: `_ a-z A-Z 0-9`; length 6-48).
pub fn mango_tech_name(&self) -> String {
    let pattern = Regex::new(r"^[_a-zA-Z\d]{6,48}$").unwrap();
    match pattern.is_match(self.keyword) {
        true => format!("mango_tech__{}", self.keyword),
        false => panic!("Keyword - Valid characters: _ a-z A-Z 0-9 ; Size: 6-48."),
    }
}
/// Pre-migration pass: mark every stored Model state as inactive
/// (`status: false`). `migrat()` later re-activates the states of Models
/// that still exist in code; whatever remains inactive is removed by
/// `napalm()`. Creates the service database/"models" collection on first
/// run.
///
/// # Panics
/// Panics on any MongoDB error or on an invalid keyword.
fn refresh(
    &self,
    client_store: &std::sync::MutexGuard<'_, std::collections::HashMap<String, Client>>,
) {
    // Hoisted out of the loop: `mango_tech_name()` compiles a regex on
    // every call, and both values are loop-invariant.
    let mango_tech_keyword: String = self.mango_tech_name();
    let collection_name: &str = "models";
    for meta in self.models.iter() {
        let client: &Client = client_store.get(&meta.db_client_name).unwrap();
        let database_names: Vec<String> = client.list_database_names(None, None).unwrap();
        if !database_names.contains(&mango_tech_keyword) {
            // First run for this client: create service db + collection.
            client
                .database(&mango_tech_keyword)
                .create_collection(collection_name, None)
                .unwrap();
        } else {
            let mango_orm_db: Database = client.database(&mango_tech_keyword);
            let mango_orm_collection: Collection = mango_orm_db.collection(collection_name);
            let cursor: Cursor = mango_orm_collection.find(None, None).unwrap();
            // Flag every stored Model state as inactive.
            for result in cursor {
                match result {
                    Ok(document) => {
                        let mut model_state: ModelState =
                            bson::de::from_document(document).unwrap();
                        model_state.status = false;
                        let query: Document = bson::doc! {
                            "database": &model_state.database,
                            "collection": &model_state.collection
                        };
                        let update: UpdateModifications = UpdateModifications::Document(
                            bson::ser::to_document(&model_state).unwrap(),
                        );
                        mango_orm_collection
                            .update_one(query, update, None)
                            .unwrap();
                    }
                    Err(err) => panic!("Migration `refresh()` > {}", err),
                }
            }
        }
    }
}
/// Post-migration pass: for every Model state still inactive
/// (`status: false`) after `refresh()` + `migrat()`, drop the Model's
/// collection and delete its state record from the service database.
///
/// # Panics
/// Panics on any MongoDB error or on an invalid keyword.
fn napalm(
    &self,
    client_store: &std::sync::MutexGuard<'_, std::collections::HashMap<String, Client>>,
) {
    // Hoisted loop invariants (`mango_tech_name()` compiles a regex).
    let mango_tech_keyword: String = self.mango_tech_name();
    let collection_name: &str = "models";
    for meta in self.models.iter() {
        let client: &Client = client_store.get(&meta.db_client_name).unwrap();
        let mango_tech_db: Database = client.database(&mango_tech_keyword);
        let mango_tech_collection: Collection = mango_tech_db.collection(collection_name);
        let cursor: Cursor = mango_tech_collection.find(None, None).unwrap();
        // Snapshot the cursor into a Vec before the loop: `delete_one`
        // below mutates the very collection the cursor is reading.
        let results: Vec<Result<Document, mongodb::error::Error>> = cursor.collect();
        for result in results {
            match result {
                Ok(document) => {
                    let model_state: ModelState = bson::de::from_document(document).unwrap();
                    if !model_state.status {
                        // Drop the orphaned Model collection ...
                        client
                            .database(&model_state.database)
                            .collection(&model_state.collection)
                            .drop(None)
                            .unwrap();
                        // ... then remove its state record.
                        let query: Document = bson::doc! {
                            "database": &model_state.database,
                            "collection": &model_state.collection
                        };
                        mango_tech_collection.delete_one(query, None).unwrap();
                    }
                }
                Err(err) => panic!("Migration `napalm()` > {}", err),
            }
        }
    }
}
pub fn migrat(&self) {
let client_store: std::sync::MutexGuard<'_, std::collections::HashMap<String, Client>> =
DB_MAP_CLIENT_NAMES.lock().unwrap();
self.refresh(&client_store);
for meta in self.models.iter() {
let client: &Client = client_store.get(&meta.db_client_name).unwrap();
let fields_name: Vec<&str> =
meta.fields_name.iter().map(|item| item.as_str()).collect();
let ignore_fields: Vec<&str> = meta
.ignore_fields
.iter()
.map(|item| item.as_str())
.collect();
let trunc_list_fields_name: Vec<&str> = fields_name
.iter()
.filter(|item| **item != "hash" && !ignore_fields.contains(item))
.map(|item| *item)
.collect();
let mango_tech_keyword: String = self.mango_tech_name();
let database_names: Vec<String> = client.list_database_names(None, None).unwrap();
let map_default_values: std::collections::HashMap<String, (String, String)> =
meta.map_default_values.clone();
let filter: Document = mongodb::bson::doc! {
"database": &meta.database_name,
"collection": &meta.collection_name
};
let model: Option<Document> = client
.database(&mango_tech_keyword)
.collection("models")
.find_one(filter, None)
.unwrap();
if model.is_some() {
let mango_orm_fnames: Vec<String> = {
let model: Document = model.unwrap();
let fields: Vec<mongodb::bson::Bson> =
model.get_array("fields").unwrap().to_vec();
fields
.into_iter()
.map(|item: mongodb::bson::Bson| item.as_str().unwrap().to_string())
.collect()
};
let mut run_documents_modification: bool = false;
if trunc_list_fields_name.len() != mango_orm_fnames.len() {
run_documents_modification = true;
} else {
for item in trunc_list_fields_name.iter() {
if mango_orm_fnames.iter().any(|item2| item2 != item) {
run_documents_modification = true;
break;
}
}
}
if run_documents_modification {
let db: Database = client.database(&meta.database_name);
let collection: mongodb::sync::Collection =
db.collection(&meta.collection_name);
let mut cursor: mongodb::sync::Cursor = collection.find(None, None).unwrap();
while let Some(result) = cursor.next() {
let doc_from_db: mongodb::bson::document::Document = result.unwrap();
let mut tmp_doc = mongodb::bson::document::Document::new();
for field in fields_name.iter() {
if *field == "hash" || ignore_fields.contains(&field) {
continue;
}
if doc_from_db.contains_key(field) {
let value_from_db: Option<&mongodb::bson::Bson> =
doc_from_db.get(field);
if value_from_db.is_some() {
tmp_doc.insert(field.to_string(), value_from_db.unwrap());
} else {
panic!(
"Service: `{}` > Model: `{}` > Field: `{}` > \
Method: `migrat()` : \
Can't get field value from database.",
meta.service_name, meta.model_name, field
);
}
} else {
let value = map_default_values.get(*field).unwrap();
tmp_doc.insert(
field.to_string(),
match &value.0[..] {
"checkBoxText" | "radioText" | "inputColor"
| "inputEmail" | "inputPassword" | "inputPhone"
| "inputText" | "inputUrl" | "inputIP" | "inputIPv4"
| "inputIPv6" | "textArea" | "selectText" => {
mongodb::bson::Bson::String(value.1.clone())
}
"inputDate" => {
let val: String = value.1.clone();
if !val.is_empty() {
if !crate::store::REGEX_IS_DATE.is_match(&val) {
panic!(
"Service: `{}` > Model: `{}` > \
Method: `widgets()` : Incorrect date \
format. Example: 1970-02-28",
meta.service_name, meta.model_name
)
}
let val = format!("{}T00:00", val);
let dt: chrono::DateTime<chrono::Utc> =
chrono::DateTime::<chrono::Utc>::from_utc(
chrono::NaiveDateTime::parse_from_str(
&val,
"%Y-%m-%dT%H:%M",
)
.unwrap(),
chrono::Utc,
);
mongodb::bson::Bson::DateTime(dt)
} else {
mongodb::bson::Bson::Null
}
}
"inputDateTime" => {
let val: String = value.1.clone();
if !val.is_empty() {
if !crate::store::REGEX_IS_DATETIME.is_match(&val) {
panic!(
"Service: `{}` > Model: `{}` > \
Method: `widgets()` : \
Incorrect date and time format. \
Example: 1970-02-28T00:00",
meta.service_name, meta.model_name
)
}
let dt: chrono::DateTime<chrono::Utc> =
chrono::DateTime::<chrono::Utc>::from_utc(
chrono::NaiveDateTime::parse_from_str(
&val,
"%Y-%m-%dT%H:%M",
)
.unwrap(),
chrono::Utc,
);
mongodb::bson::Bson::DateTime(dt)
} else {
mongodb::bson::Bson::Null
}
}
"checkBoxI32" | "inputRadioI32" | "inputNumberI32"
| "rangeI32" | "selectI32" => mongodb::bson::Bson::Int32(
value.1.parse::<i32>().unwrap(),
),
"checkBoxU32" | "radioU32" | "numberU32" | "rangeU32"
| "selectU32" | "checkBoxI64" | "radioI64"
| "numberI64" | "rangeI64" | "selectI64" => {
mongodb::bson::Bson::Int64(
value.1.parse::<i64>().unwrap(),
)
}
"checkBoxF64" | "radioF64" | "numberF64" | "rangeF64"
| "selectF64" => mongodb::bson::Bson::Double(
value.1.parse::<f64>().unwrap(),
),
"checkBoxBool" => mongodb::bson::Bson::Boolean(
value.1.parse::<bool>().unwrap(),
),
_ => panic!(
"Service: `{}` > Model: `{}` > Method: \
`migrat()` : Invalid Widget type.",
meta.service_name, meta.model_name
),
},
);
}
}
for field in vec!["created_at", "updated_at"] {
if doc_from_db.contains_key(field) {
let value_from_db: Option<&mongodb::bson::Bson> =
doc_from_db.get(field);
if value_from_db.is_some() {
tmp_doc.insert(field.to_string(), value_from_db.unwrap());
} else {
panic!(
"Service: `{}` > Model: `{}` > \
Method: `migrat()` : \
Cannot get field value from database for \
field `{}`.",
meta.service_name, meta.model_name, field
);
}
} else {
panic!(
"Service: `{}` > Model: `{}` > Method: `migrat()` : \
Key `{}` was not found in the document from \
the database.",
meta.service_name, meta.model_name, field
);
}
}
let query =
mongodb::bson::doc! {"_id": doc_from_db.get_object_id("_id").unwrap()};
let mut update: Document = mongodb::bson::document::Document::new();
update.insert("$set".to_string(), mongodb::bson::Bson::Document(tmp_doc));
collection.update_one(query, update, None).unwrap();
}
}
}
let db: Database = client.database(&meta.database_name);
if !database_names.contains(&meta.database_name)
|| !db
.list_collection_names(None)
.unwrap()
.contains(&meta.collection_name)
{
db.create_collection(&meta.collection_name, None).unwrap();
}
let db: Database = client.database(&mango_tech_keyword);
if !database_names.contains(&mango_tech_keyword)
|| !db
.list_collection_names(None)
.unwrap()
.contains(&"models".to_owned())
{
panic!("For migration not used `models::Monitor.refresh()`.");
} else {
let collection: Collection = db.collection("models");
let filter: Document = mongodb::bson::doc! {"database": &meta.database_name, "collection": &meta.collection_name};
let doc: Document = mongodb::bson::doc! {
"database": &meta.database_name,
"collection": &meta.collection_name,
"fields": trunc_list_fields_name.iter().map(|item| item.to_string())
.collect::<Vec<String>>(),
"status": true
};
if collection.count_documents(filter.clone(), None).unwrap() == 0_i64 {
collection.insert_one(doc, None).unwrap();
} else {
let update: UpdateModifications = UpdateModifications::Document(doc);
collection.update_one(filter, update, None).unwrap();
}
}
}
self.napalm(&client_store);
}
}