use crate::{
forms::{FileData, ImageData},
store::DB_MAP_CLIENT_NAMES,
};
use mongodb::{
bson, bson::document::Document, options::UpdateModifications, sync::Client, sync::Collection,
sync::Cursor, sync::Database,
};
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
/// Snapshot of a model's migration state, as stored in the
/// `monitor_models` service collection of the technical database.
#[derive(Serialize, Deserialize)]
pub struct ModelState {
    // Name of the database that holds the model's collection.
    pub database: String,
    // Name of the model's collection.
    pub collection: String,
    // Persisted field names (without the `hash` field and ignored fields).
    pub fields: Vec<String>,
    // Lifecycle flag: `refresh()` resets this to `false`, `migrat()` sets it
    // back to `true` for models that still exist, and `napalm()` drops the
    // data of any state still `false` afterwards.
    pub status: bool,
}
/// Runs database migrations for all registered models of a project.
pub struct Monitor<'a> {
    // Project name; validated in `mango_tech_name()`.
    pub project_name: &'a str,
    // Unique key of the project; validated in `mango_tech_name()`.
    pub unique_project_key: &'a str,
    // Metadata of every model taking part in the migration.
    pub models: Vec<crate::models::Meta>,
}
impl<'a> Monitor<'a> {
/// Builds the name of the project's technical database:
/// `mango_tech__<project_name>__<unique_project_key>`.
///
/// # Panics
/// Panics with a descriptive message when `project_name` or
/// `unique_project_key` does not match its validation pattern.
pub fn mango_tech_name(&self) -> String {
    let project_name_pattern = Regex::new(r"^[a-zA-Z][_a-zA-Z\d]{1,21}$").unwrap();
    if !project_name_pattern.is_match(self.project_name) {
        panic!("PROJECT_NAME - Valid characters: _ a-z A-Z 0-9 ; Max size: 21 ; First character: a-z A-Z");
    }
    let project_key_pattern = Regex::new(r"^[a-zA-Z\d]{8,16}$").unwrap();
    if !project_key_pattern.is_match(self.unique_project_key) {
        panic!("UNIQUE_PROJECT_KEY - Valid characters: a-z A-Z 0-9 ; Size: 8-16.");
    }
    format!(
        "mango_tech__{}__{}",
        self.project_name, self.unique_project_key
    )
}
/// Pass 1 of a migration: make sure the project's technical database and
/// its two service collections exist, then mark every stored model state
/// as inactive (`status: false`). `migrat()` later re-activates the states
/// of models that still exist, so `napalm()` can clean up the rest.
fn refresh(&self, client_store: &std::sync::RwLockReadGuard<HashMap<String, Client>>) {
    for meta in self.models.iter() {
        let client: &Client = client_store.get(&meta.db_client_name).unwrap();
        let tech_db_name: String = self.mango_tech_name();
        let models_coll_name: &str = "monitor_models";
        let widgets_coll_name: &str = "dynamic_widgets";
        let database_names: Vec<String> = client
            .list_database_names(None, None)
            .unwrap_or_else(|err| panic!("Migration `refresh()` : {}", err.to_string()));
        if !database_names.contains(&tech_db_name) {
            // First run for this project: creating the service collections
            // implicitly creates the technical database as well.
            for service_coll_name in &[models_coll_name, widgets_coll_name] {
                client
                    .database(&tech_db_name)
                    .create_collection(service_coll_name, None)
                    .unwrap_or_else(|err| panic!("Migration `refresh()` : {}", err.to_string()));
            }
        } else {
            // Technical database exists: deactivate every saved model state.
            let tech_db: Database = client.database(&tech_db_name);
            let collection_models: Collection = tech_db.collection(models_coll_name);
            let cursor: Cursor = collection_models
                .find(None, None)
                .unwrap_or_else(|err| panic!("Migration `refresh()` : {}", err.to_string()));
            for result in cursor {
                let document = result
                    .unwrap_or_else(|err| panic!("Migration `refresh()` : {}", err.to_string()));
                let mut model_state: ModelState = bson::de::from_document(document)
                    .unwrap_or_else(|err| panic!("Migration `refresh()` : {}", err.to_string()));
                model_state.status = false;
                let query: Document = bson::doc! {
                    "database": &model_state.database,
                    "collection": &model_state.collection
                };
                let update = UpdateModifications::Document(
                    bson::ser::to_document(&model_state)
                        .unwrap_or_else(|err| panic!("Migration `refresh()` : {}", err.to_string())),
                );
                collection_models
                    .update_one(query, update, None)
                    .unwrap_or_else(|err| panic!("Migration `refresh()` : {}", err.to_string()));
            }
        }
    }
}
fn napalm(&self, client_store: &std::sync::RwLockReadGuard<HashMap<String, Client>>) {
for meta in self.models.iter() {
let client: &Client = client_store.get(&meta.db_client_name).unwrap();
let db_mango_tech: String = self.mango_tech_name();
let collection_models_name: &str = "monitor_models";
let collection_dyn_widgets_name: &str = "dynamic_widgets";
let mango_tech_db: Database = client.database(&db_mango_tech);
let collection_models: Collection = mango_tech_db.collection(collection_models_name);
let collection_dyn_widgets: Collection = mango_tech_db.collection(collection_dyn_widgets_name);
let cursor: Cursor = collection_models.find(None, None)
.unwrap_or_else(|err| panic!("Migration `napalm()` : {}", err.to_string()));
let results: Vec<Result<Document, mongodb::error::Error>> = cursor.collect();
for result in results {
match result {
Ok(document) => {
let model_state: ModelState = bson::de::from_document(document)
.unwrap_or_else(|err| panic!("Migration `napalm()` : {}", err.to_string()));
if !model_state.status {
client
.database(&model_state.database)
.collection(&model_state.collection)
.drop(None)
.unwrap_or_else(|err| panic!("Migration `napalm()` : {}", err.to_string()));
let query: Document = bson::doc! {
"database": &model_state.database,
"collection": &model_state.collection
};
collection_models.delete_one(query.clone(), None)
.unwrap_or_else(|err| panic!("Migration `napalm()` : {}", err.to_string()));
collection_dyn_widgets.delete_one(query, None)
.unwrap_or_else(|err| panic!("Migration `napalm()` : {}", err.to_string()));
}
}
Err(err) => panic!("Migration `napalm()` : {}", err.to_string()),
}
}
}
}
/// Pass 2 (main pass) of a migration. For every registered model:
/// validate its metadata, reshape existing documents when the model's
/// field list changed, make sure its database/collection exist, and
/// synchronize the service collections (`monitor_models`,
/// `dynamic_widgets`) of the technical database.
/// Runs `refresh()` first and `napalm()` last.
pub fn migrat(&self) {
    // Registered mongodb clients, keyed by client name.
    let client_store: std::sync::RwLockReadGuard<HashMap<String, Client>> =
        DB_MAP_CLIENT_NAMES.read()
        .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
    // Pass 1: mark all stored model states as inactive.
    self.refresh(&client_store);
    for meta in self.models.iter() {
        // --- Validate model metadata -----------------------------------
        if !Regex::new(r"^[_a-zA-Z][_a-zA-Z\d]{1,31}$").unwrap().is_match(meta.service_name.as_str()) {
            panic!("Model: `{}` : Service_name - Valid characters: _ a-z A-Z 0-9 \
            ; Max size: 31 ; First character: _ a-z A-Z", meta.model_name);
        }
        // NOTE(review): this pattern accepts 15-63 characters while the
        // message below claims "Max size: 21" — one of the two looks out of
        // date; confirm which is intended.
        if !Regex::new(r"^[_a-zA-Z][_a-zA-Z\d]{14,62}$").unwrap().is_match(meta.database_name.as_str()) {
            panic!("Model: `{}` : Database name - Valid characters: _ a-z A-Z 0-9 \
            ; Max size: 21 ; First character: _ a-z A-Z", meta.model_name);
        }
        let client: &Client = client_store.get(&meta.db_client_name).unwrap();
        // --- Collect field names ---------------------------------------
        let fields_name: Vec<&str> =
            meta.fields_name.iter().map(|item| item.as_str()).collect();
        let ignore_fields: Vec<&str> = meta
            .ignore_fields
            .iter()
            .map(|item| item.as_str())
            .collect();
        // Persisted field list: everything except the service field `hash`
        // and the ignored fields.
        let trunc_list_fields_name: Vec<&str> = fields_name
            .iter()
            .filter(|item| **item != "hash" && !ignore_fields.contains(item))
            .map(|item| *item)
            .collect();
        let db_mango_tech: String = self.mango_tech_name();
        let database_names: Vec<String> = client.list_database_names(None, None)
            .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
        // Per-field defaults: field name -> (widget type, default value).
        let map_default_values: std::collections::HashMap<String, (String, String)> =
            meta.map_default_values.clone();
        // --- Reshape existing documents if the field list changed ------
        // State saved by the previous migration (if any).
        let filter: Document = mongodb::bson::doc! {
            "database": &meta.database_name,
            "collection": &meta.collection_name
        };
        let model: Option<Document> = client
            .database(&db_mango_tech)
            .collection("monitor_models")
            .find_one(filter, None)
            .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
        if model.is_some() {
            // Field names recorded by the previous migration.
            let monitor_models_fields_name: Vec<String> = {
                let model: Document = model.unwrap();
                let fields: Vec<mongodb::bson::Bson> =
                    model.get_array("fields").unwrap().to_vec();
                fields
                    .into_iter()
                    .map(|item: mongodb::bson::Bson| item.as_str().unwrap().to_string())
                    .collect()
            };
            // Detect whether the model's current field list differs from
            // the stored one.
            let mut run_documents_modification: bool = false;
            if trunc_list_fields_name.len() != monitor_models_fields_name.len() {
                run_documents_modification = true;
            } else {
                // NOTE(review): `any(|item2| item2 != item)` is true as soon
                // as ANY stored name differs from the current `item`, so with
                // two or more distinct field names this always triggers the
                // rewrite below; `!contains(item)` may have been intended —
                // confirm (the rewrite is idempotent, so this is a
                // performance rather than a correctness concern).
                for item in trunc_list_fields_name.iter() {
                    if monitor_models_fields_name.iter().any(|item2| item2 != item) {
                        run_documents_modification = true;
                        break;
                    }
                }
            }
            if run_documents_modification {
                // Rebuild every document of the model's collection.
                let db: Database = client.database(&meta.database_name);
                let collection: mongodb::sync::Collection =
                    db.collection(&meta.collection_name);
                let mut cursor: mongodb::sync::Cursor = collection.find(None, None)
                    .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
                while let Some(result) = cursor.next() {
                    let doc_from_db: mongodb::bson::document::Document =
                        result.unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
                    // New version of the document, built field by field.
                    let mut tmp_doc = mongodb::bson::document::Document::new();
                    for field in fields_name.iter() {
                        if *field == "hash" || ignore_fields.contains(&field) {
                            continue;
                        }
                        if doc_from_db.contains_key(field) {
                            // The field already exists — carry its value over.
                            let value_from_db: Option<&mongodb::bson::Bson> =
                                doc_from_db.get(field);
                            if value_from_db.is_some() {
                                tmp_doc.insert(field.to_string(), value_from_db.unwrap());
                            } else {
                                panic!(
                                    "Service: `{}` > Model: `{}` > Field: `{}` > \
                                    Method: `migrat()` : \
                                    Can't get field value from database.",
                                    meta.service_name, meta.model_name, field
                                );
                            }
                        } else {
                            // New field — insert its default value, converted
                            // to the BSON type matching the widget type.
                            let value = map_default_values.get(*field).unwrap();
                            tmp_doc.insert(
                                field.to_string(),
                                match value.0.as_str() {
                                    // Text-like widgets -> Bson::String.
                                    "checkBoxText" | "radioText" | "inputColor"
                                    | "inputEmail" | "inputPassword" | "inputPhone"
                                    | "inputText" | "inputUrl" | "inputIP" | "inputIPv4"
                                    | "inputIPv6" | "textArea" | "selectText" => {
                                        let val: String = value.1.clone();
                                        if !val.is_empty() {
                                            mongodb::bson::Bson::String(val)
                                        } else {
                                            mongodb::bson::Bson::Null
                                        }
                                    }
                                    // Date widget: `1970-02-28` -> UTC datetime at 00:00.
                                    "inputDate" => {
                                        let val: String = value.1.clone();
                                        if !val.is_empty() {
                                            if !crate::store::REGEX_IS_DATE.is_match(&val) {
                                                panic!(
                                                    "Service: `{}` > Model: `{}` > \
                                                    Method: `widgets()` : Incorrect date \
                                                    format. Example: 1970-02-28",
                                                    meta.service_name, meta.model_name
                                                )
                                            }
                                            let val = format!("{}T00:00", val);
                                            let dt: chrono::DateTime<chrono::Utc> =
                                                chrono::DateTime::<chrono::Utc>::from_utc(
                                                    chrono::NaiveDateTime::parse_from_str(
                                                        &val,
                                                        "%Y-%m-%dT%H:%M",
                                                    )
                                                    .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string())),
                                                    chrono::Utc,
                                                );
                                            mongodb::bson::Bson::DateTime(dt)
                                        } else {
                                            mongodb::bson::Bson::Null
                                        }
                                    }
                                    // Date-time widget: `1970-02-28T00:00` -> UTC datetime.
                                    "inputDateTime" => {
                                        let val: String = value.1.clone();
                                        if !val.is_empty() {
                                            if !crate::store::REGEX_IS_DATETIME.is_match(&val) {
                                                panic!(
                                                    "Service: `{}` > Model: `{}` > \
                                                    Method: `widgets()` : \
                                                    Incorrect date and time format. \
                                                    Example: 1970-02-28T00:00",
                                                    meta.service_name, meta.model_name
                                                )
                                            }
                                            let dt: chrono::DateTime<chrono::Utc> =
                                                chrono::DateTime::<chrono::Utc>::from_utc(
                                                    chrono::NaiveDateTime::parse_from_str(
                                                        &val,
                                                        "%Y-%m-%dT%H:%M",
                                                    )
                                                    .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string())),
                                                    chrono::Utc,
                                                );
                                            mongodb::bson::Bson::DateTime(dt)
                                        } else {
                                            mongodb::bson::Bson::Null
                                        }
                                    }
                                    // 32-bit integer widgets -> Bson::Int32.
                                    "checkBoxI32" | "inputRadioI32" | "numberI32"
                                    | "rangeI32" | "selectI32" => {
                                        let val: String = value.1.clone();
                                        if !val.is_empty() {
                                            mongodb::bson::Bson::Int32(
                                                val.parse::<i32>().unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string())),
                                            )
                                        } else {
                                            mongodb::bson::Bson::Null
                                        }
                                    }
                                    // u32 and 64-bit integer widgets -> Bson::Int64.
                                    "checkBoxU32" | "radioU32" | "numberU32" | "rangeU32"
                                    | "selectU32" | "checkBoxI64" | "radioI64"
                                    | "numberI64" | "rangeI64" | "selectI64" => {
                                        let val: String = value.1.clone();
                                        if !val.is_empty() {
                                            mongodb::bson::Bson::Int64(
                                                val.parse::<i64>().unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string())),
                                            )
                                        } else {
                                            mongodb::bson::Bson::Null
                                        }
                                    }
                                    // Floating point widgets -> Bson::Double.
                                    "checkBoxF64" | "radioF64" | "numberF64" | "rangeF64"
                                    | "selectF64" => {
                                        let val: String = value.1.clone();
                                        if !val.is_empty() {
                                            mongodb::bson::Bson::Double(
                                                val.parse::<f64>().unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string())),
                                            )
                                        } else {
                                            mongodb::bson::Bson::Null
                                        }
                                    }
                                    // Boolean widget; an empty default means `false`.
                                    "checkBoxBool" => {
                                        let val: String = value.1.clone();
                                        if !val.is_empty() {
                                            mongodb::bson::Bson::Boolean(
                                                val.parse::<bool>().unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string())),
                                            )
                                        } else {
                                            mongodb::bson::Bson::Boolean(false)
                                        }
                                    }
                                    // File widget: the default is FileData as JSON; the
                                    // file must exist, and `size`/`name` are filled in
                                    // from file-system metadata.
                                    "inputFile" => {
                                        let val: String = value.1.clone();
                                        if !val.is_empty() {
                                            let mut file_data =
                                                serde_json::from_str::<FileData>(val.as_str())
                                                .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
                                            // `path` and `url` must be both set or both empty.
                                            let is_emty_path = file_data.path.is_empty();
                                            let is_emty_url = file_data.url.is_empty();
                                            if (!is_emty_path && is_emty_url)
                                                || (is_emty_path && !is_emty_url) {
                                                panic!(
                                                    "Model: `{}` > Field: `{}` > Method: \
                                                    `migrat()` : Check the `path` and `url` \
                                                    attributes in the `default` field parameter.",
                                                    meta.model_name, field
                                                );
                                            }
                                            let path: String = file_data.path.clone();
                                            let f_path = std::path::Path::new(path.as_str());
                                            if !f_path.exists() || !f_path.is_file() {
                                                panic!(
                                                    "Model: `{}` > Field: `{}` > Method: \
                                                    `migrat()` : File is missing - {}",
                                                    meta.model_name, field, path
                                                )
                                            }
                                            let metadata: std::fs::Metadata = f_path.metadata()
                                                .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
                                            file_data.size = metadata.len() as u32;
                                            file_data.name = f_path.file_name().unwrap().to_str().unwrap().to_string();
                                            let result = mongodb::bson::ser::to_document(&file_data)
                                                .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
                                            mongodb::bson::Bson::Document(result)
                                        } else {
                                            mongodb::bson::Bson::Null
                                        }
                                    }
                                    // Image widget: like `inputFile`, plus image
                                    // `width`/`height` read from the file.
                                    "inputImage" => {
                                        let val: String = value.1.clone();
                                        if !val.is_empty() {
                                            let mut file_data =
                                                serde_json::from_str::<ImageData>(val.as_str())
                                                .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
                                            // `path` and `url` must be both set or both empty.
                                            let is_emty_path = file_data.path.is_empty();
                                            let is_emty_url = file_data.url.is_empty();
                                            if (!is_emty_path && is_emty_url)
                                                || (is_emty_path && !is_emty_url) {
                                                panic!(
                                                    "Model: `{}` > Field: `{}` > Method: \
                                                    `migrat()` : Check the `path` and `url` \
                                                    attributes in the `default` field parameter.",
                                                    meta.model_name, field
                                                );
                                            }
                                            let path: String = file_data.path.clone();
                                            let f_path = std::path::Path::new(path.as_str());
                                            if !f_path.exists() || !f_path.is_file() {
                                                panic!(
                                                    "Model: `{}` > Field: `{}` > Method: \
                                                    `migrat()` : File is missing - {}",
                                                    meta.model_name, field, path
                                                )
                                            }
                                            let metadata: std::fs::Metadata = f_path
                                                .metadata()
                                                .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
                                            file_data.size = metadata.len() as u32;
                                            file_data.name = f_path.file_name().unwrap().to_str().unwrap().to_string();
                                            let dimensions: (u32, u32) = image::image_dimensions(path)
                                                .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
                                            file_data.width = dimensions.0;
                                            file_data.height = dimensions.1;
                                            let result = mongodb::bson::ser::to_document(&file_data)
                                                .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
                                            mongodb::bson::Bson::Document(result)
                                        } else {
                                            mongodb::bson::Bson::Null
                                        }
                                    }
                                    _ => panic!(
                                        "Service: `{}` > Model: `{}` > Method: \
                                        `migrat()` : Invalid Widget type.",
                                        meta.service_name, meta.model_name
                                    ),
                                },
                            );
                        }
                    }
                    // Service timestamps must already be present in every document.
                    for field in vec!["created_at", "updated_at"] {
                        if doc_from_db.contains_key(field) {
                            let value_from_db: Option<&mongodb::bson::Bson> =
                                doc_from_db.get(field);
                            if value_from_db.is_some() {
                                tmp_doc.insert(field.to_string(), value_from_db.unwrap());
                            } else {
                                panic!(
                                    "Service: `{}` > Model: `{}` > \
                                    Method: `migrat()` : \
                                    Cannot get field value from database for \
                                    field `{}`.",
                                    meta.service_name, meta.model_name, field
                                );
                            }
                        } else {
                            panic!(
                                "Service: `{}` > Model: `{}` > Method: `migrat()` : \
                                Key `{}` was not found in the document from \
                                the database.",
                                meta.service_name, meta.model_name, field
                            );
                        }
                    }
                    // Replace the document's fields with the rebuilt set.
                    let query =
                        mongodb::bson::doc! {"_id": doc_from_db.get_object_id("_id")
                        .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()))
                        };
                    let mut update: Document = mongodb::bson::document::Document::new();
                    update.insert("$set".to_string(), mongodb::bson::Bson::Document(tmp_doc));
                    collection.update_one(query, update, None)
                        .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
                }
            }
        }
        // --- Make sure the model's own database/collection exist -------
        let db: Database = client.database(&meta.database_name);
        if !database_names.contains(&meta.database_name)
            || !db
                .list_collection_names(None)
                .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()))
                .contains(&meta.collection_name)
        {
            db.create_collection(&meta.collection_name, None)
                .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
        }
        // --- Upsert the model state into `monitor_models` --------------
        let db: Database = client.database(&db_mango_tech);
        if !database_names.contains(&db_mango_tech)
            || !db
                .list_collection_names(None)
                .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()))
                .contains(&"monitor_models".to_owned())
        {
            panic!("In the `refresh()` method, no technical database has been created for the project.");
        } else {
            let collection: Collection = db.collection("monitor_models");
            let filter: Document = mongodb::bson::doc! {
                "database": &meta.database_name,
                "collection": &meta.collection_name
            };
            // `status: true` re-activates this model so `napalm()` keeps it.
            let doc: Document = mongodb::bson::doc! {
                "database": &meta.database_name,
                "collection": &meta.collection_name,
                "fields": trunc_list_fields_name.iter().map(|item| item.to_string())
                    .collect::<Vec<String>>(),
                "status": true
            };
            if collection.count_documents(filter.clone(), None)
                .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()))
                == 0_i64 {
                collection.insert_one(doc, None)
                    .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
            } else {
                let update = UpdateModifications::Document(doc);
                collection.update_one(filter, update, None)
                    .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
            }
        }
        // --- Synchronize the `dynamic_widgets` service document --------
        if !database_names.contains(&db_mango_tech)
            || !db
                .list_collection_names(None)
                .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()))
                .contains(&"dynamic_widgets".to_owned())
        {
            panic!("In the `refresh()` method, no technical database has been created for the project.");
        } else {
            let collection: Collection = db.collection("dynamic_widgets");
            let filter: Document = mongodb::bson::doc! {
                "database": &meta.database_name,
                "collection": &meta.collection_name
            };
            if collection.count_documents(filter.clone(), None)
                .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()))
                == 0_i64 {
                // First migration for this model: create the document with an
                // empty option list for every dynamic (`...Dyn`) widget.
                let mut new_doc: Document = mongodb::bson::doc! {
                    "database": &meta.database_name,
                    "collection": &meta.collection_name,
                    "fields": {}
                };
                let mut fields_doc: Document = mongodb::bson::document::Document::new();
                for (field, widget) in meta.map_widget_type.clone() {
                    if widget.contains("Dyn") {
                        fields_doc.insert(field, mongodb::bson::Bson::Array(Vec::new()));
                    }
                }
                new_doc.insert("fields".to_string(), fields_doc);
                collection.insert_one(new_doc, None)
                    .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
            } else {
                // The document exists: add entries for new dynamic fields and
                // drop entries whose fields are no longer dynamic widgets.
                let mut exist_doc = collection.find_one(filter.clone(), None)
                    .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string())).unwrap();
                let fields_doc = exist_doc.get_document_mut("fields")
                    .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
                let dyn_fields_from_db: Vec<String> = fields_doc.keys().map(|item| item.into()).collect();
                let mut dyn_fields_from_model: Vec<String> = Vec::new();
                for (field, widget) in meta.map_widget_type.clone() {
                    if widget.contains("Dyn") {
                        dyn_fields_from_model.push(field.clone());
                        if !dyn_fields_from_db.contains(&field) {
                            fields_doc.insert(field, mongodb::bson::Bson::Array(Vec::new()));
                        }
                    }
                }
                for field in dyn_fields_from_db {
                    if !dyn_fields_from_model.contains(&field) {
                        fields_doc.remove(&field).unwrap();
                    }
                }
                let update: UpdateModifications = UpdateModifications::Document(exist_doc);
                collection.update_one(filter, update, None)
                    .unwrap_or_else(|err| panic!("Migration `migrat()` : {}", err.to_string()));
            }
        }
    }
    // Pass 3: drop the data of models that are no longer registered.
    self.napalm(&client_store);
}
}