pub mod coordinate;
#[cfg(feature = "schemasync")]
pub mod field_value;
#[cfg(feature = "schemasync")]
pub mod field_value_recursive;
pub mod format;
#[cfg(feature = "wasm-plugins")]
pub mod plugin;
pub mod plugin_types;
#[cfg(feature = "schemasync")]
pub mod regex_val_gen;
#[cfg(feature = "surrealdb")]
use crate::{
dependency::sort_tables_by_dependencies,
evenframe_log,
schemasync::TableConfig,
schemasync::compare::surql::SurrealdbComparator,
schemasync::mockmake::coordinate::{
CoherentDataset, Coordination, CoordinationGroup, CoordinationId, CoordinationPair,
},
schemasync::mockmake::format::Format,
schemasync::{PreservationMode, database::surql::access::execute_access_query},
types::{StructConfig, StructField, TaggedUnion},
wrappers::EvenframeRecordId,
};
#[cfg(feature = "surrealdb")]
use rand::RngExt;
#[cfg(feature = "surrealdb")]
use std::collections::{HashMap, HashSet};
#[cfg(feature = "surrealdb")]
use surrealdb::Surreal;
#[cfg(feature = "surrealdb")]
use surrealdb::engine::local::Db;
#[cfg(feature = "surrealdb")]
use surrealdb::engine::remote::http::Client;
#[cfg(feature = "surrealdb")]
use uuid::Uuid;
/// Orchestrates the Schemasync mock-data pipeline against a SurrealDB
/// instance: ID generation, stale-data removal, access queries, change
/// filtering, coordinated-value generation, and mock-record insertion.
///
/// Borrows all schema inputs for lifetime `'a`; owns only the derived
/// per-run state (`id_map`, `record_diffs`, filtered copies, coordinated
/// values).
#[cfg(feature = "surrealdb")]
#[derive(Debug)]
pub struct Mockmaker<'a> {
/// Remote database handle used for all queries in this run.
db: &'a Surreal<Client>,
/// Table configs keyed by table name (source of truth for what to mock).
pub(super) tables: &'a HashMap<String, TableConfig>,
/// Non-table struct definitions keyed by type name.
objects: &'a HashMap<String, StructConfig>,
/// Tagged-union (enum) definitions keyed by type name.
enums: &'a HashMap<String, TaggedUnion>,
/// Global Schemasync settings (batch sizes, full-refresh mode, plugins, ...).
pub(super) schemasync_config: &'a crate::schemasync::config::SchemasyncConfig,
/// Schema comparator; `Some` after construction. Several pipeline steps
/// unwrap this, so it must not be taken before they run.
pub comparator: Option<SurrealdbComparator<'a>>,
/// Registry of foreign (externally defined) types.
pub(super) registry: &'a crate::types::ForeignTypeRegistry,
/// Table name -> record IDs to upsert, filled by `generate_ids`.
pub(super) id_map: HashMap<String, Vec<String>>,
/// Table name -> (desired - existing) record count, filled by `generate_ids`.
pub(super) record_diffs: HashMap<String, i32>,
/// Tables selected for regeneration, filled by `filter_changes`.
filtered_tables: HashMap<String, TableConfig>,
/// Objects selected for regeneration, filled by `filter_changes`.
filtered_objects: HashMap<String, StructConfig>,
/// Pre-computed values shared across coordinated fields, keyed by
/// (table, field) coordination ID.
pub coordinated_values: HashMap<CoordinationId, String>,
/// Lazily-initialized WASM plugin host; `None` when no plugins are
/// configured or initialization failed (see `new`).
#[cfg(feature = "wasm-plugins")]
pub(super) plugin_manager: Option<std::cell::RefCell<plugin::PluginManager>>,
}
#[cfg(feature = "surrealdb")]
impl<'a> Mockmaker<'a> {
/// Constructs a `Mockmaker` over borrowed schema inputs.
///
/// The comparator is created eagerly; derived per-run state starts empty.
/// When the `wasm-plugins` feature is on, the plugin manager is initialized
/// from `schemasync_config.plugins` — a failed initialization is logged and
/// degrades to `None` rather than aborting construction.
pub fn new(
    db: &'a Surreal<Client>,
    tables: &'a HashMap<String, TableConfig>,
    objects: &'a HashMap<String, StructConfig>,
    enums: &'a HashMap<String, TaggedUnion>,
    schemasync_config: &'a crate::schemasync::config::SchemasyncConfig,
    registry: &'a crate::types::ForeignTypeRegistry,
) -> Self {
    // Resolve the optional plugin host up front so the struct literal below
    // stays flat.
    #[cfg(feature = "wasm-plugins")]
    let plugin_manager = if schemasync_config.plugins.is_empty() {
        None
    } else {
        let project_root = std::env::current_dir().unwrap_or_default();
        match plugin::PluginManager::new(&schemasync_config.plugins, &project_root) {
            Ok(pm) => Some(std::cell::RefCell::new(pm)),
            Err(e) => {
                tracing::error!("Failed to initialize WASM plugins: {}", e);
                None
            }
        }
    };
    Self {
        db,
        tables,
        objects,
        enums,
        schemasync_config,
        comparator: Some(SurrealdbComparator::new(db, schemasync_config)),
        registry,
        id_map: HashMap::new(),
        record_diffs: HashMap::new(),
        filtered_tables: HashMap::new(),
        filtered_objects: HashMap::new(),
        coordinated_values: HashMap::new(),
        #[cfg(feature = "wasm-plugins")]
        plugin_manager,
    }
}
/// Runs the full mock-data pipeline in order, consuming `self`.
///
/// Steps: generate IDs -> remove stale data -> execute access queries ->
/// filter changed tables/objects -> coordinate shared values -> insert
/// mock records. Any step's error aborts the pipeline.
///
/// # Errors
/// Propagates the first error returned by any pipeline step.
pub async fn run(mut self) -> Result<(), Box<dyn std::error::Error>> {
    tracing::info!("Starting Mockmaker pipeline");
    tracing::debug!("Step 1: Generating IDs for mock data");
    self.generate_ids().await?;
    // The original log sequence contained a placeholder "Step 2: ??" with no
    // corresponding action; steps are renumbered to match the actual work.
    tracing::debug!("Step 2: Removing old data based on schema changes");
    self.remove_old_data().await?;
    tracing::debug!("Step 3: Executing access queries");
    self.execute_access().await?;
    tracing::debug!("Step 4: Filtering changed tables and objects");
    self.filter_changes().await?;
    tracing::debug!("Step 5: Generating coordinated values");
    self.generate_coordinated_values();
    tracing::debug!("Step 6: Generating mock data");
    self.generate_mock_data().await?;
    tracing::info!("Mockmaker pipeline completed successfully");
    Ok(())
}
pub async fn generate_ids(&mut self) -> Result<(), Box<dyn std::error::Error>> {
evenframe_log!("", "record_diffs.log");
tracing::trace!("Starting ID generation for all tables");
let mut map = HashMap::new();
let mut record_diffs = HashMap::new();
let full_refresh = self.schemasync_config.mock_gen_config.full_refresh_mode;
for (table_name, table_config) in self.tables {
tracing::trace!(table = %table_name, "Generating IDs for table");
let desired_count =
if let Some(mock_generation_config) = &table_config.mock_generation_config {
mock_generation_config.n
} else {
self.schemasync_config.mock_gen_config.default_batch_size
};
if full_refresh {
let ids: Vec<String> = (1..=desired_count)
.map(|i| format!("{table_name}:{i}"))
.collect();
tracing::trace!(
table = %table_name,
desired_count = desired_count,
"Full refresh mode - generating fresh sequential IDs"
);
record_diffs.insert(table_name.clone(), desired_count as i32);
map.insert(table_name.clone(), ids);
continue;
}
let query = format!("SELECT id FROM {table_name};",);
tracing::trace!("Querying existing IDs {query}");
let mut response = self.db.query(query).await.expect(
"Something went wrong getting the ids from the db for mock data generation",
);
evenframe_log!(&format!("{:?}", response), "record_diffs.log", true);
let existing_values: Vec<serde_json::Value> = response.take(0).unwrap_or_default();
struct IdResponse {
id: EvenframeRecordId,
}
let existing_ids: Vec<IdResponse> = existing_values
.into_iter()
.filter_map(|v| {
v.get("id")
.and_then(|id| id.as_str())
.map(|id_str| IdResponse {
id: EvenframeRecordId::from(id_str.to_string()),
})
})
.collect();
let mut ids = Vec::new();
let existing_count = existing_ids.len();
let record_diff = desired_count as i32 - existing_count as i32;
tracing::trace!(
table = %table_name,
existing_count = existing_count,
desired_count = desired_count,
record_diff = record_diff,
"Calculated record difference"
);
record_diffs.insert(table_name.clone(), record_diff);
if existing_count >= desired_count {
for (i, record) in existing_ids.into_iter().enumerate() {
if i < desired_count {
let id_string = record.id.to_string();
ids.push(id_string);
} else {
break;
}
}
} else {
for record in existing_ids {
ids.push(record.id.to_string());
}
let mut next_id = existing_count + 1;
while ids.len() < desired_count {
ids.push(format!("{table_name}:{next_id}"));
next_id += 1;
}
}
map.insert(table_name.clone(), ids.clone());
}
self.id_map = map;
self.record_diffs = record_diffs;
tracing::debug!(table_count = self.id_map.len(), "ID generation complete");
evenframe_log!(
format!("Record count differences: {:#?}", self.record_diffs),
"record_diffs.log",
true
);
Ok(())
}
/// Deletes data made stale by schema changes.
///
/// In full-refresh mode every record of every configured table is deleted
/// (plus any stale fields/tables the comparator reports). Otherwise only
/// the comparator-derived remove statements are executed.
///
/// # Errors
/// Returns an error if the comparator or its schema changes are missing
/// in incremental mode (previously a panic via `unwrap`), or if a delete
/// query fails.
pub async fn remove_old_data(&mut self) -> Result<(), Box<dyn std::error::Error>> {
    tracing::trace!("Removing old data based on schema changes");
    if self.schemasync_config.mock_gen_config.full_refresh_mode {
        tracing::info!("Full refresh mode - deleting all records from all tables");
        let mut delete_all = String::new();
        for table_name in self.tables.keys() {
            delete_all.push_str(&format!("DELETE {};\n", table_name));
        }
        // Comparator is optional here: full refresh still works without it.
        if let Some(comparator) = self.comparator.as_ref()
            && let Some(schema_changes) = comparator.get_schema_changes()
        {
            let remove_stmts = self.generate_remove_statements(schema_changes);
            if !remove_stmts.is_empty() {
                tracing::info!("Full refresh mode - removing stale fields/tables");
                delete_all.push_str(&remove_stmts);
            }
        }
        if !delete_all.is_empty() {
            evenframe_log!(&delete_all, "remove_statements.surql");
            self.db.query(delete_all).await?;
        }
        tracing::trace!("Full refresh data deletion complete");
        return Ok(());
    }
    // Incremental mode requires a comparator with computed schema changes.
    let comparator = self
        .comparator
        .as_ref()
        .ok_or("schema comparator is not initialized")?;
    let schema_changes = comparator
        .get_schema_changes()
        .ok_or("schema changes have not been computed yet")?;
    let remove_statements = self.generate_remove_statements(schema_changes);
    tracing::debug!(
        statement_length = remove_statements.len(),
        "Generated remove statements"
    );
    evenframe_log!(&remove_statements, "remove_statements.surql");
    if !remove_statements.is_empty() {
        tracing::trace!("Executing remove statements");
        self.db.query(remove_statements).await?;
    }
    tracing::trace!("Old data removal complete");
    Ok(())
}
/// Executes the comparator-derived access (DEFINE ACCESS) query.
///
/// # Errors
/// Returns an error if the comparator is missing (previously a panic via
/// `unwrap`) or if the access query fails to execute.
pub async fn execute_access(&mut self) -> Result<(), Box<dyn std::error::Error>> {
    tracing::trace!("Executing access definitions");
    let comparator = self
        .comparator
        .as_ref()
        .ok_or("schema comparator is not initialized")?;
    let access_query = comparator.get_access_query();
    tracing::debug!(query_length = access_query.len(), "Executing access query");
    execute_access_query(self.db, access_query).await
}
/// Selects which tables and objects need mock-data regeneration, storing
/// the result in `self.filtered_tables` / `self.filtered_objects`.
///
/// Full-refresh mode regenerates everything; incremental mode keeps only
/// items affected by schema changes or record-count differences.
///
/// # Errors
/// Returns an error if the comparator or its schema changes are missing
/// (previously a panic via `unwrap`).
pub async fn filter_changes(&mut self) -> Result<(), Box<dyn std::error::Error>> {
    tracing::trace!("Filtering changes based on schema comparison");
    let comparator = self
        .comparator
        .as_ref()
        .ok_or("schema comparator is not initialized")?;
    let schema_changes = comparator
        .get_schema_changes()
        .ok_or("schema changes have not been computed yet")?;
    let (filtered_tables, filtered_objects) =
        if self.schemasync_config.mock_gen_config.full_refresh_mode {
            tracing::debug!("Full refresh mode enabled - using all tables and objects");
            (self.tables.clone(), self.objects.clone())
        } else {
            tracing::debug!("Incremental mode - filtering changed items only");
            self.filter_changed_tables_and_objects(
                schema_changes,
                self.tables,
                self.objects,
                self.enums,
                &self.record_diffs,
            )
        };
    self.filtered_tables = filtered_tables;
    self.filtered_objects = filtered_objects;
    tracing::info!(
        filtered_tables = self.filtered_tables.len(),
        filtered_objects = self.filtered_objects.len(),
        "Filtering complete"
    );
    evenframe_log!(
        format!("{:#?}{:#?}", self.filtered_objects, self.filtered_tables),
        "filtered.log"
    );
    Ok(())
}
/// Generates and executes mock-data statements for every filtered table,
/// in dependency order. Relation tables get INSERT statements; regular
/// tables get UPSERT statements.
///
/// # Errors
/// Returns the first execution/validation error; the failing table is
/// logged before returning.
pub(super) async fn generate_mock_data(&self) -> Result<(), Box<dyn std::error::Error>> {
    // Hoisted out of the per-table loop; it is a plain import, not per-table
    // work.
    use crate::schemasync::database::surql::execute::execute_and_validate;
    tracing::trace!("Starting mock data generation");
    let sorted_table_names =
        sort_tables_by_dependencies(&self.filtered_tables, &self.filtered_objects, self.enums);
    tracing::debug!(
        table_count = sorted_table_names.len(),
        "Tables sorted by dependencies"
    );
    evenframe_log!(
        &format!("Sorted table order: {sorted_table_names:?}"),
        "table_order.log",
        true
    );
    for table_name in &sorted_table_names {
        // `get` already yields Option<&TableConfig>; the original extra `&`
        // produced a needless `&&TableConfig`.
        if let Some(table) = self.filtered_tables.get(table_name) {
            tracing::trace!(
                table = %table_name,
                is_relation = table.relation.is_some(),
                "Processing table for mock data"
            );
            if self.schemasync_config.should_generate_mocks {
                // Track which statement kind was generated so error context
                // no longer claims "UPSERT" for relation INSERTs.
                let (stmts, statement_kind) = if table.relation.is_some() {
                    tracing::trace!(table = %table_name, "Generating INSERT statements for relation");
                    (self.generate_insert_statements(table_name, table), "INSERT")
                } else {
                    tracing::trace!(table = %table_name, "Generating UPSERT statements for table");
                    (self.generate_upsert_statements(table_name, table), "UPSERT")
                };
                tracing::debug!(
                    table = %table_name,
                    statement_count = stmts.lines().count(),
                    "Generated mock data statements"
                );
                evenframe_log!(&stmts, "all_statements.surql", true);
                match execute_and_validate(self.db, &stmts, statement_kind, table_name).await {
                    Ok(_results) => {
                        tracing::debug!(table = %table_name, "Mock data inserted successfully");
                    }
                    Err(e) => {
                        tracing::error!(
                            table = %table_name,
                            error = %e,
                            "Failed to execute statements"
                        );
                        let error_msg = format!(
                            "Failed to execute {} statements for table {}: {}",
                            statement_kind.to_lowercase(),
                            table_name,
                            e
                        );
                        evenframe_log!(&error_msg, "results.log", true);
                        return Err(e);
                    }
                }
            }
        }
    }
    tracing::info!("Mock data generation complete");
    Ok(())
}
/// Returns the comparator's in-memory "new schema" database, if both the
/// comparator and its snapshot exist.
pub fn get_new_schema(&self) -> Option<&Surreal<Db>> {
    self.comparator
        .as_ref()
        .and_then(|comparator| comparator.get_new_schema())
}
/// Produces a random alphanumeric string of exactly `len` characters
/// using the thread-local RNG.
pub fn random_string(len: usize) -> String {
    use rand::distr::Alphanumeric;
    let mut rng = rand::rng();
    std::iter::repeat_with(|| rng.sample(Alphanumeric) as char)
        .take(len)
        .collect()
}
pub fn build_coordination_groups(&mut self) -> Vec<CoordinationGroup> {
let mut coordination_groups = Vec::new();
let mut coordination_map: HashMap<String, Vec<(String, Coordination)>> = HashMap::new();
for (table_name, table_config) in self.tables {
if let Some(ref mock_config) = table_config.mock_generation_config {
for coordination in &mock_config.coordination_rules {
let field_names = match coordination {
Coordination::InitializeEqual(fields) => fields.clone(),
Coordination::InitializeSequential { field_names, .. } => {
field_names.clone()
}
Coordination::InitializeSum { field_names, .. } => field_names.clone(),
Coordination::InitializeDerive {
source_field_names,
target_field_name,
..
} => {
let mut all_fields = source_field_names.clone();
all_fields.push(target_field_name.clone());
all_fields
}
Coordination::OneToOne(field_name) => vec![field_name.clone()],
Coordination::InitializeCoherent(dataset) => match dataset {
CoherentDataset::Address {
city,
state,
zip,
country,
} => [city, state, zip, country]
.into_iter()
.filter(|s| !s.is_empty())
.cloned()
.collect(),
CoherentDataset::PersonName {
first_name,
last_name,
full_name,
} => [first_name, last_name, full_name]
.into_iter()
.filter(|s| !s.is_empty())
.cloned()
.collect(),
CoherentDataset::GeoLocation {
latitude,
longitude,
city,
country,
} => [latitude, longitude, city, country]
.into_iter()
.filter(|s| !s.is_empty())
.cloned()
.collect(),
CoherentDataset::DateRange {
start_date,
end_date,
} => vec![start_date.clone(), end_date.clone()],
CoherentDataset::GeoRadius {
latitude,
longitude,
..
} => [latitude, longitude]
.into_iter()
.filter(|s| !s.is_empty())
.cloned()
.collect(),
},
};
let mut sorted_fields = field_names.clone();
sorted_fields.sort();
let coordination_key = format!("{:?}", sorted_fields);
coordination_map
.entry(coordination_key)
.or_default()
.push((table_name.clone(), coordination.clone()));
}
}
}
for (_coordination_key, table_coordinations) in coordination_map {
let mut group = CoordinationGroup::builder().id(Uuid::new_v4()).build();
let mut group_tables = HashSet::new();
let mut group_pairs = Vec::new();
let mut coordination_by_type: HashMap<String, Vec<(String, Coordination)>> =
HashMap::new();
for (table_name, coordination) in table_coordinations {
let type_key = match &coordination {
Coordination::InitializeEqual(_) => "equal",
Coordination::InitializeSequential { .. } => "sequential",
Coordination::InitializeSum { .. } => "sum",
Coordination::InitializeDerive { .. } => "derive",
Coordination::OneToOne(_) => "one_to_one",
Coordination::InitializeCoherent(_) => "coherent",
};
coordination_by_type
.entry(type_key.to_string())
.or_default()
.push((table_name.clone(), coordination.clone()));
group_tables.insert(table_name);
}
for typed_coordinations in coordination_by_type.values() {
let mut processed = HashSet::new();
for (_, coordination) in typed_coordinations {
let coord_str = format!("{:?}", coordination);
if processed.contains(&coord_str) {
continue;
}
processed.insert(coord_str.clone());
let field_names = match coordination {
Coordination::InitializeEqual(fields) => fields.clone(),
Coordination::InitializeSequential { field_names, .. } => {
field_names.clone()
}
Coordination::InitializeSum { field_names, .. } => field_names.clone(),
Coordination::InitializeDerive {
source_field_names,
target_field_name,
..
} => {
let mut all_fields = source_field_names.clone();
all_fields.push(target_field_name.clone());
all_fields
}
Coordination::OneToOne(field_name) => vec![field_name.clone()],
Coordination::InitializeCoherent(dataset) => match dataset {
CoherentDataset::Address {
city,
state,
zip,
country,
} => [city, state, zip, country]
.into_iter()
.filter(|s| !s.is_empty())
.cloned()
.collect(),
CoherentDataset::PersonName {
first_name,
last_name,
full_name,
} => [first_name, last_name, full_name]
.into_iter()
.filter(|s| !s.is_empty())
.cloned()
.collect(),
CoherentDataset::GeoLocation {
latitude,
longitude,
city,
country,
} => [latitude, longitude, city, country]
.into_iter()
.filter(|s| !s.is_empty())
.cloned()
.collect(),
CoherentDataset::DateRange {
start_date,
end_date,
} => vec![start_date.clone(), end_date.clone()],
CoherentDataset::GeoRadius {
latitude,
longitude,
..
} => [latitude, longitude]
.into_iter()
.filter(|s| !s.is_empty())
.cloned()
.collect(),
},
};
let mut coordinated_fields = Vec::new();
for field_name in &field_names {
for (t_name, t_coord) in typed_coordinations {
let t_fields = match t_coord {
Coordination::InitializeEqual(f) => f.clone(),
Coordination::InitializeSequential { field_names: f, .. } => {
f.clone()
}
Coordination::InitializeSum { field_names: f, .. } => f.clone(),
Coordination::InitializeDerive {
source_field_names,
target_field_name,
..
} => {
let mut all = source_field_names.clone();
all.push(target_field_name.clone());
all
}
Coordination::OneToOne(f) => vec![f.clone()],
Coordination::InitializeCoherent(d) => match d {
CoherentDataset::Address {
city,
state,
zip,
country,
} => [city, state, zip, country]
.into_iter()
.filter(|s| !s.is_empty())
.cloned()
.collect(),
CoherentDataset::PersonName {
first_name,
last_name,
full_name,
} => [first_name, last_name, full_name]
.into_iter()
.filter(|s| !s.is_empty())
.cloned()
.collect(),
CoherentDataset::GeoLocation {
latitude,
longitude,
city,
country,
} => [latitude, longitude, city, country]
.into_iter()
.filter(|s| !s.is_empty())
.cloned()
.collect(),
CoherentDataset::DateRange {
start_date,
end_date,
} => {
vec![start_date.clone(), end_date.clone()]
}
CoherentDataset::GeoRadius {
latitude,
longitude,
..
} => [latitude, longitude]
.into_iter()
.filter(|s| !s.is_empty())
.cloned()
.collect(),
},
};
if t_fields.contains(field_name) {
coordinated_fields.push(
CoordinationId::builder()
.table_name(t_name.clone())
.field_name(field_name.clone())
.build(),
);
}
}
}
if !coordinated_fields.is_empty() {
match coordination.validate(self, &coordinated_fields) {
Ok(()) => {
let pair = CoordinationPair::builder()
.coordinated_fields(coordinated_fields)
.coordination(coordination.clone())
.build();
group_pairs.push(pair);
}
Err(e) => {
tracing::error!(
"Skipping invalid coordination for tables {:?}: {}",
group_tables,
e
);
evenframe_log!(
format!(
"ERROR: Invalid coordination skipped\nTables: {:?}\nCoordination: {:?}\nError: {}\n",
group_tables, coordination, e
),
"coordination_validation_errors.log",
true
);
}
}
}
}
}
if !group_pairs.is_empty() {
group.tables = group_tables;
group.coordination_pairs = group_pairs;
coordination_groups.push(group);
}
}
coordination_groups
}
}
#[cfg(not(feature = "surrealdb"))]
use crate::schemasync::PreservationMode;
#[cfg(not(feature = "surrealdb"))]
use crate::schemasync::mockmake::format::Format;
#[cfg(not(feature = "surrealdb"))]
use crate::types::StructField;
/// Per-table mock-data generation settings (record counts, batching,
/// coordination rules, and preservation behavior across regenerations).
#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
pub struct MockGenerationConfig {
/// Number of mock records to generate for the table.
pub n: usize,
/// Optional per-field format overrides applied at the table level.
pub table_level_override: Option<std::collections::HashMap<StructField, Format>>,
/// Rules linking field values within/across tables (equal, sequential,
/// sums, coherent datasets, ...).
pub coordination_rules: Vec<crate::schemasync::mockmake::coordinate::Coordination>,
/// How many statements to execute per batch.
pub batch_size: usize,
/// Field names to always regenerate, even when preservation would
/// otherwise keep their existing values.
pub regenerate_fields: Vec<String>,
/// Strategy for keeping existing data during regeneration.
pub preservation_mode: PreservationMode,
/// Optional WASM plugin name used to generate this table's values.
#[serde(default)]
pub plugin: Option<String>,
}
impl Default for MockGenerationConfig {
    /// Seeds defaults from the Evenframe config file; when the config cannot
    /// be loaded, falls back to 10 records, batches of 1000, and
    /// `PreservationMode::Smart`.
    fn default() -> Self {
        let (n, batch_size, preservation_mode) = crate::config::EvenframeConfig::new()
            .map(|config| {
                (
                    config.schemasync.mock_gen_config.default_record_count,
                    config.schemasync.mock_gen_config.default_batch_size,
                    config.schemasync.mock_gen_config.default_preservation_mode,
                )
            })
            .unwrap_or((10, 1000, PreservationMode::Smart));
        Self {
            n,
            table_level_override: None,
            coordination_rules: Vec::new(),
            batch_size,
            regenerate_fields: vec![],
            preservation_mode,
            plugin: None,
        }
    }
}
impl quote::ToTokens for MockGenerationConfig {
    /// Emits a `MockGenerationConfig { .. }` struct literal for use in
    /// generated code.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        let n = self.n;
        let batch_size = self.batch_size;
        // TODO: coordination rules are not serializable to tokens yet. The
        // original code had an if/else on `coordination_rules.is_empty()`
        // whose branches BOTH produced `vec![]`, so non-empty rules are
        // silently dropped in generated output. The dead branch is removed;
        // the data loss remains and should be addressed by implementing
        // ToTokens for Coordination.
        let coordination_rules_tokens = quote::quote! { vec![] };
        let regenerate_fields = &self.regenerate_fields;
        let preservation_mode_tokens = match &self.preservation_mode {
            PreservationMode::Smart => {
                quote::quote! { ::evenframe::schemasync::PreservationMode::Smart }
            }
            PreservationMode::Full => {
                quote::quote! { ::evenframe::schemasync::PreservationMode::Full }
            }
            PreservationMode::None => {
                quote::quote! { ::evenframe::schemasync::PreservationMode::None }
            }
        };
        let plugin_tokens = match &self.plugin {
            Some(name) => quote::quote! { Some(#name.to_string()) },
            None => quote::quote! { None },
        };
        // `table_level_override` is intentionally emitted as None: format
        // overrides are not round-tripped through codegen.
        let config_tokens = quote::quote! {
            MockGenerationConfig {
                n: #n,
                table_level_override: None,
                coordination_rules: #coordination_rules_tokens,
                batch_size: #batch_size,
                regenerate_fields: vec![#(#regenerate_fields.to_string()),*],
                preservation_mode: #preservation_mode_tokens,
                plugin: #plugin_tokens,
            }
        };
        tokens.extend(config_tokens);
    }
}