use serde_json::{Map, Number, Value};
use std::{collections::HashMap, ffi::OsString, fs, io::BufWriter, sync::RwLock};
use crate::{
Db, FieldInfo, JsonPrimitive,
database::{ColumnValue, DbConfig, ExpansionChain, IdManager, IdType, IdValue, SchemaDict},
};
/// Thread-safe handle to a collection: every access goes through an `RwLock`.
pub(crate) type MemoryCollection = RwLock<InternalMemoryCollection>;
/// In-memory store for one collection: JSON rows indexed by their string id,
/// plus id generation and an optionally inferred schema.
pub(crate) struct InternalMemoryCollection {
    // Rows keyed by their stringified id.
    collection: HashMap<String, Value>,
    // Generates and tracks ids according to the configured id type.
    id_manager: IdManager,
    // Collection configuration (id key name, id type, ...).
    config: DbConfig,
    // Collection name, lower-cased on construction.
    pub name: String,
    // Schema inferred from inserted items; seeded with the id field.
    pub schema: Option<SchemaDict>,
}
impl InternalMemoryCollection {
/// Builds an empty collection named `name` (lower-cased) with the given
/// configuration. The initial schema contains only the id field, typed
/// `Int` for integer ids and `String` otherwise.
pub fn new(name: &str, config: DbConfig) -> Self {
    let id_field = FieldInfo {
        nullable: false,
        ty: if matches!(config.id_type, IdType::Int) {
            JsonPrimitive::Int
        } else {
            JsonPrimitive::String
        },
    };
    let mut schema = SchemaDict::default();
    schema.fields.insert(config.id_key.clone(), id_field);
    Self {
        collection: HashMap::new(),
        id_manager: IdManager::new(config.id_type),
        config,
        name: name.to_ascii_lowercase(),
        schema: Some(schema),
    }
}
pub fn into_protected(self) -> MemoryCollection {
RwLock::new(self)
}
/// Returns a clone of the inferred schema, if any.
pub fn schema(&self) -> Option<SchemaDict> {
    // `Option<SchemaDict>` clones directly; `as_ref().cloned()` was a
    // roundabout spelling of the same thing.
    self.schema.clone()
}
/// Derives the column name other collections use to reference this one:
/// singularized collection name + `_` + id key without a leading underscore
/// (e.g. `users` with id key `_id` -> `user_id`).
pub fn get_reference_column_name(&self) -> String {
    // `strip_suffix`/`strip_prefix` return borrowed slices, avoiding the
    // intermediate String allocations of the manual version.
    let name = self.name.strip_suffix('s').unwrap_or(&self.name);
    let id_key = self
        .config
        .id_key
        .strip_prefix('_')
        .unwrap_or(&self.config.id_key);
    format!("{}_{}", name, id_key)
}
/// Folds `item` (when it is a JSON object) into the collection schema,
/// inferring a fresh schema when none exists yet. Non-objects are ignored.
pub fn ensure_update_schema_for_item(&mut self, item: &Value) {
    let Value::Object(map) = item else {
        return;
    };
    self.schema = Some(match self.schema.take() {
        None => SchemaDict::infer_schema_from_object(map),
        Some(mut schema) => {
            schema.merge_schema(map);
            schema
        }
    });
}
/// Recursively merges `update` into `base`.
///
/// When both sides are objects, keys are merged one by one (recursing into
/// keys present in both); any non-object `update` value replaces `base`
/// wholesale.
pub fn merge_json_values(mut base: Value, update: Value) -> Value {
    match (&mut base, update) {
        (Value::Object(base_map), Value::Object(update_map)) => {
            for (key, value) in update_map {
                if let Some(existing) = base_map.get_mut(&key) {
                    // Merge in place: `mem::take` swaps a cheap Null in for
                    // the old value, so we avoid the clone and the extra
                    // lookups of contains_key + get + insert, and keep the
                    // key's original position in the map.
                    let previous = std::mem::take(existing);
                    *existing = Self::merge_json_values(previous, value);
                } else {
                    base_map.insert(key, value);
                }
            }
            base
        }
        (_, update_value) => update_value,
    }
}
pub fn new_coll(name: &str, config: DbConfig) -> Self {
Self::new(name, config)
}
/// Returns a clone of every stored row (iteration order is unspecified).
pub fn get_all(&self) -> Vec<Value> {
    let mut rows = Vec::with_capacity(self.collection.len());
    rows.extend(self.collection.values().cloned());
    rows
}
/// Returns up to `limit` rows starting at `offset` (order is unspecified,
/// as it follows the underlying `HashMap` iteration).
pub fn get_paginated(&self, offset: usize, limit: usize) -> Vec<Value> {
    let page = self.collection.values().skip(offset).take(limit);
    page.cloned().collect()
}
pub fn get(&self, id: &str) -> Option<Value> {
self.collection.get(id).cloned()
}
pub fn get_filtered_by_columns_values(
&self,
columns_values: Vec<ColumnValue>,
expansion_type: ExpansionChain,
db: &Db,
) -> Vec<Value> {
self.collection
.values()
.filter_map(|row| match row {
Value::Object(map) => {
for column_value in &columns_values {
match map.get(&column_value.column) {
Some(value) => {
if *value != column_value.value {
return None;
}
}
None => return None,
}
}
let expanded = self.expand_row(row, expansion_type.clone(), db);
Some(expanded)
}
_ => None,
})
.collect::<Vec<Value>>()
}
/// Embeds related rows into `object` for every registered reference that
/// involves the collection named `collection_name`, recursing with
/// `next_expansion_type`. Returns the (possibly augmented) object.
fn expand_object(
    &self,
    object: Map<String, Value>,
    collection_name: String,
    next_expansion_type: ExpansionChain,
    db: &Db,
) -> Value {
    // The parameter is already owned — the original `object.clone()` here
    // was a redundant deep copy of the whole row.
    let mut object = object;
    // Without registered references there is nothing to expand.
    let Some(refs) = db.get_collection_refs(&self.name) else {
        return Value::Object(object);
    };
    for entry in refs.values() {
        if entry.ref_collection.eq_ignore_ascii_case(&collection_name) {
            // This row's `entry.column` points at `entry.ref_column` of the
            // referenced collection: pull in the matching rows.
            if let Some(collection) = db.get(&entry.ref_collection) {
                if let Some(cell) = object.get(&entry.column) {
                    let cvs = vec![ColumnValue::new(entry.ref_column.clone(), cell.clone())];
                    let expanded = collection.get_filtered_by_columns_values(
                        cvs,
                        next_expansion_type.clone(),
                        db,
                    );
                    let key = collection.get_name();
                    object.insert(key, Value::Array(expanded));
                }
            }
        }
        if entry.collection.eq_ignore_ascii_case(&collection_name) {
            // Reverse direction: rows of `entry.collection` reference this
            // row's `entry.ref_column` via their `entry.column`.
            if let Some(collection) = db.get(&entry.collection) {
                if let Some(cell) = object.get(&entry.ref_column) {
                    let cvs = vec![ColumnValue::new(entry.column.clone(), cell.clone())];
                    let expanded = collection.get_filtered_by_columns_values(
                        cvs,
                        next_expansion_type.clone(),
                        db,
                    );
                    let key = collection.get_name();
                    object.insert(key, Value::Array(expanded));
                }
            }
        }
    }
    Value::Object(object)
}
/// Expands `row` according to `expansion_type`. Non-object rows and
/// unhandled chain variants are returned as plain clones.
pub fn expand_row(&self, row: &Value, expansion_type: ExpansionChain, db: &Db) -> Value {
    // Match on a reference: the original cloned the whole row up front and
    // then cloned it *again* in the fallback arm. Now only the object map
    // is cloned, and only on the paths that need an owned copy.
    match (row, expansion_type) {
        (Value::Object(map), ExpansionChain::Single(collection_name)) => {
            self.expand_object(map.clone(), collection_name, ExpansionChain::None, db)
        }
        (Value::Object(map), ExpansionChain::Child(collection_name, next)) => {
            // `*next` moves the chain out of its Box instead of cloning it.
            self.expand_object(map.clone(), collection_name, *next, db)
        }
        _ => row.clone(),
    }
}
/// Expands every row in `list` with the same expansion chain.
pub fn expand_list(
    &self,
    list: Vec<Value>,
    expansion_type: ExpansionChain,
    db: &Db,
) -> Vec<Value> {
    let mut expanded = Vec::with_capacity(list.len());
    for row in &list {
        expanded.push(self.expand_row(row, expansion_type.clone(), db));
    }
    expanded
}
/// Returns `true` when a row with `id` is stored.
pub fn exists(&self, id: &str) -> bool {
    self.collection.get(id).is_some()
}
/// Number of rows currently stored.
pub fn count(&self) -> usize {
    self.collection.len()
}
/// Inserts `item` into the collection. A generated id (Uuid/Int modes) is
/// stamped into the object under the configured id key; with `IdType::None`
/// the item must already carry a string or numeric id. Returns the stored
/// item, or `None` when no id could be determined.
pub fn add(&mut self, item: Value) -> Option<Value> {
    let mut item = item;
    let id_string = match self.id_manager.next() {
        // A generated id wins: write it into the object (non-objects are
        // still stored under the generated key, as before).
        Some(id_value) => {
            if let Value::Object(ref mut map) = item {
                let json_id = match &id_value {
                    IdValue::Uuid(id) => Value::String(id.clone()),
                    IdValue::Int(id) => Value::Number((*id).into()),
                };
                map.insert(self.config.id_key.clone(), json_id);
            }
            Some(id_value.to_string())
        }
        // No generator (IdType::None): fall back to the item's own id.
        // Borrow the key for the lookup — the original cloned the whole
        // id-key String twice per call just to index into the object.
        None => match item.get(&self.config.id_key) {
            Some(Value::String(id)) => Some(id.clone()),
            Some(Value::Number(id)) => Some(id.to_string()),
            _ => None,
        },
    };
    let id_string = id_string?;
    self.ensure_update_schema_for_item(&item);
    self.collection.insert(id_string, item.clone());
    Some(item)
}
/// Inserts every element of the JSON array `items`, returning the items
/// that were actually stored. Non-array input and non-object elements are
/// skipped.
///
/// Per id type:
/// * `Uuid` - a string id on the item is honored; otherwise a fresh id is
///   generated and stamped in.
/// * `Int`  - numeric ids are honored and the id manager's counter is
///   advanced to the highest id seen; items without an id get the next
///   generated one.
/// * `None` - items must already carry a string or numeric id; others are
///   dropped.
pub fn add_batch(&mut self, items: Value) -> Vec<Value> {
    let mut added_items = Vec::new();
    if let Value::Array(items_array) = items {
        // Highest integer id seen so far (Int mode only); kept in sync with
        // the id manager so later generated ids don't collide.
        let mut max_id = None;
        for item in items_array {
            if let Value::Object(ref item_map) = item {
                self.ensure_update_schema_for_item(&item);
                let id_key = self.config.id_key.clone();
                let id = item_map.get(&id_key);
                // Resolve the id supplied by the item itself, if usable.
                let id = match self.id_manager.id_type {
                    IdType::Uuid => match id {
                        Some(Value::String(id)) => Some(id.clone()),
                        _ => None,
                    },
                    IdType::Int => match id {
                        Some(Value::Number(id)) => {
                            if let Some(current) = max_id {
                                // NOTE(review): `as_u64().unwrap()` panics on
                                // negative or fractional ids — confirm inputs
                                // are validated upstream.
                                let id = id.as_u64().unwrap();
                                if current < id {
                                    max_id = Some(id);
                                    let _ = self.id_manager.set_current(IdValue::Int(id));
                                }
                            } else {
                                // First numeric id seen: seed the tracker.
                                max_id = id.as_u64();
                                let _ =
                                    self.id_manager.set_current(IdValue::Int(max_id.unwrap()));
                            }
                            Some(id.to_string())
                        }
                        _ => None,
                    },
                    IdType::None => match item.get(&id_key) {
                        Some(Value::String(id_string)) => Some(id_string.clone()),
                        Some(Value::Number(id_number)) => Some(id_number.to_string()),
                        _ => None,
                    },
                };
                if let Some(id) = id {
                    self.collection.insert(id.clone(), item.clone());
                    added_items.push(item);
                } else if let Some(id) = self.id_manager.next() {
                    // No usable id on the item: generate one and stamp it in.
                    if let Value::Object(mut owned_map) = item {
                        let id_value = match id {
                            IdValue::Uuid(ref s) => Value::String(s.clone()),
                            IdValue::Int(i) => {
                                max_id = Some(i);
                                Value::Number(i.into())
                            }
                        };
                        owned_map.insert(id_key, id_value);
                        let new_item = Value::Object(owned_map);
                        self.collection.insert(id.to_string(), new_item.clone());
                        added_items.push(new_item);
                    }
                }
            }
        }
    }
    added_items
}
/// Replaces the row stored under `id` with `item`, returning the stored
/// value, or `None` when no row with that id exists.
///
/// When `item` lacks the configured id field it is stamped with `id`
/// (parsed to a number for `IdType::Int` when possible).
pub fn update(&mut self, id: &str, item: Value) -> Option<Value> {
    let mut item = item;
    if let Value::Object(ref mut map) = item {
        let id_key = self.config.id_key.clone();
        if !map.contains_key(&id_key) {
            let id_value = match self.config.id_type {
                IdType::Int => match id.parse::<u64>() {
                    Ok(num) => Value::Number(Number::from(num)),
                    // Non-numeric id under Int config: store it as a string.
                    Err(_) => Value::String(id.to_string()),
                },
                _ => Value::String(id.to_string()),
            };
            // Reuse the key we already cloned instead of cloning it again.
            map.insert(id_key, id_value);
        }
    }
    if !self.collection.contains_key(id) {
        return None;
    }
    self.ensure_update_schema_for_item(&item);
    self.collection.insert(id.to_string(), item.clone());
    Some(item)
}
/// Deep-merges `partial_item` into the row stored under `id` (see
/// [`Self::merge_json_values`]) and stores the result, returning it.
/// Returns `None` when no row with that id exists.
pub fn update_partial(&mut self, id: &str, partial_item: Value) -> Option<Value> {
    let existing_item = self.collection.get(id).cloned()?;
    let mut updated_item = Self::merge_json_values(existing_item, partial_item);
    if let Value::Object(ref mut map) = updated_item {
        let id_key = self.config.id_key.clone();
        if !map.contains_key(&id_key) {
            let id_value = match self.config.id_type {
                IdType::Int => match id.parse::<u64>() {
                    Ok(num) => Value::Number(Number::from(num)),
                    Err(_) => Value::String(id.to_string()),
                },
                _ => Value::String(id.to_string()),
            };
            // Reuse the key we already cloned instead of cloning it again.
            map.insert(id_key, id_value);
        }
    }
    self.ensure_update_schema_for_item(&updated_item);
    self.collection.insert(id.to_string(), updated_item.clone());
    Some(updated_item)
}
/// Removes the row stored under `id`, returning it when it was present.
pub fn delete(&mut self, id: &str) -> Option<Value> {
    self.collection.remove(id)
}
/// Removes every row, returning how many were removed.
pub fn clear(&mut self) -> usize {
    let removed = self.collection.len();
    self.collection.clear();
    removed
}
/// Loads rows from a JSON array value. When `keep` is false the current
/// contents are discarded first. Errors when the root is not an array.
pub fn load_from_json(&mut self, json_value: Value, keep: bool) -> Result<Vec<Value>, String> {
    if !matches!(json_value, Value::Array(_)) {
        return Err("⚠️ Informed JSON does not contain a JSON array in the root, skipping initial data load".to_string());
    }
    if !keep {
        self.clear();
    }
    Ok(self.add_batch(json_value))
}
/// Reads a JSON array from `file_path` and replaces the collection's
/// contents with it, returning a human-readable summary on success.
pub fn load_from_file(&mut self, file_path: &OsString) -> Result<String, String> {
    let file_path_lossy = file_path.to_string_lossy();
    let file_content = match fs::read_to_string(file_path) {
        Ok(content) => content,
        Err(_) => {
            return Err(format!(
                "⚠️ Could not read file {}, skipping initial data load",
                file_path_lossy
            ));
        }
    };
    let json_value = match serde_json::from_str::<Value>(&file_content) {
        Ok(value) => value,
        Err(_) => {
            return Err(format!(
                "⚠️ File {} does not contain valid JSON, skipping initial data load",
                file_path_lossy
            ));
        }
    };
    self.load_from_json(json_value, false)
        .map(|added_items| {
            format!(
                "✔️ Loaded {} initial items from {}",
                added_items.len(),
                file_path_lossy
            )
        })
        .map_err(|error| {
            format!(
                "Error to process the file {}. Details: {}",
                file_path_lossy, error
            )
        })
}
}
/// Public handle to a collection: wraps the internal store in an `RwLock`
/// so it can be shared and mutated across threads.
pub struct DbCollection {
    pub(crate) collection: MemoryCollection,
}
impl DbCollection {
/// Creates a new lock-protected collection (see [`InternalMemoryCollection::new`]).
pub fn new_coll(name: &str, config: DbConfig) -> Self {
    Self {
        collection: InternalMemoryCollection::new_coll(name, config).into_protected(),
    }
}
/// Column name other collections use to reference this one.
// NOTE(review): all wrappers below `unwrap()` the lock guard and will panic
// if the lock is poisoned by a panicking writer — confirm that is intended.
pub fn get_reference_column_name(&self) -> String {
    self.collection.read().unwrap().get_reference_column_name()
}
/// Clones and returns every stored row.
pub fn get_all(&self) -> Vec<Value> {
    self.collection.read().unwrap().get_all()
}
/// Returns up to `limit` rows starting at `offset`.
pub fn get_paginated(&self, offset: usize, limit: usize) -> Vec<Value> {
    self.collection.read().unwrap().get_paginated(offset, limit)
}
/// Rows matching all column/value pairs, expanded with `expansion_type`.
pub(crate) fn get_filtered_by_columns_values(
    &self,
    columns_values: Vec<ColumnValue>,
    expansion_type: ExpansionChain,
    db: &Db,
) -> Vec<Value> {
    self.collection
        .read()
        .unwrap()
        .get_filtered_by_columns_values(columns_values, expansion_type, db)
}
/// Looks up a row by id.
pub fn get(&self, id: &str) -> Option<Value> {
    self.collection.read().unwrap().get(id)
}
/// Whether a row with `id` is stored.
pub fn exists(&self, id: &str) -> bool {
    self.collection.read().unwrap().exists(id)
}
/// Number of stored rows.
pub fn count(&self) -> usize {
    self.collection.read().unwrap().count()
}
/// Inserts a single item (id handling per the collection's id type).
pub fn add(&self, item: Value) -> Option<Value> {
    self.collection.write().unwrap().add(item)
}
/// Inserts every element of a JSON array, returning the stored items.
pub fn add_batch(&self, items: Value) -> Vec<Value> {
    self.collection.write().unwrap().add_batch(items)
}
/// Replaces the row stored under `id`.
pub fn update(&self, id: &str, item: Value) -> Option<Value> {
    self.collection.write().unwrap().update(id, item)
}
/// Deep-merges `partial_item` into the row stored under `id`.
pub fn update_partial(&self, id: &str, partial_item: Value) -> Option<Value> {
    self.collection
        .write()
        .unwrap()
        .update_partial(id, partial_item)
}
/// Removes the row stored under `id`, returning it when present.
pub fn delete(&self, id: &str) -> Option<Value> {
    self.collection.write().unwrap().delete(id)
}
/// Removes every row, returning how many were removed.
pub fn clear(&self) -> usize {
    self.collection.write().unwrap().clear()
}
/// Loads rows from a JSON array value; clears existing rows unless `keep`.
pub fn load_from_json(&self, json_value: Value, keep: bool) -> Result<Vec<Value>, String> {
    self.collection
        .write()
        .unwrap()
        .load_from_json(json_value, keep)
}
/// Reads a JSON array from disk and loads it, replacing current contents.
pub fn load_from_file(&self, file_path: &OsString) -> Result<String, String> {
    self.collection.write().unwrap().load_from_file(file_path)
}
/// Serializes every stored row as pretty-printed JSON into `file_path`.
///
/// Previously this `expect`ed on I/O failures — panicking despite the
/// `Result` return type — and relied on `BufWriter`'s `Drop` to flush,
/// which silently swallows flush errors. All failures now surface as `Err`.
pub fn write_to_file(&self, file_path: &OsString) -> Result<(), String> {
    use std::io::Write;
    let file = std::fs::File::create(file_path)
        .map_err(|e| format!("Failed to create json file: {}", e))?;
    let mut writer = BufWriter::new(file);
    let data = self.get_all();
    serde_json::to_writer_pretty(&mut writer, &data)
        .map_err(|e| format!("Failed to write to a json file: {}", e))?;
    // Flush explicitly so buffered-write errors are reported, not dropped.
    writer
        .flush()
        .map_err(|e| format!("Failed to flush json file: {}", e))?;
    Ok(())
}
/// Expands a single row using an expansion chain parsed from `expansion`.
pub fn expand_row(&self, row: &Value, expansion: &str, db: &Db) -> Value {
    self.collection
        .read()
        .unwrap()
        .expand_row(row, ExpansionChain::from(expansion), db)
}
/// Expands every row in `list` using a chain parsed from `expansion`.
pub fn expand_list(&self, list: Vec<Value>, expansion: &str, db: &Db) -> Vec<Value> {
    self.collection
        .read()
        .unwrap()
        .expand_list(list, ExpansionChain::from(expansion), db)
}
/// Returns a clone of the inferred schema, if any.
// NOTE(review): unlike the other wrappers this returns None on a poisoned
// lock instead of panicking — confirm the inconsistency is deliberate.
pub fn schema(&self) -> Option<SchemaDict> {
    self.collection.read().ok().and_then(|g| g.schema())
}
/// The (lower-cased) collection name.
pub fn get_name(&self) -> String {
    self.collection.read().unwrap().name.clone()
}
/// A clone of the collection's configuration.
pub fn get_config(&self) -> DbConfig {
    self.collection.read().unwrap().config.clone()
}
}
#[cfg(test)]
mod tests {
use super::*;
use serde_json::json;
fn create_test_collection() -> InternalMemoryCollection {
InternalMemoryCollection::new("test_collection", DbConfig::int("id"))
}
fn create_uuid_collection() -> InternalMemoryCollection {
InternalMemoryCollection::new("uuid_collection", DbConfig::uuid("id"))
}
fn create_none_collection() -> InternalMemoryCollection {
InternalMemoryCollection::new("none_collection", DbConfig::none("id"))
}
#[test]
fn test_new_collection() {
let collection = create_test_collection();
assert_eq!(collection.count(), 0);
assert_eq!(collection.config.id_key, "id");
assert_eq!(collection.name, "test_collection");
}
#[test]
fn test_into_protected() {
let collection = create_test_collection();
let protected = collection.into_protected();
let guard = protected.read().unwrap();
assert_eq!(guard.count(), 0);
assert_eq!(guard.name, "test_collection");
}
#[test]
fn test_get_all_empty() {
let collection = create_test_collection();
let all_items = collection.get_all();
assert!(all_items.is_empty());
}
#[test]
fn test_get_all_with_items() {
let mut collection = create_test_collection();
collection.add(json!({"name": "Item 1"}));
collection.add(json!({"name": "Item 2"}));
collection.add(json!({"name": "Item 3"}));
let all_items = collection.get_all();
assert_eq!(all_items.len(), 3);
for item in &all_items {
assert!(item.get("id").is_some());
assert!(item.get("name").is_some());
}
}
#[test]
fn test_get_existing_item() {
let mut collection = create_test_collection();
let item = collection.add(json!({"name": "Test Item"})).unwrap();
let id = item.get("id").unwrap().as_u64().unwrap();
let retrieved = collection.get(&id.to_string());
assert!(retrieved.is_some());
assert_eq!(retrieved.unwrap().get("name").unwrap(), "Test Item");
}
#[test]
fn test_get_nonexistent_item() {
let collection = create_test_collection();
let retrieved = collection.get("999");
assert!(retrieved.is_none());
}
#[test]
fn test_get_paginated_empty() {
let collection = create_test_collection();
let paginated = collection.get_paginated(0, 10);
assert!(paginated.is_empty());
}
#[test]
fn test_get_paginated_with_items() {
let mut collection = create_test_collection();
for i in 1..=10 {
collection.add(json!({"name": format!("Item {}", i)}));
}
let first_page = collection.get_paginated(0, 3);
assert_eq!(first_page.len(), 3);
let second_page = collection.get_paginated(3, 3);
assert_eq!(second_page.len(), 3);
let last_page = collection.get_paginated(9, 5);
assert_eq!(last_page.len(), 1);
let empty_page = collection.get_paginated(15, 5);
assert!(empty_page.is_empty());
}
#[test]
fn test_exists() {
let mut collection = create_test_collection();
let item = collection.add(json!({"name": "Test Item"})).unwrap();
let id = item.get("id").unwrap().as_u64().unwrap();
assert!(collection.exists(&id.to_string()));
assert!(!collection.exists("999"));
}
#[test]
fn test_count() {
let mut collection = create_test_collection();
assert_eq!(collection.count(), 0);
collection.add(json!({"name": "Item 1"}));
assert_eq!(collection.count(), 1);
collection.add(json!({"name": "Item 2"}));
assert_eq!(collection.count(), 2);
let all_items = collection.get_all();
let id = all_items[0].get("id").unwrap().as_u64().unwrap();
collection.delete(&id.to_string());
assert_eq!(collection.count(), 1);
}
#[test]
fn test_add_with_int_id() {
let mut collection = create_test_collection();
let item = collection.add(json!({"name": "Test Item"}));
assert!(item.is_some());
let item = item.unwrap();
assert_eq!(item.get("name").unwrap(), "Test Item");
assert_eq!(item.get("id").unwrap(), 1);
let item2 = collection.add(json!({"name": "Test Item 2"})).unwrap();
assert_eq!(item2.get("id").unwrap(), 2);
}
#[test]
fn test_add_with_uuid_id() {
let mut collection = create_uuid_collection();
let item = collection.add(json!({"name": "Test Item"}));
assert!(item.is_some());
let item = item.unwrap();
assert_eq!(item.get("name").unwrap(), "Test Item");
let id = item.get("id").unwrap().as_str().unwrap();
assert!(!id.is_empty());
assert!(id.len() > 10); }
#[test]
fn test_add_with_none_id_existing() {
let mut collection = create_none_collection();
let item = collection.add(json!({"id": "custom-id", "name": "Test Item"}));
assert!(item.is_some());
let item = item.unwrap();
assert_eq!(item.get("name").unwrap(), "Test Item");
assert_eq!(item.get("id").unwrap(), "custom-id");
}
#[test]
fn test_add_with_none_id_number_existing() {
let mut collection = create_none_collection();
let item = collection.add(json!({"id": 1, "name": "Test Item"}));
assert!(item.is_some());
let item = item.unwrap();
assert_eq!(item.get("name").unwrap(), "Test Item");
assert_eq!(item.get("id").unwrap(), 1);
}
#[test]
fn test_add_with_none_id_missing() {
let mut collection = create_none_collection();
let item = collection.add(json!({"name": "Test Item"}));
assert!(item.is_none());
assert_eq!(collection.count(), 0);
}
#[test]
fn test_add_batch_int() {
let mut collection = create_test_collection();
let batch = json!([
{"name": "Item 1"},
{"id": 5, "name": "Item 2"},
{"id": 3, "name": "Item 3"},
{"id": 10, "name": "Item 4"}
]);
let added_items = collection.add_batch(batch);
assert_eq!(added_items.len(), 4); assert_eq!(collection.count(), 4);
let new_item = collection.add(json!({"name": "New Item"})).unwrap();
assert_eq!(new_item.get("id").unwrap(), 11); }
#[test]
fn test_add_batch_uuid() {
let mut collection = create_uuid_collection();
let batch = json!([
{"id": "uuid-1", "name": "Item 1"},
{"id": "uuid-2", "name": "Item 2"},
{"name": "Item 3"} ]);
let added_items = collection.add_batch(batch);
assert_eq!(added_items.len(), 3);
assert_eq!(collection.count(), 3);
}
#[test]
fn test_add_batch_none() {
let mut collection = create_none_collection();
let batch = json!([
{"id": "custom-1", "name": "Item 1"},
{"id": "custom-2", "name": "Item 2"},
{"name": "Item 3"}, {"id": 3, "name": "Item 4"},
]);
let added_items = collection.add_batch(batch);
assert_eq!(added_items.len(), 3);
assert_eq!(collection.count(), 3);
}
#[test]
fn test_add_batch_non_array() {
let mut collection = create_test_collection();
let non_array = json!({"name": "Single Item"});
let added_items = collection.add_batch(non_array);
assert!(added_items.is_empty());
assert_eq!(collection.count(), 0);
}
#[test]
fn test_update_existing_item() {
let mut collection = create_test_collection();
let item = collection.add(json!({"name": "Original Name"})).unwrap();
let id = item.get("id").unwrap().as_u64().unwrap();
let updated = collection.update(
&id.to_string(),
json!({"name": "Updated Name", "description": "New field"}),
);
assert!(updated.is_some());
let updated_item = updated.unwrap();
assert_eq!(updated_item.get("name").unwrap(), "Updated Name");
assert_eq!(updated_item.get("description").unwrap(), "New field");
assert_eq!(updated_item.get("id").unwrap(), id);
let retrieved = collection.get(&id.to_string()).unwrap();
assert_eq!(retrieved.get("name").unwrap(), "Updated Name");
}
#[test]
fn test_update_nonexistent_item() {
let mut collection = create_test_collection();
let updated = collection.update("999", json!({"name": "Updated Name"}));
assert!(updated.is_none());
}
#[test]
fn test_update_partial_existing_item() {
let mut collection = create_test_collection();
let item = collection
.add(json!({
"name": "Original Name",
"description": "Original Description",
"count": 42
}))
.unwrap();
let id = item.get("id").unwrap().as_u64().unwrap();
let updated = collection.update_partial(&id.to_string(), json!({"name": "Updated Name"}));
assert!(updated.is_some());
let updated_item = updated.unwrap();
assert_eq!(updated_item.get("name").unwrap(), "Updated Name");
assert_eq!(
updated_item.get("description").unwrap(),
"Original Description"
); assert_eq!(updated_item.get("count").unwrap(), 42); assert_eq!(updated_item.get("id").unwrap(), id);
}
#[test]
fn test_update_partial_nested_objects() {
let mut collection = create_test_collection();
let item = collection
.add(json!({
"name": "Test Item",
"config": {
"enabled": true,
"timeout": 30,
"nested": {
"value": "original"
}
}
}))
.unwrap();
let id = item.get("id").unwrap().as_u64().unwrap();
let updated = collection.update_partial(
&id.to_string(),
json!({
"config": {
"timeout": 60,
"nested": {
"value": "updated",
"new_field": "added"
}
}
}),
);
assert!(updated.is_some());
let updated_item = updated.unwrap();
let config = updated_item.get("config").unwrap();
assert_eq!(config.get("enabled").unwrap(), true); assert_eq!(config.get("timeout").unwrap(), 60);
let nested = config.get("nested").unwrap();
assert_eq!(nested.get("value").unwrap(), "updated");
assert_eq!(nested.get("new_field").unwrap(), "added");
}
#[test]
fn test_update_partial_nonexistent_item() {
let mut collection = create_test_collection();
let updated = collection.update_partial("999", json!({"name": "Updated Name"}));
assert!(updated.is_none());
}
#[test]
fn test_delete_existing_item() {
let mut collection = create_test_collection();
let item = collection.add(json!({"name": "Test Item"})).unwrap();
let id = item.get("id").unwrap().as_u64().unwrap();
assert_eq!(collection.count(), 1);
let deleted = collection.delete(&id.to_string());
assert!(deleted.is_some());
assert_eq!(deleted.unwrap().get("name").unwrap(), "Test Item");
assert_eq!(collection.count(), 0);
assert!(!collection.exists(&id.to_string()));
}
#[test]
fn test_delete_nonexistent_item() {
let mut collection = create_test_collection();
let deleted = collection.delete("999");
assert!(deleted.is_none());
}
#[test]
fn test_clear_empty_collection() {
let mut collection = create_test_collection();
let count = collection.clear();
assert_eq!(count, 0);
assert_eq!(collection.count(), 0);
}
#[test]
fn test_clear_with_items() {
let mut collection = create_test_collection();
collection.add(json!({"name": "Item 1"}));
collection.add(json!({"name": "Item 2"}));
collection.add(json!({"name": "Item 3"}));
assert_eq!(collection.count(), 3);
let count = collection.clear();
assert_eq!(count, 3);
assert_eq!(collection.count(), 0);
assert!(collection.get_all().is_empty());
}
#[test]
fn test_merge_json_values_objects() {
let base = json!({
"name": "Original",
"config": {
"enabled": true,
"timeout": 30
},
"tags": ["tag1", "tag2"]
});
let update = json!({
"name": "Updated",
"config": {
"timeout": 60,
"new_setting": "value"
},
"description": "New field"
});
let merged = InternalMemoryCollection::merge_json_values(base, update);
assert_eq!(merged.get("name").unwrap(), "Updated");
assert_eq!(merged.get("description").unwrap(), "New field");
assert_eq!(merged.get("tags").unwrap(), &json!(["tag1", "tag2"]));
let config = merged.get("config").unwrap();
assert_eq!(config.get("enabled").unwrap(), true); assert_eq!(config.get("timeout").unwrap(), 60); assert_eq!(config.get("new_setting").unwrap(), "value"); }
#[test]
fn test_merge_json_values_non_objects() {
let base = json!("original");
let update = json!("updated");
let merged = InternalMemoryCollection::merge_json_values(base, update);
assert_eq!(merged, json!("updated"));
let base = json!(42);
let update = json!(100);
let merged = InternalMemoryCollection::merge_json_values(base, update);
assert_eq!(merged, json!(100));
}
#[test]
fn test_id_manager_integration() {
let mut collection = create_test_collection();
let item1 = collection.add(json!({"name": "Item 1"})).unwrap();
assert_eq!(item1.get("id").unwrap(), 1);
let item2 = collection.add(json!({"name": "Item 2"})).unwrap();
assert_eq!(item2.get("id").unwrap(), 2);
let item3 = collection.add(json!({"name": "Item 3"})).unwrap();
assert_eq!(item3.get("id").unwrap(), 3);
}
#[test]
fn test_custom_id_key() {
let mut collection =
InternalMemoryCollection::new("custom_collection", DbConfig::int("customId"));
let item = collection.add(json!({"name": "Test Item"})).unwrap();
assert_eq!(item.get("customId").unwrap(), 1);
assert!(item.get("id").is_none());
let retrieved = collection.get("1").unwrap();
assert_eq!(retrieved.get("customId").unwrap(), 1);
}
use std::fs::File;
use std::io::Write;
use tempfile::TempDir;
#[test]
fn test_load_from_file_valid_json_array() {
let mut collection = create_test_collection();
let temp_dir = TempDir::new().unwrap();
let file_path = temp_dir.path().join("test_data.json");
let test_data = json!([
{"id": 1, "name": "Item 1", "description": "First item"},
{"id": 2, "name": "Item 2", "description": "Second item"},
{"id": 3, "name": "Item 3", "description": "Third item"}
]);
let mut file = File::create(&file_path).unwrap();
file.write_all(test_data.to_string().as_bytes()).unwrap();
let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
assert!(result.is_ok());
assert!(result.unwrap().contains("Loaded 3 initial items"));
assert_eq!(collection.count(), 3);
assert!(collection.exists("1"));
assert!(collection.exists("2"));
assert!(collection.exists("3"));
let item1 = collection.get("1").unwrap();
assert_eq!(item1.get("name").unwrap(), "Item 1");
assert_eq!(item1.get("description").unwrap(), "First item");
}
#[test]
fn test_load_from_file_empty_array() {
let mut collection = create_test_collection();
let temp_dir = TempDir::new().unwrap();
let file_path = temp_dir.path().join("empty_array.json");
let test_data = json!([]);
let mut file = File::create(&file_path).unwrap();
file.write_all(test_data.to_string().as_bytes()).unwrap();
let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
assert!(result.is_ok());
assert!(result.unwrap().contains("Loaded 0 initial items"));
assert_eq!(collection.count(), 0);
}
#[test]
fn test_load_from_file_with_uuid_collection() {
let mut collection = create_uuid_collection();
let temp_dir = TempDir::new().unwrap();
let file_path = temp_dir.path().join("uuid_data.json");
let test_data = json!([
{"id": "uuid-1", "name": "Item 1"},
{"id": "uuid-2", "name": "Item 2"}
]);
let mut file = File::create(&file_path).unwrap();
file.write_all(test_data.to_string().as_bytes()).unwrap();
let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
assert!(result.is_ok());
assert!(result.unwrap().contains("Loaded 2 initial items"));
assert_eq!(collection.count(), 2);
assert!(collection.exists("uuid-1"));
assert!(collection.exists("uuid-2"));
}
#[test]
fn test_load_from_file_with_mixed_id_types() {
let mut collection = create_none_collection();
let temp_dir = TempDir::new().unwrap();
let file_path = temp_dir.path().join("mixed_data.json");
let test_data = json!([
{"id": "string-id", "name": "Item 1"},
{"id": 42, "name": "Item 2"},
{"name": "Item 3"} ]);
let mut file = File::create(&file_path).unwrap();
file.write_all(test_data.to_string().as_bytes()).unwrap();
let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
assert!(result.is_ok());
assert!(result.unwrap().contains("Loaded 2 initial items"));
assert_eq!(collection.count(), 2);
assert!(collection.exists("string-id"));
assert!(collection.exists("42"));
}
#[test]
fn test_load_from_file_nonexistent_file() {
let mut collection = create_test_collection();
let nonexistent_path = std::ffi::OsString::from("/path/that/does/not/exist.json");
let result = collection.load_from_file(&nonexistent_path);
assert!(result.is_err());
let error_msg = result.unwrap_err();
assert!(error_msg.contains("Could not read file"));
assert!(error_msg.contains("skipping initial data load"));
assert_eq!(collection.count(), 0);
}
#[test]
fn test_load_from_file_invalid_json() {
let mut collection = create_test_collection();
let temp_dir = TempDir::new().unwrap();
let file_path = temp_dir.path().join("invalid.json");
let mut file = File::create(&file_path).unwrap();
file.write_all(b"{ invalid json content }").unwrap();
let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
assert!(result.is_err());
let error_msg = result.unwrap_err();
assert!(error_msg.contains("does not contain valid JSON"));
assert!(error_msg.contains("skipping initial data load"));
assert_eq!(collection.count(), 0);
}
#[test]
fn test_load_from_file_json_object_not_array() {
let mut collection = create_test_collection();
let temp_dir = TempDir::new().unwrap();
let file_path = temp_dir.path().join("object.json");
let test_data = json!({"id": 1, "name": "Single Item"});
let mut file = File::create(&file_path).unwrap();
file.write_all(test_data.to_string().as_bytes()).unwrap();
let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
assert!(result.is_err());
let error_msg = result.unwrap_err();
assert!(error_msg.contains("does not contain a JSON array"));
assert!(error_msg.contains("skipping initial data load"));
assert_eq!(collection.count(), 0);
}
#[test]
fn test_load_from_file_json_primitive_not_array() {
let mut collection = create_test_collection();
let temp_dir = TempDir::new().unwrap();
let file_path = temp_dir.path().join("primitive.json");
let mut file = File::create(&file_path).unwrap();
file.write_all(b"\"just a string\"").unwrap();
let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
assert!(result.is_err());
assert!(
result
.unwrap_err()
.contains("does not contain a JSON array")
);
assert_eq!(collection.count(), 0);
}
#[test]
fn test_load_from_file_updates_id_manager() {
let mut collection = create_test_collection();
let temp_dir = TempDir::new().unwrap();
let file_path = temp_dir.path().join("id_update_test.json");
let test_data = json!([
{"id": 10, "name": "Item 1"},
{"id": 15, "name": "Item 2"},
{"id": 5, "name": "Item 3"}
]);
let mut file = File::create(&file_path).unwrap();
file.write_all(test_data.to_string().as_bytes()).unwrap();
let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
assert!(result.is_ok());
let new_item = collection.add(json!({"name": "New Item"})).unwrap();
assert_eq!(new_item.get("id").unwrap(), 16);
}
#[test]
fn test_load_from_file_large_dataset() {
let mut collection = create_test_collection();
let temp_dir = TempDir::new().unwrap();
let file_path = temp_dir.path().join("large_dataset.json");
let mut items = Vec::new();
for i in 1..=1000 {
items.push(json!({
"id": i,
"name": format!("Item {}", i),
"value": i * 10
}));
}
let test_data = json!(items);
let mut file = File::create(&file_path).unwrap();
file.write_all(test_data.to_string().as_bytes()).unwrap();
let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
assert!(result.is_ok());
assert!(result.unwrap().contains("Loaded 1000 initial items"));
assert_eq!(collection.count(), 1000);
assert!(collection.exists("1"));
assert!(collection.exists("500"));
assert!(collection.exists("1000"));
let item_500 = collection.get("500").unwrap();
assert_eq!(item_500.get("name").unwrap(), "Item 500");
assert_eq!(item_500.get("value").unwrap(), 5000);
}
#[test]
fn test_load_from_file_with_existing_data() {
    // Loading from a file replaces any data already in the collection
    // rather than merging with it.
    let mut collection = create_test_collection();
    // Unwrap so a failed insert fails the test immediately instead of the
    // `Result` being silently dropped (previous version ignored both).
    collection.add(json!({"name": "Existing Item 1"})).unwrap();
    collection.add(json!({"name": "Existing Item 2"})).unwrap();
    assert_eq!(collection.count(), 2);
    let temp_dir = TempDir::new().unwrap();
    let file_path = temp_dir.path().join("additional_data.json");
    let test_data = json!([
        {"id": 10, "name": "Loaded Item 1"},
        {"id": 11, "name": "Loaded Item 2"}
    ]);
    let mut file = File::create(&file_path).unwrap();
    file.write_all(test_data.to_string().as_bytes()).unwrap();
    let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
    assert!(result.is_ok());
    assert!(result.unwrap().contains("Loaded 2 initial items"));
    // Only the loaded rows remain: the pre-existing ids 1 and 2 are gone.
    assert_eq!(collection.count(), 2);
    assert!(!collection.exists("1"));
    assert!(!collection.exists("2"));
    assert!(collection.exists("10"));
    assert!(collection.exists("11"));
}
#[test]
fn test_load_from_file_custom_id_key() {
    // A collection configured with a non-default id key should index loaded
    // rows by that key's value.
    let mut coll =
        InternalMemoryCollection::new("custom_collection", DbConfig::int("customId"));
    let dir = TempDir::new().unwrap();
    let path = dir.path().join("custom_id_data.json");
    let rows = json!([
        {"customId": 1, "name": "Item 1"},
        {"customId": 2, "name": "Item 2"}
    ]);
    File::create(&path)
        .unwrap()
        .write_all(rows.to_string().as_bytes())
        .unwrap();
    let loaded = coll.load_from_file(&path.as_os_str().to_os_string());
    assert!(loaded.is_ok());
    assert!(loaded.unwrap().contains("Loaded 2 initial items"));
    assert_eq!(coll.count(), 2);
    assert!(coll.exists("1"));
    assert!(coll.exists("2"));
    let first = coll.get("1").unwrap();
    assert_eq!(first.get("customId").unwrap(), 1);
    assert_eq!(first.get("name").unwrap(), "Item 1");
}
#[test]
fn test_write_to_file() {
    use serde_json::json;
    use std::fs;
    use tempfile::TempDir;
    // write_to_file should persist every stored item as a JSON array that
    // round-trips through serde_json.
    let tmp = TempDir::new().unwrap();
    let file_path = tmp.path().join("output.json");
    // Convert losslessly via OsStr (consistent with the other tests). The
    // previous `OsString::from(to_string_lossy().into_owned())` round-trip
    // could silently corrupt paths containing non-UTF-8 bytes.
    let os_file_path = file_path.as_os_str().to_os_string();
    let config = DbConfig::int("id");
    let db_collection = DbCollection::new_coll("test", config);
    let stored1 = db_collection.add(json!({"name": "Alice"})).unwrap();
    let stored2 = db_collection.add(json!({"name": "Bob"})).unwrap();
    assert!(db_collection.write_to_file(&os_file_path).is_ok());
    let content = fs::read_to_string(&file_path).unwrap();
    let parsed: Vec<serde_json::Value> = serde_json::from_str(&content).unwrap();
    assert_eq!(parsed.len(), 2);
    // Both generated ids must appear in the serialized output.
    let ids: Vec<_> = parsed.iter().filter_map(|v| v.get("id")).collect();
    assert!(ids.contains(&stored1.get("id").unwrap()));
    assert!(ids.contains(&stored2.get("id").unwrap()));
}
#[test]
fn test_expand_row_no_refs() {
    use serde_json::json;
    // With no references registered, expansion is a no-op regardless of the
    // expansion chain supplied.
    let db = Db::new_with_config(DbConfig::int("id"));
    let coll = db.create("items");
    let row = coll.add(json!({"id": 1, "value": "test"})).unwrap();
    for chain in ["", "unknown"] {
        assert_eq!(coll.expand_row(&row, chain, &db), row);
    }
}
#[test]
fn test_expand_list_no_refs() {
    use serde_json::json;
    // Expanding a list with an empty or unknown chain returns it unchanged.
    let db = Db::new_with_config(DbConfig::int("id"));
    let coll = db.create("items");
    let first = coll.add(json!({"id": 1, "value": 10})).unwrap();
    let second = coll.add(json!({"id": 2, "value": 20})).unwrap();
    let rows = vec![first, second];
    for chain in ["", "none"] {
        assert_eq!(coll.expand_list(rows.clone(), chain, &db), rows);
    }
}
#[test]
fn test_expand_row_with_references() {
    use serde_json::json;
    // Expanding "authors" on a book should attach exactly the one author
    // referenced by that book's author_id.
    let db = Db::new_with_config(DbConfig::int("id"));
    let authors = db.create("authors");
    let alice = authors.add(json!({"name": "Alice"})).unwrap();
    let bob = authors.add(json!({"name": "Bob"})).unwrap();
    let books = db.create("books");
    let book1 = books
        .add(json!({"title": "Book1", "author_id": alice.get("id").unwrap()}))
        .unwrap();
    books
        .add(json!({"title": "Book2", "author_id": bob.get("id").unwrap()}))
        .unwrap();
    assert!(db.create_reference("books", "author_id", "authors", "id"));
    match books.expand_row(&book1, "authors", &db) {
        Value::Object(map) => {
            let linked = map.get("authors").unwrap().as_array().unwrap();
            assert_eq!(linked.len(), 1);
            assert_eq!(linked[0].get("name").unwrap(), alice.get("name").unwrap());
        }
        _ => panic!("Expected expanded object for book1"),
    }
}
#[test]
fn test_expand_list_with_references() {
    use serde_json::json;
    // Expanding a whole list: each row must carry exactly its own author.
    let db = Db::new_with_config(DbConfig::int("id"));
    let authors = db.create("authors");
    let alice = authors.add(json!({"name": "Alice"})).unwrap();
    let bob = authors.add(json!({"name": "Bob"})).unwrap();
    let books = db.create("books");
    let book1 = books
        .add(json!({"title": "Book1", "author_id": alice.get("id").unwrap()}))
        .unwrap();
    let book2 = books
        .add(json!({"title": "Book2", "author_id": bob.get("id").unwrap()}))
        .unwrap();
    assert!(db.create_reference("books", "author_id", "authors", "id"));
    let originals = vec![book1, book2];
    let expanded = books.expand_list(originals.clone(), "authors", &db);
    for (original, row) in originals.iter().zip(expanded.iter()) {
        match row {
            Value::Object(map) => {
                let linked = map.get("authors").unwrap().as_array().unwrap();
                assert_eq!(linked.len(), 1);
                // The attached author's id equals this row's author_id.
                assert_eq!(
                    linked[0].get("id").unwrap(),
                    original.get("author_id").unwrap()
                );
            }
            _ => panic!("Expected expanded object in list"),
        }
    }
}
#[test]
fn test_expand_row_parent_to_children() {
    use serde_json::json;
    // A parent row can be expanded with the child rows that reference it:
    // the order picks up the order_items pointing at its id.
    let db = Db::new_with_config(DbConfig::int("id"));
    let orders = db.create("orders");
    let order_items = db.create("order_items");
    let order = orders.add(json!({"total": 100})).unwrap();
    order_items
        .add(json!({"order_id": order.get("id").unwrap(), "product": "A"}))
        .unwrap();
    assert!(db.create_reference("order_items", "order_id", "orders", "id"));
    match orders.expand_row(&order, "order_items", &db) {
        Value::Object(map) => {
            let children = map.get("order_items").unwrap().as_array().unwrap();
            assert_eq!(children.len(), 1);
            assert_eq!(children[0].get("product").unwrap().as_str().unwrap(), "A");
        }
        _ => panic!("Expected expanded order object"),
    }
}
#[test]
fn test_expand_row_multi_level() {
    use serde_json::{Value, json};
    // Multi-level expansion via a dotted chain:
    // order -> order_items -> products.
    let db = Db::new_with_config(DbConfig::int("id"));
    let orders = db.create("orders");
    let items = db.create("order_items");
    let products = db.create("products");
    let o1 = orders.add(json!({ "total": 300 })).unwrap();
    let p1 = products
        .add(json!({ "name": "Widget", "price": 9.99 }))
        .unwrap();
    let p2 = products
        .add(json!({ "name": "Gadget", "price": 19.99 }))
        .unwrap();
    let _ = items
        .add(json!({
            "order_id": o1.get("id").unwrap(),
            "product_id": p1.get("id").unwrap()
        }))
        .unwrap();
    let _ = items
        .add(json!({
            "order_id": o1.get("id").unwrap(),
            "product_id": p2.get("id").unwrap()
        }))
        .unwrap();
    assert!(db.create_reference("order_items", "order_id", "orders", "id"));
    assert!(db.create_reference("order_items", "product_id", "products", "id"));
    // (Removed a leftover debug `println!` that pretty-printed the expanded
    // JSON on every test run.)
    let expanded = orders.expand_row(&o1, "order_items.products", &db);
    if let Value::Object(map) = expanded {
        let items_arr = map.get("order_items").unwrap().as_array().unwrap();
        assert_eq!(items_arr.len(), 2);
        for item in items_arr {
            let item_map = item.as_object().unwrap();
            assert_eq!(item_map.get("order_id").unwrap(), o1.get("id").unwrap());
            // Each item nests its referenced product with its full fields.
            let prod_arr = item_map.get("products").unwrap().as_array().unwrap();
            let prod_map = prod_arr[0].as_object().unwrap();
            assert!(prod_map.contains_key("name"));
            assert!(prod_map.contains_key("price"));
        }
    } else {
        panic!("Expected expanded order object for multi-level expansion");
    }
}
}