use std::{collections::HashMap, ffi::OsString, fs, io::BufWriter, sync::RwLock};
use serde_json::Value;
use crate::database::{DbConfig, IdManager, IdType, IdValue, SchemaDict};
/// Thread-safe handle to a collection; produced by `InternalMemoryCollection::into_protected`.
pub(crate) type MemoryCollection = RwLock<InternalMemoryCollection>;
/// In-memory JSON document store. Items are kept in a `HashMap` keyed by the
/// stringified value of the configured id field, so iteration order is unspecified.
pub(crate) struct InternalMemoryCollection {
// Stored items, keyed by their stringified id.
collection: HashMap<String, Value>,
// Generates ids according to the configured `IdType` (int sequence, uuid, or none).
id_manager: IdManager,
// Collection configuration; `config.id_key` names the id field inside each item.
config: DbConfig,
// Collection name, normalized to ASCII lowercase in `new`.
pub name: String,
// Lazily inferred schema; `None` until the first object is added.
pub schema: Option<SchemaDict>,
}
impl InternalMemoryCollection {
/// Creates an empty collection. The name is normalized to ASCII lowercase and
/// the id generator is initialized from `config.id_type`.
pub fn new(name: &str, config: DbConfig) -> Self {
    Self {
        collection: HashMap::new(),
        id_manager: IdManager::new(config.id_type),
        config,
        name: name.to_ascii_lowercase(),
        schema: None,
    }
}
pub fn into_protected(self) -> MemoryCollection {
RwLock::new(self)
}
/// Returns a clone of the inferred schema, or `None` if nothing has been added yet.
pub fn schema(&self) -> Option<SchemaDict> {
    // `Option<T>: Clone` already clones the inner value; `as_ref().cloned()` was redundant.
    self.schema.clone()
}
/// Keeps the collection schema in sync with `item`: infers a fresh schema from
/// the first object seen, then merges every subsequent object into it.
/// Non-object values are ignored.
pub fn ensure_update_schema_for_item(&mut self, item: &Value) {
    let Value::Object(map) = item else { return };
    if let Some(schema) = self.schema.as_mut() {
        schema.merge_schema(map);
    } else {
        self.schema = Some(SchemaDict::infer_schema_from_object(map));
    }
}
pub fn merge_json_values(mut base: Value, update: Value) -> Value {
match (&mut base, update) {
(Value::Object(base_map), Value::Object(update_map)) => {
for (key, value) in update_map {
if base_map.contains_key(&key) {
let existing_value = base_map.get(&key).unwrap().clone();
base_map.insert(key, Self::merge_json_values(existing_value, value));
} else {
base_map.insert(key, value);
}
}
base
}
(_, update_value) => update_value,
}
}
/// Alias for [`InternalMemoryCollection::new`]; kept for naming symmetry with `DbCollection::new_coll`.
pub fn new_coll(name: &str, config: DbConfig) -> Self {
Self::new(name, config)
}
/// Returns an independent clone of every stored item, in unspecified order.
pub fn get_all(&self) -> Vec<Value> {
    let mut items = Vec::with_capacity(self.collection.len());
    items.extend(self.collection.values().cloned());
    items
}
/// Returns up to `limit` cloned items, skipping the first `offset` in the
/// map's (unspecified) iteration order. Out-of-range pages yield an empty vec.
pub fn get_paginated(&self, offset: usize, limit: usize) -> Vec<Value> {
    let mut page = Vec::new();
    for item in self.collection.values().skip(offset).take(limit) {
        page.push(item.clone());
    }
    page
}
/// Looks up an item by id, returning a clone if present.
pub fn get(&self, id: &str) -> Option<Value> {
    self.collection.get(id).map(|item| item.clone())
}
/// Reports whether an item with the given id is stored.
pub fn exists(&self, id: &str) -> bool {
    self.collection.get(id).is_some()
}
/// Returns the number of stored items (O(1)).
pub fn count(&self) -> usize {
self.collection.len()
}
/// Inserts `item` into the collection and returns the stored value.
///
/// If the id manager generates ids (`Int`/`Uuid`), the generated id is stamped
/// into the item under `config.id_key` (when the item is an object). Otherwise
/// (`IdType::None`) the item must already carry a string or numeric id under
/// that key; items without one are rejected and `None` is returned.
pub fn add(&mut self, mut item: Value) -> Option<Value> {
    // Prefer a freshly generated id; fall back to an id already on the item.
    let id_string = match self.id_manager.next() {
        Some(generated) => {
            let id_string = generated.to_string();
            if let Value::Object(map) = &mut item {
                map.insert(self.config.id_key.clone(), Value::String(id_string.clone()));
            }
            Some(id_string)
        }
        // Borrowed lookup — no need to clone the key String for `get`.
        None => match item.get(&self.config.id_key) {
            Some(Value::String(existing)) => Some(existing.clone()),
            Some(Value::Number(existing)) => Some(existing.to_string()),
            _ => None,
        },
    };
    let id_string = id_string?;
    self.ensure_update_schema_for_item(&item);
    self.collection.insert(id_string, item.clone());
    Some(item)
}
/// Inserts every object from a JSON array, returning the items actually added.
///
/// Non-array input and non-object entries are skipped. The id is taken from
/// `config.id_key` according to the collection's id type: `Uuid` accepts string
/// ids, `Int` accepts numeric ids (and advances the id generator past the
/// largest u64 id seen), `None` accepts string or numeric ids. Entries without
/// a usable id are dropped.
pub fn add_batch(&mut self, items: Value) -> Vec<Value> {
    let mut added_items = Vec::new();
    let Value::Array(items_array) = items else {
        return added_items;
    };
    // Largest u64 id seen in an Int-typed batch; used to advance the generator.
    let mut max_id: Option<u64> = None;
    for item in items_array {
        let Value::Object(ref item_map) = item else { continue };
        self.ensure_update_schema_for_item(&item);
        let raw_id = item_map.get(&self.config.id_key);
        let id = match self.id_manager.id_type {
            IdType::Uuid => match raw_id {
                Some(Value::String(id)) => Some(id.clone()),
                _ => None,
            },
            IdType::Int => match raw_id {
                Some(Value::Number(number)) => {
                    // Only u64-representable ids advance the counter; a negative
                    // or fractional id is skipped instead of panicking (the
                    // previous `as_u64().unwrap()` would abort the whole batch).
                    if let Some(value) = number.as_u64() {
                        if max_id.map_or(true, |current| current < value) {
                            max_id = Some(value);
                        }
                    }
                    Some(number.to_string())
                }
                _ => None,
            },
            IdType::None => match raw_id {
                Some(Value::String(id_string)) => Some(id_string.clone()),
                Some(Value::Number(id_number)) => Some(id_number.to_string()),
                _ => None,
            },
        };
        if let Some(id) = id {
            self.collection.insert(id, item.clone());
            added_items.push(item);
        }
    }
    if let Some(value) = max_id {
        if self.id_manager.set_current(IdValue::Int(value)).is_err() {
            // Diagnostic only — the batch itself was stored. Goes to stderr.
            eprintln!("Error to set the value {} to {} collection Id", value, self.name);
        }
    }
    added_items
}
/// Replaces the item stored under `id` wholesale. The id is stamped back into
/// the new item (as a string) so it stays consistent with its key. Returns the
/// stored item, or `None` when `id` is unknown.
pub fn update(&mut self, id: &str, mut item: Value) -> Option<Value> {
    if !self.collection.contains_key(id) {
        return None;
    }
    if let Value::Object(map) = &mut item {
        map.insert(self.config.id_key.clone(), Value::String(id.to_owned()));
    }
    self.ensure_update_schema_for_item(&item);
    self.collection.insert(id.to_owned(), item.clone());
    Some(item)
}
/// Deep-merges `partial_item` into the stored item (see `merge_json_values`),
/// re-stamps the id, and stores the result. Returns the merged item, or `None`
/// when `id` is unknown.
pub fn update_partial(&mut self, id: &str, partial_item: Value) -> Option<Value> {
    let existing = self.collection.get(id).cloned()?;
    let mut merged = Self::merge_json_values(existing, partial_item);
    if let Value::Object(map) = &mut merged {
        map.insert(self.config.id_key.clone(), Value::String(id.to_owned()));
    }
    self.ensure_update_schema_for_item(&merged);
    self.collection.insert(id.to_owned(), merged.clone());
    Some(merged)
}
/// Removes and returns the item stored under `id`, if any.
pub fn delete(&mut self, id: &str) -> Option<Value> {
self.collection.remove(id)
}
/// Removes every item and returns how many were removed.
/// Note: the inferred schema is intentionally kept.
pub fn clear(&mut self) -> usize {
    let removed = self.collection.len();
    self.collection.clear();
    removed
}
pub fn load_from_json(&mut self, json_value: Value, keep: bool) -> Result<Vec<Value>, String> {
let Value::Array(_) = json_value else {
return Err("⚠️ Informed JSON does not contain a JSON array in the root, skipping initial data load".to_string());
};
if !keep {
self.clear();
}
let added_items = self.add_batch(json_value);
Ok(added_items)
}
/// Reads a JSON file and loads its array of items, replacing current contents.
/// Returns a human-readable summary on success, or a descriptive error when
/// the file is unreadable, not valid JSON, or not a JSON array.
pub fn load_from_file(&mut self, file_path: &OsString) -> Result<String, String> {
    let display_path = file_path.to_string_lossy();
    let content = match fs::read_to_string(file_path) {
        Ok(content) => content,
        Err(_) => return Err(format!("⚠️ Could not read file {}, skipping initial data load", display_path)),
    };
    let parsed = serde_json::from_str::<Value>(&content)
        .map_err(|_| format!("⚠️ File {} does not contain valid JSON, skipping initial data load", display_path))?;
    self.load_from_json(parsed, false)
        .map(|added| format!("✔️ Loaded {} initial items from {}", added.len(), display_path))
        .map_err(|error| format!("Error to process the file {}. Details: {}", display_path, error))
}
}
/// Public façade over [`InternalMemoryCollection`], serializing all access through an `RwLock`.
pub struct DbCollection {
pub(crate) collection: MemoryCollection,
}
impl DbCollection {
    /// Creates a new lock-protected collection with the given name and config.
    ///
    /// Read accessors take the read lock, mutators the write lock. The
    /// `unwrap()` calls panic if the lock was poisoned by a panicking writer;
    /// only `schema()` degrades gracefully in that case.
    pub fn new_coll(name: &str, config: DbConfig) -> Self {
        Self {
            collection: InternalMemoryCollection::new_coll(name, config).into_protected(),
        }
    }
    /// Returns a cloned snapshot of every stored item.
    pub fn get_all(&self) -> Vec<Value> {
        self.collection.read().unwrap().get_all()
    }
    /// Returns up to `limit` items starting at `offset` (unspecified order).
    pub fn get_paginated(&self, offset: usize, limit: usize) -> Vec<Value> {
        self.collection.read().unwrap().get_paginated(offset, limit)
    }
    /// Looks up a single item by id.
    pub fn get(&self, id: &str) -> Option<Value> {
        self.collection.read().unwrap().get(id)
    }
    /// Reports whether an item with the given id exists.
    pub fn exists(&self, id: &str) -> bool {
        self.collection.read().unwrap().exists(id)
    }
    /// Returns the number of stored items.
    pub fn count(&self) -> usize {
        self.collection.read().unwrap().count()
    }
    /// Inserts one item; returns the stored value or `None` if no id could be determined.
    pub fn add(&self, item: Value) -> Option<Value> {
        self.collection.write().unwrap().add(item)
    }
    /// Inserts every usable object from a JSON array; returns the items added.
    pub fn add_batch(&self, items: Value) -> Vec<Value> {
        self.collection.write().unwrap().add_batch(items)
    }
    /// Replaces the item stored under `id` wholesale.
    pub fn update(&self, id: &str, item: Value) -> Option<Value> {
        self.collection.write().unwrap().update(id, item)
    }
    /// Deep-merges `partial_item` into the stored item.
    pub fn update_partial(&self, id: &str, partial_item: Value) -> Option<Value> {
        self.collection.write().unwrap().update_partial(id, partial_item)
    }
    /// Removes and returns the item stored under `id`, if any.
    pub fn delete(&self, id: &str) -> Option<Value> {
        self.collection.write().unwrap().delete(id)
    }
    /// Removes all items; returns how many were removed.
    pub fn clear(&self) -> usize {
        self.collection.write().unwrap().clear()
    }
    /// Bulk-loads items from a JSON array value (see the internal method).
    pub fn load_from_json(&self, json_value: Value, keep: bool) -> Result<Vec<Value>, String> {
        self.collection.write().unwrap().load_from_json(json_value, keep)
    }
    /// Reads and loads a JSON file, replacing current contents.
    pub fn load_from_file(&self, file_path: &OsString) -> Result<String, String> {
        self.collection.write().unwrap().load_from_file(file_path)
    }
    /// Serializes all items as pretty-printed JSON to `file_path`.
    ///
    /// I/O and serialization failures are reported through the `Err` variant
    /// (the signature promises `Result`), rather than panicking mid-write.
    pub fn write_to_file(&self, file_path: &OsString) -> Result<(), String> {
        let file = std::fs::File::create(file_path)
            .map_err(|error| format!("Failed to create json file: {}", error))?;
        let mut writer = BufWriter::new(file);
        let data = self.get_all();
        serde_json::to_writer_pretty(&mut writer, &data)
            .map_err(|error| format!("Failed to write to a json file: {}", error))?;
        // `into_inner` flushes the buffer and surfaces any deferred I/O error;
        // relying on Drop would silently swallow flush failures.
        writer
            .into_inner()
            .map_err(|error| format!("Failed to flush json file: {}", error))?;
        Ok(())
    }
    /// Returns the inferred schema; `None` if empty or the lock is poisoned.
    pub fn schema(&self) -> Option<SchemaDict> {
        self.collection.read().ok().and_then(|guard| guard.schema())
    }
    /// Returns a copy of the collection configuration.
    pub fn get_config(&self) -> DbConfig {
        self.collection.read().unwrap().config.clone()
    }
}
#[cfg(test)]
mod tests {
    // All imports consolidated here; previously some `use` statements sat in
    // the middle of the module and `test_write_to_file` re-imported names that
    // were already in scope.
    use super::*;
    use serde_json::json;
    use std::ffi::OsString;
    use std::fs::{self, File};
    use std::io::Write;
    use tempfile::TempDir;

    // --- fixtures ----------------------------------------------------------

    fn create_test_collection() -> InternalMemoryCollection {
        InternalMemoryCollection::new("test_collection", DbConfig::int("id"))
    }
    fn create_uuid_collection() -> InternalMemoryCollection {
        InternalMemoryCollection::new("uuid_collection", DbConfig::uuid("id"))
    }
    fn create_none_collection() -> InternalMemoryCollection {
        InternalMemoryCollection::new("none_collection", DbConfig::none("id"))
    }

    #[test]
    fn test_new_collection() {
        let collection = create_test_collection();
        assert_eq!(collection.count(), 0);
        assert_eq!(collection.config.id_key, "id");
        assert_eq!(collection.name, "test_collection");
    }
    #[test]
    fn test_into_protected() {
        let collection = create_test_collection();
        let protected = collection.into_protected();
        let guard = protected.read().unwrap();
        assert_eq!(guard.count(), 0);
        assert_eq!(guard.name, "test_collection");
    }
    #[test]
    fn test_get_all_empty() {
        let collection = create_test_collection();
        let all_items = collection.get_all();
        assert!(all_items.is_empty());
    }
    #[test]
    fn test_get_all_with_items() {
        let mut collection = create_test_collection();
        collection.add(json!({"name": "Item 1"}));
        collection.add(json!({"name": "Item 2"}));
        collection.add(json!({"name": "Item 3"}));
        let all_items = collection.get_all();
        assert_eq!(all_items.len(), 3);
        for item in &all_items {
            assert!(item.get("id").is_some());
            assert!(item.get("name").is_some());
        }
    }
    #[test]
    fn test_get_existing_item() {
        let mut collection = create_test_collection();
        let item = collection.add(json!({"name": "Test Item"})).unwrap();
        let id = item.get("id").unwrap().as_str().unwrap();
        let retrieved = collection.get(id);
        assert!(retrieved.is_some());
        assert_eq!(retrieved.unwrap().get("name").unwrap(), "Test Item");
    }
    #[test]
    fn test_get_nonexistent_item() {
        let collection = create_test_collection();
        let retrieved = collection.get("999");
        assert!(retrieved.is_none());
    }
    #[test]
    fn test_get_paginated_empty() {
        let collection = create_test_collection();
        let paginated = collection.get_paginated(0, 10);
        assert!(paginated.is_empty());
    }
    #[test]
    fn test_get_paginated_with_items() {
        let mut collection = create_test_collection();
        for i in 1..=10 {
            collection.add(json!({"name": format!("Item {}", i)}));
        }
        let first_page = collection.get_paginated(0, 3);
        assert_eq!(first_page.len(), 3);
        let second_page = collection.get_paginated(3, 3);
        assert_eq!(second_page.len(), 3);
        let last_page = collection.get_paginated(9, 5);
        assert_eq!(last_page.len(), 1);
        let empty_page = collection.get_paginated(15, 5);
        assert!(empty_page.is_empty());
    }
    #[test]
    fn test_exists() {
        let mut collection = create_test_collection();
        let item = collection.add(json!({"name": "Test Item"})).unwrap();
        let id = item.get("id").unwrap().as_str().unwrap();
        assert!(collection.exists(id));
        assert!(!collection.exists("999"));
    }
    #[test]
    fn test_count() {
        let mut collection = create_test_collection();
        assert_eq!(collection.count(), 0);
        collection.add(json!({"name": "Item 1"}));
        assert_eq!(collection.count(), 1);
        collection.add(json!({"name": "Item 2"}));
        assert_eq!(collection.count(), 2);
        let all_items = collection.get_all();
        let id = all_items[0].get("id").unwrap().as_str().unwrap();
        collection.delete(id);
        assert_eq!(collection.count(), 1);
    }
    #[test]
    fn test_add_with_int_id() {
        let mut collection = create_test_collection();
        let item = collection.add(json!({"name": "Test Item"}));
        assert!(item.is_some());
        let item = item.unwrap();
        assert_eq!(item.get("name").unwrap(), "Test Item");
        assert_eq!(item.get("id").unwrap(), "1");
        let item2 = collection.add(json!({"name": "Test Item 2"})).unwrap();
        assert_eq!(item2.get("id").unwrap(), "2");
    }
    #[test]
    fn test_add_with_uuid_id() {
        let mut collection = create_uuid_collection();
        let item = collection.add(json!({"name": "Test Item"}));
        assert!(item.is_some());
        let item = item.unwrap();
        assert_eq!(item.get("name").unwrap(), "Test Item");
        let id = item.get("id").unwrap().as_str().unwrap();
        assert!(!id.is_empty());
        assert!(id.len() > 10);
    }
    #[test]
    fn test_add_with_none_id_existing() {
        let mut collection = create_none_collection();
        let item = collection.add(json!({"id": "custom-id", "name": "Test Item"}));
        assert!(item.is_some());
        let item = item.unwrap();
        assert_eq!(item.get("name").unwrap(), "Test Item");
        assert_eq!(item.get("id").unwrap(), "custom-id");
    }
    #[test]
    fn test_add_with_none_id_number_existing() {
        let mut collection = create_none_collection();
        let item = collection.add(json!({"id": 1, "name": "Test Item"}));
        assert!(item.is_some());
        let item = item.unwrap();
        assert_eq!(item.get("name").unwrap(), "Test Item");
        assert_eq!(item.get("id").unwrap(), 1);
    }
    #[test]
    fn test_add_with_none_id_missing() {
        let mut collection = create_none_collection();
        let item = collection.add(json!({"name": "Test Item"}));
        assert!(item.is_none());
        assert_eq!(collection.count(), 0);
    }
    #[test]
    fn test_add_batch_int() {
        let mut collection = create_test_collection();
        let batch = json!([
            {"name": "Item 1"},
            {"id": 5, "name": "Item 2"},
            {"id": 3, "name": "Item 3"},
            {"id": 10, "name": "Item 4"}
        ]);
        let added_items = collection.add_batch(batch);
        assert_eq!(added_items.len(), 3);
        assert_eq!(collection.count(), 3);
        // The id generator must continue past the largest batch id.
        let new_item = collection.add(json!({"name": "New Item"})).unwrap();
        assert_eq!(new_item.get("id").unwrap(), "11");
    }
    #[test]
    fn test_add_batch_uuid() {
        let mut collection = create_uuid_collection();
        let batch = json!([
            {"id": "uuid-1", "name": "Item 1"},
            {"id": "uuid-2", "name": "Item 2"},
            {"name": "Item 3"}
        ]);
        let added_items = collection.add_batch(batch);
        assert_eq!(added_items.len(), 2);
        assert_eq!(collection.count(), 2);
    }
    #[test]
    fn test_add_batch_none() {
        let mut collection = create_none_collection();
        let batch = json!([
            {"id": "custom-1", "name": "Item 1"},
            {"id": "custom-2", "name": "Item 2"},
            {"name": "Item 3"},
            {"id": 3, "name": "Item 4"},
        ]);
        let added_items = collection.add_batch(batch);
        assert_eq!(added_items.len(), 3);
        assert_eq!(collection.count(), 3);
    }
    #[test]
    fn test_add_batch_non_array() {
        let mut collection = create_test_collection();
        let non_array = json!({"name": "Single Item"});
        let added_items = collection.add_batch(non_array);
        assert!(added_items.is_empty());
        assert_eq!(collection.count(), 0);
    }
    #[test]
    fn test_update_existing_item() {
        let mut collection = create_test_collection();
        let item = collection.add(json!({"name": "Original Name"})).unwrap();
        let id = item.get("id").unwrap().as_str().unwrap();
        let updated = collection.update(id, json!({"name": "Updated Name", "description": "New field"}));
        assert!(updated.is_some());
        let updated_item = updated.unwrap();
        assert_eq!(updated_item.get("name").unwrap(), "Updated Name");
        assert_eq!(updated_item.get("description").unwrap(), "New field");
        assert_eq!(updated_item.get("id").unwrap(), id);
        let retrieved = collection.get(id).unwrap();
        assert_eq!(retrieved.get("name").unwrap(), "Updated Name");
    }
    #[test]
    fn test_update_nonexistent_item() {
        let mut collection = create_test_collection();
        let updated = collection.update("999", json!({"name": "Updated Name"}));
        assert!(updated.is_none());
    }
    #[test]
    fn test_update_partial_existing_item() {
        let mut collection = create_test_collection();
        let item = collection.add(json!({
            "name": "Original Name",
            "description": "Original Description",
            "count": 42
        })).unwrap();
        let id = item.get("id").unwrap().as_str().unwrap();
        let updated = collection.update_partial(id, json!({"name": "Updated Name"}));
        assert!(updated.is_some());
        let updated_item = updated.unwrap();
        assert_eq!(updated_item.get("name").unwrap(), "Updated Name");
        assert_eq!(updated_item.get("description").unwrap(), "Original Description");
        assert_eq!(updated_item.get("count").unwrap(), 42);
        assert_eq!(updated_item.get("id").unwrap(), id);
    }
    #[test]
    fn test_update_partial_nested_objects() {
        let mut collection = create_test_collection();
        let item = collection.add(json!({
            "name": "Test Item",
            "config": {
                "enabled": true,
                "timeout": 30,
                "nested": {
                    "value": "original"
                }
            }
        })).unwrap();
        let id = item.get("id").unwrap().as_str().unwrap();
        let updated = collection.update_partial(id, json!({
            "config": {
                "timeout": 60,
                "nested": {
                    "value": "updated",
                    "new_field": "added"
                }
            }
        }));
        assert!(updated.is_some());
        let updated_item = updated.unwrap();
        let config = updated_item.get("config").unwrap();
        assert_eq!(config.get("enabled").unwrap(), true);
        assert_eq!(config.get("timeout").unwrap(), 60);
        let nested = config.get("nested").unwrap();
        assert_eq!(nested.get("value").unwrap(), "updated");
        assert_eq!(nested.get("new_field").unwrap(), "added");
    }
    #[test]
    fn test_update_partial_nonexistent_item() {
        let mut collection = create_test_collection();
        let updated = collection.update_partial("999", json!({"name": "Updated Name"}));
        assert!(updated.is_none());
    }
    #[test]
    fn test_delete_existing_item() {
        let mut collection = create_test_collection();
        let item = collection.add(json!({"name": "Test Item"})).unwrap();
        let id = item.get("id").unwrap().as_str().unwrap();
        assert_eq!(collection.count(), 1);
        let deleted = collection.delete(id);
        assert!(deleted.is_some());
        assert_eq!(deleted.unwrap().get("name").unwrap(), "Test Item");
        assert_eq!(collection.count(), 0);
        assert!(!collection.exists(id));
    }
    #[test]
    fn test_delete_nonexistent_item() {
        let mut collection = create_test_collection();
        let deleted = collection.delete("999");
        assert!(deleted.is_none());
    }
    #[test]
    fn test_clear_empty_collection() {
        let mut collection = create_test_collection();
        let count = collection.clear();
        assert_eq!(count, 0);
        assert_eq!(collection.count(), 0);
    }
    #[test]
    fn test_clear_with_items() {
        let mut collection = create_test_collection();
        collection.add(json!({"name": "Item 1"}));
        collection.add(json!({"name": "Item 2"}));
        collection.add(json!({"name": "Item 3"}));
        assert_eq!(collection.count(), 3);
        let count = collection.clear();
        assert_eq!(count, 3);
        assert_eq!(collection.count(), 0);
        assert!(collection.get_all().is_empty());
    }
    #[test]
    fn test_merge_json_values_objects() {
        let base = json!({
            "name": "Original",
            "config": {
                "enabled": true,
                "timeout": 30
            },
            "tags": ["tag1", "tag2"]
        });
        let update = json!({
            "name": "Updated",
            "config": {
                "timeout": 60,
                "new_setting": "value"
            },
            "description": "New field"
        });
        let merged = InternalMemoryCollection::merge_json_values(base, update);
        assert_eq!(merged.get("name").unwrap(), "Updated");
        assert_eq!(merged.get("description").unwrap(), "New field");
        assert_eq!(merged.get("tags").unwrap(), &json!(["tag1", "tag2"]));
        let config = merged.get("config").unwrap();
        assert_eq!(config.get("enabled").unwrap(), true);
        assert_eq!(config.get("timeout").unwrap(), 60);
        assert_eq!(config.get("new_setting").unwrap(), "value");
    }
    #[test]
    fn test_merge_json_values_non_objects() {
        let base = json!("original");
        let update = json!("updated");
        let merged = InternalMemoryCollection::merge_json_values(base, update);
        assert_eq!(merged, json!("updated"));
        let base = json!(42);
        let update = json!(100);
        let merged = InternalMemoryCollection::merge_json_values(base, update);
        assert_eq!(merged, json!(100));
    }
    #[test]
    fn test_id_manager_integration() {
        let mut collection = create_test_collection();
        let item1 = collection.add(json!({"name": "Item 1"})).unwrap();
        assert_eq!(item1.get("id").unwrap(), "1");
        let item2 = collection.add(json!({"name": "Item 2"})).unwrap();
        assert_eq!(item2.get("id").unwrap(), "2");
        let item3 = collection.add(json!({"name": "Item 3"})).unwrap();
        assert_eq!(item3.get("id").unwrap(), "3");
    }
    #[test]
    fn test_custom_id_key() {
        let mut collection = InternalMemoryCollection::new(
            "custom_collection",
            DbConfig::int("customId")
        );
        let item = collection.add(json!({"name": "Test Item"})).unwrap();
        assert_eq!(item.get("customId").unwrap(), "1");
        assert!(item.get("id").is_none());
        let retrieved = collection.get("1").unwrap();
        assert_eq!(retrieved.get("customId").unwrap(), "1");
    }
    #[test]
    fn test_load_from_file_valid_json_array() {
        let mut collection = create_test_collection();
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("test_data.json");
        let test_data = json!([
            {"id": 1, "name": "Item 1", "description": "First item"},
            {"id": 2, "name": "Item 2", "description": "Second item"},
            {"id": 3, "name": "Item 3", "description": "Third item"}
        ]);
        let mut file = File::create(&file_path).unwrap();
        file.write_all(test_data.to_string().as_bytes()).unwrap();
        let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
        assert!(result.is_ok());
        assert!(result.unwrap().contains("Loaded 3 initial items"));
        assert_eq!(collection.count(), 3);
        assert!(collection.exists("1"));
        assert!(collection.exists("2"));
        assert!(collection.exists("3"));
        let item1 = collection.get("1").unwrap();
        assert_eq!(item1.get("name").unwrap(), "Item 1");
        assert_eq!(item1.get("description").unwrap(), "First item");
    }
    #[test]
    fn test_load_from_file_empty_array() {
        let mut collection = create_test_collection();
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("empty_array.json");
        let test_data = json!([]);
        let mut file = File::create(&file_path).unwrap();
        file.write_all(test_data.to_string().as_bytes()).unwrap();
        let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
        assert!(result.is_ok());
        assert!(result.unwrap().contains("Loaded 0 initial items"));
        assert_eq!(collection.count(), 0);
    }
    #[test]
    fn test_load_from_file_with_uuid_collection() {
        let mut collection = create_uuid_collection();
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("uuid_data.json");
        let test_data = json!([
            {"id": "uuid-1", "name": "Item 1"},
            {"id": "uuid-2", "name": "Item 2"}
        ]);
        let mut file = File::create(&file_path).unwrap();
        file.write_all(test_data.to_string().as_bytes()).unwrap();
        let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
        assert!(result.is_ok());
        assert!(result.unwrap().contains("Loaded 2 initial items"));
        assert_eq!(collection.count(), 2);
        assert!(collection.exists("uuid-1"));
        assert!(collection.exists("uuid-2"));
    }
    #[test]
    fn test_load_from_file_with_mixed_id_types() {
        let mut collection = create_none_collection();
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("mixed_data.json");
        let test_data = json!([
            {"id": "string-id", "name": "Item 1"},
            {"id": 42, "name": "Item 2"},
            {"name": "Item 3"}
        ]);
        let mut file = File::create(&file_path).unwrap();
        file.write_all(test_data.to_string().as_bytes()).unwrap();
        let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
        assert!(result.is_ok());
        assert!(result.unwrap().contains("Loaded 2 initial items"));
        assert_eq!(collection.count(), 2);
        assert!(collection.exists("string-id"));
        assert!(collection.exists("42"));
    }
    #[test]
    fn test_load_from_file_nonexistent_file() {
        let mut collection = create_test_collection();
        let nonexistent_path = OsString::from("/path/that/does/not/exist.json");
        let result = collection.load_from_file(&nonexistent_path);
        assert!(result.is_err());
        let error_msg = result.unwrap_err();
        assert!(error_msg.contains("Could not read file"));
        assert!(error_msg.contains("skipping initial data load"));
        assert_eq!(collection.count(), 0);
    }
    #[test]
    fn test_load_from_file_invalid_json() {
        let mut collection = create_test_collection();
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("invalid.json");
        let mut file = File::create(&file_path).unwrap();
        file.write_all(b"{ invalid json content }").unwrap();
        let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
        assert!(result.is_err());
        let error_msg = result.unwrap_err();
        assert!(error_msg.contains("does not contain valid JSON"));
        assert!(error_msg.contains("skipping initial data load"));
        assert_eq!(collection.count(), 0);
    }
    #[test]
    fn test_load_from_file_json_object_not_array() {
        let mut collection = create_test_collection();
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("object.json");
        let test_data = json!({"id": 1, "name": "Single Item"});
        let mut file = File::create(&file_path).unwrap();
        file.write_all(test_data.to_string().as_bytes()).unwrap();
        let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
        assert!(result.is_err());
        let error_msg = result.unwrap_err();
        assert!(error_msg.contains("does not contain a JSON array"));
        assert!(error_msg.contains("skipping initial data load"));
        assert_eq!(collection.count(), 0);
    }
    #[test]
    fn test_load_from_file_json_primitive_not_array() {
        let mut collection = create_test_collection();
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("primitive.json");
        let mut file = File::create(&file_path).unwrap();
        file.write_all(b"\"just a string\"").unwrap();
        let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("does not contain a JSON array"));
        assert_eq!(collection.count(), 0);
    }
    #[test]
    fn test_load_from_file_updates_id_manager() {
        let mut collection = create_test_collection();
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("id_update_test.json");
        let test_data = json!([
            {"id": 10, "name": "Item 1"},
            {"id": 15, "name": "Item 2"},
            {"id": 5, "name": "Item 3"}
        ]);
        let mut file = File::create(&file_path).unwrap();
        file.write_all(test_data.to_string().as_bytes()).unwrap();
        let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
        assert!(result.is_ok());
        let new_item = collection.add(json!({"name": "New Item"})).unwrap();
        assert_eq!(new_item.get("id").unwrap(), "16");
    }
    #[test]
    fn test_load_from_file_large_dataset() {
        let mut collection = create_test_collection();
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("large_dataset.json");
        let mut items = Vec::new();
        for i in 1..=1000 {
            items.push(json!({
                "id": i,
                "name": format!("Item {}", i),
                "value": i * 10
            }));
        }
        let test_data = json!(items);
        let mut file = File::create(&file_path).unwrap();
        file.write_all(test_data.to_string().as_bytes()).unwrap();
        let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
        assert!(result.is_ok());
        assert!(result.unwrap().contains("Loaded 1000 initial items"));
        assert_eq!(collection.count(), 1000);
        assert!(collection.exists("1"));
        assert!(collection.exists("500"));
        assert!(collection.exists("1000"));
        let item_500 = collection.get("500").unwrap();
        assert_eq!(item_500.get("name").unwrap(), "Item 500");
        assert_eq!(item_500.get("value").unwrap(), 5000);
    }
    #[test]
    fn test_load_from_file_with_existing_data() {
        let mut collection = create_test_collection();
        collection.add(json!({"name": "Existing Item 1"}));
        collection.add(json!({"name": "Existing Item 2"}));
        assert_eq!(collection.count(), 2);
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("additional_data.json");
        let test_data = json!([
            {"id": 10, "name": "Loaded Item 1"},
            {"id": 11, "name": "Loaded Item 2"}
        ]);
        let mut file = File::create(&file_path).unwrap();
        file.write_all(test_data.to_string().as_bytes()).unwrap();
        let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
        assert!(result.is_ok());
        assert!(result.unwrap().contains("Loaded 2 initial items"));
        // Loading from a file replaces any pre-existing data.
        assert_eq!(collection.count(), 2);
        assert!(!collection.exists("1"));
        assert!(!collection.exists("2"));
        assert!(collection.exists("10"));
        assert!(collection.exists("11"));
    }
    #[test]
    fn test_load_from_file_custom_id_key() {
        let mut collection = InternalMemoryCollection::new(
            "custom_collection",
            DbConfig::int("customId"),
        );
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("custom_id_data.json");
        let test_data = json!([
            {"customId": 1, "name": "Item 1"},
            {"customId": 2, "name": "Item 2"}
        ]);
        let mut file = File::create(&file_path).unwrap();
        file.write_all(test_data.to_string().as_bytes()).unwrap();
        let result = collection.load_from_file(&file_path.as_os_str().to_os_string());
        assert!(result.is_ok());
        assert!(result.unwrap().contains("Loaded 2 initial items"));
        assert_eq!(collection.count(), 2);
        assert!(collection.exists("1"));
        assert!(collection.exists("2"));
        let item1 = collection.get("1").unwrap();
        assert_eq!(item1.get("customId").unwrap(), 1);
        assert_eq!(item1.get("name").unwrap(), "Item 1");
    }
    #[test]
    fn test_write_to_file() {
        let tmp = TempDir::new().unwrap();
        let file_path = tmp.path().join("output.json");
        let os_file_path = OsString::from(file_path.to_string_lossy().into_owned());
        let config = DbConfig::int("id");
        let db_collection = DbCollection::new_coll("test", config);
        let item1 = json!({"name": "Alice"});
        let item2 = json!({"name": "Bob"});
        let stored1 = db_collection.add(item1).unwrap();
        let stored2 = db_collection.add(item2).unwrap();
        assert!(db_collection.write_to_file(&os_file_path).is_ok());
        let content = fs::read_to_string(file_path).unwrap();
        let parsed: Vec<serde_json::Value> = serde_json::from_str(&content).unwrap();
        assert_eq!(parsed.len(), 2);
        let ids: Vec<_> = parsed.iter().filter_map(|v| v.get("id")).collect();
        assert!(ids.contains(&stored1.get("id").unwrap()));
        assert!(ids.contains(&stored2.get("id").unwrap()));
    }
}