use crate::config::AuthConfig as ConfigAuthConfig;
use crate::encryption::{utils, AutoEncryptionProcessor, WorkspaceKeyManager};
use crate::workspace::{EntityId, Folder, MockRequest, Workspace, WorkspaceRegistry};
use crate::{Error, Result};
use chrono::{DateTime, Utc};
use once_cell::sync::Lazy;
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use tokio::fs;
/// Matches 16-digit card-number shapes: four groups of four digits,
/// optionally separated by a single space or dash. Compiled once on first
/// use. Not referenced in the visible portion of this file — presumably
/// used by the security-check code; confirm against the rest of the module.
static CREDIT_CARD_PATTERN: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"\b\d{4}[-\s]?\d{4}[-\s]?\d{4}[-\s]?\d{4}\b")
        .expect("CREDIT_CARD_PATTERN regex is valid")
});
/// Matches US SSN-like shapes (3-2-4 digits with optional space/dash
/// separators). Compiled lazily on first use, like `CREDIT_CARD_PATTERN`.
/// Not referenced in the visible portion of this file — confirm usage.
static SSN_PATTERN: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"\b\d{3}[-\s]?\d{2}[-\s]?\d{4}\b").expect("SSN_PATTERN regex is valid")
});
/// File-system persistence layer for workspaces. Each workspace is stored
/// as `<base_dir>/<id>.yaml`, alongside the `registry.yaml` index and the
/// `sync_state.yaml` incremental-sync marker in the same directory.
#[derive(Debug)]
pub struct WorkspacePersistence {
    /// Root directory under which all workspace YAML files live.
    base_dir: PathBuf,
}
/// On-disk shape of `registry.yaml`: the workspace list plus which
/// workspace is active. Kept separate from the in-memory
/// `WorkspaceRegistry` so the persisted format can evolve independently.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct SerializableWorkspaceRegistry {
    workspaces: Vec<Workspace>,
    active_workspace: Option<EntityId>,
}
/// Persisted high-water mark for incremental sync: workspace files whose
/// mtime is later than this timestamp are considered modified (see
/// `get_workspaces_for_sync`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SyncState {
    pub last_sync_timestamp: DateTime<Utc>,
}
/// Which workspaces a sync run covers.
#[derive(Debug, Clone, PartialEq)]
pub enum SyncStrategy {
    /// Sync every workspace found on disk.
    Full,
    /// Sync only workspaces modified since the saved `SyncState` timestamp.
    Incremental,
    /// Sync exactly the listed workspace IDs.
    Selective(Vec<String>),
}
/// Layout of the files produced by a directory sync.
#[derive(Debug, Clone, PartialEq)]
pub enum DirectoryStructure {
    /// One `<name>.yaml` per workspace directly in the target directory.
    Flat,
    /// One subdirectory per workspace containing `workspace.yaml` plus a
    /// `requests/` directory.
    Nested,
    /// Shared top-level `workspaces/` and `requests/` directories.
    Grouped,
}
/// Aggregate counters reported at the end of a directory sync run.
/// In dry-run mode these reflect what WOULD have been written.
#[derive(Debug, Clone)]
pub struct SyncResult {
    pub synced_workspaces: usize,
    pub synced_requests: usize,
    pub files_created: usize,
    /// Directory the sync targeted (as resolved from the caller's string).
    pub target_dir: PathBuf,
}
/// Outcome of `export_workspace_encrypted`. `backup_key` is the key-backup
/// string required to re-import the workspace on another machine — it must
/// be surfaced to the user, since the ciphertext alone is useless.
#[derive(Debug, Clone)]
pub struct EncryptedExportResult {
    pub output_path: PathBuf,
    pub backup_key: String,
    pub exported_at: DateTime<Utc>,
    pub workspace_name: String,
    pub encryption_enabled: bool,
}
/// Outcome of a successful encrypted import: identity of the newly
/// registered workspace plus summary counters.
#[derive(Debug, Clone)]
pub struct EncryptedImportResult {
    /// ID assigned by the registry for the imported workspace.
    pub workspace_id: String,
    pub workspace_name: String,
    pub imported_at: DateTime<Utc>,
    pub request_count: usize,
    pub encryption_restored: bool,
}
/// Result of a per-workspace security scan. Not produced in the visible
/// portion of this file — presumably filled by code using the PII regex
/// patterns above; confirm against the rest of the module.
#[derive(Debug, Clone)]
pub struct SecurityCheckResult {
    pub workspace_id: String,
    pub workspace_name: String,
    pub warnings: Vec<SecurityWarning>,
    pub errors: Vec<SecurityWarning>,
    pub is_secure: bool,
    pub recommended_actions: Vec<String>,
}
/// A single finding from a security scan: where it was found, how severe
/// it is, and what to do about it.
#[derive(Debug, Clone)]
pub struct SecurityWarning {
    pub field_type: String,
    pub field_name: String,
    /// Human-readable location of the finding (e.g. which request/env).
    pub location: String,
    pub severity: SecuritySeverity,
    pub message: String,
    pub suggestion: String,
}
/// Severity ladder for `SecurityWarning`, lowest to highest.
#[derive(Debug, Clone, PartialEq)]
pub enum SecuritySeverity {
    Low,
    Medium,
    High,
    Critical,
}
/// Self-contained, serializable snapshot of a workspace: metadata, config,
/// and every request (top-level and nested) keyed by request ID.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceExport {
    pub metadata: WorkspaceMetadata,
    pub config: WorkspaceConfig,
    pub requests: HashMap<String, ExportedRequest>,
}
/// Identifying/summary information captured at export time.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceMetadata {
    pub id: String,
    pub name: String,
    pub description: Option<String>,
    pub exported_at: DateTime<Utc>,
    /// Total requests in the export (including those nested in folders).
    pub request_count: usize,
    /// Top-level folder count only — nested folders are not included
    /// (see `create_workspace_export`).
    pub folder_count: usize,
}
/// Exported workspace configuration. `reality_level` and `ai_mode` use
/// `#[serde(default)]` so export files written before these fields existed
/// still deserialize.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceConfig {
    pub auth: Option<AuthConfig>,
    pub base_url: Option<String>,
    /// Global environment variables of the workspace.
    pub variables: HashMap<String, String>,
    #[serde(default)]
    pub reality_level: Option<crate::RealityLevel>,
    #[serde(default)]
    pub ai_mode: Option<crate::ai_studio::config::AiMode>,
}
/// Flattened auth configuration for exports: a scheme tag ("jwt",
/// "oauth2", "basic", "api_key") plus free-form string parameters — see
/// `from_config_auth` for the per-scheme keys.
/// NOTE(review): `params` can carry secrets (JWT secret, OAuth2 client
/// secret, basic-auth passwords) in plaintext; exports should be encrypted.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuthConfig {
    pub auth_type: String,
    pub params: HashMap<String, String>,
}
impl AuthConfig {
    /// Translate the richer `crate::config::AuthConfig` into the flat
    /// string-map form used by workspace exports.
    ///
    /// Exactly one scheme is emitted, checked in priority order: JWT,
    /// OAuth2, basic auth, API key. Returns `None` when no scheme is
    /// configured. Optional fields are only inserted when present; list
    /// fields (`algorithms`, `keys`) are comma-joined and skipped when
    /// empty. Basic-auth credentials are stored as `username -> password`
    /// pairs directly in the map.
    pub fn from_config_auth(config_auth: &ConfigAuthConfig) -> Option<Self> {
        // Copy an optional value into the param map under `key`, skipping `None`.
        fn put_opt(params: &mut HashMap<String, String>, key: &str, value: &Option<String>) {
            if let Some(v) = value {
                params.insert(key.to_string(), v.clone());
            }
        }
        let mut params = HashMap::new();
        let auth_type = if let Some(jwt) = &config_auth.jwt {
            put_opt(&mut params, "secret", &jwt.secret);
            put_opt(&mut params, "rsa_public_key", &jwt.rsa_public_key);
            put_opt(&mut params, "ecdsa_public_key", &jwt.ecdsa_public_key);
            put_opt(&mut params, "issuer", &jwt.issuer);
            put_opt(&mut params, "audience", &jwt.audience);
            if !jwt.algorithms.is_empty() {
                params.insert("algorithms".to_string(), jwt.algorithms.join(","));
            }
            "jwt"
        } else if let Some(oauth2) = &config_auth.oauth2 {
            params.insert("client_id".to_string(), oauth2.client_id.clone());
            params.insert("client_secret".to_string(), oauth2.client_secret.clone());
            params.insert("introspection_url".to_string(), oauth2.introspection_url.clone());
            put_opt(&mut params, "auth_url", &oauth2.auth_url);
            put_opt(&mut params, "token_url", &oauth2.token_url);
            put_opt(&mut params, "token_type_hint", &oauth2.token_type_hint);
            "oauth2"
        } else if let Some(basic_auth) = &config_auth.basic_auth {
            for (user, pass) in &basic_auth.credentials {
                params.insert(user.clone(), pass.clone());
            }
            "basic"
        } else if let Some(api_key) = &config_auth.api_key {
            params.insert("header_name".to_string(), api_key.header_name.clone());
            put_opt(&mut params, "query_name", &api_key.query_name);
            if !api_key.keys.is_empty() {
                params.insert("keys".to_string(), api_key.keys.join(","));
            }
            "api_key"
        } else {
            return None;
        };
        Some(AuthConfig {
            auth_type: auth_type.to_string(),
            params,
        })
    }
}
/// Serializable form of a single mock request, flattened for export.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExportedRequest {
    pub id: String,
    pub name: String,
    /// HTTP method rendered via the enum's `Debug` formatting
    /// (see `convert_request_to_exported`).
    pub method: String,
    pub path: String,
    /// Slash-separated folder location; empty string for top-level requests.
    pub folder_path: String,
    pub headers: HashMap<String, String>,
    pub query_params: HashMap<String, String>,
    pub body: Option<String>,
    pub response_status: Option<u16>,
    pub response_body: Option<String>,
    pub response_headers: HashMap<String, String>,
    /// Simulated response delay in milliseconds, when configured.
    pub delay: Option<u64>,
}
impl WorkspacePersistence {
/// Create a persistence layer rooted at `base_dir`.
///
/// The directory is not created here; it is created lazily by
/// `ensure_workspace_dir` the first time something is written.
pub fn new<P: AsRef<Path>>(base_dir: P) -> Self {
    let base_dir = base_dir.as_ref().to_path_buf();
    Self { base_dir }
}
/// Root directory under which all workspace files are stored.
pub fn workspace_dir(&self) -> &Path {
    self.base_dir.as_path()
}
/// Path of the YAML file holding the workspace with the given ID.
pub fn workspace_file_path(&self, workspace_id: &str) -> PathBuf {
    let file_name = format!("{}.yaml", workspace_id);
    self.base_dir.join(file_name)
}
/// Path of the registry index file (`registry.yaml`).
pub fn registry_file_path(&self) -> PathBuf {
    self.workspace_dir().join("registry.yaml")
}
/// Path of the incremental-sync state file (`sync_state.yaml`).
pub fn sync_state_file_path(&self) -> PathBuf {
    self.workspace_dir().join("sync_state.yaml")
}
pub async fn ensure_workspace_dir(&self) -> Result<()> {
if !self.base_dir.exists() {
fs::create_dir_all(&self.base_dir).await.map_err(|e| {
Error::io_with_context("creating workspace directory", e.to_string())
})?;
}
Ok(())
}
pub async fn save_workspace(&self, workspace: &Workspace) -> Result<()> {
self.ensure_workspace_dir().await?;
let file_path = self.workspace_file_path(&workspace.id);
let content = serde_yaml::to_string(workspace)
.map_err(|e| Error::config(format!("Failed to serialize workspace: {}", e)))?;
fs::write(&file_path, content)
.await
.map_err(|e| Error::io_with_context("writing workspace file", e.to_string()))?;
Ok(())
}
/// Read and deserialize a single workspace from `<base>/<id>.yaml`.
///
/// Errors: `not_found` when the file is missing, an I/O error on read
/// failure, and a config error when the YAML does not parse.
pub async fn load_workspace(&self, workspace_id: &str) -> Result<Workspace> {
    let file_path = self.workspace_file_path(workspace_id);
    if !file_path.exists() {
        return Err(Error::not_found("Workspace file", &*file_path.to_string_lossy()));
    }
    let content = fs::read_to_string(&file_path)
        .await
        .map_err(|e| Error::io_with_context("reading workspace file", e.to_string()))?;
    let mut workspace: Workspace = serde_yaml::from_str(&content)
        .map_err(|e| Error::config(format!("Failed to deserialize workspace: {}", e)))?;
    // Run on every load — presumably backfills default mock environments
    // missing from older serialized files; confirm against Workspace impl.
    workspace.initialize_default_mock_environments();
    Ok(workspace)
}
pub async fn delete_workspace(&self, workspace_id: &str) -> Result<()> {
let file_path = self.workspace_file_path(workspace_id);
if file_path.exists() {
fs::remove_file(&file_path)
.await
.map_err(|e| Error::io_with_context("deleting workspace file", e.to_string()))?;
}
Ok(())
}
/// Persist the registry index (`registry.yaml`): workspace list plus the
/// active-workspace ID. Individual workspace bodies are written separately
/// by `save_workspace` / `save_full_registry`.
pub async fn save_registry(&self, registry: &WorkspaceRegistry) -> Result<()> {
    self.ensure_workspace_dir().await?;
    // Snapshot the registry into its serializable on-disk shape.
    let serializable = SerializableWorkspaceRegistry {
        workspaces: registry.get_workspaces().into_iter().cloned().collect(),
        active_workspace: registry.get_active_workspace_id().map(|s| s.to_string()),
    };
    let file_path = self.registry_file_path();
    let content = serde_yaml::to_string(&serializable)
        .map_err(|e| Error::config(format!("Failed to serialize registry: {}", e)))?;
    fs::write(&file_path, content)
        .await
        .map_err(|e| Error::io_with_context("writing registry file", e.to_string()))?;
    Ok(())
}
/// Load the registry index and every workspace it references.
///
/// A missing registry file yields a fresh, empty registry. Workspaces that
/// fail to load are logged and skipped rather than failing the whole load;
/// restoring the active-workspace ID is likewise best-effort.
pub async fn load_registry(&self) -> Result<WorkspaceRegistry> {
    let file_path = self.registry_file_path();
    if !file_path.exists() {
        return Ok(WorkspaceRegistry::new());
    }
    let content = fs::read_to_string(&file_path)
        .await
        .map_err(|e| Error::io_with_context("reading registry file", e.to_string()))?;
    let serializable: SerializableWorkspaceRegistry = serde_yaml::from_str(&content)
        .map_err(|e| Error::config(format!("Failed to deserialize registry: {}", e)))?;
    let mut registry = WorkspaceRegistry::new();
    for workspace_meta in &serializable.workspaces {
        // The registry entry is only used for its ID here; the
        // authoritative workspace body is re-read from its own file.
        match self.load_workspace(&workspace_meta.id).await {
            Ok(mut workspace) => {
                workspace.initialize_default_mock_environments();
                registry.add_workspace(workspace)?;
            }
            Err(e) => {
                tracing::warn!("Failed to load workspace {}: {}", workspace_meta.id, e);
            }
        }
    }
    if let Some(active_id) = &serializable.active_workspace {
        if let Err(e) = registry.set_active_workspace(Some(active_id.clone())) {
            tracing::warn!("Failed to set active workspace {}: {}", active_id, e);
        }
    }
    Ok(registry)
}
pub async fn save_sync_state(&self, sync_state: &SyncState) -> Result<()> {
self.ensure_workspace_dir().await?;
let file_path = self.sync_state_file_path();
let content = serde_yaml::to_string(sync_state)
.map_err(|e| Error::config(format!("Failed to serialize sync state: {}", e)))?;
fs::write(&file_path, content)
.await
.map_err(|e| Error::io_with_context("writing sync state file", e.to_string()))?;
Ok(())
}
pub async fn load_sync_state(&self) -> Result<SyncState> {
let file_path = self.sync_state_file_path();
if !file_path.exists() {
return Ok(SyncState {
last_sync_timestamp: Utc::now(),
});
}
let content = fs::read_to_string(&file_path)
.await
.map_err(|e| Error::io_with_context("reading sync state file", e.to_string()))?;
let sync_state: SyncState = serde_yaml::from_str(&content)
.map_err(|e| Error::config(format!("Failed to deserialize sync state: {}", e)))?;
Ok(sync_state)
}
/// Scan the workspace directory and return the IDs of all stored
/// workspaces (the `.yaml` file stems).
///
/// Fix: `sync_state.yaml` lives in the same directory but was not
/// excluded, so `"sync_state"` leaked into the ID list; both bookkeeping
/// files (`registry.yaml`, `sync_state.yaml`) are now filtered out.
///
/// Returns an empty list when the base directory does not exist yet.
/// Non-`.yaml` files and subdirectories are ignored.
pub async fn list_workspace_ids(&self) -> Result<Vec<EntityId>> {
    if !self.base_dir.exists() {
        return Ok(Vec::new());
    }
    // Files that share the directory but are not workspaces.
    const RESERVED: [&str; 2] = ["registry.yaml", "sync_state.yaml"];
    let mut workspace_ids = Vec::new();
    let mut entries = fs::read_dir(&self.base_dir)
        .await
        .map_err(|e| Error::io_with_context("reading workspace directory", e.to_string()))?;
    while let Some(entry) = entries
        .next_entry()
        .await
        .map_err(|e| Error::io_with_context("reading directory entry", e.to_string()))?
    {
        let path = entry.path();
        if path.is_file() {
            if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
                if !RESERVED.contains(&file_name) {
                    // The file stem (name minus `.yaml`) is the workspace ID.
                    if let Some(id) = file_name.strip_suffix(".yaml") {
                        workspace_ids.push(id.to_string());
                    }
                }
            }
        }
    }
    Ok(workspace_ids)
}
/// Persist the registry index AND every workspace body it contains.
/// Not atomic: a failure mid-loop leaves earlier workspaces written.
pub async fn save_full_registry(&self, registry: &WorkspaceRegistry) -> Result<()> {
    self.save_registry(registry).await?;
    for workspace in registry.get_workspaces() {
        self.save_workspace(workspace).await?;
    }
    Ok(())
}
/// Alias for `load_registry`, which already loads each workspace body;
/// kept as the symmetric counterpart of `save_full_registry`.
pub async fn load_full_registry(&self) -> Result<WorkspaceRegistry> {
    self.load_registry().await
}
/// Copy a workspace file into `backup_dir` as
/// `<id>_<YYYYMMDD_HHMMSS>.yaml` (UTC timestamp) and return the backup's
/// path. Creates `backup_dir` if it does not exist.
pub async fn backup_workspace(&self, workspace_id: &str, backup_dir: &Path) -> Result<PathBuf> {
    let workspace_file = self.workspace_file_path(workspace_id);
    if !workspace_file.exists() {
        return Err(Error::not_found("Workspace", workspace_id));
    }
    if !backup_dir.exists() {
        fs::create_dir_all(backup_dir)
            .await
            .map_err(|e| Error::io_with_context("creating backup directory", e.to_string()))?;
    }
    // Timestamped name keeps multiple backups of the same workspace apart.
    let timestamp = Utc::now().format("%Y%m%d_%H%M%S");
    let backup_filename = format!("{}_{}.yaml", workspace_id, timestamp);
    let backup_path = backup_dir.join(backup_filename);
    fs::copy(&workspace_file, &backup_path)
        .await
        .map_err(|e| Error::io_with_context("creating backup", e.to_string()))?;
    Ok(backup_path)
}
/// Restore a workspace from a backup file, overwriting any existing
/// workspace file with the same ID. Returns the restored workspace's ID.
pub async fn restore_workspace(&self, backup_path: &Path) -> Result<EntityId> {
    if !backup_path.exists() {
        return Err(Error::not_found("Backup file", &*backup_path.to_string_lossy()));
    }
    let content = fs::read_to_string(backup_path)
        .await
        .map_err(|e| Error::io_with_context("reading backup file", e.to_string()))?;
    // Parse first so a corrupt backup fails before anything is written.
    let workspace: Workspace = serde_yaml::from_str(&content)
        .map_err(|e| Error::config(format!("Failed to deserialize backup: {}", e)))?;
    self.save_workspace(&workspace).await?;
    Ok(workspace.id)
}
/// Keep only the `keep_count` newest `.yaml` backups (by filesystem
/// mtime) in `backup_dir`, deleting the rest. Returns how many files were
/// removed; files whose metadata is unreadable or whose deletion fails are
/// silently skipped and not counted.
pub async fn cleanup_old_backups(&self, backup_dir: &Path, keep_count: usize) -> Result<usize> {
    if !backup_dir.exists() {
        return Ok(0);
    }
    let mut backup_files = Vec::new();
    let mut entries = fs::read_dir(backup_dir)
        .await
        .map_err(|e| Error::io_with_context("reading backup directory", e.to_string()))?;
    while let Some(entry) = entries
        .next_entry()
        .await
        .map_err(|e| Error::io_with_context("reading backup entry", e.to_string()))?
    {
        let path = entry.path();
        if path.is_file() {
            if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
                if file_name.ends_with(".yaml") {
                    if let Ok(metadata) = entry.metadata().await {
                        if let Ok(modified) = metadata.modified() {
                            backup_files.push((path, modified));
                        }
                    }
                }
            }
        }
    }
    // Newest first; everything past the first `keep_count` gets deleted.
    backup_files.sort_by(|a, b| b.1.cmp(&a.1));
    let mut removed_count = 0;
    for (path, _) in backup_files.iter().skip(keep_count) {
        if fs::remove_file(path).await.is_ok() {
            removed_count += 1;
        }
    }
    Ok(removed_count)
}
/// Sync workspaces to `target_dir` with full control over strategy,
/// layout, naming and dry-run behavior.
///
/// * `strategy` — `"full"`, `"incremental"` or `"selective"` (the latter
///   requires `workspace_ids`, a comma-separated ID list).
/// * `structure` — `"flat"`, `"nested"` or `"grouped"`.
/// * `filename_pattern` — supports `{name}`, `{id}` and `{timestamp}`.
/// * `exclude_pattern` — optional regex removing matching workspace IDs;
///   an invalid pattern is silently ignored (NOTE(review): consider
///   surfacing the regex error instead).
/// * `dry_run` — counts the work without touching the filesystem.
///
/// Fix: the incremental sync state is now only persisted when this is NOT
/// a dry run. Previously a dry run advanced the last-sync timestamp, which
/// made subsequent real incremental syncs skip modified workspaces.
#[allow(clippy::too_many_arguments)]
pub async fn sync_to_directory_advanced(
    &self,
    target_dir: &str,
    strategy: &str,
    workspace_ids: Option<&str>,
    structure: &str,
    include_meta: bool,
    force: bool,
    filename_pattern: &str,
    exclude_pattern: Option<&str>,
    dry_run: bool,
) -> Result<SyncResult> {
    let target_path = PathBuf::from(target_dir);
    if !dry_run && !target_path.exists() {
        fs::create_dir_all(&target_path)
            .await
            .map_err(|e| Error::io_with_context("creating target directory", e.to_string()))?;
    }
    let sync_strategy = match strategy {
        "full" => SyncStrategy::Full,
        "incremental" => SyncStrategy::Incremental,
        "selective" => {
            if let Some(ids) = workspace_ids {
                let workspace_list = ids.split(',').map(|s| s.trim().to_string()).collect();
                SyncStrategy::Selective(workspace_list)
            } else {
                return Err(Error::validation("Selective strategy requires workspace IDs"));
            }
        }
        _ => return Err(Error::validation(format!("Unknown sync strategy: {}", strategy))),
    };
    let dir_structure = match structure {
        "flat" => DirectoryStructure::Flat,
        "nested" => DirectoryStructure::Nested,
        "grouped" => DirectoryStructure::Grouped,
        _ => {
            return Err(Error::validation(format!(
                "Unknown directory structure: {}",
                structure
            )))
        }
    };
    let mut workspaces_to_sync = self.get_workspaces_for_sync(&sync_strategy).await?;
    if let Some(exclude) = exclude_pattern {
        // Best-effort exclusion: an unparseable regex is ignored.
        if let Ok(regex) = Regex::new(exclude) {
            workspaces_to_sync.retain(|id| !regex.is_match(id));
        }
    }
    let mut result = SyncResult {
        synced_workspaces: 0,
        synced_requests: 0,
        files_created: 0,
        target_dir: target_path.clone(),
    };
    for workspace_id in workspaces_to_sync {
        // Workspaces that fail to load are skipped, not fatal for the run.
        if let Ok(workspace) = self.load_workspace(&workspace_id).await {
            let workspace_result = self
                .sync_workspace_to_directory_advanced(
                    &workspace,
                    &target_path,
                    &dir_structure,
                    include_meta,
                    force,
                    filename_pattern,
                    dry_run,
                )
                .await?;
            result.synced_workspaces += 1;
            result.synced_requests += workspace_result.requests_count;
            result.files_created += workspace_result.files_created;
        }
    }
    // Only advance the incremental high-water mark when files were really
    // written; a dry run must leave the saved state untouched.
    if !dry_run {
        if let SyncStrategy::Incremental = sync_strategy {
            let new_sync_state = SyncState {
                last_sync_timestamp: Utc::now(),
            };
            if let Err(e) = self.save_sync_state(&new_sync_state).await {
                tracing::warn!("Failed to save sync state: {}", e);
            }
        }
    }
    Ok(result)
}
/// Write one workspace into `target_dir` using the requested layout.
///
/// Files are only (re)written when `force` is set or the file is absent.
/// In `dry_run` mode nothing is written, but `files_created` /
/// `requests_count` still reflect what WOULD have been created — except the
/// optional metadata file, which is neither written nor counted on dry runs
/// (NOTE(review): inconsistent with the other counters; confirm intended).
#[allow(clippy::too_many_arguments)]
async fn sync_workspace_to_directory_advanced(
    &self,
    workspace: &Workspace,
    target_dir: &Path,
    structure: &DirectoryStructure,
    include_meta: bool,
    force: bool,
    filename_pattern: &str,
    dry_run: bool,
) -> Result<WorkspaceSyncResult> {
    let mut result = WorkspaceSyncResult {
        requests_count: 0,
        files_created: 0,
    };
    match structure {
        DirectoryStructure::Flat => {
            // One self-contained `<pattern>.yaml` per workspace.
            let export = self.create_workspace_export(workspace).await?;
            let filename = self.generate_filename(filename_pattern, workspace);
            let file_path = target_dir.join(format!("{}.yaml", filename));
            if force || !file_path.exists() {
                if !dry_run {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::config(format!("Failed to serialize workspace: {}", e))
                    })?;
                    fs::write(&file_path, content).await.map_err(|e| {
                        Error::io_with_context("writing workspace file", e.to_string())
                    })?;
                }
                result.files_created += 1;
            }
        }
        DirectoryStructure::Nested => {
            // `<pattern>/workspace.yaml` plus `<pattern>/requests/*.yaml`.
            let workspace_dir =
                target_dir.join(self.generate_filename(filename_pattern, workspace));
            if !dry_run && !workspace_dir.exists() {
                fs::create_dir_all(&workspace_dir).await.map_err(|e| {
                    Error::io_with_context("creating workspace directory", e.to_string())
                })?;
            }
            let export = self.create_workspace_export(workspace).await?;
            let workspace_file = workspace_dir.join("workspace.yaml");
            if force || !workspace_file.exists() {
                if !dry_run {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::config(format!("Failed to serialize workspace: {}", e))
                    })?;
                    fs::write(&workspace_file, content).await.map_err(|e| {
                        Error::io_with_context("writing workspace file", e.to_string())
                    })?;
                }
                result.files_created += 1;
            }
            let requests_dir = workspace_dir.join("requests");
            if !dry_run && !requests_dir.exists() {
                fs::create_dir_all(&requests_dir).await.map_err(|e| {
                    Error::io_with_context("creating requests directory", e.to_string())
                })?;
            }
            result.requests_count += self
                .export_workspace_requests_advanced(workspace, &requests_dir, force, dry_run)
                .await?;
        }
        DirectoryStructure::Grouped => {
            // Shared `workspaces/` and `requests/` trees for all workspaces.
            let requests_dir = target_dir.join("requests");
            let workspaces_dir = target_dir.join("workspaces");
            if !dry_run {
                for dir in [&requests_dir, &workspaces_dir] {
                    if !dir.exists() {
                        fs::create_dir_all(dir).await.map_err(|e| {
                            Error::io_with_context("creating directory", e.to_string())
                        })?;
                    }
                }
            }
            let export = self.create_workspace_export(workspace).await?;
            let filename = self.generate_filename(filename_pattern, workspace);
            let workspace_file = workspaces_dir.join(format!("{}.yaml", filename));
            if force || !workspace_file.exists() {
                if !dry_run {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::config(format!("Failed to serialize workspace: {}", e))
                    })?;
                    fs::write(&workspace_file, content).await.map_err(|e| {
                        Error::io_with_context("writing workspace file", e.to_string())
                    })?;
                }
                result.files_created += 1;
            }
            result.requests_count += self
                .export_workspace_requests_grouped_advanced(
                    workspace,
                    &requests_dir,
                    force,
                    dry_run,
                )
                .await?;
        }
    }
    if include_meta && !dry_run {
        self.create_metadata_file(workspace, target_dir, structure).await?;
        result.files_created += 1;
    }
    Ok(result)
}
/// Expand a filename pattern for a workspace.
///
/// Supported placeholders: `{name}` (sanitized workspace name), `{id}`
/// (workspace ID) and `{timestamp}` (`YYYYMMDD_HHMMSS`, UTC, taken at
/// call time).
///
/// Fix: `&timestamp` had been mojibake-corrupted to `×tamp` (an HTML
/// `&times;` entity artifact), which did not compile.
fn generate_filename(&self, pattern: &str, workspace: &Workspace) -> String {
    let timestamp = Utc::now().format("%Y%m%d_%H%M%S");
    pattern
        .replace("{name}", &self.sanitize_filename(&workspace.name))
        .replace("{id}", &workspace.id)
        .replace("{timestamp}", &timestamp.to_string())
}
/// Export each top-level request of `workspace` as
/// `<sanitized-name>.yaml` inside `requests_dir`, then descend into every
/// folder. Returns the number of files written (or counted, on dry runs).
/// NOTE(review): all requests land in one flat directory, so two requests
/// with the same sanitized name collide — the last writer wins.
async fn export_workspace_requests_advanced(
    &self,
    workspace: &Workspace,
    requests_dir: &Path,
    force: bool,
    dry_run: bool,
) -> Result<usize> {
    let mut count = 0;
    for request in &workspace.requests {
        let file_path =
            requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
        if force || !file_path.exists() {
            if !dry_run {
                // Top-level requests carry an empty folder path.
                let exported = self.convert_request_to_exported(request, "");
                let content = serde_yaml::to_string(&exported).map_err(|e| {
                    Error::config(format!("Failed to serialize request: {}", e))
                })?;
                fs::write(&file_path, content).await.map_err(|e| {
                    Error::io_with_context("writing request file", e.to_string())
                })?;
            }
            count += 1;
        }
    }
    for folder in &workspace.folders {
        count += self
            .export_folder_requests_advanced(folder, requests_dir, force, &folder.name, dry_run)
            .await?;
    }
    Ok(count)
}
/// Export every request under `folder`, including all nested subfolders,
/// as individual YAML files in `requests_dir`. `folder_path` is the
/// slash-separated path prefix recorded on each exported request.
///
/// Traverses breadth-first with an explicit queue rather than recursion,
/// since a recursive `async fn` would need a boxed future.
///
/// Fix: three `&current_*` references had been mojibake-corrupted to
/// `¤t_*` (HTML `&curren;` entity artifacts) and did not compile.
async fn export_folder_requests_advanced(
    &self,
    folder: &Folder,
    requests_dir: &Path,
    force: bool,
    folder_path: &str,
    dry_run: bool,
) -> Result<usize> {
    use std::collections::VecDeque;
    let mut count = 0;
    let mut queue = VecDeque::new();
    queue.push_back((folder, folder_path.to_string()));
    while let Some((current_folder, current_path)) = queue.pop_front() {
        for request in &current_folder.requests {
            // NOTE(review): files are flat, so identically-named requests
            // from different folders collide on the same path.
            let file_path =
                requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
            if force || !file_path.exists() {
                if !dry_run {
                    let exported = self.convert_request_to_exported(request, &current_path);
                    let content = serde_yaml::to_string(&exported).map_err(|e| {
                        Error::config(format!("Failed to serialize request: {}", e))
                    })?;
                    fs::write(&file_path, content).await.map_err(|e| {
                        Error::io_with_context("writing request file", e.to_string())
                    })?;
                }
                count += 1;
            }
        }
        for subfolder in &current_folder.folders {
            let subfolder_path = if current_path.is_empty() {
                subfolder.name.clone()
            } else {
                format!("{}/{}", current_path, subfolder.name)
            };
            queue.push_back((subfolder, subfolder_path));
        }
    }
    Ok(count)
}
/// Grouped-layout variant: exports the workspace's requests into a
/// per-workspace subdirectory of the shared `requests/` tree
/// (`requests/<sanitized-workspace-name>/…`).
async fn export_workspace_requests_grouped_advanced(
    &self,
    workspace: &Workspace,
    requests_dir: &Path,
    force: bool,
    dry_run: bool,
) -> Result<usize> {
    let mut count = 0;
    let workspace_requests_dir = requests_dir.join(self.sanitize_filename(&workspace.name));
    if !dry_run && !workspace_requests_dir.exists() {
        fs::create_dir_all(&workspace_requests_dir).await.map_err(|e| {
            Error::io_with_context("creating workspace requests directory", e.to_string())
        })?;
    }
    count += self
        .export_workspace_requests_advanced(workspace, &workspace_requests_dir, force, dry_run)
        .await?;
    Ok(count)
}
/// Simpler sync entry point: like `sync_to_directory_advanced` but with
/// fixed naming (sanitized workspace names), no exclusion regex and no
/// dry-run support. After an incremental run the sync-state timestamp is
/// advanced (best-effort; a failure is only logged).
/// NOTE(review): this duplicates most of `sync_to_directory_advanced`;
/// consider delegating to it with default arguments.
pub async fn sync_to_directory(
    &self,
    target_dir: &str,
    strategy: &str,
    workspace_ids: Option<&str>,
    structure: &str,
    include_meta: bool,
    force: bool,
) -> Result<SyncResult> {
    let target_path = PathBuf::from(target_dir);
    if !target_path.exists() {
        fs::create_dir_all(&target_path)
            .await
            .map_err(|e| Error::io_with_context("creating target directory", e.to_string()))?;
    }
    // Parse the strategy string; "selective" additionally needs IDs.
    let sync_strategy = match strategy {
        "full" => SyncStrategy::Full,
        "incremental" => SyncStrategy::Incremental,
        "selective" => {
            if let Some(ids) = workspace_ids {
                let workspace_list = ids.split(',').map(|s| s.trim().to_string()).collect();
                SyncStrategy::Selective(workspace_list)
            } else {
                return Err(Error::validation("Selective strategy requires workspace IDs"));
            }
        }
        _ => return Err(Error::validation(format!("Unknown sync strategy: {}", strategy))),
    };
    let dir_structure = match structure {
        "flat" => DirectoryStructure::Flat,
        "nested" => DirectoryStructure::Nested,
        "grouped" => DirectoryStructure::Grouped,
        _ => {
            return Err(Error::validation(format!(
                "Unknown directory structure: {}",
                structure
            )))
        }
    };
    let workspaces_to_sync = self.get_workspaces_for_sync(&sync_strategy).await?;
    let mut result = SyncResult {
        synced_workspaces: 0,
        synced_requests: 0,
        files_created: 0,
        target_dir: target_path.clone(),
    };
    for workspace_id in workspaces_to_sync {
        // Workspaces that fail to load are skipped, not fatal for the run.
        if let Ok(workspace) = self.load_workspace(&workspace_id).await {
            let workspace_result = self
                .sync_workspace_to_directory(
                    &workspace,
                    &target_path,
                    &dir_structure,
                    include_meta,
                    force,
                )
                .await?;
            result.synced_workspaces += 1;
            result.synced_requests += workspace_result.requests_count;
            result.files_created += workspace_result.files_created;
        }
    }
    if let SyncStrategy::Incremental = sync_strategy {
        let new_sync_state = SyncState {
            last_sync_timestamp: Utc::now(),
        };
        if let Err(e) = self.save_sync_state(&new_sync_state).await {
            tracing::warn!("Failed to save sync state: {}", e);
        }
    }
    Ok(result)
}
/// Resolve a sync strategy into the concrete list of workspace IDs to
/// sync: everything on disk (Full), only files modified since the saved
/// sync state (Incremental), or the caller-provided list (Selective).
async fn get_workspaces_for_sync(&self, strategy: &SyncStrategy) -> Result<Vec<String>> {
    match strategy {
        SyncStrategy::Full => self.list_workspace_ids().await,
        SyncStrategy::Incremental => {
            let sync_state = self.load_sync_state().await?;
            let last_sync = sync_state.last_sync_timestamp;
            let all_workspace_ids = self.list_workspace_ids().await?;
            let mut modified_workspaces = Vec::new();
            for workspace_id in all_workspace_ids {
                let file_path = self.workspace_file_path(&workspace_id);
                // Compare file mtime against the saved high-water mark;
                // unreadable metadata means the workspace is skipped.
                if let Ok(metadata) = fs::metadata(&file_path).await {
                    if let Ok(modified_time) = metadata.modified() {
                        let modified_datetime = DateTime::<Utc>::from(modified_time);
                        if modified_datetime > last_sync {
                            modified_workspaces.push(workspace_id);
                        }
                    }
                }
            }
            Ok(modified_workspaces)
        }
        SyncStrategy::Selective(ids) => Ok(ids.clone()),
    }
}
/// Non-dry-run counterpart of `sync_workspace_to_directory_advanced`:
/// writes one workspace into `target_dir` using the requested layout,
/// naming files after the sanitized workspace name. Files are only
/// (re)written when `force` is set or the file does not yet exist.
/// NOTE(review): largely duplicates the `_advanced` variant.
async fn sync_workspace_to_directory(
    &self,
    workspace: &Workspace,
    target_dir: &Path,
    structure: &DirectoryStructure,
    include_meta: bool,
    force: bool,
) -> Result<WorkspaceSyncResult> {
    let mut result = WorkspaceSyncResult {
        requests_count: 0,
        files_created: 0,
    };
    match structure {
        DirectoryStructure::Flat => {
            // One self-contained `<name>.yaml` per workspace.
            let export = self.create_workspace_export(workspace).await?;
            let file_path =
                target_dir.join(format!("{}.yaml", self.sanitize_filename(&workspace.name)));
            if force || !file_path.exists() {
                let content = serde_yaml::to_string(&export).map_err(|e| {
                    Error::config(format!("Failed to serialize workspace: {}", e))
                })?;
                fs::write(&file_path, content).await.map_err(|e| {
                    Error::io_with_context("writing workspace file", e.to_string())
                })?;
                result.files_created += 1;
            }
        }
        DirectoryStructure::Nested => {
            // `<name>/workspace.yaml` plus `<name>/requests/*.yaml`.
            let workspace_dir = target_dir.join(self.sanitize_filename(&workspace.name));
            if !workspace_dir.exists() {
                fs::create_dir_all(&workspace_dir).await.map_err(|e| {
                    Error::io_with_context("creating workspace directory", e.to_string())
                })?;
            }
            let export = self.create_workspace_export(workspace).await?;
            let workspace_file = workspace_dir.join("workspace.yaml");
            if force || !workspace_file.exists() {
                let content = serde_yaml::to_string(&export).map_err(|e| {
                    Error::config(format!("Failed to serialize workspace: {}", e))
                })?;
                fs::write(&workspace_file, content).await.map_err(|e| {
                    Error::io_with_context("writing workspace file", e.to_string())
                })?;
                result.files_created += 1;
            }
            let requests_dir = workspace_dir.join("requests");
            if !requests_dir.exists() {
                fs::create_dir_all(&requests_dir).await.map_err(|e| {
                    Error::io_with_context("creating requests directory", e.to_string())
                })?;
            }
            result.requests_count +=
                self.export_workspace_requests(workspace, &requests_dir, force).await?;
        }
        DirectoryStructure::Grouped => {
            // Shared `workspaces/` and `requests/` trees for all workspaces.
            let requests_dir = target_dir.join("requests");
            let workspaces_dir = target_dir.join("workspaces");
            for dir in [&requests_dir, &workspaces_dir] {
                if !dir.exists() {
                    fs::create_dir_all(dir).await.map_err(|e| {
                        Error::io_with_context("creating directory", e.to_string())
                    })?;
                }
            }
            let export = self.create_workspace_export(workspace).await?;
            let workspace_file = workspaces_dir
                .join(format!("{}.yaml", self.sanitize_filename(&workspace.name)));
            if force || !workspace_file.exists() {
                let content = serde_yaml::to_string(&export).map_err(|e| {
                    Error::config(format!("Failed to serialize workspace: {}", e))
                })?;
                fs::write(&workspace_file, content).await.map_err(|e| {
                    Error::io_with_context("writing workspace file", e.to_string())
                })?;
                result.files_created += 1;
            }
            result.requests_count +=
                self.export_workspace_requests_grouped(workspace, &requests_dir, force).await?;
        }
    }
    if include_meta {
        self.create_metadata_file(workspace, target_dir, structure).await?;
        result.files_created += 1;
    }
    Ok(result)
}
/// Build the serializable `WorkspaceExport` snapshot of a workspace.
/// `request_count` covers nested requests too, but `folder_count` counts
/// only top-level folders.
async fn create_workspace_export(&self, workspace: &Workspace) -> Result<WorkspaceExport> {
    let mut requests = HashMap::new();
    // Flatten all requests (top-level and nested) keyed by request ID.
    self.collect_requests_from_workspace(workspace, &mut requests, "".to_string());
    let metadata = WorkspaceMetadata {
        id: workspace.id.clone(),
        name: workspace.name.clone(),
        description: workspace.description.clone(),
        exported_at: Utc::now(),
        request_count: requests.len(),
        folder_count: workspace.folders.len(),
    };
    let config = WorkspaceConfig {
        auth: workspace.config.auth.as_ref().and_then(AuthConfig::from_config_auth),
        base_url: workspace.config.base_url.clone(),
        variables: workspace.config.global_environment.variables.clone(),
        reality_level: workspace.config.reality_level,
        // AI mode is not carried into exports — NOTE(review): confirm intended.
        ai_mode: None,
    };
    Ok(WorkspaceExport {
        metadata,
        config,
        requests,
    })
}
fn collect_requests_from_workspace(
&self,
workspace: &Workspace,
requests: &mut HashMap<String, ExportedRequest>,
folder_path: String,
) {
for request in &workspace.requests {
let exported = self.convert_request_to_exported(request, &folder_path);
requests.insert(request.id.clone(), exported);
}
for folder in &workspace.folders {
let current_path = if folder_path.is_empty() {
folder.name.clone()
} else {
format!("{}/{}", folder_path, folder.name)
};
for request in &folder.requests {
let exported = self.convert_request_to_exported(request, ¤t_path);
requests.insert(request.id.clone(), exported);
}
self.collect_requests_from_folders(folder, requests, current_path);
}
}
fn collect_requests_from_folders(
&self,
folder: &Folder,
requests: &mut HashMap<String, ExportedRequest>,
folder_path: String,
) {
for subfolder in &folder.folders {
let current_path = format!("{}/{}", folder_path, subfolder.name);
for request in &subfolder.requests {
let exported = self.convert_request_to_exported(request, ¤t_path);
requests.insert(request.id.clone(), exported);
}
self.collect_requests_from_folders(subfolder, requests, current_path);
}
}
/// Map an in-memory `MockRequest` to its serializable export form.
/// The HTTP method is rendered via `Debug` formatting — the import side's
/// `parse_http_method` must accept that representation; confirm they stay
/// in sync.
fn convert_request_to_exported(
    &self,
    request: &MockRequest,
    folder_path: &str,
) -> ExportedRequest {
    ExportedRequest {
        id: request.id.clone(),
        name: request.name.clone(),
        method: format!("{:?}", request.method),
        path: request.path.clone(),
        folder_path: folder_path.to_string(),
        headers: request.headers.clone(),
        query_params: request.query_params.clone(),
        body: request.body.clone(),
        response_status: Some(request.response.status_code),
        response_body: request.response.body.clone(),
        response_headers: request.response.headers.clone(),
        delay: request.response.delay_ms,
    }
}
/// Export a workspace as an encrypted blob at `output_path`.
///
/// Requires auto-encryption to be enabled on the workspace. Flow: filter
/// the workspace for sync, run the auto-encryption processor over its
/// environment variables, serialize the export to JSON, encrypt the JSON
/// with the workspace key, then write the ciphertext. The returned
/// `backup_key` lets another machine restore the workspace key and decrypt
/// the file — it must be shown to the user.
pub async fn export_workspace_encrypted(
    &self,
    workspace: &Workspace,
    output_path: &Path,
) -> Result<EncryptedExportResult> {
    if !workspace.config.auto_encryption.enabled {
        return Err(Error::invalid_state("Encryption is not enabled for this workspace. Enable encryption in workspace settings first."));
    }
    let encryption_config = workspace.config.auto_encryption.clone();
    let processor = AutoEncryptionProcessor::new(&workspace.id, encryption_config);
    // Work on a sync-filtered copy so the caller's workspace is untouched.
    let mut filtered_workspace = workspace.to_filtered_for_sync();
    self.encrypt_workspace_data(&mut filtered_workspace, &processor)?;
    let export = self.create_workspace_export(&filtered_workspace).await?;
    let export_json = serde_json::to_string_pretty(&export)
        .map_err(|e| Error::config(format!("Failed to serialize export: {}", e)))?;
    let encrypted_data = utils::encrypt_for_workspace(&workspace.id, &export_json)?;
    let key_manager = WorkspaceKeyManager::new();
    let backup_key = key_manager.generate_workspace_key_backup(&workspace.id)?;
    fs::write(output_path, &encrypted_data)
        .await
        .map_err(|e| Error::io_with_context("writing encrypted export", e.to_string()))?;
    Ok(EncryptedExportResult {
        output_path: output_path.to_path_buf(),
        backup_key,
        exported_at: Utc::now(),
        workspace_name: workspace.name.clone(),
        encryption_enabled: true,
    })
}
/// Legacy entry point: always fails (after confirming the file is
/// readable), directing callers to `import_workspace_encrypted_with_key`,
/// since decryption needs the workspace ID and backup key.
pub async fn import_workspace_encrypted(
    &self,
    encrypted_file: &Path,
    _workspace_name: Option<&str>,
    _registry: &mut WorkspaceRegistry,
) -> Result<EncryptedImportResult> {
    // Read (and discard) the file so an unreadable path surfaces as an
    // I/O error rather than the generic validation error below.
    let _encrypted_data = fs::read_to_string(encrypted_file)
        .await
        .map_err(|e| Error::io_with_context("reading encrypted file", e.to_string()))?;
    Err(Error::validation("Encrypted import requires workspace ID and backup key. Use import_workspace_encrypted_with_key instead."))
}
/// Import an encrypted workspace export.
///
/// If the local key store lacks a key for `workspace_id`, it is restored
/// from `backup_key` first. The file is then decrypted, parsed into a
/// `WorkspaceExport`, rebuilt as a `Workspace` (optionally renamed via
/// `workspace_name`), and registered. Returns the registry-assigned ID
/// plus summary info.
pub async fn import_workspace_encrypted_with_key(
    &self,
    encrypted_file: &Path,
    workspace_id: &str,
    backup_key: &str,
    workspace_name: Option<&str>,
    registry: &mut WorkspaceRegistry,
) -> Result<EncryptedImportResult> {
    let key_manager = WorkspaceKeyManager::new();
    if !key_manager.has_workspace_key(workspace_id) {
        key_manager.restore_workspace_key_from_backup(workspace_id, backup_key)?;
    }
    let encrypted_data = fs::read_to_string(encrypted_file)
        .await
        .map_err(|e| Error::io_with_context("reading encrypted file", e.to_string()))?;
    let decrypted_json = utils::decrypt_for_workspace(workspace_id, &encrypted_data)?;
    let export: WorkspaceExport = serde_json::from_str(&decrypted_json)
        .map_err(|e| Error::config(format!("Failed to parse decrypted export: {}", e)))?;
    let workspace = self.convert_export_to_workspace(&export, workspace_name)?;
    let imported_id = registry.add_workspace(workspace)?;
    Ok(EncryptedImportResult {
        workspace_id: imported_id,
        workspace_name: export.metadata.name.clone(),
        imported_at: Utc::now(),
        request_count: export.requests.len(),
        encryption_restored: true,
    })
}
/// Runs the auto-encryption processor over every environment-variable map in
/// the workspace: each named environment first, then the global environment.
/// Stops at the first processing error.
fn encrypt_workspace_data(
    &self,
    workspace: &mut Workspace,
    processor: &AutoEncryptionProcessor,
) -> Result<()> {
    workspace
        .config
        .environments
        .iter_mut()
        .try_for_each(|env| processor.process_env_vars(&mut env.variables))?;
    processor.process_env_vars(&mut workspace.config.global_environment.variables)
}
/// Rebuilds a [`Workspace`] from a deserialized export, applying an optional
/// name override and copying each exported request's method, path, response
/// fields and delay into fresh [`MockRequest`]s.
///
/// NOTE(review): requests are taken from a `HashMap`, so insertion order is
/// nondeterministic, and `folder_path` on the exported requests is not
/// consulted here — all requests land at the workspace root. Confirm whether
/// folder structure is meant to survive an import.
fn convert_export_to_workspace(
    &self,
    export: &WorkspaceExport,
    name_override: Option<&str>,
) -> Result<Workspace> {
    let effective_name = name_override.unwrap_or(&export.metadata.name);
    let mut workspace = Workspace::new(effective_name.to_string());
    if let Some(desc) = &export.metadata.description {
        workspace.description = Some(desc.clone());
    }
    for exported in export.requests.values() {
        let mut request = MockRequest::new(
            self.parse_http_method(&exported.method)?,
            exported.path.clone(),
            exported.name.clone(),
        );
        // Only overwrite response fields that the export actually carried,
        // leaving MockRequest's defaults intact otherwise.
        if let Some(status) = exported.response_status {
            request.response.status_code = status;
        }
        if exported.response_body.is_some() {
            request.response.body = exported.response_body.clone();
        }
        request.response.headers = exported.response_headers.clone();
        if exported.delay.is_some() {
            request.response.delay_ms = exported.delay;
        }
        workspace.add_request(request)?;
    }
    workspace.config.global_environment.variables = export.config.variables.clone();
    Ok(workspace)
}
fn parse_http_method(&self, method_str: &str) -> Result<crate::routing::HttpMethod> {
match method_str.to_uppercase().as_str() {
"GET" => Ok(crate::routing::HttpMethod::GET),
"POST" => Ok(crate::routing::HttpMethod::POST),
"PUT" => Ok(crate::routing::HttpMethod::PUT),
"DELETE" => Ok(crate::routing::HttpMethod::DELETE),
"PATCH" => Ok(crate::routing::HttpMethod::PATCH),
"HEAD" => Ok(crate::routing::HttpMethod::HEAD),
"OPTIONS" => Ok(crate::routing::HttpMethod::OPTIONS),
_ => Err(Error::validation(format!("Unknown HTTP method: {}", method_str))),
}
}
pub fn check_workspace_for_unencrypted_secrets(
&self,
workspace: &Workspace,
) -> Result<SecurityCheckResult> {
let mut warnings = Vec::new();
let errors = Vec::new();
self.check_environment_variables(workspace, &mut warnings)?;
let has_warnings = !warnings.is_empty();
let has_errors = !errors.is_empty();
Ok(SecurityCheckResult {
workspace_id: workspace.id.clone(),
workspace_name: workspace.name.clone(),
warnings,
errors,
is_secure: !has_warnings && !has_errors,
recommended_actions: self.generate_security_recommendations(has_warnings, has_errors),
})
}
/// Scans the workspace's global and per-environment variables for names or
/// values that look sensitive, appending a high-severity warning for each hit.
fn check_environment_variables(
    &self,
    workspace: &Workspace,
    warnings: &mut Vec<SecurityWarning>,
) -> Result<()> {
    // Substrings that mark a variable name as sensitive (matched against the
    // lowercased name in `is_potentially_sensitive`).
    const SENSITIVE_KEYS: [&str; 22] = [
        "password",
        "secret",
        "key",
        "token",
        "credential",
        "api_key",
        "apikey",
        "api_secret",
        "db_password",
        "database_password",
        "aws_secret_key",
        "aws_session_token",
        "private_key",
        "authorization",
        "auth_token",
        "access_token",
        "refresh_token",
        "cookie",
        "session",
        "csrf",
        "jwt",
        "bearer",
    ];
    // Shared warning constructor; only the name, location and message vary.
    let mut flag = |field_name: &str, location: String, message: String| {
        warnings.push(SecurityWarning {
            field_type: "environment_variable".to_string(),
            field_name: field_name.to_string(),
            location,
            severity: SecuritySeverity::High,
            message,
            suggestion: "Consider encrypting this value or excluding it from exports"
                .to_string(),
        });
    };
    for (key, value) in &workspace.config.global_environment.variables {
        if self.is_potentially_sensitive(key, value, &SENSITIVE_KEYS) {
            flag(
                key,
                "global_environment".to_string(),
                format!("Potentially sensitive environment variable '{}' detected", key),
            );
        }
    }
    for env in &workspace.config.environments {
        for (key, value) in &env.variables {
            if self.is_potentially_sensitive(key, value, &SENSITIVE_KEYS) {
                flag(
                    key,
                    format!("environment '{}'", env.name),
                    format!("Potentially sensitive environment variable '{}' detected in environment '{}'", key, env.name),
                );
            }
        }
    }
    Ok(())
}
/// Returns true when either the variable name contains a known sensitive
/// substring (case-insensitive) or the value itself matches a sensitive
/// pattern. The value scan is skipped when the name already matched.
fn is_potentially_sensitive(&self, key: &str, value: &str, sensitive_keys: &[&str]) -> bool {
    let key_lower = key.to_lowercase();
    let name_matches = sensitive_keys.iter().any(|&needle| key_lower.contains(needle));
    name_matches || self.contains_sensitive_patterns(value)
}
/// Heuristically detects sensitive-looking values: credit-card numbers, SSNs,
/// or long mostly-alphanumeric strings that resemble API keys/tokens.
fn contains_sensitive_patterns(&self, value: &str) -> bool {
    if CREDIT_CARD_PATTERN.is_match(value) || SSN_PATTERN.is_match(value) {
        return true;
    }
    // Opaque-secret heuristic: more than 20 characters, over 80% alphanumeric.
    //
    // Fix: the original divided the alphanumeric *character* count by the
    // *byte* length (`value.len()`), which under-counted the ratio for any
    // multi-byte UTF-8 input; both sides now use character counts. The old
    // `any(is_alphanumeric)` pre-check was redundant (a >0.8 ratio over >20
    // chars implies at least one alphanumeric char) and has been dropped.
    let total_chars = value.chars().count();
    if total_chars > 20 {
        let alphanumeric = value.chars().filter(|c| c.is_alphanumeric()).count();
        if alphanumeric as f64 / total_chars as f64 > 0.8 {
            return true;
        }
    }
    false
}
/// Builds the remediation list shown alongside a security check: general
/// encryption advice whenever anything was flagged, plus a critical notice
/// when error-level findings exist.
fn generate_security_recommendations(
    &self,
    has_warnings: bool,
    has_errors: bool,
) -> Vec<String> {
    let mut recommendations: Vec<String> = Vec::new();
    if has_warnings || has_errors {
        recommendations.extend(
            [
                "Enable encryption for this workspace in settings",
                "Review and encrypt sensitive environment variables",
                "Use encrypted export for sharing workspaces",
            ]
            .iter()
            .map(|s| s.to_string()),
        );
    }
    if has_errors {
        recommendations
            .push("CRITICAL: Remove or encrypt sensitive data before proceeding".to_string());
    }
    recommendations
}
/// Writes the workspace's top-level requests to `requests_dir` as YAML files
/// (one per request, named after the sanitized request name), then delegates
/// folder contents to `export_folder_requests`. Existing files are only
/// overwritten when `force` is set. Returns the number of files written.
///
/// NOTE(review): files are keyed by sanitized request name only, so two
/// requests with the same name (including across folders) overwrite each
/// other — confirm this flattening is intended.
async fn export_workspace_requests(
    &self,
    workspace: &Workspace,
    requests_dir: &Path,
    force: bool,
) -> Result<usize> {
    let mut written = 0;
    for request in &workspace.requests {
        let file_name = format!("{}.yaml", self.sanitize_filename(&request.name));
        let file_path = requests_dir.join(file_name);
        if !force && file_path.exists() {
            continue;
        }
        // Top-level requests carry an empty folder path.
        let exported = self.convert_request_to_exported(request, "");
        let yaml = serde_yaml::to_string(&exported)
            .map_err(|e| Error::config(format!("Failed to serialize request: {}", e)))?;
        fs::write(&file_path, yaml)
            .await
            .map_err(|e| Error::io_with_context("writing request file", e.to_string()))?;
        written += 1;
    }
    for folder in &workspace.folders {
        written += self
            .export_folder_requests(folder, requests_dir, force, &folder.name)
            .await?;
    }
    Ok(written)
}
/// Writes every request contained in `folder` (and all nested subfolders) to
/// `requests_dir` as YAML files, returning the number of files written.
/// Traversal uses an explicit breadth-first queue rather than recursion,
/// which avoids the boxing that recursive `async fn` would require.
///
/// Fix: the folder/path borrows had been corrupted by a text-encoding mangle
/// (`&curren…` collapsed into the `¤` character), which does not compile; the
/// intended `&current_folder` / `&current_path` borrows are restored.
async fn export_folder_requests(
    &self,
    folder: &Folder,
    requests_dir: &Path,
    force: bool,
    folder_path: &str,
) -> Result<usize> {
    use std::collections::VecDeque;
    let mut count = 0;
    let mut queue = VecDeque::new();
    queue.push_back((folder, folder_path.to_string()));
    while let Some((current_folder, current_path)) = queue.pop_front() {
        for request in &current_folder.requests {
            let file_path =
                requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
            if force || !file_path.exists() {
                let exported = self.convert_request_to_exported(request, &current_path);
                let content = serde_yaml::to_string(&exported).map_err(|e| {
                    Error::config(format!("Failed to serialize request: {}", e))
                })?;
                fs::write(&file_path, content).await.map_err(|e| {
                    Error::io_with_context("writing request file", e.to_string())
                })?;
                count += 1;
            }
        }
        // Enqueue subfolders with their slash-joined path for the next pass.
        for subfolder in &current_folder.folders {
            let subfolder_path = if current_path.is_empty() {
                subfolder.name.clone()
            } else {
                format!("{}/{}", current_path, subfolder.name)
            };
            queue.push_back((subfolder, subfolder_path));
        }
    }
    Ok(count)
}
/// Grouped-layout variant of request export: creates a per-workspace
/// subdirectory (named after the sanitized workspace name) under
/// `requests_dir` and exports all requests into it. Returns the number of
/// files written.
async fn export_workspace_requests_grouped(
    &self,
    workspace: &Workspace,
    requests_dir: &Path,
    force: bool,
) -> Result<usize> {
    let target = requests_dir.join(self.sanitize_filename(&workspace.name));
    if !target.exists() {
        fs::create_dir_all(&target).await.map_err(|e| {
            Error::io_with_context("creating workspace requests directory", e.to_string())
        })?;
    }
    self.export_workspace_requests(workspace, &target, force).await
}
async fn create_metadata_file(
&self,
workspace: &Workspace,
target_dir: &Path,
structure: &DirectoryStructure,
) -> Result<()> {
let metadata = serde_json::json!({
"workspace_id": workspace.id,
"workspace_name": workspace.name,
"description": workspace.description,
"exported_at": Utc::now().to_rfc3339(),
"structure": format!("{:?}", structure),
"version": "1.0",
"source": "mockforge"
});
let metadata_file = target_dir.join(".mockforge-meta.json");
let content = serde_json::to_string_pretty(&metadata)
.map_err(|e| Error::config(format!("Failed to serialize metadata: {}", e)))?;
fs::write(&metadata_file, content)
.await
.map_err(|e| Error::io_with_context("writing metadata file", e.to_string()))?;
Ok(())
}
pub async fn export_reality_preset(
&self,
preset: &crate::RealityPreset,
output_path: &Path,
) -> Result<()> {
self.ensure_workspace_dir().await?;
let content = if output_path.extension().and_then(|s| s.to_str()) == Some("yaml")
|| output_path.extension().and_then(|s| s.to_str()) == Some("yml")
{
serde_yaml::to_string(preset)
.map_err(|e| Error::config(format!("Failed to serialize preset to YAML: {}", e)))?
} else {
serde_json::to_string_pretty(preset)
.map_err(|e| Error::config(format!("Failed to serialize preset to JSON: {}", e)))?
};
if let Some(parent) = output_path.parent() {
fs::create_dir_all(parent)
.await
.map_err(|e| Error::io_with_context("creating preset directory", e.to_string()))?;
}
fs::write(output_path, content)
.await
.map_err(|e| Error::io_with_context("writing preset file", e.to_string()))?;
Ok(())
}
pub async fn import_reality_preset(&self, input_path: &Path) -> Result<crate::RealityPreset> {
let content = fs::read_to_string(input_path)
.await
.map_err(|e| Error::io_with_context("reading preset file", e.to_string()))?;
let preset = if input_path
.extension()
.and_then(|s| s.to_str())
.map(|ext| ext == "yaml" || ext == "yml")
.unwrap_or(false)
{
serde_yaml::from_str(&content).map_err(|e| {
Error::config(format!("Failed to deserialize preset from YAML: {}", e))
})?
} else {
serde_json::from_str(&content).map_err(|e| {
Error::config(format!("Failed to deserialize preset from JSON: {}", e))
})?
};
Ok(preset)
}
/// Returns the directory where reality presets are stored: `<base_dir>/presets`.
pub fn presets_dir(&self) -> PathBuf {
self.base_dir.join("presets")
}
/// Lists preset files (`.json`, `.yaml`, `.yml`) in the presets directory.
/// Returns an empty list when the directory does not exist; non-files and
/// other extensions are skipped. Order follows directory iteration order.
pub async fn list_reality_presets(&self) -> Result<Vec<PathBuf>> {
    let dir = self.presets_dir();
    if !dir.exists() {
        return Ok(Vec::new());
    }
    let mut found = Vec::new();
    let mut entries = fs::read_dir(&dir)
        .await
        .map_err(|e| Error::io_with_context("reading presets directory", e.to_string()))?;
    while let Some(entry) = entries
        .next_entry()
        .await
        .map_err(|e| Error::io_with_context("reading directory entry", e.to_string()))?
    {
        let path = entry.path();
        if !path.is_file() {
            continue;
        }
        let extension = path.extension().and_then(|s| s.to_str());
        if matches!(extension, Some("json") | Some("yaml") | Some("yml")) {
            found.push(path);
        }
    }
    Ok(found)
}
/// Produces a filesystem-safe, lowercased file name: characters that are
/// reserved on common filesystems and all whitespace become underscores.
fn sanitize_filename(&self, name: &str) -> String {
    const RESERVED: [char; 9] = ['/', '\\', ':', '*', '?', '"', '<', '>', '|'];
    let mut sanitized = String::with_capacity(name.len());
    for c in name.chars() {
        if RESERVED.contains(&c) || c.is_whitespace() {
            sanitized.push('_');
        } else {
            sanitized.push(c);
        }
    }
    sanitized.to_lowercase()
}
}
// Per-workspace tally collected during a sync pass; presumably aggregated
// into the public `SyncResult` by sync code elsewhere in this file — confirm.
#[derive(Debug)]
struct WorkspaceSyncResult {
// Requests processed for this workspace.
requests_count: usize,
// Files actually written to disk for this workspace.
files_created: usize,
}
#[cfg(test)]
mod tests {
use super::*;
use crate::workspace::{MockRequest, Workspace};
use crate::HttpMethod;
use tempfile::TempDir;
// End-to-end save/load of a single workspace, plus id listing.
#[tokio::test]
async fn test_workspace_persistence() {
let temp_dir = TempDir::new().unwrap();
let persistence = WorkspacePersistence::new(temp_dir.path());
let mut workspace = Workspace::new("Test Workspace".to_string());
let request =
MockRequest::new(HttpMethod::GET, "/test".to_string(), "Test Request".to_string());
workspace.add_request(request).unwrap();
persistence.save_workspace(&workspace).await.unwrap();
let loaded = persistence.load_workspace(&workspace.id).await.unwrap();
assert_eq!(loaded.name, workspace.name);
assert_eq!(loaded.requests.len(), 1);
let ids = persistence.list_workspace_ids().await.unwrap();
assert_eq!(ids.len(), 1);
assert_eq!(ids[0], workspace.id);
}
#[tokio::test]
async fn test_registry_persistence() {
let temp_dir = TempDir::new().unwrap();
let persistence = WorkspacePersistence::new(temp_dir.path());
let mut registry = WorkspaceRegistry::new();
let workspace1 = Workspace::new("Workspace 1".to_string());
let workspace2 = Workspace::new("Workspace 2".to_string());
let id1 = registry.add_workspace(workspace1).unwrap();
let _id2 = registry.add_workspace(workspace2).unwrap();
registry.set_active_workspace(Some(id1.clone())).unwrap();
persistence.save_full_registry(®istry).await.unwrap();
let loaded_registry = persistence.load_full_registry().await.unwrap();
assert_eq!(loaded_registry.get_workspaces().len(), 2);
assert_eq!(loaded_registry.get_active_workspace().unwrap().name, "Workspace 1");
}
// Backup -> delete -> restore round trip; the restored copy must keep its name.
#[tokio::test]
async fn test_backup_and_restore() {
let temp_dir = TempDir::new().unwrap();
let backup_dir = temp_dir.path().join("backups");
let persistence = WorkspacePersistence::new(temp_dir.path());
let workspace = Workspace::new("Test Workspace".to_string());
persistence.save_workspace(&workspace).await.unwrap();
let backup_path = persistence.backup_workspace(&workspace.id, &backup_dir).await.unwrap();
assert!(backup_path.exists());
persistence.delete_workspace(&workspace.id).await.unwrap();
assert!(persistence.load_workspace(&workspace.id).await.is_err());
let restored_id = persistence.restore_workspace(&backup_path).await.unwrap();
let restored = persistence.load_workspace(&restored_id).await.unwrap();
assert_eq!(restored.name, "Test Workspace");
}
// Constructor stores the base directory verbatim.
#[test]
fn test_workspace_persistence_new() {
let persistence = WorkspacePersistence::new("/tmp/test");
assert_eq!(persistence.base_dir, PathBuf::from("/tmp/test"));
}
// workspace_dir() is simply the configured base directory.
#[test]
fn test_workspace_persistence_workspace_dir() {
let persistence = WorkspacePersistence::new("/tmp/test");
assert_eq!(persistence.workspace_dir(), Path::new("/tmp/test"));
}
// Workspace files resolve to `<base>/<id>.yaml`.
#[test]
fn test_workspace_persistence_workspace_file_path() {
let persistence = WorkspacePersistence::new("/tmp/test");
let path = persistence.workspace_file_path("workspace-123");
assert_eq!(path, PathBuf::from("/tmp/test/workspace-123.yaml"));
}
// The registry is persisted at `<base>/registry.yaml`.
#[test]
fn test_workspace_persistence_registry_file_path() {
let persistence = WorkspacePersistence::new("/tmp/test");
let path = persistence.registry_file_path();
assert_eq!(path, PathBuf::from("/tmp/test/registry.yaml"));
}
// Sync state is persisted at `<base>/sync_state.yaml`.
#[test]
fn test_workspace_persistence_sync_state_file_path() {
let persistence = WorkspacePersistence::new("/tmp/test");
let path = persistence.sync_state_file_path();
assert_eq!(path, PathBuf::from("/tmp/test/sync_state.yaml"));
}
// A freshly built SyncState carries a timestamp no later than "now".
#[test]
fn test_sync_state_creation() {
let state = SyncState {
last_sync_timestamp: Utc::now(),
};
assert!(state.last_sync_timestamp <= Utc::now());
}
// PartialEq coverage for the three SyncStrategy variants.
#[test]
fn test_sync_strategy_variants() {
let full = SyncStrategy::Full;
let incremental = SyncStrategy::Incremental;
let selective = SyncStrategy::Selective(vec!["id1".to_string(), "id2".to_string()]);
assert_eq!(full, SyncStrategy::Full);
assert_eq!(incremental, SyncStrategy::Incremental);
assert_eq!(selective, SyncStrategy::Selective(vec!["id1".to_string(), "id2".to_string()]));
}
// PartialEq coverage for the three DirectoryStructure variants.
#[test]
fn test_directory_structure_variants() {
let flat = DirectoryStructure::Flat;
let nested = DirectoryStructure::Nested;
let grouped = DirectoryStructure::Grouped;
assert_eq!(flat, DirectoryStructure::Flat);
assert_eq!(nested, DirectoryStructure::Nested);
assert_eq!(grouped, DirectoryStructure::Grouped);
}
// SyncResult fields are stored as given.
#[test]
fn test_sync_result_creation() {
let result = SyncResult {
synced_workspaces: 5,
synced_requests: 10,
files_created: 15,
target_dir: PathBuf::from("/tmp/sync"),
};
assert_eq!(result.synced_workspaces, 5);
assert_eq!(result.synced_requests, 10);
assert_eq!(result.files_created, 15);
}
// EncryptedExportResult fields are stored as given.
#[test]
fn test_encrypted_export_result_creation() {
let result = EncryptedExportResult {
output_path: PathBuf::from("/tmp/export.zip"),
backup_key: "backup-key-123".to_string(),
exported_at: Utc::now(),
workspace_name: "Test Workspace".to_string(),
encryption_enabled: true,
};
assert_eq!(result.workspace_name, "Test Workspace");
assert!(result.encryption_enabled);
}
// EncryptedImportResult fields are stored as given.
#[test]
fn test_encrypted_import_result_creation() {
let result = EncryptedImportResult {
workspace_id: "ws-123".to_string(),
workspace_name: "Imported Workspace".to_string(),
imported_at: Utc::now(),
request_count: 5,
encryption_restored: true,
};
assert_eq!(result.workspace_id, "ws-123");
assert_eq!(result.request_count, 5);
}
// SecurityCheckResult fields are stored as given.
#[test]
fn test_security_check_result_creation() {
let result = SecurityCheckResult {
workspace_id: "ws-123".to_string(),
workspace_name: "Test Workspace".to_string(),
warnings: vec![],
errors: vec![],
is_secure: true,
recommended_actions: vec!["Action 1".to_string()],
};
assert_eq!(result.workspace_id, "ws-123");
assert!(result.is_secure);
}
// SecurityWarning fields are stored as given.
#[test]
fn test_security_warning_creation() {
let warning = SecurityWarning {
field_type: "header".to_string(),
field_name: "Authorization".to_string(),
location: "request".to_string(),
severity: SecuritySeverity::High,
message: "Sensitive data detected".to_string(),
suggestion: "Use encryption".to_string(),
};
assert_eq!(warning.severity, SecuritySeverity::High);
assert_eq!(warning.field_name, "Authorization");
}
// PartialEq coverage for the four SecuritySeverity levels.
#[test]
fn test_security_severity_variants() {
assert_eq!(SecuritySeverity::Low, SecuritySeverity::Low);
assert_eq!(SecuritySeverity::Medium, SecuritySeverity::Medium);
assert_eq!(SecuritySeverity::High, SecuritySeverity::High);
assert_eq!(SecuritySeverity::Critical, SecuritySeverity::Critical);
}
// A full WorkspaceExport can be assembled and read back field by field.
#[test]
fn test_workspace_export_creation() {
let export = WorkspaceExport {
metadata: WorkspaceMetadata {
id: "ws-123".to_string(),
name: "Test Workspace".to_string(),
description: None,
exported_at: Utc::now(),
request_count: 5,
folder_count: 2,
},
config: WorkspaceConfig {
auth: None,
base_url: Some("http://localhost:8080".to_string()),
variables: HashMap::new(),
reality_level: None,
ai_mode: None,
},
requests: HashMap::new(),
};
assert_eq!(export.metadata.id, "ws-123");
assert_eq!(export.config.base_url, Some("http://localhost:8080".to_string()));
}
// WorkspaceMetadata fields are stored as given.
#[test]
fn test_workspace_metadata_creation() {
let metadata = WorkspaceMetadata {
id: "ws-123".to_string(),
name: "Test Workspace".to_string(),
description: Some("Test description".to_string()),
exported_at: Utc::now(),
request_count: 10,
folder_count: 5,
};
assert_eq!(metadata.id, "ws-123");
assert_eq!(metadata.name, "Test Workspace");
assert_eq!(metadata.request_count, 10);
assert_eq!(metadata.folder_count, 5);
}
// WorkspaceConfig fields are stored as given.
#[test]
fn test_workspace_config_creation() {
let config = WorkspaceConfig {
auth: None,
base_url: Some("http://localhost:8080".to_string()),
variables: HashMap::new(),
reality_level: None,
ai_mode: None,
};
assert_eq!(config.base_url, Some("http://localhost:8080".to_string()));
}
// AuthConfig stores the auth type and its parameter map.
#[test]
fn test_auth_config_creation() {
let mut params = HashMap::new();
params.insert("token".to_string(), "token-123".to_string());
let auth = AuthConfig {
auth_type: "bearer".to_string(),
params,
};
assert_eq!(auth.auth_type, "bearer");
assert_eq!(auth.params.get("token"), Some(&"token-123".to_string()));
}
// ExportedRequest fields are stored as given.
#[test]
fn test_exported_request_creation() {
let request = ExportedRequest {
id: "req-123".to_string(),
name: "Test Request".to_string(),
method: "GET".to_string(),
path: "/api/test".to_string(),
folder_path: "/folder1".to_string(),
headers: HashMap::new(),
query_params: HashMap::new(),
body: None,
response_status: Some(200),
response_body: Some("{}".to_string()),
response_headers: HashMap::new(),
delay: Some(100),
};
assert_eq!(request.id, "req-123");
assert_eq!(request.method, "GET");
assert_eq!(request.response_status, Some(200));
}
// SerializableWorkspaceRegistry fields are stored as given.
#[test]
fn test_serializable_workspace_registry_creation() {
let serializable = SerializableWorkspaceRegistry {
workspaces: vec![],
active_workspace: Some("ws-123".to_string()),
};
assert_eq!(serializable.active_workspace, Some("ws-123".to_string()));
assert!(serializable.workspaces.is_empty());
}
// The registry snapshot serializes to JSON containing the active workspace id.
#[test]
fn test_serializable_workspace_registry_serialization() {
let serializable = SerializableWorkspaceRegistry {
workspaces: vec![],
active_workspace: Some("ws-123".to_string()),
};
let json = serde_json::to_string(&serializable).unwrap();
assert!(json.contains("ws-123"));
}
// Clone preserves the sync timestamp.
#[test]
fn test_sync_state_clone() {
let state1 = SyncState {
last_sync_timestamp: Utc::now(),
};
let state2 = state1.clone();
assert_eq!(state1.last_sync_timestamp, state2.last_sync_timestamp);
}
// Debug output names the type.
#[test]
fn test_sync_state_debug() {
let state = SyncState {
last_sync_timestamp: Utc::now(),
};
let debug_str = format!("{:?}", state);
assert!(debug_str.contains("SyncState"));
}
// Clone of a Selective strategy compares equal to the original.
#[test]
fn test_sync_strategy_clone() {
let strategy1 = SyncStrategy::Selective(vec!["id1".to_string()]);
let strategy2 = strategy1.clone();
assert_eq!(strategy1, strategy2);
}
// Clone of a DirectoryStructure compares equal to the original.
#[test]
fn test_directory_structure_clone() {
let structure1 = DirectoryStructure::Nested;
let structure2 = structure1.clone();
assert_eq!(structure1, structure2);
}
// Clone preserves SyncResult counters.
#[test]
fn test_sync_result_clone() {
let result1 = SyncResult {
synced_workspaces: 1,
synced_requests: 2,
files_created: 3,
target_dir: PathBuf::from("/tmp"),
};
let result2 = result1.clone();
assert_eq!(result1.synced_workspaces, result2.synced_workspaces);
}
// Clone preserves EncryptedExportResult fields.
#[test]
fn test_encrypted_export_result_clone() {
let result1 = EncryptedExportResult {
output_path: PathBuf::from("/tmp/export.zip"),
backup_key: "key".to_string(),
exported_at: Utc::now(),
workspace_name: "Test".to_string(),
encryption_enabled: true,
};
let result2 = result1.clone();
assert_eq!(result1.workspace_name, result2.workspace_name);
}
// Clone preserves EncryptedImportResult fields.
#[test]
fn test_encrypted_import_result_clone() {
let result1 = EncryptedImportResult {
workspace_id: "ws-1".to_string(),
workspace_name: "Test".to_string(),
imported_at: Utc::now(),
request_count: 5,
encryption_restored: true,
};
let result2 = result1.clone();
assert_eq!(result1.workspace_id, result2.workspace_id);
}
// Clone preserves SecurityCheckResult fields.
#[test]
fn test_security_check_result_clone() {
let result1 = SecurityCheckResult {
workspace_id: "ws-1".to_string(),
workspace_name: "Test".to_string(),
warnings: vec![],
errors: vec![],
is_secure: true,
recommended_actions: vec![],
};
let result2 = result1.clone();
assert_eq!(result1.workspace_id, result2.workspace_id);
}
// Clone preserves SecurityWarning fields.
#[test]
fn test_security_warning_clone() {
let warning1 = SecurityWarning {
field_type: "header".to_string(),
field_name: "Auth".to_string(),
location: "request".to_string(),
severity: SecuritySeverity::High,
message: "Test".to_string(),
suggestion: "Fix".to_string(),
};
let warning2 = warning1.clone();
assert_eq!(warning1.field_name, warning2.field_name);
}
// Clone of a SecuritySeverity compares equal to the original.
#[test]
fn test_security_severity_clone() {
let severity1 = SecuritySeverity::Critical;
let severity2 = severity1.clone();
assert_eq!(severity1, severity2);
}
// Clone preserves a nested WorkspaceExport structure.
#[test]
fn test_workspace_export_clone() {
let export1 = WorkspaceExport {
metadata: WorkspaceMetadata {
id: "ws-1".to_string(),
name: "Test".to_string(),
description: None,
exported_at: Utc::now(),
request_count: 0,
folder_count: 0,
},
config: WorkspaceConfig {
auth: None,
base_url: None,
variables: HashMap::new(),
reality_level: None,
ai_mode: None,
},
requests: HashMap::new(),
};
let export2 = export1.clone();
assert_eq!(export1.metadata.id, export2.metadata.id);
}
// Clone preserves WorkspaceMetadata fields.
#[test]
fn test_workspace_metadata_clone() {
let metadata1 = WorkspaceMetadata {
id: "ws-1".to_string(),
name: "Test".to_string(),
description: None,
exported_at: Utc::now(),
request_count: 0,
folder_count: 0,
};
let metadata2 = metadata1.clone();
assert_eq!(metadata1.id, metadata2.id);
}
// Clone preserves WorkspaceConfig fields.
#[test]
fn test_workspace_config_clone() {
let config1 = WorkspaceConfig {
auth: None,
base_url: Some("http://localhost".to_string()),
variables: HashMap::new(),
reality_level: None,
ai_mode: None,
};
let config2 = config1.clone();
assert_eq!(config1.base_url, config2.base_url);
}
// Clone preserves AuthConfig fields.
#[test]
fn test_auth_config_clone() {
let mut params = HashMap::new();
params.insert("key".to_string(), "value".to_string());
let auth1 = AuthConfig {
auth_type: "bearer".to_string(),
params: params.clone(),
};
let auth2 = auth1.clone();
assert_eq!(auth1.auth_type, auth2.auth_type);
}
// Clone preserves ExportedRequest fields.
#[test]
fn test_exported_request_clone() {
let request1 = ExportedRequest {
id: "req-1".to_string(),
name: "Test".to_string(),
method: "GET".to_string(),
path: "/test".to_string(),
folder_path: "/".to_string(),
headers: HashMap::new(),
query_params: HashMap::new(),
body: None,
response_status: Some(200),
response_body: None,
response_headers: HashMap::new(),
delay: None,
};
let request2 = request1.clone();
assert_eq!(request1.id, request2.id);
}
// Debug output names the type.
#[test]
fn test_sync_result_debug() {
let result = SyncResult {
synced_workspaces: 1,
synced_requests: 2,
files_created: 3,
target_dir: PathBuf::from("/tmp"),
};
let debug_str = format!("{:?}", result);
assert!(debug_str.contains("SyncResult"));
}
// Debug output names the type.
#[test]
fn test_encrypted_export_result_debug() {
let result = EncryptedExportResult {
output_path: PathBuf::from("/tmp/export.zip"),
backup_key: "key".to_string(),
exported_at: Utc::now(),
workspace_name: "Test".to_string(),
encryption_enabled: true,
};
let debug_str = format!("{:?}", result);
assert!(debug_str.contains("EncryptedExportResult"));
}
// Debug output names the type.
#[test]
fn test_encrypted_import_result_debug() {
let result = EncryptedImportResult {
workspace_id: "ws-1".to_string(),
workspace_name: "Test".to_string(),
imported_at: Utc::now(),
request_count: 5,
encryption_restored: true,
};
let debug_str = format!("{:?}", result);
assert!(debug_str.contains("EncryptedImportResult"));
}
// Debug output names the type.
#[test]
fn test_security_check_result_debug() {
let result = SecurityCheckResult {
workspace_id: "ws-1".to_string(),
workspace_name: "Test".to_string(),
warnings: vec![],
errors: vec![],
is_secure: true,
recommended_actions: vec![],
};
let debug_str = format!("{:?}", result);
assert!(debug_str.contains("SecurityCheckResult"));
}
// Debug output names the type.
#[test]
fn test_security_warning_debug() {
let warning = SecurityWarning {
field_type: "header".to_string(),
field_name: "Auth".to_string(),
location: "request".to_string(),
severity: SecuritySeverity::High,
message: "Test".to_string(),
suggestion: "Fix".to_string(),
};
let debug_str = format!("{:?}", warning);
assert!(debug_str.contains("SecurityWarning"));
}
// Debug output names the variant.
#[test]
fn test_security_severity_debug() {
let severity = SecuritySeverity::Critical;
let debug_str = format!("{:?}", severity);
assert!(debug_str.contains("Critical"));
}
// Debug output names the type.
#[test]
fn test_workspace_export_debug() {
let export = WorkspaceExport {
metadata: WorkspaceMetadata {
id: "ws-1".to_string(),
name: "Test".to_string(),
description: None,
exported_at: Utc::now(),
request_count: 0,
folder_count: 0,
},
config: WorkspaceConfig {
auth: None,
base_url: None,
variables: HashMap::new(),
reality_level: None,
ai_mode: None,
},
requests: HashMap::new(),
};
let debug_str = format!("{:?}", export);
assert!(debug_str.contains("WorkspaceExport"));
}
// Debug output names the type.
#[test]
fn test_workspace_metadata_debug() {
let metadata = WorkspaceMetadata {
id: "ws-1".to_string(),
name: "Test".to_string(),
description: None,
exported_at: Utc::now(),
request_count: 0,
folder_count: 0,
};
let debug_str = format!("{:?}", metadata);
assert!(debug_str.contains("WorkspaceMetadata"));
}
// Debug output names the type.
#[test]
fn test_workspace_config_debug() {
let config = WorkspaceConfig {
auth: None,
base_url: None,
variables: HashMap::new(),
reality_level: None,
ai_mode: None,
};
let debug_str = format!("{:?}", config);
assert!(debug_str.contains("WorkspaceConfig"));
}
// Debug output names the type.
#[test]
fn test_auth_config_debug() {
let auth = AuthConfig {
auth_type: "bearer".to_string(),
params: HashMap::new(),
};
let debug_str = format!("{:?}", auth);
assert!(debug_str.contains("AuthConfig"));
}
// Debug output names the type.
#[test]
fn test_exported_request_debug() {
let request = ExportedRequest {
id: "req-1".to_string(),
name: "Test".to_string(),
method: "GET".to_string(),
path: "/test".to_string(),
folder_path: "/".to_string(),
headers: HashMap::new(),
query_params: HashMap::new(),
body: None,
response_status: None,
response_body: None,
response_headers: HashMap::new(),
delay: None,
};
let debug_str = format!("{:?}", request);
assert!(debug_str.contains("ExportedRequest"));
}
// Debug output names the variant or type.
#[test]
fn test_sync_strategy_debug() {
let strategy = SyncStrategy::Full;
let debug_str = format!("{:?}", strategy);
assert!(debug_str.contains("Full") || debug_str.contains("SyncStrategy"));
}
// Debug output names the variant or type.
#[test]
fn test_directory_structure_debug() {
let structure = DirectoryStructure::Flat;
let debug_str = format!("{:?}", structure);
assert!(debug_str.contains("Flat") || debug_str.contains("DirectoryStructure"));
}
}