// mockforge_core/workspace_persistence.rs

1//! Persistence layer for workspace configurations
2//!
3//! This module handles saving and loading workspace configurations to/from disk,
4//! enabling persistent storage of workspace hierarchies and configurations.
5
6use crate::config::AuthConfig as ConfigAuthConfig;
7use crate::encryption::{utils, AutoEncryptionProcessor, WorkspaceKeyManager};
8use crate::workspace::{EntityId, Folder, MockRequest, Workspace, WorkspaceRegistry};
9use crate::{Error, Result};
10use chrono::{DateTime, Utc};
11use once_cell::sync::Lazy;
12use regex::Regex;
13use serde::{Deserialize, Serialize};
14use std::collections::HashMap;
15use std::path::{Path, PathBuf};
16use tokio::fs;
17
// Pre-compiled regex patterns for sensitive data detection.
// Compiled once via `Lazy` so repeated security scans don't pay the
// regex-compilation cost on every call.

/// Matches 16-digit card-like numbers as four 4-digit groups, optionally
/// separated by spaces or hyphens (e.g. "4111 1111 1111 1111").
/// NOTE(review): any such 16-digit sequence matches — there is no Luhn
/// check, so false positives on non-card numbers are possible.
static CREDIT_CARD_PATTERN: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"\b\d{4}[-\s]?\d{4}[-\s]?\d{4}[-\s]?\d{4}\b")
        .expect("CREDIT_CARD_PATTERN regex is valid")
});
23
/// Matches US SSN-shaped values: 3-2-4 digit groups, optionally separated by
/// spaces or hyphens (e.g. "123-45-6789").
/// NOTE(review): any 9 digits in this shape match — expect false positives
/// (phone-number fragments, internal IDs, etc.).
static SSN_PATTERN: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"\b\d{3}[-\s]?\d{2}[-\s]?\d{4}\b").expect("SSN_PATTERN regex is valid")
});
27
/// Workspace persistence manager
///
/// Owns the on-disk layout for workspace storage: one `<workspace-id>.yaml`
/// file per workspace plus `registry.yaml` and `sync_state.yaml` metadata
/// files, all directly under `base_dir`.
#[derive(Debug)]
pub struct WorkspacePersistence {
    /// Base directory for workspace storage
    base_dir: PathBuf,
}
34
/// Serializable workspace registry for persistence
///
/// NOTE(review): `save_registry` serializes the full `Workspace` values here,
/// so `registry.yaml` duplicates the per-workspace files, while
/// `load_registry` only reads the IDs back. Storing just IDs would be leaner,
/// but changing this struct would break existing `registry.yaml` files.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct SerializableWorkspaceRegistry {
    /// Full workspace snapshots (only their IDs are used when loading)
    workspaces: Vec<Workspace>,
    /// ID of the workspace that was active when the registry was saved
    active_workspace: Option<EntityId>,
}
41
/// Sync state for tracking incremental syncs
///
/// Persisted to `sync_state.yaml`; the timestamp is refreshed after each
/// successful incremental sync run.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SyncState {
    /// Last time a sync operation was performed
    pub last_sync_timestamp: DateTime<Utc>,
}
48
/// Sync strategy for workspace mirroring
///
/// Parsed from the string values `"full"`, `"incremental"` and `"selective"`
/// in `sync_to_directory_advanced`.
// `Eq` added alongside `PartialEq` (all payloads are `Eq`) so the strategy
// can be used with hash-based collections and full equality semantics.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SyncStrategy {
    /// Sync all workspaces completely
    Full,
    /// Sync only changed workspaces (based on modification time)
    Incremental,
    /// Sync only specified workspace IDs
    Selective(Vec<String>),
}
59
/// Directory structure for synced workspaces
///
/// Parsed from the string values `"flat"`, `"nested"` and `"grouped"` in
/// `sync_to_directory_advanced`.
// Fieldless enum: `Copy` and `Eq` are free and make the type cheaper and
// easier to pass around than cloning references.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DirectoryStructure {
    /// All workspaces in a flat structure: workspace-id.yaml
    Flat,
    /// Nested by workspace: workspaces/{name}/workspace.yaml + requests/
    Nested,
    /// Grouped by type: requests/, responses/, metadata/
    Grouped,
}
70
/// Result of a workspace sync operation
///
/// Counts are aggregated across all workspaces processed by a single run of
/// `sync_to_directory_advanced`.
#[derive(Debug, Clone)]
pub struct SyncResult {
    /// Number of workspaces synced
    pub synced_workspaces: usize,
    /// Number of requests synced
    pub synced_requests: usize,
    /// Number of files created/updated
    pub files_created: usize,
    /// Target directory used
    pub target_dir: PathBuf,
}
83
/// Result of an encrypted workspace export
#[derive(Debug, Clone)]
pub struct EncryptedExportResult {
    /// Path to the encrypted export file
    pub output_path: PathBuf,
    /// Backup key for importing on other devices
    /// (sensitive — NOTE(review): the derived `Debug` impl prints it;
    /// confirm this struct is never logged verbatim)
    pub backup_key: String,
    /// When the export was created
    pub exported_at: DateTime<Utc>,
    /// Name of the exported workspace
    pub workspace_name: String,
    /// Whether encryption was successfully applied
    pub encryption_enabled: bool,
}
98
/// Result of an encrypted workspace import
///
/// Returned after restoring an encrypted export on this device.
#[derive(Debug, Clone)]
pub struct EncryptedImportResult {
    /// ID of the imported workspace
    pub workspace_id: String,
    /// Name of the imported workspace
    pub workspace_name: String,
    /// When the import was completed
    pub imported_at: DateTime<Utc>,
    /// Number of requests imported
    pub request_count: usize,
    /// Whether encryption was successfully restored
    pub encryption_restored: bool,
}
113
/// Result of a security check for sensitive data
///
/// Issues are split into `warnings` (non-blocking) and `errors` (critical);
/// NOTE(review): how `is_secure` is derived from them is computed elsewhere —
/// confirm against the check implementation.
#[derive(Debug, Clone)]
pub struct SecurityCheckResult {
    /// Workspace ID that was checked
    pub workspace_id: String,
    /// Workspace name that was checked
    pub workspace_name: String,
    /// Security warnings found
    pub warnings: Vec<SecurityWarning>,
    /// Security errors found (critical issues)
    pub errors: Vec<SecurityWarning>,
    /// Whether the workspace is considered secure
    pub is_secure: bool,
    /// Recommended actions to improve security
    pub recommended_actions: Vec<String>,
}
130
/// Security warning or error
///
/// A single finding from a sensitive-data scan; used for both the `warnings`
/// and `errors` lists of `SecurityCheckResult`.
#[derive(Debug, Clone)]
pub struct SecurityWarning {
    /// Type of field that contains sensitive data
    pub field_type: String,
    /// Name of the field
    pub field_name: String,
    /// Location where the sensitive data was found
    pub location: String,
    /// Severity of the issue
    pub severity: SecuritySeverity,
    /// Human-readable message
    pub message: String,
    /// Suggestion for fixing the issue
    pub suggestion: String,
}
147
/// Severity levels for security issues
///
/// Variants are declared in ascending severity, so the derived `Ord` gives
/// the natural comparison `Low < Medium < High < Critical`.
// Fieldless enum: `Copy`, `Eq`, `Hash` are free; `PartialOrd`/`Ord` let
// callers sort findings or take the maximum severity.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum SecuritySeverity {
    /// Low risk - informational
    Low,
    /// Medium risk - should be reviewed
    Medium,
    /// High risk - requires attention
    High,
    /// Critical risk - blocks operations
    Critical,
}
160
/// Git-friendly workspace export format
///
/// The flattened, self-contained representation written to YAML by the sync
/// and export paths.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceExport {
    /// Workspace metadata
    pub metadata: WorkspaceMetadata,
    /// Workspace configuration
    pub config: WorkspaceConfig,
    /// All requests organized by folder structure, keyed by request ID
    /// (NOTE(review): key semantics inferred from `ExportedRequest.id` —
    /// confirm against the export builder)
    pub requests: HashMap<String, ExportedRequest>,
}
171
/// Metadata for exported workspace
///
/// Summary information embedded at the top of a `WorkspaceExport`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceMetadata {
    /// Original workspace ID
    pub id: String,
    /// Workspace name
    pub name: String,
    /// Workspace description
    pub description: Option<String>,
    /// Export timestamp
    pub exported_at: DateTime<Utc>,
    /// Total number of requests
    pub request_count: usize,
    /// Total number of folders
    pub folder_count: usize,
}
188
/// Simplified workspace configuration for export
///
/// The `#[serde(default)]` fields keep older export files (written before
/// those fields existed) deserializable.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceConfig {
    /// Authentication configuration
    pub auth: Option<AuthConfig>,
    /// Base URL for requests
    pub base_url: Option<String>,
    /// Environment variables
    pub variables: HashMap<String, String>,
    /// Reality level for this workspace (1-5)
    /// Controls the realism of mock behavior (chaos, latency, MockAI)
    #[serde(default)]
    pub reality_level: Option<crate::RealityLevel>,
    /// AI mode for this workspace
    /// Controls how AI-generated artifacts are used at runtime
    #[serde(default)]
    pub ai_mode: Option<crate::ai_studio::config::AiMode>,
}
207
/// Authentication configuration for export
///
/// NOTE(review): `params` can carry secrets (`secret`, `client_secret`,
/// basic-auth passwords — see `from_config_auth`); exports containing this
/// struct should go through the encrypted export path.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuthConfig {
    /// Authentication type ("jwt", "oauth2", "basic", or "api_key")
    pub auth_type: String,
    /// Authentication parameters, keyed by parameter name
    pub params: HashMap<String, String>,
}
216
217impl AuthConfig {
218    /// Convert from config AuthConfig to export AuthConfig
219    pub fn from_config_auth(config_auth: &ConfigAuthConfig) -> Option<Self> {
220        if let Some(jwt) = &config_auth.jwt {
221            let mut params = HashMap::new();
222            if let Some(secret) = &jwt.secret {
223                params.insert("secret".to_string(), secret.clone());
224            }
225            if let Some(rsa_public_key) = &jwt.rsa_public_key {
226                params.insert("rsa_public_key".to_string(), rsa_public_key.clone());
227            }
228            if let Some(ecdsa_public_key) = &jwt.ecdsa_public_key {
229                params.insert("ecdsa_public_key".to_string(), ecdsa_public_key.clone());
230            }
231            if let Some(issuer) = &jwt.issuer {
232                params.insert("issuer".to_string(), issuer.clone());
233            }
234            if let Some(audience) = &jwt.audience {
235                params.insert("audience".to_string(), audience.clone());
236            }
237            if !jwt.algorithms.is_empty() {
238                params.insert("algorithms".to_string(), jwt.algorithms.join(","));
239            }
240            Some(AuthConfig {
241                auth_type: "jwt".to_string(),
242                params,
243            })
244        } else if let Some(oauth2) = &config_auth.oauth2 {
245            let mut params = HashMap::new();
246            params.insert("client_id".to_string(), oauth2.client_id.clone());
247            params.insert("client_secret".to_string(), oauth2.client_secret.clone());
248            params.insert("introspection_url".to_string(), oauth2.introspection_url.clone());
249            if let Some(auth_url) = &oauth2.auth_url {
250                params.insert("auth_url".to_string(), auth_url.clone());
251            }
252            if let Some(token_url) = &oauth2.token_url {
253                params.insert("token_url".to_string(), token_url.clone());
254            }
255            if let Some(token_type_hint) = &oauth2.token_type_hint {
256                params.insert("token_type_hint".to_string(), token_type_hint.clone());
257            }
258            Some(AuthConfig {
259                auth_type: "oauth2".to_string(),
260                params,
261            })
262        } else if let Some(basic_auth) = &config_auth.basic_auth {
263            let mut params = HashMap::new();
264            for (user, pass) in &basic_auth.credentials {
265                params.insert(user.clone(), pass.clone());
266            }
267            Some(AuthConfig {
268                auth_type: "basic".to_string(),
269                params,
270            })
271        } else if let Some(api_key) = &config_auth.api_key {
272            let mut params = HashMap::new();
273            params.insert("header_name".to_string(), api_key.header_name.clone());
274            if let Some(query_name) = &api_key.query_name {
275                params.insert("query_name".to_string(), query_name.clone());
276            }
277            if !api_key.keys.is_empty() {
278                params.insert("keys".to_string(), api_key.keys.join(","));
279            }
280            Some(AuthConfig {
281                auth_type: "api_key".to_string(),
282                params,
283            })
284        } else {
285            None
286        }
287    }
288}
289
/// Exported request format
///
/// Flattened request/response pair stored in `WorkspaceExport::requests`;
/// folder membership is preserved as a path string rather than nesting.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExportedRequest {
    /// Request ID
    pub id: String,
    /// Request name
    pub name: String,
    /// HTTP method
    pub method: String,
    /// Request path
    pub path: String,
    /// Folder path (for organization)
    pub folder_path: String,
    /// Request headers
    pub headers: HashMap<String, String>,
    /// Query parameters
    pub query_params: HashMap<String, String>,
    /// Request body
    pub body: Option<String>,
    /// Response status code
    pub response_status: Option<u16>,
    /// Response body
    pub response_body: Option<String>,
    /// Response headers
    pub response_headers: HashMap<String, String>,
    /// Response delay (ms)
    pub delay: Option<u64>,
}
318
319impl WorkspacePersistence {
320    /// Create a new persistence manager
321    pub fn new<P: AsRef<Path>>(base_dir: P) -> Self {
322        Self {
323            base_dir: base_dir.as_ref().to_path_buf(),
324        }
325    }
326
327    /// Get the workspace directory path
328    pub fn workspace_dir(&self) -> &Path {
329        &self.base_dir
330    }
331
332    /// Get the path for a specific workspace file
333    pub fn workspace_file_path(&self, workspace_id: &str) -> PathBuf {
334        self.base_dir.join(format!("{}.yaml", workspace_id))
335    }
336
337    /// Get the registry metadata file path
338    pub fn registry_file_path(&self) -> PathBuf {
339        self.base_dir.join("registry.yaml")
340    }
341
342    /// Get the sync state file path
343    pub fn sync_state_file_path(&self) -> PathBuf {
344        self.base_dir.join("sync_state.yaml")
345    }
346
347    /// Ensure the workspace directory exists
348    pub async fn ensure_workspace_dir(&self) -> Result<()> {
349        if !self.base_dir.exists() {
350            fs::create_dir_all(&self.base_dir).await.map_err(|e| {
351                Error::io_with_context("creating workspace directory", e.to_string())
352            })?;
353        }
354        Ok(())
355    }
356
357    /// Save a workspace to disk
358    pub async fn save_workspace(&self, workspace: &Workspace) -> Result<()> {
359        self.ensure_workspace_dir().await?;
360
361        let file_path = self.workspace_file_path(&workspace.id);
362        let content = serde_yaml::to_string(workspace)
363            .map_err(|e| Error::config(format!("Failed to serialize workspace: {}", e)))?;
364
365        fs::write(&file_path, content)
366            .await
367            .map_err(|e| Error::io_with_context("writing workspace file", e.to_string()))?;
368
369        Ok(())
370    }
371
372    /// Load a workspace from disk
373    pub async fn load_workspace(&self, workspace_id: &str) -> Result<Workspace> {
374        let file_path = self.workspace_file_path(workspace_id);
375
376        if !file_path.exists() {
377            return Err(Error::not_found("Workspace file", &*file_path.to_string_lossy()));
378        }
379
380        let content = fs::read_to_string(&file_path)
381            .await
382            .map_err(|e| Error::io_with_context("reading workspace file", e.to_string()))?;
383
384        let mut workspace: Workspace = serde_yaml::from_str(&content)
385            .map_err(|e| Error::config(format!("Failed to deserialize workspace: {}", e)))?;
386
387        // Initialize default mock environments if they don't exist (for backward compatibility)
388        workspace.initialize_default_mock_environments();
389
390        Ok(workspace)
391    }
392
393    /// Delete a workspace from disk
394    pub async fn delete_workspace(&self, workspace_id: &str) -> Result<()> {
395        let file_path = self.workspace_file_path(workspace_id);
396
397        if file_path.exists() {
398            fs::remove_file(&file_path)
399                .await
400                .map_err(|e| Error::io_with_context("deleting workspace file", e.to_string()))?;
401        }
402
403        Ok(())
404    }
405
406    /// Save the workspace registry metadata
    /// Save the workspace registry metadata
    ///
    /// Writes `registry.yaml` containing every workspace plus the ID of the
    /// active workspace.
    ///
    /// NOTE(review): full `Workspace` values are serialized here, so
    /// `registry.yaml` duplicates the per-workspace files; `load_registry`
    /// only reads the IDs back.
    pub async fn save_registry(&self, registry: &WorkspaceRegistry) -> Result<()> {
        self.ensure_workspace_dir().await?;

        // Snapshot the registry into a serde-friendly shape.
        let serializable = SerializableWorkspaceRegistry {
            workspaces: registry.get_workspaces().into_iter().cloned().collect(),
            active_workspace: registry.get_active_workspace_id().map(|s| s.to_string()),
        };

        let file_path = self.registry_file_path();
        let content = serde_yaml::to_string(&serializable)
            .map_err(|e| Error::config(format!("Failed to serialize registry: {}", e)))?;

        fs::write(&file_path, content)
            .await
            .map_err(|e| Error::io_with_context("writing registry file", e.to_string()))?;

        Ok(())
    }
425
426    /// Load the workspace registry metadata
    /// Load the workspace registry metadata
    ///
    /// Returns an empty registry when `registry.yaml` does not exist yet.
    /// Individual workspaces that fail to load are skipped with a warning so
    /// one corrupt file cannot prevent the rest of the registry from loading.
    pub async fn load_registry(&self) -> Result<WorkspaceRegistry> {
        let file_path = self.registry_file_path();

        if !file_path.exists() {
            // Return empty registry if no registry file exists
            return Ok(WorkspaceRegistry::new());
        }

        let content = fs::read_to_string(&file_path)
            .await
            .map_err(|e| Error::io_with_context("reading registry file", e.to_string()))?;

        let serializable: SerializableWorkspaceRegistry = serde_yaml::from_str(&content)
            .map_err(|e| Error::config(format!("Failed to deserialize registry: {}", e)))?;

        let mut registry = WorkspaceRegistry::new();

        // Load individual workspaces. Only the IDs from the registry entries
        // are used; each workspace's own file is the authoritative source.
        for workspace_meta in &serializable.workspaces {
            match self.load_workspace(&workspace_meta.id).await {
                Ok(mut workspace) => {
                    // Ensure mock environments are initialized (for backward compatibility)
                    workspace.initialize_default_mock_environments();
                    registry.add_workspace(workspace)?;
                }
                Err(e) => {
                    // Best-effort: log and continue with the remaining workspaces.
                    tracing::warn!("Failed to load workspace {}: {}", workspace_meta.id, e);
                }
            }
        }

        // Set active workspace; a stale/unknown ID only logs a warning.
        if let Some(active_id) = &serializable.active_workspace {
            if let Err(e) = registry.set_active_workspace(Some(active_id.clone())) {
                tracing::warn!("Failed to set active workspace {}: {}", active_id, e);
            }
        }

        Ok(registry)
    }
467
468    /// Save the sync state
469    pub async fn save_sync_state(&self, sync_state: &SyncState) -> Result<()> {
470        self.ensure_workspace_dir().await?;
471
472        let file_path = self.sync_state_file_path();
473        let content = serde_yaml::to_string(sync_state)
474            .map_err(|e| Error::config(format!("Failed to serialize sync state: {}", e)))?;
475
476        fs::write(&file_path, content)
477            .await
478            .map_err(|e| Error::io_with_context("writing sync state file", e.to_string()))?;
479
480        Ok(())
481    }
482
483    /// Load the sync state
484    pub async fn load_sync_state(&self) -> Result<SyncState> {
485        let file_path = self.sync_state_file_path();
486
487        if !file_path.exists() {
488            // Return default sync state if no sync state file exists
489            return Ok(SyncState {
490                last_sync_timestamp: Utc::now(),
491            });
492        }
493
494        let content = fs::read_to_string(&file_path)
495            .await
496            .map_err(|e| Error::io_with_context("reading sync state file", e.to_string()))?;
497
498        let sync_state: SyncState = serde_yaml::from_str(&content)
499            .map_err(|e| Error::config(format!("Failed to deserialize sync state: {}", e)))?;
500
501        Ok(sync_state)
502    }
503
504    /// List all workspace IDs from disk
505    pub async fn list_workspace_ids(&self) -> Result<Vec<EntityId>> {
506        if !self.base_dir.exists() {
507            return Ok(Vec::new());
508        }
509
510        let mut workspace_ids = Vec::new();
511
512        let mut entries = fs::read_dir(&self.base_dir)
513            .await
514            .map_err(|e| Error::io_with_context("reading workspace directory", e.to_string()))?;
515
516        while let Some(entry) = entries
517            .next_entry()
518            .await
519            .map_err(|e| Error::io_with_context("reading directory entry", e.to_string()))?
520        {
521            let path = entry.path();
522            if path.is_file() {
523                if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
524                    if file_name != "registry.yaml" && file_name.ends_with(".yaml") {
525                        if let Some(id) = file_name.strip_suffix(".yaml") {
526                            workspace_ids.push(id.to_string());
527                        }
528                    }
529                }
530            }
531        }
532
533        Ok(workspace_ids)
534    }
535
536    /// Save the entire registry and all workspaces
    /// Save the entire registry and all workspaces
    ///
    /// Persists `registry.yaml` first, then each workspace's individual file.
    /// Not atomic: a failure partway through leaves earlier files written.
    pub async fn save_full_registry(&self, registry: &WorkspaceRegistry) -> Result<()> {
        // Save registry metadata
        self.save_registry(registry).await?;

        // Save all workspaces
        for workspace in registry.get_workspaces() {
            self.save_workspace(workspace).await?;
        }

        Ok(())
    }
548
549    /// Load the entire registry and all workspaces
    /// Load the entire registry and all workspaces
    ///
    /// Thin alias for `load_registry`, which already loads every workspace
    /// file referenced by the registry metadata.
    pub async fn load_full_registry(&self) -> Result<WorkspaceRegistry> {
        self.load_registry().await
    }
553
554    /// Backup workspace data
555    pub async fn backup_workspace(&self, workspace_id: &str, backup_dir: &Path) -> Result<PathBuf> {
556        let workspace_file = self.workspace_file_path(workspace_id);
557
558        if !workspace_file.exists() {
559            return Err(Error::not_found("Workspace", workspace_id));
560        }
561
562        // Ensure backup directory exists
563        if !backup_dir.exists() {
564            fs::create_dir_all(backup_dir)
565                .await
566                .map_err(|e| Error::io_with_context("creating backup directory", e.to_string()))?;
567        }
568
569        // Create backup filename with timestamp
570        let timestamp = Utc::now().format("%Y%m%d_%H%M%S");
571        let backup_filename = format!("{}_{}.yaml", workspace_id, timestamp);
572        let backup_path = backup_dir.join(backup_filename);
573
574        // Copy workspace file
575        fs::copy(&workspace_file, &backup_path)
576            .await
577            .map_err(|e| Error::io_with_context("creating backup", e.to_string()))?;
578
579        Ok(backup_path)
580    }
581
582    /// Restore workspace from backup
583    pub async fn restore_workspace(&self, backup_path: &Path) -> Result<EntityId> {
584        if !backup_path.exists() {
585            return Err(Error::not_found("Backup file", &*backup_path.to_string_lossy()));
586        }
587
588        // Load workspace from backup
589        let content = fs::read_to_string(backup_path)
590            .await
591            .map_err(|e| Error::io_with_context("reading backup file", e.to_string()))?;
592
593        let workspace: Workspace = serde_yaml::from_str(&content)
594            .map_err(|e| Error::config(format!("Failed to deserialize backup: {}", e)))?;
595
596        // Save restored workspace
597        self.save_workspace(&workspace).await?;
598
599        Ok(workspace.id)
600    }
601
602    /// Clean up old backups
603    pub async fn cleanup_old_backups(&self, backup_dir: &Path, keep_count: usize) -> Result<usize> {
604        if !backup_dir.exists() {
605            return Ok(0);
606        }
607
608        let mut backup_files = Vec::new();
609
610        let mut entries = fs::read_dir(backup_dir)
611            .await
612            .map_err(|e| Error::io_with_context("reading backup directory", e.to_string()))?;
613
614        while let Some(entry) = entries
615            .next_entry()
616            .await
617            .map_err(|e| Error::io_with_context("reading backup entry", e.to_string()))?
618        {
619            let path = entry.path();
620            if path.is_file() {
621                if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
622                    if file_name.ends_with(".yaml") {
623                        if let Ok(metadata) = entry.metadata().await {
624                            if let Ok(modified) = metadata.modified() {
625                                backup_files.push((path, modified));
626                            }
627                        }
628                    }
629                }
630            }
631        }
632
633        // Sort by modification time (newest first)
634        backup_files.sort_by(|a, b| b.1.cmp(&a.1));
635
636        // Remove old backups
637        let mut removed_count = 0;
638        for (path, _) in backup_files.iter().skip(keep_count) {
639            if fs::remove_file(path).await.is_ok() {
640                removed_count += 1;
641            }
642        }
643
644        Ok(removed_count)
645    }
646
647    /// Advanced sync with additional configuration options
648    #[allow(clippy::too_many_arguments)]
649    pub async fn sync_to_directory_advanced(
650        &self,
651        target_dir: &str,
652        strategy: &str,
653        workspace_ids: Option<&str>,
654        structure: &str,
655        include_meta: bool,
656        force: bool,
657        filename_pattern: &str,
658        exclude_pattern: Option<&str>,
659        dry_run: bool,
660    ) -> Result<SyncResult> {
661        let target_path = PathBuf::from(target_dir);
662
663        // Ensure target directory exists (unless dry run)
664        if !dry_run && !target_path.exists() {
665            fs::create_dir_all(&target_path)
666                .await
667                .map_err(|e| Error::io_with_context("creating target directory", e.to_string()))?;
668        }
669
670        // Parse strategy
671        let sync_strategy = match strategy {
672            "full" => SyncStrategy::Full,
673            "incremental" => SyncStrategy::Incremental,
674            "selective" => {
675                if let Some(ids) = workspace_ids {
676                    let workspace_list = ids.split(',').map(|s| s.trim().to_string()).collect();
677                    SyncStrategy::Selective(workspace_list)
678                } else {
679                    return Err(Error::validation("Selective strategy requires workspace IDs"));
680                }
681            }
682            _ => return Err(Error::validation(format!("Unknown sync strategy: {}", strategy))),
683        };
684
685        // Parse directory structure
686        let dir_structure = match structure {
687            "flat" => DirectoryStructure::Flat,
688            "nested" => DirectoryStructure::Nested,
689            "grouped" => DirectoryStructure::Grouped,
690            _ => {
691                return Err(Error::validation(format!(
692                    "Unknown directory structure: {}",
693                    structure
694                )))
695            }
696        };
697
698        // Get workspaces to sync based on strategy
699        let mut workspaces_to_sync = self.get_workspaces_for_sync(&sync_strategy).await?;
700
701        // Apply exclusion filter if provided
702        if let Some(exclude) = exclude_pattern {
703            if let Ok(regex) = Regex::new(exclude) {
704                workspaces_to_sync.retain(|id| !regex.is_match(id));
705            }
706        }
707
708        let mut result = SyncResult {
709            synced_workspaces: 0,
710            synced_requests: 0,
711            files_created: 0,
712            target_dir: target_path.clone(),
713        };
714
715        // Sync each workspace
716        for workspace_id in workspaces_to_sync {
717            if let Ok(workspace) = self.load_workspace(&workspace_id).await {
718                let workspace_result = self
719                    .sync_workspace_to_directory_advanced(
720                        &workspace,
721                        &target_path,
722                        &dir_structure,
723                        include_meta,
724                        force,
725                        filename_pattern,
726                        dry_run,
727                    )
728                    .await?;
729
730                result.synced_workspaces += 1;
731                result.synced_requests += workspace_result.requests_count;
732                result.files_created += workspace_result.files_created;
733            }
734        }
735
736        // Update sync state for incremental syncs
737        if let SyncStrategy::Incremental = sync_strategy {
738            let new_sync_state = SyncState {
739                last_sync_timestamp: Utc::now(),
740            };
741            if let Err(e) = self.save_sync_state(&new_sync_state).await {
742                tracing::warn!("Failed to save sync state: {}", e);
743            }
744        }
745
746        Ok(result)
747    }
748
749    /// Advanced sync for a single workspace with custom filename patterns
750    #[allow(clippy::too_many_arguments)]
751    async fn sync_workspace_to_directory_advanced(
752        &self,
753        workspace: &Workspace,
754        target_dir: &Path,
755        structure: &DirectoryStructure,
756        include_meta: bool,
757        force: bool,
758        filename_pattern: &str,
759        dry_run: bool,
760    ) -> Result<WorkspaceSyncResult> {
761        let mut result = WorkspaceSyncResult {
762            requests_count: 0,
763            files_created: 0,
764        };
765
766        match structure {
767            DirectoryStructure::Flat => {
768                let export = self.create_workspace_export(workspace).await?;
769                let filename = self.generate_filename(filename_pattern, workspace);
770                let file_path = target_dir.join(format!("{}.yaml", filename));
771
772                if force || !file_path.exists() {
773                    if !dry_run {
774                        let content = serde_yaml::to_string(&export).map_err(|e| {
775                            Error::config(format!("Failed to serialize workspace: {}", e))
776                        })?;
777
778                        fs::write(&file_path, content).await.map_err(|e| {
779                            Error::io_with_context("writing workspace file", e.to_string())
780                        })?;
781                    }
782                    result.files_created += 1;
783                }
784            }
785
786            DirectoryStructure::Nested => {
787                let workspace_dir =
788                    target_dir.join(self.generate_filename(filename_pattern, workspace));
789                if !dry_run && !workspace_dir.exists() {
790                    fs::create_dir_all(&workspace_dir).await.map_err(|e| {
791                        Error::io_with_context("creating workspace directory", e.to_string())
792                    })?;
793                }
794
795                // Export main workspace file
796                let export = self.create_workspace_export(workspace).await?;
797                let workspace_file = workspace_dir.join("workspace.yaml");
798
799                if force || !workspace_file.exists() {
800                    if !dry_run {
801                        let content = serde_yaml::to_string(&export).map_err(|e| {
802                            Error::config(format!("Failed to serialize workspace: {}", e))
803                        })?;
804
805                        fs::write(&workspace_file, content).await.map_err(|e| {
806                            Error::io_with_context("writing workspace file", e.to_string())
807                        })?;
808                    }
809                    result.files_created += 1;
810                }
811
812                // Export individual requests
813                let requests_dir = workspace_dir.join("requests");
814                if !dry_run && !requests_dir.exists() {
815                    fs::create_dir_all(&requests_dir).await.map_err(|e| {
816                        Error::io_with_context("creating requests directory", e.to_string())
817                    })?;
818                }
819
820                result.requests_count += self
821                    .export_workspace_requests_advanced(workspace, &requests_dir, force, dry_run)
822                    .await?;
823            }
824
825            DirectoryStructure::Grouped => {
826                // Create grouped directories
827                let requests_dir = target_dir.join("requests");
828                let workspaces_dir = target_dir.join("workspaces");
829
830                if !dry_run {
831                    for dir in [&requests_dir, &workspaces_dir] {
832                        if !dir.exists() {
833                            fs::create_dir_all(dir).await.map_err(|e| {
834                                Error::io_with_context("creating directory", e.to_string())
835                            })?;
836                        }
837                    }
838                }
839
840                // Export workspace metadata
841                let export = self.create_workspace_export(workspace).await?;
842                let filename = self.generate_filename(filename_pattern, workspace);
843                let workspace_file = workspaces_dir.join(format!("{}.yaml", filename));
844
845                if force || !workspace_file.exists() {
846                    if !dry_run {
847                        let content = serde_yaml::to_string(&export).map_err(|e| {
848                            Error::config(format!("Failed to serialize workspace: {}", e))
849                        })?;
850
851                        fs::write(&workspace_file, content).await.map_err(|e| {
852                            Error::io_with_context("writing workspace file", e.to_string())
853                        })?;
854                    }
855                    result.files_created += 1;
856                }
857
858                // Export requests to requests directory
859                result.requests_count += self
860                    .export_workspace_requests_grouped_advanced(
861                        workspace,
862                        &requests_dir,
863                        force,
864                        dry_run,
865                    )
866                    .await?;
867            }
868        }
869
870        // Create metadata file if requested
871        if include_meta && !dry_run {
872            self.create_metadata_file(workspace, target_dir, structure).await?;
873            result.files_created += 1;
874        }
875
876        Ok(result)
877    }
878
879    /// Generate filename from pattern
880    fn generate_filename(&self, pattern: &str, workspace: &Workspace) -> String {
881        let timestamp = Utc::now().format("%Y%m%d_%H%M%S");
882
883        pattern
884            .replace("{name}", &self.sanitize_filename(&workspace.name))
885            .replace("{id}", &workspace.id)
886            .replace("{timestamp}", &timestamp.to_string())
887    }
888
889    /// Advanced request export with dry run support
890    async fn export_workspace_requests_advanced(
891        &self,
892        workspace: &Workspace,
893        requests_dir: &Path,
894        force: bool,
895        dry_run: bool,
896    ) -> Result<usize> {
897        let mut count = 0;
898
899        for request in &workspace.requests {
900            let file_path =
901                requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
902            if force || !file_path.exists() {
903                if !dry_run {
904                    let exported = self.convert_request_to_exported(request, "");
905                    let content = serde_yaml::to_string(&exported).map_err(|e| {
906                        Error::config(format!("Failed to serialize request: {}", e))
907                    })?;
908
909                    fs::write(&file_path, content).await.map_err(|e| {
910                        Error::io_with_context("writing request file", e.to_string())
911                    })?;
912                }
913                count += 1;
914            }
915        }
916
917        // Export folder requests
918        for folder in &workspace.folders {
919            count += self
920                .export_folder_requests_advanced(folder, requests_dir, force, &folder.name, dry_run)
921                .await?;
922        }
923
924        Ok(count)
925    }
926
927    /// Advanced folder request export
928    async fn export_folder_requests_advanced(
929        &self,
930        folder: &Folder,
931        requests_dir: &Path,
932        force: bool,
933        folder_path: &str,
934        dry_run: bool,
935    ) -> Result<usize> {
936        use std::collections::VecDeque;
937
938        let mut count = 0;
939        let mut queue = VecDeque::new();
940
941        // Start with the root folder
942        queue.push_back((folder, folder_path.to_string()));
943
944        while let Some((current_folder, current_path)) = queue.pop_front() {
945            // Export requests in current folder
946            for request in &current_folder.requests {
947                let file_path =
948                    requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
949                if force || !file_path.exists() {
950                    if !dry_run {
951                        let exported = self.convert_request_to_exported(request, &current_path);
952                        let content = serde_yaml::to_string(&exported).map_err(|e| {
953                            Error::config(format!("Failed to serialize request: {}", e))
954                        })?;
955
956                        fs::write(&file_path, content).await.map_err(|e| {
957                            Error::io_with_context("writing request file", e.to_string())
958                        })?;
959                    }
960                    count += 1;
961                }
962            }
963
964            // Add subfolders to queue with updated paths
965            for subfolder in &current_folder.folders {
966                let subfolder_path = if current_path.is_empty() {
967                    subfolder.name.clone()
968                } else {
969                    format!("{}/{}", current_path, subfolder.name)
970                };
971                queue.push_back((subfolder, subfolder_path));
972            }
973        }
974
975        Ok(count)
976    }
977
978    /// Advanced grouped request export
979    async fn export_workspace_requests_grouped_advanced(
980        &self,
981        workspace: &Workspace,
982        requests_dir: &Path,
983        force: bool,
984        dry_run: bool,
985    ) -> Result<usize> {
986        let mut count = 0;
987        let workspace_requests_dir = requests_dir.join(self.sanitize_filename(&workspace.name));
988
989        if !dry_run && !workspace_requests_dir.exists() {
990            fs::create_dir_all(&workspace_requests_dir).await.map_err(|e| {
991                Error::io_with_context("creating workspace requests directory", e.to_string())
992            })?;
993        }
994
995        count += self
996            .export_workspace_requests_advanced(workspace, &workspace_requests_dir, force, dry_run)
997            .await?;
998        Ok(count)
999    }
1000
1001    /// Sync workspaces to an external directory for Git/Dropbox integration
1002    pub async fn sync_to_directory(
1003        &self,
1004        target_dir: &str,
1005        strategy: &str,
1006        workspace_ids: Option<&str>,
1007        structure: &str,
1008        include_meta: bool,
1009        force: bool,
1010    ) -> Result<SyncResult> {
1011        let target_path = PathBuf::from(target_dir);
1012
1013        // Ensure target directory exists
1014        if !target_path.exists() {
1015            fs::create_dir_all(&target_path)
1016                .await
1017                .map_err(|e| Error::io_with_context("creating target directory", e.to_string()))?;
1018        }
1019
1020        // Parse strategy
1021        let sync_strategy = match strategy {
1022            "full" => SyncStrategy::Full,
1023            "incremental" => SyncStrategy::Incremental,
1024            "selective" => {
1025                if let Some(ids) = workspace_ids {
1026                    let workspace_list = ids.split(',').map(|s| s.trim().to_string()).collect();
1027                    SyncStrategy::Selective(workspace_list)
1028                } else {
1029                    return Err(Error::validation("Selective strategy requires workspace IDs"));
1030                }
1031            }
1032            _ => return Err(Error::validation(format!("Unknown sync strategy: {}", strategy))),
1033        };
1034
1035        // Parse directory structure
1036        let dir_structure = match structure {
1037            "flat" => DirectoryStructure::Flat,
1038            "nested" => DirectoryStructure::Nested,
1039            "grouped" => DirectoryStructure::Grouped,
1040            _ => {
1041                return Err(Error::validation(format!(
1042                    "Unknown directory structure: {}",
1043                    structure
1044                )))
1045            }
1046        };
1047
1048        // Get workspaces to sync based on strategy
1049        let workspaces_to_sync = self.get_workspaces_for_sync(&sync_strategy).await?;
1050
1051        let mut result = SyncResult {
1052            synced_workspaces: 0,
1053            synced_requests: 0,
1054            files_created: 0,
1055            target_dir: target_path.clone(),
1056        };
1057
1058        // Sync each workspace
1059        for workspace_id in workspaces_to_sync {
1060            if let Ok(workspace) = self.load_workspace(&workspace_id).await {
1061                let workspace_result = self
1062                    .sync_workspace_to_directory(
1063                        &workspace,
1064                        &target_path,
1065                        &dir_structure,
1066                        include_meta,
1067                        force,
1068                    )
1069                    .await?;
1070
1071                result.synced_workspaces += 1;
1072                result.synced_requests += workspace_result.requests_count;
1073                result.files_created += workspace_result.files_created;
1074            }
1075        }
1076
1077        // Update sync state for incremental syncs
1078        if let SyncStrategy::Incremental = sync_strategy {
1079            let new_sync_state = SyncState {
1080                last_sync_timestamp: Utc::now(),
1081            };
1082            if let Err(e) = self.save_sync_state(&new_sync_state).await {
1083                tracing::warn!("Failed to save sync state: {}", e);
1084            }
1085        }
1086
1087        Ok(result)
1088    }
1089
1090    /// Get list of workspace IDs to sync based on strategy
1091    async fn get_workspaces_for_sync(&self, strategy: &SyncStrategy) -> Result<Vec<String>> {
1092        match strategy {
1093            SyncStrategy::Full => self.list_workspace_ids().await,
1094            SyncStrategy::Incremental => {
1095                // Load sync state to get last sync timestamp
1096                let sync_state = self.load_sync_state().await?;
1097                let last_sync = sync_state.last_sync_timestamp;
1098
1099                // Get all workspace IDs
1100                let all_workspace_ids = self.list_workspace_ids().await?;
1101
1102                // Filter workspaces that have been modified since last sync
1103                let mut modified_workspaces = Vec::new();
1104                for workspace_id in all_workspace_ids {
1105                    let file_path = self.workspace_file_path(&workspace_id);
1106                    if let Ok(metadata) = fs::metadata(&file_path).await {
1107                        if let Ok(modified_time) = metadata.modified() {
1108                            let modified_datetime = DateTime::<Utc>::from(modified_time);
1109                            if modified_datetime > last_sync {
1110                                modified_workspaces.push(workspace_id);
1111                            }
1112                        }
1113                    }
1114                }
1115
1116                Ok(modified_workspaces)
1117            }
1118            SyncStrategy::Selective(ids) => Ok(ids.clone()),
1119        }
1120    }
1121
    /// Sync a single workspace into `target_dir` using the given layout.
    ///
    /// Unlike the dry-run-aware advanced variant above, this always writes to
    /// disk. Existing files are only overwritten when `force` is set. Returns
    /// the number of requests exported and files created.
    async fn sync_workspace_to_directory(
        &self,
        workspace: &Workspace,
        target_dir: &Path,
        structure: &DirectoryStructure,
        include_meta: bool,
        force: bool,
    ) -> Result<WorkspaceSyncResult> {
        let mut result = WorkspaceSyncResult {
            requests_count: 0,
            files_created: 0,
        };

        match structure {
            // Flat: one YAML file per workspace, directly in target_dir.
            DirectoryStructure::Flat => {
                let export = self.create_workspace_export(workspace).await?;
                let file_path =
                    target_dir.join(format!("{}.yaml", self.sanitize_filename(&workspace.name)));

                if force || !file_path.exists() {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::config(format!("Failed to serialize workspace: {}", e))
                    })?;

                    fs::write(&file_path, content).await.map_err(|e| {
                        Error::io_with_context("writing workspace file", e.to_string())
                    })?;

                    result.files_created += 1;
                }
            }

            // Nested: a directory per workspace containing workspace.yaml plus
            // a requests/ subdirectory with one file per request.
            DirectoryStructure::Nested => {
                let workspace_dir = target_dir.join(self.sanitize_filename(&workspace.name));
                if !workspace_dir.exists() {
                    fs::create_dir_all(&workspace_dir).await.map_err(|e| {
                        Error::io_with_context("creating workspace directory", e.to_string())
                    })?;
                }

                // Export main workspace file
                let export = self.create_workspace_export(workspace).await?;
                let workspace_file = workspace_dir.join("workspace.yaml");

                if force || !workspace_file.exists() {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::config(format!("Failed to serialize workspace: {}", e))
                    })?;

                    fs::write(&workspace_file, content).await.map_err(|e| {
                        Error::io_with_context("writing workspace file", e.to_string())
                    })?;

                    result.files_created += 1;
                }

                // Export individual requests
                let requests_dir = workspace_dir.join("requests");
                if !requests_dir.exists() {
                    fs::create_dir_all(&requests_dir).await.map_err(|e| {
                        Error::io_with_context("creating requests directory", e.to_string())
                    })?;
                }

                result.requests_count +=
                    self.export_workspace_requests(workspace, &requests_dir, force).await?;
            }

            // Grouped: shared requests/ and workspaces/ directories; workspace
            // metadata and request files from all workspaces live side by side.
            DirectoryStructure::Grouped => {
                // Create grouped directories
                let requests_dir = target_dir.join("requests");
                let workspaces_dir = target_dir.join("workspaces");

                for dir in [&requests_dir, &workspaces_dir] {
                    if !dir.exists() {
                        fs::create_dir_all(dir).await.map_err(|e| {
                            Error::io_with_context("creating directory", e.to_string())
                        })?;
                    }
                }

                // Export workspace metadata
                let export = self.create_workspace_export(workspace).await?;
                let workspace_file = workspaces_dir
                    .join(format!("{}.yaml", self.sanitize_filename(&workspace.name)));

                if force || !workspace_file.exists() {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::config(format!("Failed to serialize workspace: {}", e))
                    })?;

                    fs::write(&workspace_file, content).await.map_err(|e| {
                        Error::io_with_context("writing workspace file", e.to_string())
                    })?;

                    result.files_created += 1;
                }

                // Export requests to requests directory
                result.requests_count +=
                    self.export_workspace_requests_grouped(workspace, &requests_dir, force).await?;
            }
        }

        // Create metadata file if requested
        if include_meta {
            self.create_metadata_file(workspace, target_dir, structure).await?;
            result.files_created += 1;
        }

        Ok(result)
    }
1235
1236    /// Create a Git-friendly workspace export
1237    async fn create_workspace_export(&self, workspace: &Workspace) -> Result<WorkspaceExport> {
1238        let mut requests = HashMap::new();
1239
1240        // Collect all requests from workspace
1241        self.collect_requests_from_workspace(workspace, &mut requests, "".to_string());
1242
1243        let metadata = WorkspaceMetadata {
1244            id: workspace.id.clone(),
1245            name: workspace.name.clone(),
1246            description: workspace.description.clone(),
1247            exported_at: Utc::now(),
1248            request_count: requests.len(),
1249            folder_count: workspace.folders.len(),
1250        };
1251
1252        let config = WorkspaceConfig {
1253            auth: workspace.config.auth.as_ref().and_then(AuthConfig::from_config_auth),
1254            base_url: workspace.config.base_url.clone(),
1255            variables: workspace.config.global_environment.variables.clone(),
1256            reality_level: workspace.config.reality_level,
1257            ai_mode: None, // Default to None for exported workspaces
1258        };
1259
1260        Ok(WorkspaceExport {
1261            metadata,
1262            config,
1263            requests,
1264        })
1265    }
1266
1267    /// Collect all requests from workspace into a hashmap
1268    fn collect_requests_from_workspace(
1269        &self,
1270        workspace: &Workspace,
1271        requests: &mut HashMap<String, ExportedRequest>,
1272        folder_path: String,
1273    ) {
1274        // Add root-level requests
1275        for request in &workspace.requests {
1276            let exported = self.convert_request_to_exported(request, &folder_path);
1277            requests.insert(request.id.clone(), exported);
1278        }
1279
1280        // Add folder requests recursively
1281        for folder in &workspace.folders {
1282            let current_path = if folder_path.is_empty() {
1283                folder.name.clone()
1284            } else {
1285                format!("{}/{}", folder_path, folder.name)
1286            };
1287
1288            for request in &folder.requests {
1289                let exported = self.convert_request_to_exported(request, &current_path);
1290                requests.insert(request.id.clone(), exported);
1291            }
1292
1293            // Recursively process subfolders
1294            self.collect_requests_from_folders(folder, requests, current_path);
1295        }
1296    }
1297
1298    /// Recursively collect requests from folders
1299    fn collect_requests_from_folders(
1300        &self,
1301        folder: &Folder,
1302        requests: &mut HashMap<String, ExportedRequest>,
1303        folder_path: String,
1304    ) {
1305        for subfolder in &folder.folders {
1306            let current_path = format!("{}/{}", folder_path, subfolder.name);
1307
1308            for request in &subfolder.requests {
1309                let exported = self.convert_request_to_exported(request, &current_path);
1310                requests.insert(request.id.clone(), exported);
1311            }
1312
1313            self.collect_requests_from_folders(subfolder, requests, current_path);
1314        }
1315    }
1316
1317    /// Convert a MockRequest to ExportedRequest
1318    fn convert_request_to_exported(
1319        &self,
1320        request: &MockRequest,
1321        folder_path: &str,
1322    ) -> ExportedRequest {
1323        ExportedRequest {
1324            id: request.id.clone(),
1325            name: request.name.clone(),
1326            method: format!("{:?}", request.method),
1327            path: request.path.clone(),
1328            folder_path: folder_path.to_string(),
1329            headers: request.headers.clone(),
1330            query_params: request.query_params.clone(),
1331            body: request.body.clone(),
1332            response_status: Some(request.response.status_code),
1333            response_body: request.response.body.clone(),
1334            response_headers: request.response.headers.clone(),
1335            delay: request.response.delay_ms,
1336        }
1337    }
1338
    /// Export workspace with encryption for secure sharing.
    ///
    /// Flow: filter the workspace via `to_filtered_for_sync()`, auto-encrypt
    /// its environment variables, serialize the export to JSON, encrypt the
    /// whole payload with the workspace key, and write it to `output_path`.
    /// A backup key is generated so the recipient can restore the workspace
    /// key on import.
    ///
    /// # Errors
    /// Fails if auto-encryption is not enabled for the workspace, if
    /// serialization or encryption fails, or if the output file cannot be
    /// written.
    pub async fn export_workspace_encrypted(
        &self,
        workspace: &Workspace,
        output_path: &Path,
    ) -> Result<EncryptedExportResult> {
        // Check if encryption is enabled for this workspace
        if !workspace.config.auto_encryption.enabled {
            return Err(Error::invalid_state("Encryption is not enabled for this workspace. Enable encryption in workspace settings first."));
        }

        // Get auto-encryption config
        let encryption_config = workspace.config.auto_encryption.clone();
        let processor = AutoEncryptionProcessor::new(&workspace.id, encryption_config);

        // Create filtered workspace copy for export
        // (to_filtered_for_sync() presumably strips non-syncable data — see workspace module)
        let mut filtered_workspace = workspace.to_filtered_for_sync();

        // Apply automatic encryption to the filtered workspace's env vars
        self.encrypt_workspace_data(&mut filtered_workspace, &processor)?;

        // Create standard export
        let export = self.create_workspace_export(&filtered_workspace).await?;

        // Encrypt the entire export payload with the workspace key
        let export_json = serde_json::to_string_pretty(&export)
            .map_err(|e| Error::config(format!("Failed to serialize export: {}", e)))?;

        let encrypted_data = utils::encrypt_for_workspace(&workspace.id, &export_json)?;

        // Generate backup key for sharing; the recipient needs it to decrypt
        let key_manager = WorkspaceKeyManager::new();
        let backup_key = key_manager.generate_workspace_key_backup(&workspace.id)?;

        // Write encrypted data to file
        fs::write(output_path, &encrypted_data)
            .await
            .map_err(|e| Error::io_with_context("writing encrypted export", e.to_string()))?;

        Ok(EncryptedExportResult {
            output_path: output_path.to_path_buf(),
            backup_key,
            exported_at: Utc::now(),
            workspace_name: workspace.name.clone(),
            encryption_enabled: true,
        })
    }
1386
1387    /// Import encrypted workspace
1388    pub async fn import_workspace_encrypted(
1389        &self,
1390        encrypted_file: &Path,
1391        _workspace_name: Option<&str>,
1392        _registry: &mut WorkspaceRegistry,
1393    ) -> Result<EncryptedImportResult> {
1394        // Read encrypted data
1395        let _encrypted_data = fs::read_to_string(encrypted_file)
1396            .await
1397            .map_err(|e| Error::io_with_context("reading encrypted file", e.to_string()))?;
1398
1399        // For import, we need the workspace ID and backup key
1400        // This would typically be provided by the user or extracted from metadata
1401        Err(Error::validation("Encrypted import requires workspace ID and backup key. Use import_workspace_encrypted_with_key instead."))
1402    }
1403
1404    /// Import encrypted workspace with specific workspace ID and backup key
1405    pub async fn import_workspace_encrypted_with_key(
1406        &self,
1407        encrypted_file: &Path,
1408        workspace_id: &str,
1409        backup_key: &str,
1410        workspace_name: Option<&str>,
1411        registry: &mut WorkspaceRegistry,
1412    ) -> Result<EncryptedImportResult> {
1413        // Ensure workspace key exists or restore from backup
1414        let key_manager = WorkspaceKeyManager::new();
1415        if !key_manager.has_workspace_key(workspace_id) {
1416            key_manager.restore_workspace_key_from_backup(workspace_id, backup_key)?;
1417        }
1418
1419        // Read and decrypt the data
1420        let encrypted_data = fs::read_to_string(encrypted_file)
1421            .await
1422            .map_err(|e| Error::io_with_context("reading encrypted file", e.to_string()))?;
1423
1424        let decrypted_json = utils::decrypt_for_workspace(workspace_id, &encrypted_data)?;
1425
1426        // Parse the export data
1427        let export: WorkspaceExport = serde_json::from_str(&decrypted_json)
1428            .map_err(|e| Error::config(format!("Failed to parse decrypted export: {}", e)))?;
1429
1430        // Convert export to workspace
1431        let workspace = self.convert_export_to_workspace(&export, workspace_name)?;
1432
1433        // Add to registry
1434        let imported_id = registry.add_workspace(workspace)?;
1435
1436        Ok(EncryptedImportResult {
1437            workspace_id: imported_id,
1438            workspace_name: export.metadata.name.clone(),
1439            imported_at: Utc::now(),
1440            request_count: export.requests.len(),
1441            encryption_restored: true,
1442        })
1443    }
1444
1445    /// Apply encryption to workspace data before export
1446    fn encrypt_workspace_data(
1447        &self,
1448        workspace: &mut Workspace,
1449        processor: &AutoEncryptionProcessor,
1450    ) -> Result<()> {
1451        // Encrypt environment variables
1452        for env in &mut workspace.config.environments {
1453            processor.process_env_vars(&mut env.variables)?;
1454        }
1455        processor.process_env_vars(&mut workspace.config.global_environment.variables)?;
1456
1457        // Note: Headers and request bodies would be encrypted here when implemented
1458        // For now, we rely on the filtering done by to_filtered_for_sync()
1459
1460        Ok(())
1461    }
1462
1463    /// Convert WorkspaceExport back to Workspace
1464    fn convert_export_to_workspace(
1465        &self,
1466        export: &WorkspaceExport,
1467        name_override: Option<&str>,
1468    ) -> Result<Workspace> {
1469        let mut workspace =
1470            Workspace::new(name_override.unwrap_or(&export.metadata.name).to_string());
1471
1472        // Set description if provided
1473        if let Some(desc) = &export.metadata.description {
1474            workspace.description = Some(desc.clone());
1475        }
1476
1477        // Restore requests from export
1478        for exported_request in export.requests.values() {
1479            // Convert exported request back to MockRequest
1480            let method = self.parse_http_method(&exported_request.method)?;
1481            let mut request = MockRequest::new(
1482                method,
1483                exported_request.path.clone(),
1484                exported_request.name.clone(),
1485            );
1486
1487            // Set additional properties
1488            if let Some(status) = exported_request.response_status {
1489                request.response.status_code = status;
1490            }
1491
1492            // Set other response properties if available
1493            if let Some(body) = &exported_request.response_body {
1494                request.response.body = Some(body.clone());
1495            }
1496            request.response.headers = exported_request.response_headers.clone();
1497            if let Some(delay) = exported_request.delay {
1498                request.response.delay_ms = Some(delay);
1499            }
1500
1501            workspace.add_request(request)?;
1502        }
1503
1504        // Restore configuration
1505        workspace.config.global_environment.variables = export.config.variables.clone();
1506
1507        Ok(workspace)
1508    }
1509
1510    /// Parse HTTP method string to enum
1511    fn parse_http_method(&self, method_str: &str) -> Result<crate::routing::HttpMethod> {
1512        match method_str.to_uppercase().as_str() {
1513            "GET" => Ok(crate::routing::HttpMethod::GET),
1514            "POST" => Ok(crate::routing::HttpMethod::POST),
1515            "PUT" => Ok(crate::routing::HttpMethod::PUT),
1516            "DELETE" => Ok(crate::routing::HttpMethod::DELETE),
1517            "PATCH" => Ok(crate::routing::HttpMethod::PATCH),
1518            "HEAD" => Ok(crate::routing::HttpMethod::HEAD),
1519            "OPTIONS" => Ok(crate::routing::HttpMethod::OPTIONS),
1520            _ => Err(Error::validation(format!("Unknown HTTP method: {}", method_str))),
1521        }
1522    }
1523
1524    /// Check workspace for unencrypted sensitive data before export
1525    pub fn check_workspace_for_unencrypted_secrets(
1526        &self,
1527        workspace: &Workspace,
1528    ) -> Result<SecurityCheckResult> {
1529        let mut warnings = Vec::new();
1530        let errors = Vec::new();
1531
1532        // Check environment variables
1533        self.check_environment_variables(workspace, &mut warnings)?;
1534
1535        // Check for sensitive patterns in request data (when implemented)
1536        // This would check headers, bodies, etc.
1537
1538        let has_warnings = !warnings.is_empty();
1539        let has_errors = !errors.is_empty();
1540
1541        Ok(SecurityCheckResult {
1542            workspace_id: workspace.id.clone(),
1543            workspace_name: workspace.name.clone(),
1544            warnings,
1545            errors,
1546            is_secure: !has_warnings && !has_errors,
1547            recommended_actions: self.generate_security_recommendations(has_warnings, has_errors),
1548        })
1549    }
1550
1551    /// Check environment variables for sensitive data
1552    fn check_environment_variables(
1553        &self,
1554        workspace: &Workspace,
1555        warnings: &mut Vec<SecurityWarning>,
1556    ) -> Result<()> {
1557        let sensitive_keys = [
1558            "password",
1559            "secret",
1560            "key",
1561            "token",
1562            "credential",
1563            "api_key",
1564            "apikey",
1565            "api_secret",
1566            "db_password",
1567            "database_password",
1568            "aws_secret_key",
1569            "aws_session_token",
1570            "private_key",
1571            "authorization",
1572            "auth_token",
1573            "access_token",
1574            "refresh_token",
1575            "cookie",
1576            "session",
1577            "csrf",
1578            "jwt",
1579            "bearer",
1580        ];
1581
1582        // Check global environment
1583        for (key, value) in &workspace.config.global_environment.variables {
1584            if self.is_potentially_sensitive(key, value, &sensitive_keys) {
1585                warnings.push(SecurityWarning {
1586                    field_type: "environment_variable".to_string(),
1587                    field_name: key.clone(),
1588                    location: "global_environment".to_string(),
1589                    severity: SecuritySeverity::High,
1590                    message: format!(
1591                        "Potentially sensitive environment variable '{}' detected",
1592                        key
1593                    ),
1594                    suggestion: "Consider encrypting this value or excluding it from exports"
1595                        .to_string(),
1596                });
1597            }
1598        }
1599
1600        // Check workspace environments
1601        for env in &workspace.config.environments {
1602            for (key, value) in &env.variables {
1603                if self.is_potentially_sensitive(key, value, &sensitive_keys) {
1604                    warnings.push(SecurityWarning {
1605                        field_type: "environment_variable".to_string(),
1606                        field_name: key.clone(),
1607                        location: format!("environment '{}'", env.name),
1608                        severity: SecuritySeverity::High,
1609                        message: format!("Potentially sensitive environment variable '{}' detected in environment '{}'", key, env.name),
1610                        suggestion: "Consider encrypting this value or excluding it from exports".to_string(),
1611                    });
1612                }
1613            }
1614        }
1615
1616        Ok(())
1617    }
1618
1619    /// Check if a key-value pair is potentially sensitive
1620    fn is_potentially_sensitive(&self, key: &str, value: &str, sensitive_keys: &[&str]) -> bool {
1621        let key_lower = key.to_lowercase();
1622
1623        // Check if key contains sensitive keywords
1624        if sensitive_keys.iter().any(|&sensitive| key_lower.contains(sensitive)) {
1625            return true;
1626        }
1627
1628        // Check for patterns that indicate sensitive data
1629        self.contains_sensitive_patterns(value)
1630    }
1631
1632    /// Check if value contains sensitive patterns
1633    fn contains_sensitive_patterns(&self, value: &str) -> bool {
1634        // Credit card pattern
1635        if CREDIT_CARD_PATTERN.is_match(value) {
1636            return true;
1637        }
1638
1639        // SSN pattern
1640        if SSN_PATTERN.is_match(value) {
1641            return true;
1642        }
1643
1644        // Long random-looking strings (potential API keys)
1645        if value.len() > 20 && value.chars().any(|c| c.is_alphanumeric()) {
1646            let alphanumeric_count = value.chars().filter(|c| c.is_alphanumeric()).count();
1647            let total_count = value.len();
1648            if alphanumeric_count as f64 / total_count as f64 > 0.8 {
1649                return true;
1650            }
1651        }
1652
1653        false
1654    }
1655
1656    /// Generate security recommendations based on findings
1657    fn generate_security_recommendations(
1658        &self,
1659        has_warnings: bool,
1660        has_errors: bool,
1661    ) -> Vec<String> {
1662        let mut recommendations = Vec::new();
1663
1664        if has_warnings || has_errors {
1665            recommendations.push("Enable encryption for this workspace in settings".to_string());
1666            recommendations.push("Review and encrypt sensitive environment variables".to_string());
1667            recommendations.push("Use encrypted export for sharing workspaces".to_string());
1668        }
1669
1670        if has_errors {
1671            recommendations
1672                .push("CRITICAL: Remove or encrypt sensitive data before proceeding".to_string());
1673        }
1674
1675        recommendations
1676    }
1677
1678    /// Export individual requests for nested structure
1679    async fn export_workspace_requests(
1680        &self,
1681        workspace: &Workspace,
1682        requests_dir: &Path,
1683        force: bool,
1684    ) -> Result<usize> {
1685        let mut count = 0;
1686
1687        for request in &workspace.requests {
1688            let file_path =
1689                requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
1690            if force || !file_path.exists() {
1691                let exported = self.convert_request_to_exported(request, "");
1692                let content = serde_yaml::to_string(&exported)
1693                    .map_err(|e| Error::config(format!("Failed to serialize request: {}", e)))?;
1694
1695                fs::write(&file_path, content)
1696                    .await
1697                    .map_err(|e| Error::io_with_context("writing request file", e.to_string()))?;
1698
1699                count += 1;
1700            }
1701        }
1702
1703        // Export folder requests
1704        for folder in &workspace.folders {
1705            count += self.export_folder_requests(folder, requests_dir, force, &folder.name).await?;
1706        }
1707
1708        Ok(count)
1709    }
1710
1711    /// Export requests from folders recursively
1712    async fn export_folder_requests(
1713        &self,
1714        folder: &Folder,
1715        requests_dir: &Path,
1716        force: bool,
1717        folder_path: &str,
1718    ) -> Result<usize> {
1719        use std::collections::VecDeque;
1720
1721        let mut count = 0;
1722        let mut queue = VecDeque::new();
1723
1724        // Start with the root folder
1725        queue.push_back((folder, folder_path.to_string()));
1726
1727        while let Some((current_folder, current_path)) = queue.pop_front() {
1728            // Export requests in current folder
1729            for request in &current_folder.requests {
1730                let file_path =
1731                    requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
1732                if force || !file_path.exists() {
1733                    let exported = self.convert_request_to_exported(request, &current_path);
1734                    let content = serde_yaml::to_string(&exported).map_err(|e| {
1735                        Error::config(format!("Failed to serialize request: {}", e))
1736                    })?;
1737
1738                    fs::write(&file_path, content).await.map_err(|e| {
1739                        Error::io_with_context("writing request file", e.to_string())
1740                    })?;
1741
1742                    count += 1;
1743                }
1744            }
1745
1746            // Add subfolders to queue with updated paths
1747            for subfolder in &current_folder.folders {
1748                let subfolder_path = if current_path.is_empty() {
1749                    subfolder.name.clone()
1750                } else {
1751                    format!("{}/{}", current_path, subfolder.name)
1752                };
1753                queue.push_back((subfolder, subfolder_path));
1754            }
1755        }
1756
1757        Ok(count)
1758    }
1759
1760    /// Export requests for grouped structure
1761    async fn export_workspace_requests_grouped(
1762        &self,
1763        workspace: &Workspace,
1764        requests_dir: &Path,
1765        force: bool,
1766    ) -> Result<usize> {
1767        let mut count = 0;
1768        let workspace_requests_dir = requests_dir.join(self.sanitize_filename(&workspace.name));
1769
1770        if !workspace_requests_dir.exists() {
1771            fs::create_dir_all(&workspace_requests_dir).await.map_err(|e| {
1772                Error::io_with_context("creating workspace requests directory", e.to_string())
1773            })?;
1774        }
1775
1776        count += self
1777            .export_workspace_requests(workspace, &workspace_requests_dir, force)
1778            .await?;
1779        Ok(count)
1780    }
1781
1782    /// Create metadata file for Git integration
1783    async fn create_metadata_file(
1784        &self,
1785        workspace: &Workspace,
1786        target_dir: &Path,
1787        structure: &DirectoryStructure,
1788    ) -> Result<()> {
1789        let metadata = serde_json::json!({
1790            "workspace_id": workspace.id,
1791            "workspace_name": workspace.name,
1792            "description": workspace.description,
1793            "exported_at": Utc::now().to_rfc3339(),
1794            "structure": format!("{:?}", structure),
1795            "version": "1.0",
1796            "source": "mockforge"
1797        });
1798
1799        let metadata_file = target_dir.join(".mockforge-meta.json");
1800        let content = serde_json::to_string_pretty(&metadata)
1801            .map_err(|e| Error::config(format!("Failed to serialize metadata: {}", e)))?;
1802
1803        fs::write(&metadata_file, content)
1804            .await
1805            .map_err(|e| Error::io_with_context("writing metadata file", e.to_string()))?;
1806
1807        Ok(())
1808    }
1809
1810    /// Export a reality preset to a file
1811    ///
1812    /// Exports a reality preset (JSON or YAML format) to the specified path.
1813    /// The preset can be imported later to restore the reality configuration.
1814    pub async fn export_reality_preset(
1815        &self,
1816        preset: &crate::RealityPreset,
1817        output_path: &Path,
1818    ) -> Result<()> {
1819        self.ensure_workspace_dir().await?;
1820
1821        // Determine format from file extension
1822        let content = if output_path.extension().and_then(|s| s.to_str()) == Some("yaml")
1823            || output_path.extension().and_then(|s| s.to_str()) == Some("yml")
1824        {
1825            serde_yaml::to_string(preset)
1826                .map_err(|e| Error::config(format!("Failed to serialize preset to YAML: {}", e)))?
1827        } else {
1828            serde_json::to_string_pretty(preset)
1829                .map_err(|e| Error::config(format!("Failed to serialize preset to JSON: {}", e)))?
1830        };
1831
1832        // Ensure parent directory exists
1833        if let Some(parent) = output_path.parent() {
1834            fs::create_dir_all(parent)
1835                .await
1836                .map_err(|e| Error::io_with_context("creating preset directory", e.to_string()))?;
1837        }
1838
1839        fs::write(output_path, content)
1840            .await
1841            .map_err(|e| Error::io_with_context("writing preset file", e.to_string()))?;
1842
1843        Ok(())
1844    }
1845
1846    /// Import a reality preset from a file
1847    ///
1848    /// Loads a reality preset from a JSON or YAML file and returns it.
1849    /// The preset can then be applied to a workspace or the global configuration.
1850    pub async fn import_reality_preset(&self, input_path: &Path) -> Result<crate::RealityPreset> {
1851        let content = fs::read_to_string(input_path)
1852            .await
1853            .map_err(|e| Error::io_with_context("reading preset file", e.to_string()))?;
1854
1855        // Determine format from file extension
1856        let preset = if input_path
1857            .extension()
1858            .and_then(|s| s.to_str())
1859            .map(|ext| ext == "yaml" || ext == "yml")
1860            .unwrap_or(false)
1861        {
1862            serde_yaml::from_str(&content).map_err(|e| {
1863                Error::config(format!("Failed to deserialize preset from YAML: {}", e))
1864            })?
1865        } else {
1866            serde_json::from_str(&content).map_err(|e| {
1867                Error::config(format!("Failed to deserialize preset from JSON: {}", e))
1868            })?
1869        };
1870
1871        Ok(preset)
1872    }
1873
1874    /// Get the presets directory path
1875    pub fn presets_dir(&self) -> PathBuf {
1876        self.base_dir.join("presets")
1877    }
1878
1879    /// List all available reality presets
1880    ///
1881    /// Scans the presets directory and returns a list of all preset files found.
1882    pub async fn list_reality_presets(&self) -> Result<Vec<PathBuf>> {
1883        let presets_dir = self.presets_dir();
1884        if !presets_dir.exists() {
1885            return Ok(vec![]);
1886        }
1887
1888        let mut presets = Vec::new();
1889        let mut entries = fs::read_dir(&presets_dir)
1890            .await
1891            .map_err(|e| Error::io_with_context("reading presets directory", e.to_string()))?;
1892
1893        while let Some(entry) = entries
1894            .next_entry()
1895            .await
1896            .map_err(|e| Error::io_with_context("reading directory entry", e.to_string()))?
1897        {
1898            let path = entry.path();
1899            if path.is_file() {
1900                let ext = path.extension().and_then(|s| s.to_str());
1901                if ext == Some("json") || ext == Some("yaml") || ext == Some("yml") {
1902                    presets.push(path);
1903                }
1904            }
1905        }
1906
1907        Ok(presets)
1908    }
1909
1910    /// Sanitize filename for filesystem compatibility
1911    fn sanitize_filename(&self, name: &str) -> String {
1912        name.chars()
1913            .map(|c| match c {
1914                '/' | '\\' | ':' | '*' | '?' | '"' | '<' | '>' | '|' => '_',
1915                c if c.is_whitespace() => '_',
1916                c => c,
1917            })
1918            .collect::<String>()
1919            .to_lowercase()
1920    }
1921}
1922
/// Result of syncing a single workspace
///
/// Internal accumulator for one workspace's sync statistics; private to this
/// module (the aggregate result exposed to callers is a separate type).
#[derive(Debug)]
struct WorkspaceSyncResult {
    /// Number of requests exported
    requests_count: usize,
    /// Number of files created
    files_created: usize,
}
1931
1932#[cfg(test)]
1933mod tests {
1934    use super::*;
1935    use crate::workspace::{MockRequest, Workspace};
1936    use crate::HttpMethod;
1937    use tempfile::TempDir;
1938
    // End-to-end round-trip: a workspace saved to disk can be reloaded with
    // its name and request list intact, and shows up in the id listing.
    #[tokio::test]
    async fn test_workspace_persistence() {
        let temp_dir = TempDir::new().unwrap();
        let persistence = WorkspacePersistence::new(temp_dir.path());

        // Create a test workspace
        let mut workspace = Workspace::new("Test Workspace".to_string());
        let request =
            MockRequest::new(HttpMethod::GET, "/test".to_string(), "Test Request".to_string());
        workspace.add_request(request).unwrap();

        // Save workspace
        persistence.save_workspace(&workspace).await.unwrap();

        // Load workspace
        let loaded = persistence.load_workspace(&workspace.id).await.unwrap();
        assert_eq!(loaded.name, workspace.name);
        assert_eq!(loaded.requests.len(), 1);

        // List workspaces
        let ids = persistence.list_workspace_ids().await.unwrap();
        assert_eq!(ids.len(), 1);
        assert_eq!(ids[0], workspace.id);
    }
1963
    // Registry round-trip: two workspaces plus the active-workspace selection
    // survive a save/load cycle.
    #[tokio::test]
    async fn test_registry_persistence() {
        let temp_dir = TempDir::new().unwrap();
        let persistence = WorkspacePersistence::new(temp_dir.path());

        let mut registry = WorkspaceRegistry::new();

        // Add workspaces
        let workspace1 = Workspace::new("Workspace 1".to_string());
        let workspace2 = Workspace::new("Workspace 2".to_string());

        let id1 = registry.add_workspace(workspace1).unwrap();
        let _id2 = registry.add_workspace(workspace2).unwrap();

        // Set active workspace
        registry.set_active_workspace(Some(id1.clone())).unwrap();

        // Save registry
        persistence.save_full_registry(&registry).await.unwrap();

        // Load registry
        let loaded_registry = persistence.load_full_registry().await.unwrap();

        assert_eq!(loaded_registry.get_workspaces().len(), 2);
        assert_eq!(loaded_registry.get_active_workspace().unwrap().name, "Workspace 1");
    }
1990
    // Backup lifecycle: a backed-up workspace can be restored after the
    // original has been deleted, and loading the deleted id fails meanwhile.
    #[tokio::test]
    async fn test_backup_and_restore() {
        let temp_dir = TempDir::new().unwrap();
        let backup_dir = temp_dir.path().join("backups");
        let persistence = WorkspacePersistence::new(temp_dir.path());

        // Create and save workspace
        let workspace = Workspace::new("Test Workspace".to_string());
        persistence.save_workspace(&workspace).await.unwrap();

        // Create backup
        let backup_path = persistence.backup_workspace(&workspace.id, &backup_dir).await.unwrap();
        assert!(backup_path.exists());

        // Delete original
        persistence.delete_workspace(&workspace.id).await.unwrap();
        assert!(persistence.load_workspace(&workspace.id).await.is_err());

        // Restore from backup
        let restored_id = persistence.restore_workspace(&backup_path).await.unwrap();

        // Verify restored workspace
        let restored = persistence.load_workspace(&restored_id).await.unwrap();
        assert_eq!(restored.name, "Test Workspace");
    }
2016
    // The path helpers below are pure functions of `base_dir`; each test pins
    // the exact on-disk layout (registry.yaml, sync_state.yaml, <id>.yaml).
    #[test]
    fn test_workspace_persistence_new() {
        let persistence = WorkspacePersistence::new("/tmp/test");
        assert_eq!(persistence.base_dir, PathBuf::from("/tmp/test"));
    }

    #[test]
    fn test_workspace_persistence_workspace_dir() {
        let persistence = WorkspacePersistence::new("/tmp/test");
        assert_eq!(persistence.workspace_dir(), Path::new("/tmp/test"));
    }

    #[test]
    fn test_workspace_persistence_workspace_file_path() {
        let persistence = WorkspacePersistence::new("/tmp/test");
        let path = persistence.workspace_file_path("workspace-123");
        assert_eq!(path, PathBuf::from("/tmp/test/workspace-123.yaml"));
    }

    #[test]
    fn test_workspace_persistence_registry_file_path() {
        let persistence = WorkspacePersistence::new("/tmp/test");
        let path = persistence.registry_file_path();
        assert_eq!(path, PathBuf::from("/tmp/test/registry.yaml"));
    }

    #[test]
    fn test_workspace_persistence_sync_state_file_path() {
        let persistence = WorkspacePersistence::new("/tmp/test");
        let path = persistence.sync_state_file_path();
        assert_eq!(path, PathBuf::from("/tmp/test/sync_state.yaml"));
    }
2049
    // Construction smoke tests for the sync-related value types.
    #[test]
    fn test_sync_state_creation() {
        let state = SyncState {
            last_sync_timestamp: Utc::now(),
        };
        assert!(state.last_sync_timestamp <= Utc::now());
    }

    // PartialEq coverage across all SyncStrategy variants.
    #[test]
    fn test_sync_strategy_variants() {
        let full = SyncStrategy::Full;
        let incremental = SyncStrategy::Incremental;
        let selective = SyncStrategy::Selective(vec!["id1".to_string(), "id2".to_string()]);

        assert_eq!(full, SyncStrategy::Full);
        assert_eq!(incremental, SyncStrategy::Incremental);
        assert_eq!(selective, SyncStrategy::Selective(vec!["id1".to_string(), "id2".to_string()]));
    }

    // PartialEq coverage across all DirectoryStructure variants.
    #[test]
    fn test_directory_structure_variants() {
        let flat = DirectoryStructure::Flat;
        let nested = DirectoryStructure::Nested;
        let grouped = DirectoryStructure::Grouped;

        assert_eq!(flat, DirectoryStructure::Flat);
        assert_eq!(nested, DirectoryStructure::Nested);
        assert_eq!(grouped, DirectoryStructure::Grouped);
    }

    // Field-by-field construction check for SyncResult.
    #[test]
    fn test_sync_result_creation() {
        let result = SyncResult {
            synced_workspaces: 5,
            synced_requests: 10,
            files_created: 15,
            target_dir: PathBuf::from("/tmp/sync"),
        };

        assert_eq!(result.synced_workspaces, 5);
        assert_eq!(result.synced_requests, 10);
        assert_eq!(result.files_created, 15);
    }
2093
    // Construction smoke tests for the encrypted-export and security types.
    #[test]
    fn test_encrypted_export_result_creation() {
        let result = EncryptedExportResult {
            output_path: PathBuf::from("/tmp/export.zip"),
            backup_key: "backup-key-123".to_string(),
            exported_at: Utc::now(),
            workspace_name: "Test Workspace".to_string(),
            encryption_enabled: true,
        };

        assert_eq!(result.workspace_name, "Test Workspace");
        assert!(result.encryption_enabled);
    }

    #[test]
    fn test_encrypted_import_result_creation() {
        let result = EncryptedImportResult {
            workspace_id: "ws-123".to_string(),
            workspace_name: "Imported Workspace".to_string(),
            imported_at: Utc::now(),
            request_count: 5,
            encryption_restored: true,
        };

        assert_eq!(result.workspace_id, "ws-123");
        assert_eq!(result.request_count, 5);
    }

    #[test]
    fn test_security_check_result_creation() {
        let result = SecurityCheckResult {
            workspace_id: "ws-123".to_string(),
            workspace_name: "Test Workspace".to_string(),
            warnings: vec![],
            errors: vec![],
            is_secure: true,
            recommended_actions: vec!["Action 1".to_string()],
        };

        assert_eq!(result.workspace_id, "ws-123");
        assert!(result.is_secure);
    }

    #[test]
    fn test_security_warning_creation() {
        let warning = SecurityWarning {
            field_type: "header".to_string(),
            field_name: "Authorization".to_string(),
            location: "request".to_string(),
            severity: SecuritySeverity::High,
            message: "Sensitive data detected".to_string(),
            suggestion: "Use encryption".to_string(),
        };

        assert_eq!(warning.severity, SecuritySeverity::High);
        assert_eq!(warning.field_name, "Authorization");
    }
2151
    // PartialEq coverage across all SecuritySeverity variants.
    #[test]
    fn test_security_severity_variants() {
        assert_eq!(SecuritySeverity::Low, SecuritySeverity::Low);
        assert_eq!(SecuritySeverity::Medium, SecuritySeverity::Medium);
        assert_eq!(SecuritySeverity::High, SecuritySeverity::High);
        assert_eq!(SecuritySeverity::Critical, SecuritySeverity::Critical);
    }

    // Construction smoke tests for the workspace export/metadata/config types.
    #[test]
    fn test_workspace_export_creation() {
        let export = WorkspaceExport {
            metadata: WorkspaceMetadata {
                id: "ws-123".to_string(),
                name: "Test Workspace".to_string(),
                description: None,
                exported_at: Utc::now(),
                request_count: 5,
                folder_count: 2,
            },
            config: WorkspaceConfig {
                auth: None,
                base_url: Some("http://localhost:8080".to_string()),
                variables: HashMap::new(),
                reality_level: None,
                ai_mode: None,
            },
            requests: HashMap::new(),
        };

        assert_eq!(export.metadata.id, "ws-123");
        assert_eq!(export.config.base_url, Some("http://localhost:8080".to_string()));
    }

    #[test]
    fn test_workspace_metadata_creation() {
        let metadata = WorkspaceMetadata {
            id: "ws-123".to_string(),
            name: "Test Workspace".to_string(),
            description: Some("Test description".to_string()),
            exported_at: Utc::now(),
            request_count: 10,
            folder_count: 5,
        };

        assert_eq!(metadata.id, "ws-123");
        assert_eq!(metadata.name, "Test Workspace");
        assert_eq!(metadata.request_count, 10);
        assert_eq!(metadata.folder_count, 5);
    }

    #[test]
    fn test_workspace_config_creation() {
        let config = WorkspaceConfig {
            auth: None,
            base_url: Some("http://localhost:8080".to_string()),
            variables: HashMap::new(),
            reality_level: None,
            ai_mode: None,
        };

        assert_eq!(config.base_url, Some("http://localhost:8080".to_string()));
    }

    #[test]
    fn test_auth_config_creation() {
        let mut params = HashMap::new();
        params.insert("token".to_string(), "token-123".to_string());
        let auth = AuthConfig {
            auth_type: "bearer".to_string(),
            params,
        };

        assert_eq!(auth.auth_type, "bearer");
        assert_eq!(auth.params.get("token"), Some(&"token-123".to_string()));
    }

    #[test]
    fn test_exported_request_creation() {
        let request = ExportedRequest {
            id: "req-123".to_string(),
            name: "Test Request".to_string(),
            method: "GET".to_string(),
            path: "/api/test".to_string(),
            folder_path: "/folder1".to_string(),
            headers: HashMap::new(),
            query_params: HashMap::new(),
            body: None,
            response_status: Some(200),
            response_body: Some("{}".to_string()),
            response_headers: HashMap::new(),
            delay: Some(100),
        };

        assert_eq!(request.id, "req-123");
        assert_eq!(request.method, "GET");
        assert_eq!(request.response_status, Some(200));
    }
2249
    // Construction and serde coverage for the private registry wrapper, plus
    // Clone/Debug coverage for the sync types.
    #[test]
    fn test_serializable_workspace_registry_creation() {
        let serializable = SerializableWorkspaceRegistry {
            workspaces: vec![],
            active_workspace: Some("ws-123".to_string()),
        };

        assert_eq!(serializable.active_workspace, Some("ws-123".to_string()));
        assert!(serializable.workspaces.is_empty());
    }

    #[test]
    fn test_serializable_workspace_registry_serialization() {
        let serializable = SerializableWorkspaceRegistry {
            workspaces: vec![],
            active_workspace: Some("ws-123".to_string()),
        };

        let json = serde_json::to_string(&serializable).unwrap();
        assert!(json.contains("ws-123"));
    }

    #[test]
    fn test_sync_state_clone() {
        let state1 = SyncState {
            last_sync_timestamp: Utc::now(),
        };
        let state2 = state1.clone();
        assert_eq!(state1.last_sync_timestamp, state2.last_sync_timestamp);
    }

    #[test]
    fn test_sync_state_debug() {
        let state = SyncState {
            last_sync_timestamp: Utc::now(),
        };
        let debug_str = format!("{:?}", state);
        assert!(debug_str.contains("SyncState"));
    }

    #[test]
    fn test_sync_strategy_clone() {
        let strategy1 = SyncStrategy::Selective(vec!["id1".to_string()]);
        let strategy2 = strategy1.clone();
        assert_eq!(strategy1, strategy2);
    }
2296
    // Clone coverage for the sync and encrypted-transfer result types.
    #[test]
    fn test_directory_structure_clone() {
        let structure1 = DirectoryStructure::Nested;
        let structure2 = structure1.clone();
        assert_eq!(structure1, structure2);
    }

    #[test]
    fn test_sync_result_clone() {
        let result1 = SyncResult {
            synced_workspaces: 1,
            synced_requests: 2,
            files_created: 3,
            target_dir: PathBuf::from("/tmp"),
        };
        let result2 = result1.clone();
        assert_eq!(result1.synced_workspaces, result2.synced_workspaces);
    }

    #[test]
    fn test_encrypted_export_result_clone() {
        let result1 = EncryptedExportResult {
            output_path: PathBuf::from("/tmp/export.zip"),
            backup_key: "key".to_string(),
            exported_at: Utc::now(),
            workspace_name: "Test".to_string(),
            encryption_enabled: true,
        };
        let result2 = result1.clone();
        assert_eq!(result1.workspace_name, result2.workspace_name);
    }

    #[test]
    fn test_encrypted_import_result_clone() {
        let result1 = EncryptedImportResult {
            workspace_id: "ws-1".to_string(),
            workspace_name: "Test".to_string(),
            imported_at: Utc::now(),
            request_count: 5,
            encryption_restored: true,
        };
        let result2 = result1.clone();
        assert_eq!(result1.workspace_id, result2.workspace_id);
    }
2341
    // Clone coverage for the security and workspace-export types.
    #[test]
    fn test_security_check_result_clone() {
        let result1 = SecurityCheckResult {
            workspace_id: "ws-1".to_string(),
            workspace_name: "Test".to_string(),
            warnings: vec![],
            errors: vec![],
            is_secure: true,
            recommended_actions: vec![],
        };
        let result2 = result1.clone();
        assert_eq!(result1.workspace_id, result2.workspace_id);
    }

    #[test]
    fn test_security_warning_clone() {
        let warning1 = SecurityWarning {
            field_type: "header".to_string(),
            field_name: "Auth".to_string(),
            location: "request".to_string(),
            severity: SecuritySeverity::High,
            message: "Test".to_string(),
            suggestion: "Fix".to_string(),
        };
        let warning2 = warning1.clone();
        assert_eq!(warning1.field_name, warning2.field_name);
    }

    #[test]
    fn test_security_severity_clone() {
        let severity1 = SecuritySeverity::Critical;
        let severity2 = severity1.clone();
        assert_eq!(severity1, severity2);
    }

    #[test]
    fn test_workspace_export_clone() {
        let export1 = WorkspaceExport {
            metadata: WorkspaceMetadata {
                id: "ws-1".to_string(),
                name: "Test".to_string(),
                description: None,
                exported_at: Utc::now(),
                request_count: 0,
                folder_count: 0,
            },
            config: WorkspaceConfig {
                auth: None,
                base_url: None,
                variables: HashMap::new(),
                reality_level: None,
                ai_mode: None,
            },
            requests: HashMap::new(),
        };
        let export2 = export1.clone();
        assert_eq!(export1.metadata.id, export2.metadata.id);
    }
2400
    // Clone coverage for the metadata, config, auth, and request export types.
    #[test]
    fn test_workspace_metadata_clone() {
        let metadata1 = WorkspaceMetadata {
            id: "ws-1".to_string(),
            name: "Test".to_string(),
            description: None,
            exported_at: Utc::now(),
            request_count: 0,
            folder_count: 0,
        };
        let metadata2 = metadata1.clone();
        assert_eq!(metadata1.id, metadata2.id);
    }

    #[test]
    fn test_workspace_config_clone() {
        let config1 = WorkspaceConfig {
            auth: None,
            base_url: Some("http://localhost".to_string()),
            variables: HashMap::new(),
            reality_level: None,
            ai_mode: None,
        };
        let config2 = config1.clone();
        assert_eq!(config1.base_url, config2.base_url);
    }

    #[test]
    fn test_auth_config_clone() {
        let mut params = HashMap::new();
        params.insert("key".to_string(), "value".to_string());
        let auth1 = AuthConfig {
            auth_type: "bearer".to_string(),
            params: params.clone(),
        };
        let auth2 = auth1.clone();
        assert_eq!(auth1.auth_type, auth2.auth_type);
    }

    #[test]
    fn test_exported_request_clone() {
        let request1 = ExportedRequest {
            id: "req-1".to_string(),
            name: "Test".to_string(),
            method: "GET".to_string(),
            path: "/test".to_string(),
            folder_path: "/".to_string(),
            headers: HashMap::new(),
            query_params: HashMap::new(),
            body: None,
            response_status: Some(200),
            response_body: None,
            response_headers: HashMap::new(),
            delay: None,
        };
        let request2 = request1.clone();
        assert_eq!(request1.id, request2.id);
    }
2459
2460    #[test]
2461    fn test_sync_result_debug() {
2462        let result = SyncResult {
2463            synced_workspaces: 1,
2464            synced_requests: 2,
2465            files_created: 3,
2466            target_dir: PathBuf::from("/tmp"),
2467        };
2468        let debug_str = format!("{:?}", result);
2469        assert!(debug_str.contains("SyncResult"));
2470    }
2471
2472    #[test]
2473    fn test_encrypted_export_result_debug() {
2474        let result = EncryptedExportResult {
2475            output_path: PathBuf::from("/tmp/export.zip"),
2476            backup_key: "key".to_string(),
2477            exported_at: Utc::now(),
2478            workspace_name: "Test".to_string(),
2479            encryption_enabled: true,
2480        };
2481        let debug_str = format!("{:?}", result);
2482        assert!(debug_str.contains("EncryptedExportResult"));
2483    }
2484
2485    #[test]
2486    fn test_encrypted_import_result_debug() {
2487        let result = EncryptedImportResult {
2488            workspace_id: "ws-1".to_string(),
2489            workspace_name: "Test".to_string(),
2490            imported_at: Utc::now(),
2491            request_count: 5,
2492            encryption_restored: true,
2493        };
2494        let debug_str = format!("{:?}", result);
2495        assert!(debug_str.contains("EncryptedImportResult"));
2496    }
2497
2498    #[test]
2499    fn test_security_check_result_debug() {
2500        let result = SecurityCheckResult {
2501            workspace_id: "ws-1".to_string(),
2502            workspace_name: "Test".to_string(),
2503            warnings: vec![],
2504            errors: vec![],
2505            is_secure: true,
2506            recommended_actions: vec![],
2507        };
2508        let debug_str = format!("{:?}", result);
2509        assert!(debug_str.contains("SecurityCheckResult"));
2510    }
2511
2512    #[test]
2513    fn test_security_warning_debug() {
2514        let warning = SecurityWarning {
2515            field_type: "header".to_string(),
2516            field_name: "Auth".to_string(),
2517            location: "request".to_string(),
2518            severity: SecuritySeverity::High,
2519            message: "Test".to_string(),
2520            suggestion: "Fix".to_string(),
2521        };
2522        let debug_str = format!("{:?}", warning);
2523        assert!(debug_str.contains("SecurityWarning"));
2524    }
2525
2526    #[test]
2527    fn test_security_severity_debug() {
2528        let severity = SecuritySeverity::Critical;
2529        let debug_str = format!("{:?}", severity);
2530        assert!(debug_str.contains("Critical"));
2531    }
2532
2533    #[test]
2534    fn test_workspace_export_debug() {
2535        let export = WorkspaceExport {
2536            metadata: WorkspaceMetadata {
2537                id: "ws-1".to_string(),
2538                name: "Test".to_string(),
2539                description: None,
2540                exported_at: Utc::now(),
2541                request_count: 0,
2542                folder_count: 0,
2543            },
2544            config: WorkspaceConfig {
2545                auth: None,
2546                base_url: None,
2547                variables: HashMap::new(),
2548                reality_level: None,
2549                ai_mode: None,
2550            },
2551            requests: HashMap::new(),
2552        };
2553        let debug_str = format!("{:?}", export);
2554        assert!(debug_str.contains("WorkspaceExport"));
2555    }
2556
2557    #[test]
2558    fn test_workspace_metadata_debug() {
2559        let metadata = WorkspaceMetadata {
2560            id: "ws-1".to_string(),
2561            name: "Test".to_string(),
2562            description: None,
2563            exported_at: Utc::now(),
2564            request_count: 0,
2565            folder_count: 0,
2566        };
2567        let debug_str = format!("{:?}", metadata);
2568        assert!(debug_str.contains("WorkspaceMetadata"));
2569    }
2570
2571    #[test]
2572    fn test_workspace_config_debug() {
2573        let config = WorkspaceConfig {
2574            auth: None,
2575            base_url: None,
2576            variables: HashMap::new(),
2577            reality_level: None,
2578            ai_mode: None,
2579        };
2580        let debug_str = format!("{:?}", config);
2581        assert!(debug_str.contains("WorkspaceConfig"));
2582    }
2583
2584    #[test]
2585    fn test_auth_config_debug() {
2586        let auth = AuthConfig {
2587            auth_type: "bearer".to_string(),
2588            params: HashMap::new(),
2589        };
2590        let debug_str = format!("{:?}", auth);
2591        assert!(debug_str.contains("AuthConfig"));
2592    }
2593
2594    #[test]
2595    fn test_exported_request_debug() {
2596        let request = ExportedRequest {
2597            id: "req-1".to_string(),
2598            name: "Test".to_string(),
2599            method: "GET".to_string(),
2600            path: "/test".to_string(),
2601            folder_path: "/".to_string(),
2602            headers: HashMap::new(),
2603            query_params: HashMap::new(),
2604            body: None,
2605            response_status: None,
2606            response_body: None,
2607            response_headers: HashMap::new(),
2608            delay: None,
2609        };
2610        let debug_str = format!("{:?}", request);
2611        assert!(debug_str.contains("ExportedRequest"));
2612    }
2613
2614    #[test]
2615    fn test_sync_strategy_debug() {
2616        let strategy = SyncStrategy::Full;
2617        let debug_str = format!("{:?}", strategy);
2618        assert!(debug_str.contains("Full") || debug_str.contains("SyncStrategy"));
2619    }
2620
2621    #[test]
2622    fn test_directory_structure_debug() {
2623        let structure = DirectoryStructure::Flat;
2624        let debug_str = format!("{:?}", structure);
2625        assert!(debug_str.contains("Flat") || debug_str.contains("DirectoryStructure"));
2626    }
2627}