mockforge_core/
workspace_persistence.rs

1//! Persistence layer for workspace configurations
2//!
3//! This module handles saving and loading workspace configurations to/from disk,
4//! enabling persistent storage of workspace hierarchies and configurations.
5
6use crate::config::AuthConfig as ConfigAuthConfig;
7use crate::encryption::{utils, AutoEncryptionProcessor, WorkspaceKeyManager};
8use crate::workspace::{EntityId, Folder, MockRequest, Workspace, WorkspaceRegistry};
9use crate::{Error, Result};
10use chrono::{DateTime, Utc};
11use once_cell::sync::Lazy;
12use regex::Regex;
13use serde::{Deserialize, Serialize};
14use std::collections::HashMap;
15use std::path::{Path, PathBuf};
16use tokio::fs;
17
18// Pre-compiled regex patterns for sensitive data detection
// Matches 16-digit card-like numbers in four groups of four digits,
// optionally separated by spaces or hyphens. Purely structural heuristic:
// no Luhn validation, so any 16-digit sequence in this shape matches.
static CREDIT_CARD_PATTERN: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"\b\d{4}[-\s]?\d{4}[-\s]?\d{4}[-\s]?\d{4}\b")
        .expect("CREDIT_CARD_PATTERN regex is valid")
});
23
// Matches US SSN-shaped sequences (3-2-4 digits, optionally separated by
// spaces or hyphens). Structural only, so unrelated nine-digit numbers in
// this layout will also match.
static SSN_PATTERN: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"\b\d{3}[-\s]?\d{2}[-\s]?\d{4}\b").expect("SSN_PATTERN regex is valid")
});
27
/// Workspace persistence manager
///
/// Stores each workspace as `<id>.yaml` under `base_dir`, alongside
/// `registry.yaml` (registry metadata) and `sync_state.yaml` (sync state).
/// `Clone` is derived so the manager can be cheaply handed to multiple
/// owners; it only wraps a path and holds no open resources.
#[derive(Debug, Clone)]
pub struct WorkspacePersistence {
    /// Base directory for workspace storage
    base_dir: PathBuf,
}
34
/// Serializable workspace registry for persistence
///
/// Persistable snapshot of a `WorkspaceRegistry`, written to
/// `registry.yaml` (see `save_registry` / `load_registry`).
#[derive(Debug, Clone, Serialize, Deserialize)]
struct SerializableWorkspaceRegistry {
    // Full snapshot of every registered workspace; on load only the ids are
    // used to locate the per-workspace files.
    workspaces: Vec<Workspace>,
    // Id of the currently active workspace, if any.
    active_workspace: Option<EntityId>,
}
41
/// Sync state for tracking incremental syncs
///
/// Persisted to `sync_state.yaml` and refreshed after each incremental sync.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SyncState {
    /// Last time a sync operation was performed
    pub last_sync_timestamp: DateTime<Utc>,
}
48
/// Sync strategy for workspace mirroring
///
/// Parsed from the string argument of `sync_to_directory_advanced`
/// (`"full"`, `"incremental"`, `"selective"`). `Eq` is derived since all
/// payloads (`Vec<String>`) support full equality.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SyncStrategy {
    /// Sync all workspaces completely
    Full,
    /// Sync only changed workspaces (based on modification time)
    Incremental,
    /// Sync only specified workspace IDs
    Selective(Vec<String>),
}
59
/// Directory structure for synced workspaces
///
/// Parsed from the string argument of `sync_to_directory_advanced`
/// (`"flat"`, `"nested"`, `"grouped"`). Fieldless, so `Copy` and `Eq` are
/// derived for ergonomic pass-by-value use.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DirectoryStructure {
    /// All workspaces in a flat structure: workspace-id.yaml
    Flat,
    /// Nested by workspace: workspaces/{name}/workspace.yaml + requests/
    Nested,
    /// Grouped by type: requests/, responses/, metadata/
    Grouped,
}
70
/// Result of a workspace sync operation
///
/// Aggregated counters returned by `sync_to_directory_advanced`. `PartialEq`
/// / `Eq` are derived so results can be compared directly in callers and
/// tests.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SyncResult {
    /// Number of workspaces synced
    pub synced_workspaces: usize,
    /// Number of requests synced
    pub synced_requests: usize,
    /// Number of files created/updated
    pub files_created: usize,
    /// Target directory used
    pub target_dir: PathBuf,
}
83
/// Result of an encrypted workspace export
#[derive(Debug, Clone)]
pub struct EncryptedExportResult {
    /// Path to the encrypted export file
    pub output_path: PathBuf,
    /// Backup key for importing on other devices
    ///
    /// Sensitive material — treat like a credential. Note that the derived
    /// `Debug` impl will print it verbatim, so avoid debug-logging this
    /// struct.
    pub backup_key: String,
    /// When the export was created
    pub exported_at: DateTime<Utc>,
    /// Name of the exported workspace
    pub workspace_name: String,
    /// Whether encryption was successfully applied
    pub encryption_enabled: bool,
}
98
/// Result of an encrypted workspace import
///
/// Summary handed back to callers after importing an encrypted export.
#[derive(Debug, Clone)]
pub struct EncryptedImportResult {
    /// ID of the imported workspace
    pub workspace_id: String,
    /// Name of the imported workspace
    pub workspace_name: String,
    /// When the import was completed
    pub imported_at: DateTime<Utc>,
    /// Number of requests imported
    pub request_count: usize,
    /// Whether encryption was successfully restored
    pub encryption_restored: bool,
}
113
/// Result of a security check for sensitive data
///
/// Warnings are advisory; errors are critical findings. The split between
/// the two lists is decided by the scanning code that builds this result.
#[derive(Debug, Clone)]
pub struct SecurityCheckResult {
    /// Workspace ID that was checked
    pub workspace_id: String,
    /// Workspace name that was checked
    pub workspace_name: String,
    /// Security warnings found
    pub warnings: Vec<SecurityWarning>,
    /// Security errors found (critical issues)
    pub errors: Vec<SecurityWarning>,
    /// Whether the workspace is considered secure
    pub is_secure: bool,
    /// Recommended actions to improve security
    pub recommended_actions: Vec<String>,
}
130
/// Security warning or error
///
/// A single finding from a sensitive-data scan; used for both the `warnings`
/// and `errors` lists of [`SecurityCheckResult`].
#[derive(Debug, Clone)]
pub struct SecurityWarning {
    /// Type of field that contains sensitive data
    pub field_type: String,
    /// Name of the field
    pub field_name: String,
    /// Location where the sensitive data was found
    pub location: String,
    /// Severity of the issue
    pub severity: SecuritySeverity,
    /// Human-readable message
    pub message: String,
    /// Suggestion for fixing the issue
    pub suggestion: String,
}
147
/// Severity levels for security issues
///
/// Variants are declared from least to most severe, and `Ord` is derived so
/// severities compare in that order (`Low < Medium < High < Critical`).
/// `Copy`/`Eq`/`Hash` are derived since the enum is fieldless.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum SecuritySeverity {
    /// Low risk - informational
    Low,
    /// Medium risk - should be reviewed
    Medium,
    /// High risk - requires attention
    High,
    /// Critical risk - blocks operations
    Critical,
}
160
/// Git-friendly workspace export format
///
/// Top-level document produced when exporting/syncing a workspace: metadata,
/// a simplified config, and all requests keyed by a string id (the exact
/// keying scheme is chosen by the export helpers that build this value).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceExport {
    /// Workspace metadata
    pub metadata: WorkspaceMetadata,
    /// Workspace configuration
    pub config: WorkspaceConfig,
    /// All requests organized by folder structure
    pub requests: HashMap<String, ExportedRequest>,
}
171
/// Metadata for exported workspace
///
/// Counts and timestamp reflect the workspace's state at export time.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceMetadata {
    /// Original workspace ID
    pub id: String,
    /// Workspace name
    pub name: String,
    /// Workspace description
    pub description: Option<String>,
    /// Export timestamp
    pub exported_at: DateTime<Utc>,
    /// Total number of requests
    pub request_count: usize,
    /// Total number of folders
    pub folder_count: usize,
}
188
/// Simplified workspace configuration for export
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceConfig {
    /// Authentication configuration
    pub auth: Option<AuthConfig>,
    /// Base URL for requests
    pub base_url: Option<String>,
    /// Environment variables
    pub variables: HashMap<String, String>,
    /// Reality level for this workspace (1-5)
    /// Controls the realism of mock behavior (chaos, latency, MockAI)
    /// `serde(default)` keeps older export files (without this field)
    /// deserializable; it then defaults to `None`.
    #[serde(default)]
    pub reality_level: Option<crate::RealityLevel>,
}
203
/// Authentication configuration for export
///
/// NOTE(review): `params` can carry secrets (JWT secrets, OAuth2 client
/// secrets, basic-auth passwords, API keys — see `from_config_auth`), so any
/// export containing this type should be treated as sensitive.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuthConfig {
    /// Authentication type
    /// One of "jwt", "oauth2", "basic", or "api_key" as produced by
    /// `AuthConfig::from_config_auth`.
    pub auth_type: String,
    /// Authentication parameters
    pub params: HashMap<String, String>,
}
212
213impl AuthConfig {
214    /// Convert from config AuthConfig to export AuthConfig
215    pub fn from_config_auth(config_auth: &ConfigAuthConfig) -> Option<Self> {
216        if let Some(jwt) = &config_auth.jwt {
217            let mut params = HashMap::new();
218            if let Some(secret) = &jwt.secret {
219                params.insert("secret".to_string(), secret.clone());
220            }
221            if let Some(rsa_public_key) = &jwt.rsa_public_key {
222                params.insert("rsa_public_key".to_string(), rsa_public_key.clone());
223            }
224            if let Some(ecdsa_public_key) = &jwt.ecdsa_public_key {
225                params.insert("ecdsa_public_key".to_string(), ecdsa_public_key.clone());
226            }
227            if let Some(issuer) = &jwt.issuer {
228                params.insert("issuer".to_string(), issuer.clone());
229            }
230            if let Some(audience) = &jwt.audience {
231                params.insert("audience".to_string(), audience.clone());
232            }
233            if !jwt.algorithms.is_empty() {
234                params.insert("algorithms".to_string(), jwt.algorithms.join(","));
235            }
236            Some(AuthConfig {
237                auth_type: "jwt".to_string(),
238                params,
239            })
240        } else if let Some(oauth2) = &config_auth.oauth2 {
241            let mut params = HashMap::new();
242            params.insert("client_id".to_string(), oauth2.client_id.clone());
243            params.insert("client_secret".to_string(), oauth2.client_secret.clone());
244            params.insert("introspection_url".to_string(), oauth2.introspection_url.clone());
245            if let Some(auth_url) = &oauth2.auth_url {
246                params.insert("auth_url".to_string(), auth_url.clone());
247            }
248            if let Some(token_url) = &oauth2.token_url {
249                params.insert("token_url".to_string(), token_url.clone());
250            }
251            if let Some(token_type_hint) = &oauth2.token_type_hint {
252                params.insert("token_type_hint".to_string(), token_type_hint.clone());
253            }
254            Some(AuthConfig {
255                auth_type: "oauth2".to_string(),
256                params,
257            })
258        } else if let Some(basic_auth) = &config_auth.basic_auth {
259            let mut params = HashMap::new();
260            for (user, pass) in &basic_auth.credentials {
261                params.insert(user.clone(), pass.clone());
262            }
263            Some(AuthConfig {
264                auth_type: "basic".to_string(),
265                params,
266            })
267        } else if let Some(api_key) = &config_auth.api_key {
268            let mut params = HashMap::new();
269            params.insert("header_name".to_string(), api_key.header_name.clone());
270            if let Some(query_name) = &api_key.query_name {
271                params.insert("query_name".to_string(), query_name.clone());
272            }
273            if !api_key.keys.is_empty() {
274                params.insert("keys".to_string(), api_key.keys.join(","));
275            }
276            Some(AuthConfig {
277                auth_type: "api_key".to_string(),
278                params,
279            })
280        } else {
281            None
282        }
283    }
284}
285
/// Exported request format
///
/// Flat, git-friendly representation of a single mock request/response pair
/// (presumably derived from `MockRequest` — see the export helpers).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExportedRequest {
    /// Request ID
    pub id: String,
    /// Request name
    pub name: String,
    /// HTTP method
    pub method: String,
    /// Request path
    pub path: String,
    /// Folder path (for organization)
    pub folder_path: String,
    /// Request headers
    pub headers: HashMap<String, String>,
    /// Query parameters
    pub query_params: HashMap<String, String>,
    /// Request body
    pub body: Option<String>,
    /// Response status code
    pub response_status: Option<u16>,
    /// Response body
    pub response_body: Option<String>,
    /// Response headers
    pub response_headers: HashMap<String, String>,
    /// Response delay (ms)
    pub delay: Option<u64>,
}
314
315impl WorkspacePersistence {
316    /// Create a new persistence manager
317    pub fn new<P: AsRef<Path>>(base_dir: P) -> Self {
318        Self {
319            base_dir: base_dir.as_ref().to_path_buf(),
320        }
321    }
322
323    /// Get the workspace directory path
324    pub fn workspace_dir(&self) -> &Path {
325        &self.base_dir
326    }
327
    /// Get the path for a specific workspace file
    ///
    /// Workspaces live at `<base_dir>/<workspace_id>.yaml`.
    ///
    /// NOTE(review): `workspace_id` is interpolated into the filename without
    /// sanitization, so an id containing path separators could escape the
    /// base directory — confirm ids are validated upstream.
    pub fn workspace_file_path(&self, workspace_id: &str) -> PathBuf {
        self.base_dir.join(format!("{}.yaml", workspace_id))
    }
332
333    /// Get the registry metadata file path
334    pub fn registry_file_path(&self) -> PathBuf {
335        self.base_dir.join("registry.yaml")
336    }
337
338    /// Get the sync state file path
339    pub fn sync_state_file_path(&self) -> PathBuf {
340        self.base_dir.join("sync_state.yaml")
341    }
342
343    /// Ensure the workspace directory exists
344    pub async fn ensure_workspace_dir(&self) -> Result<()> {
345        if !self.base_dir.exists() {
346            fs::create_dir_all(&self.base_dir).await.map_err(|e| {
347                Error::generic(format!("Failed to create workspace directory: {}", e))
348            })?;
349        }
350        Ok(())
351    }
352
353    /// Save a workspace to disk
354    pub async fn save_workspace(&self, workspace: &Workspace) -> Result<()> {
355        self.ensure_workspace_dir().await?;
356
357        let file_path = self.workspace_file_path(&workspace.id);
358        let content = serde_yaml::to_string(workspace)
359            .map_err(|e| Error::generic(format!("Failed to serialize workspace: {}", e)))?;
360
361        fs::write(&file_path, content)
362            .await
363            .map_err(|e| Error::generic(format!("Failed to write workspace file: {}", e)))?;
364
365        Ok(())
366    }
367
368    /// Load a workspace from disk
369    pub async fn load_workspace(&self, workspace_id: &str) -> Result<Workspace> {
370        let file_path = self.workspace_file_path(workspace_id);
371
372        if !file_path.exists() {
373            return Err(Error::generic(format!("Workspace file not found: {:?}", file_path)));
374        }
375
376        let content = fs::read_to_string(&file_path)
377            .await
378            .map_err(|e| Error::generic(format!("Failed to read workspace file: {}", e)))?;
379
380        let workspace: Workspace = serde_yaml::from_str(&content)
381            .map_err(|e| Error::generic(format!("Failed to deserialize workspace: {}", e)))?;
382
383        Ok(workspace)
384    }
385
386    /// Delete a workspace from disk
387    pub async fn delete_workspace(&self, workspace_id: &str) -> Result<()> {
388        let file_path = self.workspace_file_path(workspace_id);
389
390        if file_path.exists() {
391            fs::remove_file(&file_path)
392                .await
393                .map_err(|e| Error::generic(format!("Failed to delete workspace file: {}", e)))?;
394        }
395
396        Ok(())
397    }
398
399    /// Save the workspace registry metadata
400    pub async fn save_registry(&self, registry: &WorkspaceRegistry) -> Result<()> {
401        self.ensure_workspace_dir().await?;
402
403        let serializable = SerializableWorkspaceRegistry {
404            workspaces: registry.get_workspaces().into_iter().cloned().collect(),
405            active_workspace: registry.get_active_workspace_id().map(|s| s.to_string()),
406        };
407
408        let file_path = self.registry_file_path();
409        let content = serde_yaml::to_string(&serializable)
410            .map_err(|e| Error::generic(format!("Failed to serialize registry: {}", e)))?;
411
412        fs::write(&file_path, content)
413            .await
414            .map_err(|e| Error::generic(format!("Failed to write registry file: {}", e)))?;
415
416        Ok(())
417    }
418
    /// Load the workspace registry metadata
    ///
    /// Reads `registry.yaml`, then loads each listed workspace from its own
    /// `<id>.yaml` file. A workspace that fails to load is skipped with a
    /// warning instead of failing the whole load, so one corrupt file cannot
    /// block the registry. Returns an empty registry when no registry file
    /// exists yet.
    pub async fn load_registry(&self) -> Result<WorkspaceRegistry> {
        let file_path = self.registry_file_path();

        if !file_path.exists() {
            // Return empty registry if no registry file exists
            return Ok(WorkspaceRegistry::new());
        }

        let content = fs::read_to_string(&file_path)
            .await
            .map_err(|e| Error::generic(format!("Failed to read registry file: {}", e)))?;

        let serializable: SerializableWorkspaceRegistry = serde_yaml::from_str(&content)
            .map_err(|e| Error::generic(format!("Failed to deserialize registry: {}", e)))?;

        let mut registry = WorkspaceRegistry::new();

        // Load individual workspaces. Only the ids from the registry snapshot
        // are used here; the per-workspace files are the authoritative data.
        for workspace_meta in &serializable.workspaces {
            match self.load_workspace(&workspace_meta.id).await {
                Ok(workspace) => {
                    registry.add_workspace(workspace)?;
                }
                Err(e) => {
                    // Lenient by design: skip the broken workspace, keep going.
                    tracing::warn!("Failed to load workspace {}: {}", workspace_meta.id, e);
                }
            }
        }

        // Set active workspace. A stale id (e.g. its workspace failed to load
        // above) only produces a warning, not an error.
        if let Some(active_id) = &serializable.active_workspace {
            if let Err(e) = registry.set_active_workspace(Some(active_id.clone())) {
                tracing::warn!("Failed to set active workspace {}: {}", active_id, e);
            }
        }

        Ok(registry)
    }
458
459    /// Save the sync state
460    pub async fn save_sync_state(&self, sync_state: &SyncState) -> Result<()> {
461        self.ensure_workspace_dir().await?;
462
463        let file_path = self.sync_state_file_path();
464        let content = serde_yaml::to_string(sync_state)
465            .map_err(|e| Error::generic(format!("Failed to serialize sync state: {}", e)))?;
466
467        fs::write(&file_path, content)
468            .await
469            .map_err(|e| Error::generic(format!("Failed to write sync state file: {}", e)))?;
470
471        Ok(())
472    }
473
    /// Load the sync state
    ///
    /// Reads `<base_dir>/sync_state.yaml`; when the file does not exist yet,
    /// a default state stamped with the current time is returned.
    ///
    /// NOTE(review): defaulting `last_sync_timestamp` to `Utc::now()` means a
    /// first-ever incremental sync would consider nothing changed since "now";
    /// an epoch timestamp may be the intended default — confirm against the
    /// incremental-sync selection logic.
    pub async fn load_sync_state(&self) -> Result<SyncState> {
        let file_path = self.sync_state_file_path();

        if !file_path.exists() {
            // Return default sync state if no sync state file exists
            return Ok(SyncState {
                last_sync_timestamp: Utc::now(),
            });
        }

        let content = fs::read_to_string(&file_path)
            .await
            .map_err(|e| Error::generic(format!("Failed to read sync state file: {}", e)))?;

        let sync_state: SyncState = serde_yaml::from_str(&content)
            .map_err(|e| Error::generic(format!("Failed to deserialize sync state: {}", e)))?;

        Ok(sync_state)
    }
494
495    /// List all workspace IDs from disk
496    pub async fn list_workspace_ids(&self) -> Result<Vec<EntityId>> {
497        if !self.base_dir.exists() {
498            return Ok(Vec::new());
499        }
500
501        let mut workspace_ids = Vec::new();
502
503        let mut entries = fs::read_dir(&self.base_dir)
504            .await
505            .map_err(|e| Error::generic(format!("Failed to read workspace directory: {}", e)))?;
506
507        while let Some(entry) = entries
508            .next_entry()
509            .await
510            .map_err(|e| Error::generic(format!("Failed to read directory entry: {}", e)))?
511        {
512            let path = entry.path();
513            if path.is_file() {
514                if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
515                    if file_name != "registry.yaml" && file_name.ends_with(".yaml") {
516                        if let Some(id) = file_name.strip_suffix(".yaml") {
517                            workspace_ids.push(id.to_string());
518                        }
519                    }
520                }
521            }
522        }
523
524        Ok(workspace_ids)
525    }
526
    /// Save the entire registry and all workspaces
    ///
    /// Writes the registry metadata first, then every workspace's own file.
    /// Not atomic: a failure partway through leaves the files written so far
    /// on disk.
    pub async fn save_full_registry(&self, registry: &WorkspaceRegistry) -> Result<()> {
        // Save registry metadata
        self.save_registry(registry).await?;

        // Save all workspaces
        for workspace in registry.get_workspaces() {
            self.save_workspace(workspace).await?;
        }

        Ok(())
    }
539
    /// Load the entire registry and all workspaces
    ///
    /// Thin alias for `load_registry`, which already loads every workspace
    /// file listed in the registry metadata.
    pub async fn load_full_registry(&self) -> Result<WorkspaceRegistry> {
        self.load_registry().await
    }
544
545    /// Backup workspace data
546    pub async fn backup_workspace(&self, workspace_id: &str, backup_dir: &Path) -> Result<PathBuf> {
547        let workspace_file = self.workspace_file_path(workspace_id);
548
549        if !workspace_file.exists() {
550            return Err(Error::generic(format!("Workspace {} does not exist", workspace_id)));
551        }
552
553        // Ensure backup directory exists
554        if !backup_dir.exists() {
555            fs::create_dir_all(backup_dir)
556                .await
557                .map_err(|e| Error::generic(format!("Failed to create backup directory: {}", e)))?;
558        }
559
560        // Create backup filename with timestamp
561        let timestamp = Utc::now().format("%Y%m%d_%H%M%S");
562        let backup_filename = format!("{}_{}.yaml", workspace_id, timestamp);
563        let backup_path = backup_dir.join(backup_filename);
564
565        // Copy workspace file
566        fs::copy(&workspace_file, &backup_path)
567            .await
568            .map_err(|e| Error::generic(format!("Failed to create backup: {}", e)))?;
569
570        Ok(backup_path)
571    }
572
573    /// Restore workspace from backup
574    pub async fn restore_workspace(&self, backup_path: &Path) -> Result<EntityId> {
575        if !backup_path.exists() {
576            return Err(Error::generic(format!("Backup file does not exist: {:?}", backup_path)));
577        }
578
579        // Load workspace from backup
580        let content = fs::read_to_string(backup_path)
581            .await
582            .map_err(|e| Error::generic(format!("Failed to read backup file: {}", e)))?;
583
584        let workspace: Workspace = serde_yaml::from_str(&content)
585            .map_err(|e| Error::generic(format!("Failed to deserialize backup: {}", e)))?;
586
587        // Save restored workspace
588        self.save_workspace(&workspace).await?;
589
590        Ok(workspace.id)
591    }
592
593    /// Clean up old backups
594    pub async fn cleanup_old_backups(&self, backup_dir: &Path, keep_count: usize) -> Result<usize> {
595        if !backup_dir.exists() {
596            return Ok(0);
597        }
598
599        let mut backup_files = Vec::new();
600
601        let mut entries = fs::read_dir(backup_dir)
602            .await
603            .map_err(|e| Error::generic(format!("Failed to read backup directory: {}", e)))?;
604
605        while let Some(entry) = entries
606            .next_entry()
607            .await
608            .map_err(|e| Error::generic(format!("Failed to read backup entry: {}", e)))?
609        {
610            let path = entry.path();
611            if path.is_file() {
612                if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
613                    if file_name.ends_with(".yaml") {
614                        if let Ok(metadata) = entry.metadata().await {
615                            if let Ok(modified) = metadata.modified() {
616                                backup_files.push((path, modified));
617                            }
618                        }
619                    }
620                }
621            }
622        }
623
624        // Sort by modification time (newest first)
625        backup_files.sort_by(|a, b| b.1.cmp(&a.1));
626
627        // Remove old backups
628        let mut removed_count = 0;
629        for (path, _) in backup_files.iter().skip(keep_count) {
630            if fs::remove_file(path).await.is_ok() {
631                removed_count += 1;
632            }
633        }
634
635        Ok(removed_count)
636    }
637
    /// Advanced sync with additional configuration options
    ///
    /// Mirrors workspaces into `target_dir` according to a sync strategy and
    /// directory layout.
    ///
    /// * `strategy` — `"full"`, `"incremental"`, or `"selective"`;
    ///   `"selective"` requires `workspace_ids` (comma-separated list).
    /// * `structure` — `"flat"`, `"nested"`, or `"grouped"`.
    /// * `include_meta` — also emit a per-workspace metadata file.
    /// * `force` — overwrite files that already exist.
    /// * `filename_pattern` — may contain `{name}`, `{id}`, `{timestamp}`
    ///   placeholders (see `generate_filename`).
    /// * `exclude_pattern` — optional regex; matching workspace ids are skipped.
    /// * `dry_run` — compute counters without touching the filesystem.
    ///
    /// After an incremental sync the stored sync state is advanced to "now".
    ///
    /// NOTE(review): an invalid `exclude_pattern` regex is silently ignored
    /// (nothing gets excluded), and a workspace that fails to load is silently
    /// skipped — confirm both lenient behaviors are intentional.
    #[allow(clippy::too_many_arguments)]
    pub async fn sync_to_directory_advanced(
        &self,
        target_dir: &str,
        strategy: &str,
        workspace_ids: Option<&str>,
        structure: &str,
        include_meta: bool,
        force: bool,
        filename_pattern: &str,
        exclude_pattern: Option<&str>,
        dry_run: bool,
    ) -> Result<SyncResult> {
        let target_path = PathBuf::from(target_dir);

        // Ensure target directory exists (unless dry run)
        if !dry_run && !target_path.exists() {
            fs::create_dir_all(&target_path)
                .await
                .map_err(|e| Error::generic(format!("Failed to create target directory: {}", e)))?;
        }

        // Parse strategy
        let sync_strategy = match strategy {
            "full" => SyncStrategy::Full,
            "incremental" => SyncStrategy::Incremental,
            "selective" => {
                if let Some(ids) = workspace_ids {
                    let workspace_list = ids.split(',').map(|s| s.trim().to_string()).collect();
                    SyncStrategy::Selective(workspace_list)
                } else {
                    return Err(Error::generic("Selective strategy requires workspace IDs"));
                }
            }
            _ => return Err(Error::generic(format!("Unknown sync strategy: {}", strategy))),
        };

        // Parse directory structure
        let dir_structure = match structure {
            "flat" => DirectoryStructure::Flat,
            "nested" => DirectoryStructure::Nested,
            "grouped" => DirectoryStructure::Grouped,
            _ => return Err(Error::generic(format!("Unknown directory structure: {}", structure))),
        };

        // Get workspaces to sync based on strategy
        let mut workspaces_to_sync = self.get_workspaces_for_sync(&sync_strategy).await?;

        // Apply exclusion filter if provided
        // (an invalid regex falls through and excludes nothing)
        if let Some(exclude) = exclude_pattern {
            if let Ok(regex) = regex::Regex::new(exclude) {
                workspaces_to_sync.retain(|id| !regex.is_match(id));
            }
        }

        let mut result = SyncResult {
            synced_workspaces: 0,
            synced_requests: 0,
            files_created: 0,
            target_dir: target_path.clone(),
        };

        // Sync each workspace; load failures are silently skipped.
        for workspace_id in workspaces_to_sync {
            if let Ok(workspace) = self.load_workspace(&workspace_id).await {
                let workspace_result = self
                    .sync_workspace_to_directory_advanced(
                        &workspace,
                        &target_path,
                        &dir_structure,
                        include_meta,
                        force,
                        filename_pattern,
                        dry_run,
                    )
                    .await?;

                result.synced_workspaces += 1;
                result.synced_requests += workspace_result.requests_count;
                result.files_created += workspace_result.files_created;
            }
        }

        // Update sync state for incremental syncs; a failure here only warns
        // because the sync itself already succeeded.
        if let SyncStrategy::Incremental = sync_strategy {
            let new_sync_state = SyncState {
                last_sync_timestamp: Utc::now(),
            };
            if let Err(e) = self.save_sync_state(&new_sync_state).await {
                tracing::warn!("Failed to save sync state: {}", e);
            }
        }

        Ok(result)
    }
734
    /// Advanced sync for a single workspace with custom filename patterns
    ///
    /// Writes `workspace` into `target_dir` using the requested layout:
    /// * `Flat` — a single `<pattern>.yaml` file.
    /// * `Nested` — `<pattern>/workspace.yaml` plus a `requests/` directory
    ///   with per-request files.
    /// * `Grouped` — shared `workspaces/` and `requests/` directories under
    ///   `target_dir`.
    ///
    /// Existing files are only rewritten when `force` is set. With `dry_run`
    /// no filesystem changes are made, but counters are still updated for the
    /// workspace/request files that would have been written.
    ///
    /// NOTE(review): the metadata file (`include_meta`) is neither written nor
    /// counted during a dry run, so dry-run `files_created` can be one lower
    /// per workspace than a real run — confirm whether the dry-run count
    /// should include it.
    #[allow(clippy::too_many_arguments)]
    async fn sync_workspace_to_directory_advanced(
        &self,
        workspace: &Workspace,
        target_dir: &Path,
        structure: &DirectoryStructure,
        include_meta: bool,
        force: bool,
        filename_pattern: &str,
        dry_run: bool,
    ) -> Result<WorkspaceSyncResult> {
        let mut result = WorkspaceSyncResult {
            requests_count: 0,
            files_created: 0,
        };

        match structure {
            DirectoryStructure::Flat => {
                let export = self.create_workspace_export(workspace).await?;
                let filename = self.generate_filename(filename_pattern, workspace);
                let file_path = target_dir.join(format!("{}.yaml", filename));

                if force || !file_path.exists() {
                    if !dry_run {
                        let content = serde_yaml::to_string(&export).map_err(|e| {
                            Error::generic(format!("Failed to serialize workspace: {}", e))
                        })?;

                        fs::write(&file_path, content).await.map_err(|e| {
                            Error::generic(format!("Failed to write workspace file: {}", e))
                        })?;
                    }
                    result.files_created += 1;
                }
            }

            DirectoryStructure::Nested => {
                let workspace_dir =
                    target_dir.join(self.generate_filename(filename_pattern, workspace));
                if !dry_run && !workspace_dir.exists() {
                    fs::create_dir_all(&workspace_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create workspace directory: {}", e))
                    })?;
                }

                // Export main workspace file
                let export = self.create_workspace_export(workspace).await?;
                let workspace_file = workspace_dir.join("workspace.yaml");

                if force || !workspace_file.exists() {
                    if !dry_run {
                        let content = serde_yaml::to_string(&export).map_err(|e| {
                            Error::generic(format!("Failed to serialize workspace: {}", e))
                        })?;

                        fs::write(&workspace_file, content).await.map_err(|e| {
                            Error::generic(format!("Failed to write workspace file: {}", e))
                        })?;
                    }
                    result.files_created += 1;
                }

                // Export individual requests
                let requests_dir = workspace_dir.join("requests");
                if !dry_run && !requests_dir.exists() {
                    fs::create_dir_all(&requests_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create requests directory: {}", e))
                    })?;
                }

                result.requests_count += self
                    .export_workspace_requests_advanced(workspace, &requests_dir, force, dry_run)
                    .await?;
            }

            DirectoryStructure::Grouped => {
                // Create grouped directories
                let requests_dir = target_dir.join("requests");
                let workspaces_dir = target_dir.join("workspaces");

                if !dry_run {
                    for dir in [&requests_dir, &workspaces_dir] {
                        if !dir.exists() {
                            fs::create_dir_all(dir).await.map_err(|e| {
                                Error::generic(format!("Failed to create directory: {}", e))
                            })?;
                        }
                    }
                }

                // Export workspace metadata
                let export = self.create_workspace_export(workspace).await?;
                let filename = self.generate_filename(filename_pattern, workspace);
                let workspace_file = workspaces_dir.join(format!("{}.yaml", filename));

                if force || !workspace_file.exists() {
                    if !dry_run {
                        let content = serde_yaml::to_string(&export).map_err(|e| {
                            Error::generic(format!("Failed to serialize workspace: {}", e))
                        })?;

                        fs::write(&workspace_file, content).await.map_err(|e| {
                            Error::generic(format!("Failed to write workspace file: {}", e))
                        })?;
                    }
                    result.files_created += 1;
                }

                // Export requests to requests directory
                result.requests_count += self
                    .export_workspace_requests_grouped_advanced(
                        workspace,
                        &requests_dir,
                        force,
                        dry_run,
                    )
                    .await?;
            }
        }

        // Create metadata file if requested (skipped, and not counted, on dry run)
        if include_meta && !dry_run {
            self.create_metadata_file(workspace, target_dir, structure).await?;
            result.files_created += 1;
        }

        Ok(result)
    }
864
865    /// Generate filename from pattern
866    fn generate_filename(&self, pattern: &str, workspace: &Workspace) -> String {
867        let timestamp = Utc::now().format("%Y%m%d_%H%M%S");
868
869        pattern
870            .replace("{name}", &self.sanitize_filename(&workspace.name))
871            .replace("{id}", &workspace.id)
872            .replace("{timestamp}", &timestamp.to_string())
873    }
874
875    /// Advanced request export with dry run support
876    async fn export_workspace_requests_advanced(
877        &self,
878        workspace: &Workspace,
879        requests_dir: &Path,
880        force: bool,
881        dry_run: bool,
882    ) -> Result<usize> {
883        let mut count = 0;
884
885        for request in &workspace.requests {
886            let file_path =
887                requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
888            if force || !file_path.exists() {
889                if !dry_run {
890                    let exported = self.convert_request_to_exported(request, "");
891                    let content = serde_yaml::to_string(&exported).map_err(|e| {
892                        Error::generic(format!("Failed to serialize request: {}", e))
893                    })?;
894
895                    fs::write(&file_path, content).await.map_err(|e| {
896                        Error::generic(format!("Failed to write request file: {}", e))
897                    })?;
898                }
899                count += 1;
900            }
901        }
902
903        // Export folder requests
904        for folder in &workspace.folders {
905            count += self
906                .export_folder_requests_advanced(folder, requests_dir, force, &folder.name, dry_run)
907                .await?;
908        }
909
910        Ok(count)
911    }
912
913    /// Advanced folder request export
914    async fn export_folder_requests_advanced(
915        &self,
916        folder: &Folder,
917        requests_dir: &Path,
918        force: bool,
919        folder_path: &str,
920        dry_run: bool,
921    ) -> Result<usize> {
922        use std::collections::VecDeque;
923
924        let mut count = 0;
925        let mut queue = VecDeque::new();
926
927        // Start with the root folder
928        queue.push_back((folder, folder_path.to_string()));
929
930        while let Some((current_folder, current_path)) = queue.pop_front() {
931            // Export requests in current folder
932            for request in &current_folder.requests {
933                let file_path =
934                    requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
935                if force || !file_path.exists() {
936                    if !dry_run {
937                        let exported = self.convert_request_to_exported(request, &current_path);
938                        let content = serde_yaml::to_string(&exported).map_err(|e| {
939                            Error::generic(format!("Failed to serialize request: {}", e))
940                        })?;
941
942                        fs::write(&file_path, content).await.map_err(|e| {
943                            Error::generic(format!("Failed to write request file: {}", e))
944                        })?;
945                    }
946                    count += 1;
947                }
948            }
949
950            // Add subfolders to queue with updated paths
951            for subfolder in &current_folder.folders {
952                let subfolder_path = if current_path.is_empty() {
953                    subfolder.name.clone()
954                } else {
955                    format!("{}/{}", current_path, subfolder.name)
956                };
957                queue.push_back((subfolder, subfolder_path));
958            }
959        }
960
961        Ok(count)
962    }
963
964    /// Advanced grouped request export
965    async fn export_workspace_requests_grouped_advanced(
966        &self,
967        workspace: &Workspace,
968        requests_dir: &Path,
969        force: bool,
970        dry_run: bool,
971    ) -> Result<usize> {
972        let mut count = 0;
973        let workspace_requests_dir = requests_dir.join(self.sanitize_filename(&workspace.name));
974
975        if !dry_run && !workspace_requests_dir.exists() {
976            fs::create_dir_all(&workspace_requests_dir).await.map_err(|e| {
977                Error::generic(format!("Failed to create workspace requests directory: {}", e))
978            })?;
979        }
980
981        count += self
982            .export_workspace_requests_advanced(workspace, &workspace_requests_dir, force, dry_run)
983            .await?;
984        Ok(count)
985    }
986
987    /// Sync workspaces to an external directory for Git/Dropbox integration
988    pub async fn sync_to_directory(
989        &self,
990        target_dir: &str,
991        strategy: &str,
992        workspace_ids: Option<&str>,
993        structure: &str,
994        include_meta: bool,
995        force: bool,
996    ) -> Result<SyncResult> {
997        let target_path = PathBuf::from(target_dir);
998
999        // Ensure target directory exists
1000        if !target_path.exists() {
1001            fs::create_dir_all(&target_path)
1002                .await
1003                .map_err(|e| Error::generic(format!("Failed to create target directory: {}", e)))?;
1004        }
1005
1006        // Parse strategy
1007        let sync_strategy = match strategy {
1008            "full" => SyncStrategy::Full,
1009            "incremental" => SyncStrategy::Incremental,
1010            "selective" => {
1011                if let Some(ids) = workspace_ids {
1012                    let workspace_list = ids.split(',').map(|s| s.trim().to_string()).collect();
1013                    SyncStrategy::Selective(workspace_list)
1014                } else {
1015                    return Err(Error::generic("Selective strategy requires workspace IDs"));
1016                }
1017            }
1018            _ => return Err(Error::generic(format!("Unknown sync strategy: {}", strategy))),
1019        };
1020
1021        // Parse directory structure
1022        let dir_structure = match structure {
1023            "flat" => DirectoryStructure::Flat,
1024            "nested" => DirectoryStructure::Nested,
1025            "grouped" => DirectoryStructure::Grouped,
1026            _ => return Err(Error::generic(format!("Unknown directory structure: {}", structure))),
1027        };
1028
1029        // Get workspaces to sync based on strategy
1030        let workspaces_to_sync = self.get_workspaces_for_sync(&sync_strategy).await?;
1031
1032        let mut result = SyncResult {
1033            synced_workspaces: 0,
1034            synced_requests: 0,
1035            files_created: 0,
1036            target_dir: target_path.clone(),
1037        };
1038
1039        // Sync each workspace
1040        for workspace_id in workspaces_to_sync {
1041            if let Ok(workspace) = self.load_workspace(&workspace_id).await {
1042                let workspace_result = self
1043                    .sync_workspace_to_directory(
1044                        &workspace,
1045                        &target_path,
1046                        &dir_structure,
1047                        include_meta,
1048                        force,
1049                    )
1050                    .await?;
1051
1052                result.synced_workspaces += 1;
1053                result.synced_requests += workspace_result.requests_count;
1054                result.files_created += workspace_result.files_created;
1055            }
1056        }
1057
1058        // Update sync state for incremental syncs
1059        if let SyncStrategy::Incremental = sync_strategy {
1060            let new_sync_state = SyncState {
1061                last_sync_timestamp: Utc::now(),
1062            };
1063            if let Err(e) = self.save_sync_state(&new_sync_state).await {
1064                tracing::warn!("Failed to save sync state: {}", e);
1065            }
1066        }
1067
1068        Ok(result)
1069    }
1070
1071    /// Get list of workspace IDs to sync based on strategy
1072    async fn get_workspaces_for_sync(&self, strategy: &SyncStrategy) -> Result<Vec<String>> {
1073        match strategy {
1074            SyncStrategy::Full => self.list_workspace_ids().await,
1075            SyncStrategy::Incremental => {
1076                // Load sync state to get last sync timestamp
1077                let sync_state = self.load_sync_state().await?;
1078                let last_sync = sync_state.last_sync_timestamp;
1079
1080                // Get all workspace IDs
1081                let all_workspace_ids = self.list_workspace_ids().await?;
1082
1083                // Filter workspaces that have been modified since last sync
1084                let mut modified_workspaces = Vec::new();
1085                for workspace_id in all_workspace_ids {
1086                    let file_path = self.workspace_file_path(&workspace_id);
1087                    if let Ok(metadata) = fs::metadata(&file_path).await {
1088                        if let Ok(modified_time) = metadata.modified() {
1089                            let modified_datetime = DateTime::<Utc>::from(modified_time);
1090                            if modified_datetime > last_sync {
1091                                modified_workspaces.push(workspace_id);
1092                            }
1093                        }
1094                    }
1095                }
1096
1097                Ok(modified_workspaces)
1098            }
1099            SyncStrategy::Selective(ids) => Ok(ids.clone()),
1100        }
1101    }
1102
    /// Sync a single workspace to the target directory
    ///
    /// Writes the workspace (and, for nested/grouped layouts, its requests)
    /// under `target_dir` in the requested `structure`:
    /// * `Flat` — one `<name>.yaml` file per workspace directly in `target_dir`.
    /// * `Nested` — `<name>/workspace.yaml` plus a `<name>/requests/` tree.
    /// * `Grouped` — shared `workspaces/` and `requests/` directories.
    ///
    /// Existing files are only overwritten when `force` is set. Returns the
    /// number of request files exported and top-level files created.
    async fn sync_workspace_to_directory(
        &self,
        workspace: &Workspace,
        target_dir: &Path,
        structure: &DirectoryStructure,
        include_meta: bool,
        force: bool,
    ) -> Result<WorkspaceSyncResult> {
        let mut result = WorkspaceSyncResult {
            requests_count: 0,
            files_created: 0,
        };

        match structure {
            DirectoryStructure::Flat => {
                // Single self-contained YAML file per workspace.
                let export = self.create_workspace_export(workspace).await?;
                let file_path =
                    target_dir.join(format!("{}.yaml", self.sanitize_filename(&workspace.name)));

                // Skip the write (and the count) if the file exists and
                // overwriting was not requested.
                if force || !file_path.exists() {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::generic(format!("Failed to serialize workspace: {}", e))
                    })?;

                    fs::write(&file_path, content).await.map_err(|e| {
                        Error::generic(format!("Failed to write workspace file: {}", e))
                    })?;

                    result.files_created += 1;
                }
            }

            DirectoryStructure::Nested => {
                // One directory per workspace, named after the workspace.
                let workspace_dir = target_dir.join(self.sanitize_filename(&workspace.name));
                if !workspace_dir.exists() {
                    fs::create_dir_all(&workspace_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create workspace directory: {}", e))
                    })?;
                }

                // Export main workspace file
                let export = self.create_workspace_export(workspace).await?;
                let workspace_file = workspace_dir.join("workspace.yaml");

                if force || !workspace_file.exists() {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::generic(format!("Failed to serialize workspace: {}", e))
                    })?;

                    fs::write(&workspace_file, content).await.map_err(|e| {
                        Error::generic(format!("Failed to write workspace file: {}", e))
                    })?;

                    result.files_created += 1;
                }

                // Export individual requests
                let requests_dir = workspace_dir.join("requests");
                if !requests_dir.exists() {
                    fs::create_dir_all(&requests_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create requests directory: {}", e))
                    })?;
                }

                result.requests_count +=
                    self.export_workspace_requests(workspace, &requests_dir, force).await?;
            }

            DirectoryStructure::Grouped => {
                // Create grouped directories shared by all workspaces.
                let requests_dir = target_dir.join("requests");
                let workspaces_dir = target_dir.join("workspaces");

                for dir in [&requests_dir, &workspaces_dir] {
                    if !dir.exists() {
                        fs::create_dir_all(dir).await.map_err(|e| {
                            Error::generic(format!("Failed to create directory: {}", e))
                        })?;
                    }
                }

                // Export workspace metadata
                let export = self.create_workspace_export(workspace).await?;
                let workspace_file = workspaces_dir
                    .join(format!("{}.yaml", self.sanitize_filename(&workspace.name)));

                if force || !workspace_file.exists() {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::generic(format!("Failed to serialize workspace: {}", e))
                    })?;

                    fs::write(&workspace_file, content).await.map_err(|e| {
                        Error::generic(format!("Failed to write workspace file: {}", e))
                    })?;

                    result.files_created += 1;
                }

                // Export requests to requests directory
                result.requests_count +=
                    self.export_workspace_requests_grouped(workspace, &requests_dir, force).await?;
            }
        }

        // Create metadata file if requested
        if include_meta {
            self.create_metadata_file(workspace, target_dir, structure).await?;
            result.files_created += 1;
        }

        Ok(result)
    }
1216
1217    /// Create a Git-friendly workspace export
1218    async fn create_workspace_export(&self, workspace: &Workspace) -> Result<WorkspaceExport> {
1219        let mut requests = HashMap::new();
1220
1221        // Collect all requests from workspace
1222        self.collect_requests_from_workspace(workspace, &mut requests, "".to_string());
1223
1224        let metadata = WorkspaceMetadata {
1225            id: workspace.id.clone(),
1226            name: workspace.name.clone(),
1227            description: workspace.description.clone(),
1228            exported_at: Utc::now(),
1229            request_count: requests.len(),
1230            folder_count: workspace.folders.len(),
1231        };
1232
1233        let config = WorkspaceConfig {
1234            auth: workspace.config.auth.as_ref().and_then(AuthConfig::from_config_auth),
1235            base_url: workspace.config.base_url.clone(),
1236            variables: workspace.config.global_environment.variables.clone(),
1237            reality_level: workspace.config.reality_level,
1238        };
1239
1240        Ok(WorkspaceExport {
1241            metadata,
1242            config,
1243            requests,
1244        })
1245    }
1246
1247    /// Collect all requests from workspace into a hashmap
1248    fn collect_requests_from_workspace(
1249        &self,
1250        workspace: &Workspace,
1251        requests: &mut HashMap<String, ExportedRequest>,
1252        folder_path: String,
1253    ) {
1254        // Add root-level requests
1255        for request in &workspace.requests {
1256            let exported = self.convert_request_to_exported(request, &folder_path);
1257            requests.insert(request.id.clone(), exported);
1258        }
1259
1260        // Add folder requests recursively
1261        for folder in &workspace.folders {
1262            let current_path = if folder_path.is_empty() {
1263                folder.name.clone()
1264            } else {
1265                format!("{}/{}", folder_path, folder.name)
1266            };
1267
1268            for request in &folder.requests {
1269                let exported = self.convert_request_to_exported(request, &current_path);
1270                requests.insert(request.id.clone(), exported);
1271            }
1272
1273            // Recursively process subfolders
1274            self.collect_requests_from_folders(folder, requests, current_path);
1275        }
1276    }
1277
1278    /// Recursively collect requests from folders
1279    fn collect_requests_from_folders(
1280        &self,
1281        folder: &Folder,
1282        requests: &mut HashMap<String, ExportedRequest>,
1283        folder_path: String,
1284    ) {
1285        for subfolder in &folder.folders {
1286            let current_path = format!("{}/{}", folder_path, subfolder.name);
1287
1288            for request in &subfolder.requests {
1289                let exported = self.convert_request_to_exported(request, &current_path);
1290                requests.insert(request.id.clone(), exported);
1291            }
1292
1293            self.collect_requests_from_folders(subfolder, requests, current_path);
1294        }
1295    }
1296
1297    /// Convert a MockRequest to ExportedRequest
1298    fn convert_request_to_exported(
1299        &self,
1300        request: &MockRequest,
1301        folder_path: &str,
1302    ) -> ExportedRequest {
1303        ExportedRequest {
1304            id: request.id.clone(),
1305            name: request.name.clone(),
1306            method: format!("{:?}", request.method),
1307            path: request.path.clone(),
1308            folder_path: folder_path.to_string(),
1309            headers: request.headers.clone(),
1310            query_params: request.query_params.clone(),
1311            body: request.body.clone(),
1312            response_status: Some(request.response.status_code),
1313            response_body: request.response.body.clone(),
1314            response_headers: request.response.headers.clone(),
1315            delay: request.response.delay_ms,
1316        }
1317    }
1318
1319    /// Export workspace with encryption for secure sharing
1320    pub async fn export_workspace_encrypted(
1321        &self,
1322        workspace: &Workspace,
1323        output_path: &Path,
1324    ) -> Result<EncryptedExportResult> {
1325        // Check if encryption is enabled for this workspace
1326        if !workspace.config.auto_encryption.enabled {
1327            return Err(Error::generic("Encryption is not enabled for this workspace. Enable encryption in workspace settings first."));
1328        }
1329
1330        // Get auto-encryption config
1331        let encryption_config = workspace.config.auto_encryption.clone();
1332        let processor = AutoEncryptionProcessor::new(&workspace.id, encryption_config);
1333
1334        // Create filtered workspace copy for export
1335        let mut filtered_workspace = workspace.to_filtered_for_sync();
1336
1337        // Apply automatic encryption to the filtered workspace
1338        self.encrypt_workspace_data(&mut filtered_workspace, &processor)?;
1339
1340        // Create standard export
1341        let export = self.create_workspace_export(&filtered_workspace).await?;
1342
1343        // Encrypt the entire export
1344        let export_json = serde_json::to_string_pretty(&export)
1345            .map_err(|e| Error::generic(format!("Failed to serialize export: {}", e)))?;
1346
1347        let encrypted_data = utils::encrypt_for_workspace(&workspace.id, &export_json)?;
1348
1349        // Generate backup key for sharing
1350        let key_manager = WorkspaceKeyManager::new();
1351        let backup_key = key_manager.generate_workspace_key_backup(&workspace.id)?;
1352
1353        // Write encrypted data to file
1354        fs::write(output_path, &encrypted_data)
1355            .await
1356            .map_err(|e| Error::generic(format!("Failed to write encrypted export: {}", e)))?;
1357
1358        Ok(EncryptedExportResult {
1359            output_path: output_path.to_path_buf(),
1360            backup_key,
1361            exported_at: Utc::now(),
1362            workspace_name: workspace.name.clone(),
1363            encryption_enabled: true,
1364        })
1365    }
1366
1367    /// Import encrypted workspace
1368    pub async fn import_workspace_encrypted(
1369        &self,
1370        encrypted_file: &Path,
1371        _workspace_name: Option<&str>,
1372        _registry: &mut WorkspaceRegistry,
1373    ) -> Result<EncryptedImportResult> {
1374        // Read encrypted data
1375        let _encrypted_data = fs::read_to_string(encrypted_file)
1376            .await
1377            .map_err(|e| Error::generic(format!("Failed to read encrypted file: {}", e)))?;
1378
1379        // For import, we need the workspace ID and backup key
1380        // This would typically be provided by the user or extracted from metadata
1381        Err(Error::generic("Encrypted import requires workspace ID and backup key. Use import_workspace_encrypted_with_key instead."))
1382    }
1383
1384    /// Import encrypted workspace with specific workspace ID and backup key
1385    pub async fn import_workspace_encrypted_with_key(
1386        &self,
1387        encrypted_file: &Path,
1388        workspace_id: &str,
1389        backup_key: &str,
1390        workspace_name: Option<&str>,
1391        registry: &mut WorkspaceRegistry,
1392    ) -> Result<EncryptedImportResult> {
1393        // Ensure workspace key exists or restore from backup
1394        let key_manager = WorkspaceKeyManager::new();
1395        if !key_manager.has_workspace_key(workspace_id) {
1396            key_manager.restore_workspace_key_from_backup(workspace_id, backup_key)?;
1397        }
1398
1399        // Read and decrypt the data
1400        let encrypted_data = fs::read_to_string(encrypted_file)
1401            .await
1402            .map_err(|e| Error::generic(format!("Failed to read encrypted file: {}", e)))?;
1403
1404        let decrypted_json = utils::decrypt_for_workspace(workspace_id, &encrypted_data)?;
1405
1406        // Parse the export data
1407        let export: WorkspaceExport = serde_json::from_str(&decrypted_json)
1408            .map_err(|e| Error::generic(format!("Failed to parse decrypted export: {}", e)))?;
1409
1410        // Convert export to workspace
1411        let workspace = self.convert_export_to_workspace(&export, workspace_name)?;
1412
1413        // Add to registry
1414        let imported_id = registry.add_workspace(workspace)?;
1415
1416        Ok(EncryptedImportResult {
1417            workspace_id: imported_id,
1418            workspace_name: export.metadata.name.clone(),
1419            imported_at: Utc::now(),
1420            request_count: export.requests.len(),
1421            encryption_restored: true,
1422        })
1423    }
1424
1425    /// Apply encryption to workspace data before export
1426    fn encrypt_workspace_data(
1427        &self,
1428        workspace: &mut Workspace,
1429        processor: &AutoEncryptionProcessor,
1430    ) -> Result<()> {
1431        // Encrypt environment variables
1432        for env in &mut workspace.config.environments {
1433            processor.process_env_vars(&mut env.variables)?;
1434        }
1435        processor.process_env_vars(&mut workspace.config.global_environment.variables)?;
1436
1437        // Note: Headers and request bodies would be encrypted here when implemented
1438        // For now, we rely on the filtering done by to_filtered_for_sync()
1439
1440        Ok(())
1441    }
1442
1443    /// Convert WorkspaceExport back to Workspace
1444    fn convert_export_to_workspace(
1445        &self,
1446        export: &WorkspaceExport,
1447        name_override: Option<&str>,
1448    ) -> Result<Workspace> {
1449        let mut workspace =
1450            Workspace::new(name_override.unwrap_or(&export.metadata.name).to_string());
1451
1452        // Set description if provided
1453        if let Some(desc) = &export.metadata.description {
1454            workspace.description = Some(desc.clone());
1455        }
1456
1457        // Restore requests from export
1458        for exported_request in export.requests.values() {
1459            // Convert exported request back to MockRequest
1460            let method = self.parse_http_method(&exported_request.method)?;
1461            let mut request = MockRequest::new(
1462                method,
1463                exported_request.path.clone(),
1464                exported_request.name.clone(),
1465            );
1466
1467            // Set additional properties
1468            if let Some(status) = exported_request.response_status {
1469                request.response.status_code = status;
1470            }
1471
1472            // Set other response properties if available
1473            if let Some(body) = &exported_request.response_body {
1474                request.response.body = Some(body.clone());
1475            }
1476            request.response.headers = exported_request.response_headers.clone();
1477            if let Some(delay) = exported_request.delay {
1478                request.response.delay_ms = Some(delay);
1479            }
1480
1481            workspace.add_request(request)?;
1482        }
1483
1484        // Restore configuration
1485        workspace.config.global_environment.variables = export.config.variables.clone();
1486
1487        Ok(workspace)
1488    }
1489
1490    /// Parse HTTP method string to enum
1491    fn parse_http_method(&self, method_str: &str) -> Result<crate::routing::HttpMethod> {
1492        match method_str.to_uppercase().as_str() {
1493            "GET" => Ok(crate::routing::HttpMethod::GET),
1494            "POST" => Ok(crate::routing::HttpMethod::POST),
1495            "PUT" => Ok(crate::routing::HttpMethod::PUT),
1496            "DELETE" => Ok(crate::routing::HttpMethod::DELETE),
1497            "PATCH" => Ok(crate::routing::HttpMethod::PATCH),
1498            "HEAD" => Ok(crate::routing::HttpMethod::HEAD),
1499            "OPTIONS" => Ok(crate::routing::HttpMethod::OPTIONS),
1500            _ => Err(Error::generic(format!("Unknown HTTP method: {}", method_str))),
1501        }
1502    }
1503
1504    /// Check workspace for unencrypted sensitive data before export
1505    pub fn check_workspace_for_unencrypted_secrets(
1506        &self,
1507        workspace: &Workspace,
1508    ) -> Result<SecurityCheckResult> {
1509        let mut warnings = Vec::new();
1510        let errors = Vec::new();
1511
1512        // Check environment variables
1513        self.check_environment_variables(workspace, &mut warnings)?;
1514
1515        // Check for sensitive patterns in request data (when implemented)
1516        // This would check headers, bodies, etc.
1517
1518        let has_warnings = !warnings.is_empty();
1519        let has_errors = !errors.is_empty();
1520
1521        Ok(SecurityCheckResult {
1522            workspace_id: workspace.id.clone(),
1523            workspace_name: workspace.name.clone(),
1524            warnings,
1525            errors,
1526            is_secure: !has_warnings && !has_errors,
1527            recommended_actions: self.generate_security_recommendations(has_warnings, has_errors),
1528        })
1529    }
1530
1531    /// Check environment variables for sensitive data
1532    fn check_environment_variables(
1533        &self,
1534        workspace: &Workspace,
1535        warnings: &mut Vec<SecurityWarning>,
1536    ) -> Result<()> {
1537        let sensitive_keys = [
1538            "password",
1539            "secret",
1540            "key",
1541            "token",
1542            "credential",
1543            "api_key",
1544            "apikey",
1545            "api_secret",
1546            "db_password",
1547            "database_password",
1548            "aws_secret_key",
1549            "aws_session_token",
1550            "private_key",
1551            "authorization",
1552            "auth_token",
1553            "access_token",
1554            "refresh_token",
1555            "cookie",
1556            "session",
1557            "csrf",
1558            "jwt",
1559            "bearer",
1560        ];
1561
1562        // Check global environment
1563        for (key, value) in &workspace.config.global_environment.variables {
1564            if self.is_potentially_sensitive(key, value, &sensitive_keys) {
1565                warnings.push(SecurityWarning {
1566                    field_type: "environment_variable".to_string(),
1567                    field_name: key.clone(),
1568                    location: "global_environment".to_string(),
1569                    severity: SecuritySeverity::High,
1570                    message: format!(
1571                        "Potentially sensitive environment variable '{}' detected",
1572                        key
1573                    ),
1574                    suggestion: "Consider encrypting this value or excluding it from exports"
1575                        .to_string(),
1576                });
1577            }
1578        }
1579
1580        // Check workspace environments
1581        for env in &workspace.config.environments {
1582            for (key, value) in &env.variables {
1583                if self.is_potentially_sensitive(key, value, &sensitive_keys) {
1584                    warnings.push(SecurityWarning {
1585                        field_type: "environment_variable".to_string(),
1586                        field_name: key.clone(),
1587                        location: format!("environment '{}'", env.name),
1588                        severity: SecuritySeverity::High,
1589                        message: format!("Potentially sensitive environment variable '{}' detected in environment '{}'", key, env.name),
1590                        suggestion: "Consider encrypting this value or excluding it from exports".to_string(),
1591                    });
1592                }
1593            }
1594        }
1595
1596        Ok(())
1597    }
1598
1599    /// Check if a key-value pair is potentially sensitive
1600    fn is_potentially_sensitive(&self, key: &str, value: &str, sensitive_keys: &[&str]) -> bool {
1601        let key_lower = key.to_lowercase();
1602
1603        // Check if key contains sensitive keywords
1604        if sensitive_keys.iter().any(|&sensitive| key_lower.contains(sensitive)) {
1605            return true;
1606        }
1607
1608        // Check for patterns that indicate sensitive data
1609        self.contains_sensitive_patterns(value)
1610    }
1611
1612    /// Check if value contains sensitive patterns
1613    fn contains_sensitive_patterns(&self, value: &str) -> bool {
1614        // Credit card pattern
1615        if CREDIT_CARD_PATTERN.is_match(value) {
1616            return true;
1617        }
1618
1619        // SSN pattern
1620        if SSN_PATTERN.is_match(value) {
1621            return true;
1622        }
1623
1624        // Long random-looking strings (potential API keys)
1625        if value.len() > 20 && value.chars().any(|c| c.is_alphanumeric()) {
1626            let alphanumeric_count = value.chars().filter(|c| c.is_alphanumeric()).count();
1627            let total_count = value.len();
1628            if alphanumeric_count as f64 / total_count as f64 > 0.8 {
1629                return true;
1630            }
1631        }
1632
1633        false
1634    }
1635
1636    /// Generate security recommendations based on findings
1637    fn generate_security_recommendations(
1638        &self,
1639        has_warnings: bool,
1640        has_errors: bool,
1641    ) -> Vec<String> {
1642        let mut recommendations = Vec::new();
1643
1644        if has_warnings || has_errors {
1645            recommendations.push("Enable encryption for this workspace in settings".to_string());
1646            recommendations.push("Review and encrypt sensitive environment variables".to_string());
1647            recommendations.push("Use encrypted export for sharing workspaces".to_string());
1648        }
1649
1650        if has_errors {
1651            recommendations
1652                .push("CRITICAL: Remove or encrypt sensitive data before proceeding".to_string());
1653        }
1654
1655        recommendations
1656    }
1657
1658    /// Export individual requests for nested structure
1659    async fn export_workspace_requests(
1660        &self,
1661        workspace: &Workspace,
1662        requests_dir: &Path,
1663        force: bool,
1664    ) -> Result<usize> {
1665        let mut count = 0;
1666
1667        for request in &workspace.requests {
1668            let file_path =
1669                requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
1670            if force || !file_path.exists() {
1671                let exported = self.convert_request_to_exported(request, "");
1672                let content = serde_yaml::to_string(&exported)
1673                    .map_err(|e| Error::generic(format!("Failed to serialize request: {}", e)))?;
1674
1675                fs::write(&file_path, content)
1676                    .await
1677                    .map_err(|e| Error::generic(format!("Failed to write request file: {}", e)))?;
1678
1679                count += 1;
1680            }
1681        }
1682
1683        // Export folder requests
1684        for folder in &workspace.folders {
1685            count += self.export_folder_requests(folder, requests_dir, force, &folder.name).await?;
1686        }
1687
1688        Ok(count)
1689    }
1690
1691    /// Export requests from folders recursively
1692    async fn export_folder_requests(
1693        &self,
1694        folder: &Folder,
1695        requests_dir: &Path,
1696        force: bool,
1697        folder_path: &str,
1698    ) -> Result<usize> {
1699        use std::collections::VecDeque;
1700
1701        let mut count = 0;
1702        let mut queue = VecDeque::new();
1703
1704        // Start with the root folder
1705        queue.push_back((folder, folder_path.to_string()));
1706
1707        while let Some((current_folder, current_path)) = queue.pop_front() {
1708            // Export requests in current folder
1709            for request in &current_folder.requests {
1710                let file_path =
1711                    requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
1712                if force || !file_path.exists() {
1713                    let exported = self.convert_request_to_exported(request, &current_path);
1714                    let content = serde_yaml::to_string(&exported).map_err(|e| {
1715                        Error::generic(format!("Failed to serialize request: {}", e))
1716                    })?;
1717
1718                    fs::write(&file_path, content).await.map_err(|e| {
1719                        Error::generic(format!("Failed to write request file: {}", e))
1720                    })?;
1721
1722                    count += 1;
1723                }
1724            }
1725
1726            // Add subfolders to queue with updated paths
1727            for subfolder in &current_folder.folders {
1728                let subfolder_path = if current_path.is_empty() {
1729                    subfolder.name.clone()
1730                } else {
1731                    format!("{}/{}", current_path, subfolder.name)
1732                };
1733                queue.push_back((subfolder, subfolder_path));
1734            }
1735        }
1736
1737        Ok(count)
1738    }
1739
1740    /// Export requests for grouped structure
1741    async fn export_workspace_requests_grouped(
1742        &self,
1743        workspace: &Workspace,
1744        requests_dir: &Path,
1745        force: bool,
1746    ) -> Result<usize> {
1747        let mut count = 0;
1748        let workspace_requests_dir = requests_dir.join(self.sanitize_filename(&workspace.name));
1749
1750        if !workspace_requests_dir.exists() {
1751            fs::create_dir_all(&workspace_requests_dir).await.map_err(|e| {
1752                Error::generic(format!("Failed to create workspace requests directory: {}", e))
1753            })?;
1754        }
1755
1756        count += self
1757            .export_workspace_requests(workspace, &workspace_requests_dir, force)
1758            .await?;
1759        Ok(count)
1760    }
1761
1762    /// Create metadata file for Git integration
1763    async fn create_metadata_file(
1764        &self,
1765        workspace: &Workspace,
1766        target_dir: &Path,
1767        structure: &DirectoryStructure,
1768    ) -> Result<()> {
1769        let metadata = serde_json::json!({
1770            "workspace_id": workspace.id,
1771            "workspace_name": workspace.name,
1772            "description": workspace.description,
1773            "exported_at": Utc::now().to_rfc3339(),
1774            "structure": format!("{:?}", structure),
1775            "version": "1.0",
1776            "source": "mockforge"
1777        });
1778
1779        let metadata_file = target_dir.join(".mockforge-meta.json");
1780        let content = serde_json::to_string_pretty(&metadata)
1781            .map_err(|e| Error::generic(format!("Failed to serialize metadata: {}", e)))?;
1782
1783        fs::write(&metadata_file, content)
1784            .await
1785            .map_err(|e| Error::generic(format!("Failed to write metadata file: {}", e)))?;
1786
1787        Ok(())
1788    }
1789
1790    /// Export a reality preset to a file
1791    ///
1792    /// Exports a reality preset (JSON or YAML format) to the specified path.
1793    /// The preset can be imported later to restore the reality configuration.
1794    pub async fn export_reality_preset(
1795        &self,
1796        preset: &crate::RealityPreset,
1797        output_path: &Path,
1798    ) -> Result<()> {
1799        self.ensure_workspace_dir().await?;
1800
1801        // Determine format from file extension
1802        let content = if output_path.extension().and_then(|s| s.to_str()) == Some("yaml")
1803            || output_path.extension().and_then(|s| s.to_str()) == Some("yml")
1804        {
1805            serde_yaml::to_string(preset)
1806                .map_err(|e| Error::generic(format!("Failed to serialize preset to YAML: {}", e)))?
1807        } else {
1808            serde_json::to_string_pretty(preset)
1809                .map_err(|e| Error::generic(format!("Failed to serialize preset to JSON: {}", e)))?
1810        };
1811
1812        // Ensure parent directory exists
1813        if let Some(parent) = output_path.parent() {
1814            fs::create_dir_all(parent)
1815                .await
1816                .map_err(|e| Error::generic(format!("Failed to create preset directory: {}", e)))?;
1817        }
1818
1819        fs::write(output_path, content)
1820            .await
1821            .map_err(|e| Error::generic(format!("Failed to write preset file: {}", e)))?;
1822
1823        Ok(())
1824    }
1825
1826    /// Import a reality preset from a file
1827    ///
1828    /// Loads a reality preset from a JSON or YAML file and returns it.
1829    /// The preset can then be applied to a workspace or the global configuration.
1830    pub async fn import_reality_preset(&self, input_path: &Path) -> Result<crate::RealityPreset> {
1831        let content = fs::read_to_string(input_path)
1832            .await
1833            .map_err(|e| Error::generic(format!("Failed to read preset file: {}", e)))?;
1834
1835        // Determine format from file extension
1836        let preset = if input_path
1837            .extension()
1838            .and_then(|s| s.to_str())
1839            .map(|ext| ext == "yaml" || ext == "yml")
1840            .unwrap_or(false)
1841        {
1842            serde_yaml::from_str(&content).map_err(|e| {
1843                Error::generic(format!("Failed to deserialize preset from YAML: {}", e))
1844            })?
1845        } else {
1846            serde_json::from_str(&content).map_err(|e| {
1847                Error::generic(format!("Failed to deserialize preset from JSON: {}", e))
1848            })?
1849        };
1850
1851        Ok(preset)
1852    }
1853
1854    /// Get the presets directory path
1855    pub fn presets_dir(&self) -> PathBuf {
1856        self.base_dir.join("presets")
1857    }
1858
1859    /// List all available reality presets
1860    ///
1861    /// Scans the presets directory and returns a list of all preset files found.
1862    pub async fn list_reality_presets(&self) -> Result<Vec<PathBuf>> {
1863        let presets_dir = self.presets_dir();
1864        if !presets_dir.exists() {
1865            return Ok(vec![]);
1866        }
1867
1868        let mut presets = Vec::new();
1869        let mut entries = fs::read_dir(&presets_dir)
1870            .await
1871            .map_err(|e| Error::generic(format!("Failed to read presets directory: {}", e)))?;
1872
1873        while let Some(entry) = entries
1874            .next_entry()
1875            .await
1876            .map_err(|e| Error::generic(format!("Failed to read directory entry: {}", e)))?
1877        {
1878            let path = entry.path();
1879            if path.is_file() {
1880                let ext = path.extension().and_then(|s| s.to_str());
1881                if ext == Some("json") || ext == Some("yaml") || ext == Some("yml") {
1882                    presets.push(path);
1883                }
1884            }
1885        }
1886
1887        Ok(presets)
1888    }
1889
1890    /// Sanitize filename for filesystem compatibility
1891    fn sanitize_filename(&self, name: &str) -> String {
1892        name.chars()
1893            .map(|c| match c {
1894                '/' | '\\' | ':' | '*' | '?' | '"' | '<' | '>' | '|' => '_',
1895                c if c.is_whitespace() => '_',
1896                c => c,
1897            })
1898            .collect::<String>()
1899            .to_lowercase()
1900    }
1901}
1902
/// Result of syncing a single workspace
///
/// Per-workspace counters produced by one sync pass; both fields only grow
/// while a sync runs.
#[derive(Debug)]
struct WorkspaceSyncResult {
    /// Number of requests exported
    requests_count: usize,
    /// Number of files created
    files_created: usize,
}
1911
#[cfg(test)]
mod tests {
    use super::*;
    use crate::workspace::{MockRequest, Workspace};
    use crate::HttpMethod;
    use tempfile::TempDir;

    /// Round-trips a single workspace through save/load and verifies it is
    /// discoverable through the id listing.
    #[tokio::test]
    async fn test_workspace_persistence() {
        let tmp = TempDir::new().unwrap();
        let store = WorkspacePersistence::new(tmp.path());

        // Build a workspace containing one request.
        let mut ws = Workspace::new("Test Workspace".to_string());
        ws.add_request(MockRequest::new(
            HttpMethod::GET,
            "/test".to_string(),
            "Test Request".to_string(),
        ))
        .unwrap();

        // Save, then reload and compare.
        store.save_workspace(&ws).await.unwrap();
        let reloaded = store.load_workspace(&ws.id).await.unwrap();
        assert_eq!(reloaded.name, ws.name);
        assert_eq!(reloaded.requests.len(), 1);

        // The saved workspace shows up in the id listing.
        let ids = store.list_workspace_ids().await.unwrap();
        assert_eq!(ids.len(), 1);
        assert_eq!(ids[0], ws.id);
    }

    /// Persists a full registry (two workspaces, one active) and checks the
    /// reloaded registry preserves both the members and the active selection.
    #[tokio::test]
    async fn test_registry_persistence() {
        let tmp = TempDir::new().unwrap();
        let store = WorkspacePersistence::new(tmp.path());

        // Registry with two workspaces; the first one is marked active.
        let mut registry = WorkspaceRegistry::new();
        let first_id = registry.add_workspace(Workspace::new("Workspace 1".to_string())).unwrap();
        let _second_id =
            registry.add_workspace(Workspace::new("Workspace 2".to_string())).unwrap();
        registry.set_active_workspace(Some(first_id.clone())).unwrap();

        // Persist and reload the full registry.
        store.save_full_registry(&registry).await.unwrap();
        let reloaded = store.load_full_registry().await.unwrap();

        assert_eq!(reloaded.get_workspaces().len(), 2);
        assert_eq!(reloaded.get_active_workspace().unwrap().name, "Workspace 1");
    }

    /// Backs up a workspace, deletes the original, and verifies the backup
    /// restores it intact.
    #[tokio::test]
    async fn test_backup_and_restore() {
        let tmp = TempDir::new().unwrap();
        let backups = tmp.path().join("backups");
        let store = WorkspacePersistence::new(tmp.path());

        let ws = Workspace::new("Test Workspace".to_string());
        store.save_workspace(&ws).await.unwrap();

        // Snapshot the workspace into the backup directory.
        let backup_file = store.backup_workspace(&ws.id, &backups).await.unwrap();
        assert!(backup_file.exists());

        // Remove the original so the restore has something to prove.
        store.delete_workspace(&ws.id).await.unwrap();
        assert!(store.load_workspace(&ws.id).await.is_err());

        // Restoring from the backup brings the workspace back under a
        // (possibly new) id.
        let restored_id = store.restore_workspace(&backup_file).await.unwrap();
        let restored = store.load_workspace(&restored_id).await.unwrap();
        assert_eq!(restored.name, "Test Workspace");
    }
}