// mockforge_core/workspace_persistence.rs

1//! Persistence layer for workspace configurations
2//!
3//! This module handles saving and loading workspace configurations to/from disk,
4//! enabling persistent storage of workspace hierarchies and configurations.
5
6use crate::config::AuthConfig as ConfigAuthConfig;
7use crate::encryption::{utils, AutoEncryptionProcessor, WorkspaceKeyManager};
8use crate::workspace::{EntityId, Folder, MockRequest, Workspace, WorkspaceRegistry};
9use crate::{Error, Result};
10use chrono::{DateTime, Utc};
11use once_cell::sync::Lazy;
12use regex::Regex;
13use serde::{Deserialize, Serialize};
14use std::collections::HashMap;
15use std::path::{Path, PathBuf};
16use tokio::fs;
17
18// Pre-compiled regex patterns for sensitive data detection
19static CREDIT_CARD_PATTERN: Lazy<Regex> = Lazy::new(|| {
20    Regex::new(r"\b\d{4}[-\s]?\d{4}[-\s]?\d{4}[-\s]?\d{4}\b")
21        .expect("CREDIT_CARD_PATTERN regex is valid")
22});
23
24static SSN_PATTERN: Lazy<Regex> = Lazy::new(|| {
25    Regex::new(r"\b\d{3}[-\s]?\d{2}[-\s]?\d{4}\b").expect("SSN_PATTERN regex is valid")
26});
27
/// Workspace persistence manager
///
/// Lays out storage as `<base_dir>/<workspace-id>.yaml` per workspace, with
/// `registry.yaml` and `sync_state.yaml` metadata files alongside them.
#[derive(Debug)]
pub struct WorkspacePersistence {
    /// Base directory for workspace storage
    base_dir: PathBuf,
}
34
/// Serializable workspace registry for persistence
///
/// On-disk shape of `registry.yaml`, written by `save_registry` and read back
/// by `load_registry` (which re-loads each workspace from its own file using
/// the IDs found in `workspaces`).
#[derive(Debug, Clone, Serialize, Deserialize)]
struct SerializableWorkspaceRegistry {
    /// Snapshot of every workspace in the registry at save time
    workspaces: Vec<Workspace>,
    /// ID of the active workspace, if one was set
    active_workspace: Option<EntityId>,
}
41
/// Sync state for tracking incremental syncs
///
/// Persisted to `sync_state.yaml`; updated after each successful
/// incremental sync so only workspaces modified since this timestamp
/// need to be mirrored again.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SyncState {
    /// Last time a sync operation was performed
    pub last_sync_timestamp: DateTime<Utc>,
}
48
/// Sync strategy for workspace mirroring
///
/// Derives `Eq` alongside `PartialEq` (equality over these variants is
/// total, so the stronger trait is free and enables use as map keys etc.).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SyncStrategy {
    /// Sync all workspaces completely
    Full,
    /// Sync only changed workspaces (based on modification time)
    Incremental,
    /// Sync only specified workspace IDs
    Selective(Vec<String>),
}
59
/// Directory structure for synced workspaces
///
/// Fieldless enum, so `Copy` and `Eq` are derived in addition to the
/// original `Clone`/`PartialEq` — values are cheap to pass by value.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DirectoryStructure {
    /// All workspaces in a flat structure: workspace-id.yaml
    Flat,
    /// Nested by workspace: workspaces/{name}/workspace.yaml + requests/
    Nested,
    /// Grouped by type: requests/, responses/, metadata/
    Grouped,
}
70
/// Result of a workspace sync operation
///
/// Counters are cumulative across every workspace touched by one
/// `sync_to_directory_advanced` call. On a dry run, counts reflect what
/// *would* have been written.
#[derive(Debug, Clone)]
pub struct SyncResult {
    /// Number of workspaces synced
    pub synced_workspaces: usize,
    /// Number of requests synced
    pub synced_requests: usize,
    /// Number of files created/updated
    pub files_created: usize,
    /// Target directory used
    pub target_dir: PathBuf,
}
83
/// Result of an encrypted workspace export
#[derive(Debug, Clone)]
pub struct EncryptedExportResult {
    /// Path to the encrypted export file
    pub output_path: PathBuf,
    /// Backup key for importing on other devices
    pub backup_key: String,
    /// When the export was created
    pub exported_at: DateTime<Utc>,
    /// Name of the exported workspace
    pub workspace_name: String,
    /// Whether encryption was successfully applied
    /// (NOTE(review): exports can apparently complete without encryption —
    /// callers should check this flag before treating the file as protected)
    pub encryption_enabled: bool,
}
98
/// Result of an encrypted workspace import
///
/// Mirror of [`EncryptedExportResult`] for the import direction.
#[derive(Debug, Clone)]
pub struct EncryptedImportResult {
    /// ID of the imported workspace
    pub workspace_id: String,
    /// Name of the imported workspace
    pub workspace_name: String,
    /// When the import was completed
    pub imported_at: DateTime<Utc>,
    /// Number of requests imported
    pub request_count: usize,
    /// Whether encryption was successfully restored
    pub encryption_restored: bool,
}
113
/// Result of a security check for sensitive data
#[derive(Debug, Clone)]
pub struct SecurityCheckResult {
    /// Workspace ID that was checked
    pub workspace_id: String,
    /// Workspace name that was checked
    pub workspace_name: String,
    /// Security warnings found
    pub warnings: Vec<SecurityWarning>,
    /// Security errors found (critical issues)
    pub errors: Vec<SecurityWarning>,
    /// Whether the workspace is considered secure
    /// (presumably false when `errors` is non-empty — confirm against the
    /// checker implementation, which is not visible in this chunk)
    pub is_secure: bool,
    /// Recommended actions to improve security
    pub recommended_actions: Vec<String>,
}
130
/// Security warning or error
///
/// Used for both the `warnings` and `errors` lists of
/// [`SecurityCheckResult`]; the `severity` field distinguishes them.
#[derive(Debug, Clone)]
pub struct SecurityWarning {
    /// Type of field that contains sensitive data
    pub field_type: String,
    /// Name of the field
    pub field_name: String,
    /// Location where the sensitive data was found
    pub location: String,
    /// Severity of the issue
    pub severity: SecuritySeverity,
    /// Human-readable message
    pub message: String,
    /// Suggestion for fixing the issue
    pub suggestion: String,
}
147
/// Severity levels for security issues
///
/// Variants are declared lowest-to-highest risk, so the derived `Ord`
/// matches the documented ordering (`Low < Medium < High < Critical`),
/// letting callers compare and rank findings directly. `Copy`, `Eq` and
/// `Hash` are free for a fieldless enum.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum SecuritySeverity {
    /// Low risk - informational
    Low,
    /// Medium risk - should be reviewed
    Medium,
    /// High risk - requires attention
    High,
    /// Critical risk - blocks operations
    Critical,
}
160
/// Git-friendly workspace export format
///
/// Serializes to stable YAML via serde, making exports reviewable in diffs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceExport {
    /// Workspace metadata
    pub metadata: WorkspaceMetadata,
    /// Workspace configuration
    pub config: WorkspaceConfig,
    /// All requests organized by folder structure
    /// (key format is produced by the exporter — not visible in this chunk)
    pub requests: HashMap<String, ExportedRequest>,
}
171
/// Metadata for exported workspace
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceMetadata {
    /// Original workspace ID
    pub id: String,
    /// Workspace name
    pub name: String,
    /// Workspace description
    pub description: Option<String>,
    /// Export timestamp
    pub exported_at: DateTime<Utc>,
    /// Total number of requests
    pub request_count: usize,
    /// Total number of folders
    pub folder_count: usize,
}
188
/// Simplified workspace configuration for export
///
/// The `#[serde(default)]` fields keep older export files (written before
/// those fields existed) deserializable.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceConfig {
    /// Authentication configuration
    pub auth: Option<AuthConfig>,
    /// Base URL for requests
    pub base_url: Option<String>,
    /// Environment variables
    pub variables: HashMap<String, String>,
    /// Reality level for this workspace (1-5)
    /// Controls the realism of mock behavior (chaos, latency, MockAI)
    #[serde(default)]
    pub reality_level: Option<crate::RealityLevel>,
    /// AI mode for this workspace
    /// Controls how AI-generated artifacts are used at runtime
    #[serde(default)]
    pub ai_mode: Option<crate::ai_studio::config::AiMode>,
}
207
/// Authentication configuration for export
///
/// Flattened string form of the richer config types; the meaning of
/// `params` keys depends on `auth_type` — see [`AuthConfig::from_config_auth`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuthConfig {
    /// Authentication type ("jwt", "oauth2", "basic", or "api_key")
    pub auth_type: String,
    /// Authentication parameters
    pub params: HashMap<String, String>,
}
216
217impl AuthConfig {
218    /// Convert from config AuthConfig to export AuthConfig
219    pub fn from_config_auth(config_auth: &ConfigAuthConfig) -> Option<Self> {
220        if let Some(jwt) = &config_auth.jwt {
221            let mut params = HashMap::new();
222            if let Some(secret) = &jwt.secret {
223                params.insert("secret".to_string(), secret.clone());
224            }
225            if let Some(rsa_public_key) = &jwt.rsa_public_key {
226                params.insert("rsa_public_key".to_string(), rsa_public_key.clone());
227            }
228            if let Some(ecdsa_public_key) = &jwt.ecdsa_public_key {
229                params.insert("ecdsa_public_key".to_string(), ecdsa_public_key.clone());
230            }
231            if let Some(issuer) = &jwt.issuer {
232                params.insert("issuer".to_string(), issuer.clone());
233            }
234            if let Some(audience) = &jwt.audience {
235                params.insert("audience".to_string(), audience.clone());
236            }
237            if !jwt.algorithms.is_empty() {
238                params.insert("algorithms".to_string(), jwt.algorithms.join(","));
239            }
240            Some(AuthConfig {
241                auth_type: "jwt".to_string(),
242                params,
243            })
244        } else if let Some(oauth2) = &config_auth.oauth2 {
245            let mut params = HashMap::new();
246            params.insert("client_id".to_string(), oauth2.client_id.clone());
247            params.insert("client_secret".to_string(), oauth2.client_secret.clone());
248            params.insert("introspection_url".to_string(), oauth2.introspection_url.clone());
249            if let Some(auth_url) = &oauth2.auth_url {
250                params.insert("auth_url".to_string(), auth_url.clone());
251            }
252            if let Some(token_url) = &oauth2.token_url {
253                params.insert("token_url".to_string(), token_url.clone());
254            }
255            if let Some(token_type_hint) = &oauth2.token_type_hint {
256                params.insert("token_type_hint".to_string(), token_type_hint.clone());
257            }
258            Some(AuthConfig {
259                auth_type: "oauth2".to_string(),
260                params,
261            })
262        } else if let Some(basic_auth) = &config_auth.basic_auth {
263            let mut params = HashMap::new();
264            for (user, pass) in &basic_auth.credentials {
265                params.insert(user.clone(), pass.clone());
266            }
267            Some(AuthConfig {
268                auth_type: "basic".to_string(),
269                params,
270            })
271        } else if let Some(api_key) = &config_auth.api_key {
272            let mut params = HashMap::new();
273            params.insert("header_name".to_string(), api_key.header_name.clone());
274            if let Some(query_name) = &api_key.query_name {
275                params.insert("query_name".to_string(), query_name.clone());
276            }
277            if !api_key.keys.is_empty() {
278                params.insert("keys".to_string(), api_key.keys.join(","));
279            }
280            Some(AuthConfig {
281                auth_type: "api_key".to_string(),
282                params,
283            })
284        } else {
285            None
286        }
287    }
288}
289
/// Exported request format
///
/// A flattened request + canned-response pair as it appears inside a
/// [`WorkspaceExport`] or an individual request file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExportedRequest {
    /// Request ID
    pub id: String,
    /// Request name
    pub name: String,
    /// HTTP method
    pub method: String,
    /// Request path
    pub path: String,
    /// Folder path (for organization)
    pub folder_path: String,
    /// Request headers
    pub headers: HashMap<String, String>,
    /// Query parameters
    pub query_params: HashMap<String, String>,
    /// Request body
    pub body: Option<String>,
    /// Response status code
    pub response_status: Option<u16>,
    /// Response body
    pub response_body: Option<String>,
    /// Response headers
    pub response_headers: HashMap<String, String>,
    /// Response delay (ms)
    pub delay: Option<u64>,
}
318
319impl WorkspacePersistence {
320    /// Create a new persistence manager
321    pub fn new<P: AsRef<Path>>(base_dir: P) -> Self {
322        Self {
323            base_dir: base_dir.as_ref().to_path_buf(),
324        }
325    }
326
327    /// Get the workspace directory path
328    pub fn workspace_dir(&self) -> &Path {
329        &self.base_dir
330    }
331
332    /// Get the path for a specific workspace file
333    pub fn workspace_file_path(&self, workspace_id: &str) -> PathBuf {
334        self.base_dir.join(format!("{}.yaml", workspace_id))
335    }
336
337    /// Get the registry metadata file path
338    pub fn registry_file_path(&self) -> PathBuf {
339        self.base_dir.join("registry.yaml")
340    }
341
342    /// Get the sync state file path
343    pub fn sync_state_file_path(&self) -> PathBuf {
344        self.base_dir.join("sync_state.yaml")
345    }
346
347    /// Ensure the workspace directory exists
348    pub async fn ensure_workspace_dir(&self) -> Result<()> {
349        if !self.base_dir.exists() {
350            fs::create_dir_all(&self.base_dir).await.map_err(|e| {
351                Error::generic(format!("Failed to create workspace directory: {}", e))
352            })?;
353        }
354        Ok(())
355    }
356
357    /// Save a workspace to disk
358    pub async fn save_workspace(&self, workspace: &Workspace) -> Result<()> {
359        self.ensure_workspace_dir().await?;
360
361        let file_path = self.workspace_file_path(&workspace.id);
362        let content = serde_yaml::to_string(workspace)
363            .map_err(|e| Error::generic(format!("Failed to serialize workspace: {}", e)))?;
364
365        fs::write(&file_path, content)
366            .await
367            .map_err(|e| Error::generic(format!("Failed to write workspace file: {}", e)))?;
368
369        Ok(())
370    }
371
372    /// Load a workspace from disk
373    pub async fn load_workspace(&self, workspace_id: &str) -> Result<Workspace> {
374        let file_path = self.workspace_file_path(workspace_id);
375
376        if !file_path.exists() {
377            return Err(Error::generic(format!("Workspace file not found: {:?}", file_path)));
378        }
379
380        let content = fs::read_to_string(&file_path)
381            .await
382            .map_err(|e| Error::generic(format!("Failed to read workspace file: {}", e)))?;
383
384        let mut workspace: Workspace = serde_yaml::from_str(&content)
385            .map_err(|e| Error::generic(format!("Failed to deserialize workspace: {}", e)))?;
386
387        // Initialize default mock environments if they don't exist (for backward compatibility)
388        workspace.initialize_default_mock_environments();
389
390        Ok(workspace)
391    }
392
393    /// Delete a workspace from disk
394    pub async fn delete_workspace(&self, workspace_id: &str) -> Result<()> {
395        let file_path = self.workspace_file_path(workspace_id);
396
397        if file_path.exists() {
398            fs::remove_file(&file_path)
399                .await
400                .map_err(|e| Error::generic(format!("Failed to delete workspace file: {}", e)))?;
401        }
402
403        Ok(())
404    }
405
406    /// Save the workspace registry metadata
407    pub async fn save_registry(&self, registry: &WorkspaceRegistry) -> Result<()> {
408        self.ensure_workspace_dir().await?;
409
410        let serializable = SerializableWorkspaceRegistry {
411            workspaces: registry.get_workspaces().into_iter().cloned().collect(),
412            active_workspace: registry.get_active_workspace_id().map(|s| s.to_string()),
413        };
414
415        let file_path = self.registry_file_path();
416        let content = serde_yaml::to_string(&serializable)
417            .map_err(|e| Error::generic(format!("Failed to serialize registry: {}", e)))?;
418
419        fs::write(&file_path, content)
420            .await
421            .map_err(|e| Error::generic(format!("Failed to write registry file: {}", e)))?;
422
423        Ok(())
424    }
425
    /// Load the workspace registry metadata
    ///
    /// Reads `registry.yaml`, then loads each listed workspace from its own
    /// file. A workspace that fails to load is skipped with a warning so one
    /// corrupt file does not prevent the rest of the registry from loading.
    ///
    /// Returns an empty registry when no registry file exists yet.
    pub async fn load_registry(&self) -> Result<WorkspaceRegistry> {
        let file_path = self.registry_file_path();

        if !file_path.exists() {
            // Return empty registry if no registry file exists
            return Ok(WorkspaceRegistry::new());
        }

        let content = fs::read_to_string(&file_path)
            .await
            .map_err(|e| Error::generic(format!("Failed to read registry file: {}", e)))?;

        let serializable: SerializableWorkspaceRegistry = serde_yaml::from_str(&content)
            .map_err(|e| Error::generic(format!("Failed to deserialize registry: {}", e)))?;

        let mut registry = WorkspaceRegistry::new();

        // Load individual workspaces
        for workspace_meta in &serializable.workspaces {
            match self.load_workspace(&workspace_meta.id).await {
                Ok(mut workspace) => {
                    // Ensure mock environments are initialized (for backward compatibility)
                    // NOTE(review): load_workspace already performs this
                    // initialization; the call is conditional internally, so
                    // repeating it here is redundant but harmless.
                    workspace.initialize_default_mock_environments();
                    registry.add_workspace(workspace)?;
                }
                Err(e) => {
                    // Best-effort: skip unloadable workspaces rather than
                    // failing the whole registry load.
                    tracing::warn!("Failed to load workspace {}: {}", workspace_meta.id, e);
                }
            }
        }

        // Set active workspace. A stale active ID (e.g. its workspace was
        // skipped above) only produces a warning.
        if let Some(active_id) = &serializable.active_workspace {
            if let Err(e) = registry.set_active_workspace(Some(active_id.clone())) {
                tracing::warn!("Failed to set active workspace {}: {}", active_id, e);
            }
        }

        Ok(registry)
    }
467
468    /// Save the sync state
469    pub async fn save_sync_state(&self, sync_state: &SyncState) -> Result<()> {
470        self.ensure_workspace_dir().await?;
471
472        let file_path = self.sync_state_file_path();
473        let content = serde_yaml::to_string(sync_state)
474            .map_err(|e| Error::generic(format!("Failed to serialize sync state: {}", e)))?;
475
476        fs::write(&file_path, content)
477            .await
478            .map_err(|e| Error::generic(format!("Failed to write sync state file: {}", e)))?;
479
480        Ok(())
481    }
482
    /// Load the sync state
    ///
    /// Returns a freshly-stamped default when no sync-state file exists.
    ///
    /// NOTE(review): the default uses `Utc::now()` as the last-sync time,
    /// which would make a first-ever incremental sync treat nothing as
    /// changed "since the last sync" — confirm against
    /// `get_workspaces_for_sync` whether an epoch default was intended.
    pub async fn load_sync_state(&self) -> Result<SyncState> {
        let file_path = self.sync_state_file_path();

        if !file_path.exists() {
            // Return default sync state if no sync state file exists
            return Ok(SyncState {
                last_sync_timestamp: Utc::now(),
            });
        }

        let content = fs::read_to_string(&file_path)
            .await
            .map_err(|e| Error::generic(format!("Failed to read sync state file: {}", e)))?;

        let sync_state: SyncState = serde_yaml::from_str(&content)
            .map_err(|e| Error::generic(format!("Failed to deserialize sync state: {}", e)))?;

        Ok(sync_state)
    }
503
504    /// List all workspace IDs from disk
505    pub async fn list_workspace_ids(&self) -> Result<Vec<EntityId>> {
506        if !self.base_dir.exists() {
507            return Ok(Vec::new());
508        }
509
510        let mut workspace_ids = Vec::new();
511
512        let mut entries = fs::read_dir(&self.base_dir)
513            .await
514            .map_err(|e| Error::generic(format!("Failed to read workspace directory: {}", e)))?;
515
516        while let Some(entry) = entries
517            .next_entry()
518            .await
519            .map_err(|e| Error::generic(format!("Failed to read directory entry: {}", e)))?
520        {
521            let path = entry.path();
522            if path.is_file() {
523                if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
524                    if file_name != "registry.yaml" && file_name.ends_with(".yaml") {
525                        if let Some(id) = file_name.strip_suffix(".yaml") {
526                            workspace_ids.push(id.to_string());
527                        }
528                    }
529                }
530            }
531        }
532
533        Ok(workspace_ids)
534    }
535
536    /// Save the entire registry and all workspaces
537    pub async fn save_full_registry(&self, registry: &WorkspaceRegistry) -> Result<()> {
538        // Save registry metadata
539        self.save_registry(registry).await?;
540
541        // Save all workspaces
542        for workspace in registry.get_workspaces() {
543            self.save_workspace(workspace).await?;
544        }
545
546        Ok(())
547    }
548
    /// Load the entire registry and all workspaces
    ///
    /// Delegates to [`Self::load_registry`], which already loads every
    /// workspace file referenced by the registry metadata.
    pub async fn load_full_registry(&self) -> Result<WorkspaceRegistry> {
        self.load_registry().await
    }
553
554    /// Backup workspace data
555    pub async fn backup_workspace(&self, workspace_id: &str, backup_dir: &Path) -> Result<PathBuf> {
556        let workspace_file = self.workspace_file_path(workspace_id);
557
558        if !workspace_file.exists() {
559            return Err(Error::generic(format!("Workspace {} does not exist", workspace_id)));
560        }
561
562        // Ensure backup directory exists
563        if !backup_dir.exists() {
564            fs::create_dir_all(backup_dir)
565                .await
566                .map_err(|e| Error::generic(format!("Failed to create backup directory: {}", e)))?;
567        }
568
569        // Create backup filename with timestamp
570        let timestamp = Utc::now().format("%Y%m%d_%H%M%S");
571        let backup_filename = format!("{}_{}.yaml", workspace_id, timestamp);
572        let backup_path = backup_dir.join(backup_filename);
573
574        // Copy workspace file
575        fs::copy(&workspace_file, &backup_path)
576            .await
577            .map_err(|e| Error::generic(format!("Failed to create backup: {}", e)))?;
578
579        Ok(backup_path)
580    }
581
582    /// Restore workspace from backup
583    pub async fn restore_workspace(&self, backup_path: &Path) -> Result<EntityId> {
584        if !backup_path.exists() {
585            return Err(Error::generic(format!("Backup file does not exist: {:?}", backup_path)));
586        }
587
588        // Load workspace from backup
589        let content = fs::read_to_string(backup_path)
590            .await
591            .map_err(|e| Error::generic(format!("Failed to read backup file: {}", e)))?;
592
593        let workspace: Workspace = serde_yaml::from_str(&content)
594            .map_err(|e| Error::generic(format!("Failed to deserialize backup: {}", e)))?;
595
596        // Save restored workspace
597        self.save_workspace(&workspace).await?;
598
599        Ok(workspace.id)
600    }
601
602    /// Clean up old backups
603    pub async fn cleanup_old_backups(&self, backup_dir: &Path, keep_count: usize) -> Result<usize> {
604        if !backup_dir.exists() {
605            return Ok(0);
606        }
607
608        let mut backup_files = Vec::new();
609
610        let mut entries = fs::read_dir(backup_dir)
611            .await
612            .map_err(|e| Error::generic(format!("Failed to read backup directory: {}", e)))?;
613
614        while let Some(entry) = entries
615            .next_entry()
616            .await
617            .map_err(|e| Error::generic(format!("Failed to read backup entry: {}", e)))?
618        {
619            let path = entry.path();
620            if path.is_file() {
621                if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
622                    if file_name.ends_with(".yaml") {
623                        if let Ok(metadata) = entry.metadata().await {
624                            if let Ok(modified) = metadata.modified() {
625                                backup_files.push((path, modified));
626                            }
627                        }
628                    }
629                }
630            }
631        }
632
633        // Sort by modification time (newest first)
634        backup_files.sort_by(|a, b| b.1.cmp(&a.1));
635
636        // Remove old backups
637        let mut removed_count = 0;
638        for (path, _) in backup_files.iter().skip(keep_count) {
639            if fs::remove_file(path).await.is_ok() {
640                removed_count += 1;
641            }
642        }
643
644        Ok(removed_count)
645    }
646
647    /// Advanced sync with additional configuration options
648    #[allow(clippy::too_many_arguments)]
649    pub async fn sync_to_directory_advanced(
650        &self,
651        target_dir: &str,
652        strategy: &str,
653        workspace_ids: Option<&str>,
654        structure: &str,
655        include_meta: bool,
656        force: bool,
657        filename_pattern: &str,
658        exclude_pattern: Option<&str>,
659        dry_run: bool,
660    ) -> Result<SyncResult> {
661        let target_path = PathBuf::from(target_dir);
662
663        // Ensure target directory exists (unless dry run)
664        if !dry_run && !target_path.exists() {
665            fs::create_dir_all(&target_path)
666                .await
667                .map_err(|e| Error::generic(format!("Failed to create target directory: {}", e)))?;
668        }
669
670        // Parse strategy
671        let sync_strategy = match strategy {
672            "full" => SyncStrategy::Full,
673            "incremental" => SyncStrategy::Incremental,
674            "selective" => {
675                if let Some(ids) = workspace_ids {
676                    let workspace_list = ids.split(',').map(|s| s.trim().to_string()).collect();
677                    SyncStrategy::Selective(workspace_list)
678                } else {
679                    return Err(Error::generic("Selective strategy requires workspace IDs"));
680                }
681            }
682            _ => return Err(Error::generic(format!("Unknown sync strategy: {}", strategy))),
683        };
684
685        // Parse directory structure
686        let dir_structure = match structure {
687            "flat" => DirectoryStructure::Flat,
688            "nested" => DirectoryStructure::Nested,
689            "grouped" => DirectoryStructure::Grouped,
690            _ => return Err(Error::generic(format!("Unknown directory structure: {}", structure))),
691        };
692
693        // Get workspaces to sync based on strategy
694        let mut workspaces_to_sync = self.get_workspaces_for_sync(&sync_strategy).await?;
695
696        // Apply exclusion filter if provided
697        if let Some(exclude) = exclude_pattern {
698            if let Ok(regex) = regex::Regex::new(exclude) {
699                workspaces_to_sync.retain(|id| !regex.is_match(id));
700            }
701        }
702
703        let mut result = SyncResult {
704            synced_workspaces: 0,
705            synced_requests: 0,
706            files_created: 0,
707            target_dir: target_path.clone(),
708        };
709
710        // Sync each workspace
711        for workspace_id in workspaces_to_sync {
712            if let Ok(workspace) = self.load_workspace(&workspace_id).await {
713                let workspace_result = self
714                    .sync_workspace_to_directory_advanced(
715                        &workspace,
716                        &target_path,
717                        &dir_structure,
718                        include_meta,
719                        force,
720                        filename_pattern,
721                        dry_run,
722                    )
723                    .await?;
724
725                result.synced_workspaces += 1;
726                result.synced_requests += workspace_result.requests_count;
727                result.files_created += workspace_result.files_created;
728            }
729        }
730
731        // Update sync state for incremental syncs
732        if let SyncStrategy::Incremental = sync_strategy {
733            let new_sync_state = SyncState {
734                last_sync_timestamp: Utc::now(),
735            };
736            if let Err(e) = self.save_sync_state(&new_sync_state).await {
737                tracing::warn!("Failed to save sync state: {}", e);
738            }
739        }
740
741        Ok(result)
742    }
743
    /// Advanced sync for a single workspace with custom filename patterns
    ///
    /// Writes the workspace into `target_dir` in one of three layouts:
    /// - `Flat`: a single `<pattern>.yaml` containing the full export
    /// - `Nested`: `<pattern>/workspace.yaml` plus a `requests/` directory
    /// - `Grouped`: shared `workspaces/` and `requests/` directories
    ///
    /// Existing files are only overwritten when `force` is set. With
    /// `dry_run`, nothing is written but `files_created` still counts the
    /// files that would have been created.
    ///
    /// NOTE(review): the `include_meta` branch at the bottom increments
    /// `files_created` only when NOT a dry run, unlike the per-layout
    /// counting above which also counts during dry runs — confirm whether
    /// dry-run reports are meant to include the metadata file.
    #[allow(clippy::too_many_arguments)]
    async fn sync_workspace_to_directory_advanced(
        &self,
        workspace: &Workspace,
        target_dir: &Path,
        structure: &DirectoryStructure,
        include_meta: bool,
        force: bool,
        filename_pattern: &str,
        dry_run: bool,
    ) -> Result<WorkspaceSyncResult> {
        let mut result = WorkspaceSyncResult {
            requests_count: 0,
            files_created: 0,
        };

        match structure {
            DirectoryStructure::Flat => {
                // Single self-contained export file; requests live inside it,
                // so requests_count is not incremented in this branch.
                let export = self.create_workspace_export(workspace).await?;
                let filename = self.generate_filename(filename_pattern, workspace);
                let file_path = target_dir.join(format!("{}.yaml", filename));

                if force || !file_path.exists() {
                    if !dry_run {
                        let content = serde_yaml::to_string(&export).map_err(|e| {
                            Error::generic(format!("Failed to serialize workspace: {}", e))
                        })?;

                        fs::write(&file_path, content).await.map_err(|e| {
                            Error::generic(format!("Failed to write workspace file: {}", e))
                        })?;
                    }
                    result.files_created += 1;
                }
            }

            DirectoryStructure::Nested => {
                // One directory per workspace, named from the pattern.
                let workspace_dir =
                    target_dir.join(self.generate_filename(filename_pattern, workspace));
                if !dry_run && !workspace_dir.exists() {
                    fs::create_dir_all(&workspace_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create workspace directory: {}", e))
                    })?;
                }

                // Export main workspace file
                let export = self.create_workspace_export(workspace).await?;
                let workspace_file = workspace_dir.join("workspace.yaml");

                if force || !workspace_file.exists() {
                    if !dry_run {
                        let content = serde_yaml::to_string(&export).map_err(|e| {
                            Error::generic(format!("Failed to serialize workspace: {}", e))
                        })?;

                        fs::write(&workspace_file, content).await.map_err(|e| {
                            Error::generic(format!("Failed to write workspace file: {}", e))
                        })?;
                    }
                    result.files_created += 1;
                }

                // Export individual requests
                let requests_dir = workspace_dir.join("requests");
                if !dry_run && !requests_dir.exists() {
                    fs::create_dir_all(&requests_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create requests directory: {}", e))
                    })?;
                }

                result.requests_count += self
                    .export_workspace_requests_advanced(workspace, &requests_dir, force, dry_run)
                    .await?;
            }

            DirectoryStructure::Grouped => {
                // Create grouped directories shared by all synced workspaces
                let requests_dir = target_dir.join("requests");
                let workspaces_dir = target_dir.join("workspaces");

                if !dry_run {
                    for dir in [&requests_dir, &workspaces_dir] {
                        if !dir.exists() {
                            fs::create_dir_all(dir).await.map_err(|e| {
                                Error::generic(format!("Failed to create directory: {}", e))
                            })?;
                        }
                    }
                }

                // Export workspace metadata
                let export = self.create_workspace_export(workspace).await?;
                let filename = self.generate_filename(filename_pattern, workspace);
                let workspace_file = workspaces_dir.join(format!("{}.yaml", filename));

                if force || !workspace_file.exists() {
                    if !dry_run {
                        let content = serde_yaml::to_string(&export).map_err(|e| {
                            Error::generic(format!("Failed to serialize workspace: {}", e))
                        })?;

                        fs::write(&workspace_file, content).await.map_err(|e| {
                            Error::generic(format!("Failed to write workspace file: {}", e))
                        })?;
                    }
                    result.files_created += 1;
                }

                // Export requests to requests directory
                result.requests_count += self
                    .export_workspace_requests_grouped_advanced(
                        workspace,
                        &requests_dir,
                        force,
                        dry_run,
                    )
                    .await?;
            }
        }

        // Create metadata file if requested
        if include_meta && !dry_run {
            self.create_metadata_file(workspace, target_dir, structure).await?;
            result.files_created += 1;
        }

        Ok(result)
    }
873
874    /// Generate filename from pattern
875    fn generate_filename(&self, pattern: &str, workspace: &Workspace) -> String {
876        let timestamp = Utc::now().format("%Y%m%d_%H%M%S");
877
878        pattern
879            .replace("{name}", &self.sanitize_filename(&workspace.name))
880            .replace("{id}", &workspace.id)
881            .replace("{timestamp}", &timestamp.to_string())
882    }
883
884    /// Advanced request export with dry run support
885    async fn export_workspace_requests_advanced(
886        &self,
887        workspace: &Workspace,
888        requests_dir: &Path,
889        force: bool,
890        dry_run: bool,
891    ) -> Result<usize> {
892        let mut count = 0;
893
894        for request in &workspace.requests {
895            let file_path =
896                requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
897            if force || !file_path.exists() {
898                if !dry_run {
899                    let exported = self.convert_request_to_exported(request, "");
900                    let content = serde_yaml::to_string(&exported).map_err(|e| {
901                        Error::generic(format!("Failed to serialize request: {}", e))
902                    })?;
903
904                    fs::write(&file_path, content).await.map_err(|e| {
905                        Error::generic(format!("Failed to write request file: {}", e))
906                    })?;
907                }
908                count += 1;
909            }
910        }
911
912        // Export folder requests
913        for folder in &workspace.folders {
914            count += self
915                .export_folder_requests_advanced(folder, requests_dir, force, &folder.name, dry_run)
916                .await?;
917        }
918
919        Ok(count)
920    }
921
922    /// Advanced folder request export
923    async fn export_folder_requests_advanced(
924        &self,
925        folder: &Folder,
926        requests_dir: &Path,
927        force: bool,
928        folder_path: &str,
929        dry_run: bool,
930    ) -> Result<usize> {
931        use std::collections::VecDeque;
932
933        let mut count = 0;
934        let mut queue = VecDeque::new();
935
936        // Start with the root folder
937        queue.push_back((folder, folder_path.to_string()));
938
939        while let Some((current_folder, current_path)) = queue.pop_front() {
940            // Export requests in current folder
941            for request in &current_folder.requests {
942                let file_path =
943                    requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
944                if force || !file_path.exists() {
945                    if !dry_run {
946                        let exported = self.convert_request_to_exported(request, &current_path);
947                        let content = serde_yaml::to_string(&exported).map_err(|e| {
948                            Error::generic(format!("Failed to serialize request: {}", e))
949                        })?;
950
951                        fs::write(&file_path, content).await.map_err(|e| {
952                            Error::generic(format!("Failed to write request file: {}", e))
953                        })?;
954                    }
955                    count += 1;
956                }
957            }
958
959            // Add subfolders to queue with updated paths
960            for subfolder in &current_folder.folders {
961                let subfolder_path = if current_path.is_empty() {
962                    subfolder.name.clone()
963                } else {
964                    format!("{}/{}", current_path, subfolder.name)
965                };
966                queue.push_back((subfolder, subfolder_path));
967            }
968        }
969
970        Ok(count)
971    }
972
973    /// Advanced grouped request export
974    async fn export_workspace_requests_grouped_advanced(
975        &self,
976        workspace: &Workspace,
977        requests_dir: &Path,
978        force: bool,
979        dry_run: bool,
980    ) -> Result<usize> {
981        let mut count = 0;
982        let workspace_requests_dir = requests_dir.join(self.sanitize_filename(&workspace.name));
983
984        if !dry_run && !workspace_requests_dir.exists() {
985            fs::create_dir_all(&workspace_requests_dir).await.map_err(|e| {
986                Error::generic(format!("Failed to create workspace requests directory: {}", e))
987            })?;
988        }
989
990        count += self
991            .export_workspace_requests_advanced(workspace, &workspace_requests_dir, force, dry_run)
992            .await?;
993        Ok(count)
994    }
995
996    /// Sync workspaces to an external directory for Git/Dropbox integration
997    pub async fn sync_to_directory(
998        &self,
999        target_dir: &str,
1000        strategy: &str,
1001        workspace_ids: Option<&str>,
1002        structure: &str,
1003        include_meta: bool,
1004        force: bool,
1005    ) -> Result<SyncResult> {
1006        let target_path = PathBuf::from(target_dir);
1007
1008        // Ensure target directory exists
1009        if !target_path.exists() {
1010            fs::create_dir_all(&target_path)
1011                .await
1012                .map_err(|e| Error::generic(format!("Failed to create target directory: {}", e)))?;
1013        }
1014
1015        // Parse strategy
1016        let sync_strategy = match strategy {
1017            "full" => SyncStrategy::Full,
1018            "incremental" => SyncStrategy::Incremental,
1019            "selective" => {
1020                if let Some(ids) = workspace_ids {
1021                    let workspace_list = ids.split(',').map(|s| s.trim().to_string()).collect();
1022                    SyncStrategy::Selective(workspace_list)
1023                } else {
1024                    return Err(Error::generic("Selective strategy requires workspace IDs"));
1025                }
1026            }
1027            _ => return Err(Error::generic(format!("Unknown sync strategy: {}", strategy))),
1028        };
1029
1030        // Parse directory structure
1031        let dir_structure = match structure {
1032            "flat" => DirectoryStructure::Flat,
1033            "nested" => DirectoryStructure::Nested,
1034            "grouped" => DirectoryStructure::Grouped,
1035            _ => return Err(Error::generic(format!("Unknown directory structure: {}", structure))),
1036        };
1037
1038        // Get workspaces to sync based on strategy
1039        let workspaces_to_sync = self.get_workspaces_for_sync(&sync_strategy).await?;
1040
1041        let mut result = SyncResult {
1042            synced_workspaces: 0,
1043            synced_requests: 0,
1044            files_created: 0,
1045            target_dir: target_path.clone(),
1046        };
1047
1048        // Sync each workspace
1049        for workspace_id in workspaces_to_sync {
1050            if let Ok(workspace) = self.load_workspace(&workspace_id).await {
1051                let workspace_result = self
1052                    .sync_workspace_to_directory(
1053                        &workspace,
1054                        &target_path,
1055                        &dir_structure,
1056                        include_meta,
1057                        force,
1058                    )
1059                    .await?;
1060
1061                result.synced_workspaces += 1;
1062                result.synced_requests += workspace_result.requests_count;
1063                result.files_created += workspace_result.files_created;
1064            }
1065        }
1066
1067        // Update sync state for incremental syncs
1068        if let SyncStrategy::Incremental = sync_strategy {
1069            let new_sync_state = SyncState {
1070                last_sync_timestamp: Utc::now(),
1071            };
1072            if let Err(e) = self.save_sync_state(&new_sync_state).await {
1073                tracing::warn!("Failed to save sync state: {}", e);
1074            }
1075        }
1076
1077        Ok(result)
1078    }
1079
1080    /// Get list of workspace IDs to sync based on strategy
1081    async fn get_workspaces_for_sync(&self, strategy: &SyncStrategy) -> Result<Vec<String>> {
1082        match strategy {
1083            SyncStrategy::Full => self.list_workspace_ids().await,
1084            SyncStrategy::Incremental => {
1085                // Load sync state to get last sync timestamp
1086                let sync_state = self.load_sync_state().await?;
1087                let last_sync = sync_state.last_sync_timestamp;
1088
1089                // Get all workspace IDs
1090                let all_workspace_ids = self.list_workspace_ids().await?;
1091
1092                // Filter workspaces that have been modified since last sync
1093                let mut modified_workspaces = Vec::new();
1094                for workspace_id in all_workspace_ids {
1095                    let file_path = self.workspace_file_path(&workspace_id);
1096                    if let Ok(metadata) = fs::metadata(&file_path).await {
1097                        if let Ok(modified_time) = metadata.modified() {
1098                            let modified_datetime = DateTime::<Utc>::from(modified_time);
1099                            if modified_datetime > last_sync {
1100                                modified_workspaces.push(workspace_id);
1101                            }
1102                        }
1103                    }
1104                }
1105
1106                Ok(modified_workspaces)
1107            }
1108            SyncStrategy::Selective(ids) => Ok(ids.clone()),
1109        }
1110    }
1111
    /// Sync a single workspace to the target directory.
    ///
    /// Unlike the `*_advanced` export path, this variant has no dry-run mode:
    /// every branch performs real filesystem writes. Layout depends on
    /// `structure`:
    /// * `Flat` - one `<name>.yaml` per workspace directly in `target_dir`
    /// * `Nested` - `<name>/workspace.yaml` plus a `<name>/requests/` tree
    /// * `Grouped` - shared `workspaces/` and `requests/` directories
    ///
    /// Existing files are only overwritten when `force` is set. Returns the
    /// number of request files exported and files created.
    async fn sync_workspace_to_directory(
        &self,
        workspace: &Workspace,
        target_dir: &Path,
        structure: &DirectoryStructure,
        include_meta: bool,
        force: bool,
    ) -> Result<WorkspaceSyncResult> {
        let mut result = WorkspaceSyncResult {
            requests_count: 0,
            files_created: 0,
        };

        match structure {
            DirectoryStructure::Flat => {
                // Single YAML file named after the sanitized workspace name.
                let export = self.create_workspace_export(workspace).await?;
                let file_path =
                    target_dir.join(format!("{}.yaml", self.sanitize_filename(&workspace.name)));

                if force || !file_path.exists() {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::generic(format!("Failed to serialize workspace: {}", e))
                    })?;

                    fs::write(&file_path, content).await.map_err(|e| {
                        Error::generic(format!("Failed to write workspace file: {}", e))
                    })?;

                    result.files_created += 1;
                }
            }

            DirectoryStructure::Nested => {
                // One directory per workspace.
                let workspace_dir = target_dir.join(self.sanitize_filename(&workspace.name));
                if !workspace_dir.exists() {
                    fs::create_dir_all(&workspace_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create workspace directory: {}", e))
                    })?;
                }

                // Export main workspace file
                let export = self.create_workspace_export(workspace).await?;
                let workspace_file = workspace_dir.join("workspace.yaml");

                if force || !workspace_file.exists() {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::generic(format!("Failed to serialize workspace: {}", e))
                    })?;

                    fs::write(&workspace_file, content).await.map_err(|e| {
                        Error::generic(format!("Failed to write workspace file: {}", e))
                    })?;

                    result.files_created += 1;
                }

                // Export individual requests
                let requests_dir = workspace_dir.join("requests");
                if !requests_dir.exists() {
                    fs::create_dir_all(&requests_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create requests directory: {}", e))
                    })?;
                }

                result.requests_count +=
                    self.export_workspace_requests(workspace, &requests_dir, force).await?;
            }

            DirectoryStructure::Grouped => {
                // Create grouped directories
                let requests_dir = target_dir.join("requests");
                let workspaces_dir = target_dir.join("workspaces");

                for dir in [&requests_dir, &workspaces_dir] {
                    if !dir.exists() {
                        fs::create_dir_all(dir).await.map_err(|e| {
                            Error::generic(format!("Failed to create directory: {}", e))
                        })?;
                    }
                }

                // Export workspace metadata
                let export = self.create_workspace_export(workspace).await?;
                let workspace_file = workspaces_dir
                    .join(format!("{}.yaml", self.sanitize_filename(&workspace.name)));

                if force || !workspace_file.exists() {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::generic(format!("Failed to serialize workspace: {}", e))
                    })?;

                    fs::write(&workspace_file, content).await.map_err(|e| {
                        Error::generic(format!("Failed to write workspace file: {}", e))
                    })?;

                    result.files_created += 1;
                }

                // Export requests to requests directory
                result.requests_count +=
                    self.export_workspace_requests_grouped(workspace, &requests_dir, force).await?;
            }
        }

        // Create metadata file if requested; counted as created even when the
        // file may already exist (create_metadata_file writes unconditionally).
        if include_meta {
            self.create_metadata_file(workspace, target_dir, structure).await?;
            result.files_created += 1;
        }

        Ok(result)
    }
1225
1226    /// Create a Git-friendly workspace export
1227    async fn create_workspace_export(&self, workspace: &Workspace) -> Result<WorkspaceExport> {
1228        let mut requests = HashMap::new();
1229
1230        // Collect all requests from workspace
1231        self.collect_requests_from_workspace(workspace, &mut requests, "".to_string());
1232
1233        let metadata = WorkspaceMetadata {
1234            id: workspace.id.clone(),
1235            name: workspace.name.clone(),
1236            description: workspace.description.clone(),
1237            exported_at: Utc::now(),
1238            request_count: requests.len(),
1239            folder_count: workspace.folders.len(),
1240        };
1241
1242        let config = WorkspaceConfig {
1243            auth: workspace.config.auth.as_ref().and_then(AuthConfig::from_config_auth),
1244            base_url: workspace.config.base_url.clone(),
1245            variables: workspace.config.global_environment.variables.clone(),
1246            reality_level: workspace.config.reality_level,
1247            ai_mode: None, // Default to None for exported workspaces
1248        };
1249
1250        Ok(WorkspaceExport {
1251            metadata,
1252            config,
1253            requests,
1254        })
1255    }
1256
1257    /// Collect all requests from workspace into a hashmap
1258    fn collect_requests_from_workspace(
1259        &self,
1260        workspace: &Workspace,
1261        requests: &mut HashMap<String, ExportedRequest>,
1262        folder_path: String,
1263    ) {
1264        // Add root-level requests
1265        for request in &workspace.requests {
1266            let exported = self.convert_request_to_exported(request, &folder_path);
1267            requests.insert(request.id.clone(), exported);
1268        }
1269
1270        // Add folder requests recursively
1271        for folder in &workspace.folders {
1272            let current_path = if folder_path.is_empty() {
1273                folder.name.clone()
1274            } else {
1275                format!("{}/{}", folder_path, folder.name)
1276            };
1277
1278            for request in &folder.requests {
1279                let exported = self.convert_request_to_exported(request, &current_path);
1280                requests.insert(request.id.clone(), exported);
1281            }
1282
1283            // Recursively process subfolders
1284            self.collect_requests_from_folders(folder, requests, current_path);
1285        }
1286    }
1287
1288    /// Recursively collect requests from folders
1289    fn collect_requests_from_folders(
1290        &self,
1291        folder: &Folder,
1292        requests: &mut HashMap<String, ExportedRequest>,
1293        folder_path: String,
1294    ) {
1295        for subfolder in &folder.folders {
1296            let current_path = format!("{}/{}", folder_path, subfolder.name);
1297
1298            for request in &subfolder.requests {
1299                let exported = self.convert_request_to_exported(request, &current_path);
1300                requests.insert(request.id.clone(), exported);
1301            }
1302
1303            self.collect_requests_from_folders(subfolder, requests, current_path);
1304        }
1305    }
1306
1307    /// Convert a MockRequest to ExportedRequest
1308    fn convert_request_to_exported(
1309        &self,
1310        request: &MockRequest,
1311        folder_path: &str,
1312    ) -> ExportedRequest {
1313        ExportedRequest {
1314            id: request.id.clone(),
1315            name: request.name.clone(),
1316            method: format!("{:?}", request.method),
1317            path: request.path.clone(),
1318            folder_path: folder_path.to_string(),
1319            headers: request.headers.clone(),
1320            query_params: request.query_params.clone(),
1321            body: request.body.clone(),
1322            response_status: Some(request.response.status_code),
1323            response_body: request.response.body.clone(),
1324            response_headers: request.response.headers.clone(),
1325            delay: request.response.delay_ms,
1326        }
1327    }
1328
1329    /// Export workspace with encryption for secure sharing
1330    pub async fn export_workspace_encrypted(
1331        &self,
1332        workspace: &Workspace,
1333        output_path: &Path,
1334    ) -> Result<EncryptedExportResult> {
1335        // Check if encryption is enabled for this workspace
1336        if !workspace.config.auto_encryption.enabled {
1337            return Err(Error::generic("Encryption is not enabled for this workspace. Enable encryption in workspace settings first."));
1338        }
1339
1340        // Get auto-encryption config
1341        let encryption_config = workspace.config.auto_encryption.clone();
1342        let processor = AutoEncryptionProcessor::new(&workspace.id, encryption_config);
1343
1344        // Create filtered workspace copy for export
1345        let mut filtered_workspace = workspace.to_filtered_for_sync();
1346
1347        // Apply automatic encryption to the filtered workspace
1348        self.encrypt_workspace_data(&mut filtered_workspace, &processor)?;
1349
1350        // Create standard export
1351        let export = self.create_workspace_export(&filtered_workspace).await?;
1352
1353        // Encrypt the entire export
1354        let export_json = serde_json::to_string_pretty(&export)
1355            .map_err(|e| Error::generic(format!("Failed to serialize export: {}", e)))?;
1356
1357        let encrypted_data = utils::encrypt_for_workspace(&workspace.id, &export_json)?;
1358
1359        // Generate backup key for sharing
1360        let key_manager = WorkspaceKeyManager::new();
1361        let backup_key = key_manager.generate_workspace_key_backup(&workspace.id)?;
1362
1363        // Write encrypted data to file
1364        fs::write(output_path, &encrypted_data)
1365            .await
1366            .map_err(|e| Error::generic(format!("Failed to write encrypted export: {}", e)))?;
1367
1368        Ok(EncryptedExportResult {
1369            output_path: output_path.to_path_buf(),
1370            backup_key,
1371            exported_at: Utc::now(),
1372            workspace_name: workspace.name.clone(),
1373            encryption_enabled: true,
1374        })
1375    }
1376
1377    /// Import encrypted workspace
1378    pub async fn import_workspace_encrypted(
1379        &self,
1380        encrypted_file: &Path,
1381        _workspace_name: Option<&str>,
1382        _registry: &mut WorkspaceRegistry,
1383    ) -> Result<EncryptedImportResult> {
1384        // Read encrypted data
1385        let _encrypted_data = fs::read_to_string(encrypted_file)
1386            .await
1387            .map_err(|e| Error::generic(format!("Failed to read encrypted file: {}", e)))?;
1388
1389        // For import, we need the workspace ID and backup key
1390        // This would typically be provided by the user or extracted from metadata
1391        Err(Error::generic("Encrypted import requires workspace ID and backup key. Use import_workspace_encrypted_with_key instead."))
1392    }
1393
1394    /// Import encrypted workspace with specific workspace ID and backup key
1395    pub async fn import_workspace_encrypted_with_key(
1396        &self,
1397        encrypted_file: &Path,
1398        workspace_id: &str,
1399        backup_key: &str,
1400        workspace_name: Option<&str>,
1401        registry: &mut WorkspaceRegistry,
1402    ) -> Result<EncryptedImportResult> {
1403        // Ensure workspace key exists or restore from backup
1404        let key_manager = WorkspaceKeyManager::new();
1405        if !key_manager.has_workspace_key(workspace_id) {
1406            key_manager.restore_workspace_key_from_backup(workspace_id, backup_key)?;
1407        }
1408
1409        // Read and decrypt the data
1410        let encrypted_data = fs::read_to_string(encrypted_file)
1411            .await
1412            .map_err(|e| Error::generic(format!("Failed to read encrypted file: {}", e)))?;
1413
1414        let decrypted_json = utils::decrypt_for_workspace(workspace_id, &encrypted_data)?;
1415
1416        // Parse the export data
1417        let export: WorkspaceExport = serde_json::from_str(&decrypted_json)
1418            .map_err(|e| Error::generic(format!("Failed to parse decrypted export: {}", e)))?;
1419
1420        // Convert export to workspace
1421        let workspace = self.convert_export_to_workspace(&export, workspace_name)?;
1422
1423        // Add to registry
1424        let imported_id = registry.add_workspace(workspace)?;
1425
1426        Ok(EncryptedImportResult {
1427            workspace_id: imported_id,
1428            workspace_name: export.metadata.name.clone(),
1429            imported_at: Utc::now(),
1430            request_count: export.requests.len(),
1431            encryption_restored: true,
1432        })
1433    }
1434
1435    /// Apply encryption to workspace data before export
1436    fn encrypt_workspace_data(
1437        &self,
1438        workspace: &mut Workspace,
1439        processor: &AutoEncryptionProcessor,
1440    ) -> Result<()> {
1441        // Encrypt environment variables
1442        for env in &mut workspace.config.environments {
1443            processor.process_env_vars(&mut env.variables)?;
1444        }
1445        processor.process_env_vars(&mut workspace.config.global_environment.variables)?;
1446
1447        // Note: Headers and request bodies would be encrypted here when implemented
1448        // For now, we rely on the filtering done by to_filtered_for_sync()
1449
1450        Ok(())
1451    }
1452
1453    /// Convert WorkspaceExport back to Workspace
1454    fn convert_export_to_workspace(
1455        &self,
1456        export: &WorkspaceExport,
1457        name_override: Option<&str>,
1458    ) -> Result<Workspace> {
1459        let mut workspace =
1460            Workspace::new(name_override.unwrap_or(&export.metadata.name).to_string());
1461
1462        // Set description if provided
1463        if let Some(desc) = &export.metadata.description {
1464            workspace.description = Some(desc.clone());
1465        }
1466
1467        // Restore requests from export
1468        for exported_request in export.requests.values() {
1469            // Convert exported request back to MockRequest
1470            let method = self.parse_http_method(&exported_request.method)?;
1471            let mut request = MockRequest::new(
1472                method,
1473                exported_request.path.clone(),
1474                exported_request.name.clone(),
1475            );
1476
1477            // Set additional properties
1478            if let Some(status) = exported_request.response_status {
1479                request.response.status_code = status;
1480            }
1481
1482            // Set other response properties if available
1483            if let Some(body) = &exported_request.response_body {
1484                request.response.body = Some(body.clone());
1485            }
1486            request.response.headers = exported_request.response_headers.clone();
1487            if let Some(delay) = exported_request.delay {
1488                request.response.delay_ms = Some(delay);
1489            }
1490
1491            workspace.add_request(request)?;
1492        }
1493
1494        // Restore configuration
1495        workspace.config.global_environment.variables = export.config.variables.clone();
1496
1497        Ok(workspace)
1498    }
1499
1500    /// Parse HTTP method string to enum
1501    fn parse_http_method(&self, method_str: &str) -> Result<crate::routing::HttpMethod> {
1502        match method_str.to_uppercase().as_str() {
1503            "GET" => Ok(crate::routing::HttpMethod::GET),
1504            "POST" => Ok(crate::routing::HttpMethod::POST),
1505            "PUT" => Ok(crate::routing::HttpMethod::PUT),
1506            "DELETE" => Ok(crate::routing::HttpMethod::DELETE),
1507            "PATCH" => Ok(crate::routing::HttpMethod::PATCH),
1508            "HEAD" => Ok(crate::routing::HttpMethod::HEAD),
1509            "OPTIONS" => Ok(crate::routing::HttpMethod::OPTIONS),
1510            _ => Err(Error::generic(format!("Unknown HTTP method: {}", method_str))),
1511        }
1512    }
1513
1514    /// Check workspace for unencrypted sensitive data before export
1515    pub fn check_workspace_for_unencrypted_secrets(
1516        &self,
1517        workspace: &Workspace,
1518    ) -> Result<SecurityCheckResult> {
1519        let mut warnings = Vec::new();
1520        let errors = Vec::new();
1521
1522        // Check environment variables
1523        self.check_environment_variables(workspace, &mut warnings)?;
1524
1525        // Check for sensitive patterns in request data (when implemented)
1526        // This would check headers, bodies, etc.
1527
1528        let has_warnings = !warnings.is_empty();
1529        let has_errors = !errors.is_empty();
1530
1531        Ok(SecurityCheckResult {
1532            workspace_id: workspace.id.clone(),
1533            workspace_name: workspace.name.clone(),
1534            warnings,
1535            errors,
1536            is_secure: !has_warnings && !has_errors,
1537            recommended_actions: self.generate_security_recommendations(has_warnings, has_errors),
1538        })
1539    }
1540
1541    /// Check environment variables for sensitive data
1542    fn check_environment_variables(
1543        &self,
1544        workspace: &Workspace,
1545        warnings: &mut Vec<SecurityWarning>,
1546    ) -> Result<()> {
1547        let sensitive_keys = [
1548            "password",
1549            "secret",
1550            "key",
1551            "token",
1552            "credential",
1553            "api_key",
1554            "apikey",
1555            "api_secret",
1556            "db_password",
1557            "database_password",
1558            "aws_secret_key",
1559            "aws_session_token",
1560            "private_key",
1561            "authorization",
1562            "auth_token",
1563            "access_token",
1564            "refresh_token",
1565            "cookie",
1566            "session",
1567            "csrf",
1568            "jwt",
1569            "bearer",
1570        ];
1571
1572        // Check global environment
1573        for (key, value) in &workspace.config.global_environment.variables {
1574            if self.is_potentially_sensitive(key, value, &sensitive_keys) {
1575                warnings.push(SecurityWarning {
1576                    field_type: "environment_variable".to_string(),
1577                    field_name: key.clone(),
1578                    location: "global_environment".to_string(),
1579                    severity: SecuritySeverity::High,
1580                    message: format!(
1581                        "Potentially sensitive environment variable '{}' detected",
1582                        key
1583                    ),
1584                    suggestion: "Consider encrypting this value or excluding it from exports"
1585                        .to_string(),
1586                });
1587            }
1588        }
1589
1590        // Check workspace environments
1591        for env in &workspace.config.environments {
1592            for (key, value) in &env.variables {
1593                if self.is_potentially_sensitive(key, value, &sensitive_keys) {
1594                    warnings.push(SecurityWarning {
1595                        field_type: "environment_variable".to_string(),
1596                        field_name: key.clone(),
1597                        location: format!("environment '{}'", env.name),
1598                        severity: SecuritySeverity::High,
1599                        message: format!("Potentially sensitive environment variable '{}' detected in environment '{}'", key, env.name),
1600                        suggestion: "Consider encrypting this value or excluding it from exports".to_string(),
1601                    });
1602                }
1603            }
1604        }
1605
1606        Ok(())
1607    }
1608
1609    /// Check if a key-value pair is potentially sensitive
1610    fn is_potentially_sensitive(&self, key: &str, value: &str, sensitive_keys: &[&str]) -> bool {
1611        let key_lower = key.to_lowercase();
1612
1613        // Check if key contains sensitive keywords
1614        if sensitive_keys.iter().any(|&sensitive| key_lower.contains(sensitive)) {
1615            return true;
1616        }
1617
1618        // Check for patterns that indicate sensitive data
1619        self.contains_sensitive_patterns(value)
1620    }
1621
1622    /// Check if value contains sensitive patterns
1623    fn contains_sensitive_patterns(&self, value: &str) -> bool {
1624        // Credit card pattern
1625        if CREDIT_CARD_PATTERN.is_match(value) {
1626            return true;
1627        }
1628
1629        // SSN pattern
1630        if SSN_PATTERN.is_match(value) {
1631            return true;
1632        }
1633
1634        // Long random-looking strings (potential API keys)
1635        if value.len() > 20 && value.chars().any(|c| c.is_alphanumeric()) {
1636            let alphanumeric_count = value.chars().filter(|c| c.is_alphanumeric()).count();
1637            let total_count = value.len();
1638            if alphanumeric_count as f64 / total_count as f64 > 0.8 {
1639                return true;
1640            }
1641        }
1642
1643        false
1644    }
1645
1646    /// Generate security recommendations based on findings
1647    fn generate_security_recommendations(
1648        &self,
1649        has_warnings: bool,
1650        has_errors: bool,
1651    ) -> Vec<String> {
1652        let mut recommendations = Vec::new();
1653
1654        if has_warnings || has_errors {
1655            recommendations.push("Enable encryption for this workspace in settings".to_string());
1656            recommendations.push("Review and encrypt sensitive environment variables".to_string());
1657            recommendations.push("Use encrypted export for sharing workspaces".to_string());
1658        }
1659
1660        if has_errors {
1661            recommendations
1662                .push("CRITICAL: Remove or encrypt sensitive data before proceeding".to_string());
1663        }
1664
1665        recommendations
1666    }
1667
    /// Export individual requests for nested structure
    ///
    /// Writes each top-level request as `<sanitized name>.yaml` directly in
    /// `requests_dir`, then exports requests from every folder (recursively,
    /// via `export_folder_requests`) into the same flat directory. Existing
    /// files are skipped unless `force` is set.
    ///
    /// Returns the number of files actually written.
    ///
    /// NOTE(review): filenames come from the request name alone, so two
    /// requests with the same name (e.g. in different folders) map to the
    /// same file path — confirm that skip/overwrite here is intended.
    async fn export_workspace_requests(
        &self,
        workspace: &Workspace,
        requests_dir: &Path,
        force: bool,
    ) -> Result<usize> {
        let mut count = 0;

        // Top-level requests get an empty folder path.
        for request in &workspace.requests {
            let file_path =
                requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
            if force || !file_path.exists() {
                let exported = self.convert_request_to_exported(request, "");
                let content = serde_yaml::to_string(&exported)
                    .map_err(|e| Error::generic(format!("Failed to serialize request: {}", e)))?;

                fs::write(&file_path, content)
                    .await
                    .map_err(|e| Error::generic(format!("Failed to write request file: {}", e)))?;

                count += 1;
            }
        }

        // Export folder requests
        for folder in &workspace.folders {
            count += self.export_folder_requests(folder, requests_dir, force, &folder.name).await?;
        }

        Ok(count)
    }
1700
1701    /// Export requests from folders recursively
1702    async fn export_folder_requests(
1703        &self,
1704        folder: &Folder,
1705        requests_dir: &Path,
1706        force: bool,
1707        folder_path: &str,
1708    ) -> Result<usize> {
1709        use std::collections::VecDeque;
1710
1711        let mut count = 0;
1712        let mut queue = VecDeque::new();
1713
1714        // Start with the root folder
1715        queue.push_back((folder, folder_path.to_string()));
1716
1717        while let Some((current_folder, current_path)) = queue.pop_front() {
1718            // Export requests in current folder
1719            for request in &current_folder.requests {
1720                let file_path =
1721                    requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
1722                if force || !file_path.exists() {
1723                    let exported = self.convert_request_to_exported(request, &current_path);
1724                    let content = serde_yaml::to_string(&exported).map_err(|e| {
1725                        Error::generic(format!("Failed to serialize request: {}", e))
1726                    })?;
1727
1728                    fs::write(&file_path, content).await.map_err(|e| {
1729                        Error::generic(format!("Failed to write request file: {}", e))
1730                    })?;
1731
1732                    count += 1;
1733                }
1734            }
1735
1736            // Add subfolders to queue with updated paths
1737            for subfolder in &current_folder.folders {
1738                let subfolder_path = if current_path.is_empty() {
1739                    subfolder.name.clone()
1740                } else {
1741                    format!("{}/{}", current_path, subfolder.name)
1742                };
1743                queue.push_back((subfolder, subfolder_path));
1744            }
1745        }
1746
1747        Ok(count)
1748    }
1749
1750    /// Export requests for grouped structure
1751    async fn export_workspace_requests_grouped(
1752        &self,
1753        workspace: &Workspace,
1754        requests_dir: &Path,
1755        force: bool,
1756    ) -> Result<usize> {
1757        let mut count = 0;
1758        let workspace_requests_dir = requests_dir.join(self.sanitize_filename(&workspace.name));
1759
1760        if !workspace_requests_dir.exists() {
1761            fs::create_dir_all(&workspace_requests_dir).await.map_err(|e| {
1762                Error::generic(format!("Failed to create workspace requests directory: {}", e))
1763            })?;
1764        }
1765
1766        count += self
1767            .export_workspace_requests(workspace, &workspace_requests_dir, force)
1768            .await?;
1769        Ok(count)
1770    }
1771
1772    /// Create metadata file for Git integration
1773    async fn create_metadata_file(
1774        &self,
1775        workspace: &Workspace,
1776        target_dir: &Path,
1777        structure: &DirectoryStructure,
1778    ) -> Result<()> {
1779        let metadata = serde_json::json!({
1780            "workspace_id": workspace.id,
1781            "workspace_name": workspace.name,
1782            "description": workspace.description,
1783            "exported_at": Utc::now().to_rfc3339(),
1784            "structure": format!("{:?}", structure),
1785            "version": "1.0",
1786            "source": "mockforge"
1787        });
1788
1789        let metadata_file = target_dir.join(".mockforge-meta.json");
1790        let content = serde_json::to_string_pretty(&metadata)
1791            .map_err(|e| Error::generic(format!("Failed to serialize metadata: {}", e)))?;
1792
1793        fs::write(&metadata_file, content)
1794            .await
1795            .map_err(|e| Error::generic(format!("Failed to write metadata file: {}", e)))?;
1796
1797        Ok(())
1798    }
1799
1800    /// Export a reality preset to a file
1801    ///
1802    /// Exports a reality preset (JSON or YAML format) to the specified path.
1803    /// The preset can be imported later to restore the reality configuration.
1804    pub async fn export_reality_preset(
1805        &self,
1806        preset: &crate::RealityPreset,
1807        output_path: &Path,
1808    ) -> Result<()> {
1809        self.ensure_workspace_dir().await?;
1810
1811        // Determine format from file extension
1812        let content = if output_path.extension().and_then(|s| s.to_str()) == Some("yaml")
1813            || output_path.extension().and_then(|s| s.to_str()) == Some("yml")
1814        {
1815            serde_yaml::to_string(preset)
1816                .map_err(|e| Error::generic(format!("Failed to serialize preset to YAML: {}", e)))?
1817        } else {
1818            serde_json::to_string_pretty(preset)
1819                .map_err(|e| Error::generic(format!("Failed to serialize preset to JSON: {}", e)))?
1820        };
1821
1822        // Ensure parent directory exists
1823        if let Some(parent) = output_path.parent() {
1824            fs::create_dir_all(parent)
1825                .await
1826                .map_err(|e| Error::generic(format!("Failed to create preset directory: {}", e)))?;
1827        }
1828
1829        fs::write(output_path, content)
1830            .await
1831            .map_err(|e| Error::generic(format!("Failed to write preset file: {}", e)))?;
1832
1833        Ok(())
1834    }
1835
1836    /// Import a reality preset from a file
1837    ///
1838    /// Loads a reality preset from a JSON or YAML file and returns it.
1839    /// The preset can then be applied to a workspace or the global configuration.
1840    pub async fn import_reality_preset(&self, input_path: &Path) -> Result<crate::RealityPreset> {
1841        let content = fs::read_to_string(input_path)
1842            .await
1843            .map_err(|e| Error::generic(format!("Failed to read preset file: {}", e)))?;
1844
1845        // Determine format from file extension
1846        let preset = if input_path
1847            .extension()
1848            .and_then(|s| s.to_str())
1849            .map(|ext| ext == "yaml" || ext == "yml")
1850            .unwrap_or(false)
1851        {
1852            serde_yaml::from_str(&content).map_err(|e| {
1853                Error::generic(format!("Failed to deserialize preset from YAML: {}", e))
1854            })?
1855        } else {
1856            serde_json::from_str(&content).map_err(|e| {
1857                Error::generic(format!("Failed to deserialize preset from JSON: {}", e))
1858            })?
1859        };
1860
1861        Ok(preset)
1862    }
1863
    /// Get the presets directory path
    ///
    /// Returns `<base_dir>/presets`. The directory is not created here; see
    /// `list_reality_presets`, which tolerates its absence.
    pub fn presets_dir(&self) -> PathBuf {
        self.base_dir.join("presets")
    }
1868
1869    /// List all available reality presets
1870    ///
1871    /// Scans the presets directory and returns a list of all preset files found.
1872    pub async fn list_reality_presets(&self) -> Result<Vec<PathBuf>> {
1873        let presets_dir = self.presets_dir();
1874        if !presets_dir.exists() {
1875            return Ok(vec![]);
1876        }
1877
1878        let mut presets = Vec::new();
1879        let mut entries = fs::read_dir(&presets_dir)
1880            .await
1881            .map_err(|e| Error::generic(format!("Failed to read presets directory: {}", e)))?;
1882
1883        while let Some(entry) = entries
1884            .next_entry()
1885            .await
1886            .map_err(|e| Error::generic(format!("Failed to read directory entry: {}", e)))?
1887        {
1888            let path = entry.path();
1889            if path.is_file() {
1890                let ext = path.extension().and_then(|s| s.to_str());
1891                if ext == Some("json") || ext == Some("yaml") || ext == Some("yml") {
1892                    presets.push(path);
1893                }
1894            }
1895        }
1896
1897        Ok(presets)
1898    }
1899
1900    /// Sanitize filename for filesystem compatibility
1901    fn sanitize_filename(&self, name: &str) -> String {
1902        name.chars()
1903            .map(|c| match c {
1904                '/' | '\\' | ':' | '*' | '?' | '"' | '<' | '>' | '|' => '_',
1905                c if c.is_whitespace() => '_',
1906                c => c,
1907            })
1908            .collect::<String>()
1909            .to_lowercase()
1910    }
1911}
1912
/// Result of syncing a single workspace
///
/// Internal bookkeeping produced per workspace during a sync operation.
/// NOTE(review): the consuming code is outside this view — presumably the
/// counts are aggregated into an overall sync summary; confirm at call site.
#[derive(Debug)]
struct WorkspaceSyncResult {
    /// Number of requests exported
    requests_count: usize,
    /// Number of files created
    files_created: usize,
}
1921
#[cfg(test)]
mod tests {
    use super::*;
    use crate::workspace::{MockRequest, Workspace};
    use crate::HttpMethod;
    use tempfile::TempDir;

    /// Round-trips one workspace through save/load and verifies that
    /// `list_workspace_ids` reports exactly that workspace.
    #[tokio::test]
    async fn test_workspace_persistence() {
        let temp_dir = TempDir::new().unwrap();
        let persistence = WorkspacePersistence::new(temp_dir.path());

        // Create a test workspace
        let mut workspace = Workspace::new("Test Workspace".to_string());
        let request =
            MockRequest::new(HttpMethod::GET, "/test".to_string(), "Test Request".to_string());
        workspace.add_request(request).unwrap();

        // Save workspace
        persistence.save_workspace(&workspace).await.unwrap();

        // Load workspace
        let loaded = persistence.load_workspace(&workspace.id).await.unwrap();
        assert_eq!(loaded.name, workspace.name);
        assert_eq!(loaded.requests.len(), 1);

        // List workspaces
        let ids = persistence.list_workspace_ids().await.unwrap();
        assert_eq!(ids.len(), 1);
        assert_eq!(ids[0], workspace.id);
    }

    /// Saves a two-workspace registry (with an active selection) and checks
    /// that both workspaces and the active-workspace marker survive reload.
    #[tokio::test]
    async fn test_registry_persistence() {
        let temp_dir = TempDir::new().unwrap();
        let persistence = WorkspacePersistence::new(temp_dir.path());

        let mut registry = WorkspaceRegistry::new();

        // Add workspaces
        let workspace1 = Workspace::new("Workspace 1".to_string());
        let workspace2 = Workspace::new("Workspace 2".to_string());

        let id1 = registry.add_workspace(workspace1).unwrap();
        let _id2 = registry.add_workspace(workspace2).unwrap();

        // Set active workspace
        registry.set_active_workspace(Some(id1.clone())).unwrap();

        // Save registry
        persistence.save_full_registry(&registry).await.unwrap();

        // Load registry
        let loaded_registry = persistence.load_full_registry().await.unwrap();

        assert_eq!(loaded_registry.get_workspaces().len(), 2);
        assert_eq!(loaded_registry.get_active_workspace().unwrap().name, "Workspace 1");
    }

    /// Backs up a workspace, deletes the original, restores from the backup
    /// file, and verifies the restored copy is loadable by its new id.
    #[tokio::test]
    async fn test_backup_and_restore() {
        let temp_dir = TempDir::new().unwrap();
        let backup_dir = temp_dir.path().join("backups");
        let persistence = WorkspacePersistence::new(temp_dir.path());

        // Create and save workspace
        let workspace = Workspace::new("Test Workspace".to_string());
        persistence.save_workspace(&workspace).await.unwrap();

        // Create backup
        let backup_path = persistence.backup_workspace(&workspace.id, &backup_dir).await.unwrap();
        assert!(backup_path.exists());

        // Delete original
        persistence.delete_workspace(&workspace.id).await.unwrap();
        assert!(persistence.load_workspace(&workspace.id).await.is_err());

        // Restore from backup
        let restored_id = persistence.restore_workspace(&backup_path).await.unwrap();

        // Verify restored workspace
        let restored = persistence.load_workspace(&restored_id).await.unwrap();
        assert_eq!(restored.name, "Test Workspace");
    }
}