mockforge_core/
workspace_persistence.rs

1//! Persistence layer for workspace configurations
2//!
3//! This module handles saving and loading workspace configurations to/from disk,
4//! enabling persistent storage of workspace hierarchies and configurations.
5
6use crate::config::AuthConfig as ConfigAuthConfig;
7use crate::encryption::{utils, AutoEncryptionProcessor, WorkspaceKeyManager};
8use crate::workspace::{EntityId, Folder, MockRequest, Workspace, WorkspaceRegistry};
9use crate::{Error, Result};
10use chrono::{DateTime, Utc};
11use once_cell::sync::Lazy;
12use regex::Regex;
13use serde::{Deserialize, Serialize};
14use std::collections::HashMap;
15use std::path::{Path, PathBuf};
16use tokio::fs;
17
18// Pre-compiled regex patterns for sensitive data detection
static CREDIT_CARD_PATTERN: Lazy<Regex> = Lazy::new(|| {
    // Matches 16 digits in four groups of four, each group optionally
    // separated by a single space or hyphen (e.g. "4111-1111-1111-1111").
    // NOTE(review): heuristic only — no Luhn check is performed, so any
    // 16-digit sequence in this shape will match (false positives possible).
    Regex::new(r"\b\d{4}[-\s]?\d{4}[-\s]?\d{4}[-\s]?\d{4}\b")
        .expect("CREDIT_CARD_PATTERN regex is valid")
});
23
static SSN_PATTERN: Lazy<Regex> = Lazy::new(|| {
    // Matches US SSN-shaped values: 3-2-4 digits, optionally separated by a
    // space or hyphen (e.g. "123-45-6789"). Heuristic — ordinary 9-digit
    // numbers in this grouping also match.
    Regex::new(r"\b\d{3}[-\s]?\d{2}[-\s]?\d{4}\b").expect("SSN_PATTERN regex is valid")
});
27
/// Workspace persistence manager
///
/// Stores each workspace as `<workspace-id>.yaml` directly under `base_dir`,
/// alongside two bookkeeping files: `registry.yaml` (registry metadata) and
/// `sync_state.yaml` (incremental-sync tracking).
#[derive(Debug)]
pub struct WorkspacePersistence {
    /// Base directory for workspace storage
    base_dir: PathBuf,
}
34
/// Serializable workspace registry for persistence
///
/// On-disk shape of `registry.yaml`. Only the workspace list and the
/// active-workspace pointer are stored here; each workspace's full content
/// lives in its own `<id>.yaml` file and is re-loaded from there.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct SerializableWorkspaceRegistry {
    /// Known workspaces; on load, only their IDs are used to locate the
    /// per-workspace files.
    workspaces: Vec<Workspace>,
    /// ID of the currently active workspace, if any.
    active_workspace: Option<EntityId>,
}
41
/// Sync state for tracking incremental syncs
///
/// Persisted to `sync_state.yaml` and refreshed after every incremental sync
/// run, so the next run can consider only workspaces modified since then.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SyncState {
    /// Last time a sync operation was performed
    pub last_sync_timestamp: DateTime<Utc>,
}
48
/// Sync strategy for workspace mirroring
///
/// Parsed from the strings `"full"`, `"incremental"`, and `"selective"` by
/// `sync_to_directory_advanced`; `"selective"` additionally requires a
/// comma-separated list of workspace IDs.
#[derive(Debug, Clone, PartialEq)]
pub enum SyncStrategy {
    /// Sync all workspaces completely
    Full,
    /// Sync only changed workspaces (based on modification time)
    Incremental,
    /// Sync only specified workspace IDs
    Selective(Vec<String>),
}
59
/// Directory structure for synced workspaces
///
/// Parsed from the strings `"flat"`, `"nested"`, and `"grouped"` by
/// `sync_to_directory_advanced`.
#[derive(Debug, Clone, PartialEq)]
pub enum DirectoryStructure {
    /// All workspaces in a flat structure: workspace-id.yaml
    Flat,
    /// Nested by workspace: workspaces/{name}/workspace.yaml + requests/
    Nested,
    /// Grouped by type: requests/, responses/, metadata/
    Grouped,
}
70
/// Result of a workspace sync operation
///
/// Aggregated counters returned by `sync_to_directory_advanced`. In dry-run
/// mode no files are written, but the counters still reflect the work that
/// *would* have been performed.
#[derive(Debug, Clone)]
pub struct SyncResult {
    /// Number of workspaces synced
    pub synced_workspaces: usize,
    /// Number of requests synced
    pub synced_requests: usize,
    /// Number of files created/updated
    pub files_created: usize,
    /// Target directory used
    pub target_dir: PathBuf,
}
83
/// Result of an encrypted workspace export
///
/// NOTE(review): `backup_key` should be handled as a secret — presumably it
/// is required to decrypt the export when importing on another device;
/// confirm against the import routine (not visible in this chunk).
#[derive(Debug, Clone)]
pub struct EncryptedExportResult {
    /// Path to the encrypted export file
    pub output_path: PathBuf,
    /// Backup key for importing on other devices
    pub backup_key: String,
    /// When the export was created
    pub exported_at: DateTime<Utc>,
    /// Name of the exported workspace
    pub workspace_name: String,
    /// Whether encryption was successfully applied
    pub encryption_enabled: bool,
}
98
/// Result of an encrypted workspace import
///
/// Counterpart of [`EncryptedExportResult`] for the import direction.
#[derive(Debug, Clone)]
pub struct EncryptedImportResult {
    /// ID of the imported workspace
    pub workspace_id: String,
    /// Name of the imported workspace
    pub workspace_name: String,
    /// When the import was completed
    pub imported_at: DateTime<Utc>,
    /// Number of requests imported
    pub request_count: usize,
    /// Whether encryption was successfully restored
    pub encryption_restored: bool,
}
113
/// Result of a security check for sensitive data
///
/// `warnings` holds non-blocking findings and `errors` holds critical ones.
/// NOTE(review): how `is_secure` relates to these lists is decided by the
/// checker, which is outside this chunk — confirm before relying on it.
#[derive(Debug, Clone)]
pub struct SecurityCheckResult {
    /// Workspace ID that was checked
    pub workspace_id: String,
    /// Workspace name that was checked
    pub workspace_name: String,
    /// Security warnings found
    pub warnings: Vec<SecurityWarning>,
    /// Security errors found (critical issues)
    pub errors: Vec<SecurityWarning>,
    /// Whether the workspace is considered secure
    pub is_secure: bool,
    /// Recommended actions to improve security
    pub recommended_actions: Vec<String>,
}
130
/// Security warning or error
///
/// A single finding produced by the sensitive-data scan; used for both the
/// `warnings` and `errors` lists of [`SecurityCheckResult`].
#[derive(Debug, Clone)]
pub struct SecurityWarning {
    /// Type of field that contains sensitive data
    pub field_type: String,
    /// Name of the field
    pub field_name: String,
    /// Location where the sensitive data was found
    pub location: String,
    /// Severity of the issue
    pub severity: SecuritySeverity,
    /// Human-readable message
    pub message: String,
    /// Suggestion for fixing the issue
    pub suggestion: String,
}
147
/// Severity levels for security issues
///
/// Variants are listed from least to most severe, but the enum does not
/// derive `Ord`/`PartialOrd`, so severity comparisons must be written
/// explicitly (e.g. via `match`).
#[derive(Debug, Clone, PartialEq)]
pub enum SecuritySeverity {
    /// Low risk - informational
    Low,
    /// Medium risk - should be reviewed
    Medium,
    /// High risk - requires attention
    High,
    /// Critical risk - blocks operations
    Critical,
}
160
/// Git-friendly workspace export format
///
/// Top-level document written when exporting a workspace to YAML.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceExport {
    /// Workspace metadata
    pub metadata: WorkspaceMetadata,
    /// Workspace configuration
    pub config: WorkspaceConfig,
    /// All requests organized by folder structure
    // NOTE(review): the map key is not defined in this chunk — presumably the
    // request ID (see `ExportedRequest::id`); confirm in the export builder.
    pub requests: HashMap<String, ExportedRequest>,
}
171
/// Metadata for exported workspace
///
/// Descriptive snapshot captured at export time and written alongside the
/// exported requests.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceMetadata {
    /// Original workspace ID
    pub id: String,
    /// Workspace name
    pub name: String,
    /// Workspace description
    pub description: Option<String>,
    /// Export timestamp
    pub exported_at: DateTime<Utc>,
    /// Total number of requests
    pub request_count: usize,
    /// Total number of folders
    pub folder_count: usize,
}
188
/// Simplified workspace configuration for export
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceConfig {
    /// Authentication configuration
    pub auth: Option<AuthConfig>,
    /// Base URL for requests
    pub base_url: Option<String>,
    /// Environment variables
    // NOTE(review): presumably substituted into requests at mock time —
    // confirm with the consumer of this struct.
    pub variables: HashMap<String, String>,
}
199
/// Authentication configuration for export
///
/// NOTE: `params` may carry secrets verbatim (JWT secret, OAuth2 client
/// secret, basic-auth passwords, API keys — see `from_config_auth`), so
/// exports containing this struct must be treated as sensitive.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuthConfig {
    /// Authentication type ("jwt", "oauth2", "basic", or "api_key")
    pub auth_type: String,
    /// Authentication parameters (scheme-specific key/value pairs)
    pub params: HashMap<String, String>,
}
208
209impl AuthConfig {
210    /// Convert from config AuthConfig to export AuthConfig
211    pub fn from_config_auth(config_auth: &ConfigAuthConfig) -> Option<Self> {
212        if let Some(jwt) = &config_auth.jwt {
213            let mut params = HashMap::new();
214            if let Some(secret) = &jwt.secret {
215                params.insert("secret".to_string(), secret.clone());
216            }
217            if let Some(rsa_public_key) = &jwt.rsa_public_key {
218                params.insert("rsa_public_key".to_string(), rsa_public_key.clone());
219            }
220            if let Some(ecdsa_public_key) = &jwt.ecdsa_public_key {
221                params.insert("ecdsa_public_key".to_string(), ecdsa_public_key.clone());
222            }
223            if let Some(issuer) = &jwt.issuer {
224                params.insert("issuer".to_string(), issuer.clone());
225            }
226            if let Some(audience) = &jwt.audience {
227                params.insert("audience".to_string(), audience.clone());
228            }
229            if !jwt.algorithms.is_empty() {
230                params.insert("algorithms".to_string(), jwt.algorithms.join(","));
231            }
232            Some(AuthConfig {
233                auth_type: "jwt".to_string(),
234                params,
235            })
236        } else if let Some(oauth2) = &config_auth.oauth2 {
237            let mut params = HashMap::new();
238            params.insert("client_id".to_string(), oauth2.client_id.clone());
239            params.insert("client_secret".to_string(), oauth2.client_secret.clone());
240            params.insert("introspection_url".to_string(), oauth2.introspection_url.clone());
241            if let Some(auth_url) = &oauth2.auth_url {
242                params.insert("auth_url".to_string(), auth_url.clone());
243            }
244            if let Some(token_url) = &oauth2.token_url {
245                params.insert("token_url".to_string(), token_url.clone());
246            }
247            if let Some(token_type_hint) = &oauth2.token_type_hint {
248                params.insert("token_type_hint".to_string(), token_type_hint.clone());
249            }
250            Some(AuthConfig {
251                auth_type: "oauth2".to_string(),
252                params,
253            })
254        } else if let Some(basic_auth) = &config_auth.basic_auth {
255            let mut params = HashMap::new();
256            for (user, pass) in &basic_auth.credentials {
257                params.insert(user.clone(), pass.clone());
258            }
259            Some(AuthConfig {
260                auth_type: "basic".to_string(),
261                params,
262            })
263        } else if let Some(api_key) = &config_auth.api_key {
264            let mut params = HashMap::new();
265            params.insert("header_name".to_string(), api_key.header_name.clone());
266            if let Some(query_name) = &api_key.query_name {
267                params.insert("query_name".to_string(), query_name.clone());
268            }
269            if !api_key.keys.is_empty() {
270                params.insert("keys".to_string(), api_key.keys.join(","));
271            }
272            Some(AuthConfig {
273                auth_type: "api_key".to_string(),
274                params,
275            })
276        } else {
277            None
278        }
279    }
280}
281
/// Exported request format
///
/// A flattened request/response pair as written to export YAML files.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExportedRequest {
    /// Request ID
    pub id: String,
    /// Request name
    pub name: String,
    /// HTTP method
    pub method: String,
    /// Request path
    pub path: String,
    /// Folder path (for organization)
    pub folder_path: String,
    /// Request headers
    pub headers: HashMap<String, String>,
    /// Query parameters
    pub query_params: HashMap<String, String>,
    /// Request body
    pub body: Option<String>,
    /// Response status code
    pub response_status: Option<u16>,
    /// Response body
    pub response_body: Option<String>,
    /// Response headers
    pub response_headers: HashMap<String, String>,
    /// Response delay (ms)
    // NOTE(review): presumably applied before serving the mocked response —
    // confirm with the mock server that consumes this field.
    pub delay: Option<u64>,
}
310
311impl WorkspacePersistence {
312    /// Create a new persistence manager
313    pub fn new<P: AsRef<Path>>(base_dir: P) -> Self {
314        Self {
315            base_dir: base_dir.as_ref().to_path_buf(),
316        }
317    }
318
319    /// Get the workspace directory path
320    pub fn workspace_dir(&self) -> &Path {
321        &self.base_dir
322    }
323
324    /// Get the path for a specific workspace file
325    pub fn workspace_file_path(&self, workspace_id: &str) -> PathBuf {
326        self.base_dir.join(format!("{}.yaml", workspace_id))
327    }
328
329    /// Get the registry metadata file path
330    pub fn registry_file_path(&self) -> PathBuf {
331        self.base_dir.join("registry.yaml")
332    }
333
334    /// Get the sync state file path
335    pub fn sync_state_file_path(&self) -> PathBuf {
336        self.base_dir.join("sync_state.yaml")
337    }
338
339    /// Ensure the workspace directory exists
340    pub async fn ensure_workspace_dir(&self) -> Result<()> {
341        if !self.base_dir.exists() {
342            fs::create_dir_all(&self.base_dir).await.map_err(|e| {
343                Error::generic(format!("Failed to create workspace directory: {}", e))
344            })?;
345        }
346        Ok(())
347    }
348
349    /// Save a workspace to disk
350    pub async fn save_workspace(&self, workspace: &Workspace) -> Result<()> {
351        self.ensure_workspace_dir().await?;
352
353        let file_path = self.workspace_file_path(&workspace.id);
354        let content = serde_yaml::to_string(workspace)
355            .map_err(|e| Error::generic(format!("Failed to serialize workspace: {}", e)))?;
356
357        fs::write(&file_path, content)
358            .await
359            .map_err(|e| Error::generic(format!("Failed to write workspace file: {}", e)))?;
360
361        Ok(())
362    }
363
364    /// Load a workspace from disk
365    pub async fn load_workspace(&self, workspace_id: &str) -> Result<Workspace> {
366        let file_path = self.workspace_file_path(workspace_id);
367
368        if !file_path.exists() {
369            return Err(Error::generic(format!("Workspace file not found: {:?}", file_path)));
370        }
371
372        let content = fs::read_to_string(&file_path)
373            .await
374            .map_err(|e| Error::generic(format!("Failed to read workspace file: {}", e)))?;
375
376        let workspace: Workspace = serde_yaml::from_str(&content)
377            .map_err(|e| Error::generic(format!("Failed to deserialize workspace: {}", e)))?;
378
379        Ok(workspace)
380    }
381
382    /// Delete a workspace from disk
383    pub async fn delete_workspace(&self, workspace_id: &str) -> Result<()> {
384        let file_path = self.workspace_file_path(workspace_id);
385
386        if file_path.exists() {
387            fs::remove_file(&file_path)
388                .await
389                .map_err(|e| Error::generic(format!("Failed to delete workspace file: {}", e)))?;
390        }
391
392        Ok(())
393    }
394
395    /// Save the workspace registry metadata
396    pub async fn save_registry(&self, registry: &WorkspaceRegistry) -> Result<()> {
397        self.ensure_workspace_dir().await?;
398
399        let serializable = SerializableWorkspaceRegistry {
400            workspaces: registry.get_workspaces().into_iter().cloned().collect(),
401            active_workspace: registry.get_active_workspace_id().map(|s| s.to_string()),
402        };
403
404        let file_path = self.registry_file_path();
405        let content = serde_yaml::to_string(&serializable)
406            .map_err(|e| Error::generic(format!("Failed to serialize registry: {}", e)))?;
407
408        fs::write(&file_path, content)
409            .await
410            .map_err(|e| Error::generic(format!("Failed to write registry file: {}", e)))?;
411
412        Ok(())
413    }
414
415    /// Load the workspace registry metadata
416    pub async fn load_registry(&self) -> Result<WorkspaceRegistry> {
417        let file_path = self.registry_file_path();
418
419        if !file_path.exists() {
420            // Return empty registry if no registry file exists
421            return Ok(WorkspaceRegistry::new());
422        }
423
424        let content = fs::read_to_string(&file_path)
425            .await
426            .map_err(|e| Error::generic(format!("Failed to read registry file: {}", e)))?;
427
428        let serializable: SerializableWorkspaceRegistry = serde_yaml::from_str(&content)
429            .map_err(|e| Error::generic(format!("Failed to deserialize registry: {}", e)))?;
430
431        let mut registry = WorkspaceRegistry::new();
432
433        // Load individual workspaces
434        for workspace_meta in &serializable.workspaces {
435            match self.load_workspace(&workspace_meta.id).await {
436                Ok(workspace) => {
437                    registry.add_workspace(workspace)?;
438                }
439                Err(e) => {
440                    tracing::warn!("Failed to load workspace {}: {}", workspace_meta.id, e);
441                }
442            }
443        }
444
445        // Set active workspace
446        if let Some(active_id) = &serializable.active_workspace {
447            if let Err(e) = registry.set_active_workspace(Some(active_id.clone())) {
448                tracing::warn!("Failed to set active workspace {}: {}", active_id, e);
449            }
450        }
451
452        Ok(registry)
453    }
454
455    /// Save the sync state
456    pub async fn save_sync_state(&self, sync_state: &SyncState) -> Result<()> {
457        self.ensure_workspace_dir().await?;
458
459        let file_path = self.sync_state_file_path();
460        let content = serde_yaml::to_string(sync_state)
461            .map_err(|e| Error::generic(format!("Failed to serialize sync state: {}", e)))?;
462
463        fs::write(&file_path, content)
464            .await
465            .map_err(|e| Error::generic(format!("Failed to write sync state file: {}", e)))?;
466
467        Ok(())
468    }
469
470    /// Load the sync state
471    pub async fn load_sync_state(&self) -> Result<SyncState> {
472        let file_path = self.sync_state_file_path();
473
474        if !file_path.exists() {
475            // Return default sync state if no sync state file exists
476            return Ok(SyncState {
477                last_sync_timestamp: Utc::now(),
478            });
479        }
480
481        let content = fs::read_to_string(&file_path)
482            .await
483            .map_err(|e| Error::generic(format!("Failed to read sync state file: {}", e)))?;
484
485        let sync_state: SyncState = serde_yaml::from_str(&content)
486            .map_err(|e| Error::generic(format!("Failed to deserialize sync state: {}", e)))?;
487
488        Ok(sync_state)
489    }
490
491    /// List all workspace IDs from disk
492    pub async fn list_workspace_ids(&self) -> Result<Vec<EntityId>> {
493        if !self.base_dir.exists() {
494            return Ok(Vec::new());
495        }
496
497        let mut workspace_ids = Vec::new();
498
499        let mut entries = fs::read_dir(&self.base_dir)
500            .await
501            .map_err(|e| Error::generic(format!("Failed to read workspace directory: {}", e)))?;
502
503        while let Some(entry) = entries
504            .next_entry()
505            .await
506            .map_err(|e| Error::generic(format!("Failed to read directory entry: {}", e)))?
507        {
508            let path = entry.path();
509            if path.is_file() {
510                if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
511                    if file_name != "registry.yaml" && file_name.ends_with(".yaml") {
512                        if let Some(id) = file_name.strip_suffix(".yaml") {
513                            workspace_ids.push(id.to_string());
514                        }
515                    }
516                }
517            }
518        }
519
520        Ok(workspace_ids)
521    }
522
523    /// Save the entire registry and all workspaces
524    pub async fn save_full_registry(&self, registry: &WorkspaceRegistry) -> Result<()> {
525        // Save registry metadata
526        self.save_registry(registry).await?;
527
528        // Save all workspaces
529        for workspace in registry.get_workspaces() {
530            self.save_workspace(workspace).await?;
531        }
532
533        Ok(())
534    }
535
    /// Load the entire registry and all workspaces
    ///
    /// Alias for [`Self::load_registry`], which already loads every workspace
    /// file referenced by the registry metadata.
    pub async fn load_full_registry(&self) -> Result<WorkspaceRegistry> {
        self.load_registry().await
    }
540
541    /// Backup workspace data
542    pub async fn backup_workspace(&self, workspace_id: &str, backup_dir: &Path) -> Result<PathBuf> {
543        let workspace_file = self.workspace_file_path(workspace_id);
544
545        if !workspace_file.exists() {
546            return Err(Error::generic(format!("Workspace {} does not exist", workspace_id)));
547        }
548
549        // Ensure backup directory exists
550        if !backup_dir.exists() {
551            fs::create_dir_all(backup_dir)
552                .await
553                .map_err(|e| Error::generic(format!("Failed to create backup directory: {}", e)))?;
554        }
555
556        // Create backup filename with timestamp
557        let timestamp = Utc::now().format("%Y%m%d_%H%M%S");
558        let backup_filename = format!("{}_{}.yaml", workspace_id, timestamp);
559        let backup_path = backup_dir.join(backup_filename);
560
561        // Copy workspace file
562        fs::copy(&workspace_file, &backup_path)
563            .await
564            .map_err(|e| Error::generic(format!("Failed to create backup: {}", e)))?;
565
566        Ok(backup_path)
567    }
568
569    /// Restore workspace from backup
570    pub async fn restore_workspace(&self, backup_path: &Path) -> Result<EntityId> {
571        if !backup_path.exists() {
572            return Err(Error::generic(format!("Backup file does not exist: {:?}", backup_path)));
573        }
574
575        // Load workspace from backup
576        let content = fs::read_to_string(backup_path)
577            .await
578            .map_err(|e| Error::generic(format!("Failed to read backup file: {}", e)))?;
579
580        let workspace: Workspace = serde_yaml::from_str(&content)
581            .map_err(|e| Error::generic(format!("Failed to deserialize backup: {}", e)))?;
582
583        // Save restored workspace
584        self.save_workspace(&workspace).await?;
585
586        Ok(workspace.id)
587    }
588
589    /// Clean up old backups
590    pub async fn cleanup_old_backups(&self, backup_dir: &Path, keep_count: usize) -> Result<usize> {
591        if !backup_dir.exists() {
592            return Ok(0);
593        }
594
595        let mut backup_files = Vec::new();
596
597        let mut entries = fs::read_dir(backup_dir)
598            .await
599            .map_err(|e| Error::generic(format!("Failed to read backup directory: {}", e)))?;
600
601        while let Some(entry) = entries
602            .next_entry()
603            .await
604            .map_err(|e| Error::generic(format!("Failed to read backup entry: {}", e)))?
605        {
606            let path = entry.path();
607            if path.is_file() {
608                if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
609                    if file_name.ends_with(".yaml") {
610                        if let Ok(metadata) = entry.metadata().await {
611                            if let Ok(modified) = metadata.modified() {
612                                backup_files.push((path, modified));
613                            }
614                        }
615                    }
616                }
617            }
618        }
619
620        // Sort by modification time (newest first)
621        backup_files.sort_by(|a, b| b.1.cmp(&a.1));
622
623        // Remove old backups
624        let mut removed_count = 0;
625        for (path, _) in backup_files.iter().skip(keep_count) {
626            if fs::remove_file(path).await.is_ok() {
627                removed_count += 1;
628            }
629        }
630
631        Ok(removed_count)
632    }
633
634    /// Advanced sync with additional configuration options
635    #[allow(clippy::too_many_arguments)]
636    pub async fn sync_to_directory_advanced(
637        &self,
638        target_dir: &str,
639        strategy: &str,
640        workspace_ids: Option<&str>,
641        structure: &str,
642        include_meta: bool,
643        force: bool,
644        filename_pattern: &str,
645        exclude_pattern: Option<&str>,
646        dry_run: bool,
647    ) -> Result<SyncResult> {
648        let target_path = PathBuf::from(target_dir);
649
650        // Ensure target directory exists (unless dry run)
651        if !dry_run && !target_path.exists() {
652            fs::create_dir_all(&target_path)
653                .await
654                .map_err(|e| Error::generic(format!("Failed to create target directory: {}", e)))?;
655        }
656
657        // Parse strategy
658        let sync_strategy = match strategy {
659            "full" => SyncStrategy::Full,
660            "incremental" => SyncStrategy::Incremental,
661            "selective" => {
662                if let Some(ids) = workspace_ids {
663                    let workspace_list = ids.split(',').map(|s| s.trim().to_string()).collect();
664                    SyncStrategy::Selective(workspace_list)
665                } else {
666                    return Err(Error::generic("Selective strategy requires workspace IDs"));
667                }
668            }
669            _ => return Err(Error::generic(format!("Unknown sync strategy: {}", strategy))),
670        };
671
672        // Parse directory structure
673        let dir_structure = match structure {
674            "flat" => DirectoryStructure::Flat,
675            "nested" => DirectoryStructure::Nested,
676            "grouped" => DirectoryStructure::Grouped,
677            _ => return Err(Error::generic(format!("Unknown directory structure: {}", structure))),
678        };
679
680        // Get workspaces to sync based on strategy
681        let mut workspaces_to_sync = self.get_workspaces_for_sync(&sync_strategy).await?;
682
683        // Apply exclusion filter if provided
684        if let Some(exclude) = exclude_pattern {
685            if let Ok(regex) = regex::Regex::new(exclude) {
686                workspaces_to_sync.retain(|id| !regex.is_match(id));
687            }
688        }
689
690        let mut result = SyncResult {
691            synced_workspaces: 0,
692            synced_requests: 0,
693            files_created: 0,
694            target_dir: target_path.clone(),
695        };
696
697        // Sync each workspace
698        for workspace_id in workspaces_to_sync {
699            if let Ok(workspace) = self.load_workspace(&workspace_id).await {
700                let workspace_result = self
701                    .sync_workspace_to_directory_advanced(
702                        &workspace,
703                        &target_path,
704                        &dir_structure,
705                        include_meta,
706                        force,
707                        filename_pattern,
708                        dry_run,
709                    )
710                    .await?;
711
712                result.synced_workspaces += 1;
713                result.synced_requests += workspace_result.requests_count;
714                result.files_created += workspace_result.files_created;
715            }
716        }
717
718        // Update sync state for incremental syncs
719        if let SyncStrategy::Incremental = sync_strategy {
720            let new_sync_state = SyncState {
721                last_sync_timestamp: Utc::now(),
722            };
723            if let Err(e) = self.save_sync_state(&new_sync_state).await {
724                tracing::warn!("Failed to save sync state: {}", e);
725            }
726        }
727
728        Ok(result)
729    }
730
    /// Advanced sync for a single workspace with custom filename patterns
    ///
    /// Writes `workspace` into `target_dir` according to `structure`:
    /// - `Flat`: one `<pattern>.yaml` file containing the full export.
    /// - `Nested`: `<pattern>/workspace.yaml` plus a `requests/` subdirectory
    ///   with one file per request.
    /// - `Grouped`: shared `workspaces/` and `requests/` directories under
    ///   `target_dir`.
    ///
    /// Existing files are only overwritten when `force` is set. With
    /// `dry_run`, nothing is written but `files_created` is still counted to
    /// report what *would* happen.
    ///
    /// NOTE(review): per-request files are tallied in `requests_count` but
    /// not in `files_created`, and the metadata file increments
    /// `files_created` only when NOT a dry run (unlike the other branches) —
    /// confirm both asymmetries are intended.
    #[allow(clippy::too_many_arguments)]
    async fn sync_workspace_to_directory_advanced(
        &self,
        workspace: &Workspace,
        target_dir: &Path,
        structure: &DirectoryStructure,
        include_meta: bool,
        force: bool,
        filename_pattern: &str,
        dry_run: bool,
    ) -> Result<WorkspaceSyncResult> {
        let mut result = WorkspaceSyncResult {
            requests_count: 0,
            files_created: 0,
        };

        match structure {
            DirectoryStructure::Flat => {
                // Single self-contained export file named from the pattern.
                let export = self.create_workspace_export(workspace).await?;
                let filename = self.generate_filename(filename_pattern, workspace);
                let file_path = target_dir.join(format!("{}.yaml", filename));

                if force || !file_path.exists() {
                    if !dry_run {
                        let content = serde_yaml::to_string(&export).map_err(|e| {
                            Error::generic(format!("Failed to serialize workspace: {}", e))
                        })?;

                        fs::write(&file_path, content).await.map_err(|e| {
                            Error::generic(format!("Failed to write workspace file: {}", e))
                        })?;
                    }
                    // Counted even in dry-run mode: reports prospective work.
                    result.files_created += 1;
                }
            }

            DirectoryStructure::Nested => {
                // One directory per workspace, named from the pattern.
                let workspace_dir =
                    target_dir.join(self.generate_filename(filename_pattern, workspace));
                if !dry_run && !workspace_dir.exists() {
                    fs::create_dir_all(&workspace_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create workspace directory: {}", e))
                    })?;
                }

                // Export main workspace file
                let export = self.create_workspace_export(workspace).await?;
                let workspace_file = workspace_dir.join("workspace.yaml");

                if force || !workspace_file.exists() {
                    if !dry_run {
                        let content = serde_yaml::to_string(&export).map_err(|e| {
                            Error::generic(format!("Failed to serialize workspace: {}", e))
                        })?;

                        fs::write(&workspace_file, content).await.map_err(|e| {
                            Error::generic(format!("Failed to write workspace file: {}", e))
                        })?;
                    }
                    result.files_created += 1;
                }

                // Export individual requests
                let requests_dir = workspace_dir.join("requests");
                if !dry_run && !requests_dir.exists() {
                    fs::create_dir_all(&requests_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create requests directory: {}", e))
                    })?;
                }

                result.requests_count += self
                    .export_workspace_requests_advanced(workspace, &requests_dir, force, dry_run)
                    .await?;
            }

            DirectoryStructure::Grouped => {
                // Create grouped directories shared by all workspaces.
                let requests_dir = target_dir.join("requests");
                let workspaces_dir = target_dir.join("workspaces");

                if !dry_run {
                    for dir in [&requests_dir, &workspaces_dir] {
                        if !dir.exists() {
                            fs::create_dir_all(dir).await.map_err(|e| {
                                Error::generic(format!("Failed to create directory: {}", e))
                            })?;
                        }
                    }
                }

                // Export workspace metadata
                let export = self.create_workspace_export(workspace).await?;
                let filename = self.generate_filename(filename_pattern, workspace);
                let workspace_file = workspaces_dir.join(format!("{}.yaml", filename));

                if force || !workspace_file.exists() {
                    if !dry_run {
                        let content = serde_yaml::to_string(&export).map_err(|e| {
                            Error::generic(format!("Failed to serialize workspace: {}", e))
                        })?;

                        fs::write(&workspace_file, content).await.map_err(|e| {
                            Error::generic(format!("Failed to write workspace file: {}", e))
                        })?;
                    }
                    result.files_created += 1;
                }

                // Export requests to requests directory
                result.requests_count += self
                    .export_workspace_requests_grouped_advanced(
                        workspace,
                        &requests_dir,
                        force,
                        dry_run,
                    )
                    .await?;
            }
        }

        // Create metadata file if requested
        if include_meta && !dry_run {
            self.create_metadata_file(workspace, target_dir, structure).await?;
            result.files_created += 1;
        }

        Ok(result)
    }
860
861    /// Generate filename from pattern
862    fn generate_filename(&self, pattern: &str, workspace: &Workspace) -> String {
863        let timestamp = Utc::now().format("%Y%m%d_%H%M%S");
864
865        pattern
866            .replace("{name}", &self.sanitize_filename(&workspace.name))
867            .replace("{id}", &workspace.id)
868            .replace("{timestamp}", &timestamp.to_string())
869    }
870
871    /// Advanced request export with dry run support
872    async fn export_workspace_requests_advanced(
873        &self,
874        workspace: &Workspace,
875        requests_dir: &Path,
876        force: bool,
877        dry_run: bool,
878    ) -> Result<usize> {
879        let mut count = 0;
880
881        for request in &workspace.requests {
882            let file_path =
883                requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
884            if force || !file_path.exists() {
885                if !dry_run {
886                    let exported = self.convert_request_to_exported(request, "");
887                    let content = serde_yaml::to_string(&exported).map_err(|e| {
888                        Error::generic(format!("Failed to serialize request: {}", e))
889                    })?;
890
891                    fs::write(&file_path, content).await.map_err(|e| {
892                        Error::generic(format!("Failed to write request file: {}", e))
893                    })?;
894                }
895                count += 1;
896            }
897        }
898
899        // Export folder requests
900        for folder in &workspace.folders {
901            count += self
902                .export_folder_requests_advanced(folder, requests_dir, force, &folder.name, dry_run)
903                .await?;
904        }
905
906        Ok(count)
907    }
908
909    /// Advanced folder request export
910    async fn export_folder_requests_advanced(
911        &self,
912        folder: &Folder,
913        requests_dir: &Path,
914        force: bool,
915        folder_path: &str,
916        dry_run: bool,
917    ) -> Result<usize> {
918        use std::collections::VecDeque;
919
920        let mut count = 0;
921        let mut queue = VecDeque::new();
922
923        // Start with the root folder
924        queue.push_back((folder, folder_path.to_string()));
925
926        while let Some((current_folder, current_path)) = queue.pop_front() {
927            // Export requests in current folder
928            for request in &current_folder.requests {
929                let file_path =
930                    requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
931                if force || !file_path.exists() {
932                    if !dry_run {
933                        let exported = self.convert_request_to_exported(request, &current_path);
934                        let content = serde_yaml::to_string(&exported).map_err(|e| {
935                            Error::generic(format!("Failed to serialize request: {}", e))
936                        })?;
937
938                        fs::write(&file_path, content).await.map_err(|e| {
939                            Error::generic(format!("Failed to write request file: {}", e))
940                        })?;
941                    }
942                    count += 1;
943                }
944            }
945
946            // Add subfolders to queue with updated paths
947            for subfolder in &current_folder.folders {
948                let subfolder_path = if current_path.is_empty() {
949                    subfolder.name.clone()
950                } else {
951                    format!("{}/{}", current_path, subfolder.name)
952                };
953                queue.push_back((subfolder, subfolder_path));
954            }
955        }
956
957        Ok(count)
958    }
959
960    /// Advanced grouped request export
961    async fn export_workspace_requests_grouped_advanced(
962        &self,
963        workspace: &Workspace,
964        requests_dir: &Path,
965        force: bool,
966        dry_run: bool,
967    ) -> Result<usize> {
968        let mut count = 0;
969        let workspace_requests_dir = requests_dir.join(self.sanitize_filename(&workspace.name));
970
971        if !dry_run && !workspace_requests_dir.exists() {
972            fs::create_dir_all(&workspace_requests_dir).await.map_err(|e| {
973                Error::generic(format!("Failed to create workspace requests directory: {}", e))
974            })?;
975        }
976
977        count += self
978            .export_workspace_requests_advanced(workspace, &workspace_requests_dir, force, dry_run)
979            .await?;
980        Ok(count)
981    }
982
983    /// Sync workspaces to an external directory for Git/Dropbox integration
984    pub async fn sync_to_directory(
985        &self,
986        target_dir: &str,
987        strategy: &str,
988        workspace_ids: Option<&str>,
989        structure: &str,
990        include_meta: bool,
991        force: bool,
992    ) -> Result<SyncResult> {
993        let target_path = PathBuf::from(target_dir);
994
995        // Ensure target directory exists
996        if !target_path.exists() {
997            fs::create_dir_all(&target_path)
998                .await
999                .map_err(|e| Error::generic(format!("Failed to create target directory: {}", e)))?;
1000        }
1001
1002        // Parse strategy
1003        let sync_strategy = match strategy {
1004            "full" => SyncStrategy::Full,
1005            "incremental" => SyncStrategy::Incremental,
1006            "selective" => {
1007                if let Some(ids) = workspace_ids {
1008                    let workspace_list = ids.split(',').map(|s| s.trim().to_string()).collect();
1009                    SyncStrategy::Selective(workspace_list)
1010                } else {
1011                    return Err(Error::generic("Selective strategy requires workspace IDs"));
1012                }
1013            }
1014            _ => return Err(Error::generic(format!("Unknown sync strategy: {}", strategy))),
1015        };
1016
1017        // Parse directory structure
1018        let dir_structure = match structure {
1019            "flat" => DirectoryStructure::Flat,
1020            "nested" => DirectoryStructure::Nested,
1021            "grouped" => DirectoryStructure::Grouped,
1022            _ => return Err(Error::generic(format!("Unknown directory structure: {}", structure))),
1023        };
1024
1025        // Get workspaces to sync based on strategy
1026        let workspaces_to_sync = self.get_workspaces_for_sync(&sync_strategy).await?;
1027
1028        let mut result = SyncResult {
1029            synced_workspaces: 0,
1030            synced_requests: 0,
1031            files_created: 0,
1032            target_dir: target_path.clone(),
1033        };
1034
1035        // Sync each workspace
1036        for workspace_id in workspaces_to_sync {
1037            if let Ok(workspace) = self.load_workspace(&workspace_id).await {
1038                let workspace_result = self
1039                    .sync_workspace_to_directory(
1040                        &workspace,
1041                        &target_path,
1042                        &dir_structure,
1043                        include_meta,
1044                        force,
1045                    )
1046                    .await?;
1047
1048                result.synced_workspaces += 1;
1049                result.synced_requests += workspace_result.requests_count;
1050                result.files_created += workspace_result.files_created;
1051            }
1052        }
1053
1054        // Update sync state for incremental syncs
1055        if let SyncStrategy::Incremental = sync_strategy {
1056            let new_sync_state = SyncState {
1057                last_sync_timestamp: Utc::now(),
1058            };
1059            if let Err(e) = self.save_sync_state(&new_sync_state).await {
1060                tracing::warn!("Failed to save sync state: {}", e);
1061            }
1062        }
1063
1064        Ok(result)
1065    }
1066
1067    /// Get list of workspace IDs to sync based on strategy
1068    async fn get_workspaces_for_sync(&self, strategy: &SyncStrategy) -> Result<Vec<String>> {
1069        match strategy {
1070            SyncStrategy::Full => self.list_workspace_ids().await,
1071            SyncStrategy::Incremental => {
1072                // Load sync state to get last sync timestamp
1073                let sync_state = self.load_sync_state().await?;
1074                let last_sync = sync_state.last_sync_timestamp;
1075
1076                // Get all workspace IDs
1077                let all_workspace_ids = self.list_workspace_ids().await?;
1078
1079                // Filter workspaces that have been modified since last sync
1080                let mut modified_workspaces = Vec::new();
1081                for workspace_id in all_workspace_ids {
1082                    let file_path = self.workspace_file_path(&workspace_id);
1083                    if let Ok(metadata) = fs::metadata(&file_path).await {
1084                        if let Ok(modified_time) = metadata.modified() {
1085                            let modified_datetime = DateTime::<Utc>::from(modified_time);
1086                            if modified_datetime > last_sync {
1087                                modified_workspaces.push(workspace_id);
1088                            }
1089                        }
1090                    }
1091                }
1092
1093                Ok(modified_workspaces)
1094            }
1095            SyncStrategy::Selective(ids) => Ok(ids.clone()),
1096        }
1097    }
1098
1099    /// Sync a single workspace to the target directory
1100    async fn sync_workspace_to_directory(
1101        &self,
1102        workspace: &Workspace,
1103        target_dir: &Path,
1104        structure: &DirectoryStructure,
1105        include_meta: bool,
1106        force: bool,
1107    ) -> Result<WorkspaceSyncResult> {
1108        let mut result = WorkspaceSyncResult {
1109            requests_count: 0,
1110            files_created: 0,
1111        };
1112
1113        match structure {
1114            DirectoryStructure::Flat => {
1115                let export = self.create_workspace_export(workspace).await?;
1116                let file_path =
1117                    target_dir.join(format!("{}.yaml", self.sanitize_filename(&workspace.name)));
1118
1119                if force || !file_path.exists() {
1120                    let content = serde_yaml::to_string(&export).map_err(|e| {
1121                        Error::generic(format!("Failed to serialize workspace: {}", e))
1122                    })?;
1123
1124                    fs::write(&file_path, content).await.map_err(|e| {
1125                        Error::generic(format!("Failed to write workspace file: {}", e))
1126                    })?;
1127
1128                    result.files_created += 1;
1129                }
1130            }
1131
1132            DirectoryStructure::Nested => {
1133                let workspace_dir = target_dir.join(self.sanitize_filename(&workspace.name));
1134                if !workspace_dir.exists() {
1135                    fs::create_dir_all(&workspace_dir).await.map_err(|e| {
1136                        Error::generic(format!("Failed to create workspace directory: {}", e))
1137                    })?;
1138                }
1139
1140                // Export main workspace file
1141                let export = self.create_workspace_export(workspace).await?;
1142                let workspace_file = workspace_dir.join("workspace.yaml");
1143
1144                if force || !workspace_file.exists() {
1145                    let content = serde_yaml::to_string(&export).map_err(|e| {
1146                        Error::generic(format!("Failed to serialize workspace: {}", e))
1147                    })?;
1148
1149                    fs::write(&workspace_file, content).await.map_err(|e| {
1150                        Error::generic(format!("Failed to write workspace file: {}", e))
1151                    })?;
1152
1153                    result.files_created += 1;
1154                }
1155
1156                // Export individual requests
1157                let requests_dir = workspace_dir.join("requests");
1158                if !requests_dir.exists() {
1159                    fs::create_dir_all(&requests_dir).await.map_err(|e| {
1160                        Error::generic(format!("Failed to create requests directory: {}", e))
1161                    })?;
1162                }
1163
1164                result.requests_count +=
1165                    self.export_workspace_requests(workspace, &requests_dir, force).await?;
1166            }
1167
1168            DirectoryStructure::Grouped => {
1169                // Create grouped directories
1170                let requests_dir = target_dir.join("requests");
1171                let workspaces_dir = target_dir.join("workspaces");
1172
1173                for dir in [&requests_dir, &workspaces_dir] {
1174                    if !dir.exists() {
1175                        fs::create_dir_all(dir).await.map_err(|e| {
1176                            Error::generic(format!("Failed to create directory: {}", e))
1177                        })?;
1178                    }
1179                }
1180
1181                // Export workspace metadata
1182                let export = self.create_workspace_export(workspace).await?;
1183                let workspace_file = workspaces_dir
1184                    .join(format!("{}.yaml", self.sanitize_filename(&workspace.name)));
1185
1186                if force || !workspace_file.exists() {
1187                    let content = serde_yaml::to_string(&export).map_err(|e| {
1188                        Error::generic(format!("Failed to serialize workspace: {}", e))
1189                    })?;
1190
1191                    fs::write(&workspace_file, content).await.map_err(|e| {
1192                        Error::generic(format!("Failed to write workspace file: {}", e))
1193                    })?;
1194
1195                    result.files_created += 1;
1196                }
1197
1198                // Export requests to requests directory
1199                result.requests_count +=
1200                    self.export_workspace_requests_grouped(workspace, &requests_dir, force).await?;
1201            }
1202        }
1203
1204        // Create metadata file if requested
1205        if include_meta {
1206            self.create_metadata_file(workspace, target_dir, structure).await?;
1207            result.files_created += 1;
1208        }
1209
1210        Ok(result)
1211    }
1212
1213    /// Create a Git-friendly workspace export
1214    async fn create_workspace_export(&self, workspace: &Workspace) -> Result<WorkspaceExport> {
1215        let mut requests = HashMap::new();
1216
1217        // Collect all requests from workspace
1218        self.collect_requests_from_workspace(workspace, &mut requests, "".to_string());
1219
1220        let metadata = WorkspaceMetadata {
1221            id: workspace.id.clone(),
1222            name: workspace.name.clone(),
1223            description: workspace.description.clone(),
1224            exported_at: Utc::now(),
1225            request_count: requests.len(),
1226            folder_count: workspace.folders.len(),
1227        };
1228
1229        let config = WorkspaceConfig {
1230            auth: workspace.config.auth.as_ref().and_then(AuthConfig::from_config_auth),
1231            base_url: workspace.config.base_url.clone(),
1232            variables: workspace.config.global_environment.variables.clone(),
1233        };
1234
1235        Ok(WorkspaceExport {
1236            metadata,
1237            config,
1238            requests,
1239        })
1240    }
1241
1242    /// Collect all requests from workspace into a hashmap
1243    fn collect_requests_from_workspace(
1244        &self,
1245        workspace: &Workspace,
1246        requests: &mut HashMap<String, ExportedRequest>,
1247        folder_path: String,
1248    ) {
1249        // Add root-level requests
1250        for request in &workspace.requests {
1251            let exported = self.convert_request_to_exported(request, &folder_path);
1252            requests.insert(request.id.clone(), exported);
1253        }
1254
1255        // Add folder requests recursively
1256        for folder in &workspace.folders {
1257            let current_path = if folder_path.is_empty() {
1258                folder.name.clone()
1259            } else {
1260                format!("{}/{}", folder_path, folder.name)
1261            };
1262
1263            for request in &folder.requests {
1264                let exported = self.convert_request_to_exported(request, &current_path);
1265                requests.insert(request.id.clone(), exported);
1266            }
1267
1268            // Recursively process subfolders
1269            self.collect_requests_from_folders(folder, requests, current_path);
1270        }
1271    }
1272
1273    /// Recursively collect requests from folders
1274    fn collect_requests_from_folders(
1275        &self,
1276        folder: &Folder,
1277        requests: &mut HashMap<String, ExportedRequest>,
1278        folder_path: String,
1279    ) {
1280        for subfolder in &folder.folders {
1281            let current_path = format!("{}/{}", folder_path, subfolder.name);
1282
1283            for request in &subfolder.requests {
1284                let exported = self.convert_request_to_exported(request, &current_path);
1285                requests.insert(request.id.clone(), exported);
1286            }
1287
1288            self.collect_requests_from_folders(subfolder, requests, current_path);
1289        }
1290    }
1291
1292    /// Convert a MockRequest to ExportedRequest
1293    fn convert_request_to_exported(
1294        &self,
1295        request: &MockRequest,
1296        folder_path: &str,
1297    ) -> ExportedRequest {
1298        ExportedRequest {
1299            id: request.id.clone(),
1300            name: request.name.clone(),
1301            method: format!("{:?}", request.method),
1302            path: request.path.clone(),
1303            folder_path: folder_path.to_string(),
1304            headers: request.headers.clone(),
1305            query_params: request.query_params.clone(),
1306            body: request.body.clone(),
1307            response_status: Some(request.response.status_code),
1308            response_body: request.response.body.clone(),
1309            response_headers: request.response.headers.clone(),
1310            delay: request.response.delay_ms,
1311        }
1312    }
1313
1314    /// Export workspace with encryption for secure sharing
1315    pub async fn export_workspace_encrypted(
1316        &self,
1317        workspace: &Workspace,
1318        output_path: &Path,
1319    ) -> Result<EncryptedExportResult> {
1320        // Check if encryption is enabled for this workspace
1321        if !workspace.config.auto_encryption.enabled {
1322            return Err(Error::generic("Encryption is not enabled for this workspace. Enable encryption in workspace settings first."));
1323        }
1324
1325        // Get auto-encryption config
1326        let encryption_config = workspace.config.auto_encryption.clone();
1327        let processor = AutoEncryptionProcessor::new(&workspace.id, encryption_config);
1328
1329        // Create filtered workspace copy for export
1330        let mut filtered_workspace = workspace.to_filtered_for_sync();
1331
1332        // Apply automatic encryption to the filtered workspace
1333        self.encrypt_workspace_data(&mut filtered_workspace, &processor)?;
1334
1335        // Create standard export
1336        let export = self.create_workspace_export(&filtered_workspace).await?;
1337
1338        // Encrypt the entire export
1339        let export_json = serde_json::to_string_pretty(&export)
1340            .map_err(|e| Error::generic(format!("Failed to serialize export: {}", e)))?;
1341
1342        let encrypted_data = utils::encrypt_for_workspace(&workspace.id, &export_json)?;
1343
1344        // Generate backup key for sharing
1345        let key_manager = WorkspaceKeyManager::new();
1346        let backup_key = key_manager.generate_workspace_key_backup(&workspace.id)?;
1347
1348        // Write encrypted data to file
1349        fs::write(output_path, &encrypted_data)
1350            .await
1351            .map_err(|e| Error::generic(format!("Failed to write encrypted export: {}", e)))?;
1352
1353        Ok(EncryptedExportResult {
1354            output_path: output_path.to_path_buf(),
1355            backup_key,
1356            exported_at: Utc::now(),
1357            workspace_name: workspace.name.clone(),
1358            encryption_enabled: true,
1359        })
1360    }
1361
1362    /// Import encrypted workspace
1363    pub async fn import_workspace_encrypted(
1364        &self,
1365        encrypted_file: &Path,
1366        _workspace_name: Option<&str>,
1367        _registry: &mut WorkspaceRegistry,
1368    ) -> Result<EncryptedImportResult> {
1369        // Read encrypted data
1370        let _encrypted_data = fs::read_to_string(encrypted_file)
1371            .await
1372            .map_err(|e| Error::generic(format!("Failed to read encrypted file: {}", e)))?;
1373
1374        // For import, we need the workspace ID and backup key
1375        // This would typically be provided by the user or extracted from metadata
1376        Err(Error::generic("Encrypted import requires workspace ID and backup key. Use import_workspace_encrypted_with_key instead."))
1377    }
1378
1379    /// Import encrypted workspace with specific workspace ID and backup key
1380    pub async fn import_workspace_encrypted_with_key(
1381        &self,
1382        encrypted_file: &Path,
1383        workspace_id: &str,
1384        backup_key: &str,
1385        workspace_name: Option<&str>,
1386        registry: &mut WorkspaceRegistry,
1387    ) -> Result<EncryptedImportResult> {
1388        // Ensure workspace key exists or restore from backup
1389        let key_manager = WorkspaceKeyManager::new();
1390        if !key_manager.has_workspace_key(workspace_id) {
1391            key_manager.restore_workspace_key_from_backup(workspace_id, backup_key)?;
1392        }
1393
1394        // Read and decrypt the data
1395        let encrypted_data = fs::read_to_string(encrypted_file)
1396            .await
1397            .map_err(|e| Error::generic(format!("Failed to read encrypted file: {}", e)))?;
1398
1399        let decrypted_json = utils::decrypt_for_workspace(workspace_id, &encrypted_data)?;
1400
1401        // Parse the export data
1402        let export: WorkspaceExport = serde_json::from_str(&decrypted_json)
1403            .map_err(|e| Error::generic(format!("Failed to parse decrypted export: {}", e)))?;
1404
1405        // Convert export to workspace
1406        let workspace = self.convert_export_to_workspace(&export, workspace_name)?;
1407
1408        // Add to registry
1409        let imported_id = registry.add_workspace(workspace)?;
1410
1411        Ok(EncryptedImportResult {
1412            workspace_id: imported_id,
1413            workspace_name: export.metadata.name.clone(),
1414            imported_at: Utc::now(),
1415            request_count: export.requests.len(),
1416            encryption_restored: true,
1417        })
1418    }
1419
1420    /// Apply encryption to workspace data before export
1421    fn encrypt_workspace_data(
1422        &self,
1423        workspace: &mut Workspace,
1424        processor: &AutoEncryptionProcessor,
1425    ) -> Result<()> {
1426        // Encrypt environment variables
1427        for env in &mut workspace.config.environments {
1428            processor.process_env_vars(&mut env.variables)?;
1429        }
1430        processor.process_env_vars(&mut workspace.config.global_environment.variables)?;
1431
1432        // Note: Headers and request bodies would be encrypted here when implemented
1433        // For now, we rely on the filtering done by to_filtered_for_sync()
1434
1435        Ok(())
1436    }
1437
1438    /// Convert WorkspaceExport back to Workspace
1439    fn convert_export_to_workspace(
1440        &self,
1441        export: &WorkspaceExport,
1442        name_override: Option<&str>,
1443    ) -> Result<Workspace> {
1444        let mut workspace =
1445            Workspace::new(name_override.unwrap_or(&export.metadata.name).to_string());
1446
1447        // Set description if provided
1448        if let Some(desc) = &export.metadata.description {
1449            workspace.description = Some(desc.clone());
1450        }
1451
1452        // Restore requests from export
1453        for exported_request in export.requests.values() {
1454            // Convert exported request back to MockRequest
1455            let method = self.parse_http_method(&exported_request.method)?;
1456            let mut request = MockRequest::new(
1457                method,
1458                exported_request.path.clone(),
1459                exported_request.name.clone(),
1460            );
1461
1462            // Set additional properties
1463            if let Some(status) = exported_request.response_status {
1464                request.response.status_code = status;
1465            }
1466
1467            // Set other response properties if available
1468            if let Some(body) = &exported_request.response_body {
1469                request.response.body = Some(body.clone());
1470            }
1471            request.response.headers = exported_request.response_headers.clone();
1472            if let Some(delay) = exported_request.delay {
1473                request.response.delay_ms = Some(delay);
1474            }
1475
1476            workspace.add_request(request)?;
1477        }
1478
1479        // Restore configuration
1480        workspace.config.global_environment.variables = export.config.variables.clone();
1481
1482        Ok(workspace)
1483    }
1484
1485    /// Parse HTTP method string to enum
1486    fn parse_http_method(&self, method_str: &str) -> Result<crate::routing::HttpMethod> {
1487        match method_str.to_uppercase().as_str() {
1488            "GET" => Ok(crate::routing::HttpMethod::GET),
1489            "POST" => Ok(crate::routing::HttpMethod::POST),
1490            "PUT" => Ok(crate::routing::HttpMethod::PUT),
1491            "DELETE" => Ok(crate::routing::HttpMethod::DELETE),
1492            "PATCH" => Ok(crate::routing::HttpMethod::PATCH),
1493            "HEAD" => Ok(crate::routing::HttpMethod::HEAD),
1494            "OPTIONS" => Ok(crate::routing::HttpMethod::OPTIONS),
1495            _ => Err(Error::generic(format!("Unknown HTTP method: {}", method_str))),
1496        }
1497    }
1498
1499    /// Check workspace for unencrypted sensitive data before export
1500    pub fn check_workspace_for_unencrypted_secrets(
1501        &self,
1502        workspace: &Workspace,
1503    ) -> Result<SecurityCheckResult> {
1504        let mut warnings = Vec::new();
1505        let errors = Vec::new();
1506
1507        // Check environment variables
1508        self.check_environment_variables(workspace, &mut warnings)?;
1509
1510        // Check for sensitive patterns in request data (when implemented)
1511        // This would check headers, bodies, etc.
1512
1513        let has_warnings = !warnings.is_empty();
1514        let has_errors = !errors.is_empty();
1515
1516        Ok(SecurityCheckResult {
1517            workspace_id: workspace.id.clone(),
1518            workspace_name: workspace.name.clone(),
1519            warnings,
1520            errors,
1521            is_secure: !has_warnings && !has_errors,
1522            recommended_actions: self.generate_security_recommendations(has_warnings, has_errors),
1523        })
1524    }
1525
1526    /// Check environment variables for sensitive data
1527    fn check_environment_variables(
1528        &self,
1529        workspace: &Workspace,
1530        warnings: &mut Vec<SecurityWarning>,
1531    ) -> Result<()> {
1532        let sensitive_keys = [
1533            "password",
1534            "secret",
1535            "key",
1536            "token",
1537            "credential",
1538            "api_key",
1539            "apikey",
1540            "api_secret",
1541            "db_password",
1542            "database_password",
1543            "aws_secret_key",
1544            "aws_session_token",
1545            "private_key",
1546            "authorization",
1547            "auth_token",
1548            "access_token",
1549            "refresh_token",
1550            "cookie",
1551            "session",
1552            "csrf",
1553            "jwt",
1554            "bearer",
1555        ];
1556
1557        // Check global environment
1558        for (key, value) in &workspace.config.global_environment.variables {
1559            if self.is_potentially_sensitive(key, value, &sensitive_keys) {
1560                warnings.push(SecurityWarning {
1561                    field_type: "environment_variable".to_string(),
1562                    field_name: key.clone(),
1563                    location: "global_environment".to_string(),
1564                    severity: SecuritySeverity::High,
1565                    message: format!(
1566                        "Potentially sensitive environment variable '{}' detected",
1567                        key
1568                    ),
1569                    suggestion: "Consider encrypting this value or excluding it from exports"
1570                        .to_string(),
1571                });
1572            }
1573        }
1574
1575        // Check workspace environments
1576        for env in &workspace.config.environments {
1577            for (key, value) in &env.variables {
1578                if self.is_potentially_sensitive(key, value, &sensitive_keys) {
1579                    warnings.push(SecurityWarning {
1580                        field_type: "environment_variable".to_string(),
1581                        field_name: key.clone(),
1582                        location: format!("environment '{}'", env.name),
1583                        severity: SecuritySeverity::High,
1584                        message: format!("Potentially sensitive environment variable '{}' detected in environment '{}'", key, env.name),
1585                        suggestion: "Consider encrypting this value or excluding it from exports".to_string(),
1586                    });
1587                }
1588            }
1589        }
1590
1591        Ok(())
1592    }
1593
1594    /// Check if a key-value pair is potentially sensitive
1595    fn is_potentially_sensitive(&self, key: &str, value: &str, sensitive_keys: &[&str]) -> bool {
1596        let key_lower = key.to_lowercase();
1597
1598        // Check if key contains sensitive keywords
1599        if sensitive_keys.iter().any(|&sensitive| key_lower.contains(sensitive)) {
1600            return true;
1601        }
1602
1603        // Check for patterns that indicate sensitive data
1604        self.contains_sensitive_patterns(value)
1605    }
1606
1607    /// Check if value contains sensitive patterns
1608    fn contains_sensitive_patterns(&self, value: &str) -> bool {
1609        // Credit card pattern
1610        if CREDIT_CARD_PATTERN.is_match(value) {
1611            return true;
1612        }
1613
1614        // SSN pattern
1615        if SSN_PATTERN.is_match(value) {
1616            return true;
1617        }
1618
1619        // Long random-looking strings (potential API keys)
1620        if value.len() > 20 && value.chars().any(|c| c.is_alphanumeric()) {
1621            let alphanumeric_count = value.chars().filter(|c| c.is_alphanumeric()).count();
1622            let total_count = value.len();
1623            if alphanumeric_count as f64 / total_count as f64 > 0.8 {
1624                return true;
1625            }
1626        }
1627
1628        false
1629    }
1630
1631    /// Generate security recommendations based on findings
1632    fn generate_security_recommendations(
1633        &self,
1634        has_warnings: bool,
1635        has_errors: bool,
1636    ) -> Vec<String> {
1637        let mut recommendations = Vec::new();
1638
1639        if has_warnings || has_errors {
1640            recommendations.push("Enable encryption for this workspace in settings".to_string());
1641            recommendations.push("Review and encrypt sensitive environment variables".to_string());
1642            recommendations.push("Use encrypted export for sharing workspaces".to_string());
1643        }
1644
1645        if has_errors {
1646            recommendations
1647                .push("CRITICAL: Remove or encrypt sensitive data before proceeding".to_string());
1648        }
1649
1650        recommendations
1651    }
1652
1653    /// Export individual requests for nested structure
1654    async fn export_workspace_requests(
1655        &self,
1656        workspace: &Workspace,
1657        requests_dir: &Path,
1658        force: bool,
1659    ) -> Result<usize> {
1660        let mut count = 0;
1661
1662        for request in &workspace.requests {
1663            let file_path =
1664                requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
1665            if force || !file_path.exists() {
1666                let exported = self.convert_request_to_exported(request, "");
1667                let content = serde_yaml::to_string(&exported)
1668                    .map_err(|e| Error::generic(format!("Failed to serialize request: {}", e)))?;
1669
1670                fs::write(&file_path, content)
1671                    .await
1672                    .map_err(|e| Error::generic(format!("Failed to write request file: {}", e)))?;
1673
1674                count += 1;
1675            }
1676        }
1677
1678        // Export folder requests
1679        for folder in &workspace.folders {
1680            count += self.export_folder_requests(folder, requests_dir, force, &folder.name).await?;
1681        }
1682
1683        Ok(count)
1684    }
1685
1686    /// Export requests from folders recursively
1687    async fn export_folder_requests(
1688        &self,
1689        folder: &Folder,
1690        requests_dir: &Path,
1691        force: bool,
1692        folder_path: &str,
1693    ) -> Result<usize> {
1694        use std::collections::VecDeque;
1695
1696        let mut count = 0;
1697        let mut queue = VecDeque::new();
1698
1699        // Start with the root folder
1700        queue.push_back((folder, folder_path.to_string()));
1701
1702        while let Some((current_folder, current_path)) = queue.pop_front() {
1703            // Export requests in current folder
1704            for request in &current_folder.requests {
1705                let file_path =
1706                    requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
1707                if force || !file_path.exists() {
1708                    let exported = self.convert_request_to_exported(request, &current_path);
1709                    let content = serde_yaml::to_string(&exported).map_err(|e| {
1710                        Error::generic(format!("Failed to serialize request: {}", e))
1711                    })?;
1712
1713                    fs::write(&file_path, content).await.map_err(|e| {
1714                        Error::generic(format!("Failed to write request file: {}", e))
1715                    })?;
1716
1717                    count += 1;
1718                }
1719            }
1720
1721            // Add subfolders to queue with updated paths
1722            for subfolder in &current_folder.folders {
1723                let subfolder_path = if current_path.is_empty() {
1724                    subfolder.name.clone()
1725                } else {
1726                    format!("{}/{}", current_path, subfolder.name)
1727                };
1728                queue.push_back((subfolder, subfolder_path));
1729            }
1730        }
1731
1732        Ok(count)
1733    }
1734
1735    /// Export requests for grouped structure
1736    async fn export_workspace_requests_grouped(
1737        &self,
1738        workspace: &Workspace,
1739        requests_dir: &Path,
1740        force: bool,
1741    ) -> Result<usize> {
1742        let mut count = 0;
1743        let workspace_requests_dir = requests_dir.join(self.sanitize_filename(&workspace.name));
1744
1745        if !workspace_requests_dir.exists() {
1746            fs::create_dir_all(&workspace_requests_dir).await.map_err(|e| {
1747                Error::generic(format!("Failed to create workspace requests directory: {}", e))
1748            })?;
1749        }
1750
1751        count += self
1752            .export_workspace_requests(workspace, &workspace_requests_dir, force)
1753            .await?;
1754        Ok(count)
1755    }
1756
1757    /// Create metadata file for Git integration
1758    async fn create_metadata_file(
1759        &self,
1760        workspace: &Workspace,
1761        target_dir: &Path,
1762        structure: &DirectoryStructure,
1763    ) -> Result<()> {
1764        let metadata = serde_json::json!({
1765            "workspace_id": workspace.id,
1766            "workspace_name": workspace.name,
1767            "description": workspace.description,
1768            "exported_at": Utc::now().to_rfc3339(),
1769            "structure": format!("{:?}", structure),
1770            "version": "1.0",
1771            "source": "mockforge"
1772        });
1773
1774        let metadata_file = target_dir.join(".mockforge-meta.json");
1775        let content = serde_json::to_string_pretty(&metadata)
1776            .map_err(|e| Error::generic(format!("Failed to serialize metadata: {}", e)))?;
1777
1778        fs::write(&metadata_file, content)
1779            .await
1780            .map_err(|e| Error::generic(format!("Failed to write metadata file: {}", e)))?;
1781
1782        Ok(())
1783    }
1784
1785    /// Sanitize filename for filesystem compatibility
1786    fn sanitize_filename(&self, name: &str) -> String {
1787        name.chars()
1788            .map(|c| match c {
1789                '/' | '\\' | ':' | '*' | '?' | '"' | '<' | '>' | '|' => '_',
1790                c if c.is_whitespace() => '_',
1791                c => c,
1792            })
1793            .collect::<String>()
1794            .to_lowercase()
1795    }
1796}
1797
/// Result of syncing a single workspace
///
/// Lightweight counter pair returned per workspace by the sync routines.
/// NOTE(review): `files_created` presumably differs from `requests_count`
/// when existing files are skipped (non-forced sync) — confirm at call sites.
#[derive(Debug)]
struct WorkspaceSyncResult {
    /// Number of requests exported
    requests_count: usize,
    /// Number of files created
    files_created: usize,
}
1806
#[cfg(test)]
mod tests {
    use super::*;
    use crate::workspace::{MockRequest, Workspace};
    use crate::HttpMethod;
    use tempfile::TempDir;

    /// Round-trips a single workspace through save/load and verifies it is
    /// discoverable via `list_workspace_ids`.
    #[tokio::test]
    async fn test_workspace_persistence() {
        let temp_dir = TempDir::new().unwrap();
        let persistence = WorkspacePersistence::new(temp_dir.path());

        // Create a test workspace with one request
        let mut workspace = Workspace::new("Test Workspace".to_string());
        let request =
            MockRequest::new(HttpMethod::GET, "/test".to_string(), "Test Request".to_string());
        workspace.add_request(request).unwrap();

        // Save workspace to disk
        persistence.save_workspace(&workspace).await.unwrap();

        // Load it back and check name and request count survive the round trip
        let loaded = persistence.load_workspace(&workspace.id).await.unwrap();
        assert_eq!(loaded.name, workspace.name);
        assert_eq!(loaded.requests.len(), 1);

        // The saved workspace should be the only one listed
        let ids = persistence.list_workspace_ids().await.unwrap();
        assert_eq!(ids.len(), 1);
        assert_eq!(ids[0], workspace.id);
    }

    /// Verifies that the full registry (workspaces + active selection)
    /// survives a save/load cycle.
    #[tokio::test]
    async fn test_registry_persistence() {
        let temp_dir = TempDir::new().unwrap();
        let persistence = WorkspacePersistence::new(temp_dir.path());

        let mut registry = WorkspaceRegistry::new();

        // Add two workspaces so we can check both count and active selection
        let workspace1 = Workspace::new("Workspace 1".to_string());
        let workspace2 = Workspace::new("Workspace 2".to_string());

        let id1 = registry.add_workspace(workspace1).unwrap();
        let _id2 = registry.add_workspace(workspace2).unwrap();

        // Mark the first workspace as active
        registry.set_active_workspace(Some(id1.clone())).unwrap();

        // Save registry to disk
        persistence.save_full_registry(&registry).await.unwrap();

        // Load registry and verify both workspaces and the active selection persisted
        let loaded_registry = persistence.load_full_registry().await.unwrap();

        assert_eq!(loaded_registry.get_workspaces().len(), 2);
        assert_eq!(loaded_registry.get_active_workspace().unwrap().name, "Workspace 1");
    }

    /// Exercises the backup → delete → restore flow for a single workspace.
    #[tokio::test]
    async fn test_backup_and_restore() {
        let temp_dir = TempDir::new().unwrap();
        let backup_dir = temp_dir.path().join("backups");
        let persistence = WorkspacePersistence::new(temp_dir.path());

        // Create and save workspace
        let workspace = Workspace::new("Test Workspace".to_string());
        persistence.save_workspace(&workspace).await.unwrap();

        // Create backup and verify the backup file exists on disk
        let backup_path = persistence.backup_workspace(&workspace.id, &backup_dir).await.unwrap();
        assert!(backup_path.exists());

        // Delete original; loading it should now fail
        persistence.delete_workspace(&workspace.id).await.unwrap();
        assert!(persistence.load_workspace(&workspace.id).await.is_err());

        // Restore from backup
        let restored_id = persistence.restore_workspace(&backup_path).await.unwrap();

        // Verify restored workspace is loadable and retains its name
        let restored = persistence.load_workspace(&restored_id).await.unwrap();
        assert_eq!(restored.name, "Test Workspace");
    }
}