1use crate::config::AuthConfig as ConfigAuthConfig;
7use crate::encryption::{utils, AutoEncryptionProcessor, WorkspaceKeyManager};
8use crate::workspace::{EntityId, Folder, MockRequest, Workspace, WorkspaceRegistry};
9use crate::{Error, Result};
10use chrono::{DateTime, Utc};
11use once_cell::sync::Lazy;
12use regex::Regex;
13use serde::{Deserialize, Serialize};
14use std::collections::HashMap;
15use std::path::{Path, PathBuf};
16use tokio::fs;
17
/// Matches credit-card-like numbers: four groups of four digits, each
/// group optionally separated by a dash or whitespace.
/// NOTE(review): not referenced in this part of the file — presumably
/// used by the security-scan code further down; confirm at use site.
static CREDIT_CARD_PATTERN: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"\b\d{4}[-\s]?\d{4}[-\s]?\d{4}[-\s]?\d{4}\b")
        .expect("CREDIT_CARD_PATTERN regex is valid")
});
23
/// Matches US-SSN-like numbers (3-2-4 digit groups, optional dash or
/// whitespace separators).
/// NOTE(review): not referenced in this part of the file — presumably
/// used by the security-scan code further down; confirm at use site.
static SSN_PATTERN: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"\b\d{3}[-\s]?\d{2}[-\s]?\d{4}\b").expect("SSN_PATTERN regex is valid")
});
27
/// File-system backed persistence layer for workspaces.
///
/// All state lives under `base_dir`: one `<workspace-id>.yaml` per
/// workspace plus the `registry.yaml` and `sync_state.yaml`
/// bookkeeping files.
#[derive(Debug)]
pub struct WorkspacePersistence {
    // Directory that holds every workspace, registry, and sync-state file.
    base_dir: PathBuf,
}
34
/// On-disk (YAML) shape of the workspace registry.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct SerializableWorkspaceRegistry {
    // Full workspace payloads at save time; on load only the ids are
    // used and each workspace is re-read from its own file.
    workspaces: Vec<Workspace>,
    // Id of the workspace that was active when the registry was saved.
    active_workspace: Option<EntityId>,
}
41
/// Bookkeeping for incremental syncs: the watermark a later sync
/// compares workspace-file modification times against.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SyncState {
    /// UTC time the last sync ran ("now" is substituted when no state
    /// file exists yet).
    pub last_sync_timestamp: DateTime<Utc>,
}
48
/// How workspaces are selected for a directory sync.
#[derive(Debug, Clone, PartialEq)]
pub enum SyncStrategy {
    /// Sync every workspace on disk.
    Full,
    /// Sync only workspaces whose file changed since the last recorded sync.
    Incremental,
    /// Sync exactly the listed workspace ids.
    Selective(Vec<String>),
}
59
/// On-disk layout used when syncing workspaces to a directory.
#[derive(Debug, Clone, PartialEq)]
pub enum DirectoryStructure {
    /// One YAML file per workspace, all directly in the target directory.
    Flat,
    /// One subdirectory per workspace (`workspace.yaml` + `requests/`).
    Nested,
    /// Shared `workspaces/` and `requests/` trees across all workspaces.
    Grouped,
}
70
/// Aggregate outcome of a directory sync.
#[derive(Debug, Clone)]
pub struct SyncResult {
    /// Number of workspaces successfully synced.
    pub synced_workspaces: usize,
    /// Total request entries exported across all workspaces.
    pub synced_requests: usize,
    /// Files written (or, during a dry run, that would have been written).
    pub files_created: usize,
    /// Directory the sync targeted.
    pub target_dir: PathBuf,
}
83
/// Outcome of an encrypted workspace export.
/// NOTE(review): produced by encryption-related code outside this
/// chunk — field meanings inferred from names; confirm at the use site.
#[derive(Debug, Clone)]
pub struct EncryptedExportResult {
    pub output_path: PathBuf,
    pub backup_key: String,
    pub exported_at: DateTime<Utc>,
    pub workspace_name: String,
    pub encryption_enabled: bool,
}
98
/// Outcome of importing an encrypted workspace export.
/// NOTE(review): produced by encryption-related code outside this
/// chunk — field meanings inferred from names; confirm at the use site.
#[derive(Debug, Clone)]
pub struct EncryptedImportResult {
    pub workspace_id: String,
    pub workspace_name: String,
    pub imported_at: DateTime<Utc>,
    pub request_count: usize,
    pub encryption_restored: bool,
}
113
/// Result of scanning a workspace for sensitive or insecure content.
/// NOTE(review): the scan itself lives outside this chunk; the exact
/// warning/error split is inferred from the field names — confirm.
#[derive(Debug, Clone)]
pub struct SecurityCheckResult {
    pub workspace_id: String,
    pub workspace_name: String,
    /// Findings reported but not considered fatal.
    pub warnings: Vec<SecurityWarning>,
    /// Findings serious enough to fail the check.
    pub errors: Vec<SecurityWarning>,
    pub is_secure: bool,
    /// Human-readable remediation steps.
    pub recommended_actions: Vec<String>,
}
130
/// A single security finding within a workspace.
#[derive(Debug, Clone)]
pub struct SecurityWarning {
    /// Kind of sensitive data detected.
    pub field_type: String,
    /// Name of the offending field.
    pub field_name: String,
    /// Where in the workspace the finding was located.
    pub location: String,
    pub severity: SecuritySeverity,
    /// Human-readable description of the finding.
    pub message: String,
    /// Suggested remediation.
    pub suggestion: String,
}
147
/// Ranked severity of a [`SecurityWarning`], lowest to highest.
#[derive(Debug, Clone, PartialEq)]
pub enum SecuritySeverity {
    Low,
    Medium,
    High,
    Critical,
}
160
/// Top-level export document written for a workspace: metadata, config,
/// and every request keyed by request id.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceExport {
    pub metadata: WorkspaceMetadata,
    pub config: WorkspaceConfig,
    /// Request id -> exported request, flattened across all folders.
    pub requests: HashMap<String, ExportedRequest>,
}
171
/// Identifying information and counts captured at export time.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceMetadata {
    pub id: String,
    pub name: String,
    pub description: Option<String>,
    /// UTC time the export was generated.
    pub exported_at: DateTime<Utc>,
    /// Number of exported requests (across all folders).
    pub request_count: usize,
    /// Number of top-level folders only (not counted recursively).
    pub folder_count: usize,
}
188
/// Workspace-level configuration included in exports.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceConfig {
    pub auth: Option<AuthConfig>,
    pub base_url: Option<String>,
    /// Variables from the workspace's global environment.
    pub variables: HashMap<String, String>,
    /// Defaulted on deserialization so older exports still load.
    #[serde(default)]
    pub reality_level: Option<crate::RealityLevel>,
    /// Defaulted on deserialization; exports created in this module
    /// always leave it `None`.
    #[serde(default)]
    pub ai_mode: Option<crate::ai_studio::config::AiMode>,
}
207
/// Flat, serialization-friendly auth description: a scheme tag plus a
/// string-keyed parameter map.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuthConfig {
    /// One of "jwt", "oauth2", "basic", or "api_key".
    pub auth_type: String,
    /// Scheme-specific parameters (may contain secrets verbatim).
    pub params: HashMap<String, String>,
}
216
217impl AuthConfig {
218 pub fn from_config_auth(config_auth: &ConfigAuthConfig) -> Option<Self> {
220 if let Some(jwt) = &config_auth.jwt {
221 let mut params = HashMap::new();
222 if let Some(secret) = &jwt.secret {
223 params.insert("secret".to_string(), secret.clone());
224 }
225 if let Some(rsa_public_key) = &jwt.rsa_public_key {
226 params.insert("rsa_public_key".to_string(), rsa_public_key.clone());
227 }
228 if let Some(ecdsa_public_key) = &jwt.ecdsa_public_key {
229 params.insert("ecdsa_public_key".to_string(), ecdsa_public_key.clone());
230 }
231 if let Some(issuer) = &jwt.issuer {
232 params.insert("issuer".to_string(), issuer.clone());
233 }
234 if let Some(audience) = &jwt.audience {
235 params.insert("audience".to_string(), audience.clone());
236 }
237 if !jwt.algorithms.is_empty() {
238 params.insert("algorithms".to_string(), jwt.algorithms.join(","));
239 }
240 Some(AuthConfig {
241 auth_type: "jwt".to_string(),
242 params,
243 })
244 } else if let Some(oauth2) = &config_auth.oauth2 {
245 let mut params = HashMap::new();
246 params.insert("client_id".to_string(), oauth2.client_id.clone());
247 params.insert("client_secret".to_string(), oauth2.client_secret.clone());
248 params.insert("introspection_url".to_string(), oauth2.introspection_url.clone());
249 if let Some(auth_url) = &oauth2.auth_url {
250 params.insert("auth_url".to_string(), auth_url.clone());
251 }
252 if let Some(token_url) = &oauth2.token_url {
253 params.insert("token_url".to_string(), token_url.clone());
254 }
255 if let Some(token_type_hint) = &oauth2.token_type_hint {
256 params.insert("token_type_hint".to_string(), token_type_hint.clone());
257 }
258 Some(AuthConfig {
259 auth_type: "oauth2".to_string(),
260 params,
261 })
262 } else if let Some(basic_auth) = &config_auth.basic_auth {
263 let mut params = HashMap::new();
264 for (user, pass) in &basic_auth.credentials {
265 params.insert(user.clone(), pass.clone());
266 }
267 Some(AuthConfig {
268 auth_type: "basic".to_string(),
269 params,
270 })
271 } else if let Some(api_key) = &config_auth.api_key {
272 let mut params = HashMap::new();
273 params.insert("header_name".to_string(), api_key.header_name.clone());
274 if let Some(query_name) = &api_key.query_name {
275 params.insert("query_name".to_string(), query_name.clone());
276 }
277 if !api_key.keys.is_empty() {
278 params.insert("keys".to_string(), api_key.keys.join(","));
279 }
280 Some(AuthConfig {
281 auth_type: "api_key".to_string(),
282 params,
283 })
284 } else {
285 None
286 }
287 }
288}
289
/// Serializable snapshot of a single mock request within an export.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExportedRequest {
    pub id: String,
    pub name: String,
    /// HTTP method, e.g. "GET".
    pub method: String,
    pub path: String,
    /// `/`-separated folder location within the workspace; "" means the
    /// request sits at the workspace's top level.
    pub folder_path: String,
    pub headers: HashMap<String, String>,
    pub query_params: HashMap<String, String>,
    pub body: Option<String>,
    pub response_status: Option<u16>,
    pub response_body: Option<String>,
    pub response_headers: HashMap<String, String>,
    /// Artificial response delay — presumably milliseconds; TODO confirm unit.
    pub delay: Option<u64>,
}
318
319impl WorkspacePersistence {
320 pub fn new<P: AsRef<Path>>(base_dir: P) -> Self {
322 Self {
323 base_dir: base_dir.as_ref().to_path_buf(),
324 }
325 }
326
327 pub fn workspace_dir(&self) -> &Path {
329 &self.base_dir
330 }
331
332 pub fn workspace_file_path(&self, workspace_id: &str) -> PathBuf {
334 self.base_dir.join(format!("{}.yaml", workspace_id))
335 }
336
337 pub fn registry_file_path(&self) -> PathBuf {
339 self.base_dir.join("registry.yaml")
340 }
341
342 pub fn sync_state_file_path(&self) -> PathBuf {
344 self.base_dir.join("sync_state.yaml")
345 }
346
347 pub async fn ensure_workspace_dir(&self) -> Result<()> {
349 if !self.base_dir.exists() {
350 fs::create_dir_all(&self.base_dir).await.map_err(|e| {
351 Error::generic(format!("Failed to create workspace directory: {}", e))
352 })?;
353 }
354 Ok(())
355 }
356
357 pub async fn save_workspace(&self, workspace: &Workspace) -> Result<()> {
359 self.ensure_workspace_dir().await?;
360
361 let file_path = self.workspace_file_path(&workspace.id);
362 let content = serde_yaml::to_string(workspace)
363 .map_err(|e| Error::generic(format!("Failed to serialize workspace: {}", e)))?;
364
365 fs::write(&file_path, content)
366 .await
367 .map_err(|e| Error::generic(format!("Failed to write workspace file: {}", e)))?;
368
369 Ok(())
370 }
371
372 pub async fn load_workspace(&self, workspace_id: &str) -> Result<Workspace> {
374 let file_path = self.workspace_file_path(workspace_id);
375
376 if !file_path.exists() {
377 return Err(Error::generic(format!("Workspace file not found: {:?}", file_path)));
378 }
379
380 let content = fs::read_to_string(&file_path)
381 .await
382 .map_err(|e| Error::generic(format!("Failed to read workspace file: {}", e)))?;
383
384 let mut workspace: Workspace = serde_yaml::from_str(&content)
385 .map_err(|e| Error::generic(format!("Failed to deserialize workspace: {}", e)))?;
386
387 workspace.initialize_default_mock_environments();
389
390 Ok(workspace)
391 }
392
393 pub async fn delete_workspace(&self, workspace_id: &str) -> Result<()> {
395 let file_path = self.workspace_file_path(workspace_id);
396
397 if file_path.exists() {
398 fs::remove_file(&file_path)
399 .await
400 .map_err(|e| Error::generic(format!("Failed to delete workspace file: {}", e)))?;
401 }
402
403 Ok(())
404 }
405
406 pub async fn save_registry(&self, registry: &WorkspaceRegistry) -> Result<()> {
408 self.ensure_workspace_dir().await?;
409
410 let serializable = SerializableWorkspaceRegistry {
411 workspaces: registry.get_workspaces().into_iter().cloned().collect(),
412 active_workspace: registry.get_active_workspace_id().map(|s| s.to_string()),
413 };
414
415 let file_path = self.registry_file_path();
416 let content = serde_yaml::to_string(&serializable)
417 .map_err(|e| Error::generic(format!("Failed to serialize registry: {}", e)))?;
418
419 fs::write(&file_path, content)
420 .await
421 .map_err(|e| Error::generic(format!("Failed to write registry file: {}", e)))?;
422
423 Ok(())
424 }
425
    /// Load the registry index and hydrate each listed workspace from
    /// its own file.
    ///
    /// Load failures for individual workspaces are logged and skipped
    /// rather than failing the whole registry load; a missing registry
    /// file yields an empty registry (first run).
    pub async fn load_registry(&self) -> Result<WorkspaceRegistry> {
        let file_path = self.registry_file_path();

        if !file_path.exists() {
            return Ok(WorkspaceRegistry::new());
        }

        let content = fs::read_to_string(&file_path)
            .await
            .map_err(|e| Error::generic(format!("Failed to read registry file: {}", e)))?;

        let serializable: SerializableWorkspaceRegistry = serde_yaml::from_str(&content)
            .map_err(|e| Error::generic(format!("Failed to deserialize registry: {}", e)))?;

        let mut registry = WorkspaceRegistry::new();

        // Only the ids from the registry snapshot are used; each
        // workspace's authoritative state is re-read from its own file.
        for workspace_meta in &serializable.workspaces {
            match self.load_workspace(&workspace_meta.id).await {
                Ok(mut workspace) => {
                    // NOTE(review): load_workspace already calls this;
                    // the second call is redundant (harmless if the
                    // method is idempotent — confirm).
                    workspace.initialize_default_mock_environments();
                    registry.add_workspace(workspace)?;
                }
                Err(e) => {
                    tracing::warn!("Failed to load workspace {}: {}", workspace_meta.id, e);
                }
            }
        }

        // Restore the previously active workspace; non-fatal if its
        // workspace failed to load above.
        if let Some(active_id) = &serializable.active_workspace {
            if let Err(e) = registry.set_active_workspace(Some(active_id.clone())) {
                tracing::warn!("Failed to set active workspace {}: {}", active_id, e);
            }
        }

        Ok(registry)
    }
467
468 pub async fn save_sync_state(&self, sync_state: &SyncState) -> Result<()> {
470 self.ensure_workspace_dir().await?;
471
472 let file_path = self.sync_state_file_path();
473 let content = serde_yaml::to_string(sync_state)
474 .map_err(|e| Error::generic(format!("Failed to serialize sync state: {}", e)))?;
475
476 fs::write(&file_path, content)
477 .await
478 .map_err(|e| Error::generic(format!("Failed to write sync state file: {}", e)))?;
479
480 Ok(())
481 }
482
483 pub async fn load_sync_state(&self) -> Result<SyncState> {
485 let file_path = self.sync_state_file_path();
486
487 if !file_path.exists() {
488 return Ok(SyncState {
490 last_sync_timestamp: Utc::now(),
491 });
492 }
493
494 let content = fs::read_to_string(&file_path)
495 .await
496 .map_err(|e| Error::generic(format!("Failed to read sync state file: {}", e)))?;
497
498 let sync_state: SyncState = serde_yaml::from_str(&content)
499 .map_err(|e| Error::generic(format!("Failed to deserialize sync state: {}", e)))?;
500
501 Ok(sync_state)
502 }
503
504 pub async fn list_workspace_ids(&self) -> Result<Vec<EntityId>> {
506 if !self.base_dir.exists() {
507 return Ok(Vec::new());
508 }
509
510 let mut workspace_ids = Vec::new();
511
512 let mut entries = fs::read_dir(&self.base_dir)
513 .await
514 .map_err(|e| Error::generic(format!("Failed to read workspace directory: {}", e)))?;
515
516 while let Some(entry) = entries
517 .next_entry()
518 .await
519 .map_err(|e| Error::generic(format!("Failed to read directory entry: {}", e)))?
520 {
521 let path = entry.path();
522 if path.is_file() {
523 if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
524 if file_name != "registry.yaml" && file_name.ends_with(".yaml") {
525 if let Some(id) = file_name.strip_suffix(".yaml") {
526 workspace_ids.push(id.to_string());
527 }
528 }
529 }
530 }
531 }
532
533 Ok(workspace_ids)
534 }
535
    /// Persist the registry index and every workspace it contains.
    /// Stops at the first failing write.
    pub async fn save_full_registry(&self, registry: &WorkspaceRegistry) -> Result<()> {
        self.save_registry(registry).await?;

        for workspace in registry.get_workspaces() {
            self.save_workspace(workspace).await?;
        }

        Ok(())
    }
548
    /// Alias for [`Self::load_registry`], which already hydrates every
    /// workspace from its own file.
    pub async fn load_full_registry(&self) -> Result<WorkspaceRegistry> {
        self.load_registry().await
    }
553
554 pub async fn backup_workspace(&self, workspace_id: &str, backup_dir: &Path) -> Result<PathBuf> {
556 let workspace_file = self.workspace_file_path(workspace_id);
557
558 if !workspace_file.exists() {
559 return Err(Error::generic(format!("Workspace {} does not exist", workspace_id)));
560 }
561
562 if !backup_dir.exists() {
564 fs::create_dir_all(backup_dir)
565 .await
566 .map_err(|e| Error::generic(format!("Failed to create backup directory: {}", e)))?;
567 }
568
569 let timestamp = Utc::now().format("%Y%m%d_%H%M%S");
571 let backup_filename = format!("{}_{}.yaml", workspace_id, timestamp);
572 let backup_path = backup_dir.join(backup_filename);
573
574 fs::copy(&workspace_file, &backup_path)
576 .await
577 .map_err(|e| Error::generic(format!("Failed to create backup: {}", e)))?;
578
579 Ok(backup_path)
580 }
581
582 pub async fn restore_workspace(&self, backup_path: &Path) -> Result<EntityId> {
584 if !backup_path.exists() {
585 return Err(Error::generic(format!("Backup file does not exist: {:?}", backup_path)));
586 }
587
588 let content = fs::read_to_string(backup_path)
590 .await
591 .map_err(|e| Error::generic(format!("Failed to read backup file: {}", e)))?;
592
593 let workspace: Workspace = serde_yaml::from_str(&content)
594 .map_err(|e| Error::generic(format!("Failed to deserialize backup: {}", e)))?;
595
596 self.save_workspace(&workspace).await?;
598
599 Ok(workspace.id)
600 }
601
602 pub async fn cleanup_old_backups(&self, backup_dir: &Path, keep_count: usize) -> Result<usize> {
604 if !backup_dir.exists() {
605 return Ok(0);
606 }
607
608 let mut backup_files = Vec::new();
609
610 let mut entries = fs::read_dir(backup_dir)
611 .await
612 .map_err(|e| Error::generic(format!("Failed to read backup directory: {}", e)))?;
613
614 while let Some(entry) = entries
615 .next_entry()
616 .await
617 .map_err(|e| Error::generic(format!("Failed to read backup entry: {}", e)))?
618 {
619 let path = entry.path();
620 if path.is_file() {
621 if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
622 if file_name.ends_with(".yaml") {
623 if let Ok(metadata) = entry.metadata().await {
624 if let Ok(modified) = metadata.modified() {
625 backup_files.push((path, modified));
626 }
627 }
628 }
629 }
630 }
631 }
632
633 backup_files.sort_by(|a, b| b.1.cmp(&a.1));
635
636 let mut removed_count = 0;
638 for (path, _) in backup_files.iter().skip(keep_count) {
639 if fs::remove_file(path).await.is_ok() {
640 removed_count += 1;
641 }
642 }
643
644 Ok(removed_count)
645 }
646
647 #[allow(clippy::too_many_arguments)]
649 pub async fn sync_to_directory_advanced(
650 &self,
651 target_dir: &str,
652 strategy: &str,
653 workspace_ids: Option<&str>,
654 structure: &str,
655 include_meta: bool,
656 force: bool,
657 filename_pattern: &str,
658 exclude_pattern: Option<&str>,
659 dry_run: bool,
660 ) -> Result<SyncResult> {
661 let target_path = PathBuf::from(target_dir);
662
663 if !dry_run && !target_path.exists() {
665 fs::create_dir_all(&target_path)
666 .await
667 .map_err(|e| Error::generic(format!("Failed to create target directory: {}", e)))?;
668 }
669
670 let sync_strategy = match strategy {
672 "full" => SyncStrategy::Full,
673 "incremental" => SyncStrategy::Incremental,
674 "selective" => {
675 if let Some(ids) = workspace_ids {
676 let workspace_list = ids.split(',').map(|s| s.trim().to_string()).collect();
677 SyncStrategy::Selective(workspace_list)
678 } else {
679 return Err(Error::generic("Selective strategy requires workspace IDs"));
680 }
681 }
682 _ => return Err(Error::generic(format!("Unknown sync strategy: {}", strategy))),
683 };
684
685 let dir_structure = match structure {
687 "flat" => DirectoryStructure::Flat,
688 "nested" => DirectoryStructure::Nested,
689 "grouped" => DirectoryStructure::Grouped,
690 _ => return Err(Error::generic(format!("Unknown directory structure: {}", structure))),
691 };
692
693 let mut workspaces_to_sync = self.get_workspaces_for_sync(&sync_strategy).await?;
695
696 if let Some(exclude) = exclude_pattern {
698 if let Ok(regex) = regex::Regex::new(exclude) {
699 workspaces_to_sync.retain(|id| !regex.is_match(id));
700 }
701 }
702
703 let mut result = SyncResult {
704 synced_workspaces: 0,
705 synced_requests: 0,
706 files_created: 0,
707 target_dir: target_path.clone(),
708 };
709
710 for workspace_id in workspaces_to_sync {
712 if let Ok(workspace) = self.load_workspace(&workspace_id).await {
713 let workspace_result = self
714 .sync_workspace_to_directory_advanced(
715 &workspace,
716 &target_path,
717 &dir_structure,
718 include_meta,
719 force,
720 filename_pattern,
721 dry_run,
722 )
723 .await?;
724
725 result.synced_workspaces += 1;
726 result.synced_requests += workspace_result.requests_count;
727 result.files_created += workspace_result.files_created;
728 }
729 }
730
731 if let SyncStrategy::Incremental = sync_strategy {
733 let new_sync_state = SyncState {
734 last_sync_timestamp: Utc::now(),
735 };
736 if let Err(e) = self.save_sync_state(&new_sync_state).await {
737 tracing::warn!("Failed to save sync state: {}", e);
738 }
739 }
740
741 Ok(result)
742 }
743
    /// Write a single workspace into `target_dir` using the requested
    /// layout. Returns how many request entries and files were produced;
    /// counts are still incremented during a dry run even though nothing
    /// is written.
    ///
    /// Layouts:
    /// * `Flat`    — one `<pattern>.yaml` per workspace in `target_dir`.
    /// * `Nested`  — `<pattern>/workspace.yaml` plus `<pattern>/requests/*`.
    /// * `Grouped` — shared `workspaces/` and `requests/` trees.
    ///
    /// NOTE(review): the metadata file is skipped (and not counted)
    /// during a dry run, unlike the other files which are counted —
    /// confirm whether that asymmetry is intended.
    #[allow(clippy::too_many_arguments)]
    async fn sync_workspace_to_directory_advanced(
        &self,
        workspace: &Workspace,
        target_dir: &Path,
        structure: &DirectoryStructure,
        include_meta: bool,
        force: bool,
        filename_pattern: &str,
        dry_run: bool,
    ) -> Result<WorkspaceSyncResult> {
        let mut result = WorkspaceSyncResult {
            requests_count: 0,
            files_created: 0,
        };

        match structure {
            DirectoryStructure::Flat => {
                let export = self.create_workspace_export(workspace).await?;
                let filename = self.generate_filename(filename_pattern, workspace);
                let file_path = target_dir.join(format!("{}.yaml", filename));

                // Existing files are only overwritten with `force`.
                if force || !file_path.exists() {
                    if !dry_run {
                        let content = serde_yaml::to_string(&export).map_err(|e| {
                            Error::generic(format!("Failed to serialize workspace: {}", e))
                        })?;

                        fs::write(&file_path, content).await.map_err(|e| {
                            Error::generic(format!("Failed to write workspace file: {}", e))
                        })?;
                    }
                    result.files_created += 1;
                }
            }

            DirectoryStructure::Nested => {
                // One directory per workspace, named from the pattern.
                let workspace_dir =
                    target_dir.join(self.generate_filename(filename_pattern, workspace));
                if !dry_run && !workspace_dir.exists() {
                    fs::create_dir_all(&workspace_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create workspace directory: {}", e))
                    })?;
                }

                let export = self.create_workspace_export(workspace).await?;
                let workspace_file = workspace_dir.join("workspace.yaml");

                if force || !workspace_file.exists() {
                    if !dry_run {
                        let content = serde_yaml::to_string(&export).map_err(|e| {
                            Error::generic(format!("Failed to serialize workspace: {}", e))
                        })?;

                        fs::write(&workspace_file, content).await.map_err(|e| {
                            Error::generic(format!("Failed to write workspace file: {}", e))
                        })?;
                    }
                    result.files_created += 1;
                }

                // Requests go into a sibling "requests" subdirectory.
                let requests_dir = workspace_dir.join("requests");
                if !dry_run && !requests_dir.exists() {
                    fs::create_dir_all(&requests_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create requests directory: {}", e))
                    })?;
                }

                result.requests_count += self
                    .export_workspace_requests_advanced(workspace, &requests_dir, force, dry_run)
                    .await?;
            }

            DirectoryStructure::Grouped => {
                // Shared trees across all workspaces.
                let requests_dir = target_dir.join("requests");
                let workspaces_dir = target_dir.join("workspaces");

                if !dry_run {
                    for dir in [&requests_dir, &workspaces_dir] {
                        if !dir.exists() {
                            fs::create_dir_all(dir).await.map_err(|e| {
                                Error::generic(format!("Failed to create directory: {}", e))
                            })?;
                        }
                    }
                }

                let export = self.create_workspace_export(workspace).await?;
                let filename = self.generate_filename(filename_pattern, workspace);
                let workspace_file = workspaces_dir.join(format!("{}.yaml", filename));

                if force || !workspace_file.exists() {
                    if !dry_run {
                        let content = serde_yaml::to_string(&export).map_err(|e| {
                            Error::generic(format!("Failed to serialize workspace: {}", e))
                        })?;

                        fs::write(&workspace_file, content).await.map_err(|e| {
                            Error::generic(format!("Failed to write workspace file: {}", e))
                        })?;
                    }
                    result.files_created += 1;
                }

                result.requests_count += self
                    .export_workspace_requests_grouped_advanced(
                        workspace,
                        &requests_dir,
                        force,
                        dry_run,
                    )
                    .await?;
            }
        }

        if include_meta && !dry_run {
            self.create_metadata_file(workspace, target_dir, structure).await?;
            result.files_created += 1;
        }

        Ok(result)
    }
873
874 fn generate_filename(&self, pattern: &str, workspace: &Workspace) -> String {
876 let timestamp = Utc::now().format("%Y%m%d_%H%M%S");
877
878 pattern
879 .replace("{name}", &self.sanitize_filename(&workspace.name))
880 .replace("{id}", &workspace.id)
881 .replace("{timestamp}", ×tamp.to_string())
882 }
883
884 async fn export_workspace_requests_advanced(
886 &self,
887 workspace: &Workspace,
888 requests_dir: &Path,
889 force: bool,
890 dry_run: bool,
891 ) -> Result<usize> {
892 let mut count = 0;
893
894 for request in &workspace.requests {
895 let file_path =
896 requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
897 if force || !file_path.exists() {
898 if !dry_run {
899 let exported = self.convert_request_to_exported(request, "");
900 let content = serde_yaml::to_string(&exported).map_err(|e| {
901 Error::generic(format!("Failed to serialize request: {}", e))
902 })?;
903
904 fs::write(&file_path, content).await.map_err(|e| {
905 Error::generic(format!("Failed to write request file: {}", e))
906 })?;
907 }
908 count += 1;
909 }
910 }
911
912 for folder in &workspace.folders {
914 count += self
915 .export_folder_requests_advanced(folder, requests_dir, force, &folder.name, dry_run)
916 .await?;
917 }
918
919 Ok(count)
920 }
921
922 async fn export_folder_requests_advanced(
924 &self,
925 folder: &Folder,
926 requests_dir: &Path,
927 force: bool,
928 folder_path: &str,
929 dry_run: bool,
930 ) -> Result<usize> {
931 use std::collections::VecDeque;
932
933 let mut count = 0;
934 let mut queue = VecDeque::new();
935
936 queue.push_back((folder, folder_path.to_string()));
938
939 while let Some((current_folder, current_path)) = queue.pop_front() {
940 for request in ¤t_folder.requests {
942 let file_path =
943 requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
944 if force || !file_path.exists() {
945 if !dry_run {
946 let exported = self.convert_request_to_exported(request, ¤t_path);
947 let content = serde_yaml::to_string(&exported).map_err(|e| {
948 Error::generic(format!("Failed to serialize request: {}", e))
949 })?;
950
951 fs::write(&file_path, content).await.map_err(|e| {
952 Error::generic(format!("Failed to write request file: {}", e))
953 })?;
954 }
955 count += 1;
956 }
957 }
958
959 for subfolder in ¤t_folder.folders {
961 let subfolder_path = if current_path.is_empty() {
962 subfolder.name.clone()
963 } else {
964 format!("{}/{}", current_path, subfolder.name)
965 };
966 queue.push_back((subfolder, subfolder_path));
967 }
968 }
969
970 Ok(count)
971 }
972
973 async fn export_workspace_requests_grouped_advanced(
975 &self,
976 workspace: &Workspace,
977 requests_dir: &Path,
978 force: bool,
979 dry_run: bool,
980 ) -> Result<usize> {
981 let mut count = 0;
982 let workspace_requests_dir = requests_dir.join(self.sanitize_filename(&workspace.name));
983
984 if !dry_run && !workspace_requests_dir.exists() {
985 fs::create_dir_all(&workspace_requests_dir).await.map_err(|e| {
986 Error::generic(format!("Failed to create workspace requests directory: {}", e))
987 })?;
988 }
989
990 count += self
991 .export_workspace_requests_advanced(workspace, &workspace_requests_dir, force, dry_run)
992 .await?;
993 Ok(count)
994 }
995
    /// Simpler variant of [`Self::sync_to_directory_advanced`]: no
    /// filename pattern, no exclude filter, no dry run — files are named
    /// after the sanitized workspace name and written when missing (or
    /// unconditionally with `force`).
    ///
    /// `strategy` is "full" | "incremental" | "selective" (the latter
    /// requires `workspace_ids`, comma-separated); `structure` is
    /// "flat" | "nested" | "grouped".
    pub async fn sync_to_directory(
        &self,
        target_dir: &str,
        strategy: &str,
        workspace_ids: Option<&str>,
        structure: &str,
        include_meta: bool,
        force: bool,
    ) -> Result<SyncResult> {
        let target_path = PathBuf::from(target_dir);

        if !target_path.exists() {
            fs::create_dir_all(&target_path)
                .await
                .map_err(|e| Error::generic(format!("Failed to create target directory: {}", e)))?;
        }

        // Parse the textual strategy into the typed enum.
        let sync_strategy = match strategy {
            "full" => SyncStrategy::Full,
            "incremental" => SyncStrategy::Incremental,
            "selective" => {
                if let Some(ids) = workspace_ids {
                    let workspace_list = ids.split(',').map(|s| s.trim().to_string()).collect();
                    SyncStrategy::Selective(workspace_list)
                } else {
                    return Err(Error::generic("Selective strategy requires workspace IDs"));
                }
            }
            _ => return Err(Error::generic(format!("Unknown sync strategy: {}", strategy))),
        };

        let dir_structure = match structure {
            "flat" => DirectoryStructure::Flat,
            "nested" => DirectoryStructure::Nested,
            "grouped" => DirectoryStructure::Grouped,
            _ => return Err(Error::generic(format!("Unknown directory structure: {}", structure))),
        };

        let workspaces_to_sync = self.get_workspaces_for_sync(&sync_strategy).await?;

        let mut result = SyncResult {
            synced_workspaces: 0,
            synced_requests: 0,
            files_created: 0,
            target_dir: target_path.clone(),
        };

        for workspace_id in workspaces_to_sync {
            // A workspace that fails to load is skipped, not fatal.
            if let Ok(workspace) = self.load_workspace(&workspace_id).await {
                let workspace_result = self
                    .sync_workspace_to_directory(
                        &workspace,
                        &target_path,
                        &dir_structure,
                        include_meta,
                        force,
                    )
                    .await?;

                result.synced_workspaces += 1;
                result.synced_requests += workspace_result.requests_count;
                result.files_created += workspace_result.files_created;
            }
        }

        // Record the watermark so the next incremental sync only picks
        // up files modified after this point (failure is non-fatal).
        if let SyncStrategy::Incremental = sync_strategy {
            let new_sync_state = SyncState {
                last_sync_timestamp: Utc::now(),
            };
            if let Err(e) = self.save_sync_state(&new_sync_state).await {
                tracing::warn!("Failed to save sync state: {}", e);
            }
        }

        Ok(result)
    }
1079
1080 async fn get_workspaces_for_sync(&self, strategy: &SyncStrategy) -> Result<Vec<String>> {
1082 match strategy {
1083 SyncStrategy::Full => self.list_workspace_ids().await,
1084 SyncStrategy::Incremental => {
1085 let sync_state = self.load_sync_state().await?;
1087 let last_sync = sync_state.last_sync_timestamp;
1088
1089 let all_workspace_ids = self.list_workspace_ids().await?;
1091
1092 let mut modified_workspaces = Vec::new();
1094 for workspace_id in all_workspace_ids {
1095 let file_path = self.workspace_file_path(&workspace_id);
1096 if let Ok(metadata) = fs::metadata(&file_path).await {
1097 if let Ok(modified_time) = metadata.modified() {
1098 let modified_datetime = DateTime::<Utc>::from(modified_time);
1099 if modified_datetime > last_sync {
1100 modified_workspaces.push(workspace_id);
1101 }
1102 }
1103 }
1104 }
1105
1106 Ok(modified_workspaces)
1107 }
1108 SyncStrategy::Selective(ids) => Ok(ids.clone()),
1109 }
1110 }
1111
    /// Write a single workspace into `target_dir` using the requested
    /// layout (simple, non-dry-run variant used by
    /// [`Self::sync_to_directory`]). File names come from the sanitized
    /// workspace name; existing files are only overwritten with `force`.
    async fn sync_workspace_to_directory(
        &self,
        workspace: &Workspace,
        target_dir: &Path,
        structure: &DirectoryStructure,
        include_meta: bool,
        force: bool,
    ) -> Result<WorkspaceSyncResult> {
        let mut result = WorkspaceSyncResult {
            requests_count: 0,
            files_created: 0,
        };

        match structure {
            DirectoryStructure::Flat => {
                // Single YAML file per workspace directly in target_dir.
                let export = self.create_workspace_export(workspace).await?;
                let file_path =
                    target_dir.join(format!("{}.yaml", self.sanitize_filename(&workspace.name)));

                if force || !file_path.exists() {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::generic(format!("Failed to serialize workspace: {}", e))
                    })?;

                    fs::write(&file_path, content).await.map_err(|e| {
                        Error::generic(format!("Failed to write workspace file: {}", e))
                    })?;

                    result.files_created += 1;
                }
            }

            DirectoryStructure::Nested => {
                // One directory per workspace with workspace.yaml inside.
                let workspace_dir = target_dir.join(self.sanitize_filename(&workspace.name));
                if !workspace_dir.exists() {
                    fs::create_dir_all(&workspace_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create workspace directory: {}", e))
                    })?;
                }

                let export = self.create_workspace_export(workspace).await?;
                let workspace_file = workspace_dir.join("workspace.yaml");

                if force || !workspace_file.exists() {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::generic(format!("Failed to serialize workspace: {}", e))
                    })?;

                    fs::write(&workspace_file, content).await.map_err(|e| {
                        Error::generic(format!("Failed to write workspace file: {}", e))
                    })?;

                    result.files_created += 1;
                }

                // Requests go into a sibling "requests" subdirectory.
                let requests_dir = workspace_dir.join("requests");
                if !requests_dir.exists() {
                    fs::create_dir_all(&requests_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create requests directory: {}", e))
                    })?;
                }

                result.requests_count +=
                    self.export_workspace_requests(workspace, &requests_dir, force).await?;
            }

            DirectoryStructure::Grouped => {
                // Shared "requests" and "workspaces" trees for all workspaces.
                let requests_dir = target_dir.join("requests");
                let workspaces_dir = target_dir.join("workspaces");

                for dir in [&requests_dir, &workspaces_dir] {
                    if !dir.exists() {
                        fs::create_dir_all(dir).await.map_err(|e| {
                            Error::generic(format!("Failed to create directory: {}", e))
                        })?;
                    }
                }

                let export = self.create_workspace_export(workspace).await?;
                let workspace_file = workspaces_dir
                    .join(format!("{}.yaml", self.sanitize_filename(&workspace.name)));

                if force || !workspace_file.exists() {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::generic(format!("Failed to serialize workspace: {}", e))
                    })?;

                    fs::write(&workspace_file, content).await.map_err(|e| {
                        Error::generic(format!("Failed to write workspace file: {}", e))
                    })?;

                    result.files_created += 1;
                }

                result.requests_count +=
                    self.export_workspace_requests_grouped(workspace, &requests_dir, force).await?;
            }
        }

        if include_meta {
            self.create_metadata_file(workspace, target_dir, structure).await?;
            result.files_created += 1;
        }

        Ok(result)
    }
1225
1226 async fn create_workspace_export(&self, workspace: &Workspace) -> Result<WorkspaceExport> {
1228 let mut requests = HashMap::new();
1229
1230 self.collect_requests_from_workspace(workspace, &mut requests, "".to_string());
1232
1233 let metadata = WorkspaceMetadata {
1234 id: workspace.id.clone(),
1235 name: workspace.name.clone(),
1236 description: workspace.description.clone(),
1237 exported_at: Utc::now(),
1238 request_count: requests.len(),
1239 folder_count: workspace.folders.len(),
1240 };
1241
1242 let config = WorkspaceConfig {
1243 auth: workspace.config.auth.as_ref().and_then(AuthConfig::from_config_auth),
1244 base_url: workspace.config.base_url.clone(),
1245 variables: workspace.config.global_environment.variables.clone(),
1246 reality_level: workspace.config.reality_level,
1247 ai_mode: None, };
1249
1250 Ok(WorkspaceExport {
1251 metadata,
1252 config,
1253 requests,
1254 })
1255 }
1256
1257 fn collect_requests_from_workspace(
1259 &self,
1260 workspace: &Workspace,
1261 requests: &mut HashMap<String, ExportedRequest>,
1262 folder_path: String,
1263 ) {
1264 for request in &workspace.requests {
1266 let exported = self.convert_request_to_exported(request, &folder_path);
1267 requests.insert(request.id.clone(), exported);
1268 }
1269
1270 for folder in &workspace.folders {
1272 let current_path = if folder_path.is_empty() {
1273 folder.name.clone()
1274 } else {
1275 format!("{}/{}", folder_path, folder.name)
1276 };
1277
1278 for request in &folder.requests {
1279 let exported = self.convert_request_to_exported(request, ¤t_path);
1280 requests.insert(request.id.clone(), exported);
1281 }
1282
1283 self.collect_requests_from_folders(folder, requests, current_path);
1285 }
1286 }
1287
1288 fn collect_requests_from_folders(
1290 &self,
1291 folder: &Folder,
1292 requests: &mut HashMap<String, ExportedRequest>,
1293 folder_path: String,
1294 ) {
1295 for subfolder in &folder.folders {
1296 let current_path = format!("{}/{}", folder_path, subfolder.name);
1297
1298 for request in &subfolder.requests {
1299 let exported = self.convert_request_to_exported(request, ¤t_path);
1300 requests.insert(request.id.clone(), exported);
1301 }
1302
1303 self.collect_requests_from_folders(subfolder, requests, current_path);
1304 }
1305 }
1306
1307 fn convert_request_to_exported(
1309 &self,
1310 request: &MockRequest,
1311 folder_path: &str,
1312 ) -> ExportedRequest {
1313 ExportedRequest {
1314 id: request.id.clone(),
1315 name: request.name.clone(),
1316 method: format!("{:?}", request.method),
1317 path: request.path.clone(),
1318 folder_path: folder_path.to_string(),
1319 headers: request.headers.clone(),
1320 query_params: request.query_params.clone(),
1321 body: request.body.clone(),
1322 response_status: Some(request.response.status_code),
1323 response_body: request.response.body.clone(),
1324 response_headers: request.response.headers.clone(),
1325 delay: request.response.delay_ms,
1326 }
1327 }
1328
1329 pub async fn export_workspace_encrypted(
1331 &self,
1332 workspace: &Workspace,
1333 output_path: &Path,
1334 ) -> Result<EncryptedExportResult> {
1335 if !workspace.config.auto_encryption.enabled {
1337 return Err(Error::generic("Encryption is not enabled for this workspace. Enable encryption in workspace settings first."));
1338 }
1339
1340 let encryption_config = workspace.config.auto_encryption.clone();
1342 let processor = AutoEncryptionProcessor::new(&workspace.id, encryption_config);
1343
1344 let mut filtered_workspace = workspace.to_filtered_for_sync();
1346
1347 self.encrypt_workspace_data(&mut filtered_workspace, &processor)?;
1349
1350 let export = self.create_workspace_export(&filtered_workspace).await?;
1352
1353 let export_json = serde_json::to_string_pretty(&export)
1355 .map_err(|e| Error::generic(format!("Failed to serialize export: {}", e)))?;
1356
1357 let encrypted_data = utils::encrypt_for_workspace(&workspace.id, &export_json)?;
1358
1359 let key_manager = WorkspaceKeyManager::new();
1361 let backup_key = key_manager.generate_workspace_key_backup(&workspace.id)?;
1362
1363 fs::write(output_path, &encrypted_data)
1365 .await
1366 .map_err(|e| Error::generic(format!("Failed to write encrypted export: {}", e)))?;
1367
1368 Ok(EncryptedExportResult {
1369 output_path: output_path.to_path_buf(),
1370 backup_key,
1371 exported_at: Utc::now(),
1372 workspace_name: workspace.name.clone(),
1373 encryption_enabled: true,
1374 })
1375 }
1376
1377 pub async fn import_workspace_encrypted(
1379 &self,
1380 encrypted_file: &Path,
1381 _workspace_name: Option<&str>,
1382 _registry: &mut WorkspaceRegistry,
1383 ) -> Result<EncryptedImportResult> {
1384 let _encrypted_data = fs::read_to_string(encrypted_file)
1386 .await
1387 .map_err(|e| Error::generic(format!("Failed to read encrypted file: {}", e)))?;
1388
1389 Err(Error::generic("Encrypted import requires workspace ID and backup key. Use import_workspace_encrypted_with_key instead."))
1392 }
1393
1394 pub async fn import_workspace_encrypted_with_key(
1396 &self,
1397 encrypted_file: &Path,
1398 workspace_id: &str,
1399 backup_key: &str,
1400 workspace_name: Option<&str>,
1401 registry: &mut WorkspaceRegistry,
1402 ) -> Result<EncryptedImportResult> {
1403 let key_manager = WorkspaceKeyManager::new();
1405 if !key_manager.has_workspace_key(workspace_id) {
1406 key_manager.restore_workspace_key_from_backup(workspace_id, backup_key)?;
1407 }
1408
1409 let encrypted_data = fs::read_to_string(encrypted_file)
1411 .await
1412 .map_err(|e| Error::generic(format!("Failed to read encrypted file: {}", e)))?;
1413
1414 let decrypted_json = utils::decrypt_for_workspace(workspace_id, &encrypted_data)?;
1415
1416 let export: WorkspaceExport = serde_json::from_str(&decrypted_json)
1418 .map_err(|e| Error::generic(format!("Failed to parse decrypted export: {}", e)))?;
1419
1420 let workspace = self.convert_export_to_workspace(&export, workspace_name)?;
1422
1423 let imported_id = registry.add_workspace(workspace)?;
1425
1426 Ok(EncryptedImportResult {
1427 workspace_id: imported_id,
1428 workspace_name: export.metadata.name.clone(),
1429 imported_at: Utc::now(),
1430 request_count: export.requests.len(),
1431 encryption_restored: true,
1432 })
1433 }
1434
1435 fn encrypt_workspace_data(
1437 &self,
1438 workspace: &mut Workspace,
1439 processor: &AutoEncryptionProcessor,
1440 ) -> Result<()> {
1441 for env in &mut workspace.config.environments {
1443 processor.process_env_vars(&mut env.variables)?;
1444 }
1445 processor.process_env_vars(&mut workspace.config.global_environment.variables)?;
1446
1447 Ok(())
1451 }
1452
1453 fn convert_export_to_workspace(
1455 &self,
1456 export: &WorkspaceExport,
1457 name_override: Option<&str>,
1458 ) -> Result<Workspace> {
1459 let mut workspace =
1460 Workspace::new(name_override.unwrap_or(&export.metadata.name).to_string());
1461
1462 if let Some(desc) = &export.metadata.description {
1464 workspace.description = Some(desc.clone());
1465 }
1466
1467 for exported_request in export.requests.values() {
1469 let method = self.parse_http_method(&exported_request.method)?;
1471 let mut request = MockRequest::new(
1472 method,
1473 exported_request.path.clone(),
1474 exported_request.name.clone(),
1475 );
1476
1477 if let Some(status) = exported_request.response_status {
1479 request.response.status_code = status;
1480 }
1481
1482 if let Some(body) = &exported_request.response_body {
1484 request.response.body = Some(body.clone());
1485 }
1486 request.response.headers = exported_request.response_headers.clone();
1487 if let Some(delay) = exported_request.delay {
1488 request.response.delay_ms = Some(delay);
1489 }
1490
1491 workspace.add_request(request)?;
1492 }
1493
1494 workspace.config.global_environment.variables = export.config.variables.clone();
1496
1497 Ok(workspace)
1498 }
1499
1500 fn parse_http_method(&self, method_str: &str) -> Result<crate::routing::HttpMethod> {
1502 match method_str.to_uppercase().as_str() {
1503 "GET" => Ok(crate::routing::HttpMethod::GET),
1504 "POST" => Ok(crate::routing::HttpMethod::POST),
1505 "PUT" => Ok(crate::routing::HttpMethod::PUT),
1506 "DELETE" => Ok(crate::routing::HttpMethod::DELETE),
1507 "PATCH" => Ok(crate::routing::HttpMethod::PATCH),
1508 "HEAD" => Ok(crate::routing::HttpMethod::HEAD),
1509 "OPTIONS" => Ok(crate::routing::HttpMethod::OPTIONS),
1510 _ => Err(Error::generic(format!("Unknown HTTP method: {}", method_str))),
1511 }
1512 }
1513
1514 pub fn check_workspace_for_unencrypted_secrets(
1516 &self,
1517 workspace: &Workspace,
1518 ) -> Result<SecurityCheckResult> {
1519 let mut warnings = Vec::new();
1520 let errors = Vec::new();
1521
1522 self.check_environment_variables(workspace, &mut warnings)?;
1524
1525 let has_warnings = !warnings.is_empty();
1529 let has_errors = !errors.is_empty();
1530
1531 Ok(SecurityCheckResult {
1532 workspace_id: workspace.id.clone(),
1533 workspace_name: workspace.name.clone(),
1534 warnings,
1535 errors,
1536 is_secure: !has_warnings && !has_errors,
1537 recommended_actions: self.generate_security_recommendations(has_warnings, has_errors),
1538 })
1539 }
1540
1541 fn check_environment_variables(
1543 &self,
1544 workspace: &Workspace,
1545 warnings: &mut Vec<SecurityWarning>,
1546 ) -> Result<()> {
1547 let sensitive_keys = [
1548 "password",
1549 "secret",
1550 "key",
1551 "token",
1552 "credential",
1553 "api_key",
1554 "apikey",
1555 "api_secret",
1556 "db_password",
1557 "database_password",
1558 "aws_secret_key",
1559 "aws_session_token",
1560 "private_key",
1561 "authorization",
1562 "auth_token",
1563 "access_token",
1564 "refresh_token",
1565 "cookie",
1566 "session",
1567 "csrf",
1568 "jwt",
1569 "bearer",
1570 ];
1571
1572 for (key, value) in &workspace.config.global_environment.variables {
1574 if self.is_potentially_sensitive(key, value, &sensitive_keys) {
1575 warnings.push(SecurityWarning {
1576 field_type: "environment_variable".to_string(),
1577 field_name: key.clone(),
1578 location: "global_environment".to_string(),
1579 severity: SecuritySeverity::High,
1580 message: format!(
1581 "Potentially sensitive environment variable '{}' detected",
1582 key
1583 ),
1584 suggestion: "Consider encrypting this value or excluding it from exports"
1585 .to_string(),
1586 });
1587 }
1588 }
1589
1590 for env in &workspace.config.environments {
1592 for (key, value) in &env.variables {
1593 if self.is_potentially_sensitive(key, value, &sensitive_keys) {
1594 warnings.push(SecurityWarning {
1595 field_type: "environment_variable".to_string(),
1596 field_name: key.clone(),
1597 location: format!("environment '{}'", env.name),
1598 severity: SecuritySeverity::High,
1599 message: format!("Potentially sensitive environment variable '{}' detected in environment '{}'", key, env.name),
1600 suggestion: "Consider encrypting this value or excluding it from exports".to_string(),
1601 });
1602 }
1603 }
1604 }
1605
1606 Ok(())
1607 }
1608
1609 fn is_potentially_sensitive(&self, key: &str, value: &str, sensitive_keys: &[&str]) -> bool {
1611 let key_lower = key.to_lowercase();
1612
1613 if sensitive_keys.iter().any(|&sensitive| key_lower.contains(sensitive)) {
1615 return true;
1616 }
1617
1618 self.contains_sensitive_patterns(value)
1620 }
1621
1622 fn contains_sensitive_patterns(&self, value: &str) -> bool {
1624 if CREDIT_CARD_PATTERN.is_match(value) {
1626 return true;
1627 }
1628
1629 if SSN_PATTERN.is_match(value) {
1631 return true;
1632 }
1633
1634 if value.len() > 20 && value.chars().any(|c| c.is_alphanumeric()) {
1636 let alphanumeric_count = value.chars().filter(|c| c.is_alphanumeric()).count();
1637 let total_count = value.len();
1638 if alphanumeric_count as f64 / total_count as f64 > 0.8 {
1639 return true;
1640 }
1641 }
1642
1643 false
1644 }
1645
1646 fn generate_security_recommendations(
1648 &self,
1649 has_warnings: bool,
1650 has_errors: bool,
1651 ) -> Vec<String> {
1652 let mut recommendations = Vec::new();
1653
1654 if has_warnings || has_errors {
1655 recommendations.push("Enable encryption for this workspace in settings".to_string());
1656 recommendations.push("Review and encrypt sensitive environment variables".to_string());
1657 recommendations.push("Use encrypted export for sharing workspaces".to_string());
1658 }
1659
1660 if has_errors {
1661 recommendations
1662 .push("CRITICAL: Remove or encrypt sensitive data before proceeding".to_string());
1663 }
1664
1665 recommendations
1666 }
1667
1668 async fn export_workspace_requests(
1670 &self,
1671 workspace: &Workspace,
1672 requests_dir: &Path,
1673 force: bool,
1674 ) -> Result<usize> {
1675 let mut count = 0;
1676
1677 for request in &workspace.requests {
1678 let file_path =
1679 requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
1680 if force || !file_path.exists() {
1681 let exported = self.convert_request_to_exported(request, "");
1682 let content = serde_yaml::to_string(&exported)
1683 .map_err(|e| Error::generic(format!("Failed to serialize request: {}", e)))?;
1684
1685 fs::write(&file_path, content)
1686 .await
1687 .map_err(|e| Error::generic(format!("Failed to write request file: {}", e)))?;
1688
1689 count += 1;
1690 }
1691 }
1692
1693 for folder in &workspace.folders {
1695 count += self.export_folder_requests(folder, requests_dir, force, &folder.name).await?;
1696 }
1697
1698 Ok(count)
1699 }
1700
1701 async fn export_folder_requests(
1703 &self,
1704 folder: &Folder,
1705 requests_dir: &Path,
1706 force: bool,
1707 folder_path: &str,
1708 ) -> Result<usize> {
1709 use std::collections::VecDeque;
1710
1711 let mut count = 0;
1712 let mut queue = VecDeque::new();
1713
1714 queue.push_back((folder, folder_path.to_string()));
1716
1717 while let Some((current_folder, current_path)) = queue.pop_front() {
1718 for request in ¤t_folder.requests {
1720 let file_path =
1721 requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
1722 if force || !file_path.exists() {
1723 let exported = self.convert_request_to_exported(request, ¤t_path);
1724 let content = serde_yaml::to_string(&exported).map_err(|e| {
1725 Error::generic(format!("Failed to serialize request: {}", e))
1726 })?;
1727
1728 fs::write(&file_path, content).await.map_err(|e| {
1729 Error::generic(format!("Failed to write request file: {}", e))
1730 })?;
1731
1732 count += 1;
1733 }
1734 }
1735
1736 for subfolder in ¤t_folder.folders {
1738 let subfolder_path = if current_path.is_empty() {
1739 subfolder.name.clone()
1740 } else {
1741 format!("{}/{}", current_path, subfolder.name)
1742 };
1743 queue.push_back((subfolder, subfolder_path));
1744 }
1745 }
1746
1747 Ok(count)
1748 }
1749
1750 async fn export_workspace_requests_grouped(
1752 &self,
1753 workspace: &Workspace,
1754 requests_dir: &Path,
1755 force: bool,
1756 ) -> Result<usize> {
1757 let mut count = 0;
1758 let workspace_requests_dir = requests_dir.join(self.sanitize_filename(&workspace.name));
1759
1760 if !workspace_requests_dir.exists() {
1761 fs::create_dir_all(&workspace_requests_dir).await.map_err(|e| {
1762 Error::generic(format!("Failed to create workspace requests directory: {}", e))
1763 })?;
1764 }
1765
1766 count += self
1767 .export_workspace_requests(workspace, &workspace_requests_dir, force)
1768 .await?;
1769 Ok(count)
1770 }
1771
1772 async fn create_metadata_file(
1774 &self,
1775 workspace: &Workspace,
1776 target_dir: &Path,
1777 structure: &DirectoryStructure,
1778 ) -> Result<()> {
1779 let metadata = serde_json::json!({
1780 "workspace_id": workspace.id,
1781 "workspace_name": workspace.name,
1782 "description": workspace.description,
1783 "exported_at": Utc::now().to_rfc3339(),
1784 "structure": format!("{:?}", structure),
1785 "version": "1.0",
1786 "source": "mockforge"
1787 });
1788
1789 let metadata_file = target_dir.join(".mockforge-meta.json");
1790 let content = serde_json::to_string_pretty(&metadata)
1791 .map_err(|e| Error::generic(format!("Failed to serialize metadata: {}", e)))?;
1792
1793 fs::write(&metadata_file, content)
1794 .await
1795 .map_err(|e| Error::generic(format!("Failed to write metadata file: {}", e)))?;
1796
1797 Ok(())
1798 }
1799
1800 pub async fn export_reality_preset(
1805 &self,
1806 preset: &crate::RealityPreset,
1807 output_path: &Path,
1808 ) -> Result<()> {
1809 self.ensure_workspace_dir().await?;
1810
1811 let content = if output_path.extension().and_then(|s| s.to_str()) == Some("yaml")
1813 || output_path.extension().and_then(|s| s.to_str()) == Some("yml")
1814 {
1815 serde_yaml::to_string(preset)
1816 .map_err(|e| Error::generic(format!("Failed to serialize preset to YAML: {}", e)))?
1817 } else {
1818 serde_json::to_string_pretty(preset)
1819 .map_err(|e| Error::generic(format!("Failed to serialize preset to JSON: {}", e)))?
1820 };
1821
1822 if let Some(parent) = output_path.parent() {
1824 fs::create_dir_all(parent)
1825 .await
1826 .map_err(|e| Error::generic(format!("Failed to create preset directory: {}", e)))?;
1827 }
1828
1829 fs::write(output_path, content)
1830 .await
1831 .map_err(|e| Error::generic(format!("Failed to write preset file: {}", e)))?;
1832
1833 Ok(())
1834 }
1835
1836 pub async fn import_reality_preset(&self, input_path: &Path) -> Result<crate::RealityPreset> {
1841 let content = fs::read_to_string(input_path)
1842 .await
1843 .map_err(|e| Error::generic(format!("Failed to read preset file: {}", e)))?;
1844
1845 let preset = if input_path
1847 .extension()
1848 .and_then(|s| s.to_str())
1849 .map(|ext| ext == "yaml" || ext == "yml")
1850 .unwrap_or(false)
1851 {
1852 serde_yaml::from_str(&content).map_err(|e| {
1853 Error::generic(format!("Failed to deserialize preset from YAML: {}", e))
1854 })?
1855 } else {
1856 serde_json::from_str(&content).map_err(|e| {
1857 Error::generic(format!("Failed to deserialize preset from JSON: {}", e))
1858 })?
1859 };
1860
1861 Ok(preset)
1862 }
1863
    /// Directory under the persistence root where reality presets are stored.
    pub fn presets_dir(&self) -> PathBuf {
        self.base_dir.join("presets")
    }
1868
1869 pub async fn list_reality_presets(&self) -> Result<Vec<PathBuf>> {
1873 let presets_dir = self.presets_dir();
1874 if !presets_dir.exists() {
1875 return Ok(vec![]);
1876 }
1877
1878 let mut presets = Vec::new();
1879 let mut entries = fs::read_dir(&presets_dir)
1880 .await
1881 .map_err(|e| Error::generic(format!("Failed to read presets directory: {}", e)))?;
1882
1883 while let Some(entry) = entries
1884 .next_entry()
1885 .await
1886 .map_err(|e| Error::generic(format!("Failed to read directory entry: {}", e)))?
1887 {
1888 let path = entry.path();
1889 if path.is_file() {
1890 let ext = path.extension().and_then(|s| s.to_str());
1891 if ext == Some("json") || ext == Some("yaml") || ext == Some("yml") {
1892 presets.push(path);
1893 }
1894 }
1895 }
1896
1897 Ok(presets)
1898 }
1899
1900 fn sanitize_filename(&self, name: &str) -> String {
1902 name.chars()
1903 .map(|c| match c {
1904 '/' | '\\' | ':' | '*' | '?' | '"' | '<' | '>' | '|' => '_',
1905 c if c.is_whitespace() => '_',
1906 c => c,
1907 })
1908 .collect::<String>()
1909 .to_lowercase()
1910 }
1911}
1912
/// Per-workspace tally accumulated while exporting a single workspace to a
/// directory (used internally by the directory-sync code above).
#[derive(Debug)]
struct WorkspaceSyncResult {
    // Number of requests exported for this workspace.
    requests_count: usize,
    // Number of files actually written (files skipped because they already
    // existed are not counted).
    files_created: usize,
}
1921
#[cfg(test)]
mod tests {
    use super::*;
    use crate::workspace::{MockRequest, Workspace};
    use crate::HttpMethod;
    use tempfile::TempDir;

    /// Save a workspace, load it back, and list its id.
    #[tokio::test]
    async fn test_workspace_persistence() {
        let temp_dir = TempDir::new().unwrap();
        let persistence = WorkspacePersistence::new(temp_dir.path());

        let mut workspace = Workspace::new("Test Workspace".to_string());
        let request =
            MockRequest::new(HttpMethod::GET, "/test".to_string(), "Test Request".to_string());
        workspace.add_request(request).unwrap();

        persistence.save_workspace(&workspace).await.unwrap();

        let loaded = persistence.load_workspace(&workspace.id).await.unwrap();
        assert_eq!(loaded.name, workspace.name);
        assert_eq!(loaded.requests.len(), 1);

        let ids = persistence.list_workspace_ids().await.unwrap();
        assert_eq!(ids.len(), 1);
        assert_eq!(ids[0], workspace.id);
    }

    /// Round-trip a registry (two workspaces, one active) through disk.
    #[tokio::test]
    async fn test_registry_persistence() {
        let temp_dir = TempDir::new().unwrap();
        let persistence = WorkspacePersistence::new(temp_dir.path());

        let mut registry = WorkspaceRegistry::new();

        let workspace1 = Workspace::new("Workspace 1".to_string());
        let workspace2 = Workspace::new("Workspace 2".to_string());

        let id1 = registry.add_workspace(workspace1).unwrap();
        let _id2 = registry.add_workspace(workspace2).unwrap();

        registry.set_active_workspace(Some(id1.clone())).unwrap();

        // Source had a mis-encoded `&registry` here (`&reg` collapsed to `®`).
        persistence.save_full_registry(&registry).await.unwrap();

        let loaded_registry = persistence.load_full_registry().await.unwrap();

        assert_eq!(loaded_registry.get_workspaces().len(), 2);
        assert_eq!(loaded_registry.get_active_workspace().unwrap().name, "Workspace 1");
    }

    /// Back up a workspace, delete it, restore from the backup file.
    #[tokio::test]
    async fn test_backup_and_restore() {
        let temp_dir = TempDir::new().unwrap();
        let backup_dir = temp_dir.path().join("backups");
        let persistence = WorkspacePersistence::new(temp_dir.path());

        let workspace = Workspace::new("Test Workspace".to_string());
        persistence.save_workspace(&workspace).await.unwrap();

        let backup_path = persistence.backup_workspace(&workspace.id, &backup_dir).await.unwrap();
        assert!(backup_path.exists());

        persistence.delete_workspace(&workspace.id).await.unwrap();
        assert!(persistence.load_workspace(&workspace.id).await.is_err());

        let restored_id = persistence.restore_workspace(&backup_path).await.unwrap();

        let restored = persistence.load_workspace(&restored_id).await.unwrap();
        assert_eq!(restored.name, "Test Workspace");
    }
}