1use crate::config::AuthConfig as ConfigAuthConfig;
7use crate::encryption::{utils, AutoEncryptionProcessor, WorkspaceKeyManager};
8use crate::workspace::{EntityId, Folder, MockRequest, Workspace, WorkspaceRegistry};
9use crate::{Error, Result};
10use chrono::{DateTime, Utc};
11use once_cell::sync::Lazy;
12use regex::Regex;
13use serde::{Deserialize, Serialize};
14use std::collections::HashMap;
15use std::path::{Path, PathBuf};
16use tokio::fs;
17
// Matches 16-digit card numbers written as four groups of four digits,
// optionally separated by a hyphen or whitespace.
// NOTE(review): presumably consumed by the security-scan code later in this
// file (not visible in this chunk) to flag sensitive values — confirm usage.
static CREDIT_CARD_PATTERN: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"\b\d{4}[-\s]?\d{4}[-\s]?\d{4}[-\s]?\d{4}\b")
        .expect("CREDIT_CARD_PATTERN regex is valid")
});
23
// Matches US Social Security numbers (3-2-4 digit groups, optional hyphen or
// whitespace separators). Companion pattern to CREDIT_CARD_PATTERN above.
static SSN_PATTERN: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"\b\d{3}[-\s]?\d{2}[-\s]?\d{4}\b").expect("SSN_PATTERN regex is valid")
});
27
/// File-system persistence for workspaces: stores each workspace, the
/// workspace registry, and the sync state as YAML files under one base
/// directory.
#[derive(Debug)]
pub struct WorkspacePersistence {
    // Directory holding `<workspace-id>.yaml`, `registry.yaml`, and
    // `sync_state.yaml`.
    base_dir: PathBuf,
}
34
/// On-disk shape of the registry file (`registry.yaml`): the full workspace
/// list plus which workspace (if any) is active.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct SerializableWorkspaceRegistry {
    workspaces: Vec<Workspace>,
    active_workspace: Option<EntityId>,
}
41
/// Persistent marker of when the last sync ran; incremental syncs only pick
/// up workspace files modified after this timestamp.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SyncState {
    pub last_sync_timestamp: DateTime<Utc>,
}
48
/// How a sync run selects workspaces.
#[derive(Debug, Clone, PartialEq)]
pub enum SyncStrategy {
    /// Sync every persisted workspace.
    Full,
    /// Sync only workspaces modified since the last recorded sync.
    Incremental,
    /// Sync exactly the listed workspace ids.
    Selective(Vec<String>),
}
59
/// Output layout for synced files.
#[derive(Debug, Clone, PartialEq)]
pub enum DirectoryStructure {
    /// One export file per workspace directly in the target directory.
    Flat,
    /// One subdirectory per workspace (`workspace.yaml` + `requests/`).
    Nested,
    /// Shared top-level `workspaces/` and `requests/` directories.
    Grouped,
}
70
/// Aggregate counters returned by a sync run.
#[derive(Debug, Clone)]
pub struct SyncResult {
    pub synced_workspaces: usize,
    pub synced_requests: usize,
    // In dry-run mode this counts files that *would* be created.
    pub files_created: usize,
    pub target_dir: PathBuf,
}
83
/// Outcome of an encrypted workspace export.
/// NOTE(review): produced by export code outside this chunk — field
/// semantics (e.g. what `backup_key` contains) should be confirmed there.
#[derive(Debug, Clone)]
pub struct EncryptedExportResult {
    pub output_path: PathBuf,
    pub backup_key: String,
    pub exported_at: DateTime<Utc>,
    pub workspace_name: String,
    pub encryption_enabled: bool,
}
98
/// Outcome of importing an (optionally encrypted) workspace export.
/// NOTE(review): produced by import code outside this chunk.
#[derive(Debug, Clone)]
pub struct EncryptedImportResult {
    pub workspace_id: String,
    pub workspace_name: String,
    pub imported_at: DateTime<Utc>,
    pub request_count: usize,
    pub encryption_restored: bool,
}
113
/// Result of scanning a workspace for sensitive data (see the credit-card /
/// SSN patterns at the top of this file; the scan itself lives outside this
/// chunk).
#[derive(Debug, Clone)]
pub struct SecurityCheckResult {
    pub workspace_id: String,
    pub workspace_name: String,
    pub warnings: Vec<SecurityWarning>,
    pub errors: Vec<SecurityWarning>,
    pub is_secure: bool,
    pub recommended_actions: Vec<String>,
}
130
/// A single finding from a security scan: what kind of field, where it was
/// found, how severe it is, and what to do about it.
#[derive(Debug, Clone)]
pub struct SecurityWarning {
    pub field_type: String,
    pub field_name: String,
    pub location: String,
    pub severity: SecuritySeverity,
    pub message: String,
    pub suggestion: String,
}
147
/// Severity ranking for a [`SecurityWarning`].
#[derive(Debug, Clone, PartialEq)]
pub enum SecuritySeverity {
    Low,
    Medium,
    High,
    Critical,
}
160
/// Self-contained, serializable snapshot of a workspace: metadata, the
/// exportable slice of its config, and all requests keyed by request id.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceExport {
    pub metadata: WorkspaceMetadata,
    pub config: WorkspaceConfig,
    pub requests: HashMap<String, ExportedRequest>,
}
171
/// Descriptive header of a [`WorkspaceExport`], captured at export time.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceMetadata {
    pub id: String,
    pub name: String,
    pub description: Option<String>,
    pub exported_at: DateTime<Utc>,
    pub request_count: usize,
    // Counts only top-level folders (see `create_workspace_export`).
    pub folder_count: usize,
}
188
/// Exportable subset of a workspace's configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceConfig {
    pub auth: Option<AuthConfig>,
    pub base_url: Option<String>,
    pub variables: HashMap<String, String>,
    // Defaults to `None` when absent so older export files still parse.
    #[serde(default)]
    pub reality_level: Option<crate::RealityLevel>,
}
203
/// Flattened, serializable auth configuration: a scheme tag ("jwt",
/// "oauth2", "basic", or "api_key") plus scheme-specific string parameters.
/// Built from the richer crate-level config via `from_config_auth`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuthConfig {
    pub auth_type: String,
    pub params: HashMap<String, String>,
}
212
213impl AuthConfig {
214 pub fn from_config_auth(config_auth: &ConfigAuthConfig) -> Option<Self> {
216 if let Some(jwt) = &config_auth.jwt {
217 let mut params = HashMap::new();
218 if let Some(secret) = &jwt.secret {
219 params.insert("secret".to_string(), secret.clone());
220 }
221 if let Some(rsa_public_key) = &jwt.rsa_public_key {
222 params.insert("rsa_public_key".to_string(), rsa_public_key.clone());
223 }
224 if let Some(ecdsa_public_key) = &jwt.ecdsa_public_key {
225 params.insert("ecdsa_public_key".to_string(), ecdsa_public_key.clone());
226 }
227 if let Some(issuer) = &jwt.issuer {
228 params.insert("issuer".to_string(), issuer.clone());
229 }
230 if let Some(audience) = &jwt.audience {
231 params.insert("audience".to_string(), audience.clone());
232 }
233 if !jwt.algorithms.is_empty() {
234 params.insert("algorithms".to_string(), jwt.algorithms.join(","));
235 }
236 Some(AuthConfig {
237 auth_type: "jwt".to_string(),
238 params,
239 })
240 } else if let Some(oauth2) = &config_auth.oauth2 {
241 let mut params = HashMap::new();
242 params.insert("client_id".to_string(), oauth2.client_id.clone());
243 params.insert("client_secret".to_string(), oauth2.client_secret.clone());
244 params.insert("introspection_url".to_string(), oauth2.introspection_url.clone());
245 if let Some(auth_url) = &oauth2.auth_url {
246 params.insert("auth_url".to_string(), auth_url.clone());
247 }
248 if let Some(token_url) = &oauth2.token_url {
249 params.insert("token_url".to_string(), token_url.clone());
250 }
251 if let Some(token_type_hint) = &oauth2.token_type_hint {
252 params.insert("token_type_hint".to_string(), token_type_hint.clone());
253 }
254 Some(AuthConfig {
255 auth_type: "oauth2".to_string(),
256 params,
257 })
258 } else if let Some(basic_auth) = &config_auth.basic_auth {
259 let mut params = HashMap::new();
260 for (user, pass) in &basic_auth.credentials {
261 params.insert(user.clone(), pass.clone());
262 }
263 Some(AuthConfig {
264 auth_type: "basic".to_string(),
265 params,
266 })
267 } else if let Some(api_key) = &config_auth.api_key {
268 let mut params = HashMap::new();
269 params.insert("header_name".to_string(), api_key.header_name.clone());
270 if let Some(query_name) = &api_key.query_name {
271 params.insert("query_name".to_string(), query_name.clone());
272 }
273 if !api_key.keys.is_empty() {
274 params.insert("keys".to_string(), api_key.keys.join(","));
275 }
276 Some(AuthConfig {
277 auth_type: "api_key".to_string(),
278 params,
279 })
280 } else {
281 None
282 }
283 }
284}
285
/// Serializable form of a single mock request inside an export, with its
/// folder location flattened into a slash-separated `folder_path` string.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExportedRequest {
    pub id: String,
    pub name: String,
    // Stringified via `{:?}` from the method enum (see
    // `convert_request_to_exported`).
    pub method: String,
    pub path: String,
    // "" for top-level requests, "a/b/c" for nested folders.
    pub folder_path: String,
    pub headers: HashMap<String, String>,
    pub query_params: HashMap<String, String>,
    pub body: Option<String>,
    pub response_status: Option<u16>,
    pub response_body: Option<String>,
    pub response_headers: HashMap<String, String>,
    // NOTE(review): presumably a response delay in milliseconds — confirm
    // against the MockRequest definition.
    pub delay: Option<u64>,
}
314
315impl WorkspacePersistence {
316 pub fn new<P: AsRef<Path>>(base_dir: P) -> Self {
318 Self {
319 base_dir: base_dir.as_ref().to_path_buf(),
320 }
321 }
322
    /// Returns the base directory that holds all persistence files.
    pub fn workspace_dir(&self) -> &Path {
        &self.base_dir
    }
327
328 pub fn workspace_file_path(&self, workspace_id: &str) -> PathBuf {
330 self.base_dir.join(format!("{}.yaml", workspace_id))
331 }
332
    /// Path of the registry index file (`registry.yaml`).
    pub fn registry_file_path(&self) -> PathBuf {
        self.base_dir.join("registry.yaml")
    }
337
    /// Path of the sync-state file (`sync_state.yaml`).
    pub fn sync_state_file_path(&self) -> PathBuf {
        self.base_dir.join("sync_state.yaml")
    }
342
343 pub async fn ensure_workspace_dir(&self) -> Result<()> {
345 if !self.base_dir.exists() {
346 fs::create_dir_all(&self.base_dir).await.map_err(|e| {
347 Error::generic(format!("Failed to create workspace directory: {}", e))
348 })?;
349 }
350 Ok(())
351 }
352
353 pub async fn save_workspace(&self, workspace: &Workspace) -> Result<()> {
355 self.ensure_workspace_dir().await?;
356
357 let file_path = self.workspace_file_path(&workspace.id);
358 let content = serde_yaml::to_string(workspace)
359 .map_err(|e| Error::generic(format!("Failed to serialize workspace: {}", e)))?;
360
361 fs::write(&file_path, content)
362 .await
363 .map_err(|e| Error::generic(format!("Failed to write workspace file: {}", e)))?;
364
365 Ok(())
366 }
367
368 pub async fn load_workspace(&self, workspace_id: &str) -> Result<Workspace> {
370 let file_path = self.workspace_file_path(workspace_id);
371
372 if !file_path.exists() {
373 return Err(Error::generic(format!("Workspace file not found: {:?}", file_path)));
374 }
375
376 let content = fs::read_to_string(&file_path)
377 .await
378 .map_err(|e| Error::generic(format!("Failed to read workspace file: {}", e)))?;
379
380 let workspace: Workspace = serde_yaml::from_str(&content)
381 .map_err(|e| Error::generic(format!("Failed to deserialize workspace: {}", e)))?;
382
383 Ok(workspace)
384 }
385
386 pub async fn delete_workspace(&self, workspace_id: &str) -> Result<()> {
388 let file_path = self.workspace_file_path(workspace_id);
389
390 if file_path.exists() {
391 fs::remove_file(&file_path)
392 .await
393 .map_err(|e| Error::generic(format!("Failed to delete workspace file: {}", e)))?;
394 }
395
396 Ok(())
397 }
398
399 pub async fn save_registry(&self, registry: &WorkspaceRegistry) -> Result<()> {
401 self.ensure_workspace_dir().await?;
402
403 let serializable = SerializableWorkspaceRegistry {
404 workspaces: registry.get_workspaces().into_iter().cloned().collect(),
405 active_workspace: registry.get_active_workspace_id().map(|s| s.to_string()),
406 };
407
408 let file_path = self.registry_file_path();
409 let content = serde_yaml::to_string(&serializable)
410 .map_err(|e| Error::generic(format!("Failed to serialize registry: {}", e)))?;
411
412 fs::write(&file_path, content)
413 .await
414 .map_err(|e| Error::generic(format!("Failed to write registry file: {}", e)))?;
415
416 Ok(())
417 }
418
419 pub async fn load_registry(&self) -> Result<WorkspaceRegistry> {
421 let file_path = self.registry_file_path();
422
423 if !file_path.exists() {
424 return Ok(WorkspaceRegistry::new());
426 }
427
428 let content = fs::read_to_string(&file_path)
429 .await
430 .map_err(|e| Error::generic(format!("Failed to read registry file: {}", e)))?;
431
432 let serializable: SerializableWorkspaceRegistry = serde_yaml::from_str(&content)
433 .map_err(|e| Error::generic(format!("Failed to deserialize registry: {}", e)))?;
434
435 let mut registry = WorkspaceRegistry::new();
436
437 for workspace_meta in &serializable.workspaces {
439 match self.load_workspace(&workspace_meta.id).await {
440 Ok(workspace) => {
441 registry.add_workspace(workspace)?;
442 }
443 Err(e) => {
444 tracing::warn!("Failed to load workspace {}: {}", workspace_meta.id, e);
445 }
446 }
447 }
448
449 if let Some(active_id) = &serializable.active_workspace {
451 if let Err(e) = registry.set_active_workspace(Some(active_id.clone())) {
452 tracing::warn!("Failed to set active workspace {}: {}", active_id, e);
453 }
454 }
455
456 Ok(registry)
457 }
458
459 pub async fn save_sync_state(&self, sync_state: &SyncState) -> Result<()> {
461 self.ensure_workspace_dir().await?;
462
463 let file_path = self.sync_state_file_path();
464 let content = serde_yaml::to_string(sync_state)
465 .map_err(|e| Error::generic(format!("Failed to serialize sync state: {}", e)))?;
466
467 fs::write(&file_path, content)
468 .await
469 .map_err(|e| Error::generic(format!("Failed to write sync state file: {}", e)))?;
470
471 Ok(())
472 }
473
474 pub async fn load_sync_state(&self) -> Result<SyncState> {
476 let file_path = self.sync_state_file_path();
477
478 if !file_path.exists() {
479 return Ok(SyncState {
481 last_sync_timestamp: Utc::now(),
482 });
483 }
484
485 let content = fs::read_to_string(&file_path)
486 .await
487 .map_err(|e| Error::generic(format!("Failed to read sync state file: {}", e)))?;
488
489 let sync_state: SyncState = serde_yaml::from_str(&content)
490 .map_err(|e| Error::generic(format!("Failed to deserialize sync state: {}", e)))?;
491
492 Ok(sync_state)
493 }
494
495 pub async fn list_workspace_ids(&self) -> Result<Vec<EntityId>> {
497 if !self.base_dir.exists() {
498 return Ok(Vec::new());
499 }
500
501 let mut workspace_ids = Vec::new();
502
503 let mut entries = fs::read_dir(&self.base_dir)
504 .await
505 .map_err(|e| Error::generic(format!("Failed to read workspace directory: {}", e)))?;
506
507 while let Some(entry) = entries
508 .next_entry()
509 .await
510 .map_err(|e| Error::generic(format!("Failed to read directory entry: {}", e)))?
511 {
512 let path = entry.path();
513 if path.is_file() {
514 if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
515 if file_name != "registry.yaml" && file_name.ends_with(".yaml") {
516 if let Some(id) = file_name.strip_suffix(".yaml") {
517 workspace_ids.push(id.to_string());
518 }
519 }
520 }
521 }
522 }
523
524 Ok(workspace_ids)
525 }
526
    /// Persists the registry index file and then every workspace it
    /// contains, each to its own YAML file.
    pub async fn save_full_registry(&self, registry: &WorkspaceRegistry) -> Result<()> {
        self.save_registry(registry).await?;

        for workspace in registry.get_workspaces() {
            self.save_workspace(workspace).await?;
        }

        Ok(())
    }
539
    /// Loads the registry plus all workspaces; an alias for
    /// [`Self::load_registry`], which already hydrates every workspace.
    pub async fn load_full_registry(&self) -> Result<WorkspaceRegistry> {
        self.load_registry().await
    }
544
545 pub async fn backup_workspace(&self, workspace_id: &str, backup_dir: &Path) -> Result<PathBuf> {
547 let workspace_file = self.workspace_file_path(workspace_id);
548
549 if !workspace_file.exists() {
550 return Err(Error::generic(format!("Workspace {} does not exist", workspace_id)));
551 }
552
553 if !backup_dir.exists() {
555 fs::create_dir_all(backup_dir)
556 .await
557 .map_err(|e| Error::generic(format!("Failed to create backup directory: {}", e)))?;
558 }
559
560 let timestamp = Utc::now().format("%Y%m%d_%H%M%S");
562 let backup_filename = format!("{}_{}.yaml", workspace_id, timestamp);
563 let backup_path = backup_dir.join(backup_filename);
564
565 fs::copy(&workspace_file, &backup_path)
567 .await
568 .map_err(|e| Error::generic(format!("Failed to create backup: {}", e)))?;
569
570 Ok(backup_path)
571 }
572
573 pub async fn restore_workspace(&self, backup_path: &Path) -> Result<EntityId> {
575 if !backup_path.exists() {
576 return Err(Error::generic(format!("Backup file does not exist: {:?}", backup_path)));
577 }
578
579 let content = fs::read_to_string(backup_path)
581 .await
582 .map_err(|e| Error::generic(format!("Failed to read backup file: {}", e)))?;
583
584 let workspace: Workspace = serde_yaml::from_str(&content)
585 .map_err(|e| Error::generic(format!("Failed to deserialize backup: {}", e)))?;
586
587 self.save_workspace(&workspace).await?;
589
590 Ok(workspace.id)
591 }
592
593 pub async fn cleanup_old_backups(&self, backup_dir: &Path, keep_count: usize) -> Result<usize> {
595 if !backup_dir.exists() {
596 return Ok(0);
597 }
598
599 let mut backup_files = Vec::new();
600
601 let mut entries = fs::read_dir(backup_dir)
602 .await
603 .map_err(|e| Error::generic(format!("Failed to read backup directory: {}", e)))?;
604
605 while let Some(entry) = entries
606 .next_entry()
607 .await
608 .map_err(|e| Error::generic(format!("Failed to read backup entry: {}", e)))?
609 {
610 let path = entry.path();
611 if path.is_file() {
612 if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
613 if file_name.ends_with(".yaml") {
614 if let Ok(metadata) = entry.metadata().await {
615 if let Ok(modified) = metadata.modified() {
616 backup_files.push((path, modified));
617 }
618 }
619 }
620 }
621 }
622 }
623
624 backup_files.sort_by(|a, b| b.1.cmp(&a.1));
626
627 let mut removed_count = 0;
629 for (path, _) in backup_files.iter().skip(keep_count) {
630 if fs::remove_file(path).await.is_ok() {
631 removed_count += 1;
632 }
633 }
634
635 Ok(removed_count)
636 }
637
    /// Syncs workspaces into `target_dir` with full control over strategy
    /// ("full" | "incremental" | "selective"), layout ("flat" | "nested" |
    /// "grouped"), a filename pattern (`{name}`, `{id}`, `{timestamp}`),
    /// an optional exclusion regex matched against workspace ids, and a
    /// dry-run mode that counts work without writing files.
    ///
    /// # Errors
    /// Fails on unknown strategy/structure strings, on "selective" without
    /// `workspace_ids`, or on I/O/serialization errors while writing.
    #[allow(clippy::too_many_arguments)]
    pub async fn sync_to_directory_advanced(
        &self,
        target_dir: &str,
        strategy: &str,
        workspace_ids: Option<&str>, // comma-separated, only used by "selective"
        structure: &str,
        include_meta: bool,
        force: bool, // overwrite files that already exist
        filename_pattern: &str,
        exclude_pattern: Option<&str>,
        dry_run: bool,
    ) -> Result<SyncResult> {
        let target_path = PathBuf::from(target_dir);

        if !dry_run && !target_path.exists() {
            fs::create_dir_all(&target_path)
                .await
                .map_err(|e| Error::generic(format!("Failed to create target directory: {}", e)))?;
        }

        let sync_strategy = match strategy {
            "full" => SyncStrategy::Full,
            "incremental" => SyncStrategy::Incremental,
            "selective" => {
                if let Some(ids) = workspace_ids {
                    let workspace_list = ids.split(',').map(|s| s.trim().to_string()).collect();
                    SyncStrategy::Selective(workspace_list)
                } else {
                    return Err(Error::generic("Selective strategy requires workspace IDs"));
                }
            }
            _ => return Err(Error::generic(format!("Unknown sync strategy: {}", strategy))),
        };

        let dir_structure = match structure {
            "flat" => DirectoryStructure::Flat,
            "nested" => DirectoryStructure::Nested,
            "grouped" => DirectoryStructure::Grouped,
            _ => return Err(Error::generic(format!("Unknown directory structure: {}", structure))),
        };

        let mut workspaces_to_sync = self.get_workspaces_for_sync(&sync_strategy).await?;

        // NOTE(review): an invalid exclusion regex is silently ignored here
        // (nothing is filtered out) rather than reported to the caller.
        if let Some(exclude) = exclude_pattern {
            if let Ok(regex) = regex::Regex::new(exclude) {
                workspaces_to_sync.retain(|id| !regex.is_match(id));
            }
        }

        let mut result = SyncResult {
            synced_workspaces: 0,
            synced_requests: 0,
            files_created: 0,
            target_dir: target_path.clone(),
        };

        for workspace_id in workspaces_to_sync {
            // Workspaces that fail to load are skipped without an error.
            if let Ok(workspace) = self.load_workspace(&workspace_id).await {
                let workspace_result = self
                    .sync_workspace_to_directory_advanced(
                        &workspace,
                        &target_path,
                        &dir_structure,
                        include_meta,
                        force,
                        filename_pattern,
                        dry_run,
                    )
                    .await?;

                result.synced_workspaces += 1;
                result.synced_requests += workspace_result.requests_count;
                result.files_created += workspace_result.files_created;
            }
        }

        // Record when this sync ran so the next incremental run only picks
        // up workspaces modified afterwards.
        // NOTE(review): the state is updated even on dry runs.
        if let SyncStrategy::Incremental = sync_strategy {
            let new_sync_state = SyncState {
                last_sync_timestamp: Utc::now(),
            };
            if let Err(e) = self.save_sync_state(&new_sync_state).await {
                tracing::warn!("Failed to save sync state: {}", e);
            }
        }

        Ok(result)
    }
734
    /// Writes one workspace into `target_dir` using the requested layout,
    /// counting the requests exported and the files created (dry runs count
    /// files that *would* be created without writing anything).
    ///
    /// Layouts:
    /// - `Flat`: a single `<pattern>.yaml` export file per workspace.
    /// - `Nested`: `<pattern>/workspace.yaml` plus a `requests/` subtree.
    /// - `Grouped`: shared top-level `workspaces/` and `requests/` dirs,
    ///   with per-workspace request subdirectories.
    #[allow(clippy::too_many_arguments)]
    async fn sync_workspace_to_directory_advanced(
        &self,
        workspace: &Workspace,
        target_dir: &Path,
        structure: &DirectoryStructure,
        include_meta: bool,
        force: bool,
        filename_pattern: &str,
        dry_run: bool,
    ) -> Result<WorkspaceSyncResult> {
        let mut result = WorkspaceSyncResult {
            requests_count: 0,
            files_created: 0,
        };

        match structure {
            DirectoryStructure::Flat => {
                let export = self.create_workspace_export(workspace).await?;
                let filename = self.generate_filename(filename_pattern, workspace);
                let file_path = target_dir.join(format!("{}.yaml", filename));

                // Existing files are skipped unless `force` is set.
                if force || !file_path.exists() {
                    if !dry_run {
                        let content = serde_yaml::to_string(&export).map_err(|e| {
                            Error::generic(format!("Failed to serialize workspace: {}", e))
                        })?;

                        fs::write(&file_path, content).await.map_err(|e| {
                            Error::generic(format!("Failed to write workspace file: {}", e))
                        })?;
                    }
                    result.files_created += 1;
                }
            }

            DirectoryStructure::Nested => {
                // One directory per workspace, named via the pattern.
                let workspace_dir =
                    target_dir.join(self.generate_filename(filename_pattern, workspace));
                if !dry_run && !workspace_dir.exists() {
                    fs::create_dir_all(&workspace_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create workspace directory: {}", e))
                    })?;
                }

                let export = self.create_workspace_export(workspace).await?;
                let workspace_file = workspace_dir.join("workspace.yaml");

                if force || !workspace_file.exists() {
                    if !dry_run {
                        let content = serde_yaml::to_string(&export).map_err(|e| {
                            Error::generic(format!("Failed to serialize workspace: {}", e))
                        })?;

                        fs::write(&workspace_file, content).await.map_err(|e| {
                            Error::generic(format!("Failed to write workspace file: {}", e))
                        })?;
                    }
                    result.files_created += 1;
                }

                let requests_dir = workspace_dir.join("requests");
                if !dry_run && !requests_dir.exists() {
                    fs::create_dir_all(&requests_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create requests directory: {}", e))
                    })?;
                }

                result.requests_count += self
                    .export_workspace_requests_advanced(workspace, &requests_dir, force, dry_run)
                    .await?;
            }

            DirectoryStructure::Grouped => {
                // All workspaces share these two sibling directories.
                let requests_dir = target_dir.join("requests");
                let workspaces_dir = target_dir.join("workspaces");

                if !dry_run {
                    for dir in [&requests_dir, &workspaces_dir] {
                        if !dir.exists() {
                            fs::create_dir_all(dir).await.map_err(|e| {
                                Error::generic(format!("Failed to create directory: {}", e))
                            })?;
                        }
                    }
                }

                let export = self.create_workspace_export(workspace).await?;
                let filename = self.generate_filename(filename_pattern, workspace);
                let workspace_file = workspaces_dir.join(format!("{}.yaml", filename));

                if force || !workspace_file.exists() {
                    if !dry_run {
                        let content = serde_yaml::to_string(&export).map_err(|e| {
                            Error::generic(format!("Failed to serialize workspace: {}", e))
                        })?;

                        fs::write(&workspace_file, content).await.map_err(|e| {
                            Error::generic(format!("Failed to write workspace file: {}", e))
                        })?;
                    }
                    result.files_created += 1;
                }

                result.requests_count += self
                    .export_workspace_requests_grouped_advanced(
                        workspace,
                        &requests_dir,
                        force,
                        dry_run,
                    )
                    .await?;
            }
        }

        // NOTE(review): unlike the writes above, the metadata file is
        // neither written nor counted during dry runs.
        if include_meta && !dry_run {
            self.create_metadata_file(workspace, target_dir, structure).await?;
            result.files_created += 1;
        }

        Ok(result)
    }
864
865 fn generate_filename(&self, pattern: &str, workspace: &Workspace) -> String {
867 let timestamp = Utc::now().format("%Y%m%d_%H%M%S");
868
869 pattern
870 .replace("{name}", &self.sanitize_filename(&workspace.name))
871 .replace("{id}", &workspace.id)
872 .replace("{timestamp}", ×tamp.to_string())
873 }
874
875 async fn export_workspace_requests_advanced(
877 &self,
878 workspace: &Workspace,
879 requests_dir: &Path,
880 force: bool,
881 dry_run: bool,
882 ) -> Result<usize> {
883 let mut count = 0;
884
885 for request in &workspace.requests {
886 let file_path =
887 requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
888 if force || !file_path.exists() {
889 if !dry_run {
890 let exported = self.convert_request_to_exported(request, "");
891 let content = serde_yaml::to_string(&exported).map_err(|e| {
892 Error::generic(format!("Failed to serialize request: {}", e))
893 })?;
894
895 fs::write(&file_path, content).await.map_err(|e| {
896 Error::generic(format!("Failed to write request file: {}", e))
897 })?;
898 }
899 count += 1;
900 }
901 }
902
903 for folder in &workspace.folders {
905 count += self
906 .export_folder_requests_advanced(folder, requests_dir, force, &folder.name, dry_run)
907 .await?;
908 }
909
910 Ok(count)
911 }
912
913 async fn export_folder_requests_advanced(
915 &self,
916 folder: &Folder,
917 requests_dir: &Path,
918 force: bool,
919 folder_path: &str,
920 dry_run: bool,
921 ) -> Result<usize> {
922 use std::collections::VecDeque;
923
924 let mut count = 0;
925 let mut queue = VecDeque::new();
926
927 queue.push_back((folder, folder_path.to_string()));
929
930 while let Some((current_folder, current_path)) = queue.pop_front() {
931 for request in ¤t_folder.requests {
933 let file_path =
934 requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
935 if force || !file_path.exists() {
936 if !dry_run {
937 let exported = self.convert_request_to_exported(request, ¤t_path);
938 let content = serde_yaml::to_string(&exported).map_err(|e| {
939 Error::generic(format!("Failed to serialize request: {}", e))
940 })?;
941
942 fs::write(&file_path, content).await.map_err(|e| {
943 Error::generic(format!("Failed to write request file: {}", e))
944 })?;
945 }
946 count += 1;
947 }
948 }
949
950 for subfolder in ¤t_folder.folders {
952 let subfolder_path = if current_path.is_empty() {
953 subfolder.name.clone()
954 } else {
955 format!("{}/{}", current_path, subfolder.name)
956 };
957 queue.push_back((subfolder, subfolder_path));
958 }
959 }
960
961 Ok(count)
962 }
963
964 async fn export_workspace_requests_grouped_advanced(
966 &self,
967 workspace: &Workspace,
968 requests_dir: &Path,
969 force: bool,
970 dry_run: bool,
971 ) -> Result<usize> {
972 let mut count = 0;
973 let workspace_requests_dir = requests_dir.join(self.sanitize_filename(&workspace.name));
974
975 if !dry_run && !workspace_requests_dir.exists() {
976 fs::create_dir_all(&workspace_requests_dir).await.map_err(|e| {
977 Error::generic(format!("Failed to create workspace requests directory: {}", e))
978 })?;
979 }
980
981 count += self
982 .export_workspace_requests_advanced(workspace, &workspace_requests_dir, force, dry_run)
983 .await?;
984 Ok(count)
985 }
986
987 pub async fn sync_to_directory(
989 &self,
990 target_dir: &str,
991 strategy: &str,
992 workspace_ids: Option<&str>,
993 structure: &str,
994 include_meta: bool,
995 force: bool,
996 ) -> Result<SyncResult> {
997 let target_path = PathBuf::from(target_dir);
998
999 if !target_path.exists() {
1001 fs::create_dir_all(&target_path)
1002 .await
1003 .map_err(|e| Error::generic(format!("Failed to create target directory: {}", e)))?;
1004 }
1005
1006 let sync_strategy = match strategy {
1008 "full" => SyncStrategy::Full,
1009 "incremental" => SyncStrategy::Incremental,
1010 "selective" => {
1011 if let Some(ids) = workspace_ids {
1012 let workspace_list = ids.split(',').map(|s| s.trim().to_string()).collect();
1013 SyncStrategy::Selective(workspace_list)
1014 } else {
1015 return Err(Error::generic("Selective strategy requires workspace IDs"));
1016 }
1017 }
1018 _ => return Err(Error::generic(format!("Unknown sync strategy: {}", strategy))),
1019 };
1020
1021 let dir_structure = match structure {
1023 "flat" => DirectoryStructure::Flat,
1024 "nested" => DirectoryStructure::Nested,
1025 "grouped" => DirectoryStructure::Grouped,
1026 _ => return Err(Error::generic(format!("Unknown directory structure: {}", structure))),
1027 };
1028
1029 let workspaces_to_sync = self.get_workspaces_for_sync(&sync_strategy).await?;
1031
1032 let mut result = SyncResult {
1033 synced_workspaces: 0,
1034 synced_requests: 0,
1035 files_created: 0,
1036 target_dir: target_path.clone(),
1037 };
1038
1039 for workspace_id in workspaces_to_sync {
1041 if let Ok(workspace) = self.load_workspace(&workspace_id).await {
1042 let workspace_result = self
1043 .sync_workspace_to_directory(
1044 &workspace,
1045 &target_path,
1046 &dir_structure,
1047 include_meta,
1048 force,
1049 )
1050 .await?;
1051
1052 result.synced_workspaces += 1;
1053 result.synced_requests += workspace_result.requests_count;
1054 result.files_created += workspace_result.files_created;
1055 }
1056 }
1057
1058 if let SyncStrategy::Incremental = sync_strategy {
1060 let new_sync_state = SyncState {
1061 last_sync_timestamp: Utc::now(),
1062 };
1063 if let Err(e) = self.save_sync_state(&new_sync_state).await {
1064 tracing::warn!("Failed to save sync state: {}", e);
1065 }
1066 }
1067
1068 Ok(result)
1069 }
1070
1071 async fn get_workspaces_for_sync(&self, strategy: &SyncStrategy) -> Result<Vec<String>> {
1073 match strategy {
1074 SyncStrategy::Full => self.list_workspace_ids().await,
1075 SyncStrategy::Incremental => {
1076 let sync_state = self.load_sync_state().await?;
1078 let last_sync = sync_state.last_sync_timestamp;
1079
1080 let all_workspace_ids = self.list_workspace_ids().await?;
1082
1083 let mut modified_workspaces = Vec::new();
1085 for workspace_id in all_workspace_ids {
1086 let file_path = self.workspace_file_path(&workspace_id);
1087 if let Ok(metadata) = fs::metadata(&file_path).await {
1088 if let Ok(modified_time) = metadata.modified() {
1089 let modified_datetime = DateTime::<Utc>::from(modified_time);
1090 if modified_datetime > last_sync {
1091 modified_workspaces.push(workspace_id);
1092 }
1093 }
1094 }
1095 }
1096
1097 Ok(modified_workspaces)
1098 }
1099 SyncStrategy::Selective(ids) => Ok(ids.clone()),
1100 }
1101 }
1102
    /// Basic per-workspace sync (no pattern or dry-run support): writes the
    /// workspace into `target_dir` using the requested layout and counts the
    /// files and requests created. Filenames come directly from the
    /// sanitized workspace name.
    async fn sync_workspace_to_directory(
        &self,
        workspace: &Workspace,
        target_dir: &Path,
        structure: &DirectoryStructure,
        include_meta: bool,
        force: bool,
    ) -> Result<WorkspaceSyncResult> {
        let mut result = WorkspaceSyncResult {
            requests_count: 0,
            files_created: 0,
        };

        match structure {
            DirectoryStructure::Flat => {
                // One export file per workspace, directly in target_dir.
                let export = self.create_workspace_export(workspace).await?;
                let file_path =
                    target_dir.join(format!("{}.yaml", self.sanitize_filename(&workspace.name)));

                // Existing files are skipped unless `force` is set.
                if force || !file_path.exists() {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::generic(format!("Failed to serialize workspace: {}", e))
                    })?;

                    fs::write(&file_path, content).await.map_err(|e| {
                        Error::generic(format!("Failed to write workspace file: {}", e))
                    })?;

                    result.files_created += 1;
                }
            }

            DirectoryStructure::Nested => {
                // Per-workspace directory: workspace.yaml + requests/ tree.
                let workspace_dir = target_dir.join(self.sanitize_filename(&workspace.name));
                if !workspace_dir.exists() {
                    fs::create_dir_all(&workspace_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create workspace directory: {}", e))
                    })?;
                }

                let export = self.create_workspace_export(workspace).await?;
                let workspace_file = workspace_dir.join("workspace.yaml");

                if force || !workspace_file.exists() {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::generic(format!("Failed to serialize workspace: {}", e))
                    })?;

                    fs::write(&workspace_file, content).await.map_err(|e| {
                        Error::generic(format!("Failed to write workspace file: {}", e))
                    })?;

                    result.files_created += 1;
                }

                let requests_dir = workspace_dir.join("requests");
                if !requests_dir.exists() {
                    fs::create_dir_all(&requests_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create requests directory: {}", e))
                    })?;
                }

                result.requests_count +=
                    self.export_workspace_requests(workspace, &requests_dir, force).await?;
            }

            DirectoryStructure::Grouped => {
                // All workspaces share these two sibling directories.
                let requests_dir = target_dir.join("requests");
                let workspaces_dir = target_dir.join("workspaces");

                for dir in [&requests_dir, &workspaces_dir] {
                    if !dir.exists() {
                        fs::create_dir_all(dir).await.map_err(|e| {
                            Error::generic(format!("Failed to create directory: {}", e))
                        })?;
                    }
                }

                let export = self.create_workspace_export(workspace).await?;
                let workspace_file = workspaces_dir
                    .join(format!("{}.yaml", self.sanitize_filename(&workspace.name)));

                if force || !workspace_file.exists() {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::generic(format!("Failed to serialize workspace: {}", e))
                    })?;

                    fs::write(&workspace_file, content).await.map_err(|e| {
                        Error::generic(format!("Failed to write workspace file: {}", e))
                    })?;

                    result.files_created += 1;
                }

                result.requests_count +=
                    self.export_workspace_requests_grouped(workspace, &requests_dir, force).await?;
            }
        }

        if include_meta {
            self.create_metadata_file(workspace, target_dir, structure).await?;
            result.files_created += 1;
        }

        Ok(result)
    }
1216
1217 async fn create_workspace_export(&self, workspace: &Workspace) -> Result<WorkspaceExport> {
1219 let mut requests = HashMap::new();
1220
1221 self.collect_requests_from_workspace(workspace, &mut requests, "".to_string());
1223
1224 let metadata = WorkspaceMetadata {
1225 id: workspace.id.clone(),
1226 name: workspace.name.clone(),
1227 description: workspace.description.clone(),
1228 exported_at: Utc::now(),
1229 request_count: requests.len(),
1230 folder_count: workspace.folders.len(),
1231 };
1232
1233 let config = WorkspaceConfig {
1234 auth: workspace.config.auth.as_ref().and_then(AuthConfig::from_config_auth),
1235 base_url: workspace.config.base_url.clone(),
1236 variables: workspace.config.global_environment.variables.clone(),
1237 reality_level: workspace.config.reality_level,
1238 };
1239
1240 Ok(WorkspaceExport {
1241 metadata,
1242 config,
1243 requests,
1244 })
1245 }
1246
1247 fn collect_requests_from_workspace(
1249 &self,
1250 workspace: &Workspace,
1251 requests: &mut HashMap<String, ExportedRequest>,
1252 folder_path: String,
1253 ) {
1254 for request in &workspace.requests {
1256 let exported = self.convert_request_to_exported(request, &folder_path);
1257 requests.insert(request.id.clone(), exported);
1258 }
1259
1260 for folder in &workspace.folders {
1262 let current_path = if folder_path.is_empty() {
1263 folder.name.clone()
1264 } else {
1265 format!("{}/{}", folder_path, folder.name)
1266 };
1267
1268 for request in &folder.requests {
1269 let exported = self.convert_request_to_exported(request, ¤t_path);
1270 requests.insert(request.id.clone(), exported);
1271 }
1272
1273 self.collect_requests_from_folders(folder, requests, current_path);
1275 }
1276 }
1277
1278 fn collect_requests_from_folders(
1280 &self,
1281 folder: &Folder,
1282 requests: &mut HashMap<String, ExportedRequest>,
1283 folder_path: String,
1284 ) {
1285 for subfolder in &folder.folders {
1286 let current_path = format!("{}/{}", folder_path, subfolder.name);
1287
1288 for request in &subfolder.requests {
1289 let exported = self.convert_request_to_exported(request, ¤t_path);
1290 requests.insert(request.id.clone(), exported);
1291 }
1292
1293 self.collect_requests_from_folders(subfolder, requests, current_path);
1294 }
1295 }
1296
1297 fn convert_request_to_exported(
1299 &self,
1300 request: &MockRequest,
1301 folder_path: &str,
1302 ) -> ExportedRequest {
1303 ExportedRequest {
1304 id: request.id.clone(),
1305 name: request.name.clone(),
1306 method: format!("{:?}", request.method),
1307 path: request.path.clone(),
1308 folder_path: folder_path.to_string(),
1309 headers: request.headers.clone(),
1310 query_params: request.query_params.clone(),
1311 body: request.body.clone(),
1312 response_status: Some(request.response.status_code),
1313 response_body: request.response.body.clone(),
1314 response_headers: request.response.headers.clone(),
1315 delay: request.response.delay_ms,
1316 }
1317 }
1318
1319 pub async fn export_workspace_encrypted(
1321 &self,
1322 workspace: &Workspace,
1323 output_path: &Path,
1324 ) -> Result<EncryptedExportResult> {
1325 if !workspace.config.auto_encryption.enabled {
1327 return Err(Error::generic("Encryption is not enabled for this workspace. Enable encryption in workspace settings first."));
1328 }
1329
1330 let encryption_config = workspace.config.auto_encryption.clone();
1332 let processor = AutoEncryptionProcessor::new(&workspace.id, encryption_config);
1333
1334 let mut filtered_workspace = workspace.to_filtered_for_sync();
1336
1337 self.encrypt_workspace_data(&mut filtered_workspace, &processor)?;
1339
1340 let export = self.create_workspace_export(&filtered_workspace).await?;
1342
1343 let export_json = serde_json::to_string_pretty(&export)
1345 .map_err(|e| Error::generic(format!("Failed to serialize export: {}", e)))?;
1346
1347 let encrypted_data = utils::encrypt_for_workspace(&workspace.id, &export_json)?;
1348
1349 let key_manager = WorkspaceKeyManager::new();
1351 let backup_key = key_manager.generate_workspace_key_backup(&workspace.id)?;
1352
1353 fs::write(output_path, &encrypted_data)
1355 .await
1356 .map_err(|e| Error::generic(format!("Failed to write encrypted export: {}", e)))?;
1357
1358 Ok(EncryptedExportResult {
1359 output_path: output_path.to_path_buf(),
1360 backup_key,
1361 exported_at: Utc::now(),
1362 workspace_name: workspace.name.clone(),
1363 encryption_enabled: true,
1364 })
1365 }
1366
    /// Attempt to import an encrypted workspace export without key material.
    ///
    /// This always returns an error: decryption requires the workspace id and
    /// backup key, so callers must use `import_workspace_encrypted_with_key`.
    /// The file is still read first so an unreadable path surfaces as an I/O
    /// error rather than the generic key-required message.
    pub async fn import_workspace_encrypted(
        &self,
        encrypted_file: &Path,
        _workspace_name: Option<&str>,
        _registry: &mut WorkspaceRegistry,
    ) -> Result<EncryptedImportResult> {
        // Read (and discard) the payload purely to validate readability.
        let _encrypted_data = fs::read_to_string(encrypted_file)
            .await
            .map_err(|e| Error::generic(format!("Failed to read encrypted file: {}", e)))?;

        Err(Error::generic("Encrypted import requires workspace ID and backup key. Use import_workspace_encrypted_with_key instead."))
    }
1383
1384 pub async fn import_workspace_encrypted_with_key(
1386 &self,
1387 encrypted_file: &Path,
1388 workspace_id: &str,
1389 backup_key: &str,
1390 workspace_name: Option<&str>,
1391 registry: &mut WorkspaceRegistry,
1392 ) -> Result<EncryptedImportResult> {
1393 let key_manager = WorkspaceKeyManager::new();
1395 if !key_manager.has_workspace_key(workspace_id) {
1396 key_manager.restore_workspace_key_from_backup(workspace_id, backup_key)?;
1397 }
1398
1399 let encrypted_data = fs::read_to_string(encrypted_file)
1401 .await
1402 .map_err(|e| Error::generic(format!("Failed to read encrypted file: {}", e)))?;
1403
1404 let decrypted_json = utils::decrypt_for_workspace(workspace_id, &encrypted_data)?;
1405
1406 let export: WorkspaceExport = serde_json::from_str(&decrypted_json)
1408 .map_err(|e| Error::generic(format!("Failed to parse decrypted export: {}", e)))?;
1409
1410 let workspace = self.convert_export_to_workspace(&export, workspace_name)?;
1412
1413 let imported_id = registry.add_workspace(workspace)?;
1415
1416 Ok(EncryptedImportResult {
1417 workspace_id: imported_id,
1418 workspace_name: export.metadata.name.clone(),
1419 imported_at: Utc::now(),
1420 request_count: export.requests.len(),
1421 encryption_restored: true,
1422 })
1423 }
1424
1425 fn encrypt_workspace_data(
1427 &self,
1428 workspace: &mut Workspace,
1429 processor: &AutoEncryptionProcessor,
1430 ) -> Result<()> {
1431 for env in &mut workspace.config.environments {
1433 processor.process_env_vars(&mut env.variables)?;
1434 }
1435 processor.process_env_vars(&mut workspace.config.global_environment.variables)?;
1436
1437 Ok(())
1441 }
1442
1443 fn convert_export_to_workspace(
1445 &self,
1446 export: &WorkspaceExport,
1447 name_override: Option<&str>,
1448 ) -> Result<Workspace> {
1449 let mut workspace =
1450 Workspace::new(name_override.unwrap_or(&export.metadata.name).to_string());
1451
1452 if let Some(desc) = &export.metadata.description {
1454 workspace.description = Some(desc.clone());
1455 }
1456
1457 for exported_request in export.requests.values() {
1459 let method = self.parse_http_method(&exported_request.method)?;
1461 let mut request = MockRequest::new(
1462 method,
1463 exported_request.path.clone(),
1464 exported_request.name.clone(),
1465 );
1466
1467 if let Some(status) = exported_request.response_status {
1469 request.response.status_code = status;
1470 }
1471
1472 if let Some(body) = &exported_request.response_body {
1474 request.response.body = Some(body.clone());
1475 }
1476 request.response.headers = exported_request.response_headers.clone();
1477 if let Some(delay) = exported_request.delay {
1478 request.response.delay_ms = Some(delay);
1479 }
1480
1481 workspace.add_request(request)?;
1482 }
1483
1484 workspace.config.global_environment.variables = export.config.variables.clone();
1486
1487 Ok(workspace)
1488 }
1489
1490 fn parse_http_method(&self, method_str: &str) -> Result<crate::routing::HttpMethod> {
1492 match method_str.to_uppercase().as_str() {
1493 "GET" => Ok(crate::routing::HttpMethod::GET),
1494 "POST" => Ok(crate::routing::HttpMethod::POST),
1495 "PUT" => Ok(crate::routing::HttpMethod::PUT),
1496 "DELETE" => Ok(crate::routing::HttpMethod::DELETE),
1497 "PATCH" => Ok(crate::routing::HttpMethod::PATCH),
1498 "HEAD" => Ok(crate::routing::HttpMethod::HEAD),
1499 "OPTIONS" => Ok(crate::routing::HttpMethod::OPTIONS),
1500 _ => Err(Error::generic(format!("Unknown HTTP method: {}", method_str))),
1501 }
1502 }
1503
1504 pub fn check_workspace_for_unencrypted_secrets(
1506 &self,
1507 workspace: &Workspace,
1508 ) -> Result<SecurityCheckResult> {
1509 let mut warnings = Vec::new();
1510 let errors = Vec::new();
1511
1512 self.check_environment_variables(workspace, &mut warnings)?;
1514
1515 let has_warnings = !warnings.is_empty();
1519 let has_errors = !errors.is_empty();
1520
1521 Ok(SecurityCheckResult {
1522 workspace_id: workspace.id.clone(),
1523 workspace_name: workspace.name.clone(),
1524 warnings,
1525 errors,
1526 is_secure: !has_warnings && !has_errors,
1527 recommended_actions: self.generate_security_recommendations(has_warnings, has_errors),
1528 })
1529 }
1530
1531 fn check_environment_variables(
1533 &self,
1534 workspace: &Workspace,
1535 warnings: &mut Vec<SecurityWarning>,
1536 ) -> Result<()> {
1537 let sensitive_keys = [
1538 "password",
1539 "secret",
1540 "key",
1541 "token",
1542 "credential",
1543 "api_key",
1544 "apikey",
1545 "api_secret",
1546 "db_password",
1547 "database_password",
1548 "aws_secret_key",
1549 "aws_session_token",
1550 "private_key",
1551 "authorization",
1552 "auth_token",
1553 "access_token",
1554 "refresh_token",
1555 "cookie",
1556 "session",
1557 "csrf",
1558 "jwt",
1559 "bearer",
1560 ];
1561
1562 for (key, value) in &workspace.config.global_environment.variables {
1564 if self.is_potentially_sensitive(key, value, &sensitive_keys) {
1565 warnings.push(SecurityWarning {
1566 field_type: "environment_variable".to_string(),
1567 field_name: key.clone(),
1568 location: "global_environment".to_string(),
1569 severity: SecuritySeverity::High,
1570 message: format!(
1571 "Potentially sensitive environment variable '{}' detected",
1572 key
1573 ),
1574 suggestion: "Consider encrypting this value or excluding it from exports"
1575 .to_string(),
1576 });
1577 }
1578 }
1579
1580 for env in &workspace.config.environments {
1582 for (key, value) in &env.variables {
1583 if self.is_potentially_sensitive(key, value, &sensitive_keys) {
1584 warnings.push(SecurityWarning {
1585 field_type: "environment_variable".to_string(),
1586 field_name: key.clone(),
1587 location: format!("environment '{}'", env.name),
1588 severity: SecuritySeverity::High,
1589 message: format!("Potentially sensitive environment variable '{}' detected in environment '{}'", key, env.name),
1590 suggestion: "Consider encrypting this value or excluding it from exports".to_string(),
1591 });
1592 }
1593 }
1594 }
1595
1596 Ok(())
1597 }
1598
1599 fn is_potentially_sensitive(&self, key: &str, value: &str, sensitive_keys: &[&str]) -> bool {
1601 let key_lower = key.to_lowercase();
1602
1603 if sensitive_keys.iter().any(|&sensitive| key_lower.contains(sensitive)) {
1605 return true;
1606 }
1607
1608 self.contains_sensitive_patterns(value)
1610 }
1611
1612 fn contains_sensitive_patterns(&self, value: &str) -> bool {
1614 if CREDIT_CARD_PATTERN.is_match(value) {
1616 return true;
1617 }
1618
1619 if SSN_PATTERN.is_match(value) {
1621 return true;
1622 }
1623
1624 if value.len() > 20 && value.chars().any(|c| c.is_alphanumeric()) {
1626 let alphanumeric_count = value.chars().filter(|c| c.is_alphanumeric()).count();
1627 let total_count = value.len();
1628 if alphanumeric_count as f64 / total_count as f64 > 0.8 {
1629 return true;
1630 }
1631 }
1632
1633 false
1634 }
1635
1636 fn generate_security_recommendations(
1638 &self,
1639 has_warnings: bool,
1640 has_errors: bool,
1641 ) -> Vec<String> {
1642 let mut recommendations = Vec::new();
1643
1644 if has_warnings || has_errors {
1645 recommendations.push("Enable encryption for this workspace in settings".to_string());
1646 recommendations.push("Review and encrypt sensitive environment variables".to_string());
1647 recommendations.push("Use encrypted export for sharing workspaces".to_string());
1648 }
1649
1650 if has_errors {
1651 recommendations
1652 .push("CRITICAL: Remove or encrypt sensitive data before proceeding".to_string());
1653 }
1654
1655 recommendations
1656 }
1657
1658 async fn export_workspace_requests(
1660 &self,
1661 workspace: &Workspace,
1662 requests_dir: &Path,
1663 force: bool,
1664 ) -> Result<usize> {
1665 let mut count = 0;
1666
1667 for request in &workspace.requests {
1668 let file_path =
1669 requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
1670 if force || !file_path.exists() {
1671 let exported = self.convert_request_to_exported(request, "");
1672 let content = serde_yaml::to_string(&exported)
1673 .map_err(|e| Error::generic(format!("Failed to serialize request: {}", e)))?;
1674
1675 fs::write(&file_path, content)
1676 .await
1677 .map_err(|e| Error::generic(format!("Failed to write request file: {}", e)))?;
1678
1679 count += 1;
1680 }
1681 }
1682
1683 for folder in &workspace.folders {
1685 count += self.export_folder_requests(folder, requests_dir, force, &folder.name).await?;
1686 }
1687
1688 Ok(count)
1689 }
1690
1691 async fn export_folder_requests(
1693 &self,
1694 folder: &Folder,
1695 requests_dir: &Path,
1696 force: bool,
1697 folder_path: &str,
1698 ) -> Result<usize> {
1699 use std::collections::VecDeque;
1700
1701 let mut count = 0;
1702 let mut queue = VecDeque::new();
1703
1704 queue.push_back((folder, folder_path.to_string()));
1706
1707 while let Some((current_folder, current_path)) = queue.pop_front() {
1708 for request in ¤t_folder.requests {
1710 let file_path =
1711 requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
1712 if force || !file_path.exists() {
1713 let exported = self.convert_request_to_exported(request, ¤t_path);
1714 let content = serde_yaml::to_string(&exported).map_err(|e| {
1715 Error::generic(format!("Failed to serialize request: {}", e))
1716 })?;
1717
1718 fs::write(&file_path, content).await.map_err(|e| {
1719 Error::generic(format!("Failed to write request file: {}", e))
1720 })?;
1721
1722 count += 1;
1723 }
1724 }
1725
1726 for subfolder in ¤t_folder.folders {
1728 let subfolder_path = if current_path.is_empty() {
1729 subfolder.name.clone()
1730 } else {
1731 format!("{}/{}", current_path, subfolder.name)
1732 };
1733 queue.push_back((subfolder, subfolder_path));
1734 }
1735 }
1736
1737 Ok(count)
1738 }
1739
1740 async fn export_workspace_requests_grouped(
1742 &self,
1743 workspace: &Workspace,
1744 requests_dir: &Path,
1745 force: bool,
1746 ) -> Result<usize> {
1747 let mut count = 0;
1748 let workspace_requests_dir = requests_dir.join(self.sanitize_filename(&workspace.name));
1749
1750 if !workspace_requests_dir.exists() {
1751 fs::create_dir_all(&workspace_requests_dir).await.map_err(|e| {
1752 Error::generic(format!("Failed to create workspace requests directory: {}", e))
1753 })?;
1754 }
1755
1756 count += self
1757 .export_workspace_requests(workspace, &workspace_requests_dir, force)
1758 .await?;
1759 Ok(count)
1760 }
1761
1762 async fn create_metadata_file(
1764 &self,
1765 workspace: &Workspace,
1766 target_dir: &Path,
1767 structure: &DirectoryStructure,
1768 ) -> Result<()> {
1769 let metadata = serde_json::json!({
1770 "workspace_id": workspace.id,
1771 "workspace_name": workspace.name,
1772 "description": workspace.description,
1773 "exported_at": Utc::now().to_rfc3339(),
1774 "structure": format!("{:?}", structure),
1775 "version": "1.0",
1776 "source": "mockforge"
1777 });
1778
1779 let metadata_file = target_dir.join(".mockforge-meta.json");
1780 let content = serde_json::to_string_pretty(&metadata)
1781 .map_err(|e| Error::generic(format!("Failed to serialize metadata: {}", e)))?;
1782
1783 fs::write(&metadata_file, content)
1784 .await
1785 .map_err(|e| Error::generic(format!("Failed to write metadata file: {}", e)))?;
1786
1787 Ok(())
1788 }
1789
1790 pub async fn export_reality_preset(
1795 &self,
1796 preset: &crate::RealityPreset,
1797 output_path: &Path,
1798 ) -> Result<()> {
1799 self.ensure_workspace_dir().await?;
1800
1801 let content = if output_path.extension().and_then(|s| s.to_str()) == Some("yaml")
1803 || output_path.extension().and_then(|s| s.to_str()) == Some("yml")
1804 {
1805 serde_yaml::to_string(preset)
1806 .map_err(|e| Error::generic(format!("Failed to serialize preset to YAML: {}", e)))?
1807 } else {
1808 serde_json::to_string_pretty(preset)
1809 .map_err(|e| Error::generic(format!("Failed to serialize preset to JSON: {}", e)))?
1810 };
1811
1812 if let Some(parent) = output_path.parent() {
1814 fs::create_dir_all(parent)
1815 .await
1816 .map_err(|e| Error::generic(format!("Failed to create preset directory: {}", e)))?;
1817 }
1818
1819 fs::write(output_path, content)
1820 .await
1821 .map_err(|e| Error::generic(format!("Failed to write preset file: {}", e)))?;
1822
1823 Ok(())
1824 }
1825
1826 pub async fn import_reality_preset(&self, input_path: &Path) -> Result<crate::RealityPreset> {
1831 let content = fs::read_to_string(input_path)
1832 .await
1833 .map_err(|e| Error::generic(format!("Failed to read preset file: {}", e)))?;
1834
1835 let preset = if input_path
1837 .extension()
1838 .and_then(|s| s.to_str())
1839 .map(|ext| ext == "yaml" || ext == "yml")
1840 .unwrap_or(false)
1841 {
1842 serde_yaml::from_str(&content).map_err(|e| {
1843 Error::generic(format!("Failed to deserialize preset from YAML: {}", e))
1844 })?
1845 } else {
1846 serde_json::from_str(&content).map_err(|e| {
1847 Error::generic(format!("Failed to deserialize preset from JSON: {}", e))
1848 })?
1849 };
1850
1851 Ok(preset)
1852 }
1853
    /// Directory under the base dir where reality preset files are stored.
    pub fn presets_dir(&self) -> PathBuf {
        self.base_dir.join("presets")
    }
1858
1859 pub async fn list_reality_presets(&self) -> Result<Vec<PathBuf>> {
1863 let presets_dir = self.presets_dir();
1864 if !presets_dir.exists() {
1865 return Ok(vec![]);
1866 }
1867
1868 let mut presets = Vec::new();
1869 let mut entries = fs::read_dir(&presets_dir)
1870 .await
1871 .map_err(|e| Error::generic(format!("Failed to read presets directory: {}", e)))?;
1872
1873 while let Some(entry) = entries
1874 .next_entry()
1875 .await
1876 .map_err(|e| Error::generic(format!("Failed to read directory entry: {}", e)))?
1877 {
1878 let path = entry.path();
1879 if path.is_file() {
1880 let ext = path.extension().and_then(|s| s.to_str());
1881 if ext == Some("json") || ext == Some("yaml") || ext == Some("yml") {
1882 presets.push(path);
1883 }
1884 }
1885 }
1886
1887 Ok(presets)
1888 }
1889
1890 fn sanitize_filename(&self, name: &str) -> String {
1892 name.chars()
1893 .map(|c| match c {
1894 '/' | '\\' | ':' | '*' | '?' | '"' | '<' | '>' | '|' => '_',
1895 c if c.is_whitespace() => '_',
1896 c => c,
1897 })
1898 .collect::<String>()
1899 .to_lowercase()
1900 }
1901}
1902
/// Internal tally for syncing a single workspace to disk.
#[derive(Debug)]
struct WorkspaceSyncResult {
    /// Number of requests exported for the workspace.
    requests_count: usize,
    /// Number of files actually created or overwritten on disk.
    files_created: usize,
}
1911
#[cfg(test)]
mod tests {
    use super::*;
    use crate::workspace::{MockRequest, Workspace};
    use crate::HttpMethod;
    use tempfile::TempDir;

    /// Round-trip a single workspace through save/load and list its id.
    #[tokio::test]
    async fn test_workspace_persistence() {
        let temp_dir = TempDir::new().unwrap();
        let persistence = WorkspacePersistence::new(temp_dir.path());

        let mut workspace = Workspace::new("Test Workspace".to_string());
        let request =
            MockRequest::new(HttpMethod::GET, "/test".to_string(), "Test Request".to_string());
        workspace.add_request(request).unwrap();

        persistence.save_workspace(&workspace).await.unwrap();

        let loaded = persistence.load_workspace(&workspace.id).await.unwrap();
        assert_eq!(loaded.name, workspace.name);
        assert_eq!(loaded.requests.len(), 1);

        let ids = persistence.list_workspace_ids().await.unwrap();
        assert_eq!(ids.len(), 1);
        assert_eq!(ids[0], workspace.id);
    }

    /// Persist a full registry (two workspaces, one active) and reload it.
    #[tokio::test]
    async fn test_registry_persistence() {
        let temp_dir = TempDir::new().unwrap();
        let persistence = WorkspacePersistence::new(temp_dir.path());

        let mut registry = WorkspaceRegistry::new();

        let workspace1 = Workspace::new("Workspace 1".to_string());
        let workspace2 = Workspace::new("Workspace 2".to_string());

        let id1 = registry.add_workspace(workspace1).unwrap();
        let _id2 = registry.add_workspace(workspace2).unwrap();

        registry.set_active_workspace(Some(id1.clone())).unwrap();

        // Fixed encoding corruption: `&registry` had been mangled into
        // `®istry` by an HTML-entity substitution (`&reg` -> '®').
        persistence.save_full_registry(&registry).await.unwrap();

        let loaded_registry = persistence.load_full_registry().await.unwrap();

        assert_eq!(loaded_registry.get_workspaces().len(), 2);
        assert_eq!(loaded_registry.get_active_workspace().unwrap().name, "Workspace 1");
    }

    /// Back up a workspace, delete it, then restore from the backup file.
    #[tokio::test]
    async fn test_backup_and_restore() {
        let temp_dir = TempDir::new().unwrap();
        let backup_dir = temp_dir.path().join("backups");
        let persistence = WorkspacePersistence::new(temp_dir.path());

        let workspace = Workspace::new("Test Workspace".to_string());
        persistence.save_workspace(&workspace).await.unwrap();

        let backup_path = persistence.backup_workspace(&workspace.id, &backup_dir).await.unwrap();
        assert!(backup_path.exists());

        persistence.delete_workspace(&workspace.id).await.unwrap();
        assert!(persistence.load_workspace(&workspace.id).await.is_err());

        let restored_id = persistence.restore_workspace(&backup_path).await.unwrap();

        let restored = persistence.load_workspace(&restored_id).await.unwrap();
        assert_eq!(restored.name, "Test Workspace");
    }
}