1use crate::config::AuthConfig as ConfigAuthConfig;
7use crate::encryption::{utils, AutoEncryptionProcessor, WorkspaceKeyManager};
8use crate::workspace::{EntityId, Folder, MockRequest, Workspace, WorkspaceRegistry};
9use crate::{Error, Result};
10use chrono::{DateTime, Utc};
11use once_cell::sync::Lazy;
12use regex::Regex;
13use serde::{Deserialize, Serialize};
14use std::collections::HashMap;
15use std::path::{Path, PathBuf};
16use tokio::fs;
17
/// Matches 16-digit card-like numbers in `dddd-dddd-dddd-dddd`, space-separated,
/// or contiguous form.
/// NOTE(review): no Luhn validation, so any 16-digit run matches — presumably
/// consumed by the security-scan code later in this file; confirm.
static CREDIT_CARD_PATTERN: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"\b\d{4}[-\s]?\d{4}[-\s]?\d{4}[-\s]?\d{4}\b")
        .expect("CREDIT_CARD_PATTERN regex is valid")
});
23
/// Matches US SSN-shaped numbers (`ddd-dd-dddd`, spaced, or contiguous).
/// NOTE(review): also matches arbitrary 9-digit runs; presumably used by the
/// security-scan code later in this file — confirm.
static SSN_PATTERN: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"\b\d{3}[-\s]?\d{2}[-\s]?\d{4}\b").expect("SSN_PATTERN regex is valid")
});
27
/// File-based persistence layer for workspaces.
///
/// Each workspace is stored as `<id>.yaml` under `base_dir`, alongside the
/// bookkeeping files `registry.yaml` and `sync_state.yaml`.
#[derive(Debug)]
pub struct WorkspacePersistence {
    // Root directory holding all workspace, registry, and sync-state files.
    base_dir: PathBuf,
}
34
/// On-disk form of the workspace registry (`registry.yaml`).
#[derive(Debug, Clone, Serialize, Deserialize)]
struct SerializableWorkspaceRegistry {
    /// Full workspace snapshots. On load only their ids are consulted; each
    /// workspace is re-read from its own `<id>.yaml` file (see `load_registry`).
    workspaces: Vec<Workspace>,
    /// Id of the workspace that was active when the registry was saved.
    active_workspace: Option<EntityId>,
}
41
/// Bookmark for incremental sync: only workspace files modified after this
/// instant are picked up by the next incremental run.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SyncState {
    /// UTC time of the last completed sync.
    pub last_sync_timestamp: DateTime<Utc>,
}
48
/// Which workspaces a sync operation should cover.
#[derive(Debug, Clone, PartialEq)]
pub enum SyncStrategy {
    /// Every workspace found on disk.
    Full,
    /// Only workspaces whose file changed since the last recorded sync.
    Incremental,
    /// An explicit list of workspace ids.
    Selective(Vec<String>),
}
59
/// Layout of the sync target directory.
#[derive(Debug, Clone, PartialEq)]
pub enum DirectoryStructure {
    /// One `<name>.yaml` per workspace, all in the target root.
    Flat,
    /// One subdirectory per workspace, containing `workspace.yaml` plus a
    /// `requests/` directory.
    Nested,
    /// Shared top-level `workspaces/` and `requests/` directories.
    Grouped,
}
70
/// Aggregate counters reported by a sync run.
#[derive(Debug, Clone)]
pub struct SyncResult {
    /// Number of workspaces successfully processed.
    pub synced_workspaces: usize,
    /// Total requests exported across all workspaces.
    pub synced_requests: usize,
    /// Files written (or that would be written, in a dry run).
    pub files_created: usize,
    /// The directory the sync targeted.
    pub target_dir: PathBuf,
}
83
/// Outcome of an encrypted workspace export.
/// NOTE(review): produced by export code outside this chunk; field semantics
/// inferred from names — verify against the producing function.
#[derive(Debug, Clone)]
pub struct EncryptedExportResult {
    /// Where the export archive was written.
    pub output_path: PathBuf,
    /// Backup key returned to the caller; treat as secret material.
    pub backup_key: String,
    pub exported_at: DateTime<Utc>,
    pub workspace_name: String,
    pub encryption_enabled: bool,
}
98
/// Outcome of importing an encrypted workspace export.
/// NOTE(review): populated by import code outside this chunk; field semantics
/// inferred from names — verify against the producing function.
#[derive(Debug, Clone)]
pub struct EncryptedImportResult {
    pub workspace_id: String,
    pub workspace_name: String,
    pub imported_at: DateTime<Utc>,
    /// Number of requests contained in the imported workspace.
    pub request_count: usize,
    pub encryption_restored: bool,
}
113
/// Result of scanning a workspace for sensitive data.
/// NOTE(review): the scan that produces this lives outside this chunk; field
/// semantics inferred from names — confirm.
#[derive(Debug, Clone)]
pub struct SecurityCheckResult {
    pub workspace_id: String,
    pub workspace_name: String,
    /// Non-fatal findings.
    pub warnings: Vec<SecurityWarning>,
    /// Findings severe enough to be treated as errors.
    pub errors: Vec<SecurityWarning>,
    pub is_secure: bool,
    /// Human-readable remediation suggestions.
    pub recommended_actions: Vec<String>,
}
130
/// A single sensitive-data finding.
/// NOTE(review): produced by scan code outside this chunk; field semantics
/// inferred from names — confirm.
#[derive(Debug, Clone)]
pub struct SecurityWarning {
    /// Category of sensitive data detected (e.g. credential type).
    pub field_type: String,
    pub field_name: String,
    /// Where in the workspace the value was found.
    pub location: String,
    pub severity: SecuritySeverity,
    pub message: String,
    /// Suggested remediation for this specific finding.
    pub suggestion: String,
}
147
/// Severity ranking for a [`SecurityWarning`], lowest to highest.
#[derive(Debug, Clone, PartialEq)]
pub enum SecuritySeverity {
    Low,
    Medium,
    High,
    Critical,
}
160
/// Self-contained, serializable snapshot of a workspace used by the directory
/// sync / export paths (see `create_workspace_export`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceExport {
    pub metadata: WorkspaceMetadata,
    pub config: WorkspaceConfig,
    /// All requests in the workspace (root level plus folders), keyed by
    /// request id.
    pub requests: HashMap<String, ExportedRequest>,
}
171
/// Descriptive header recorded in every [`WorkspaceExport`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceMetadata {
    pub id: String,
    pub name: String,
    pub description: Option<String>,
    /// UTC time the export snapshot was taken.
    pub exported_at: DateTime<Utc>,
    /// Total requests in the export (root level plus all folders).
    pub request_count: usize,
    /// Count of top-level folders only (nested folders are not included —
    /// see `create_workspace_export`).
    pub folder_count: usize,
}
188
/// Flattened workspace configuration carried inside a [`WorkspaceExport`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceConfig {
    /// Auth settings converted to the export-friendly [`AuthConfig`] form.
    pub auth: Option<AuthConfig>,
    pub base_url: Option<String>,
    /// Global environment variables of the workspace.
    pub variables: HashMap<String, String>,
}
199
/// Export-friendly auth description: a scheme tag (`"jwt"`, `"oauth2"`,
/// `"basic"`, or `"api_key"`) plus a flat string parameter map.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuthConfig {
    pub auth_type: String,
    pub params: HashMap<String, String>,
}
208
209impl AuthConfig {
210 pub fn from_config_auth(config_auth: &ConfigAuthConfig) -> Option<Self> {
212 if let Some(jwt) = &config_auth.jwt {
213 let mut params = HashMap::new();
214 if let Some(secret) = &jwt.secret {
215 params.insert("secret".to_string(), secret.clone());
216 }
217 if let Some(rsa_public_key) = &jwt.rsa_public_key {
218 params.insert("rsa_public_key".to_string(), rsa_public_key.clone());
219 }
220 if let Some(ecdsa_public_key) = &jwt.ecdsa_public_key {
221 params.insert("ecdsa_public_key".to_string(), ecdsa_public_key.clone());
222 }
223 if let Some(issuer) = &jwt.issuer {
224 params.insert("issuer".to_string(), issuer.clone());
225 }
226 if let Some(audience) = &jwt.audience {
227 params.insert("audience".to_string(), audience.clone());
228 }
229 if !jwt.algorithms.is_empty() {
230 params.insert("algorithms".to_string(), jwt.algorithms.join(","));
231 }
232 Some(AuthConfig {
233 auth_type: "jwt".to_string(),
234 params,
235 })
236 } else if let Some(oauth2) = &config_auth.oauth2 {
237 let mut params = HashMap::new();
238 params.insert("client_id".to_string(), oauth2.client_id.clone());
239 params.insert("client_secret".to_string(), oauth2.client_secret.clone());
240 params.insert("introspection_url".to_string(), oauth2.introspection_url.clone());
241 if let Some(auth_url) = &oauth2.auth_url {
242 params.insert("auth_url".to_string(), auth_url.clone());
243 }
244 if let Some(token_url) = &oauth2.token_url {
245 params.insert("token_url".to_string(), token_url.clone());
246 }
247 if let Some(token_type_hint) = &oauth2.token_type_hint {
248 params.insert("token_type_hint".to_string(), token_type_hint.clone());
249 }
250 Some(AuthConfig {
251 auth_type: "oauth2".to_string(),
252 params,
253 })
254 } else if let Some(basic_auth) = &config_auth.basic_auth {
255 let mut params = HashMap::new();
256 for (user, pass) in &basic_auth.credentials {
257 params.insert(user.clone(), pass.clone());
258 }
259 Some(AuthConfig {
260 auth_type: "basic".to_string(),
261 params,
262 })
263 } else if let Some(api_key) = &config_auth.api_key {
264 let mut params = HashMap::new();
265 params.insert("header_name".to_string(), api_key.header_name.clone());
266 if let Some(query_name) = &api_key.query_name {
267 params.insert("query_name".to_string(), query_name.clone());
268 }
269 if !api_key.keys.is_empty() {
270 params.insert("keys".to_string(), api_key.keys.join(","));
271 }
272 Some(AuthConfig {
273 auth_type: "api_key".to_string(),
274 params,
275 })
276 } else {
277 None
278 }
279 }
280}
281
/// Serializable form of a single mock request inside an export.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExportedRequest {
    pub id: String,
    pub name: String,
    /// HTTP method, rendered via `Debug` formatting of the source enum
    /// (see `convert_request_to_exported`).
    pub method: String,
    pub path: String,
    /// Slash-separated folder path; empty for root-level requests.
    pub folder_path: String,
    pub headers: HashMap<String, String>,
    pub query_params: HashMap<String, String>,
    pub body: Option<String>,
    pub response_status: Option<u16>,
    pub response_body: Option<String>,
    pub response_headers: HashMap<String, String>,
    /// Artificial response delay, if configured — presumably milliseconds;
    /// TODO confirm against the mock-server code.
    pub delay: Option<u64>,
}
310
311impl WorkspacePersistence {
312 pub fn new<P: AsRef<Path>>(base_dir: P) -> Self {
314 Self {
315 base_dir: base_dir.as_ref().to_path_buf(),
316 }
317 }
318
319 pub fn workspace_dir(&self) -> &Path {
321 &self.base_dir
322 }
323
324 pub fn workspace_file_path(&self, workspace_id: &str) -> PathBuf {
326 self.base_dir.join(format!("{}.yaml", workspace_id))
327 }
328
329 pub fn registry_file_path(&self) -> PathBuf {
331 self.base_dir.join("registry.yaml")
332 }
333
334 pub fn sync_state_file_path(&self) -> PathBuf {
336 self.base_dir.join("sync_state.yaml")
337 }
338
339 pub async fn ensure_workspace_dir(&self) -> Result<()> {
341 if !self.base_dir.exists() {
342 fs::create_dir_all(&self.base_dir).await.map_err(|e| {
343 Error::generic(format!("Failed to create workspace directory: {}", e))
344 })?;
345 }
346 Ok(())
347 }
348
349 pub async fn save_workspace(&self, workspace: &Workspace) -> Result<()> {
351 self.ensure_workspace_dir().await?;
352
353 let file_path = self.workspace_file_path(&workspace.id);
354 let content = serde_yaml::to_string(workspace)
355 .map_err(|e| Error::generic(format!("Failed to serialize workspace: {}", e)))?;
356
357 fs::write(&file_path, content)
358 .await
359 .map_err(|e| Error::generic(format!("Failed to write workspace file: {}", e)))?;
360
361 Ok(())
362 }
363
364 pub async fn load_workspace(&self, workspace_id: &str) -> Result<Workspace> {
366 let file_path = self.workspace_file_path(workspace_id);
367
368 if !file_path.exists() {
369 return Err(Error::generic(format!("Workspace file not found: {:?}", file_path)));
370 }
371
372 let content = fs::read_to_string(&file_path)
373 .await
374 .map_err(|e| Error::generic(format!("Failed to read workspace file: {}", e)))?;
375
376 let workspace: Workspace = serde_yaml::from_str(&content)
377 .map_err(|e| Error::generic(format!("Failed to deserialize workspace: {}", e)))?;
378
379 Ok(workspace)
380 }
381
382 pub async fn delete_workspace(&self, workspace_id: &str) -> Result<()> {
384 let file_path = self.workspace_file_path(workspace_id);
385
386 if file_path.exists() {
387 fs::remove_file(&file_path)
388 .await
389 .map_err(|e| Error::generic(format!("Failed to delete workspace file: {}", e)))?;
390 }
391
392 Ok(())
393 }
394
395 pub async fn save_registry(&self, registry: &WorkspaceRegistry) -> Result<()> {
397 self.ensure_workspace_dir().await?;
398
399 let serializable = SerializableWorkspaceRegistry {
400 workspaces: registry.get_workspaces().into_iter().cloned().collect(),
401 active_workspace: registry.get_active_workspace_id().map(|s| s.to_string()),
402 };
403
404 let file_path = self.registry_file_path();
405 let content = serde_yaml::to_string(&serializable)
406 .map_err(|e| Error::generic(format!("Failed to serialize registry: {}", e)))?;
407
408 fs::write(&file_path, content)
409 .await
410 .map_err(|e| Error::generic(format!("Failed to write registry file: {}", e)))?;
411
412 Ok(())
413 }
414
    /// Loads the registry index and rehydrates each listed workspace from its
    /// own file.
    ///
    /// A missing registry file yields an empty registry. Workspaces that fail
    /// to load, and an active-workspace id that cannot be applied, are logged
    /// and skipped rather than failing the whole load.
    pub async fn load_registry(&self) -> Result<WorkspaceRegistry> {
        let file_path = self.registry_file_path();

        if !file_path.exists() {
            return Ok(WorkspaceRegistry::new());
        }

        let content = fs::read_to_string(&file_path)
            .await
            .map_err(|e| Error::generic(format!("Failed to read registry file: {}", e)))?;

        let serializable: SerializableWorkspaceRegistry = serde_yaml::from_str(&content)
            .map_err(|e| Error::generic(format!("Failed to deserialize registry: {}", e)))?;

        let mut registry = WorkspaceRegistry::new();

        // The registry file stores full workspace snapshots, but only the id
        // is trusted here — the per-workspace file is the authoritative copy.
        for workspace_meta in &serializable.workspaces {
            match self.load_workspace(&workspace_meta.id).await {
                Ok(workspace) => {
                    registry.add_workspace(workspace)?;
                }
                Err(e) => {
                    tracing::warn!("Failed to load workspace {}: {}", workspace_meta.id, e);
                }
            }
        }

        if let Some(active_id) = &serializable.active_workspace {
            if let Err(e) = registry.set_active_workspace(Some(active_id.clone())) {
                tracing::warn!("Failed to set active workspace {}: {}", active_id, e);
            }
        }

        Ok(registry)
    }
454
455 pub async fn save_sync_state(&self, sync_state: &SyncState) -> Result<()> {
457 self.ensure_workspace_dir().await?;
458
459 let file_path = self.sync_state_file_path();
460 let content = serde_yaml::to_string(sync_state)
461 .map_err(|e| Error::generic(format!("Failed to serialize sync state: {}", e)))?;
462
463 fs::write(&file_path, content)
464 .await
465 .map_err(|e| Error::generic(format!("Failed to write sync state file: {}", e)))?;
466
467 Ok(())
468 }
469
470 pub async fn load_sync_state(&self) -> Result<SyncState> {
472 let file_path = self.sync_state_file_path();
473
474 if !file_path.exists() {
475 return Ok(SyncState {
477 last_sync_timestamp: Utc::now(),
478 });
479 }
480
481 let content = fs::read_to_string(&file_path)
482 .await
483 .map_err(|e| Error::generic(format!("Failed to read sync state file: {}", e)))?;
484
485 let sync_state: SyncState = serde_yaml::from_str(&content)
486 .map_err(|e| Error::generic(format!("Failed to deserialize sync state: {}", e)))?;
487
488 Ok(sync_state)
489 }
490
491 pub async fn list_workspace_ids(&self) -> Result<Vec<EntityId>> {
493 if !self.base_dir.exists() {
494 return Ok(Vec::new());
495 }
496
497 let mut workspace_ids = Vec::new();
498
499 let mut entries = fs::read_dir(&self.base_dir)
500 .await
501 .map_err(|e| Error::generic(format!("Failed to read workspace directory: {}", e)))?;
502
503 while let Some(entry) = entries
504 .next_entry()
505 .await
506 .map_err(|e| Error::generic(format!("Failed to read directory entry: {}", e)))?
507 {
508 let path = entry.path();
509 if path.is_file() {
510 if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
511 if file_name != "registry.yaml" && file_name.ends_with(".yaml") {
512 if let Some(id) = file_name.strip_suffix(".yaml") {
513 workspace_ids.push(id.to_string());
514 }
515 }
516 }
517 }
518 }
519
520 Ok(workspace_ids)
521 }
522
    /// Saves the registry index and then every workspace it contains.
    ///
    /// Not atomic: a failure partway through leaves the files written so far
    /// on disk.
    pub async fn save_full_registry(&self, registry: &WorkspaceRegistry) -> Result<()> {
        self.save_registry(registry).await?;

        for workspace in registry.get_workspaces() {
            self.save_workspace(workspace).await?;
        }

        Ok(())
    }
535
    /// Alias for [`load_registry`](Self::load_registry), which already
    /// rehydrates every workspace from disk.
    pub async fn load_full_registry(&self) -> Result<WorkspaceRegistry> {
        self.load_registry().await
    }
540
541 pub async fn backup_workspace(&self, workspace_id: &str, backup_dir: &Path) -> Result<PathBuf> {
543 let workspace_file = self.workspace_file_path(workspace_id);
544
545 if !workspace_file.exists() {
546 return Err(Error::generic(format!("Workspace {} does not exist", workspace_id)));
547 }
548
549 if !backup_dir.exists() {
551 fs::create_dir_all(backup_dir)
552 .await
553 .map_err(|e| Error::generic(format!("Failed to create backup directory: {}", e)))?;
554 }
555
556 let timestamp = Utc::now().format("%Y%m%d_%H%M%S");
558 let backup_filename = format!("{}_{}.yaml", workspace_id, timestamp);
559 let backup_path = backup_dir.join(backup_filename);
560
561 fs::copy(&workspace_file, &backup_path)
563 .await
564 .map_err(|e| Error::generic(format!("Failed to create backup: {}", e)))?;
565
566 Ok(backup_path)
567 }
568
569 pub async fn restore_workspace(&self, backup_path: &Path) -> Result<EntityId> {
571 if !backup_path.exists() {
572 return Err(Error::generic(format!("Backup file does not exist: {:?}", backup_path)));
573 }
574
575 let content = fs::read_to_string(backup_path)
577 .await
578 .map_err(|e| Error::generic(format!("Failed to read backup file: {}", e)))?;
579
580 let workspace: Workspace = serde_yaml::from_str(&content)
581 .map_err(|e| Error::generic(format!("Failed to deserialize backup: {}", e)))?;
582
583 self.save_workspace(&workspace).await?;
585
586 Ok(workspace.id)
587 }
588
589 pub async fn cleanup_old_backups(&self, backup_dir: &Path, keep_count: usize) -> Result<usize> {
591 if !backup_dir.exists() {
592 return Ok(0);
593 }
594
595 let mut backup_files = Vec::new();
596
597 let mut entries = fs::read_dir(backup_dir)
598 .await
599 .map_err(|e| Error::generic(format!("Failed to read backup directory: {}", e)))?;
600
601 while let Some(entry) = entries
602 .next_entry()
603 .await
604 .map_err(|e| Error::generic(format!("Failed to read backup entry: {}", e)))?
605 {
606 let path = entry.path();
607 if path.is_file() {
608 if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
609 if file_name.ends_with(".yaml") {
610 if let Ok(metadata) = entry.metadata().await {
611 if let Ok(modified) = metadata.modified() {
612 backup_files.push((path, modified));
613 }
614 }
615 }
616 }
617 }
618 }
619
620 backup_files.sort_by(|a, b| b.1.cmp(&a.1));
622
623 let mut removed_count = 0;
625 for (path, _) in backup_files.iter().skip(keep_count) {
626 if fs::remove_file(path).await.is_ok() {
627 removed_count += 1;
628 }
629 }
630
631 Ok(removed_count)
632 }
633
    /// Syncs workspaces into `target_dir` with full control over strategy,
    /// layout, naming, exclusion, and dry-run behavior.
    ///
    /// * `strategy` — `"full"`, `"incremental"`, or `"selective"` (the latter
    ///   requires `workspace_ids`, a comma-separated id list).
    /// * `structure` — `"flat"`, `"nested"`, or `"grouped"`.
    /// * `filename_pattern` — placeholder pattern expanded per workspace
    ///   (see `generate_filename`).
    /// * `exclude_pattern` — optional regex; matching workspace ids are skipped.
    /// * `dry_run` — when true, nothing is written but counts are reported.
    ///
    /// # Errors
    /// Fails on unknown strategy/structure strings, on a selective sync
    /// without ids, or on filesystem/serialization failures.
    #[allow(clippy::too_many_arguments)]
    pub async fn sync_to_directory_advanced(
        &self,
        target_dir: &str,
        strategy: &str,
        workspace_ids: Option<&str>,
        structure: &str,
        include_meta: bool,
        force: bool,
        filename_pattern: &str,
        exclude_pattern: Option<&str>,
        dry_run: bool,
    ) -> Result<SyncResult> {
        let target_path = PathBuf::from(target_dir);

        if !dry_run && !target_path.exists() {
            fs::create_dir_all(&target_path)
                .await
                .map_err(|e| Error::generic(format!("Failed to create target directory: {}", e)))?;
        }

        // Parse the stringly-typed strategy flag into the internal enum.
        let sync_strategy = match strategy {
            "full" => SyncStrategy::Full,
            "incremental" => SyncStrategy::Incremental,
            "selective" => {
                if let Some(ids) = workspace_ids {
                    let workspace_list = ids.split(',').map(|s| s.trim().to_string()).collect();
                    SyncStrategy::Selective(workspace_list)
                } else {
                    return Err(Error::generic("Selective strategy requires workspace IDs"));
                }
            }
            _ => return Err(Error::generic(format!("Unknown sync strategy: {}", strategy))),
        };

        let dir_structure = match structure {
            "flat" => DirectoryStructure::Flat,
            "nested" => DirectoryStructure::Nested,
            "grouped" => DirectoryStructure::Grouped,
            _ => return Err(Error::generic(format!("Unknown directory structure: {}", structure))),
        };

        let mut workspaces_to_sync = self.get_workspaces_for_sync(&sync_strategy).await?;

        // NOTE(review): an invalid exclude regex is silently ignored here —
        // consider surfacing the compile error to the caller instead.
        if let Some(exclude) = exclude_pattern {
            if let Ok(regex) = regex::Regex::new(exclude) {
                workspaces_to_sync.retain(|id| !regex.is_match(id));
            }
        }

        let mut result = SyncResult {
            synced_workspaces: 0,
            synced_requests: 0,
            files_created: 0,
            target_dir: target_path.clone(),
        };

        // Workspaces that fail to load are skipped without counting.
        for workspace_id in workspaces_to_sync {
            if let Ok(workspace) = self.load_workspace(&workspace_id).await {
                let workspace_result = self
                    .sync_workspace_to_directory_advanced(
                        &workspace,
                        &target_path,
                        &dir_structure,
                        include_meta,
                        force,
                        filename_pattern,
                        dry_run,
                    )
                    .await?;

                result.synced_workspaces += 1;
                result.synced_requests += workspace_result.requests_count;
                result.files_created += workspace_result.files_created;
            }
        }

        // Advance the incremental bookmark; a failed save only warns so the
        // sync itself still reports success.
        if let SyncStrategy::Incremental = sync_strategy {
            let new_sync_state = SyncState {
                last_sync_timestamp: Utc::now(),
            };
            if let Err(e) = self.save_sync_state(&new_sync_state).await {
                tracing::warn!("Failed to save sync state: {}", e);
            }
        }

        Ok(result)
    }
730
    /// Writes a single workspace into `target_dir` according to `structure`,
    /// honoring `force` (overwrite) and `dry_run` (count without writing).
    ///
    /// Returns per-workspace counters consumed by `sync_to_directory_advanced`.
    #[allow(clippy::too_many_arguments)]
    async fn sync_workspace_to_directory_advanced(
        &self,
        workspace: &Workspace,
        target_dir: &Path,
        structure: &DirectoryStructure,
        include_meta: bool,
        force: bool,
        filename_pattern: &str,
        dry_run: bool,
    ) -> Result<WorkspaceSyncResult> {
        let mut result = WorkspaceSyncResult {
            requests_count: 0,
            files_created: 0,
        };

        match structure {
            // Flat: one <pattern>.yaml per workspace in the target root.
            DirectoryStructure::Flat => {
                let export = self.create_workspace_export(workspace).await?;
                let filename = self.generate_filename(filename_pattern, workspace);
                let file_path = target_dir.join(format!("{}.yaml", filename));

                if force || !file_path.exists() {
                    if !dry_run {
                        let content = serde_yaml::to_string(&export).map_err(|e| {
                            Error::generic(format!("Failed to serialize workspace: {}", e))
                        })?;

                        fs::write(&file_path, content).await.map_err(|e| {
                            Error::generic(format!("Failed to write workspace file: {}", e))
                        })?;
                    }
                    result.files_created += 1;
                }
            }

            // Nested: <pattern>/workspace.yaml plus <pattern>/requests/*.yaml.
            DirectoryStructure::Nested => {
                let workspace_dir =
                    target_dir.join(self.generate_filename(filename_pattern, workspace));
                if !dry_run && !workspace_dir.exists() {
                    fs::create_dir_all(&workspace_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create workspace directory: {}", e))
                    })?;
                }

                let export = self.create_workspace_export(workspace).await?;
                let workspace_file = workspace_dir.join("workspace.yaml");

                if force || !workspace_file.exists() {
                    if !dry_run {
                        let content = serde_yaml::to_string(&export).map_err(|e| {
                            Error::generic(format!("Failed to serialize workspace: {}", e))
                        })?;

                        fs::write(&workspace_file, content).await.map_err(|e| {
                            Error::generic(format!("Failed to write workspace file: {}", e))
                        })?;
                    }
                    result.files_created += 1;
                }

                let requests_dir = workspace_dir.join("requests");
                if !dry_run && !requests_dir.exists() {
                    fs::create_dir_all(&requests_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create requests directory: {}", e))
                    })?;
                }

                result.requests_count += self
                    .export_workspace_requests_advanced(workspace, &requests_dir, force, dry_run)
                    .await?;
            }

            // Grouped: shared workspaces/ and requests/ directories at the root.
            DirectoryStructure::Grouped => {
                let requests_dir = target_dir.join("requests");
                let workspaces_dir = target_dir.join("workspaces");

                if !dry_run {
                    for dir in [&requests_dir, &workspaces_dir] {
                        if !dir.exists() {
                            fs::create_dir_all(dir).await.map_err(|e| {
                                Error::generic(format!("Failed to create directory: {}", e))
                            })?;
                        }
                    }
                }

                let export = self.create_workspace_export(workspace).await?;
                let filename = self.generate_filename(filename_pattern, workspace);
                let workspace_file = workspaces_dir.join(format!("{}.yaml", filename));

                if force || !workspace_file.exists() {
                    if !dry_run {
                        let content = serde_yaml::to_string(&export).map_err(|e| {
                            Error::generic(format!("Failed to serialize workspace: {}", e))
                        })?;

                        fs::write(&workspace_file, content).await.map_err(|e| {
                            Error::generic(format!("Failed to write workspace file: {}", e))
                        })?;
                    }
                    result.files_created += 1;
                }

                result.requests_count += self
                    .export_workspace_requests_grouped_advanced(
                        workspace,
                        &requests_dir,
                        force,
                        dry_run,
                    )
                    .await?;
            }
        }

        // NOTE(review): the metadata file is skipped entirely in dry runs, so
        // dry-run counts differ from a real run by one when include_meta is
        // set — confirm this asymmetry is intended.
        if include_meta && !dry_run {
            self.create_metadata_file(workspace, target_dir, structure).await?;
            result.files_created += 1;
        }

        Ok(result)
    }
860
861 fn generate_filename(&self, pattern: &str, workspace: &Workspace) -> String {
863 let timestamp = Utc::now().format("%Y%m%d_%H%M%S");
864
865 pattern
866 .replace("{name}", &self.sanitize_filename(&workspace.name))
867 .replace("{id}", &workspace.id)
868 .replace("{timestamp}", ×tamp.to_string())
869 }
870
871 async fn export_workspace_requests_advanced(
873 &self,
874 workspace: &Workspace,
875 requests_dir: &Path,
876 force: bool,
877 dry_run: bool,
878 ) -> Result<usize> {
879 let mut count = 0;
880
881 for request in &workspace.requests {
882 let file_path =
883 requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
884 if force || !file_path.exists() {
885 if !dry_run {
886 let exported = self.convert_request_to_exported(request, "");
887 let content = serde_yaml::to_string(&exported).map_err(|e| {
888 Error::generic(format!("Failed to serialize request: {}", e))
889 })?;
890
891 fs::write(&file_path, content).await.map_err(|e| {
892 Error::generic(format!("Failed to write request file: {}", e))
893 })?;
894 }
895 count += 1;
896 }
897 }
898
899 for folder in &workspace.folders {
901 count += self
902 .export_folder_requests_advanced(folder, requests_dir, force, &folder.name, dry_run)
903 .await?;
904 }
905
906 Ok(count)
907 }
908
909 async fn export_folder_requests_advanced(
911 &self,
912 folder: &Folder,
913 requests_dir: &Path,
914 force: bool,
915 folder_path: &str,
916 dry_run: bool,
917 ) -> Result<usize> {
918 use std::collections::VecDeque;
919
920 let mut count = 0;
921 let mut queue = VecDeque::new();
922
923 queue.push_back((folder, folder_path.to_string()));
925
926 while let Some((current_folder, current_path)) = queue.pop_front() {
927 for request in ¤t_folder.requests {
929 let file_path =
930 requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
931 if force || !file_path.exists() {
932 if !dry_run {
933 let exported = self.convert_request_to_exported(request, ¤t_path);
934 let content = serde_yaml::to_string(&exported).map_err(|e| {
935 Error::generic(format!("Failed to serialize request: {}", e))
936 })?;
937
938 fs::write(&file_path, content).await.map_err(|e| {
939 Error::generic(format!("Failed to write request file: {}", e))
940 })?;
941 }
942 count += 1;
943 }
944 }
945
946 for subfolder in ¤t_folder.folders {
948 let subfolder_path = if current_path.is_empty() {
949 subfolder.name.clone()
950 } else {
951 format!("{}/{}", current_path, subfolder.name)
952 };
953 queue.push_back((subfolder, subfolder_path));
954 }
955 }
956
957 Ok(count)
958 }
959
960 async fn export_workspace_requests_grouped_advanced(
962 &self,
963 workspace: &Workspace,
964 requests_dir: &Path,
965 force: bool,
966 dry_run: bool,
967 ) -> Result<usize> {
968 let mut count = 0;
969 let workspace_requests_dir = requests_dir.join(self.sanitize_filename(&workspace.name));
970
971 if !dry_run && !workspace_requests_dir.exists() {
972 fs::create_dir_all(&workspace_requests_dir).await.map_err(|e| {
973 Error::generic(format!("Failed to create workspace requests directory: {}", e))
974 })?;
975 }
976
977 count += self
978 .export_workspace_requests_advanced(workspace, &workspace_requests_dir, force, dry_run)
979 .await?;
980 Ok(count)
981 }
982
    /// Syncs workspaces into `target_dir` — the simpler, pre-existing variant
    /// of [`sync_to_directory_advanced`](Self::sync_to_directory_advanced)
    /// without dry-run, filename-pattern, or exclusion support.
    ///
    /// NOTE(review): the strategy/structure parsing here duplicates the
    /// advanced variant; consider extracting shared parsing helpers.
    pub async fn sync_to_directory(
        &self,
        target_dir: &str,
        strategy: &str,
        workspace_ids: Option<&str>,
        structure: &str,
        include_meta: bool,
        force: bool,
    ) -> Result<SyncResult> {
        let target_path = PathBuf::from(target_dir);

        if !target_path.exists() {
            fs::create_dir_all(&target_path)
                .await
                .map_err(|e| Error::generic(format!("Failed to create target directory: {}", e)))?;
        }

        // Parse the stringly-typed strategy flag into the internal enum.
        let sync_strategy = match strategy {
            "full" => SyncStrategy::Full,
            "incremental" => SyncStrategy::Incremental,
            "selective" => {
                if let Some(ids) = workspace_ids {
                    let workspace_list = ids.split(',').map(|s| s.trim().to_string()).collect();
                    SyncStrategy::Selective(workspace_list)
                } else {
                    return Err(Error::generic("Selective strategy requires workspace IDs"));
                }
            }
            _ => return Err(Error::generic(format!("Unknown sync strategy: {}", strategy))),
        };

        let dir_structure = match structure {
            "flat" => DirectoryStructure::Flat,
            "nested" => DirectoryStructure::Nested,
            "grouped" => DirectoryStructure::Grouped,
            _ => return Err(Error::generic(format!("Unknown directory structure: {}", structure))),
        };

        let workspaces_to_sync = self.get_workspaces_for_sync(&sync_strategy).await?;

        let mut result = SyncResult {
            synced_workspaces: 0,
            synced_requests: 0,
            files_created: 0,
            target_dir: target_path.clone(),
        };

        // Workspaces that fail to load are skipped without counting.
        for workspace_id in workspaces_to_sync {
            if let Ok(workspace) = self.load_workspace(&workspace_id).await {
                let workspace_result = self
                    .sync_workspace_to_directory(
                        &workspace,
                        &target_path,
                        &dir_structure,
                        include_meta,
                        force,
                    )
                    .await?;

                result.synced_workspaces += 1;
                result.synced_requests += workspace_result.requests_count;
                result.files_created += workspace_result.files_created;
            }
        }

        // Advance the incremental bookmark; a failed save only warns.
        if let SyncStrategy::Incremental = sync_strategy {
            let new_sync_state = SyncState {
                last_sync_timestamp: Utc::now(),
            };
            if let Err(e) = self.save_sync_state(&new_sync_state).await {
                tracing::warn!("Failed to save sync state: {}", e);
            }
        }

        Ok(result)
    }
1066
1067 async fn get_workspaces_for_sync(&self, strategy: &SyncStrategy) -> Result<Vec<String>> {
1069 match strategy {
1070 SyncStrategy::Full => self.list_workspace_ids().await,
1071 SyncStrategy::Incremental => {
1072 let sync_state = self.load_sync_state().await?;
1074 let last_sync = sync_state.last_sync_timestamp;
1075
1076 let all_workspace_ids = self.list_workspace_ids().await?;
1078
1079 let mut modified_workspaces = Vec::new();
1081 for workspace_id in all_workspace_ids {
1082 let file_path = self.workspace_file_path(&workspace_id);
1083 if let Ok(metadata) = fs::metadata(&file_path).await {
1084 if let Ok(modified_time) = metadata.modified() {
1085 let modified_datetime = DateTime::<Utc>::from(modified_time);
1086 if modified_datetime > last_sync {
1087 modified_workspaces.push(workspace_id);
1088 }
1089 }
1090 }
1091 }
1092
1093 Ok(modified_workspaces)
1094 }
1095 SyncStrategy::Selective(ids) => Ok(ids.clone()),
1096 }
1097 }
1098
    /// Writes a single workspace into `target_dir` according to `structure` —
    /// the simpler variant backing [`sync_to_directory`](Self::sync_to_directory)
    /// (no dry-run, fixed sanitized-name file naming).
    async fn sync_workspace_to_directory(
        &self,
        workspace: &Workspace,
        target_dir: &Path,
        structure: &DirectoryStructure,
        include_meta: bool,
        force: bool,
    ) -> Result<WorkspaceSyncResult> {
        let mut result = WorkspaceSyncResult {
            requests_count: 0,
            files_created: 0,
        };

        match structure {
            // Flat: one <sanitized-name>.yaml per workspace in the target root.
            DirectoryStructure::Flat => {
                let export = self.create_workspace_export(workspace).await?;
                let file_path =
                    target_dir.join(format!("{}.yaml", self.sanitize_filename(&workspace.name)));

                if force || !file_path.exists() {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::generic(format!("Failed to serialize workspace: {}", e))
                    })?;

                    fs::write(&file_path, content).await.map_err(|e| {
                        Error::generic(format!("Failed to write workspace file: {}", e))
                    })?;

                    result.files_created += 1;
                }
            }

            // Nested: <name>/workspace.yaml plus <name>/requests/*.yaml.
            DirectoryStructure::Nested => {
                let workspace_dir = target_dir.join(self.sanitize_filename(&workspace.name));
                if !workspace_dir.exists() {
                    fs::create_dir_all(&workspace_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create workspace directory: {}", e))
                    })?;
                }

                let export = self.create_workspace_export(workspace).await?;
                let workspace_file = workspace_dir.join("workspace.yaml");

                if force || !workspace_file.exists() {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::generic(format!("Failed to serialize workspace: {}", e))
                    })?;

                    fs::write(&workspace_file, content).await.map_err(|e| {
                        Error::generic(format!("Failed to write workspace file: {}", e))
                    })?;

                    result.files_created += 1;
                }

                let requests_dir = workspace_dir.join("requests");
                if !requests_dir.exists() {
                    fs::create_dir_all(&requests_dir).await.map_err(|e| {
                        Error::generic(format!("Failed to create requests directory: {}", e))
                    })?;
                }

                result.requests_count +=
                    self.export_workspace_requests(workspace, &requests_dir, force).await?;
            }

            // Grouped: shared workspaces/ and requests/ directories.
            DirectoryStructure::Grouped => {
                let requests_dir = target_dir.join("requests");
                let workspaces_dir = target_dir.join("workspaces");

                for dir in [&requests_dir, &workspaces_dir] {
                    if !dir.exists() {
                        fs::create_dir_all(dir).await.map_err(|e| {
                            Error::generic(format!("Failed to create directory: {}", e))
                        })?;
                    }
                }

                let export = self.create_workspace_export(workspace).await?;
                let workspace_file = workspaces_dir
                    .join(format!("{}.yaml", self.sanitize_filename(&workspace.name)));

                if force || !workspace_file.exists() {
                    let content = serde_yaml::to_string(&export).map_err(|e| {
                        Error::generic(format!("Failed to serialize workspace: {}", e))
                    })?;

                    fs::write(&workspace_file, content).await.map_err(|e| {
                        Error::generic(format!("Failed to write workspace file: {}", e))
                    })?;

                    result.files_created += 1;
                }

                result.requests_count +=
                    self.export_workspace_requests_grouped(workspace, &requests_dir, force).await?;
            }
        }

        if include_meta {
            self.create_metadata_file(workspace, target_dir, structure).await?;
            result.files_created += 1;
        }

        Ok(result)
    }
1212
    /// Builds the serializable [`WorkspaceExport`] snapshot for `workspace`:
    /// metadata, flattened config, and every request keyed by request id.
    async fn create_workspace_export(&self, workspace: &Workspace) -> Result<WorkspaceExport> {
        let mut requests = HashMap::new();

        // Flatten root-level and folder requests into one id-keyed map.
        self.collect_requests_from_workspace(workspace, &mut requests, "".to_string());

        let metadata = WorkspaceMetadata {
            id: workspace.id.clone(),
            name: workspace.name.clone(),
            description: workspace.description.clone(),
            exported_at: Utc::now(),
            request_count: requests.len(),
            // NOTE(review): counts only top-level folders; nested subfolders
            // are not included — confirm this is the intended semantics.
            folder_count: workspace.folders.len(),
        };

        let config = WorkspaceConfig {
            auth: workspace.config.auth.as_ref().and_then(AuthConfig::from_config_auth),
            base_url: workspace.config.base_url.clone(),
            variables: workspace.config.global_environment.variables.clone(),
        };

        Ok(WorkspaceExport {
            metadata,
            config,
            requests,
        })
    }
1241
1242 fn collect_requests_from_workspace(
1244 &self,
1245 workspace: &Workspace,
1246 requests: &mut HashMap<String, ExportedRequest>,
1247 folder_path: String,
1248 ) {
1249 for request in &workspace.requests {
1251 let exported = self.convert_request_to_exported(request, &folder_path);
1252 requests.insert(request.id.clone(), exported);
1253 }
1254
1255 for folder in &workspace.folders {
1257 let current_path = if folder_path.is_empty() {
1258 folder.name.clone()
1259 } else {
1260 format!("{}/{}", folder_path, folder.name)
1261 };
1262
1263 for request in &folder.requests {
1264 let exported = self.convert_request_to_exported(request, ¤t_path);
1265 requests.insert(request.id.clone(), exported);
1266 }
1267
1268 self.collect_requests_from_folders(folder, requests, current_path);
1270 }
1271 }
1272
1273 fn collect_requests_from_folders(
1275 &self,
1276 folder: &Folder,
1277 requests: &mut HashMap<String, ExportedRequest>,
1278 folder_path: String,
1279 ) {
1280 for subfolder in &folder.folders {
1281 let current_path = format!("{}/{}", folder_path, subfolder.name);
1282
1283 for request in &subfolder.requests {
1284 let exported = self.convert_request_to_exported(request, ¤t_path);
1285 requests.insert(request.id.clone(), exported);
1286 }
1287
1288 self.collect_requests_from_folders(subfolder, requests, current_path);
1289 }
1290 }
1291
1292 fn convert_request_to_exported(
1294 &self,
1295 request: &MockRequest,
1296 folder_path: &str,
1297 ) -> ExportedRequest {
1298 ExportedRequest {
1299 id: request.id.clone(),
1300 name: request.name.clone(),
1301 method: format!("{:?}", request.method),
1302 path: request.path.clone(),
1303 folder_path: folder_path.to_string(),
1304 headers: request.headers.clone(),
1305 query_params: request.query_params.clone(),
1306 body: request.body.clone(),
1307 response_status: Some(request.response.status_code),
1308 response_body: request.response.body.clone(),
1309 response_headers: request.response.headers.clone(),
1310 delay: request.response.delay_ms,
1311 }
1312 }
1313
1314 pub async fn export_workspace_encrypted(
1316 &self,
1317 workspace: &Workspace,
1318 output_path: &Path,
1319 ) -> Result<EncryptedExportResult> {
1320 if !workspace.config.auto_encryption.enabled {
1322 return Err(Error::generic("Encryption is not enabled for this workspace. Enable encryption in workspace settings first."));
1323 }
1324
1325 let encryption_config = workspace.config.auto_encryption.clone();
1327 let processor = AutoEncryptionProcessor::new(&workspace.id, encryption_config);
1328
1329 let mut filtered_workspace = workspace.to_filtered_for_sync();
1331
1332 self.encrypt_workspace_data(&mut filtered_workspace, &processor)?;
1334
1335 let export = self.create_workspace_export(&filtered_workspace).await?;
1337
1338 let export_json = serde_json::to_string_pretty(&export)
1340 .map_err(|e| Error::generic(format!("Failed to serialize export: {}", e)))?;
1341
1342 let encrypted_data = utils::encrypt_for_workspace(&workspace.id, &export_json)?;
1343
1344 let key_manager = WorkspaceKeyManager::new();
1346 let backup_key = key_manager.generate_workspace_key_backup(&workspace.id)?;
1347
1348 fs::write(output_path, &encrypted_data)
1350 .await
1351 .map_err(|e| Error::generic(format!("Failed to write encrypted export: {}", e)))?;
1352
1353 Ok(EncryptedExportResult {
1354 output_path: output_path.to_path_buf(),
1355 backup_key,
1356 exported_at: Utc::now(),
1357 workspace_name: workspace.name.clone(),
1358 encryption_enabled: true,
1359 })
1360 }
1361
    /// Attempt an encrypted import without a backup key — this always fails.
    ///
    /// Decrypting an export needs the originating workspace id and its backup
    /// key, which this signature does not carry; the method only validates
    /// that the file is readable before returning guidance toward
    /// `import_workspace_encrypted_with_key`. Presumably kept for API
    /// compatibility — TODO confirm against callers.
    pub async fn import_workspace_encrypted(
        &self,
        encrypted_file: &Path,
        _workspace_name: Option<&str>,
        _registry: &mut WorkspaceRegistry,
    ) -> Result<EncryptedImportResult> {
        // Read (and discard) the file so an unreadable path surfaces as a
        // file error rather than the generic guidance below.
        let _encrypted_data = fs::read_to_string(encrypted_file)
            .await
            .map_err(|e| Error::generic(format!("Failed to read encrypted file: {}", e)))?;

        Err(Error::generic("Encrypted import requires workspace ID and backup key. Use import_workspace_encrypted_with_key instead."))
    }
1378
1379 pub async fn import_workspace_encrypted_with_key(
1381 &self,
1382 encrypted_file: &Path,
1383 workspace_id: &str,
1384 backup_key: &str,
1385 workspace_name: Option<&str>,
1386 registry: &mut WorkspaceRegistry,
1387 ) -> Result<EncryptedImportResult> {
1388 let key_manager = WorkspaceKeyManager::new();
1390 if !key_manager.has_workspace_key(workspace_id) {
1391 key_manager.restore_workspace_key_from_backup(workspace_id, backup_key)?;
1392 }
1393
1394 let encrypted_data = fs::read_to_string(encrypted_file)
1396 .await
1397 .map_err(|e| Error::generic(format!("Failed to read encrypted file: {}", e)))?;
1398
1399 let decrypted_json = utils::decrypt_for_workspace(workspace_id, &encrypted_data)?;
1400
1401 let export: WorkspaceExport = serde_json::from_str(&decrypted_json)
1403 .map_err(|e| Error::generic(format!("Failed to parse decrypted export: {}", e)))?;
1404
1405 let workspace = self.convert_export_to_workspace(&export, workspace_name)?;
1407
1408 let imported_id = registry.add_workspace(workspace)?;
1410
1411 Ok(EncryptedImportResult {
1412 workspace_id: imported_id,
1413 workspace_name: export.metadata.name.clone(),
1414 imported_at: Utc::now(),
1415 request_count: export.requests.len(),
1416 encryption_restored: true,
1417 })
1418 }
1419
1420 fn encrypt_workspace_data(
1422 &self,
1423 workspace: &mut Workspace,
1424 processor: &AutoEncryptionProcessor,
1425 ) -> Result<()> {
1426 for env in &mut workspace.config.environments {
1428 processor.process_env_vars(&mut env.variables)?;
1429 }
1430 processor.process_env_vars(&mut workspace.config.global_environment.variables)?;
1431
1432 Ok(())
1436 }
1437
1438 fn convert_export_to_workspace(
1440 &self,
1441 export: &WorkspaceExport,
1442 name_override: Option<&str>,
1443 ) -> Result<Workspace> {
1444 let mut workspace =
1445 Workspace::new(name_override.unwrap_or(&export.metadata.name).to_string());
1446
1447 if let Some(desc) = &export.metadata.description {
1449 workspace.description = Some(desc.clone());
1450 }
1451
1452 for exported_request in export.requests.values() {
1454 let method = self.parse_http_method(&exported_request.method)?;
1456 let mut request = MockRequest::new(
1457 method,
1458 exported_request.path.clone(),
1459 exported_request.name.clone(),
1460 );
1461
1462 if let Some(status) = exported_request.response_status {
1464 request.response.status_code = status;
1465 }
1466
1467 if let Some(body) = &exported_request.response_body {
1469 request.response.body = Some(body.clone());
1470 }
1471 request.response.headers = exported_request.response_headers.clone();
1472 if let Some(delay) = exported_request.delay {
1473 request.response.delay_ms = Some(delay);
1474 }
1475
1476 workspace.add_request(request)?;
1477 }
1478
1479 workspace.config.global_environment.variables = export.config.variables.clone();
1481
1482 Ok(workspace)
1483 }
1484
1485 fn parse_http_method(&self, method_str: &str) -> Result<crate::routing::HttpMethod> {
1487 match method_str.to_uppercase().as_str() {
1488 "GET" => Ok(crate::routing::HttpMethod::GET),
1489 "POST" => Ok(crate::routing::HttpMethod::POST),
1490 "PUT" => Ok(crate::routing::HttpMethod::PUT),
1491 "DELETE" => Ok(crate::routing::HttpMethod::DELETE),
1492 "PATCH" => Ok(crate::routing::HttpMethod::PATCH),
1493 "HEAD" => Ok(crate::routing::HttpMethod::HEAD),
1494 "OPTIONS" => Ok(crate::routing::HttpMethod::OPTIONS),
1495 _ => Err(Error::generic(format!("Unknown HTTP method: {}", method_str))),
1496 }
1497 }
1498
1499 pub fn check_workspace_for_unencrypted_secrets(
1501 &self,
1502 workspace: &Workspace,
1503 ) -> Result<SecurityCheckResult> {
1504 let mut warnings = Vec::new();
1505 let errors = Vec::new();
1506
1507 self.check_environment_variables(workspace, &mut warnings)?;
1509
1510 let has_warnings = !warnings.is_empty();
1514 let has_errors = !errors.is_empty();
1515
1516 Ok(SecurityCheckResult {
1517 workspace_id: workspace.id.clone(),
1518 workspace_name: workspace.name.clone(),
1519 warnings,
1520 errors,
1521 is_secure: !has_warnings && !has_errors,
1522 recommended_actions: self.generate_security_recommendations(has_warnings, has_errors),
1523 })
1524 }
1525
1526 fn check_environment_variables(
1528 &self,
1529 workspace: &Workspace,
1530 warnings: &mut Vec<SecurityWarning>,
1531 ) -> Result<()> {
1532 let sensitive_keys = [
1533 "password",
1534 "secret",
1535 "key",
1536 "token",
1537 "credential",
1538 "api_key",
1539 "apikey",
1540 "api_secret",
1541 "db_password",
1542 "database_password",
1543 "aws_secret_key",
1544 "aws_session_token",
1545 "private_key",
1546 "authorization",
1547 "auth_token",
1548 "access_token",
1549 "refresh_token",
1550 "cookie",
1551 "session",
1552 "csrf",
1553 "jwt",
1554 "bearer",
1555 ];
1556
1557 for (key, value) in &workspace.config.global_environment.variables {
1559 if self.is_potentially_sensitive(key, value, &sensitive_keys) {
1560 warnings.push(SecurityWarning {
1561 field_type: "environment_variable".to_string(),
1562 field_name: key.clone(),
1563 location: "global_environment".to_string(),
1564 severity: SecuritySeverity::High,
1565 message: format!(
1566 "Potentially sensitive environment variable '{}' detected",
1567 key
1568 ),
1569 suggestion: "Consider encrypting this value or excluding it from exports"
1570 .to_string(),
1571 });
1572 }
1573 }
1574
1575 for env in &workspace.config.environments {
1577 for (key, value) in &env.variables {
1578 if self.is_potentially_sensitive(key, value, &sensitive_keys) {
1579 warnings.push(SecurityWarning {
1580 field_type: "environment_variable".to_string(),
1581 field_name: key.clone(),
1582 location: format!("environment '{}'", env.name),
1583 severity: SecuritySeverity::High,
1584 message: format!("Potentially sensitive environment variable '{}' detected in environment '{}'", key, env.name),
1585 suggestion: "Consider encrypting this value or excluding it from exports".to_string(),
1586 });
1587 }
1588 }
1589 }
1590
1591 Ok(())
1592 }
1593
1594 fn is_potentially_sensitive(&self, key: &str, value: &str, sensitive_keys: &[&str]) -> bool {
1596 let key_lower = key.to_lowercase();
1597
1598 if sensitive_keys.iter().any(|&sensitive| key_lower.contains(sensitive)) {
1600 return true;
1601 }
1602
1603 self.contains_sensitive_patterns(value)
1605 }
1606
1607 fn contains_sensitive_patterns(&self, value: &str) -> bool {
1609 if CREDIT_CARD_PATTERN.is_match(value) {
1611 return true;
1612 }
1613
1614 if SSN_PATTERN.is_match(value) {
1616 return true;
1617 }
1618
1619 if value.len() > 20 && value.chars().any(|c| c.is_alphanumeric()) {
1621 let alphanumeric_count = value.chars().filter(|c| c.is_alphanumeric()).count();
1622 let total_count = value.len();
1623 if alphanumeric_count as f64 / total_count as f64 > 0.8 {
1624 return true;
1625 }
1626 }
1627
1628 false
1629 }
1630
1631 fn generate_security_recommendations(
1633 &self,
1634 has_warnings: bool,
1635 has_errors: bool,
1636 ) -> Vec<String> {
1637 let mut recommendations = Vec::new();
1638
1639 if has_warnings || has_errors {
1640 recommendations.push("Enable encryption for this workspace in settings".to_string());
1641 recommendations.push("Review and encrypt sensitive environment variables".to_string());
1642 recommendations.push("Use encrypted export for sharing workspaces".to_string());
1643 }
1644
1645 if has_errors {
1646 recommendations
1647 .push("CRITICAL: Remove or encrypt sensitive data before proceeding".to_string());
1648 }
1649
1650 recommendations
1651 }
1652
1653 async fn export_workspace_requests(
1655 &self,
1656 workspace: &Workspace,
1657 requests_dir: &Path,
1658 force: bool,
1659 ) -> Result<usize> {
1660 let mut count = 0;
1661
1662 for request in &workspace.requests {
1663 let file_path =
1664 requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
1665 if force || !file_path.exists() {
1666 let exported = self.convert_request_to_exported(request, "");
1667 let content = serde_yaml::to_string(&exported)
1668 .map_err(|e| Error::generic(format!("Failed to serialize request: {}", e)))?;
1669
1670 fs::write(&file_path, content)
1671 .await
1672 .map_err(|e| Error::generic(format!("Failed to write request file: {}", e)))?;
1673
1674 count += 1;
1675 }
1676 }
1677
1678 for folder in &workspace.folders {
1680 count += self.export_folder_requests(folder, requests_dir, force, &folder.name).await?;
1681 }
1682
1683 Ok(count)
1684 }
1685
1686 async fn export_folder_requests(
1688 &self,
1689 folder: &Folder,
1690 requests_dir: &Path,
1691 force: bool,
1692 folder_path: &str,
1693 ) -> Result<usize> {
1694 use std::collections::VecDeque;
1695
1696 let mut count = 0;
1697 let mut queue = VecDeque::new();
1698
1699 queue.push_back((folder, folder_path.to_string()));
1701
1702 while let Some((current_folder, current_path)) = queue.pop_front() {
1703 for request in ¤t_folder.requests {
1705 let file_path =
1706 requests_dir.join(format!("{}.yaml", self.sanitize_filename(&request.name)));
1707 if force || !file_path.exists() {
1708 let exported = self.convert_request_to_exported(request, ¤t_path);
1709 let content = serde_yaml::to_string(&exported).map_err(|e| {
1710 Error::generic(format!("Failed to serialize request: {}", e))
1711 })?;
1712
1713 fs::write(&file_path, content).await.map_err(|e| {
1714 Error::generic(format!("Failed to write request file: {}", e))
1715 })?;
1716
1717 count += 1;
1718 }
1719 }
1720
1721 for subfolder in ¤t_folder.folders {
1723 let subfolder_path = if current_path.is_empty() {
1724 subfolder.name.clone()
1725 } else {
1726 format!("{}/{}", current_path, subfolder.name)
1727 };
1728 queue.push_back((subfolder, subfolder_path));
1729 }
1730 }
1731
1732 Ok(count)
1733 }
1734
1735 async fn export_workspace_requests_grouped(
1737 &self,
1738 workspace: &Workspace,
1739 requests_dir: &Path,
1740 force: bool,
1741 ) -> Result<usize> {
1742 let mut count = 0;
1743 let workspace_requests_dir = requests_dir.join(self.sanitize_filename(&workspace.name));
1744
1745 if !workspace_requests_dir.exists() {
1746 fs::create_dir_all(&workspace_requests_dir).await.map_err(|e| {
1747 Error::generic(format!("Failed to create workspace requests directory: {}", e))
1748 })?;
1749 }
1750
1751 count += self
1752 .export_workspace_requests(workspace, &workspace_requests_dir, force)
1753 .await?;
1754 Ok(count)
1755 }
1756
1757 async fn create_metadata_file(
1759 &self,
1760 workspace: &Workspace,
1761 target_dir: &Path,
1762 structure: &DirectoryStructure,
1763 ) -> Result<()> {
1764 let metadata = serde_json::json!({
1765 "workspace_id": workspace.id,
1766 "workspace_name": workspace.name,
1767 "description": workspace.description,
1768 "exported_at": Utc::now().to_rfc3339(),
1769 "structure": format!("{:?}", structure),
1770 "version": "1.0",
1771 "source": "mockforge"
1772 });
1773
1774 let metadata_file = target_dir.join(".mockforge-meta.json");
1775 let content = serde_json::to_string_pretty(&metadata)
1776 .map_err(|e| Error::generic(format!("Failed to serialize metadata: {}", e)))?;
1777
1778 fs::write(&metadata_file, content)
1779 .await
1780 .map_err(|e| Error::generic(format!("Failed to write metadata file: {}", e)))?;
1781
1782 Ok(())
1783 }
1784
1785 fn sanitize_filename(&self, name: &str) -> String {
1787 name.chars()
1788 .map(|c| match c {
1789 '/' | '\\' | ':' | '*' | '?' | '"' | '<' | '>' | '|' => '_',
1790 c if c.is_whitespace() => '_',
1791 c => c,
1792 })
1793 .collect::<String>()
1794 .to_lowercase()
1795 }
1796}
1797
/// Per-workspace tallies accumulated while exporting a workspace to disk.
#[derive(Debug)]
struct WorkspaceSyncResult {
    /// Number of request files written for this workspace.
    requests_count: usize,
    /// Number of additional files written (workspace definition, metadata).
    files_created: usize,
}
1806
1807#[cfg(test)]
1808mod tests {
1809 use super::*;
1810 use crate::workspace::{MockRequest, Workspace};
1811 use crate::HttpMethod;
1812 use tempfile::TempDir;
1813
1814 #[tokio::test]
1815 async fn test_workspace_persistence() {
1816 let temp_dir = TempDir::new().unwrap();
1817 let persistence = WorkspacePersistence::new(temp_dir.path());
1818
1819 let mut workspace = Workspace::new("Test Workspace".to_string());
1821 let request =
1822 MockRequest::new(HttpMethod::GET, "/test".to_string(), "Test Request".to_string());
1823 workspace.add_request(request).unwrap();
1824
1825 persistence.save_workspace(&workspace).await.unwrap();
1827
1828 let loaded = persistence.load_workspace(&workspace.id).await.unwrap();
1830 assert_eq!(loaded.name, workspace.name);
1831 assert_eq!(loaded.requests.len(), 1);
1832
1833 let ids = persistence.list_workspace_ids().await.unwrap();
1835 assert_eq!(ids.len(), 1);
1836 assert_eq!(ids[0], workspace.id);
1837 }
1838
1839 #[tokio::test]
1840 async fn test_registry_persistence() {
1841 let temp_dir = TempDir::new().unwrap();
1842 let persistence = WorkspacePersistence::new(temp_dir.path());
1843
1844 let mut registry = WorkspaceRegistry::new();
1845
1846 let workspace1 = Workspace::new("Workspace 1".to_string());
1848 let workspace2 = Workspace::new("Workspace 2".to_string());
1849
1850 let id1 = registry.add_workspace(workspace1).unwrap();
1851 let _id2 = registry.add_workspace(workspace2).unwrap();
1852
1853 registry.set_active_workspace(Some(id1.clone())).unwrap();
1855
1856 persistence.save_full_registry(®istry).await.unwrap();
1858
1859 let loaded_registry = persistence.load_full_registry().await.unwrap();
1861
1862 assert_eq!(loaded_registry.get_workspaces().len(), 2);
1863 assert_eq!(loaded_registry.get_active_workspace().unwrap().name, "Workspace 1");
1864 }
1865
1866 #[tokio::test]
1867 async fn test_backup_and_restore() {
1868 let temp_dir = TempDir::new().unwrap();
1869 let backup_dir = temp_dir.path().join("backups");
1870 let persistence = WorkspacePersistence::new(temp_dir.path());
1871
1872 let workspace = Workspace::new("Test Workspace".to_string());
1874 persistence.save_workspace(&workspace).await.unwrap();
1875
1876 let backup_path = persistence.backup_workspace(&workspace.id, &backup_dir).await.unwrap();
1878 assert!(backup_path.exists());
1879
1880 persistence.delete_workspace(&workspace.id).await.unwrap();
1882 assert!(persistence.load_workspace(&workspace.id).await.is_err());
1883
1884 let restored_id = persistence.restore_workspace(&backup_path).await.unwrap();
1886
1887 let restored = persistence.load_workspace(&restored_id).await.unwrap();
1889 assert_eq!(restored.name, "Test Workspace");
1890 }
1891}