1use crate::error::{ParseError, ParseResult};
33use crate::import_optimizer::ImportOptimizer;
34#[cfg(feature = "modernize")]
35use crate::modernize::PerlModernizer as ModernizeEngine;
36#[cfg(feature = "workspace_refactor")]
37use crate::workspace_index::WorkspaceIndex;
38#[cfg(feature = "workspace_refactor")]
39use crate::workspace_refactor::WorkspaceRefactor;
40use perl_parser_core::line_index::LineIndex;
41use perl_parser_core::{Node, NodeKind, Parser, SourceLocation};
42use perl_qualified_name::{
43 is_valid_identifier_part, validate_perl_qualified_name as validate_package_name,
44};
45use serde::{Deserialize, Serialize};
46use std::collections::{HashMap, HashSet};
47use std::fs;
48use std::path::{Path, PathBuf};
49
/// Coordinates Perl refactoring operations (rename, extract-method, move,
/// modernize, import optimization, inline) with optional backups and
/// rollback support.
pub struct RefactoringEngine {
    // Cross-file rename support; real engine behind the `workspace_refactor`
    // feature, a stub otherwise.
    #[cfg(feature = "workspace_refactor")]
    #[allow(dead_code)]
    workspace_refactor: WorkspaceRefactor,
    #[cfg(not(feature = "workspace_refactor"))]
    #[allow(dead_code)]
    workspace_refactor: temp_stubs::WorkspaceRefactor,
    // Code-modernization engine; stubbed when the `modernize` feature is off.
    #[cfg(feature = "modernize")]
    modernize: crate::modernize::PerlModernizer,
    #[cfg(not(feature = "modernize"))]
    modernize: temp_stubs::ModernizeEngine,
    // Rewrites use/require statements for the OptimizeImports operation.
    import_optimizer: ImportOptimizer,
    // Tunables: safe-mode validation, backups, file limits, retention.
    config: RefactoringConfig,
    // Completed operations, oldest first; consulted by `rollback`.
    operation_history: Vec<RefactoringOperation>,
}
75
/// Tunables controlling how refactoring operations run.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RefactoringConfig {
    /// Validate operations before executing them (see `validate_operation`).
    pub safe_mode: bool,
    /// Hard cap on the number of files a single operation may touch.
    pub max_files_per_operation: usize,
    /// Snapshot affected files before modifying them, enabling `rollback`.
    pub create_backups: bool,
    /// Operation timeout in seconds.
    /// NOTE(review): not enforced in the code visible here — confirm where
    /// this is consumed.
    pub operation_timeout: u64,
    /// Allow parallel file processing.
    /// NOTE(review): not consumed in the code visible here.
    pub parallel_processing: bool,
    /// Maximum number of backup directories to keep (0 = keep none).
    pub max_backup_retention: usize,
    /// Maximum backup age in seconds before cleanup (0 disables age checks).
    pub backup_max_age_seconds: u64,
    /// Override for the backup root directory; defaults to the system temp
    /// dir when `None`. Not serialized.
    #[serde(skip)]
    pub backup_root: Option<PathBuf>,
}
97
98impl Default for RefactoringConfig {
99 fn default() -> Self {
100 Self {
101 safe_mode: true,
102 max_files_per_operation: 100,
103 create_backups: true,
104 operation_timeout: 60,
105 parallel_processing: true,
106 max_backup_retention: 10,
107 backup_max_age_seconds: 7 * 24 * 60 * 60, backup_root: None,
109 }
110 }
111}
112
/// The kinds of refactoring the engine can perform, with their parameters.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum RefactoringType {
    /// Rename a symbol (optionally sigil-prefixed) within a scope.
    SymbolRename {
        old_name: String,
        new_name: String,
        scope: RefactoringScope,
    },
    /// Extract the (line, column)..(line, column) range into a new sub.
    ExtractMethod {
        method_name: String,
        start_position: (usize, usize),
        end_position: (usize, usize),
    },
    /// Move the named elements from one file to another.
    MoveCode {
        source_file: PathBuf,
        target_file: PathBuf,
        elements: Vec<String>,
    },
    /// Apply the selected modernization passes.
    Modernize {
        patterns: Vec<ModernizationPattern>,
    },
    /// Clean up import/use statements.
    OptimizeImports {
        remove_unused: bool,
        sort_alphabetically: bool,
        group_by_type: bool,
    },
    /// Replace uses of a symbol with its definition.
    Inline {
        symbol_name: String,
        all_occurrences: bool,
    },
}
165
/// Where a refactoring applies.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum RefactoringScope {
    /// A single file.
    File(PathBuf),
    /// Every indexed file in the workspace.
    Workspace,
    /// All files under a directory.
    Directory(PathBuf),
    /// An explicit set of files.
    FileSet(Vec<PathBuf>),
    /// A named `package` block within one file.
    Package {
        file: PathBuf,
        name: String,
    },
    /// A named `sub` within one file.
    Function {
        file: PathBuf,
        name: String,
    },
    /// An explicit (line, column) range within one file.
    Block {
        file: PathBuf,
        start: (u32, u32),
        end: (u32, u32),
    },
}
201
/// Individual modernization passes selectable for [`RefactoringType::Modernize`].
/// NOTE(review): pass semantics live in the `modernize` module — variant
/// names are descriptive only from this file's perspective.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ModernizationPattern {
    SubroutineCalls,
    StrictWarnings,
    DeprecatedOperators,
    VariableDeclarations,
    PackageDeclarations,
}
216
/// A record of one executed refactoring, kept for history and rollback.
#[derive(Debug, Clone)]
pub struct RefactoringOperation {
    /// Unique id (`refactor_<secs>_<nanos>`), also used as the backup
    /// directory name.
    pub id: String,
    /// What was performed, with its parameters.
    pub operation_type: RefactoringType,
    /// Files the operation was asked to touch.
    pub modified_files: Vec<PathBuf>,
    /// When the operation was recorded.
    pub timestamp: std::time::SystemTime,
    /// Pre-operation snapshot; present when `create_backups` was enabled.
    pub backup_info: Option<BackupInfo>,
}
231
/// Locations of the pre-operation file snapshots.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct BackupInfo {
    /// Per-operation directory holding the copies.
    pub backup_dir: PathBuf,
    /// Original path -> backup copy path, used by `rollback`.
    pub file_mappings: HashMap<PathBuf, PathBuf>,
}
240
/// Outcome of pruning old backup directories.
#[derive(Debug, Clone)]
pub struct BackupCleanupResult {
    /// Number of backup directories deleted.
    pub directories_removed: usize,
    /// Total bytes freed (top-level files only; see
    /// `calculate_directory_size`).
    pub space_reclaimed: u64,
}
249
/// Snapshot of one backup directory, used by the retention policies.
#[derive(Debug, Clone)]
#[allow(dead_code)] struct BackupDirMetadata {
    /// Directory path under the backup root.
    path: PathBuf,
    /// Filesystem modification time; backups are aged and ordered by this.
    modified: std::time::SystemTime,
    /// Size of the directory's top-level files, in bytes.
    size: u64,
}
261
/// Summary returned by every refactoring operation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RefactoringResult {
    /// Whether the operation completed successfully.
    pub success: bool,
    /// Number of files actually written.
    pub files_modified: usize,
    /// Number of individual edits applied.
    pub changes_made: usize,
    /// Non-fatal notices (e.g. a disabled feature).
    pub warnings: Vec<String>,
    /// Failure descriptions when `success` is false.
    pub errors: Vec<String>,
    /// Id usable with `rollback`; set by `refactor` on success.
    pub operation_id: Option<String>,
}
278
279impl RefactoringEngine {
    /// Creates an engine with the default [`RefactoringConfig`].
    pub fn new() -> Self {
        Self::with_config(RefactoringConfig::default())
    }
284
    /// Creates an engine using `config`, wiring up real or stubbed
    /// sub-engines depending on which cargo features are enabled.
    pub fn with_config(config: RefactoringConfig) -> Self {
        Self {
            #[cfg(feature = "workspace_refactor")]
            workspace_refactor: WorkspaceRefactor::new(WorkspaceIndex::default()),
            #[cfg(not(feature = "workspace_refactor"))]
            workspace_refactor: temp_stubs::WorkspaceRefactor::new(),
            #[cfg(feature = "modernize")]
            modernize: ModernizeEngine::new(),
            #[cfg(not(feature = "modernize"))]
            modernize: temp_stubs::ModernizeEngine::new(),
            import_optimizer: ImportOptimizer::new(),
            config,
            operation_history: Vec::new(),
        }
    }
301
302 pub fn refactor(
304 &mut self,
305 operation_type: RefactoringType,
306 files: Vec<PathBuf>,
307 ) -> ParseResult<RefactoringResult> {
308 let operation_id = self.generate_operation_id();
309
310 if self.config.safe_mode {
312 self.validate_operation(&operation_type, &files)?;
313 }
314
315 let backup_info = if self.config.create_backups {
317 Some(self.create_backup(&files, &operation_id)?)
318 } else {
319 None
320 };
321
322 let result = match operation_type.clone() {
324 RefactoringType::SymbolRename { old_name, new_name, scope } => {
325 self.perform_symbol_rename(&old_name, &new_name, &scope)
326 }
327 RefactoringType::ExtractMethod { method_name, start_position, end_position } => {
328 self.perform_extract_method(&method_name, start_position, end_position, &files)
329 }
330 RefactoringType::MoveCode { source_file, target_file, elements } => {
331 self.perform_move_code(&source_file, &target_file, &elements)
332 }
333 RefactoringType::Modernize { patterns } => self.perform_modernize(&patterns, &files),
334 RefactoringType::OptimizeImports {
335 remove_unused,
336 sort_alphabetically,
337 group_by_type,
338 } => self.perform_optimize_imports(
339 remove_unused,
340 sort_alphabetically,
341 group_by_type,
342 &files,
343 ),
344 RefactoringType::Inline { symbol_name, all_occurrences } => {
345 self.perform_inline(&symbol_name, all_occurrences, &files)
346 }
347 };
348
349 let operation = RefactoringOperation {
351 id: operation_id.clone(),
352 operation_type,
353 modified_files: files,
354 timestamp: std::time::SystemTime::now(),
355 backup_info,
356 };
357 self.operation_history.push(operation);
358
359 match result {
361 Ok(mut res) => {
362 res.operation_id = Some(operation_id);
363 Ok(res)
364 }
365 Err(e) => Err(e),
366 }
367 }
368
369 pub fn rollback(&mut self, operation_id: &str) -> ParseResult<RefactoringResult> {
371 let operation =
373 self.operation_history.iter().find(|op| op.id == operation_id).ok_or_else(|| {
374 ParseError::SyntaxError {
375 message: format!("Operation {} not found", operation_id),
376 location: 0,
377 }
378 })?;
379
380 if let Some(backup_info) = &operation.backup_info {
381 let mut restored_count = 0;
383 for (original, backup) in &backup_info.file_mappings {
384 if backup.exists() {
385 std::fs::copy(backup, original).map_err(|e| ParseError::SyntaxError {
386 message: format!("Failed to restore {}: {}", original.display(), e),
387 location: 0,
388 })?;
389 restored_count += 1;
390 }
391 }
392
393 Ok(RefactoringResult {
394 success: true,
395 files_modified: restored_count,
396 changes_made: restored_count,
397 warnings: vec![],
398 errors: vec![],
399 operation_id: None,
400 })
401 } else {
402 Err(ParseError::SyntaxError {
403 message: "No backup available for rollback".to_string(),
404 location: 0,
405 })
406 }
407 }
408
    /// Returns the recorded operations, oldest first.
    pub fn get_operation_history(&self) -> &[RefactoringOperation] {
        &self.operation_history
    }
413
    /// Prunes backup directories per the retention policies, then forgets
    /// the operation history. After this, previously recorded operations can
    /// no longer be rolled back through this engine.
    pub fn clear_history(&mut self) -> ParseResult<BackupCleanupResult> {
        let cleanup_result = self.cleanup_backup_directories()?;
        self.operation_history.clear();
        Ok(cleanup_result)
    }
420
421 pub fn index_file(&mut self, path: &Path, content: &str) -> ParseResult<()> {
423 #[cfg(feature = "workspace_refactor")]
424 {
425 let uri_str = crate::workspace_index::fs_path_to_uri(path).map_err(|e| {
426 ParseError::SyntaxError {
427 message: format!("URI conversion failed: {}", e),
428 location: 0,
429 }
430 })?;
431 let url = url::Url::parse(&uri_str).map_err(|e| ParseError::SyntaxError {
432 message: format!("URL parsing failed: {}", e),
433 location: 0,
434 })?;
435 self.workspace_refactor._index.index_file(url, content.to_string()).map_err(|e| {
436 ParseError::SyntaxError { message: format!("Indexing failed: {}", e), location: 0 }
437 })?;
438 }
439 let _ = content; Ok(())
441 }
442
443 fn generate_operation_id(&self) -> String {
446 let duration =
447 std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH).unwrap_or_default();
448 format!("refactor_{}_{}", duration.as_secs(), duration.subsec_nanos())
449 }
450
    /// Pre-flight validation for `refactor` (safe mode): checks name
    /// validity, scope/file existence, and per-operation limits before any
    /// file is touched. Returns the first violation found.
    fn validate_operation(
        &self,
        operation_type: &RefactoringType,
        files: &[PathBuf],
    ) -> ParseResult<()> {
        // Global file-count cap applies to every operation kind.
        if files.len() > self.config.max_files_per_operation {
            return Err(ParseError::SyntaxError {
                message: format!(
                    "Operation exceeds maximum file limit: {} files provided, {} allowed",
                    files.len(),
                    self.config.max_files_per_operation
                ),
                location: 0,
            });
        }

        match operation_type {
            RefactoringType::SymbolRename { old_name, new_name, scope } => {
                self.validate_perl_identifier(old_name, "old_name")?;
                self.validate_perl_identifier(new_name, "new_name")?;

                // Renaming to the same name would be a no-op.
                if old_name == new_name {
                    return Err(ParseError::SyntaxError {
                        message: format!(
                            "SymbolRename: old_name and new_name must be different (got '{}')",
                            old_name
                        ),
                        location: 0,
                    });
                }

                // A rename across sigils ($foo -> @foo) would change the
                // variable's kind, not just its name.
                let old_sigil = Self::extract_sigil(old_name);
                let new_sigil = Self::extract_sigil(new_name);
                if old_sigil != new_sigil {
                    return Err(ParseError::SyntaxError {
                        message: format!(
                            "SymbolRename: sigil mismatch - old_name '{}' has sigil {:?}, new_name '{}' has sigil {:?}",
                            old_name, old_sigil, new_name, new_sigil
                        ),
                        location: 0,
                    });
                }

                // Every scope that names concrete paths must point at
                // something that exists.
                match scope {
                    RefactoringScope::File(path) => {
                        self.validate_file_exists(path)?;
                    }
                    RefactoringScope::Directory(path) => {
                        self.validate_directory_exists(path)?;
                    }
                    RefactoringScope::FileSet(paths) => {
                        if paths.is_empty() {
                            return Err(ParseError::SyntaxError {
                                message: "FileSet scope requires at least one file".to_string(),
                                location: 0,
                            });
                        }
                        // The scope's own file list honors the same cap as
                        // the operation's file list.
                        if paths.len() > self.config.max_files_per_operation {
                            return Err(ParseError::SyntaxError {
                                message: format!(
                                    "FileSet scope exceeds maximum file limit: {} files provided, {} allowed",
                                    paths.len(),
                                    self.config.max_files_per_operation
                                ),
                                location: 0,
                            });
                        }
                        for path in paths {
                            self.validate_file_exists(path)?;
                        }
                    }
                    RefactoringScope::Workspace => {
                        // Workspace scope names no paths to validate here.
                    }
                    RefactoringScope::Package { file, .. }
                    | RefactoringScope::Function { file, .. }
                    | RefactoringScope::Block { file, .. } => {
                        self.validate_file_exists(file)?;
                    }
                }
            }

            RefactoringType::ExtractMethod { method_name, start_position, end_position } => {
                self.validate_perl_subroutine_name(method_name)?;

                // The new sub is declared bare; '&' belongs at call sites.
                if method_name.starts_with('&') {
                    return Err(ParseError::SyntaxError {
                        message: format!(
                            "ExtractMethod method_name must be a bare identifier (no leading '&'): got '{}'",
                            method_name
                        ),
                        location: 0,
                    });
                }

                // Extraction operates on exactly one file.
                if files.is_empty() {
                    return Err(ParseError::SyntaxError {
                        message: "ExtractMethod requires a target file".to_string(),
                        location: 0,
                    });
                }
                if files.len() > 1 {
                    return Err(ParseError::SyntaxError {
                        message: "ExtractMethod operates on a single file".to_string(),
                        location: 0,
                    });
                }
                self.validate_file_exists(&files[0])?;

                // (line, column) tuples compare lexicographically, which is
                // exactly document order.
                if start_position >= end_position {
                    return Err(ParseError::SyntaxError {
                        message: format!(
                            "Invalid extraction range: start {:?} must be before end {:?}",
                            start_position, end_position
                        ),
                        location: 0,
                    });
                }
            }

            RefactoringType::MoveCode { source_file, target_file, elements } => {
                self.validate_file_exists(source_file)?;

                if source_file == target_file {
                    return Err(ParseError::SyntaxError {
                        message: format!(
                            "MoveCode: source_file and target_file must be different (got '{}')",
                            source_file.display()
                        ),
                        location: 0,
                    });
                }

                // The target file may not exist yet, but its parent
                // directory must.
                if let Some(parent) = target_file.parent() {
                    if !parent.as_os_str().is_empty() && !parent.exists() {
                        return Err(ParseError::SyntaxError {
                            message: format!(
                                "Target directory does not exist: {}",
                                parent.display()
                            ),
                            location: 0,
                        });
                    }
                }

                if elements.is_empty() {
                    return Err(ParseError::SyntaxError {
                        message: "MoveCode requires at least one element to move".to_string(),
                        location: 0,
                    });
                }

                // Each moved element must be a valid (possibly qualified)
                // Perl name.
                for element in elements {
                    self.validate_perl_qualified_name(element)?;
                }
            }

            RefactoringType::Modernize { patterns } => {
                if patterns.is_empty() {
                    return Err(ParseError::SyntaxError {
                        message: "Modernize requires at least one pattern".to_string(),
                        location: 0,
                    });
                }
                for file in files {
                    self.validate_file_exists(file)?;
                }
            }

            RefactoringType::OptimizeImports { .. } => {
                for file in files {
                    self.validate_file_exists(file)?;
                }
            }

            RefactoringType::Inline { symbol_name, .. } => {
                self.validate_perl_identifier(symbol_name, "symbol_name")?;

                if files.is_empty() {
                    return Err(ParseError::SyntaxError {
                        message: "Inline requires at least one target file".to_string(),
                        location: 0,
                    });
                }
                for file in files {
                    self.validate_file_exists(file)?;
                }
            }
        }

        Ok(())
    }
659
660 fn validate_perl_identifier(&self, name: &str, param_name: &str) -> ParseResult<()> {
665 if name.is_empty() {
666 return Err(ParseError::SyntaxError {
667 message: format!("{} cannot be empty", param_name),
668 location: 0,
669 });
670 }
671
672 let bare_name = name.strip_prefix(['$', '@', '%', '&', '*']).unwrap_or(name);
674
675 if bare_name.is_empty() {
676 return Err(ParseError::SyntaxError {
677 message: format!("{} cannot be only a sigil", param_name),
678 location: 0,
679 });
680 }
681
682 let parts: Vec<&str> = bare_name.split("::").collect();
687 for (i, part) in parts.iter().enumerate() {
688 if part.is_empty() {
689 if i == 0 {
691 continue;
692 }
693 return Err(ParseError::SyntaxError {
695 message: format!(
696 "Invalid Perl identifier in {}: '{}' (contains empty segment - trailing or double ::)",
697 param_name, name
698 ),
699 location: 0,
700 });
701 }
702 if !is_valid_identifier_part(part) {
703 return Err(ParseError::SyntaxError {
704 message: format!(
705 "Invalid Perl identifier in {}: '{}' (must start with letter/underscore)",
706 param_name, name
707 ),
708 location: 0,
709 });
710 }
711 }
712
713 Ok(())
714 }
715
716 fn validate_perl_subroutine_name(&self, name: &str) -> ParseResult<()> {
718 if name.is_empty() {
719 return Err(ParseError::SyntaxError {
720 message: "Subroutine name cannot be empty".to_string(),
721 location: 0,
722 });
723 }
724
725 let bare_name = name.strip_prefix('&').unwrap_or(name);
727
728 if bare_name.starts_with(['$', '@', '%', '*']) {
730 return Err(ParseError::SyntaxError {
731 message: format!("Invalid sigil for subroutine name: '{}'", name),
732 location: 0,
733 });
734 }
735
736 if !is_valid_identifier_part(bare_name) {
737 return Err(ParseError::SyntaxError {
738 message: format!(
739 "Invalid subroutine name: '{}' (must start with letter/underscore)",
740 name
741 ),
742 location: 0,
743 });
744 }
745
746 Ok(())
747 }
748
    /// Validates a possibly `::`-qualified Perl name via the shared
    /// `perl_qualified_name` validator, mapping its error into a
    /// `ParseError`.
    fn validate_perl_qualified_name(&self, name: &str) -> ParseResult<()> {
        validate_package_name(name).map_err(|error| ParseError::SyntaxError {
            message: format!("Invalid qualified name '{}': {}", name, error),
            location: 0,
        })
    }
757
758 fn extract_sigil(name: &str) -> Option<char> {
760 let first_char = name.chars().next()?;
761 if matches!(first_char, '$' | '@' | '%' | '&' | '*') { Some(first_char) } else { None }
762 }
763
764 fn validate_file_exists(&self, path: &Path) -> ParseResult<()> {
765 if !path.exists() {
766 return Err(ParseError::SyntaxError {
767 message: format!("File does not exist: {}", path.display()),
768 location: 0,
769 });
770 }
771 if !path.is_file() {
772 return Err(ParseError::SyntaxError {
773 message: format!("Path is not a file: {}", path.display()),
774 location: 0,
775 });
776 }
777 Ok(())
778 }
779
780 fn validate_directory_exists(&self, path: &Path) -> ParseResult<()> {
781 if !path.exists() {
782 return Err(ParseError::SyntaxError {
783 message: format!("Directory does not exist: {}", path.display()),
784 location: 0,
785 });
786 }
787 if !path.is_dir() {
788 return Err(ParseError::SyntaxError {
789 message: format!("Path is not a directory: {}", path.display()),
790 location: 0,
791 });
792 }
793 Ok(())
794 }
795
796 fn create_backup(&self, files: &[PathBuf], operation_id: &str) -> ParseResult<BackupInfo> {
797 let backup_dir = self.backup_root().join(operation_id);
798
799 if !backup_dir.exists() {
800 std::fs::create_dir_all(&backup_dir).map_err(|e| ParseError::SyntaxError {
801 message: format!("Failed to create backup directory: {}", e),
802 location: 0,
803 })?;
804 }
805
806 let mut file_mappings = HashMap::new();
807
808 for (i, file) in files.iter().enumerate() {
809 if file.exists() {
810 let extension = file.extension().and_then(|s| s.to_str()).unwrap_or("");
812 let backup_filename = if extension.is_empty() {
813 format!("file_{}", i)
814 } else {
815 format!("file_{}.{}", i, extension)
816 };
817
818 let backup_path = backup_dir.join(backup_filename);
819
820 std::fs::copy(file, &backup_path).map_err(|e| ParseError::SyntaxError {
821 message: format!("Failed to create backup for {}: {}", file.display(), e),
822 location: 0,
823 })?;
824
825 file_mappings.insert(file.clone(), backup_path);
826 }
827 }
828
829 Ok(BackupInfo { backup_dir, file_mappings })
830 }
831
    /// Root directory for backups: the configured override when set,
    /// otherwise `<system temp dir>/perl_refactor_backups`.
    fn backup_root(&self) -> PathBuf {
        self.config
            .backup_root
            .clone()
            .unwrap_or_else(|| std::env::temp_dir().join("perl_refactor_backups"))
    }
839
840 fn cleanup_backup_directories(&self) -> ParseResult<BackupCleanupResult> {
841 let backup_root = self.backup_root();
842
843 if !backup_root.exists() {
844 return Ok(BackupCleanupResult { directories_removed: 0, space_reclaimed: 0 });
845 }
846
847 let mut backup_dirs = self.collect_backup_directories(&backup_root)?;
849
850 let dirs_to_remove = self.apply_retention_policies(&mut backup_dirs)?;
852
853 let (directories_removed, space_reclaimed) =
855 self.remove_backup_directories(&dirs_to_remove)?;
856
857 Ok(BackupCleanupResult { directories_removed, space_reclaimed })
858 }
859
860 fn collect_backup_directories(
861 &self,
862 backup_root: &PathBuf,
863 ) -> ParseResult<Vec<BackupDirMetadata>> {
864 let mut backup_dirs = Vec::new();
865
866 let entries = std::fs::read_dir(backup_root).map_err(|e| ParseError::SyntaxError {
867 message: format!("Failed to read backup directory: {}", e),
868 location: 0,
869 })?;
870
871 for entry in entries {
872 let entry = entry.map_err(|e| ParseError::SyntaxError {
873 message: format!("Failed to read directory entry: {}", e),
874 location: 0,
875 })?;
876
877 let path = entry.path();
878 if path.is_dir() {
879 if self.validate_backup_directory(&path)? {
881 let metadata =
882 std::fs::metadata(&path).map_err(|e| ParseError::SyntaxError {
883 message: format!(
884 "Failed to read metadata for {}: {}",
885 path.display(),
886 e
887 ),
888 location: 0,
889 })?;
890
891 let modified = metadata.modified().map_err(|e| ParseError::SyntaxError {
892 message: format!(
893 "Failed to get modification time for {}: {}",
894 path.display(),
895 e
896 ),
897 location: 0,
898 })?;
899
900 let size = self.calculate_directory_size(&path)?;
901
902 backup_dirs.push(BackupDirMetadata { path, modified, size });
903 }
904 }
905 }
906
907 Ok(backup_dirs)
908 }
909
910 fn validate_backup_directory(&self, dir: &PathBuf) -> ParseResult<bool> {
911 let dir_name = dir.file_name().and_then(|n| n.to_str()).unwrap_or("");
913
914 if !dir_name.starts_with("refactor_") {
915 return Ok(false);
916 }
917
918 let metadata = std::fs::symlink_metadata(dir).map_err(|e| ParseError::SyntaxError {
920 message: format!("Failed to read symlink metadata for {}: {}", dir.display(), e),
921 location: 0,
922 })?;
923
924 if !metadata.is_dir() || metadata.file_type().is_symlink() {
925 return Ok(false);
926 }
927
928 Ok(true)
929 }
930
931 fn calculate_directory_size(&self, dir: &PathBuf) -> ParseResult<u64> {
932 let mut total_size = 0u64;
933
934 let entries = std::fs::read_dir(dir).map_err(|e| ParseError::SyntaxError {
935 message: format!("Failed to read directory {}: {}", dir.display(), e),
936 location: 0,
937 })?;
938
939 for entry in entries {
940 let entry = entry.map_err(|e| ParseError::SyntaxError {
941 message: format!("Failed to read entry: {}", e),
942 location: 0,
943 })?;
944
945 let metadata = entry.metadata().map_err(|e| ParseError::SyntaxError {
946 message: format!("Failed to read entry metadata: {}", e),
947 location: 0,
948 })?;
949
950 if metadata.is_file() {
951 total_size += metadata.len();
952 }
953 }
954
955 Ok(total_size)
956 }
957
958 fn apply_retention_policies(
959 &self,
960 backup_dirs: &mut Vec<BackupDirMetadata>,
961 ) -> ParseResult<Vec<PathBuf>> {
962 let mut dirs_to_remove = Vec::new();
963
964 backup_dirs.sort_by_key(|d| d.modified);
966
967 let now = std::time::SystemTime::now();
968
969 if self.config.backup_max_age_seconds > 0 {
971 let max_age = std::time::Duration::from_secs(self.config.backup_max_age_seconds);
972
973 backup_dirs.retain(|dir| {
974 if let Ok(age) = now.duration_since(dir.modified) {
975 if age > max_age {
976 dirs_to_remove.push(dir.path.clone());
977 return false;
978 }
979 }
980 true
981 });
982 }
983
984 if self.config.max_backup_retention == 0 {
987 for dir in backup_dirs.iter() {
989 if !dirs_to_remove.contains(&dir.path) {
990 dirs_to_remove.push(dir.path.clone());
991 }
992 }
993 } else if backup_dirs.len() > self.config.max_backup_retention {
994 let excess_count = backup_dirs.len() - self.config.max_backup_retention;
995 for dir in backup_dirs.iter().take(excess_count) {
996 if !dirs_to_remove.contains(&dir.path) {
997 dirs_to_remove.push(dir.path.clone());
998 }
999 }
1000 }
1001
1002 Ok(dirs_to_remove)
1003 }
1004
1005 fn remove_backup_directories(&self, dirs_to_remove: &[PathBuf]) -> ParseResult<(usize, u64)> {
1006 let mut directories_removed = 0;
1007 let mut space_reclaimed = 0u64;
1008
1009 for dir in dirs_to_remove {
1010 let size = self.calculate_directory_size(dir)?;
1011
1012 std::fs::remove_dir_all(dir).map_err(|e| ParseError::SyntaxError {
1013 message: format!("Failed to remove backup directory {}: {}", dir.display(), e),
1014 location: 0,
1015 })?;
1016
1017 directories_removed += 1;
1018 space_reclaimed += size;
1019 }
1020
1021 Ok((directories_removed, space_reclaimed))
1022 }
1023
    /// Delegates a symbol rename to the workspace refactor engine, narrows
    /// the resulting edits to the requested scope, and applies them to disk.
    ///
    /// With the `workspace_refactor` feature disabled this performs nothing
    /// and reports failure with a warning.
    fn perform_symbol_rename(
        &mut self,
        old_name: &str,
        new_name: &str,
        scope: &RefactoringScope,
    ) -> ParseResult<RefactoringResult> {
        #[cfg(feature = "workspace_refactor")]
        {
            let rename_result = match scope {
                RefactoringScope::Workspace
                | RefactoringScope::File(_)
                | RefactoringScope::Directory(_)
                | RefactoringScope::FileSet(_)
                | RefactoringScope::Package { .. }
                | RefactoringScope::Function { .. }
                | RefactoringScope::Block { .. } => {
                    // Scopes anchored to one file pass it through; the rest
                    // use an empty path and rely on the post-filtering in
                    // `filter_rename_result_by_scope`.
                    let target_file = match scope {
                        RefactoringScope::File(path) => path,
                        RefactoringScope::Package { file, .. } => file,
                        RefactoringScope::Function { file, .. } => file,
                        RefactoringScope::Block { file, .. } => file,
                        _ => Path::new(""),
                    };

                    self.workspace_refactor.rename_symbol(old_name, new_name, target_file, (0, 0))
                }
            };

            match rename_result {
                Ok(result) => {
                    // Drop out-of-scope edits before touching any files.
                    let filtered_result = self.filter_rename_result_by_scope(result, scope)?;
                    let files_modified = self.apply_file_edits(&filtered_result.file_edits)?;
                    let changes_made =
                        filtered_result.file_edits.iter().map(|e| e.edits.len()).sum();

                    let refac_result = RefactoringResult {
                        success: true,
                        files_modified,
                        changes_made,
                        warnings: vec![],
                        errors: vec![],
                        operation_id: None,
                    };
                    Ok(refac_result)
                }
                // Engine errors are reported inside the result rather than
                // propagated, so the operation is still recorded in history.
                Err(e) => Ok(RefactoringResult {
                    success: false,
                    files_modified: 0,
                    changes_made: 0,
                    warnings: vec![],
                    errors: vec![format!("Rename failed: {}", e)],
                    operation_id: None,
                }),
            }
        }

        #[cfg(not(feature = "workspace_refactor"))]
        {
            Ok(RefactoringResult {
                success: false,
                files_modified: 0,
                changes_made: 0,
                warnings: vec!["Workspace refactoring feature disabled".to_string()],
                errors: vec![],
                operation_id: None,
            })
        }
    }
1095
    /// Narrows a workspace-wide rename result to the requested scope:
    /// whole-result scopes pass through untouched, a file scope keeps only
    /// edits to that file, and package/function/block scopes additionally
    /// clip edits to a byte range located in the file's source text.
    ///
    /// NOTE(review): Directory and FileSet scopes are not clipped to their
    /// paths here — confirm whether the engine pre-filters those.
    #[cfg(feature = "workspace_refactor")]
    fn filter_rename_result_by_scope(
        &self,
        mut result: crate::workspace_refactor::RefactorResult,
        scope: &RefactoringScope,
    ) -> ParseResult<crate::workspace_refactor::RefactorResult> {
        match scope {
            RefactoringScope::Workspace
            | RefactoringScope::Directory(_)
            | RefactoringScope::FileSet(_) => Ok(result),
            RefactoringScope::File(target_file) => {
                // Keep only edits to the scoped file.
                result
                    .file_edits
                    .retain(|file_edit| Self::paths_match(&file_edit.file_path, target_file));
                Ok(result)
            }
            RefactoringScope::Package { file, name } => {
                let source = fs::read_to_string(file).map_err(|error| ParseError::SyntaxError {
                    message: format!("Failed to read file {}: {error}", file.display()),
                    location: 0,
                })?;

                // If the package cannot be located, no edits apply.
                let Some((start_off, end_off)) = Self::find_package_byte_range(&source, name)
                else {
                    result.file_edits.clear();
                    return Ok(result);
                };

                result.file_edits = Self::filter_file_edits_to_range(
                    std::mem::take(&mut result.file_edits),
                    file,
                    start_off,
                    end_off,
                );
                Ok(result)
            }
            RefactoringScope::Function { file, name } => {
                let source = fs::read_to_string(file).map_err(|error| ParseError::SyntaxError {
                    message: format!("Failed to read file {}: {error}", file.display()),
                    location: 0,
                })?;

                // If the sub cannot be located, no edits apply.
                let Some((start_off, end_off)) = Self::find_function_byte_range(&source, name)
                else {
                    result.file_edits.clear();
                    return Ok(result);
                };

                result.file_edits = Self::filter_file_edits_to_range(
                    std::mem::take(&mut result.file_edits),
                    file,
                    start_off,
                    end_off,
                );
                Ok(result)
            }
            RefactoringScope::Block { file, start, end } => {
                let source = fs::read_to_string(file).map_err(|error| ParseError::SyntaxError {
                    message: format!("Failed to read file {}: {error}", file.display()),
                    location: 0,
                })?;
                let line_index = LineIndex::new(source.clone());
                // Map (line, column) to byte offsets, clamping positions
                // that fall outside the file.
                let start_off =
                    Self::offset_with_fallback(&line_index, &source, start.0, start.1, false);
                let end_off = Self::offset_with_fallback(&line_index, &source, end.0, end.1, true);

                // Tolerate reversed ranges by swapping the bounds.
                let (start_off, end_off) =
                    if start_off <= end_off { (start_off, end_off) } else { (end_off, start_off) };

                result.file_edits = Self::filter_file_edits_to_range(
                    std::mem::take(&mut result.file_edits),
                    file,
                    start_off,
                    end_off,
                );
                Ok(result)
            }
        }
    }
1175
1176 #[cfg(feature = "workspace_refactor")]
1177 fn filter_file_edits_to_range(
1178 file_edits: Vec<crate::workspace_refactor::FileEdit>,
1179 target_file: &Path,
1180 start_off: usize,
1181 end_off: usize,
1182 ) -> Vec<crate::workspace_refactor::FileEdit> {
1183 file_edits
1184 .into_iter()
1185 .filter_map(|mut file_edit| {
1186 if !Self::paths_match(&file_edit.file_path, target_file) {
1187 return None;
1188 }
1189
1190 file_edit.edits.retain(|edit| edit.start >= start_off && edit.end <= end_off);
1191 if file_edit.edits.is_empty() { None } else { Some(file_edit) }
1192 })
1193 .collect()
1194 }
1195
1196 #[cfg(feature = "workspace_refactor")]
1197 fn paths_match(a: &Path, b: &Path) -> bool {
1198 if a == b {
1199 return true;
1200 }
1201
1202 match (a.canonicalize(), b.canonicalize()) {
1203 (Ok(canonical_a), Ok(canonical_b)) => canonical_a == canonical_b,
1204 _ => false,
1205 }
1206 }
1207
1208 #[cfg(feature = "workspace_refactor")]
1209 fn find_package_byte_range(source: &str, package_name: &str) -> Option<(usize, usize)> {
1210 let package_decl = format!("package {package_name}");
1211 let start = source.find(&package_decl)?;
1212 let search_start = start + package_decl.len();
1213 let end = source[search_start..]
1214 .find("package ")
1215 .map(|idx| search_start + idx)
1216 .unwrap_or(source.len());
1217 Some((start, end))
1218 }
1219
1220 #[cfg(feature = "workspace_refactor")]
1221 fn find_function_byte_range(source: &str, function_name: &str) -> Option<(usize, usize)> {
1222 let sub_decl = format!("sub {function_name}");
1223 let start = source.find(&sub_decl)?;
1224 let open_brace =
1225 source[start + sub_decl.len()..].find('{').map(|idx| start + sub_decl.len() + idx)?;
1226
1227 let mut depth = 0usize;
1228 for (relative_idx, ch) in source[open_brace..].char_indices() {
1229 match ch {
1230 '{' => depth += 1,
1231 '}' => {
1232 depth = depth.saturating_sub(1);
1233 if depth == 0 {
1234 let end = open_brace + relative_idx + ch.len_utf8();
1235 return Some((start, end));
1236 }
1237 }
1238 _ => {}
1239 }
1240 }
1241
1242 None
1243 }
1244
1245 #[cfg(feature = "workspace_refactor")]
1246 fn offset_with_fallback(
1247 line_index: &LineIndex,
1248 source: &str,
1249 line: u32,
1250 column: u32,
1251 end_boundary: bool,
1252 ) -> usize {
1253 if let Some(offset) = line_index.position_to_offset(line, column) {
1254 return offset;
1255 }
1256
1257 if end_boundary {
1258 if let Some(next_line_start) = line_index.position_to_offset(line.saturating_add(1), 0)
1259 {
1260 return next_line_start;
1261 }
1262 return source.len();
1263 }
1264
1265 line_index.position_to_offset(line, 0).unwrap_or(0)
1266 }
1267
    /// Extracts the byte range between `start_position` and `end_position`
    /// (both `(line, column)` pairs as understood by `LineIndex`) of the first
    /// file in `files` into a new subroutine `method_name`, replacing the
    /// original code with a call to it.
    ///
    /// The new sub is appended near the end of the file, before a trailing
    /// `1;` / `__DATA__` / `__END__` marker when one exists. In safe mode the
    /// rewrite is computed but not written back to disk.
    ///
    /// NOTE(review): the returned result reports `files_modified: 1` and
    /// `changes_made: 2` even in safe mode, where nothing is written — confirm
    /// callers treat safe-mode results as a dry-run preview.
    fn perform_extract_method(
        &mut self,
        method_name: &str,
        start_position: (usize, usize),
        end_position: (usize, usize),
        files: &[PathBuf],
    ) -> ParseResult<RefactoringResult> {
        // Extraction operates on exactly one file: the first entry.
        let file_path = if let Some(f) = files.first() {
            f
        } else {
            return Err(ParseError::SyntaxError {
                message: "No file specified for extraction".to_string(),
                location: 0,
            });
        };

        let source_code = std::fs::read_to_string(file_path).map_err(|e| {
            ParseError::SyntaxError { message: format!("Failed to read file: {}", e), location: 0 }
        })?;

        // Preserve the file's existing line-ending convention in generated code.
        let line_ending = if source_code.contains("\r\n") { "\r\n" } else { "\n" };

        // Translate (line, column) positions into byte offsets.
        let line_index = LineIndex::new(source_code.clone());
        let start_offset = line_index
            .position_to_offset(start_position.0 as u32, start_position.1 as u32)
            .ok_or_else(|| ParseError::SyntaxError {
                message: "Invalid start position".to_string(),
                location: 0,
            })?;
        let end_offset = line_index
            .position_to_offset(end_position.0 as u32, end_position.1 as u32)
            .ok_or_else(|| ParseError::SyntaxError {
                message: "Invalid end position".to_string(),
                location: 0,
            })?;

        if start_offset >= end_offset {
            return Err(ParseError::SyntaxError {
                message: "Start position must be before end position".to_string(),
                location: 0,
            });
        }

        let mut parser = Parser::new(&source_code);
        let ast = parser.parse()?;

        // Dataflow analysis: which variables the range reads from outside
        // (inputs) and which it defines that later code uses (outputs).
        let analysis = analyze_extraction(&ast, start_offset, end_offset);

        let extracted_code = &source_code[start_offset..end_offset];

        // Header comment + `sub name {` opener for the new subroutine.
        let mut new_sub = format!(
            "{}# Extracted from lines {}-{} {}sub {} {{{}",
            line_ending,
            start_position.0 + 1,
            end_position.0,
            line_ending,
            method_name,
            line_ending
        );

        // Unpack inputs from @_ when the extracted code needs parameters.
        if !analysis.inputs.is_empty() {
            new_sub.push_str(
                &format!("    my ({}) = @_;\n", analysis.inputs.join(", "))
                    .replace('\n', line_ending),
            );
        }

        new_sub.push_str("    ");
        new_sub.push_str(extracted_code.trim());
        new_sub.push_str(line_ending);

        // Return the variables that the remaining code still uses.
        if !analysis.outputs.is_empty() {
            new_sub.push_str(
                &format!("    return ({});\n", analysis.outputs.join(", "))
                    .replace('\n', line_ending),
            );
        }
        new_sub.push_str("}\n".replace('\n', line_ending).as_str());

        // Infer the indentation of the extracted code so the replacement call
        // lines up with its surroundings.
        let mut indentation = String::new();
        if let Some(first_line) = extracted_code.lines().find(|l| !l.trim().is_empty()) {
            let trimmed = first_line.trim_start();
            indentation = first_line[..first_line.len() - trimmed.len()].to_string();
        } else if let Some(line_start) = source_code[..start_offset].rfind('\n') {
            let prefix = &source_code[line_start + 1..start_offset];
            if prefix.trim().is_empty() {
                indentation = prefix.to_string();
            }
        }

        // Build the replacement call, capturing outputs when there are any:
        // `($out) = name($in);` vs plain `name($in);`.
        let inputs_str = analysis.inputs.join(", ");
        let mut call = format!("{}({})", method_name, inputs_str);

        if !analysis.outputs.is_empty() {
            let outputs_str = analysis.outputs.join(", ");
            call = format!("({}) = {}", outputs_str, call);
        }
        call.push(';');

        let mut call_with_indent = format!("{}{}", indentation, call);
        if source_code[start_offset..end_offset].ends_with('\n') {
            call_with_indent.push_str(line_ending);
        }

        // Splice the call in place of the extracted range. If the text just
        // before start_offset is the inferred indentation, drop it so the
        // indentation is not doubled.
        let mut final_source = String::new();
        let prefix_len =
            if source_code[..start_offset].ends_with(&indentation) { indentation.len() } else { 0 };
        final_source.push_str(&source_code[..start_offset - prefix_len]);
        final_source.push_str(&call_with_indent);
        final_source.push_str(&source_code[end_offset..]);

        // Insert the new sub before a trailing module marker when present,
        // otherwise append at end of file.
        let insert_pos = if let Some(idx) = final_source.rfind(&format!("{}1;", line_ending)) {
            idx + line_ending.len()
        } else if let Some(idx) = final_source.rfind(&format!("{}__DATA__", line_ending)) {
            idx + line_ending.len()
        } else if let Some(idx) = final_source.rfind(&format!("{}__END__", line_ending)) {
            idx + line_ending.len()
        } else {
            final_source.len()
        };

        final_source.insert_str(insert_pos, &new_sub);

        // Safe mode is a dry run: skip the write.
        if !self.config.safe_mode {
            std::fs::write(file_path, final_source).map_err(|e| ParseError::SyntaxError {
                message: format!("Failed to write file: {}", e),
                location: 0,
            })?;
        }

        Ok(RefactoringResult {
            success: true,
            files_modified: 1,
            changes_made: 2,
            warnings: vec![],
            errors: vec![],
            operation_id: None,
        })
    }
1420
1421 fn perform_move_code(
1422 &mut self,
1423 source_file: &Path,
1424 target_file: &Path,
1425 elements: &[String],
1426 ) -> ParseResult<RefactoringResult> {
1427 let source_path = fs::canonicalize(source_file).map_err(|e| ParseError::SyntaxError {
1429 message: format!("Failed to resolve source path: {}", e),
1430 location: 0,
1431 })?;
1432 let target_path = fs::canonicalize(target_file).map_err(|e| ParseError::SyntaxError {
1433 message: format!("Failed to resolve target path: {}", e),
1434 location: 0,
1435 })?;
1436
1437 if source_path == target_path {
1438 return Err(ParseError::SyntaxError {
1439 message: "Source and target files must be different".to_string(),
1440 location: 0,
1441 });
1442 }
1443
1444 let source_content =
1446 fs::read_to_string(&source_path).map_err(|e| ParseError::SyntaxError {
1447 message: format!("Failed to read source file: {}", e),
1448 location: 0,
1449 })?;
1450
1451 let mut target_content =
1452 fs::read_to_string(&target_path).map_err(|e| ParseError::SyntaxError {
1453 message: format!("Failed to read target file: {}", e),
1454 location: 0,
1455 })?;
1456
1457 let mut parser = Parser::new(&source_content);
1459 let ast = parser.parse().map_err(|e| ParseError::SyntaxError {
1460 message: format!("Failed to parse source file: {}", e),
1461 location: 0,
1462 })?;
1463
1464 struct ElementToMove {
1466 location: SourceLocation,
1467 content: String,
1468 }
1469
1470 let mut elements_to_move: Vec<ElementToMove> = Vec::new();
1471 let mut warnings = Vec::new();
1472
1473 let mut found_names: HashSet<String> = HashSet::new();
1475 ast.for_each_child(|child| {
1476 if let NodeKind::Subroutine { name, .. } = &child.kind {
1477 if let Some(sub_name) = name {
1478 if elements.contains(sub_name) {
1479 found_names.insert(sub_name.clone());
1480 elements_to_move.push(ElementToMove {
1481 location: child.location,
1482 content: source_content[child.location.start..child.location.end]
1483 .to_string(),
1484 });
1485 }
1486 }
1487 }
1488 });
1489
1490 for element in elements {
1492 if !found_names.contains(element) {
1493 warnings.push(format!("Subroutine '{}' not found in source file", element));
1494 }
1495 }
1496
1497 if elements_to_move.is_empty() {
1498 return Ok(RefactoringResult {
1499 success: false,
1500 files_modified: 0,
1501 changes_made: 0,
1502 warnings: vec!["No elements found to move".to_string()],
1503 errors: vec![],
1504 operation_id: None,
1505 });
1506 }
1507
1508 elements_to_move.sort_by(|a, b| b.location.start.cmp(&a.location.start));
1510
1511 let mut modified_source = source_content.clone();
1512
1513 for element in &elements_to_move {
1515 let start = element.location.start;
1516 let end = element.location.end;
1517
1518 let remove_end =
1520 if end < modified_source.len() && modified_source.as_bytes()[end] == b'\n' {
1521 end + 1
1522 } else {
1523 end
1524 };
1525
1526 modified_source.replace_range(start..remove_end, "");
1527 }
1528
1529 elements_to_move.sort_by(|a, b| a.location.start.cmp(&b.location.start));
1531
1532 let mut moved_content = String::new();
1534 for element in &elements_to_move {
1535 moved_content.push_str(&element.content);
1536 moved_content.push('\n');
1537 }
1538
1539 let insertion_index = if let Some(idx) = target_content.rfind("\n1;") {
1541 idx + 1 } else if let Some(idx) = target_content.rfind("\nreturn 1;") {
1543 idx + 1
1544 } else {
1545 target_content.len()
1546 };
1547
1548 if insertion_index < target_content.len() {
1549 target_content.insert_str(insertion_index, &moved_content);
1551 } else {
1552 target_content.push('\n');
1553 target_content.push_str(&moved_content);
1554 }
1555
1556 fs::write(&target_path, target_content).map_err(|e| ParseError::SyntaxError {
1558 message: format!("Failed to write to target file: {}", e),
1559 location: 0,
1560 })?;
1561
1562 fs::write(&source_path, modified_source).map_err(|e| ParseError::SyntaxError {
1563 message: format!("Failed to write source file: {}", e),
1564 location: 0,
1565 })?;
1566
1567 warnings.push("Warning: Imports and references were not updated. Please review the moved code for missing dependencies.".to_string());
1569
1570 Ok(RefactoringResult {
1571 success: true,
1572 files_modified: 2,
1573 changes_made: elements_to_move.len(),
1574 warnings,
1575 errors: vec![],
1576 operation_id: None,
1577 })
1578 }
1579
1580 fn perform_modernize(
1581 &mut self,
1582 patterns: &[ModernizationPattern],
1583 files: &[PathBuf],
1584 ) -> ParseResult<RefactoringResult> {
1585 let mut total_changes = 0;
1587 let mut modified_files = 0;
1588 let mut warnings = Vec::new();
1589
1590 for file in files {
1591 if let Ok(changes) = self.modernize.modernize_file(file, patterns) {
1592 if changes > 0 {
1593 modified_files += 1;
1594 total_changes += changes;
1595 }
1596 } else {
1597 warnings.push(format!("Failed to modernize {}", file.display()));
1598 }
1599 }
1600
1601 Ok(RefactoringResult {
1602 success: true,
1603 files_modified: modified_files,
1604 changes_made: total_changes,
1605 warnings,
1606 errors: vec![],
1607 operation_id: None,
1608 })
1609 }
1610
1611 fn perform_optimize_imports(
1612 &mut self,
1613 remove_unused: bool,
1614 sort_alphabetically: bool,
1615 group_by_type: bool,
1616 files: &[PathBuf],
1617 ) -> ParseResult<RefactoringResult> {
1618 let mut total_changes = 0;
1620 let mut modified_files = 0;
1621
1622 for file in files {
1623 let analysis = self
1624 .import_optimizer
1625 .analyze_file(file)
1626 .map_err(|e| ParseError::SyntaxError { message: e, location: 0 })?;
1627 let mut changes_made = 0;
1628
1629 if remove_unused && !analysis.unused_imports.is_empty() {
1630 changes_made += analysis.unused_imports.len();
1631 }
1632
1633 if sort_alphabetically {
1634 changes_made += 1; }
1636
1637 if group_by_type {
1638 changes_made += 1; }
1640
1641 if changes_made > 0 {
1642 modified_files += 1;
1643 total_changes += changes_made;
1644 }
1645 }
1646
1647 Ok(RefactoringResult {
1648 success: true,
1649 files_modified: modified_files,
1650 changes_made: total_changes,
1651 warnings: vec![],
1652 errors: vec![],
1653 operation_id: None,
1654 })
1655 }
1656
    /// Inlines a variable symbol (`$`, `@` or `%` sigil) by delegating to the
    /// `workspace_refactor` engine when that feature is enabled.
    ///
    /// With `all_occurrences` the first file is treated as the definition file
    /// and every occurrence is inlined; otherwise each file is tried in turn
    /// and the first successful application wins. Non-variable symbols, and
    /// builds without the feature, produce a warning with `success: false`.
    fn perform_inline(
        &mut self,
        symbol_name: &str,
        all_occurrences: bool,
        files: &[PathBuf],
    ) -> ParseResult<RefactoringResult> {
        let mut warnings = Vec::new();

        // Only sigil-prefixed variables are supported for inlining.
        if symbol_name.starts_with('$')
            || symbol_name.starts_with('@')
            || symbol_name.starts_with('%')
        {
            #[cfg(feature = "workspace_refactor")]
            {
                if all_occurrences {
                    // The first file is taken as the definition site.
                    let def_file = files.first().ok_or_else(|| ParseError::SyntaxError {
                        message:
                            "Inline all_occurrences requires at least one file (definition file)"
                                .to_string(),
                        location: 0,
                    })?;
                    match self.workspace_refactor.inline_variable_all(symbol_name, def_file, (0, 0))
                    {
                        Ok(refactor_result) => {
                            let edits = refactor_result.file_edits;
                            // No edits means the symbol did not resolve anywhere.
                            if edits.is_empty() {
                                warnings.push(format!(
                                    "Symbol '{}' not found across workspace",
                                    symbol_name
                                ));
                                return Ok(RefactoringResult {
                                    success: false,
                                    files_modified: 0,
                                    changes_made: 0,
                                    warnings,
                                    errors: vec![],
                                    operation_id: None,
                                });
                            }
                            let changes_made = edits.iter().map(|e| e.edits.len()).sum::<usize>();
                            let files_modified = self.apply_file_edits(&edits)?;
                            return Ok(RefactoringResult {
                                success: true,
                                files_modified,
                                changes_made,
                                warnings: refactor_result.warnings,
                                errors: vec![],
                                operation_id: None,
                            });
                        }
                        // Missing symbol is reported as a warning, not an error.
                        Err(crate::workspace_refactor::RefactorError::SymbolNotFound {
                            ..
                        }) => {
                            warnings.push(format!(
                                "Symbol '{}' definition not found in provided files",
                                symbol_name
                            ));
                        }
                        Err(e) => {
                            warnings.push(format!("Error during workspace inlining: {}", e));
                        }
                    }
                } else {
                    // Single-occurrence mode: try each file until one applies.
                    let mut files_modified = 0;
                    let mut changes_made = 0;
                    let mut applied = false;

                    for file in files {
                        match self.workspace_refactor.inline_variable(symbol_name, file, (0, 0)) {
                            Ok(refactor_result) => {
                                let edits = refactor_result.file_edits;
                                if !edits.is_empty() {
                                    let mod_count = self.apply_file_edits(&edits)?;
                                    if mod_count > 0 {
                                        files_modified += mod_count;
                                        changes_made +=
                                            edits.iter().map(|e| e.edits.len()).sum::<usize>();
                                        applied = true;
                                        // Stop after the first successful application.
                                        break;
                                    }
                                }
                            }
                            // Not defined in this file — try the next one.
                            Err(crate::workspace_refactor::RefactorError::SymbolNotFound {
                                ..
                            }) => continue,
                            Err(e) => {
                                warnings.push(format!("Error checking {}: {}", file.display(), e));
                            }
                        }
                    }

                    if !applied && warnings.is_empty() {
                        warnings.push(format!(
                            "Symbol '{}' definition not found in provided files",
                            symbol_name
                        ));
                    }

                    return Ok(RefactoringResult {
                        success: applied,
                        files_modified,
                        changes_made,
                        warnings,
                        errors: vec![],
                        operation_id: None,
                    });
                }
            }

            #[cfg(not(feature = "workspace_refactor"))]
            {
                let _ = files; // silence unused-parameter warning in this build
                warnings.push("Workspace refactoring feature is disabled".to_string());
            }
        } else {
            let _ = files; // unused in the unsupported-symbol path
            warnings.push(format!(
                "Inlining for symbol '{}' not implemented (only variables supported)",
                symbol_name
            ));
        }

        // Fall-through: nothing was applied.
        Ok(RefactoringResult {
            success: false,
            files_modified: 0,
            changes_made: 0,
            warnings,
            errors: vec![],
            operation_id: None,
        })
    }
1798
1799 #[cfg(feature = "workspace_refactor")]
1800 fn apply_file_edits(
1801 &self,
1802 file_edits: &[crate::workspace_refactor::FileEdit],
1803 ) -> ParseResult<usize> {
1804 let mut files_modified = 0;
1805
1806 for file_edit in file_edits {
1807 if !file_edit.file_path.exists() {
1808 continue;
1809 }
1810
1811 let content = std::fs::read_to_string(&file_edit.file_path).map_err(|e| {
1812 ParseError::SyntaxError {
1813 message: format!(
1814 "Failed to read file {}: {}",
1815 file_edit.file_path.display(),
1816 e
1817 ),
1818 location: 0,
1819 }
1820 })?;
1821
1822 let mut edits = file_edit.edits.clone();
1825 edits.sort_by(|a, b| b.start.cmp(&a.start));
1826
1827 let mut new_content = content.clone();
1829 for edit in edits {
1830 if edit.end > new_content.len() {
1831 return Err(ParseError::SyntaxError {
1832 message: format!(
1833 "Edit out of bounds for {}: range {}..{} in content len {}",
1834 file_edit.file_path.display(),
1835 edit.start,
1836 edit.end,
1837 new_content.len()
1838 ),
1839 location: 0,
1840 });
1841 }
1842 new_content.replace_range(edit.start..edit.end, &edit.new_text);
1843 }
1844
1845 if new_content != content {
1846 std::fs::write(&file_edit.file_path, new_content).map_err(|e| {
1847 ParseError::SyntaxError {
1848 message: format!(
1849 "Failed to write file {}: {}",
1850 file_edit.file_path.display(),
1851 e
1852 ),
1853 location: 0,
1854 }
1855 })?;
1856 files_modified += 1;
1857 }
1858 }
1859
1860 Ok(files_modified)
1861 }
1862}
1863
1864impl Default for RefactoringEngine {
1865 fn default() -> Self {
1866 Self::new()
1867 }
1868}
1869
1870mod temp_stubs {
1872 use super::*;
1873
1874 #[allow(dead_code)]
1875 #[derive(Debug)]
1876 pub(super) struct WorkspaceRefactor;
1878 #[allow(dead_code)]
1879 impl WorkspaceRefactor {
1880 pub(super) fn new() -> Self {
1882 Self
1883 }
1884 }
1885
1886 #[allow(dead_code)]
1887 #[derive(Debug)]
1888 pub(super) struct ModernizeEngine;
1890 #[allow(dead_code)]
1891 impl ModernizeEngine {
1892 pub(super) fn new() -> Self {
1894 Self
1895 }
1896
1897 pub(super) fn modernize_file(
1899 &mut self,
1900 _file: &Path,
1901 _patterns: &[ModernizationPattern],
1902 ) -> ParseResult<usize> {
1903 Ok(0)
1904 }
1905 }
1906}
1907
/// Result of the dataflow analysis over an extraction range: which variables
/// the extracted code reads from the surrounding scope and which it defines
/// that the remaining code still uses.
struct ExtractionAnalysis {
    // Variables the extracted sub must receive as parameters (sorted).
    inputs: Vec<String>,
    // Variables the extracted sub must return (sorted).
    outputs: Vec<String>,
}
1912
1913fn analyze_extraction(ast: &Node, start: usize, end: usize) -> ExtractionAnalysis {
1914 let mut inputs = HashSet::new();
1915 let mut outputs = HashSet::new();
1916 let mut declared_in_scope = HashSet::new();
1917 let mut declared_in_range = HashSet::new();
1918
1919 visit_node(
1920 ast,
1921 start,
1922 end,
1923 &mut inputs,
1924 &mut outputs,
1925 &mut declared_in_scope,
1926 &mut declared_in_range,
1927 );
1928
1929 let mut inputs_vec: Vec<_> = inputs.into_iter().collect();
1930 inputs_vec.sort();
1931 let mut outputs_vec: Vec<_> = outputs.into_iter().collect();
1932 outputs_vec.sort();
1933
1934 ExtractionAnalysis { inputs: inputs_vec, outputs: outputs_vec }
1935}
1936
/// Recursive dataflow walk used by `analyze_extraction`.
///
/// `start..end` is the byte range being extracted. The walk maintains:
/// - `declared_in_scope`: lexical variables declared OUTSIDE the range
///   (candidates for inputs when read inside it);
/// - `declared_in_range`: variables declared INSIDE the range (candidates for
///   outputs when used after it);
/// - `inputs` / `outputs`: the accumulated results.
///
/// Scoped constructs (blocks, subs, loops, try/catch) clone the scope set so
/// inner declarations do not leak to sibling nodes.
fn visit_node(
    node: &Node,
    start: usize,
    end: usize,
    inputs: &mut HashSet<String>,
    outputs: &mut HashSet<String>,
    declared_in_scope: &mut HashSet<String>,
    declared_in_range: &mut HashSet<String>,
) {
    // True when this node lies entirely within the extraction range.
    let in_range = node.location.start >= start && node.location.end <= end;

    match &node.kind {
        NodeKind::VariableDeclaration { declarator, variable, initializer, .. } => {
            // Only lexical declarations (`my`/`state`) introduce tracked names.
            if declarator == "my" || declarator == "state" {
                let name = extract_var_name(variable);
                if in_range {
                    declared_in_range.insert(name);
                } else {
                    declared_in_scope.insert(name);
                }
            }
            if let Some(init) = initializer {
                visit_node(init, start, end, inputs, outputs, declared_in_scope, declared_in_range);
            }
        }
        NodeKind::VariableListDeclaration { declarator, variables, initializer, .. } => {
            // `my ($a, $b) = ...` — track every variable in the list.
            if declarator == "my" || declarator == "state" {
                for var in variables {
                    let name = extract_var_name(var);
                    if in_range {
                        declared_in_range.insert(name);
                    } else {
                        declared_in_scope.insert(name);
                    }
                }
            }
            if let Some(init) = initializer {
                visit_node(init, start, end, inputs, outputs, declared_in_scope, declared_in_range);
            }
        }
        // Subroutine signature parameters count as declarations too.
        NodeKind::MandatoryParameter { variable }
        | NodeKind::SlurpyParameter { variable }
        | NodeKind::NamedParameter { variable } => {
            let name = extract_var_name(variable);
            if in_range {
                declared_in_range.insert(name);
            } else {
                declared_in_scope.insert(name);
            }
        }
        NodeKind::OptionalParameter { variable, default_value } => {
            let name = extract_var_name(variable);
            if in_range {
                declared_in_range.insert(name);
            } else {
                declared_in_scope.insert(name);
            }
            // The default expression may itself reference other variables.
            visit_node(
                default_value,
                start,
                end,
                inputs,
                outputs,
                declared_in_scope,
                declared_in_range,
            );
        }
        NodeKind::Variable { sigil, name } => {
            let full_name = format!("{}{}", sigil, name);
            if in_range {
                // Read inside the range of a variable declared outside it:
                // the extracted sub needs it as a parameter.
                if !declared_in_range.contains(&full_name) && declared_in_scope.contains(&full_name)
                {
                    inputs.insert(full_name.clone());
                }
            } else if node.location.start >= end {
                // Use after the range of something the range declared (or
                // received): the extracted sub must return it.
                if declared_in_range.contains(&full_name) || inputs.contains(&full_name) {
                    outputs.insert(full_name);
                }
            }
        }
        NodeKind::Block { statements } => {
            // A block introduces a child scope; inner `my`s stay inside it.
            let mut inner_scope = declared_in_scope.clone();
            for stmt in statements {
                visit_node(stmt, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
            }
        }
        NodeKind::Subroutine { signature, body, .. } => {
            // Subs get their own scope containing signature parameters.
            let mut inner_scope = declared_in_scope.clone();
            if let Some(sig) = signature {
                visit_node(sig, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
            }
            visit_node(body, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
        }
        NodeKind::Try { body, catch_blocks, finally_block } => {
            visit_node(body, start, end, inputs, outputs, declared_in_scope, declared_in_range);
            for (var, catch_body) in catch_blocks {
                let mut inner_scope = declared_in_scope.clone();
                if let Some(v_name) = var {
                    // Normalize a bare catch-variable name to its `$` form.
                    let full_name = if v_name.starts_with(['$', '@', '%']) {
                        v_name.clone()
                    } else {
                        format!("${}", v_name)
                    };
                    // NOTE(review): the catch variable is inserted into the
                    // OUTER sets even though `inner_scope` was just created —
                    // looks like it may have been intended for `inner_scope`;
                    // confirm whether the leak to sibling nodes is deliberate.
                    if in_range {
                        declared_in_range.insert(full_name);
                    } else {
                        declared_in_scope.insert(full_name);
                    }
                }
                visit_node(
                    catch_body,
                    start,
                    end,
                    inputs,
                    outputs,
                    &mut inner_scope,
                    declared_in_range,
                );
            }
            if let Some(finally) = finally_block {
                visit_node(
                    finally,
                    start,
                    end,
                    inputs,
                    outputs,
                    declared_in_scope,
                    declared_in_range,
                );
            }
        }
        NodeKind::Foreach { variable, list, body, continue_block } => {
            // The list expression is evaluated in the enclosing scope.
            visit_node(list, start, end, inputs, outputs, declared_in_scope, declared_in_range);

            // NOTE(review): the continue block is visited in the OUTER scope,
            // before the loop variable is introduced — confirm intended.
            if let Some(cb) = continue_block {
                visit_node(cb, start, end, inputs, outputs, declared_in_scope, declared_in_range);
            }

            // The loop variable and body live in a child scope.
            let mut inner_scope = declared_in_scope.clone();
            visit_node(variable, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
            visit_node(body, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
        }
        NodeKind::For { init, condition, update, body, continue_block } => {
            // C-style for: init/condition/update/body all share one child scope.
            let mut inner_scope = declared_in_scope.clone();
            if let Some(n) = init {
                visit_node(n, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
            }
            if let Some(n) = condition {
                visit_node(n, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
            }
            if let Some(n) = update {
                visit_node(n, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
            }
            visit_node(body, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
            if let Some(n) = continue_block {
                visit_node(n, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
            }
        }
        _ => {
            // Any other node kind: recurse into children in the current scope.
            for child in node.children() {
                visit_node(
                    child,
                    start,
                    end,
                    inputs,
                    outputs,
                    declared_in_scope,
                    declared_in_range,
                );
            }
        }
    }
}
2117
2118fn extract_var_name(node: &Node) -> String {
2119 match &node.kind {
2120 NodeKind::Variable { sigil, name } => format!("{}{}", sigil, name),
2121 NodeKind::VariableWithAttributes { variable, .. } => extract_var_name(variable),
2122 _ => String::new(),
2123 }
2124}
2125
2126#[cfg(test)]
2127mod tests {
2128 use super::*;
2129 use perl_tdd_support::{must, must_some};
2130
    // Operation IDs must be unique per call and carry the "refactor_" prefix.
    #[test]
    fn test_operation_id_generation() {
        let engine = RefactoringEngine::new();
        let id1 = engine.generate_operation_id();
        let id2 = engine.generate_operation_id();
        assert_ne!(id1, id2);
        assert!(id1.starts_with("refactor_"));
    }

    // Defaults must match the documented values in `RefactoringConfig::default`.
    #[test]
    fn test_config_defaults() {
        let config = RefactoringConfig::default();
        assert!(config.safe_mode);
        assert_eq!(config.max_files_per_operation, 100);
        assert!(config.create_backups);
        assert_eq!(config.operation_timeout, 60);
        assert!(config.parallel_processing);
    }
2149
    // End-to-end extraction: inputs become `@_` parameters, the produced
    // variable becomes the return value, and the call site captures it.
    #[test]
    fn test_extract_method_basic() {
        use std::io::Write;
        let mut file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
        let code = r#"
sub test {
    my $x = 1;
    my $y = 2;
    # Start extraction
    print $x;
    my $z = $x + $y;
    print $z;
    # End extraction
    return $z;
}
"#;
        must(write!(file, "{}", code));
        let path = file.path().to_path_buf();

        let mut engine = RefactoringEngine::new();
        // Disable safe mode so the rewrite is actually written to disk.
        engine.config.safe_mode = false;

        let result = must(engine.perform_extract_method(
            "extracted_sub",
            (5, 0),
            (8, 0),
            std::slice::from_ref(&path),
        ));

        assert!(result.success);

        let new_code = must(std::fs::read_to_string(&path));
        println!("New code:\n{}", new_code);

        assert!(new_code.contains("sub extracted_sub {"));
        assert!(new_code.contains("my ($x, $y) = @_;"));
        assert!(new_code.contains("return ($z);"));
        assert!(new_code.contains("($z) = extracted_sub($x, $y);"));
    }
2196
    // The extracted sub must be placed before the module's trailing `1;`.
    #[test]
    fn test_extract_method_with_placement() {
        use std::io::Write;
        let mut file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
        let code = r#"
package MyModule;
use strict;
use warnings;

sub existing {
    my $val = 10;
    # start
    print $val;
    my $new_val = $val * 2;
    # end
    return $new_val;
}

1;
"#;
        must(write!(file, "{}", code));
        let path = file.path().to_path_buf();

        let mut engine = RefactoringEngine::new();
        // Disable safe mode so the rewrite is actually written to disk.
        engine.config.safe_mode = false;

        let result = must(engine.perform_extract_method(
            "helper",
            (8, 0),
            (10, 0),
            std::slice::from_ref(&path),
        ));

        assert!(result.success);

        let new_code = must(std::fs::read_to_string(&path));
        println!("New code with placement:\n{}", new_code);

        assert!(new_code.contains("sub helper {"));
        // Placement check: the new sub precedes the `1;` marker.
        assert!(must_some(new_code.find("sub helper {")) < must_some(new_code.find("1;")));

        assert!(new_code.contains("my ($val) = @_;"));
        assert!(new_code.contains("return ($new_val);"));
        assert!(new_code.contains("($new_val) = helper($val);"));
    }
2246
    // Extraction with scalars, an array input, and a `state` variable:
    // parameter/return lists are sorted, and the call site keeps indentation.
    #[test]
    fn test_extract_method_complex_vars() {
        use std::io::Write;
        let mut file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
        let code = r#"
sub complex {
    my $sum = 0;
    my @items = (1..10);
    # start
    foreach my $item (@items) {
        $sum += $item;
    }
    state $call_count = 0;
    $call_count++;
    # end
    return ($sum, $call_count);
}
"#;
        must(write!(file, "{}", code));
        let path = file.path().to_path_buf();

        let mut engine = RefactoringEngine::new();
        // Disable safe mode so the rewrite is actually written to disk.
        engine.config.safe_mode = false;

        let result = must(engine.perform_extract_method(
            "do_math",
            (5, 0),
            (10, 0),
            std::slice::from_ref(&path),
        ));

        assert!(result.success);
        let new_code = must(std::fs::read_to_string(&path));
        println!("New code complex:\n{}", new_code);

        assert!(new_code.contains("sub do_math {"));
        assert!(new_code.contains("my ($sum, @items) = @_;"));
        assert!(new_code.contains("return ($call_count, $sum);"));
        assert!(new_code.contains("($call_count, $sum) = do_math($sum, @items);"));
        // The replacement call must keep the original indentation.
        assert!(new_code.contains("    ($call_count, $sum) = do_math($sum, @items);"));
    }
2295
2296 mod validation_tests {
2301 use super::*;
2302 use perl_tdd_support::{must, must_err};
2303 use serial_test::serial;
2304
        // Bare identifiers: letters/underscore first, digits allowed after.
        #[test]
        fn test_validate_identifier_bare_name() {
            let engine = RefactoringEngine::new();
            assert!(engine.validate_perl_identifier("foo", "test").is_ok());
            assert!(engine.validate_perl_identifier("_private", "test").is_ok());
            assert!(engine.validate_perl_identifier("CamelCase", "test").is_ok());
            assert!(engine.validate_perl_identifier("name_with_123", "test").is_ok());
        }

        // All five Perl sigils are accepted on identifiers.
        #[test]
        fn test_validate_identifier_with_sigils() {
            let engine = RefactoringEngine::new();
            assert!(engine.validate_perl_identifier("$scalar", "test").is_ok());
            assert!(engine.validate_perl_identifier("@array", "test").is_ok());
            assert!(engine.validate_perl_identifier("%hash", "test").is_ok());
            assert!(engine.validate_perl_identifier("&sub", "test").is_ok());
            assert!(engine.validate_perl_identifier("*glob", "test").is_ok());
        }

        // Package-qualified names, with or without sigils, are valid.
        #[test]
        fn test_validate_identifier_qualified_names() {
            let engine = RefactoringEngine::new();
            assert!(engine.validate_perl_identifier("Package::name", "test").is_ok());
            assert!(engine.validate_perl_identifier("$Package::var", "test").is_ok());
            assert!(engine.validate_perl_identifier("@Deep::Nested::array", "test").is_ok());
            assert!(engine.validate_perl_identifier("::main_package", "test").is_ok());
        }

        // Empty input must be rejected.
        #[test]
        fn test_validate_identifier_empty_rejected() {
            let engine = RefactoringEngine::new();
            assert!(engine.validate_perl_identifier("", "test").is_err());
        }

        // A sigil with nothing after it must be rejected.
        #[test]
        fn test_validate_identifier_sigil_only_rejected() {
            let engine = RefactoringEngine::new();
            assert!(engine.validate_perl_identifier("$", "test").is_err());
            assert!(engine.validate_perl_identifier("@", "test").is_err());
            assert!(engine.validate_perl_identifier("%", "test").is_err());
        }

        // Identifiers may not start with a digit or punctuation.
        #[test]
        fn test_validate_identifier_invalid_start_char() {
            let engine = RefactoringEngine::new();
            assert!(engine.validate_perl_identifier("123abc", "test").is_err());
            assert!(engine.validate_perl_identifier("$123abc", "test").is_err());
            assert!(engine.validate_perl_identifier("-invalid", "test").is_err());
        }

        // Subroutine names: bare or `&`-prefixed forms are valid.
        #[test]
        fn test_validate_subroutine_name_valid() {
            let engine = RefactoringEngine::new();
            assert!(engine.validate_perl_subroutine_name("my_sub").is_ok());
            assert!(engine.validate_perl_subroutine_name("_private_sub").is_ok());
            assert!(engine.validate_perl_subroutine_name("&explicit_sub").is_ok());
        }

        // Data sigils ($ @ %) are invalid on subroutine names.
        #[test]
        fn test_validate_subroutine_name_invalid_sigils() {
            let engine = RefactoringEngine::new();
            assert!(engine.validate_perl_subroutine_name("$not_a_sub").is_err());
            assert!(engine.validate_perl_subroutine_name("@not_a_sub").is_err());
            assert!(engine.validate_perl_subroutine_name("%not_a_sub").is_err());
        }

        // Empty subroutine names must be rejected.
        #[test]
        fn test_validate_subroutine_name_empty() {
            let engine = RefactoringEngine::new();
            assert!(engine.validate_perl_subroutine_name("").is_err());
        }

        // Qualified names: each `::`-separated segment must be valid.
        #[test]
        fn test_validate_qualified_name_valid() {
            let engine = RefactoringEngine::new();
            assert!(engine.validate_perl_qualified_name("Package").is_ok());
            assert!(engine.validate_perl_qualified_name("Package::Sub").is_ok());
            assert!(engine.validate_perl_qualified_name("Deep::Nested::Name").is_ok());
        }

        // Empty names and a bare separator must be rejected.
        #[test]
        fn test_validate_qualified_name_empty_rejected() {
            let engine = RefactoringEngine::new();
            assert!(engine.validate_perl_qualified_name("").is_err());
            assert!(engine.validate_perl_qualified_name("::").is_err());
        }

        // A single invalid segment invalidates the whole qualified name.
        #[test]
        fn test_validate_qualified_name_invalid_segment() {
            let engine = RefactoringEngine::new();
            assert!(engine.validate_perl_qualified_name("Package::123invalid").is_err());
        }
2405
        // Exceeding `max_files_per_operation` (default 100) must be rejected.
        #[test]
        fn test_validate_file_count_limit() {
            let engine = RefactoringEngine::new();
            let files: Vec<PathBuf> =
                (0..150).map(|i| PathBuf::from(format!("/fake/{}.pl", i))).collect();

            let op = RefactoringType::OptimizeImports {
                remove_unused: true,
                sort_alphabetically: true,
                group_by_type: false,
            };

            let result = engine.validate_operation(&op, &files);
            assert!(result.is_err());
            let err_msg = format!("{:?}", must_err(result));
            assert!(err_msg.contains("exceeds maximum file limit"));
        }

        // Extract-method must name a target file.
        #[test]
        fn test_extract_method_requires_file() {
            let engine = RefactoringEngine::new();
            let op = RefactoringType::ExtractMethod {
                method_name: "new_method".to_string(),
                start_position: (1, 0),
                end_position: (5, 0),
            };

            let result = engine.validate_operation(&op, &[]);
            assert!(result.is_err());
            let err_msg = format!("{:?}", must_err(result));
            assert!(err_msg.contains("requires a target file"));
        }

        // Extract-method rejects more than one file.
        #[test]
        fn test_extract_method_single_file_only() {
            let file1: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
            let file2: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());

            let engine = RefactoringEngine::new();
            let op = RefactoringType::ExtractMethod {
                method_name: "new_method".to_string(),
                start_position: (1, 0),
                end_position: (5, 0),
            };

            let result = engine
                .validate_operation(&op, &[file1.path().to_path_buf(), file2.path().to_path_buf()]);
            assert!(result.is_err());
            let err_msg = format!("{:?}", must_err(result));
            assert!(err_msg.contains("operates on a single file"));
        }

        // A start position after the end position must be rejected.
        #[test]
        fn test_extract_method_invalid_range() {
            let file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());

            let engine = RefactoringEngine::new();
            let op = RefactoringType::ExtractMethod {
                method_name: "new_method".to_string(),
                start_position: (10, 0),
                end_position: (5, 0),
            };

            let result = engine.validate_operation(&op, &[file.path().to_path_buf()]);
            assert!(result.is_err());
            let err_msg = format!("{:?}", must_err(result));
            assert!(err_msg.contains("must be before end"));
        }

        // A sigil-prefixed method name is not a valid subroutine name.
        #[test]
        fn test_extract_method_invalid_subroutine_name() {
            let file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());

            let engine = RefactoringEngine::new();
            let op = RefactoringType::ExtractMethod {
                method_name: "$invalid".to_string(),
                start_position: (1, 0),
                end_position: (5, 0),
            };

            let result = engine.validate_operation(&op, &[file.path().to_path_buf()]);
            assert!(result.is_err());
        }
2494
2495 #[test]
2498 fn test_move_code_requires_elements() {
2499 use std::io::Write;
2500 let mut file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
2501 must(write!(file, "# source"));
2502
2503 let engine = RefactoringEngine::new();
2504 let op = RefactoringType::MoveCode {
2505 source_file: file.path().to_path_buf(),
2506 target_file: PathBuf::from("target.pl"),
2507 elements: vec![], };
2509
2510 let result = engine.validate_operation(&op, &[]);
2511 assert!(result.is_err());
2512 let err_msg = format!("{:?}", must_err(result));
2513 assert!(err_msg.contains("requires at least one element"));
2514 }
2515
2516 #[test]
2519 fn test_symbol_rename_accepts_sigils() {
2520 use std::io::Write;
2521 let mut file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
2522 must(write!(file, "my $old = 1;"));
2523
2524 let engine = RefactoringEngine::new();
2525 let op = RefactoringType::SymbolRename {
2526 old_name: "$old_var".to_string(),
2527 new_name: "$new_var".to_string(),
2528 scope: RefactoringScope::File(file.path().to_path_buf()),
2529 };
2530
2531 let result = engine.validate_operation(&op, &[]);
2532 assert!(result.is_ok());
2533 }
2534
2535 #[test]
2536 fn test_symbol_rename_workspace_scope_no_files_required() {
2537 let engine = RefactoringEngine::new();
2538 let op = RefactoringType::SymbolRename {
2539 old_name: "old_sub".to_string(),
2540 new_name: "new_sub".to_string(),
2541 scope: RefactoringScope::Workspace,
2542 };
2543
2544 let result = engine.validate_operation(&op, &[]);
2545 assert!(result.is_ok());
2546 }
2547
2548 #[test]
2549 fn test_symbol_rename_fileset_requires_files() {
2550 let engine = RefactoringEngine::new();
2551 let op = RefactoringType::SymbolRename {
2552 old_name: "old_sub".to_string(),
2553 new_name: "new_sub".to_string(),
2554 scope: RefactoringScope::FileSet(vec![]), };
2556
2557 let result = engine.validate_operation(&op, &[]);
2558 assert!(result.is_err());
2559 let err_msg = format!("{:?}", must_err(result));
2560 assert!(err_msg.contains("requires at least one file"));
2561 }
2562
2563 #[test]
2566 fn test_inline_requires_files() {
2567 let engine = RefactoringEngine::new();
2568 let op =
2569 RefactoringType::Inline { symbol_name: "$var".to_string(), all_occurrences: true };
2570
2571 let result = engine.validate_operation(&op, &[]);
2572 assert!(result.is_err());
2573 let err_msg = format!("{:?}", must_err(result));
2574 assert!(err_msg.contains("requires at least one target file"));
2575 }
2576
2577 #[test]
2580 fn test_modernize_requires_patterns() {
2581 let engine = RefactoringEngine::new();
2582 let op = RefactoringType::Modernize { patterns: vec![] };
2583
2584 let result = engine.validate_operation(&op, &[]);
2585 assert!(result.is_err());
2586 let err_msg = format!("{:?}", must_err(result));
2587 assert!(err_msg.contains("requires at least one pattern"));
2588 }
2589
2590 #[test]
2593 fn test_symbol_rename_sigil_consistency_required() {
2594 let engine = RefactoringEngine::new();
2595 let op = RefactoringType::SymbolRename {
2597 old_name: "$foo".to_string(),
2598 new_name: "@foo".to_string(),
2599 scope: RefactoringScope::Workspace,
2600 };
2601
2602 let result = engine.validate_operation(&op, &[]);
2603 assert!(result.is_err());
2604 let err_msg = format!("{:?}", must_err(result));
2605 assert!(err_msg.contains("sigil mismatch"));
2606 }
2607
2608 #[test]
2609 fn test_symbol_rename_sigil_consistency_no_sigil_to_sigil() {
2610 let engine = RefactoringEngine::new();
2611 let op = RefactoringType::SymbolRename {
2613 old_name: "foo".to_string(),
2614 new_name: "$foo".to_string(),
2615 scope: RefactoringScope::Workspace,
2616 };
2617
2618 let result = engine.validate_operation(&op, &[]);
2619 assert!(result.is_err());
2620 let err_msg = format!("{:?}", must_err(result));
2621 assert!(err_msg.contains("sigil mismatch"));
2622 }
2623
2624 #[test]
2625 fn test_symbol_rename_same_name_rejected() {
2626 let engine = RefactoringEngine::new();
2627 let op = RefactoringType::SymbolRename {
2628 old_name: "$foo".to_string(),
2629 new_name: "$foo".to_string(),
2630 scope: RefactoringScope::Workspace,
2631 };
2632
2633 let result = engine.validate_operation(&op, &[]);
2634 assert!(result.is_err());
2635 let err_msg = format!("{:?}", must_err(result));
2636 assert!(err_msg.contains("must be different"));
2637 }
2638
2639 #[test]
2642 fn test_validate_identifier_double_separator_rejected() {
2643 let engine = RefactoringEngine::new();
2644 assert!(engine.validate_perl_identifier("Foo::::Bar", "test").is_err());
2646 assert!(engine.validate_perl_identifier("$Foo::::Bar", "test").is_err());
2647 }
2648
2649 #[test]
2650 fn test_validate_identifier_trailing_separator_rejected() {
2651 let engine = RefactoringEngine::new();
2652 assert!(engine.validate_perl_identifier("Foo::", "test").is_err());
2654 assert!(engine.validate_perl_identifier("$Foo::Bar::", "test").is_err());
2655 }
2656
2657 #[test]
2658 fn test_validate_identifier_leading_separator_allowed() {
2659 let engine = RefactoringEngine::new();
2660 assert!(engine.validate_perl_identifier("::Foo", "test").is_ok());
2662 assert!(engine.validate_perl_identifier("::Foo::Bar", "test").is_ok());
2663 assert!(engine.validate_perl_identifier("$::Foo", "test").is_ok());
2664 }
2665
2666 #[test]
2667 fn test_validate_qualified_name_double_separator_rejected() {
2668 let engine = RefactoringEngine::new();
2669 assert!(engine.validate_perl_qualified_name("Foo::::Bar").is_err());
2670 }
2671
2672 #[test]
2673 fn test_validate_qualified_name_trailing_separator_rejected() {
2674 let engine = RefactoringEngine::new();
2675 assert!(engine.validate_perl_qualified_name("Foo::").is_err());
2676 assert!(engine.validate_perl_qualified_name("Foo::Bar::").is_err());
2677 }
2678
2679 #[test]
2680 fn test_validate_qualified_name_leading_separator_rejected() {
2681 let engine = RefactoringEngine::new();
2682 assert!(engine.validate_perl_qualified_name("::Foo").is_err());
2684 }
2685
2686 #[test]
2687 fn test_validate_qualified_name_sigil_rejected() {
2688 let engine = RefactoringEngine::new();
2689 assert!(engine.validate_perl_qualified_name("$foo").is_err());
2691 assert!(engine.validate_perl_qualified_name("@array").is_err());
2692 }
2693
2694 #[test]
2697 fn test_validate_identifier_unicode_allowed() {
2698 let engine = RefactoringEngine::new();
2699 assert!(engine.validate_perl_identifier("$π", "test").is_ok());
2701 assert!(engine.validate_perl_identifier("$αβγ", "test").is_ok());
2702 assert!(engine.validate_perl_identifier("日本語", "test").is_ok());
2703 }
2704
2705 #[test]
2706 fn test_validate_qualified_name_unicode_allowed() {
2707 let engine = RefactoringEngine::new();
2708 assert!(engine.validate_perl_qualified_name("Müller").is_ok());
2710 assert!(engine.validate_perl_qualified_name("Müller::Util").is_ok());
2711 assert!(engine.validate_perl_qualified_name("日本::パッケージ").is_ok());
2712 }
2713
2714 #[test]
2717 fn test_extract_method_ampersand_prefix_rejected() {
2718 let file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
2719
2720 let engine = RefactoringEngine::new();
2721 let op = RefactoringType::ExtractMethod {
2722 method_name: "&foo".to_string(), start_position: (1, 0),
2724 end_position: (5, 0),
2725 };
2726
2727 let result = engine.validate_operation(&op, &[file.path().to_path_buf()]);
2728 assert!(result.is_err());
2729 let err_msg = format!("{:?}", must_err(result));
2730 assert!(err_msg.contains("bare identifier"));
2731 assert!(err_msg.contains("no leading '&'"));
2732 }
2733
2734 #[test]
2737 fn test_move_code_same_file_rejected() {
2738 use std::io::Write;
2739 let mut file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
2740 must(write!(file, "# source"));
2741
2742 let engine = RefactoringEngine::new();
2743 let op = RefactoringType::MoveCode {
2744 source_file: file.path().to_path_buf(),
2745 target_file: file.path().to_path_buf(), elements: vec!["some_sub".to_string()],
2747 };
2748
2749 let result = engine.validate_operation(&op, &[]);
2750 assert!(result.is_err());
2751 let err_msg = format!("{:?}", must_err(result));
2752 assert!(err_msg.contains("must be different"));
2753 }
2754
2755 #[test]
2758 fn test_fileset_scope_max_files_limit() {
2759 let files: Vec<tempfile::NamedTempFile> =
2761 (0..5).map(|_| must(tempfile::NamedTempFile::new())).collect();
2762 let paths: Vec<_> = files.iter().map(|f| f.path().to_path_buf()).collect();
2763
2764 let config =
2766 RefactoringConfig { max_files_per_operation: 3, ..RefactoringConfig::default() };
2767 let engine = RefactoringEngine::with_config(config);
2768
2769 let op = RefactoringType::SymbolRename {
2770 old_name: "old_sub".to_string(),
2771 new_name: "new_sub".to_string(),
2772 scope: RefactoringScope::FileSet(paths), };
2774
2775 let result = engine.validate_operation(&op, &[]);
2776 assert!(result.is_err());
2777 let err_msg = format!("{:?}", must_err(result));
2778 assert!(err_msg.contains("exceeds maximum file limit"));
2779 }
2780
2781 #[test]
2784 fn test_cleanup_no_backups() {
2785 let temp_dir = must(tempfile::tempdir());
2786 let config = RefactoringConfig {
2787 backup_root: Some(temp_dir.path().to_path_buf()),
2788 ..RefactoringConfig::default()
2789 };
2790 let mut engine = RefactoringEngine::with_config(config);
2791 let result = must(engine.clear_history());
2792 assert_eq!(result.directories_removed, 0);
2793 assert_eq!(result.space_reclaimed, 0);
2794 }
2795
2796 #[test]
2797 #[serial]
2798 fn test_cleanup_backup_directories() {
2799 use std::fs;
2800
2801 let temp_dir = must(tempfile::tempdir());
2802 let backup_root = temp_dir.path().to_path_buf();
2803
2804 let backup = backup_root.join("refactor_100_0");
2806 must(fs::create_dir_all(&backup));
2807 must(fs::write(backup.join("file.pl"), "sub test {}"));
2808
2809 let config = RefactoringConfig {
2810 backup_root: Some(backup_root),
2811 max_backup_retention: 0, ..RefactoringConfig::default()
2813 };
2814 let mut engine = RefactoringEngine::with_config(config);
2815 let result = must(engine.clear_history());
2816
2817 assert!(result.directories_removed >= 1);
2819 assert_eq!(engine.operation_history.len(), 0);
2820 }
2821
2822 #[test]
2823 #[serial]
2824 fn test_cleanup_respects_retention_count() {
2825 use std::io::Write;
2826
2827 let config = RefactoringConfig {
2828 create_backups: true,
2829 max_backup_retention: 2,
2830 backup_max_age_seconds: 0, ..RefactoringConfig::default()
2832 };
2833
2834 let mut engine = RefactoringEngine::with_config(config);
2835
2836 for i in 0..4 {
2838 let mut file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
2839 must(writeln!(file, "sub test{} {{ }}", i));
2840 let path = file.path().to_path_buf();
2841
2842 let op = RefactoringType::SymbolRename {
2843 old_name: format!("test{}", i),
2844 new_name: format!("renamed_test{}", i),
2845 scope: RefactoringScope::File(path.clone()),
2846 };
2847
2848 let _ = engine.refactor(op, vec![path]);
2849 std::thread::sleep(std::time::Duration::from_millis(100)); }
2851
2852 let result = must(engine.clear_history());
2854
2855 assert!(result.directories_removed >= 2);
2857 }
2858
    #[test]
    #[serial]
    fn test_cleanup_respects_age_limit() {
        // Verifies that `clear_history` removes backup directories whose
        // modification time is older than `backup_max_age_seconds`.
        use std::fs;

        let temp_dir = must(tempfile::tempdir());
        let backup_root = temp_dir.path().to_path_buf();
        must(fs::create_dir_all(&backup_root));

        // Backup directory with the expected `refactor_<ts>_<n>` name.
        let old_backup = backup_root.join("refactor_1000_0");
        must(fs::create_dir_all(&old_backup));

        let test_file = old_backup.join("file_0.pl");
        must(fs::write(&test_file, "sub old_backup { }"));

        // Poll (up to 5s) until the directory's mtime is more than 1 second
        // in the past, so the age-based cleanup below can fire deterministically
        // regardless of filesystem timestamp granularity.
        let deadline = std::time::Instant::now() + std::time::Duration::from_secs(5);
        let mut reached_age_limit = false;
        while std::time::Instant::now() < deadline {
            if let Ok(metadata) = fs::metadata(&old_backup)
                && let Ok(modified) = metadata.modified()
                && let Ok(age) = std::time::SystemTime::now().duration_since(modified)
                && age > std::time::Duration::from_secs(1)
            {
                reached_age_limit = true;
                break;
            }

            std::thread::sleep(std::time::Duration::from_millis(50));
        }
        assert!(
            reached_age_limit,
            "backup directory did not age past threshold within test timeout"
        );

        // With a 1-second max age, the (now >1s old) backup must be removed.
        let config = RefactoringConfig {
            backup_root: Some(backup_root),
            backup_max_age_seconds: 1,
            ..RefactoringConfig::default()
        };

        let mut engine = RefactoringEngine::with_config(config);

        let result = engine.clear_history();
        assert!(result.is_ok());

        let cleanup_result = must(result);
        assert!(cleanup_result.directories_removed >= 1);
    }
2912
2913 #[test]
2914 fn test_validate_backup_directory_structure() {
2915 let engine = RefactoringEngine::new();
2916
2917 let backup_root = std::env::temp_dir().join("perl_refactor_backups");
2918 let _ = std::fs::create_dir_all(&backup_root);
2919
2920 let valid_backup = backup_root.join("refactor_123_456");
2922 let _ = std::fs::create_dir_all(&valid_backup);
2923 assert!(must(engine.validate_backup_directory(&valid_backup)));
2924
2925 let invalid_backup = backup_root.join("invalid_backup");
2927 let _ = std::fs::create_dir_all(&invalid_backup);
2928 assert!(!must(engine.validate_backup_directory(&invalid_backup)));
2929
2930 let _ = std::fs::remove_dir_all(&backup_root);
2932 }
2933
2934 #[test]
2935 fn test_calculate_directory_size() {
2936 let engine = RefactoringEngine::new();
2937
2938 let temp_dir = must(tempfile::tempdir());
2939 let dir_path = temp_dir.path().to_path_buf();
2940
2941 let file1 = dir_path.join("file1.txt");
2943 let file2 = dir_path.join("file2.txt");
2944
2945 must(std::fs::write(&file1, "hello")); must(std::fs::write(&file2, "world!")); let total_size = must(engine.calculate_directory_size(&dir_path));
2949 assert_eq!(total_size, 11);
2950 }
2951
2952 #[test]
2953 #[serial]
2954 fn test_backup_cleanup_result_space_reclaimed() {
2955 use std::fs;
2956
2957 let temp_dir = must(tempfile::tempdir());
2958 let backup_root = temp_dir.path().to_path_buf();
2959
2960 let backup = backup_root.join("refactor_100_0");
2962 must(fs::create_dir_all(&backup));
2963
2964 let test_content = "sub test { print 'hello world'; }"; must(fs::write(backup.join("file.pl"), test_content));
2966
2967 let config = RefactoringConfig {
2968 backup_root: Some(backup_root),
2969 max_backup_retention: 0, ..RefactoringConfig::default()
2971 };
2972 let mut engine = RefactoringEngine::with_config(config);
2973
2974 let result = must(engine.clear_history());
2976 assert!(result.space_reclaimed > 0);
2977 }
2978
2979 #[test]
2982 #[serial]
2983 fn cleanup_test_identifies_all_backup_directories() {
2984 use std::fs;
2987
2988 let temp_dir = must(tempfile::tempdir());
2989 let backup_root = temp_dir.path().to_path_buf();
2990
2991 let backup1 = backup_root.join("refactor_100_0");
2993 let backup2 = backup_root.join("refactor_200_0");
2994 must(fs::create_dir_all(&backup1));
2995 must(fs::create_dir_all(&backup2));
2996
2997 must(fs::write(backup1.join("file1.pl"), "sub test1 {}"));
2999 must(fs::write(backup2.join("file2.pl"), "sub test2 {}"));
3000
3001 let config = RefactoringConfig {
3002 backup_root: Some(backup_root),
3003 max_backup_retention: 0, ..RefactoringConfig::default()
3005 };
3006 let mut engine = RefactoringEngine::with_config(config);
3007 let result = must(engine.clear_history());
3008
3009 assert_eq!(result.directories_removed, 2);
3011 assert_eq!(engine.operation_history.len(), 0);
3012
3013 assert!(!backup1.exists());
3015 assert!(!backup2.exists());
3016 }
3017
3018 #[test]
3019 #[serial]
3020 fn cleanup_test_respects_retention_count() {
3021 use std::fs;
3024 use std::thread;
3025 use std::time::Duration;
3026
3027 let temp_dir = must(tempfile::tempdir());
3028 let backup_root = temp_dir.path().to_path_buf();
3029
3030 let backups = [
3032 backup_root.join("refactor_100_0"),
3033 backup_root.join("refactor_200_0"),
3034 backup_root.join("refactor_300_0"),
3035 backup_root.join("refactor_400_0"),
3036 ];
3037
3038 for (i, backup) in backups.iter().enumerate() {
3039 must(fs::create_dir_all(backup));
3040 must(fs::write(backup.join("file.pl"), format!("sub test{} {{}}", i)));
3041 thread::sleep(Duration::from_millis(50));
3043 }
3044
3045 let config = RefactoringConfig {
3046 create_backups: true,
3047 max_backup_retention: 2,
3048 backup_max_age_seconds: 0, backup_root: Some(backup_root),
3050 ..RefactoringConfig::default()
3051 };
3052
3053 let mut engine = RefactoringEngine::with_config(config);
3054 let result = must(engine.clear_history());
3055
3056 assert_eq!(result.directories_removed, 2);
3058
3059 assert!(!backups[0].exists());
3061 assert!(!backups[1].exists());
3062 }
3064
    #[test]
    #[serial]
    fn cleanup_test_respects_age_limit() {
        // Verifies that cleanup removes exactly the backup directory whose
        // mtime exceeds `backup_max_age_seconds`, and deletes it from disk.
        use std::fs;

        let temp_dir = must(tempfile::tempdir());
        let backup_root = temp_dir.path().to_path_buf();

        // Backup directory with the expected `refactor_<ts>_<n>` name.
        let old_backup = backup_root.join("refactor_1000_0");
        must(fs::create_dir_all(&old_backup));

        let test_file = old_backup.join("file_0.pl");
        must(fs::write(&test_file, "sub old_backup { }"));

        // Poll (up to 5s) until the directory's mtime is more than 1 second
        // in the past, so the age-based cleanup below can fire deterministically
        // regardless of filesystem timestamp granularity.
        let deadline = std::time::Instant::now() + std::time::Duration::from_secs(5);
        let mut reached_age_limit = false;
        while std::time::Instant::now() < deadline {
            if let Ok(metadata) = fs::metadata(&old_backup)
                && let Ok(modified) = metadata.modified()
                && let Ok(age) = std::time::SystemTime::now().duration_since(modified)
                && age > std::time::Duration::from_secs(1)
            {
                reached_age_limit = true;
                break;
            }

            std::thread::sleep(std::time::Duration::from_millis(50));
        }
        assert!(
            reached_age_limit,
            "backup directory did not age past threshold within test timeout"
        );

        // With a 1-second max age, the (now >1s old) backup must be removed.
        let config = RefactoringConfig {
            backup_max_age_seconds: 1,
            backup_root: Some(backup_root),
            ..RefactoringConfig::default()
        };

        let mut engine = RefactoringEngine::with_config(config);

        let result = engine.clear_history();
        assert!(result.is_ok());

        let cleanup_result = must(result);
        assert_eq!(cleanup_result.directories_removed, 1);

        assert!(!old_backup.exists());
    }
3124
3125 #[test]
3126 #[serial]
3127 fn cleanup_test_space_reclaimed() {
3128 use std::fs;
3130
3131 let temp_dir = must(tempfile::tempdir());
3132 let backup_root = temp_dir.path().to_path_buf();
3133
3134 let backup = backup_root.join("refactor_100_0");
3136 must(fs::create_dir_all(&backup));
3137
3138 let test_content = "sub test { print 'hello world'; }"; must(fs::write(backup.join("file1.pl"), test_content));
3140 must(fs::write(backup.join("file2.pl"), test_content));
3141
3142 let config = RefactoringConfig {
3143 backup_root: Some(backup_root),
3144 max_backup_retention: 0, ..RefactoringConfig::default()
3146 };
3147 let mut engine = RefactoringEngine::with_config(config);
3148
3149 let result = must(engine.clear_history());
3151 assert_eq!(result.directories_removed, 1);
3152 assert_eq!(result.space_reclaimed, 66); assert!(!backup.exists());
3156 }
3157
3158 #[test]
3159 #[serial]
3160 fn cleanup_test_only_removes_refactor_backups() {
3161 use std::fs;
3163
3164 let temp_dir = must(tempfile::tempdir());
3165 let backup_root = temp_dir.path().to_path_buf();
3166
3167 let refactor_backup = backup_root.join("refactor_100_0");
3169 must(fs::create_dir_all(&refactor_backup));
3170 must(fs::write(refactor_backup.join("file.pl"), "test"));
3171
3172 let other_dir = backup_root.join("other_backup");
3174 must(fs::create_dir_all(&other_dir));
3175 must(fs::write(other_dir.join("file.pl"), "test"));
3176
3177 let config = RefactoringConfig {
3178 backup_root: Some(backup_root),
3179 max_backup_retention: 0, ..RefactoringConfig::default()
3181 };
3182 let mut engine = RefactoringEngine::with_config(config);
3183 let result = must(engine.clear_history());
3184
3185 assert_eq!(result.directories_removed, 1);
3187 assert!(!refactor_backup.exists());
3188 assert!(other_dir.exists()); }
3191
3192 #[test]
3193 #[serial]
3194 fn cleanup_test_with_zero_retention_removes_all() {
3195 use std::fs;
3197
3198 let temp_dir = must(tempfile::tempdir());
3199 let backup_root = temp_dir.path().to_path_buf();
3200
3201 for i in 0..3 {
3203 let backup = backup_root.join(format!("refactor_{}_0", i * 100));
3204 must(fs::create_dir_all(&backup));
3205 must(fs::write(backup.join("file.pl"), "test"));
3206 }
3207
3208 let config = RefactoringConfig {
3209 max_backup_retention: 0, backup_max_age_seconds: 0,
3211 backup_root: Some(backup_root),
3212 ..RefactoringConfig::default()
3213 };
3214
3215 let mut engine = RefactoringEngine::with_config(config);
3216 let result = must(engine.clear_history());
3217
3218 assert_eq!(result.directories_removed, 3);
3220 }
3222
    #[test]
    #[serial]
    fn comprehensive_backup_cleanup_all_acs() {
        // End-to-end check of the backup-cleanup acceptance criteria:
        //   AC1: only `refactor_*` directories are recognized as backups,
        //        and only those are removed (space reclaimed is reported);
        //   AC2: retention keeps the newest N backups;
        //   AC3: `backup_max_age_seconds` removes sufficiently old backups.
        use std::fs;
        use std::thread;
        use std::time::Duration;

        // --- AC1: directory validation + selective removal ---------------
        let temp_dir1 = must(tempfile::tempdir());
        let backup_root1 = temp_dir1.path().to_path_buf();

        let valid_backup = backup_root1.join("refactor_test_1");
        let invalid_backup = backup_root1.join("other_backup");
        must(fs::create_dir_all(&valid_backup));
        must(fs::create_dir_all(&invalid_backup));
        must(fs::write(valid_backup.join("file.pl"), "test"));
        must(fs::write(invalid_backup.join("file.pl"), "test"));

        let config1 = RefactoringConfig {
            backup_root: Some(backup_root1.clone()),
            max_backup_retention: 0, // remove every recognized backup
            ..RefactoringConfig::default()
        };
        let engine = RefactoringEngine::with_config(config1.clone());
        assert!(must(engine.validate_backup_directory(&valid_backup)));
        assert!(!must(engine.validate_backup_directory(&invalid_backup)));

        let mut engine2 = RefactoringEngine::with_config(config1);
        let result1 = must(engine2.clear_history());
        assert_eq!(result1.directories_removed, 1); // only the refactor_* dir
        assert_eq!(result1.space_reclaimed, 4); // "test" is 4 bytes
        assert!(!valid_backup.exists());
        assert!(invalid_backup.exists()); // unrelated dir untouched

        // --- AC2: retention count keeps the newest two --------------------
        let temp_dir2 = must(tempfile::tempdir());
        let backup_root2 = temp_dir2.path().to_path_buf();

        for i in 0..4 {
            let backup = backup_root2.join(format!("refactor_retention_{}", i));
            must(fs::create_dir_all(&backup));
            must(fs::write(backup.join("file.pl"), "x"));
            thread::sleep(Duration::from_millis(50)); // distinct mtimes
        }

        let config2 = RefactoringConfig {
            max_backup_retention: 2,
            backup_max_age_seconds: 0,
            backup_root: Some(backup_root2),
            ..RefactoringConfig::default()
        };
        let mut engine3 = RefactoringEngine::with_config(config2);
        let result2 = must(engine3.clear_history());
        assert_eq!(result2.directories_removed, 2); // 4 backups - retention 2

        // --- AC3: age limit removes backups older than the threshold ------
        let temp_dir3 = must(tempfile::tempdir());
        let backup_root3 = temp_dir3.path().to_path_buf();

        let old_backup = backup_root3.join("refactor_age_test");
        must(fs::create_dir_all(&old_backup));
        must(fs::write(old_backup.join("file.pl"), "old"));

        let config3 = RefactoringConfig {
            backup_max_age_seconds: 1,
            max_backup_retention: 0,
            backup_root: Some(backup_root3),
            ..RefactoringConfig::default()
        };
        let mut engine4 = RefactoringEngine::with_config(config3);
        // Let the backup age past the 1-second threshold.
        thread::sleep(Duration::from_secs(2));

        let result3 = must(engine4.clear_history());
        assert_eq!(result3.directories_removed, 1);
        assert!(!old_backup.exists());
    }
3310 }
3311
3312 #[cfg(feature = "workspace_refactor")]
3319 #[test]
3320 fn test_inline_all_occurrences_routes_to_workspace_lookup() {
3321 let temp_dir = must(tempfile::tempdir());
3322 let path_a = temp_dir.path().join("a.pl");
3323 let path_b = temp_dir.path().join("b.pl");
3324
3325 let content_a = "my $const = 42;\nprint $const;\n";
3327 let content_b = "print $const;\n";
3328
3329 must(std::fs::write(&path_a, content_a));
3330 must(std::fs::write(&path_b, content_b));
3331
3332 let mut engine = RefactoringEngine::new();
3333 engine.config.safe_mode = false;
3334
3335 must(engine.index_file(&path_a, content_a));
3337 must(engine.index_file(&path_b, content_b));
3338
3339 let result = must(engine.refactor(
3341 RefactoringType::Inline { symbol_name: "$const".to_string(), all_occurrences: true },
3342 vec![path_a.clone()],
3343 ));
3344
3345 assert!(result.success, "expected success, warnings: {:?}", result.warnings);
3346
3347 let updated_b = must(std::fs::read_to_string(&path_b));
3349 assert!(
3350 !updated_b.contains("$const"),
3351 "expected $const to be inlined in path_b, but found: {:?}",
3352 updated_b
3353 );
3354 }
3355
3356 #[cfg(feature = "workspace_refactor")]
3359 #[test]
3360 fn test_inline_single_occurrence_stops_after_first_file() {
3361 let temp_dir = must(tempfile::tempdir());
3362 let path_a = temp_dir.path().join("a.pl");
3363
3364 let content_a = "my $x = 99;\nprint $x;\n";
3365 must(std::fs::write(&path_a, content_a));
3366
3367 let mut engine = RefactoringEngine::new();
3368 engine.config.safe_mode = false;
3369
3370 must(engine.index_file(&path_a, content_a));
3371
3372 let result = must(engine.refactor(
3373 RefactoringType::Inline { symbol_name: "$x".to_string(), all_occurrences: false },
3374 vec![path_a.clone()],
3375 ));
3376
3377 assert!(result.success, "expected success, warnings: {:?}", result.warnings);
3378 assert_eq!(result.files_modified, 1);
3379 }
3380}