Skip to main content

perl_refactoring/refactor/
refactoring.rs

1//! Unified refactoring engine for Perl code transformations
2//!
3//! This module provides a comprehensive refactoring engine that combines workspace-level
4//! operations with modern code transformations. It serves as the primary entry point
5//! for all refactoring operations in the Perl LSP ecosystem.
6//!
7//! ## LSP Workflow Integration
8//!
9//! The refactoring engine operates within the standard LSP workflow:
10//! **Parse → Index → Navigate → Complete → Analyze**
11//!
12//! - **Parse Stage**: Analyzes Perl syntax to understand code structure
13//! - **Index Stage**: Builds cross-file symbol relationships for safe refactoring
14//! - **Navigate Stage**: Updates references and maintains navigation integrity
15//! - **Complete Stage**: Ensures completion accuracy after refactoring changes
16//! - **Analyze Stage**: Validates refactoring results and provides feedback
17//!
18//! ## Performance Characteristics
19//!
20//! Optimized for enterprise Perl development with large codebases:
21//! - **Memory Efficiency**: Streaming approach for large file processing
22//! - **Incremental Updates**: Only processes changed portions during refactoring
23//! - **Parallel Operations**: Thread-safe refactoring for multi-file changes
24//!
25//! ## Architecture
26//!
27//! The unified engine integrates existing specialized refactoring modules:
28//! - workspace_refactor: Cross-file operations and symbol management
29//! - modernize: Code modernization and best practice application
30//! - import_optimizer: Import statement optimization and cleanup
31
32use crate::error::{ParseError, ParseResult};
33use crate::import_optimizer::ImportOptimizer;
34#[cfg(feature = "modernize")]
35use crate::modernize::PerlModernizer as ModernizeEngine;
36#[cfg(feature = "workspace_refactor")]
37use crate::workspace_index::WorkspaceIndex;
38#[cfg(feature = "workspace_refactor")]
39use crate::workspace_refactor::WorkspaceRefactor;
40use perl_parser_core::line_index::LineIndex;
41use perl_parser_core::{Node, NodeKind, Parser, SourceLocation};
42use perl_qualified_name::{
43    is_valid_identifier_part, validate_perl_qualified_name as validate_package_name,
44};
45use serde::{Deserialize, Serialize};
46use std::collections::{HashMap, HashSet};
47use std::fs;
48use std::path::{Path, PathBuf};
49
/// Unified refactoring engine that coordinates all refactoring operations
///
/// Provides a single interface for all types of code transformations,
/// from simple symbol renames to complex workspace restructuring.
/// Sub-engines are feature-gated; when a feature is disabled the field is
/// backed by a stub from `temp_stubs` so the struct layout stays uniform.
pub struct RefactoringEngine {
    /// Workspace-level refactoring operations (architectural placeholder for future implementation)
    #[cfg(feature = "workspace_refactor")]
    #[allow(dead_code)]
    workspace_refactor: WorkspaceRefactor,
    /// Workspace refactoring stub (feature disabled)
    #[cfg(not(feature = "workspace_refactor"))]
    #[allow(dead_code)]
    workspace_refactor: temp_stubs::WorkspaceRefactor,
    /// Code modernization engine for updating legacy Perl patterns
    #[cfg(feature = "modernize")]
    modernize: crate::modernize::PerlModernizer,
    /// Code modernization engine stub (feature disabled)
    #[cfg(not(feature = "modernize"))]
    modernize: temp_stubs::ModernizeEngine,
    /// Import optimization engine for cleaning up use statements
    import_optimizer: ImportOptimizer,
    /// Configuration for refactoring operations
    config: RefactoringConfig,
    /// Cache of recent operations for rollback support
    operation_history: Vec<RefactoringOperation>,
}
75
/// Configuration for refactoring operations
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RefactoringConfig {
    /// Enable safe mode (validate before applying changes)
    pub safe_mode: bool,
    /// Maximum number of files to process in a single operation
    pub max_files_per_operation: usize,
    /// Enable automatic backup creation
    pub create_backups: bool,
    /// Timeout for individual refactoring operations (seconds)
    pub operation_timeout: u64,
    /// Enable parallel processing for multi-file operations
    pub parallel_processing: bool,
    /// Maximum number of backup directories to retain (0 = unlimited)
    pub max_backup_retention: usize,
    /// Maximum age of backup directories in seconds (0 = no age limit)
    pub backup_max_age_seconds: u64,
    /// Custom backup root directory (defaults to temp_dir/perl_refactor_backups)
    // Skipped during (de)serialization, so a round-tripped config always
    // falls back to the default backup root (`None`).
    #[serde(skip)]
    pub backup_root: Option<PathBuf>,
}
97
98impl Default for RefactoringConfig {
99    fn default() -> Self {
100        Self {
101            safe_mode: true,
102            max_files_per_operation: 100,
103            create_backups: true,
104            operation_timeout: 60,
105            parallel_processing: true,
106            max_backup_retention: 10,
107            backup_max_age_seconds: 7 * 24 * 60 * 60, // 7 days
108            backup_root: None,
109        }
110    }
111}
112
/// Types of refactoring operations supported by the engine
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum RefactoringType {
    /// Rename symbols across workspace
    SymbolRename {
        /// Original symbol name to find
        old_name: String,
        /// New name to replace with
        new_name: String,
        /// Scope of the rename operation
        scope: RefactoringScope,
    },
    /// Extract methods from existing code
    ExtractMethod {
        /// Name for the extracted method
        method_name: String,
        /// Start position (line, column) of code to extract
        // NOTE(review): indexing basis (0- vs 1-based) is not stated here —
        // confirm against the callers that build these positions.
        start_position: (usize, usize),
        /// End position (line, column) of code to extract
        end_position: (usize, usize),
    },
    /// Move code between files
    MoveCode {
        /// Source file containing the code to move
        source_file: PathBuf,
        /// Destination file for the moved code
        target_file: PathBuf,
        /// Names of elements (subs, packages) to move
        elements: Vec<String>,
    },
    /// Modernize legacy code patterns
    Modernize {
        /// Modernization patterns to apply
        patterns: Vec<ModernizationPattern>,
    },
    /// Optimize imports across files
    OptimizeImports {
        /// Remove unused import statements
        remove_unused: bool,
        /// Sort imports alphabetically
        sort_alphabetically: bool,
        /// Group imports by type (core, CPAN, local)
        group_by_type: bool,
    },
    /// Inline variables or methods
    Inline {
        /// Name of the symbol to inline
        symbol_name: String,
        /// Whether to inline all occurrences or just the selected one
        all_occurrences: bool,
    },
}
165
/// Scope of refactoring operations
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum RefactoringScope {
    /// Single file operation
    File(PathBuf),
    /// Workspace-wide operation
    Workspace,
    /// Specific directory tree
    Directory(PathBuf),
    /// Custom set of files
    FileSet(Vec<PathBuf>),
    /// Package scope within a file
    Package {
        /// File containing the package declaration.
        file: PathBuf,
        /// Package name to scope the operation to.
        name: String,
    },
    /// Function scope within a file
    Function {
        /// File containing the function definition.
        file: PathBuf,
        /// Function name to scope the operation to.
        name: String,
    },
    /// Arbitrary block scope within a file (start, end positions)
    // NOTE(review): positions here are (u32, u32) while
    // RefactoringType::ExtractMethod uses (usize, usize) — consider
    // unifying the two position representations.
    Block {
        /// File containing the block.
        file: PathBuf,
        /// Start position as (line, column).
        start: (u32, u32),
        /// End position as (line, column).
        end: (u32, u32),
    },
}
201
/// Modernization patterns for legacy code
///
/// Each variant names one transformation the modernize engine can apply;
/// a `RefactoringType::Modernize` operation carries a list of these.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ModernizationPattern {
    /// Convert old-style subroutine calls to modern syntax
    SubroutineCalls,
    /// Add missing use strict/warnings
    StrictWarnings,
    /// Replace deprecated operators
    DeprecatedOperators,
    /// Modernize variable declarations
    VariableDeclarations,
    /// Update package declarations
    PackageDeclarations,
}
216
/// Record of a refactoring operation for rollback support
///
/// Entries are appended to the engine's `operation_history` by `refactor`
/// and looked up by id in `rollback`.
#[derive(Debug, Clone)]
pub struct RefactoringOperation {
    /// Unique identifier for the operation
    pub id: String,
    /// Type of operation performed
    pub operation_type: RefactoringType,
    /// Files modified during the operation
    pub modified_files: Vec<PathBuf>,
    /// Timestamp when operation was performed
    pub timestamp: std::time::SystemTime,
    /// Backup information for rollback (`None` when backups were disabled)
    pub backup_info: Option<BackupInfo>,
}
231
/// Backup information for operation rollback
///
/// Produced by `create_backup`; `rollback` copies each backup file back
/// over its original path using `file_mappings`.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct BackupInfo {
    /// Directory containing backup files
    pub backup_dir: PathBuf,
    /// Mapping of original files to backup locations
    pub file_mappings: HashMap<PathBuf, PathBuf>,
}
240
/// Result of backup cleanup operation
#[derive(Debug, Clone)]
pub struct BackupCleanupResult {
    /// Number of backup directories removed
    pub directories_removed: usize,
    /// Total space reclaimed in bytes
    pub space_reclaimed: u64,
}
249
/// Metadata for a backup directory used during cleanup
///
/// Collected per backup directory so cleanup can apply the retention-count
/// and max-age policies from `RefactoringConfig`.
#[derive(Debug, Clone)]
#[allow(dead_code)] // size field reserved for future cleanup policy implementations
struct BackupDirMetadata {
    /// Path to the backup directory
    path: PathBuf,
    /// Last modification time
    modified: std::time::SystemTime,
    /// Total size in bytes
    size: u64,
}
261
/// Result of a refactoring operation
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RefactoringResult {
    /// Whether the operation succeeded
    pub success: bool,
    /// Number of files modified
    pub files_modified: usize,
    /// Number of changes made
    pub changes_made: usize,
    /// Warning messages from the operation
    pub warnings: Vec<String>,
    /// Error messages if operation failed
    pub errors: Vec<String>,
    /// Operation identifier for rollback (`None` for results that cannot
    /// be rolled back, e.g. the synthetic result returned by `rollback`)
    pub operation_id: Option<String>,
}
278
279impl RefactoringEngine {
280    /// Create a new refactoring engine with default configuration
281    pub fn new() -> Self {
282        Self::with_config(RefactoringConfig::default())
283    }
284
    /// Create a new refactoring engine with custom configuration
    ///
    /// All sub-engines are constructed eagerly; fields whose feature flag
    /// is disabled are filled with stubs from `temp_stubs` instead, and the
    /// operation history starts empty.
    pub fn with_config(config: RefactoringConfig) -> Self {
        Self {
            #[cfg(feature = "workspace_refactor")]
            workspace_refactor: WorkspaceRefactor::new(WorkspaceIndex::default()),
            #[cfg(not(feature = "workspace_refactor"))]
            workspace_refactor: temp_stubs::WorkspaceRefactor::new(),
            #[cfg(feature = "modernize")]
            modernize: ModernizeEngine::new(),
            #[cfg(not(feature = "modernize"))]
            modernize: temp_stubs::ModernizeEngine::new(),
            import_optimizer: ImportOptimizer::new(),
            config,
            operation_history: Vec::new(),
        }
    }
301
302    /// Perform a refactoring operation
303    pub fn refactor(
304        &mut self,
305        operation_type: RefactoringType,
306        files: Vec<PathBuf>,
307    ) -> ParseResult<RefactoringResult> {
308        let operation_id = self.generate_operation_id();
309
310        // Validate operation if in safe mode
311        if self.config.safe_mode {
312            self.validate_operation(&operation_type, &files)?;
313        }
314
315        // Create backup if enabled
316        let backup_info = if self.config.create_backups {
317            Some(self.create_backup(&files, &operation_id)?)
318        } else {
319            None
320        };
321
322        // Perform the operation
323        let result = match operation_type.clone() {
324            RefactoringType::SymbolRename { old_name, new_name, scope } => {
325                self.perform_symbol_rename(&old_name, &new_name, &scope)
326            }
327            RefactoringType::ExtractMethod { method_name, start_position, end_position } => {
328                self.perform_extract_method(&method_name, start_position, end_position, &files)
329            }
330            RefactoringType::MoveCode { source_file, target_file, elements } => {
331                self.perform_move_code(&source_file, &target_file, &elements)
332            }
333            RefactoringType::Modernize { patterns } => self.perform_modernize(&patterns, &files),
334            RefactoringType::OptimizeImports {
335                remove_unused,
336                sort_alphabetically,
337                group_by_type,
338            } => self.perform_optimize_imports(
339                remove_unused,
340                sort_alphabetically,
341                group_by_type,
342                &files,
343            ),
344            RefactoringType::Inline { symbol_name, all_occurrences } => {
345                self.perform_inline(&symbol_name, all_occurrences, &files)
346            }
347        };
348
349        // Record operation in history
350        let operation = RefactoringOperation {
351            id: operation_id.clone(),
352            operation_type,
353            modified_files: files,
354            timestamp: std::time::SystemTime::now(),
355            backup_info,
356        };
357        self.operation_history.push(operation);
358
359        // Return result with operation ID
360        match result {
361            Ok(mut res) => {
362                res.operation_id = Some(operation_id);
363                Ok(res)
364            }
365            Err(e) => Err(e),
366        }
367    }
368
369    /// Rollback a previous refactoring operation
370    pub fn rollback(&mut self, operation_id: &str) -> ParseResult<RefactoringResult> {
371        // Find the operation in history
372        let operation =
373            self.operation_history.iter().find(|op| op.id == operation_id).ok_or_else(|| {
374                ParseError::SyntaxError {
375                    message: format!("Operation {} not found", operation_id),
376                    location: 0,
377                }
378            })?;
379
380        if let Some(backup_info) = &operation.backup_info {
381            // Restore files from backup
382            let mut restored_count = 0;
383            for (original, backup) in &backup_info.file_mappings {
384                if backup.exists() {
385                    std::fs::copy(backup, original).map_err(|e| ParseError::SyntaxError {
386                        message: format!("Failed to restore {}: {}", original.display(), e),
387                        location: 0,
388                    })?;
389                    restored_count += 1;
390                }
391            }
392
393            Ok(RefactoringResult {
394                success: true,
395                files_modified: restored_count,
396                changes_made: restored_count,
397                warnings: vec![],
398                errors: vec![],
399                operation_id: None,
400            })
401        } else {
402            Err(ParseError::SyntaxError {
403                message: "No backup available for rollback".to_string(),
404                location: 0,
405            })
406        }
407    }
408
409    /// Get list of recent operations
410    pub fn get_operation_history(&self) -> &[RefactoringOperation] {
411        &self.operation_history
412    }
413
414    /// Clear operation history and cleanup backups
415    pub fn clear_history(&mut self) -> ParseResult<BackupCleanupResult> {
416        let cleanup_result = self.cleanup_backup_directories()?;
417        self.operation_history.clear();
418        Ok(cleanup_result)
419    }
420
    /// Index a file for workspace-aware refactoring operations
    ///
    /// With the `workspace_refactor` feature enabled, converts `path` to a
    /// file URI and feeds `content` into the workspace index; each step's
    /// error is mapped onto `ParseError::SyntaxError` so callers see a
    /// uniform error type. Without the feature this is a no-op returning
    /// `Ok(())`.
    pub fn index_file(&mut self, path: &Path, content: &str) -> ParseResult<()> {
        #[cfg(feature = "workspace_refactor")]
        {
            // Filesystem path -> URI string -> parsed URL.
            let uri_str = crate::workspace_index::fs_path_to_uri(path).map_err(|e| {
                ParseError::SyntaxError {
                    message: format!("URI conversion failed: {}", e),
                    location: 0,
                }
            })?;
            let url = url::Url::parse(&uri_str).map_err(|e| ParseError::SyntaxError {
                message: format!("URL parsing failed: {}", e),
                location: 0,
            })?;
            self.workspace_refactor._index.index_file(url, content.to_string()).map_err(|e| {
                ParseError::SyntaxError { message: format!("Indexing failed: {}", e), location: 0 }
            })?;
        }
        // Suppress the unused-parameter warning when the feature is off.
        let _ = content; // Acknowledge when feature disabled
        Ok(())
    }
442
443    // Private implementation methods
444
445    fn generate_operation_id(&self) -> String {
446        let duration =
447            std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH).unwrap_or_default();
448        format!("refactor_{}_{}", duration.as_secs(), duration.subsec_nanos())
449    }
450
    /// Validates a refactoring operation before it is executed (safe mode).
    ///
    /// Checks the global file-count limit first, then applies
    /// operation-specific rules: identifier/sigil validity for renames,
    /// single-file + ordered-range requirements for method extraction,
    /// distinct source/target and existing parent directory for code moves,
    /// non-empty pattern/element lists, and existence checks for every file
    /// the operation will touch. Returns the first violation found as a
    /// `ParseError::SyntaxError`.
    fn validate_operation(
        &self,
        operation_type: &RefactoringType,
        files: &[PathBuf],
    ) -> ParseResult<()> {
        // Check file count limit
        if files.len() > self.config.max_files_per_operation {
            return Err(ParseError::SyntaxError {
                message: format!(
                    "Operation exceeds maximum file limit: {} files provided, {} allowed",
                    files.len(),
                    self.config.max_files_per_operation
                ),
                location: 0,
            });
        }

        // Operation-specific validation
        match operation_type {
            RefactoringType::SymbolRename { old_name, new_name, scope } => {
                self.validate_perl_identifier(old_name, "old_name")?;
                self.validate_perl_identifier(new_name, "new_name")?;

                // old_name and new_name must be different
                if old_name == new_name {
                    return Err(ParseError::SyntaxError {
                        message: format!(
                            "SymbolRename: old_name and new_name must be different (got '{}')",
                            old_name
                        ),
                        location: 0,
                    });
                }

                // Sigil consistency: if old_name has a sigil, new_name must have the same sigil
                // (renaming `$x` to `@x` would change the variable's type).
                let old_sigil = Self::extract_sigil(old_name);
                let new_sigil = Self::extract_sigil(new_name);
                if old_sigil != new_sigil {
                    return Err(ParseError::SyntaxError {
                        message: format!(
                            "SymbolRename: sigil mismatch - old_name '{}' has sigil {:?}, new_name '{}' has sigil {:?}",
                            old_name, old_sigil, new_name, new_sigil
                        ),
                        location: 0,
                    });
                }

                // Validate scope-specific file requirements
                match scope {
                    RefactoringScope::File(path) => {
                        self.validate_file_exists(path)?;
                    }
                    RefactoringScope::Directory(path) => {
                        self.validate_directory_exists(path)?;
                    }
                    RefactoringScope::FileSet(paths) => {
                        if paths.is_empty() {
                            return Err(ParseError::SyntaxError {
                                message: "FileSet scope requires at least one file".to_string(),
                                location: 0,
                            });
                        }
                        // Enforce max_files_per_operation on FileSet scope
                        // (the scope's own file list, independent of `files`).
                        if paths.len() > self.config.max_files_per_operation {
                            return Err(ParseError::SyntaxError {
                                message: format!(
                                    "FileSet scope exceeds maximum file limit: {} files provided, {} allowed",
                                    paths.len(),
                                    self.config.max_files_per_operation
                                ),
                                location: 0,
                            });
                        }
                        for path in paths {
                            self.validate_file_exists(path)?;
                        }
                    }
                    RefactoringScope::Workspace => {
                        // Workspace scope doesn't require specific files
                    }
                    RefactoringScope::Package { file, .. }
                    | RefactoringScope::Function { file, .. }
                    | RefactoringScope::Block { file, .. } => {
                        self.validate_file_exists(file)?;
                    }
                }
            }

            RefactoringType::ExtractMethod { method_name, start_position, end_position } => {
                self.validate_perl_subroutine_name(method_name)?;

                // ExtractMethod generates `sub name {}`, so method_name must be a bare identifier
                // (no leading '&' sigil, which would produce invalid Perl like `sub &foo {}`)
                if method_name.starts_with('&') {
                    return Err(ParseError::SyntaxError {
                        message: format!(
                            "ExtractMethod method_name must be a bare identifier (no leading '&'): got '{}'",
                            method_name
                        ),
                        location: 0,
                    });
                }

                // ExtractMethod requires exactly one file
                if files.is_empty() {
                    return Err(ParseError::SyntaxError {
                        message: "ExtractMethod requires a target file".to_string(),
                        location: 0,
                    });
                }
                if files.len() > 1 {
                    return Err(ParseError::SyntaxError {
                        message: "ExtractMethod operates on a single file".to_string(),
                        location: 0,
                    });
                }
                self.validate_file_exists(&files[0])?;

                // Validate position ordering (tuple comparison: line first,
                // then column; equal positions are rejected as an empty range)
                if start_position >= end_position {
                    return Err(ParseError::SyntaxError {
                        message: format!(
                            "Invalid extraction range: start {:?} must be before end {:?}",
                            start_position, end_position
                        ),
                        location: 0,
                    });
                }
            }

            RefactoringType::MoveCode { source_file, target_file, elements } => {
                self.validate_file_exists(source_file)?;

                // Reject moving code to the same file
                if source_file == target_file {
                    return Err(ParseError::SyntaxError {
                        message: format!(
                            "MoveCode: source_file and target_file must be different (got '{}')",
                            source_file.display()
                        ),
                        location: 0,
                    });
                }

                // Target file may not exist yet (will be created), but its
                // parent directory must already exist.
                if let Some(parent) = target_file.parent() {
                    if !parent.as_os_str().is_empty() && !parent.exists() {
                        return Err(ParseError::SyntaxError {
                            message: format!(
                                "Target directory does not exist: {}",
                                parent.display()
                            ),
                            location: 0,
                        });
                    }
                }

                if elements.is_empty() {
                    return Err(ParseError::SyntaxError {
                        message: "MoveCode requires at least one element to move".to_string(),
                        location: 0,
                    });
                }

                // Validate element names (subs or packages)
                for element in elements {
                    self.validate_perl_qualified_name(element)?;
                }
            }

            RefactoringType::Modernize { patterns } => {
                if patterns.is_empty() {
                    return Err(ParseError::SyntaxError {
                        message: "Modernize requires at least one pattern".to_string(),
                        location: 0,
                    });
                }
                // Modernize can work on explicit files or scan workspace,
                // so an empty `files` list is allowed here.
                for file in files {
                    self.validate_file_exists(file)?;
                }
            }

            RefactoringType::OptimizeImports { .. } => {
                // OptimizeImports can work on explicit files or scan workspace
                for file in files {
                    self.validate_file_exists(file)?;
                }
            }

            RefactoringType::Inline { symbol_name, .. } => {
                self.validate_perl_identifier(symbol_name, "symbol_name")?;

                // Inline requires at least one file
                if files.is_empty() {
                    return Err(ParseError::SyntaxError {
                        message: "Inline requires at least one target file".to_string(),
                        location: 0,
                    });
                }
                for file in files {
                    self.validate_file_exists(file)?;
                }
            }
        }

        Ok(())
    }
659
660    /// Validates a Perl identifier (variable, subroutine, or package name).
661    ///
662    /// Perl identifiers can have sigils ($, @, %, &, *) and the name portion
663    /// must start with a letter or underscore, followed by alphanumerics/underscores.
664    fn validate_perl_identifier(&self, name: &str, param_name: &str) -> ParseResult<()> {
665        if name.is_empty() {
666            return Err(ParseError::SyntaxError {
667                message: format!("{} cannot be empty", param_name),
668                location: 0,
669            });
670        }
671
672        // Strip optional sigil
673        let bare_name = name.strip_prefix(['$', '@', '%', '&', '*']).unwrap_or(name);
674
675        if bare_name.is_empty() {
676            return Err(ParseError::SyntaxError {
677                message: format!("{} cannot be only a sigil", param_name),
678                location: 0,
679            });
680        }
681
682        // Handle qualified names (Package::name)
683        // Allow leading :: (for main package or absolute names), but reject:
684        // - trailing :: (like "Foo::")
685        // - double :: in the middle (like "Foo::::Bar")
686        let parts: Vec<&str> = bare_name.split("::").collect();
687        for (i, part) in parts.iter().enumerate() {
688            if part.is_empty() {
689                // Allow empty only at position 0 (leading ::)
690                if i == 0 {
691                    continue;
692                }
693                // Reject trailing :: or double ::
694                return Err(ParseError::SyntaxError {
695                    message: format!(
696                        "Invalid Perl identifier in {}: '{}' (contains empty segment - trailing or double ::)",
697                        param_name, name
698                    ),
699                    location: 0,
700                });
701            }
702            if !is_valid_identifier_part(part) {
703                return Err(ParseError::SyntaxError {
704                    message: format!(
705                        "Invalid Perl identifier in {}: '{}' (must start with letter/underscore)",
706                        param_name, name
707                    ),
708                    location: 0,
709                });
710            }
711        }
712
713        Ok(())
714    }
715
716    /// Validates a Perl subroutine name (no sigil allowed, but & is optional).
717    fn validate_perl_subroutine_name(&self, name: &str) -> ParseResult<()> {
718        if name.is_empty() {
719            return Err(ParseError::SyntaxError {
720                message: "Subroutine name cannot be empty".to_string(),
721                location: 0,
722            });
723        }
724
725        // Strip optional & sigil (only valid sigil for subs)
726        let bare_name = name.strip_prefix('&').unwrap_or(name);
727
728        // Reject other sigils
729        if bare_name.starts_with(['$', '@', '%', '*']) {
730            return Err(ParseError::SyntaxError {
731                message: format!("Invalid sigil for subroutine name: '{}'", name),
732                location: 0,
733            });
734        }
735
736        if !is_valid_identifier_part(bare_name) {
737            return Err(ParseError::SyntaxError {
738                message: format!(
739                    "Invalid subroutine name: '{}' (must start with letter/underscore)",
740                    name
741                ),
742                location: 0,
743            });
744        }
745
746        Ok(())
747    }
748
749    /// Validates a qualified Perl name (Package::Subpackage::name).
750    /// Used for MoveCode elements - does not allow sigils, leading ::, trailing ::, or double ::.
751    fn validate_perl_qualified_name(&self, name: &str) -> ParseResult<()> {
752        validate_package_name(name).map_err(|error| ParseError::SyntaxError {
753            message: format!("Invalid qualified name '{}': {}", name, error),
754            location: 0,
755        })
756    }
757
758    /// Extracts the sigil from a Perl identifier, if present.
759    fn extract_sigil(name: &str) -> Option<char> {
760        let first_char = name.chars().next()?;
761        if matches!(first_char, '$' | '@' | '%' | '&' | '*') { Some(first_char) } else { None }
762    }
763
764    fn validate_file_exists(&self, path: &Path) -> ParseResult<()> {
765        if !path.exists() {
766            return Err(ParseError::SyntaxError {
767                message: format!("File does not exist: {}", path.display()),
768                location: 0,
769            });
770        }
771        if !path.is_file() {
772            return Err(ParseError::SyntaxError {
773                message: format!("Path is not a file: {}", path.display()),
774                location: 0,
775            });
776        }
777        Ok(())
778    }
779
780    fn validate_directory_exists(&self, path: &Path) -> ParseResult<()> {
781        if !path.exists() {
782            return Err(ParseError::SyntaxError {
783                message: format!("Directory does not exist: {}", path.display()),
784                location: 0,
785            });
786        }
787        if !path.is_dir() {
788            return Err(ParseError::SyntaxError {
789                message: format!("Path is not a directory: {}", path.display()),
790                location: 0,
791            });
792        }
793        Ok(())
794    }
795
796    fn create_backup(&self, files: &[PathBuf], operation_id: &str) -> ParseResult<BackupInfo> {
797        let backup_dir = self.backup_root().join(operation_id);
798
799        if !backup_dir.exists() {
800            std::fs::create_dir_all(&backup_dir).map_err(|e| ParseError::SyntaxError {
801                message: format!("Failed to create backup directory: {}", e),
802                location: 0,
803            })?;
804        }
805
806        let mut file_mappings = HashMap::new();
807
808        for (i, file) in files.iter().enumerate() {
809            if file.exists() {
810                // Use index and extension to create a unique, safe filename
811                let extension = file.extension().and_then(|s| s.to_str()).unwrap_or("");
812                let backup_filename = if extension.is_empty() {
813                    format!("file_{}", i)
814                } else {
815                    format!("file_{}.{}", i, extension)
816                };
817
818                let backup_path = backup_dir.join(backup_filename);
819
820                std::fs::copy(file, &backup_path).map_err(|e| ParseError::SyntaxError {
821                    message: format!("Failed to create backup for {}: {}", file.display(), e),
822                    location: 0,
823                })?;
824
825                file_mappings.insert(file.clone(), backup_path);
826            }
827        }
828
829        Ok(BackupInfo { backup_dir, file_mappings })
830    }
831
832    /// Returns the backup root directory, using the configured path or the default temp location.
833    fn backup_root(&self) -> PathBuf {
834        self.config
835            .backup_root
836            .clone()
837            .unwrap_or_else(|| std::env::temp_dir().join("perl_refactor_backups"))
838    }
839
840    fn cleanup_backup_directories(&self) -> ParseResult<BackupCleanupResult> {
841        let backup_root = self.backup_root();
842
843        if !backup_root.exists() {
844            return Ok(BackupCleanupResult { directories_removed: 0, space_reclaimed: 0 });
845        }
846
847        // Collect all backup directories with metadata
848        let mut backup_dirs = self.collect_backup_directories(&backup_root)?;
849
850        // Apply retention policies
851        let dirs_to_remove = self.apply_retention_policies(&mut backup_dirs)?;
852
853        // Remove selected backup directories and calculate space reclaimed
854        let (directories_removed, space_reclaimed) =
855            self.remove_backup_directories(&dirs_to_remove)?;
856
857        Ok(BackupCleanupResult { directories_removed, space_reclaimed })
858    }
859
860    fn collect_backup_directories(
861        &self,
862        backup_root: &PathBuf,
863    ) -> ParseResult<Vec<BackupDirMetadata>> {
864        let mut backup_dirs = Vec::new();
865
866        let entries = std::fs::read_dir(backup_root).map_err(|e| ParseError::SyntaxError {
867            message: format!("Failed to read backup directory: {}", e),
868            location: 0,
869        })?;
870
871        for entry in entries {
872            let entry = entry.map_err(|e| ParseError::SyntaxError {
873                message: format!("Failed to read directory entry: {}", e),
874                location: 0,
875            })?;
876
877            let path = entry.path();
878            if path.is_dir() {
879                // Validate backup directory structure
880                if self.validate_backup_directory(&path)? {
881                    let metadata =
882                        std::fs::metadata(&path).map_err(|e| ParseError::SyntaxError {
883                            message: format!(
884                                "Failed to read metadata for {}: {}",
885                                path.display(),
886                                e
887                            ),
888                            location: 0,
889                        })?;
890
891                    let modified = metadata.modified().map_err(|e| ParseError::SyntaxError {
892                        message: format!(
893                            "Failed to get modification time for {}: {}",
894                            path.display(),
895                            e
896                        ),
897                        location: 0,
898                    })?;
899
900                    let size = self.calculate_directory_size(&path)?;
901
902                    backup_dirs.push(BackupDirMetadata { path, modified, size });
903                }
904            }
905        }
906
907        Ok(backup_dirs)
908    }
909
910    fn validate_backup_directory(&self, dir: &PathBuf) -> ParseResult<bool> {
911        // Check if directory name starts with "refactor_" (expected pattern)
912        let dir_name = dir.file_name().and_then(|n| n.to_str()).unwrap_or("");
913
914        if !dir_name.starts_with("refactor_") {
915            return Ok(false);
916        }
917
918        // Ensure it's a directory and not a symlink (security check)
919        let metadata = std::fs::symlink_metadata(dir).map_err(|e| ParseError::SyntaxError {
920            message: format!("Failed to read symlink metadata for {}: {}", dir.display(), e),
921            location: 0,
922        })?;
923
924        if !metadata.is_dir() || metadata.file_type().is_symlink() {
925            return Ok(false);
926        }
927
928        Ok(true)
929    }
930
931    fn calculate_directory_size(&self, dir: &PathBuf) -> ParseResult<u64> {
932        let mut total_size = 0u64;
933
934        let entries = std::fs::read_dir(dir).map_err(|e| ParseError::SyntaxError {
935            message: format!("Failed to read directory {}: {}", dir.display(), e),
936            location: 0,
937        })?;
938
939        for entry in entries {
940            let entry = entry.map_err(|e| ParseError::SyntaxError {
941                message: format!("Failed to read entry: {}", e),
942                location: 0,
943            })?;
944
945            let metadata = entry.metadata().map_err(|e| ParseError::SyntaxError {
946                message: format!("Failed to read entry metadata: {}", e),
947                location: 0,
948            })?;
949
950            if metadata.is_file() {
951                total_size += metadata.len();
952            }
953        }
954
955        Ok(total_size)
956    }
957
    /// Decides which backup directories should be deleted under the
    /// configured retention policies and returns their paths.
    ///
    /// Two policies run in order:
    /// 1. Age-based (`backup_max_age_seconds > 0`): entries older than the
    ///    limit are scheduled for removal and dropped from `backup_dirs`.
    /// 2. Count-based (`max_backup_retention`): `0` means "remove all
    ///    remaining"; `N > 0` keeps at most the N newest survivors.
    ///
    /// Note: `backup_dirs` is mutated — it is sorted oldest-first and the
    /// age-expired entries are removed before the count policy is applied.
    fn apply_retention_policies(
        &self,
        backup_dirs: &mut Vec<BackupDirMetadata>,
    ) -> ParseResult<Vec<PathBuf>> {
        let mut dirs_to_remove = Vec::new();

        // Sort by modification time (oldest first)
        backup_dirs.sort_by_key(|d| d.modified);

        let now = std::time::SystemTime::now();

        // Apply age-based retention policy
        if self.config.backup_max_age_seconds > 0 {
            let max_age = std::time::Duration::from_secs(self.config.backup_max_age_seconds);

            backup_dirs.retain(|dir| {
                // duration_since errs if `modified` is in the future (clock
                // skew); such entries are conservatively kept.
                if let Ok(age) = now.duration_since(dir.modified) {
                    if age > max_age {
                        dirs_to_remove.push(dir.path.clone());
                        return false;
                    }
                }
                true
            });
        }

        // Apply count-based retention policy
        // max_backup_retention = 0 means "remove all", > 0 means "keep at most N"
        if self.config.max_backup_retention == 0 {
            // Remove all remaining backups
            for dir in backup_dirs.iter() {
                if !dirs_to_remove.contains(&dir.path) {
                    dirs_to_remove.push(dir.path.clone());
                }
            }
        } else if backup_dirs.len() > self.config.max_backup_retention {
            // Oldest-first order means take(excess_count) drops the oldest.
            let excess_count = backup_dirs.len() - self.config.max_backup_retention;
            for dir in backup_dirs.iter().take(excess_count) {
                if !dirs_to_remove.contains(&dir.path) {
                    dirs_to_remove.push(dir.path.clone());
                }
            }
        }

        Ok(dirs_to_remove)
    }
1004
1005    fn remove_backup_directories(&self, dirs_to_remove: &[PathBuf]) -> ParseResult<(usize, u64)> {
1006        let mut directories_removed = 0;
1007        let mut space_reclaimed = 0u64;
1008
1009        for dir in dirs_to_remove {
1010            let size = self.calculate_directory_size(dir)?;
1011
1012            std::fs::remove_dir_all(dir).map_err(|e| ParseError::SyntaxError {
1013                message: format!("Failed to remove backup directory {}: {}", dir.display(), e),
1014                location: 0,
1015            })?;
1016
1017            directories_removed += 1;
1018            space_reclaimed += size;
1019        }
1020
1021        Ok((directories_removed, space_reclaimed))
1022    }
1023
1024    fn perform_symbol_rename(
1025        &mut self,
1026        old_name: &str,
1027        new_name: &str,
1028        scope: &RefactoringScope,
1029    ) -> ParseResult<RefactoringResult> {
1030        #[cfg(feature = "workspace_refactor")]
1031        {
1032            let rename_result = match scope {
1033                RefactoringScope::Workspace
1034                | RefactoringScope::File(_)
1035                | RefactoringScope::Directory(_)
1036                | RefactoringScope::FileSet(_)
1037                | RefactoringScope::Package { .. }
1038                | RefactoringScope::Function { .. }
1039                | RefactoringScope::Block { .. } => {
1040                    // For workspace scope or any other scope, we use rename_symbol
1041                    // The underlying WorkspaceRefactor uses the WorkspaceIndex to find all occurrences
1042                    // based on the symbol key (pkg + name + sigil).
1043                    let target_file = match scope {
1044                        RefactoringScope::File(path) => path,
1045                        RefactoringScope::Package { file, .. } => file,
1046                        RefactoringScope::Function { file, .. } => file,
1047                        RefactoringScope::Block { file, .. } => file,
1048                        _ => Path::new(""),
1049                    };
1050
1051                    self.workspace_refactor.rename_symbol(old_name, new_name, target_file, (0, 0))
1052                }
1053            };
1054
1055            match rename_result {
1056                Ok(result) => {
1057                    let filtered_result = self.filter_rename_result_by_scope(result, scope)?;
1058                    let files_modified = self.apply_file_edits(&filtered_result.file_edits)?;
1059                    let changes_made =
1060                        filtered_result.file_edits.iter().map(|e| e.edits.len()).sum();
1061                    println!(
1062                        "perform_symbol_rename DEBUG: result.success=true, files_modified={}, changes_made={}",
1063                        files_modified, changes_made
1064                    );
1065
1066                    let refac_result = RefactoringResult {
1067                        success: true,
1068                        files_modified,
1069                        changes_made,
1070                        warnings: vec![],
1071                        errors: vec![],
1072                        operation_id: None,
1073                    };
1074                    println!("perform_symbol_rename DEBUG: returning result: {:?}", refac_result);
1075                    Ok(refac_result)
1076                }
1077                Err(e) => Ok(RefactoringResult {
1078                    success: false,
1079                    files_modified: 0,
1080                    changes_made: 0,
1081                    warnings: vec![],
1082                    errors: vec![format!("Rename failed: {}", e)],
1083                    operation_id: None,
1084                }),
1085            }
1086        }
1087
1088        #[cfg(not(feature = "workspace_refactor"))]
1089        {
1090            Ok(RefactoringResult {
1091                success: false,
1092                files_modified: 0,
1093                changes_made: 0,
1094                warnings: vec!["Workspace refactoring feature disabled".to_string()],
1095                errors: vec![],
1096                operation_id: None,
1097            })
1098        }
1099    }
1100
    #[cfg(feature = "workspace_refactor")]
    /// Narrows a workspace-wide rename result to the requested scope.
    ///
    /// `Workspace`, `Directory`, and `FileSet` pass through unchanged (no
    /// narrowing is implemented for them here). `File` keeps only edits
    /// targeting that file. `Package`/`Function` keep only edits inside the
    /// element's byte range in `file`, found via text heuristics; when the
    /// element cannot be located, ALL edits are dropped rather than risking
    /// edits outside the requested scope. `Block` keeps only edits inside
    /// the given (line, column) span.
    fn filter_rename_result_by_scope(
        &self,
        mut result: crate::workspace_refactor::RefactorResult,
        scope: &RefactoringScope,
    ) -> ParseResult<crate::workspace_refactor::RefactorResult> {
        match scope {
            RefactoringScope::Workspace
            | RefactoringScope::Directory(_)
            | RefactoringScope::FileSet(_) => Ok(result),
            RefactoringScope::File(target_file) => {
                result
                    .file_edits
                    .retain(|file_edit| Self::paths_match(&file_edit.file_path, target_file));
                Ok(result)
            }
            RefactoringScope::Package { file, name } => {
                let source = fs::read_to_string(file).map_err(|error| ParseError::SyntaxError {
                    message: format!("Failed to read file {}: {error}", file.display()),
                    location: 0,
                })?;

                // Package not found: clear everything (conservative).
                let Some((start_off, end_off)) = Self::find_package_byte_range(&source, name)
                else {
                    result.file_edits.clear();
                    return Ok(result);
                };

                result.file_edits = Self::filter_file_edits_to_range(
                    std::mem::take(&mut result.file_edits),
                    file,
                    start_off,
                    end_off,
                );
                Ok(result)
            }
            RefactoringScope::Function { file, name } => {
                let source = fs::read_to_string(file).map_err(|error| ParseError::SyntaxError {
                    message: format!("Failed to read file {}: {error}", file.display()),
                    location: 0,
                })?;

                // Function not found: same conservative clearing as above.
                let Some((start_off, end_off)) = Self::find_function_byte_range(&source, name)
                else {
                    result.file_edits.clear();
                    return Ok(result);
                };

                result.file_edits = Self::filter_file_edits_to_range(
                    std::mem::take(&mut result.file_edits),
                    file,
                    start_off,
                    end_off,
                );
                Ok(result)
            }
            RefactoringScope::Block { file, start, end } => {
                let source = fs::read_to_string(file).map_err(|error| ParseError::SyntaxError {
                    message: format!("Failed to read file {}: {error}", file.display()),
                    location: 0,
                })?;
                let line_index = LineIndex::new(source.clone());
                let start_off =
                    Self::offset_with_fallback(&line_index, &source, start.0, start.1, false);
                let end_off = Self::offset_with_fallback(&line_index, &source, end.0, end.1, true);

                // Tolerate reversed selections by normalizing the range.
                let (start_off, end_off) =
                    if start_off <= end_off { (start_off, end_off) } else { (end_off, start_off) };

                result.file_edits = Self::filter_file_edits_to_range(
                    std::mem::take(&mut result.file_edits),
                    file,
                    start_off,
                    end_off,
                );
                Ok(result)
            }
        }
    }
1180
1181    #[cfg(feature = "workspace_refactor")]
1182    fn filter_file_edits_to_range(
1183        file_edits: Vec<crate::workspace_refactor::FileEdit>,
1184        target_file: &Path,
1185        start_off: usize,
1186        end_off: usize,
1187    ) -> Vec<crate::workspace_refactor::FileEdit> {
1188        file_edits
1189            .into_iter()
1190            .filter_map(|mut file_edit| {
1191                if !Self::paths_match(&file_edit.file_path, target_file) {
1192                    return None;
1193                }
1194
1195                file_edit.edits.retain(|edit| edit.start >= start_off && edit.end <= end_off);
1196                if file_edit.edits.is_empty() { None } else { Some(file_edit) }
1197            })
1198            .collect()
1199    }
1200
1201    #[cfg(feature = "workspace_refactor")]
1202    fn paths_match(a: &Path, b: &Path) -> bool {
1203        if a == b {
1204            return true;
1205        }
1206
1207        match (a.canonicalize(), b.canonicalize()) {
1208            (Ok(canonical_a), Ok(canonical_b)) => canonical_a == canonical_b,
1209            _ => false,
1210        }
1211    }
1212
1213    #[cfg(feature = "workspace_refactor")]
1214    fn find_package_byte_range(source: &str, package_name: &str) -> Option<(usize, usize)> {
1215        let package_decl = format!("package {package_name}");
1216        let start = source.find(&package_decl)?;
1217        let search_start = start + package_decl.len();
1218        let end = source[search_start..]
1219            .find("package ")
1220            .map(|idx| search_start + idx)
1221            .unwrap_or(source.len());
1222        Some((start, end))
1223    }
1224
1225    #[cfg(feature = "workspace_refactor")]
1226    fn find_function_byte_range(source: &str, function_name: &str) -> Option<(usize, usize)> {
1227        let sub_decl = format!("sub {function_name}");
1228        let start = source.find(&sub_decl)?;
1229        let open_brace =
1230            source[start + sub_decl.len()..].find('{').map(|idx| start + sub_decl.len() + idx)?;
1231
1232        let mut depth = 0usize;
1233        for (relative_idx, ch) in source[open_brace..].char_indices() {
1234            match ch {
1235                '{' => depth += 1,
1236                '}' => {
1237                    depth = depth.saturating_sub(1);
1238                    if depth == 0 {
1239                        let end = open_brace + relative_idx + ch.len_utf8();
1240                        return Some((start, end));
1241                    }
1242                }
1243                _ => {}
1244            }
1245        }
1246
1247        None
1248    }
1249
1250    #[cfg(feature = "workspace_refactor")]
1251    fn offset_with_fallback(
1252        line_index: &LineIndex,
1253        source: &str,
1254        line: u32,
1255        column: u32,
1256        end_boundary: bool,
1257    ) -> usize {
1258        if let Some(offset) = line_index.position_to_offset(line, column) {
1259            return offset;
1260        }
1261
1262        if end_boundary {
1263            if let Some(next_line_start) = line_index.position_to_offset(line.saturating_add(1), 0)
1264            {
1265                return next_line_start;
1266            }
1267            return source.len();
1268        }
1269
1270        line_index.position_to_offset(line, 0).unwrap_or(0)
1271    }
1272
    /// Extracts the source region between `start_position` and
    /// `end_position` ((line, column) pairs, passed to the line index
    /// as-is) from the first file in `files` into a new subroutine
    /// `method_name`, replacing the region with a call to it.
    ///
    /// Inputs and outputs of the new sub come from `analyze_extraction`:
    /// inputs become `my (...) = @_;`, outputs become a `return (...);`
    /// plus a list assignment at the call site. The new sub is inserted
    /// before a trailing `1;`, `__DATA__`, or `__END__` marker when one
    /// exists, otherwise appended. With `safe_mode` set the file is not
    /// written (dry run), but a success result is still returned.
    fn perform_extract_method(
        &mut self,
        method_name: &str,
        start_position: (usize, usize),
        end_position: (usize, usize),
        files: &[PathBuf],
    ) -> ParseResult<RefactoringResult> {
        // Extraction is a single-file operation: only the first file is used.
        let file_path = if let Some(f) = files.first() {
            f
        } else {
            return Err(ParseError::SyntaxError {
                message: "No file specified for extraction".to_string(),
                location: 0,
            });
        };

        let source_code = std::fs::read_to_string(file_path).map_err(|e| {
            ParseError::SyntaxError { message: format!("Failed to read file: {}", e), location: 0 }
        })?;

        // Preserve the file's existing line-ending convention (CRLF vs LF).
        let line_ending = if source_code.contains("\r\n") { "\r\n" } else { "\n" };

        // Calculate offsets
        let line_index = LineIndex::new(source_code.clone());
        let start_offset = line_index
            .position_to_offset(start_position.0 as u32, start_position.1 as u32)
            .ok_or_else(|| ParseError::SyntaxError {
                message: "Invalid start position".to_string(),
                location: 0,
            })?;
        let end_offset = line_index
            .position_to_offset(end_position.0 as u32, end_position.1 as u32)
            .ok_or_else(|| ParseError::SyntaxError {
                message: "Invalid end position".to_string(),
                location: 0,
            })?;

        if start_offset >= end_offset {
            return Err(ParseError::SyntaxError {
                message: "Start position must be before end position".to_string(),
                location: 0,
            });
        }

        // Parse
        let mut parser = Parser::new(&source_code);
        let ast = parser.parse()?;

        // Analyze variables: which ones flow in (parameters) and out (returns).
        let analysis = analyze_extraction(&ast, start_offset, end_offset);

        // Generate Code
        let extracted_code = &source_code[start_offset..end_offset];

        let mut new_sub = format!(
            "{}# Extracted from lines {}-{} {}sub {} {{{}",
            line_ending,
            start_position.0 + 1,
            end_position.0, // end position is exclusive in display usually if it's (line, 0)
            line_ending,
            method_name,
            line_ending
        );

        // Handle inputs
        if !analysis.inputs.is_empty() {
            new_sub.push_str(
                &format!("    my ({}) = @_;\n", analysis.inputs.join(", "))
                    .replace('\n', line_ending),
            );
        }

        // Body
        new_sub.push_str("    ");
        new_sub.push_str(extracted_code.trim());
        new_sub.push_str(line_ending);

        // Handle outputs
        if !analysis.outputs.is_empty() {
            new_sub.push_str(
                &format!("    return ({});\n", analysis.outputs.join(", "))
                    .replace('\n', line_ending),
            );
        }
        new_sub.push_str("}\n".replace('\n', line_ending).as_str());

        // Identify indentation for the call site: taken from the first
        // non-blank extracted line, falling back to the whitespace prefix
        // of the line the extraction starts on.
        let mut indentation = String::new();
        if let Some(first_line) = extracted_code.lines().find(|l| !l.trim().is_empty()) {
            let trimmed = first_line.trim_start();
            indentation = first_line[..first_line.len() - trimmed.len()].to_string();
        } else if let Some(line_start) = source_code[..start_offset].rfind('\n') {
            let prefix = &source_code[line_start + 1..start_offset];
            if prefix.trim().is_empty() {
                indentation = prefix.to_string();
            }
        }

        // Generate Call
        let inputs_str = analysis.inputs.join(", ");
        let mut call = format!("{}({})", method_name, inputs_str);

        if !analysis.outputs.is_empty() {
            let outputs_str = analysis.outputs.join(", ");
            call = format!("({}) = {}", outputs_str, call);
        }
        call.push(';');

        // Add indentation and newline if appropriate
        let mut call_with_indent = format!("{}{}", indentation, call);
        if source_code[start_offset..end_offset].ends_with('\n') {
            call_with_indent.push_str(line_ending);
        }

        // Apply changes: if the text just before the selection is exactly
        // the detected indentation, absorb it so the call is not
        // double-indented.
        let mut final_source = String::new();
        let prefix_len =
            if source_code[..start_offset].ends_with(&indentation) { indentation.len() } else { 0 };
        final_source.push_str(&source_code[..start_offset - prefix_len]);
        final_source.push_str(&call_with_indent);
        final_source.push_str(&source_code[end_offset..]);

        // Find smart placement for the new subroutine
        let insert_pos = if let Some(idx) = final_source.rfind(&format!("{}1;", line_ending)) {
            // Place before the final 1;
            idx + line_ending.len()
        } else if let Some(idx) = final_source.rfind(&format!("{}__DATA__", line_ending)) {
            idx + line_ending.len()
        } else if let Some(idx) = final_source.rfind(&format!("{}__END__", line_ending)) {
            idx + line_ending.len()
        } else {
            final_source.len()
        };

        final_source.insert_str(insert_pos, &new_sub);

        // Safe mode is a dry run: report the result without touching disk.
        if !self.config.safe_mode {
            std::fs::write(file_path, final_source).map_err(|e| ParseError::SyntaxError {
                message: format!("Failed to write file: {}", e),
                location: 0,
            })?;
        }

        Ok(RefactoringResult {
            success: true,
            files_modified: 1,
            changes_made: 2, // call + sub
            warnings: vec![],
            errors: vec![],
            operation_id: None,
        })
    }
1425
    /// Moves the named subroutines in `elements` from `source_file` into
    /// `target_file`.
    ///
    /// Both files are read (and the source parsed) before anything is
    /// written, so an early failure cannot leave the pair half-modified.
    /// The target file is written first: if the second write fails, code
    /// is duplicated rather than lost. Moved subs keep their relative
    /// order and are inserted before a trailing `1;` / `return 1;` when
    /// present. Imports and references are NOT updated; a warning is
    /// always added to say so.
    fn perform_move_code(
        &mut self,
        source_file: &Path,
        target_file: &Path,
        elements: &[String],
    ) -> ParseResult<RefactoringResult> {
        // Validate that source and target are different files
        // (canonicalized, so two spellings of one path are caught too).
        let source_path = fs::canonicalize(source_file).map_err(|e| ParseError::SyntaxError {
            message: format!("Failed to resolve source path: {}", e),
            location: 0,
        })?;
        let target_path = fs::canonicalize(target_file).map_err(|e| ParseError::SyntaxError {
            message: format!("Failed to resolve target path: {}", e),
            location: 0,
        })?;

        if source_path == target_path {
            return Err(ParseError::SyntaxError {
                message: "Source and target files must be different".to_string(),
                location: 0,
            });
        }

        // Read files first to prevent partial failure/data loss
        let source_content =
            fs::read_to_string(&source_path).map_err(|e| ParseError::SyntaxError {
                message: format!("Failed to read source file: {}", e),
                location: 0,
            })?;

        let mut target_content =
            fs::read_to_string(&target_path).map_err(|e| ParseError::SyntaxError {
                message: format!("Failed to read target file: {}", e),
                location: 0,
            })?;

        // Parse source file to find elements
        let mut parser = Parser::new(&source_content);
        let ast = parser.parse().map_err(|e| ParseError::SyntaxError {
            message: format!("Failed to parse source file: {}", e),
            location: 0,
        })?;

        // Store location AND content for each element: the content is
        // captured now because the source buffer is edited afterwards.
        struct ElementToMove {
            location: SourceLocation,
            content: String,
        }

        let mut elements_to_move: Vec<ElementToMove> = Vec::new();
        let mut warnings = Vec::new();

        // Find elements in the AST. NOTE(review): this walks the AST's
        // direct children only — presumably nested subs are not candidates;
        // confirm against for_each_child's traversal semantics.
        let mut found_names: HashSet<String> = HashSet::new();
        ast.for_each_child(|child| {
            if let NodeKind::Subroutine { name, .. } = &child.kind {
                if let Some(sub_name) = name {
                    if elements.contains(sub_name) {
                        found_names.insert(sub_name.clone());
                        elements_to_move.push(ElementToMove {
                            location: child.location,
                            content: source_content[child.location.start..child.location.end]
                                .to_string(),
                        });
                    }
                }
            }
        });

        // Warn about elements that weren't found
        for element in elements {
            if !found_names.contains(element) {
                warnings.push(format!("Subroutine '{}' not found in source file", element));
            }
        }

        if elements_to_move.is_empty() {
            return Ok(RefactoringResult {
                success: false,
                files_modified: 0,
                changes_made: 0,
                warnings: vec!["No elements found to move".to_string()],
                errors: vec![],
                operation_id: None,
            });
        }

        // Sort by start position descending for safe removal from source
        // (removing from the end keeps earlier byte offsets valid).
        elements_to_move.sort_by(|a, b| b.location.start.cmp(&a.location.start));

        let mut modified_source = source_content.clone();

        // Remove from source (in descending order)
        for element in &elements_to_move {
            let start = element.location.start;
            let end = element.location.end;

            // Check for trailing newline to remove
            let remove_end =
                if end < modified_source.len() && modified_source.as_bytes()[end] == b'\n' {
                    end + 1
                } else {
                    end
                };

            modified_source.replace_range(start..remove_end, "");
        }

        // Sort by start position ascending for correct append order
        elements_to_move.sort_by(|a, b| a.location.start.cmp(&b.location.start));

        // Construct moved content
        let mut moved_content = String::new();
        for element in &elements_to_move {
            moved_content.push_str(&element.content);
            moved_content.push('\n');
        }

        // Calculate insertion point in target so a trailing `1;` or
        // `return 1;` stays the last statement of the module.
        let insertion_index = if let Some(idx) = target_content.rfind("\n1;") {
            idx + 1 // Insert after the newline, before 1;
        } else if let Some(idx) = target_content.rfind("\nreturn 1;") {
            idx + 1
        } else {
            target_content.len()
        };

        if insertion_index < target_content.len() {
            // moved_content already ends with newline from loop above
            target_content.insert_str(insertion_index, &moved_content);
        } else {
            target_content.push('\n');
            target_content.push_str(&moved_content);
        }

        // Write files - Target first, then Source (safer)
        fs::write(&target_path, target_content).map_err(|e| ParseError::SyntaxError {
            message: format!("Failed to write to target file: {}", e),
            location: 0,
        })?;

        fs::write(&source_path, modified_source).map_err(|e| ParseError::SyntaxError {
            message: format!("Failed to write source file: {}", e),
            location: 0,
        })?;

        // Add warning about missing dependency analysis
        warnings.push("Warning: Imports and references were not updated. Please review the moved code for missing dependencies.".to_string());

        Ok(RefactoringResult {
            success: true,
            files_modified: 2,
            changes_made: elements_to_move.len(),
            warnings,
            errors: vec![],
            operation_id: None,
        })
    }
1584
1585    fn perform_modernize(
1586        &mut self,
1587        patterns: &[ModernizationPattern],
1588        files: &[PathBuf],
1589    ) -> ParseResult<RefactoringResult> {
1590        // Delegate to modernize engine
1591        let mut total_changes = 0;
1592        let mut modified_files = 0;
1593        let mut warnings = Vec::new();
1594
1595        for file in files {
1596            if let Ok(changes) = self.modernize.modernize_file(file, patterns) {
1597                if changes > 0 {
1598                    modified_files += 1;
1599                    total_changes += changes;
1600                }
1601            } else {
1602                warnings.push(format!("Failed to modernize {}", file.display()));
1603            }
1604        }
1605
1606        Ok(RefactoringResult {
1607            success: true,
1608            files_modified: modified_files,
1609            changes_made: total_changes,
1610            warnings,
1611            errors: vec![],
1612            operation_id: None,
1613        })
1614    }
1615
    /// Analyze import statements in each file and tally the changes the
    /// requested options would produce.
    ///
    /// - `remove_unused` counts one change per unused import found.
    /// - `sort_alphabetically` and `group_by_type` each count as a single
    ///   change per file (whether or not the file is already sorted/grouped).
    ///
    /// NOTE(review): this method only *counts* prospective changes from the
    /// optimizer's analysis — nothing in this body writes optimized imports
    /// back to disk. Confirm whether application happens elsewhere or whether
    /// this is an intentional dry-run.
    ///
    /// # Errors
    ///
    /// Propagates analysis failures from the import optimizer as
    /// `ParseError::SyntaxError`.
    fn perform_optimize_imports(
        &mut self,
        remove_unused: bool,
        sort_alphabetically: bool,
        group_by_type: bool,
        files: &[PathBuf],
    ) -> ParseResult<RefactoringResult> {
        // Delegate to import optimizer
        let mut total_changes = 0;
        let mut modified_files = 0;

        for file in files {
            let analysis = self
                .import_optimizer
                .analyze_file(file)
                .map_err(|e| ParseError::SyntaxError { message: e, location: 0 })?;
            let mut changes_made = 0;

            if remove_unused && !analysis.unused_imports.is_empty() {
                changes_made += analysis.unused_imports.len();
            }

            if sort_alphabetically {
                changes_made += 1; // Count sorting as one change per file
            }

            if group_by_type {
                changes_made += 1; // Count grouping as one change per file
            }

            if changes_made > 0 {
                modified_files += 1;
                total_changes += changes_made;
            }
        }

        Ok(RefactoringResult {
            success: true,
            files_modified: modified_files,
            changes_made: total_changes,
            warnings: vec![],
            errors: vec![],
            operation_id: None,
        })
    }
1661
    /// Inline a variable's definition at its usage sites.
    ///
    /// Only sigil-prefixed variables (`$scalar`, `@array`, `%hash`) are
    /// supported; any other symbol produces a warning and a failed result.
    /// When `all_occurrences` is set, a workspace-wide inline is driven by
    /// the workspace index starting from the definition file (`files[0]`);
    /// otherwise each file in `files` is tried in order and the first
    /// successful single-file inline wins.
    ///
    /// Failure modes (symbol not found, feature disabled, unsupported symbol
    /// kind) are reported via `Ok` with `success: false` and warnings rather
    /// than `Err`, so callers always receive a `RefactoringResult`.
    fn perform_inline(
        &mut self,
        symbol_name: &str,
        all_occurrences: bool, // AC1: Implement multi-file occurrence inlining
        files: &[PathBuf],
    ) -> ParseResult<RefactoringResult> {
        let mut warnings = Vec::new();

        // Variable inlining - only supported for variables with sigils
        if symbol_name.starts_with('$')
            || symbol_name.starts_with('@')
            || symbol_name.starts_with('%')
        {
            #[cfg(feature = "workspace_refactor")]
            {
                if all_occurrences {
                    // Route to workspace-wide inlining — finds all references via WorkspaceIndex.
                    // Requires that files[0] is the definition file; callers must call
                    // index_file() for each workspace file before invoking this path so that
                    // cross-file lookup works. The `files` slice beyond [0] is not used here;
                    // the workspace index is the authoritative source of all occurrences.
                    // Known limitation: SymbolKey uses pkg="main" hardcoded, so
                    // package-qualified variables from other packages fall through to a
                    // text-scan fallback inside inline_variable_all.
                    let def_file = files.first().ok_or_else(|| ParseError::SyntaxError {
                        message:
                            "Inline all_occurrences requires at least one file (definition file)"
                                .to_string(),
                        location: 0,
                    })?;
                    match self.workspace_refactor.inline_variable_all(symbol_name, def_file, (0, 0))
                    {
                        Ok(refactor_result) => {
                            let edits = refactor_result.file_edits;
                            if edits.is_empty() {
                                warnings.push(format!(
                                    "Symbol '{}' not found across workspace",
                                    symbol_name
                                ));
                                return Ok(RefactoringResult {
                                    success: false,
                                    files_modified: 0,
                                    changes_made: 0,
                                    warnings,
                                    errors: vec![],
                                    operation_id: None,
                                });
                            }
                            // Count edits before applying; apply_file_edits only reports
                            // how many files actually changed on disk.
                            let changes_made = edits.iter().map(|e| e.edits.len()).sum::<usize>();
                            let files_modified = self.apply_file_edits(&edits)?;
                            return Ok(RefactoringResult {
                                success: true,
                                files_modified,
                                changes_made,
                                warnings: refactor_result.warnings,
                                errors: vec![],
                                operation_id: None,
                            });
                        }
                        // On these error arms there is no early return: control falls
                        // through to the final failure result below, carrying warnings.
                        Err(crate::workspace_refactor::RefactorError::SymbolNotFound {
                            ..
                        }) => {
                            warnings.push(format!(
                                "Symbol '{}' definition not found in provided files",
                                symbol_name
                            ));
                        }
                        Err(e) => {
                            warnings.push(format!("Error during workspace inlining: {}", e));
                        }
                    }
                } else {
                    // Single-file path: iterate provided files, stop after first success.
                    let mut files_modified = 0;
                    let mut changes_made = 0;
                    let mut applied = false;

                    for file in files {
                        match self.workspace_refactor.inline_variable(symbol_name, file, (0, 0)) {
                            Ok(refactor_result) => {
                                let edits = refactor_result.file_edits;
                                if !edits.is_empty() {
                                    let mod_count = self.apply_file_edits(&edits)?;
                                    if mod_count > 0 {
                                        files_modified += mod_count;
                                        changes_made +=
                                            edits.iter().map(|e| e.edits.len()).sum::<usize>();
                                        applied = true;
                                        break;
                                    }
                                }
                            }
                            // Symbol not defined in this file: try the next one.
                            Err(crate::workspace_refactor::RefactorError::SymbolNotFound {
                                ..
                            }) => continue,
                            Err(e) => {
                                warnings.push(format!("Error checking {}: {}", file.display(), e));
                            }
                        }
                    }

                    // Only emit the generic "not found" warning when no file-specific
                    // error was already recorded.
                    if !applied && warnings.is_empty() {
                        warnings.push(format!(
                            "Symbol '{}' definition not found in provided files",
                            symbol_name
                        ));
                    }

                    return Ok(RefactoringResult {
                        success: applied,
                        files_modified,
                        changes_made,
                        warnings,
                        errors: vec![],
                        operation_id: None,
                    });
                }
            }

            #[cfg(not(feature = "workspace_refactor"))]
            {
                let _ = files; // Acknowledge parameter when feature is disabled
                warnings.push("Workspace refactoring feature is disabled".to_string());
            }
        } else {
            let _ = files; // Acknowledge parameter for non-variable symbols
            warnings.push(format!(
                "Inlining for symbol '{}' not implemented (only variables supported)",
                symbol_name
            ));
        }

        // Shared failure result for all fall-through paths above.
        Ok(RefactoringResult {
            success: false,
            files_modified: 0,
            changes_made: 0,
            warnings,
            errors: vec![],
            operation_id: None,
        })
    }
1803
1804    #[cfg(feature = "workspace_refactor")]
1805    fn apply_file_edits(
1806        &self,
1807        file_edits: &[crate::workspace_refactor::FileEdit],
1808    ) -> ParseResult<usize> {
1809        let mut files_modified = 0;
1810
1811        for file_edit in file_edits {
1812            if !file_edit.file_path.exists() {
1813                continue;
1814            }
1815
1816            let content = std::fs::read_to_string(&file_edit.file_path).map_err(|e| {
1817                ParseError::SyntaxError {
1818                    message: format!(
1819                        "Failed to read file {}: {}",
1820                        file_edit.file_path.display(),
1821                        e
1822                    ),
1823                    location: 0,
1824                }
1825            })?;
1826
1827            // Clone and sort edits by start position in descending order to apply them safely
1828            // (applying from end to start preserves earlier byte positions)
1829            let mut edits = file_edit.edits.clone();
1830            edits.sort_by(|a, b| b.start.cmp(&a.start));
1831
1832            // Clone content for comparison after modifications
1833            let mut new_content = content.clone();
1834            for edit in edits {
1835                if edit.end > new_content.len() {
1836                    return Err(ParseError::SyntaxError {
1837                        message: format!(
1838                            "Edit out of bounds for {}: range {}..{} in content len {}",
1839                            file_edit.file_path.display(),
1840                            edit.start,
1841                            edit.end,
1842                            new_content.len()
1843                        ),
1844                        location: 0,
1845                    });
1846                }
1847                new_content.replace_range(edit.start..edit.end, &edit.new_text);
1848            }
1849
1850            if new_content != content {
1851                std::fs::write(&file_edit.file_path, new_content).map_err(|e| {
1852                    ParseError::SyntaxError {
1853                        message: format!(
1854                            "Failed to write file {}: {}",
1855                            file_edit.file_path.display(),
1856                            e
1857                        ),
1858                        location: 0,
1859                    }
1860                })?;
1861                files_modified += 1;
1862            }
1863        }
1864
1865        Ok(files_modified)
1866    }
1867}
1868
impl Default for RefactoringEngine {
    /// Equivalent to [`RefactoringEngine::new`].
    fn default() -> Self {
        Self::new()
    }
}
1874
// Temporary stub implementations for missing dependencies.
//
// These no-op types stand in for the real engines when the corresponding
// cargo features (`workspace_refactor`, `modernize`) are disabled, so the
// engine's fields and call sites compile unchanged in feature-off builds.
mod temp_stubs {
    use super::*;

    #[allow(dead_code)]
    #[derive(Debug)]
    /// Workspace refactor stub used when the `workspace_refactor` feature is disabled.
    pub(super) struct WorkspaceRefactor;
    #[allow(dead_code)]
    impl WorkspaceRefactor {
        /// Create a new stub workspace refactor instance.
        pub(super) fn new() -> Self {
            Self
        }
    }

    #[allow(dead_code)]
    #[derive(Debug)]
    /// Modernization engine stub used when the `modernize` feature is disabled.
    pub(super) struct ModernizeEngine;
    #[allow(dead_code)]
    impl ModernizeEngine {
        /// Create a new stub modernizer instance.
        pub(super) fn new() -> Self {
            Self
        }

        /// Placeholder modernization hook that reports no changes.
        ///
        /// Always returns `Ok(0)` so feature-off builds treat every file as
        /// "no modernization applied".
        pub(super) fn modernize_file(
            &mut self,
            _file: &Path,
            _patterns: &[ModernizationPattern],
        ) -> ParseResult<usize> {
            Ok(0)
        }
    }
}
1912
/// Result of dataflow analysis over an extract-method selection: which
/// variables the extracted range reads from the enclosing scope and which
/// it produces for code after the range.
struct ExtractionAnalysis {
    /// Variables used inside the range but declared before it
    /// (sorted; become parameters of the extracted sub).
    inputs: Vec<String>,
    /// Variables declared in (or flowing through) the range and referenced
    /// after it (sorted; become the extracted sub's return values).
    outputs: Vec<String>,
}
1917
1918fn analyze_extraction(ast: &Node, start: usize, end: usize) -> ExtractionAnalysis {
1919    let mut inputs = HashSet::new();
1920    let mut outputs = HashSet::new();
1921    let mut declared_in_scope = HashSet::new();
1922    let mut declared_in_range = HashSet::new();
1923
1924    visit_node(
1925        ast,
1926        start,
1927        end,
1928        &mut inputs,
1929        &mut outputs,
1930        &mut declared_in_scope,
1931        &mut declared_in_range,
1932    );
1933
1934    let mut inputs_vec: Vec<_> = inputs.into_iter().collect();
1935    inputs_vec.sort();
1936    let mut outputs_vec: Vec<_> = outputs.into_iter().collect();
1937    outputs_vec.sort();
1938
1939    ExtractionAnalysis { inputs: inputs_vec, outputs: outputs_vec }
1940}
1941
/// Recursive dataflow walk backing `analyze_extraction`.
///
/// `start..end` is the byte range selected for extraction. For each
/// sigil-qualified variable name the walk maintains:
/// - `declared_in_range`: declarations whose node lies inside the range;
/// - `declared_in_scope`: declarations outside the range; scope-introducing
///   nodes (blocks, subs, catch/loop bodies) clone this set so inner
///   declarations don't leak outward;
/// - `inputs`: names used inside the range but declared only in an
///   enclosing scope;
/// - `outputs`: names declared in the range (or already inputs) that are
///   referenced after the range ends.
///
/// NOTE(review): `declared_in_range` is shared across all scopes (never
/// cloned), so range-local declarations are tracked flatly — confirm this
/// approximation is intended.
fn visit_node(
    node: &Node,
    start: usize,
    end: usize,
    inputs: &mut HashSet<String>,
    outputs: &mut HashSet<String>,
    declared_in_scope: &mut HashSet<String>,
    declared_in_range: &mut HashSet<String>,
) {
    // Whether this node's own span lies entirely within the selection.
    let in_range = node.location.start >= start && node.location.end <= end;

    match &node.kind {
        NodeKind::VariableDeclaration { declarator, variable, initializer, .. } => {
            // Only lexical declarations introduce names; `our`/`local` are not tracked.
            if declarator == "my" || declarator == "state" {
                let name = extract_var_name(variable);
                if in_range {
                    declared_in_range.insert(name);
                } else {
                    declared_in_scope.insert(name);
                }
            }
            if let Some(init) = initializer {
                visit_node(init, start, end, inputs, outputs, declared_in_scope, declared_in_range);
            }
        }
        NodeKind::VariableListDeclaration { declarator, variables, initializer, .. } => {
            // e.g. `my ($a, $b) = ...` — register every declared name.
            if declarator == "my" || declarator == "state" {
                for var in variables {
                    let name = extract_var_name(var);
                    if in_range {
                        declared_in_range.insert(name);
                    } else {
                        declared_in_scope.insert(name);
                    }
                }
            }
            if let Some(init) = initializer {
                visit_node(init, start, end, inputs, outputs, declared_in_scope, declared_in_range);
            }
        }
        NodeKind::MandatoryParameter { variable }
        | NodeKind::SlurpyParameter { variable }
        | NodeKind::NamedParameter { variable } => {
            // Signature parameters count as declarations.
            let name = extract_var_name(variable);
            if in_range {
                declared_in_range.insert(name);
            } else {
                declared_in_scope.insert(name);
            }
        }
        NodeKind::OptionalParameter { variable, default_value } => {
            let name = extract_var_name(variable);
            if in_range {
                declared_in_range.insert(name);
            } else {
                declared_in_scope.insert(name);
            }
            // The default expression may itself reference variables.
            visit_node(
                default_value,
                start,
                end,
                inputs,
                outputs,
                declared_in_scope,
                declared_in_range,
            );
        }
        NodeKind::Variable { sigil, name } => {
            let full_name = format!("{}{}", sigil, name);
            if in_range {
                // If not declared in range, check if declared in outer scope.
                if !declared_in_range.contains(&full_name) && declared_in_scope.contains(&full_name)
                {
                    inputs.insert(full_name.clone());
                }
            } else if node.location.start >= end {
                // Usage after range
                // If declared in range OR used in range (input), it might have changed and is used after.
                if declared_in_range.contains(&full_name) || inputs.contains(&full_name) {
                    outputs.insert(full_name);
                }
            }
        }
        NodeKind::Block { statements } => {
            // New lexical scope: declarations inside must not leak outward.
            let mut inner_scope = declared_in_scope.clone();
            for stmt in statements {
                visit_node(stmt, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
            }
        }
        NodeKind::Subroutine { signature, body, .. } => {
            // Subroutine body and signature share one inner scope.
            let mut inner_scope = declared_in_scope.clone();
            if let Some(sig) = signature {
                visit_node(sig, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
            }
            visit_node(body, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
        }
        NodeKind::Try { body, catch_blocks, finally_block } => {
            visit_node(body, start, end, inputs, outputs, declared_in_scope, declared_in_range);
            for (var, catch_body) in catch_blocks {
                let mut inner_scope = declared_in_scope.clone();
                if let Some(v_name) = var {
                    // Check if v_name has sigil, if not assume $
                    let full_name = if v_name.starts_with(['$', '@', '%']) {
                        v_name.clone()
                    } else {
                        format!("${}", v_name)
                    };
                    // NOTE(review): the catch variable is inserted into the *outer*
                    // declaration sets after `inner_scope` was already cloned, so it
                    // leaks beyond this catch block; `in_range` also reflects the
                    // whole `try` node's span, not the variable's own location.
                    // Confirm both are intended.
                    if in_range {
                        declared_in_range.insert(full_name);
                    } else {
                        declared_in_scope.insert(full_name);
                    }
                }
                visit_node(
                    catch_body,
                    start,
                    end,
                    inputs,
                    outputs,
                    &mut inner_scope,
                    declared_in_range,
                );
            }
            if let Some(finally) = finally_block {
                visit_node(
                    finally,
                    start,
                    end,
                    inputs,
                    outputs,
                    declared_in_scope,
                    declared_in_range,
                );
            }
        }
        NodeKind::Foreach { variable, list, body, continue_block } => {
            // Visit list with outer scope
            visit_node(list, start, end, inputs, outputs, declared_in_scope, declared_in_range);

            // Visit continue block if present
            if let Some(cb) = continue_block {
                visit_node(cb, start, end, inputs, outputs, declared_in_scope, declared_in_range);
            }

            // Create inner scope for variable and body
            let mut inner_scope = declared_in_scope.clone();
            visit_node(variable, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
            visit_node(body, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
        }
        NodeKind::For { init, condition, update, body, continue_block } => {
            // C-style for: init/cond/update/body/continue all share one inner scope.
            let mut inner_scope = declared_in_scope.clone();
            if let Some(n) = init {
                visit_node(n, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
            }
            if let Some(n) = condition {
                visit_node(n, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
            }
            if let Some(n) = update {
                visit_node(n, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
            }
            visit_node(body, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
            if let Some(n) = continue_block {
                visit_node(n, start, end, inputs, outputs, &mut inner_scope, declared_in_range);
            }
        }
        _ => {
            // Default: recurse into children with the current scope unchanged.
            for child in node.children() {
                visit_node(
                    child,
                    start,
                    end,
                    inputs,
                    outputs,
                    declared_in_scope,
                    declared_in_range,
                );
            }
        }
    }
}
2122
2123fn extract_var_name(node: &Node) -> String {
2124    match &node.kind {
2125        NodeKind::Variable { sigil, name } => format!("{}{}", sigil, name),
2126        NodeKind::VariableWithAttributes { variable, .. } => extract_var_name(variable),
2127        _ => String::new(),
2128    }
2129}
2130
2131#[cfg(test)]
2132mod tests {
2133    use super::*;
2134    use perl_tdd_support::{must, must_some};
2135
2136    #[test]
2137    fn test_operation_id_generation() {
2138        let engine = RefactoringEngine::new();
2139        let id1 = engine.generate_operation_id();
2140        let id2 = engine.generate_operation_id();
2141        assert_ne!(id1, id2);
2142        assert!(id1.starts_with("refactor_"));
2143    }
2144
2145    #[test]
2146    fn test_config_defaults() {
2147        let config = RefactoringConfig::default();
2148        assert!(config.safe_mode);
2149        assert_eq!(config.max_files_per_operation, 100);
2150        assert!(config.create_backups);
2151        assert_eq!(config.operation_timeout, 60);
2152        assert!(config.parallel_processing);
2153    }
2154
    #[test]
    fn test_extract_method_basic() {
        // End-to-end extract-method on a temp file: $x and $y are used in the
        // selection but declared before it (become parameters); $z is declared
        // in the selection and used after it (becomes the return value).
        use std::io::Write;
        let mut file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
        let code = r#"
sub test {
    my $x = 1;
    my $y = 2;
    # Start extraction
    print $x;
    my $z = $x + $y;
    print $z;
    # End extraction
    return $z;
}
"#;
        must(write!(file, "{}", code));
        let path = file.path().to_path_buf();

        let mut engine = RefactoringEngine::new();
        engine.config.safe_mode = false;

        // Lines are 0-indexed.
        // Line 5: "    print $x;\n"
        // Line 8: "    # End extraction\n"
        let result = must(engine.perform_extract_method(
            "extracted_sub",
            (5, 0),
            (8, 0),
            std::slice::from_ref(&path),
        ));

        assert!(result.success);

        let new_code = must(std::fs::read_to_string(&path));
        println!("New code:\n{}", new_code);

        // Inputs: $x, $y (used in range, declared before)
        // Outputs: $z (declared in range, used after)

        assert!(new_code.contains("sub extracted_sub {"));
        assert!(new_code.contains("my ($x, $y) = @_;"));
        assert!(new_code.contains("return ($z);"));
        // Call verification order depends on how we generate it
        assert!(new_code.contains("($z) = extracted_sub($x, $y);"));
    }
2201
    #[test]
    fn test_extract_method_with_placement() {
        // Like the basic test, but in a package that ends with "1;": the new
        // sub must be inserted *before* the module's trailing true value.
        use std::io::Write;
        let mut file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
        let code = r#"
package MyModule;
use strict;
use warnings;

sub existing {
    my $val = 10;
    # start
    print $val;
    my $new_val = $val * 2;
    # end
    return $new_val;
}

1;
"#;
        must(write!(file, "{}", code));
        let path = file.path().to_path_buf();

        let mut engine = RefactoringEngine::new();
        engine.config.safe_mode = false;

        // selection should include lines 8 and 9 (0-indexed)
        // Line 8: "    print $val;\n"
        // Line 9: "    my $new_val = $val * 2;\n"
        let result = must(engine.perform_extract_method(
            "helper",
            (8, 0),
            (10, 0),
            std::slice::from_ref(&path),
        ));

        assert!(result.success);

        let new_code = must(std::fs::read_to_string(&path));
        println!("New code with placement:\n{}", new_code);

        // Check placement: helper should be before 1;
        assert!(new_code.contains("sub helper {"));
        assert!(must_some(new_code.find("sub helper {")) < must_some(new_code.find("1;")));

        assert!(new_code.contains("my ($val) = @_;"));
        assert!(new_code.contains("return ($new_val);"));
        assert!(new_code.contains("($new_val) = helper($val);"));
    }
2251
    #[test]
    fn test_extract_method_complex_vars() {
        // Mixed-sigil extraction: a scalar accumulator ($sum), an array input
        // (@items), a loop-scoped variable ($item, must NOT become a
        // parameter), and a `state` variable declared inside the range
        // ($call_count, must become an output).
        use std::io::Write;
        let mut file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
        let code = r#"
sub complex {
    my $sum = 0;
    my @items = (1..10);
    # start
    foreach my $item (@items) {
        $sum += $item;
    }
    state $call_count = 0;
    $call_count++;
    # end
    return ($sum, $call_count);
}
"#;
        must(write!(file, "{}", code));
        let path = file.path().to_path_buf();

        let mut engine = RefactoringEngine::new();
        engine.config.safe_mode = false;

        // Line 5: "    foreach my $item (@items) {"
        // Line 10: "    # end"
        let result = must(engine.perform_extract_method(
            "do_math",
            (5, 0),
            (10, 0),
            std::slice::from_ref(&path),
        ));

        assert!(result.success);
        let new_code = must(std::fs::read_to_string(&path));
        println!("New code complex:\n{}", new_code);

        // check if sub created
        assert!(new_code.contains("sub do_math {"));
        // check inputs
        assert!(new_code.contains("my ($sum, @items) = @_;"));
        // check outputs
        assert!(new_code.contains("return ($call_count, $sum);"));
        // check call
        assert!(new_code.contains("($call_count, $sum) = do_math($sum, @items);"));
        // check indentation of call
        assert!(new_code.contains("    ($call_count, $sum) = do_math($sum, @items);"));
    }
2300
2301    // ============================================================
2302    // Validation tests for validate_operation
2303    // ============================================================
2304
2305    mod validation_tests {
2306        use super::*;
2307        use perl_tdd_support::{must, must_err};
2308        use serial_test::serial;
2309
2310        // --- Perl identifier validation tests ---
2311
2312        #[test]
2313        fn test_validate_identifier_bare_name() {
2314            let engine = RefactoringEngine::new();
2315            assert!(engine.validate_perl_identifier("foo", "test").is_ok());
2316            assert!(engine.validate_perl_identifier("_private", "test").is_ok());
2317            assert!(engine.validate_perl_identifier("CamelCase", "test").is_ok());
2318            assert!(engine.validate_perl_identifier("name_with_123", "test").is_ok());
2319        }
2320
2321        #[test]
2322        fn test_validate_identifier_with_sigils() {
2323            let engine = RefactoringEngine::new();
2324            // All valid Perl sigils should be accepted
2325            assert!(engine.validate_perl_identifier("$scalar", "test").is_ok());
2326            assert!(engine.validate_perl_identifier("@array", "test").is_ok());
2327            assert!(engine.validate_perl_identifier("%hash", "test").is_ok());
2328            assert!(engine.validate_perl_identifier("&sub", "test").is_ok());
2329            assert!(engine.validate_perl_identifier("*glob", "test").is_ok());
2330        }
2331
2332        #[test]
2333        fn test_validate_identifier_qualified_names() {
2334            let engine = RefactoringEngine::new();
2335            assert!(engine.validate_perl_identifier("Package::name", "test").is_ok());
2336            assert!(engine.validate_perl_identifier("$Package::var", "test").is_ok());
2337            assert!(engine.validate_perl_identifier("@Deep::Nested::array", "test").is_ok());
2338            assert!(engine.validate_perl_identifier("::main_package", "test").is_ok());
2339        }
2340
        #[test]
        fn test_validate_identifier_empty_rejected() {
            // The empty string is never a valid identifier.
            let engine = RefactoringEngine::new();
            assert!(engine.validate_perl_identifier("", "test").is_err());
        }
2346
2347        #[test]
2348        fn test_validate_identifier_sigil_only_rejected() {
2349            let engine = RefactoringEngine::new();
2350            assert!(engine.validate_perl_identifier("$", "test").is_err());
2351            assert!(engine.validate_perl_identifier("@", "test").is_err());
2352            assert!(engine.validate_perl_identifier("%", "test").is_err());
2353        }
2354
2355        #[test]
2356        fn test_validate_identifier_invalid_start_char() {
2357            let engine = RefactoringEngine::new();
2358            assert!(engine.validate_perl_identifier("123abc", "test").is_err());
2359            assert!(engine.validate_perl_identifier("$123abc", "test").is_err());
2360            assert!(engine.validate_perl_identifier("-invalid", "test").is_err());
2361        }
2362
2363        // --- Subroutine name validation tests ---
2364
2365        #[test]
2366        fn test_validate_subroutine_name_valid() {
2367            let engine = RefactoringEngine::new();
2368            assert!(engine.validate_perl_subroutine_name("my_sub").is_ok());
2369            assert!(engine.validate_perl_subroutine_name("_private_sub").is_ok());
2370            assert!(engine.validate_perl_subroutine_name("&explicit_sub").is_ok());
2371        }
2372
2373        #[test]
2374        fn test_validate_subroutine_name_invalid_sigils() {
2375            let engine = RefactoringEngine::new();
2376            // Subs cannot have $, @, %, * sigils
2377            assert!(engine.validate_perl_subroutine_name("$not_a_sub").is_err());
2378            assert!(engine.validate_perl_subroutine_name("@not_a_sub").is_err());
2379            assert!(engine.validate_perl_subroutine_name("%not_a_sub").is_err());
2380        }
2381
2382        #[test]
2383        fn test_validate_subroutine_name_empty() {
2384            let engine = RefactoringEngine::new();
2385            assert!(engine.validate_perl_subroutine_name("").is_err());
2386        }
2387
2388        // --- Qualified name validation tests ---
2389
2390        #[test]
2391        fn test_validate_qualified_name_valid() {
2392            let engine = RefactoringEngine::new();
2393            assert!(engine.validate_perl_qualified_name("Package").is_ok());
2394            assert!(engine.validate_perl_qualified_name("Package::Sub").is_ok());
2395            assert!(engine.validate_perl_qualified_name("Deep::Nested::Name").is_ok());
2396        }
2397
2398        #[test]
2399        fn test_validate_qualified_name_empty_rejected() {
2400            let engine = RefactoringEngine::new();
2401            assert!(engine.validate_perl_qualified_name("").is_err());
2402            assert!(engine.validate_perl_qualified_name("::").is_err());
2403        }
2404
2405        #[test]
2406        fn test_validate_qualified_name_invalid_segment() {
2407            let engine = RefactoringEngine::new();
2408            assert!(engine.validate_perl_qualified_name("Package::123invalid").is_err());
2409        }
2410
2411        // --- File count limit validation tests ---
2412
2413        #[test]
2414        fn test_validate_file_count_limit() {
2415            let engine = RefactoringEngine::new();
2416            // Create more files than allowed
2417            let files: Vec<PathBuf> =
2418                (0..150).map(|i| PathBuf::from(format!("/fake/{}.pl", i))).collect();
2419
2420            let op = RefactoringType::OptimizeImports {
2421                remove_unused: true,
2422                sort_alphabetically: true,
2423                group_by_type: false,
2424            };
2425
2426            let result = engine.validate_operation(&op, &files);
2427            assert!(result.is_err());
2428            let err_msg = format!("{:?}", must_err(result));
2429            assert!(err_msg.contains("exceeds maximum file limit"));
2430        }
2431
2432        // --- ExtractMethod validation tests ---
2433
2434        #[test]
2435        fn test_extract_method_requires_file() {
2436            let engine = RefactoringEngine::new();
2437            let op = RefactoringType::ExtractMethod {
2438                method_name: "new_method".to_string(),
2439                start_position: (1, 0),
2440                end_position: (5, 0),
2441            };
2442
2443            let result = engine.validate_operation(&op, &[]);
2444            assert!(result.is_err());
2445            let err_msg = format!("{:?}", must_err(result));
2446            assert!(err_msg.contains("requires a target file"));
2447        }
2448
2449        #[test]
2450        fn test_extract_method_single_file_only() {
2451            let file1: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
2452            let file2: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
2453
2454            let engine = RefactoringEngine::new();
2455            let op = RefactoringType::ExtractMethod {
2456                method_name: "new_method".to_string(),
2457                start_position: (1, 0),
2458                end_position: (5, 0),
2459            };
2460
2461            let result = engine
2462                .validate_operation(&op, &[file1.path().to_path_buf(), file2.path().to_path_buf()]);
2463            assert!(result.is_err());
2464            let err_msg = format!("{:?}", must_err(result));
2465            assert!(err_msg.contains("operates on a single file"));
2466        }
2467
2468        #[test]
2469        fn test_extract_method_invalid_range() {
2470            let file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
2471
2472            let engine = RefactoringEngine::new();
2473            let op = RefactoringType::ExtractMethod {
2474                method_name: "new_method".to_string(),
2475                start_position: (10, 0),
2476                end_position: (5, 0), // end before start
2477            };
2478
2479            let result = engine.validate_operation(&op, &[file.path().to_path_buf()]);
2480            assert!(result.is_err());
2481            let err_msg = format!("{:?}", must_err(result));
2482            assert!(err_msg.contains("must be before end"));
2483        }
2484
2485        #[test]
2486        fn test_extract_method_invalid_subroutine_name() {
2487            let file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
2488
2489            let engine = RefactoringEngine::new();
2490            let op = RefactoringType::ExtractMethod {
2491                method_name: "$invalid".to_string(), // sigil not allowed for sub names
2492                start_position: (1, 0),
2493                end_position: (5, 0),
2494            };
2495
2496            let result = engine.validate_operation(&op, &[file.path().to_path_buf()]);
2497            assert!(result.is_err());
2498        }
2499
2500        // --- MoveCode validation tests ---
2501
2502        #[test]
2503        fn test_move_code_requires_elements() {
2504            use std::io::Write;
2505            let mut file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
2506            must(write!(file, "# source"));
2507
2508            let engine = RefactoringEngine::new();
2509            let op = RefactoringType::MoveCode {
2510                source_file: file.path().to_path_buf(),
2511                target_file: PathBuf::from("target.pl"),
2512                elements: vec![], // empty
2513            };
2514
2515            let result = engine.validate_operation(&op, &[]);
2516            assert!(result.is_err());
2517            let err_msg = format!("{:?}", must_err(result));
2518            assert!(err_msg.contains("requires at least one element"));
2519        }
2520
2521        // --- SymbolRename validation tests ---
2522
2523        #[test]
2524        fn test_symbol_rename_accepts_sigils() {
2525            use std::io::Write;
2526            let mut file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
2527            must(write!(file, "my $old = 1;"));
2528
2529            let engine = RefactoringEngine::new();
2530            let op = RefactoringType::SymbolRename {
2531                old_name: "$old_var".to_string(),
2532                new_name: "$new_var".to_string(),
2533                scope: RefactoringScope::File(file.path().to_path_buf()),
2534            };
2535
2536            let result = engine.validate_operation(&op, &[]);
2537            assert!(result.is_ok());
2538        }
2539
2540        #[test]
2541        fn test_symbol_rename_workspace_scope_no_files_required() {
2542            let engine = RefactoringEngine::new();
2543            let op = RefactoringType::SymbolRename {
2544                old_name: "old_sub".to_string(),
2545                new_name: "new_sub".to_string(),
2546                scope: RefactoringScope::Workspace,
2547            };
2548
2549            let result = engine.validate_operation(&op, &[]);
2550            assert!(result.is_ok());
2551        }
2552
2553        #[test]
2554        fn test_symbol_rename_fileset_requires_files() {
2555            let engine = RefactoringEngine::new();
2556            let op = RefactoringType::SymbolRename {
2557                old_name: "old_sub".to_string(),
2558                new_name: "new_sub".to_string(),
2559                scope: RefactoringScope::FileSet(vec![]), // empty
2560            };
2561
2562            let result = engine.validate_operation(&op, &[]);
2563            assert!(result.is_err());
2564            let err_msg = format!("{:?}", must_err(result));
2565            assert!(err_msg.contains("requires at least one file"));
2566        }
2567
2568        // --- Inline validation tests ---
2569
2570        #[test]
2571        fn test_inline_requires_files() {
2572            let engine = RefactoringEngine::new();
2573            let op =
2574                RefactoringType::Inline { symbol_name: "$var".to_string(), all_occurrences: true };
2575
2576            let result = engine.validate_operation(&op, &[]);
2577            assert!(result.is_err());
2578            let err_msg = format!("{:?}", must_err(result));
2579            assert!(err_msg.contains("requires at least one target file"));
2580        }
2581
2582        // --- Modernize validation tests ---
2583
2584        #[test]
2585        fn test_modernize_requires_patterns() {
2586            let engine = RefactoringEngine::new();
2587            let op = RefactoringType::Modernize { patterns: vec![] };
2588
2589            let result = engine.validate_operation(&op, &[]);
2590            assert!(result.is_err());
2591            let err_msg = format!("{:?}", must_err(result));
2592            assert!(err_msg.contains("requires at least one pattern"));
2593        }
2594
2595        // --- Sigil consistency tests ---
2596
2597        #[test]
2598        fn test_symbol_rename_sigil_consistency_required() {
2599            let engine = RefactoringEngine::new();
2600            // $foo -> @foo should fail (different sigils)
2601            let op = RefactoringType::SymbolRename {
2602                old_name: "$foo".to_string(),
2603                new_name: "@foo".to_string(),
2604                scope: RefactoringScope::Workspace,
2605            };
2606
2607            let result = engine.validate_operation(&op, &[]);
2608            assert!(result.is_err());
2609            let err_msg = format!("{:?}", must_err(result));
2610            assert!(err_msg.contains("sigil mismatch"));
2611        }
2612
2613        #[test]
2614        fn test_symbol_rename_sigil_consistency_no_sigil_to_sigil() {
2615            let engine = RefactoringEngine::new();
2616            // bare name -> sigiled name should fail
2617            let op = RefactoringType::SymbolRename {
2618                old_name: "foo".to_string(),
2619                new_name: "$foo".to_string(),
2620                scope: RefactoringScope::Workspace,
2621            };
2622
2623            let result = engine.validate_operation(&op, &[]);
2624            assert!(result.is_err());
2625            let err_msg = format!("{:?}", must_err(result));
2626            assert!(err_msg.contains("sigil mismatch"));
2627        }
2628
2629        #[test]
2630        fn test_symbol_rename_same_name_rejected() {
2631            let engine = RefactoringEngine::new();
2632            let op = RefactoringType::SymbolRename {
2633                old_name: "$foo".to_string(),
2634                new_name: "$foo".to_string(),
2635                scope: RefactoringScope::Workspace,
2636            };
2637
2638            let result = engine.validate_operation(&op, &[]);
2639            assert!(result.is_err());
2640            let err_msg = format!("{:?}", must_err(result));
2641            assert!(err_msg.contains("must be different"));
2642        }
2643
2644        // --- Double separator and trailing :: tests ---
2645
2646        #[test]
2647        fn test_validate_identifier_double_separator_rejected() {
2648            let engine = RefactoringEngine::new();
2649            // Double :: should be rejected
2650            assert!(engine.validate_perl_identifier("Foo::::Bar", "test").is_err());
2651            assert!(engine.validate_perl_identifier("$Foo::::Bar", "test").is_err());
2652        }
2653
2654        #[test]
2655        fn test_validate_identifier_trailing_separator_rejected() {
2656            let engine = RefactoringEngine::new();
2657            // Trailing :: should be rejected
2658            assert!(engine.validate_perl_identifier("Foo::", "test").is_err());
2659            assert!(engine.validate_perl_identifier("$Foo::Bar::", "test").is_err());
2660        }
2661
2662        #[test]
2663        fn test_validate_identifier_leading_separator_allowed() {
2664            let engine = RefactoringEngine::new();
2665            // Leading :: should be allowed (for main package/absolute names)
2666            assert!(engine.validate_perl_identifier("::Foo", "test").is_ok());
2667            assert!(engine.validate_perl_identifier("::Foo::Bar", "test").is_ok());
2668            assert!(engine.validate_perl_identifier("$::Foo", "test").is_ok());
2669        }
2670
2671        #[test]
2672        fn test_validate_qualified_name_double_separator_rejected() {
2673            let engine = RefactoringEngine::new();
2674            assert!(engine.validate_perl_qualified_name("Foo::::Bar").is_err());
2675        }
2676
2677        #[test]
2678        fn test_validate_qualified_name_trailing_separator_rejected() {
2679            let engine = RefactoringEngine::new();
2680            assert!(engine.validate_perl_qualified_name("Foo::").is_err());
2681            assert!(engine.validate_perl_qualified_name("Foo::Bar::").is_err());
2682        }
2683
2684        #[test]
2685        fn test_validate_qualified_name_leading_separator_rejected() {
2686            let engine = RefactoringEngine::new();
2687            // For qualified names (MoveCode elements), leading :: is also rejected
2688            assert!(engine.validate_perl_qualified_name("::Foo").is_err());
2689        }
2690
2691        #[test]
2692        fn test_validate_qualified_name_sigil_rejected() {
2693            let engine = RefactoringEngine::new();
2694            // Qualified names (for MoveCode) should not have sigils
2695            assert!(engine.validate_perl_qualified_name("$foo").is_err());
2696            assert!(engine.validate_perl_qualified_name("@array").is_err());
2697        }
2698
2699        // --- Unicode identifier tests ---
2700
2701        #[test]
2702        fn test_validate_identifier_unicode_allowed() {
2703            let engine = RefactoringEngine::new();
2704            // Perl supports Unicode identifiers
2705            assert!(engine.validate_perl_identifier("$π", "test").is_ok());
2706            assert!(engine.validate_perl_identifier("$αβγ", "test").is_ok());
2707            assert!(engine.validate_perl_identifier("日本語", "test").is_ok());
2708        }
2709
2710        #[test]
2711        fn test_validate_qualified_name_unicode_allowed() {
2712            let engine = RefactoringEngine::new();
2713            // Unicode package names should be allowed
2714            assert!(engine.validate_perl_qualified_name("Müller").is_ok());
2715            assert!(engine.validate_perl_qualified_name("Müller::Util").is_ok());
2716            assert!(engine.validate_perl_qualified_name("日本::パッケージ").is_ok());
2717        }
2718
2719        // --- ExtractMethod '&' prefix tests ---
2720
2721        #[test]
2722        fn test_extract_method_ampersand_prefix_rejected() {
2723            let file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
2724
2725            let engine = RefactoringEngine::new();
2726            let op = RefactoringType::ExtractMethod {
2727                method_name: "&foo".to_string(), // leading & should be rejected
2728                start_position: (1, 0),
2729                end_position: (5, 0),
2730            };
2731
2732            let result = engine.validate_operation(&op, &[file.path().to_path_buf()]);
2733            assert!(result.is_err());
2734            let err_msg = format!("{:?}", must_err(result));
2735            assert!(err_msg.contains("bare identifier"));
2736            assert!(err_msg.contains("no leading '&'"));
2737        }
2738
2739        // --- MoveCode same-file tests ---
2740
2741        #[test]
2742        fn test_move_code_same_file_rejected() {
2743            use std::io::Write;
2744            let mut file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
2745            must(write!(file, "# source"));
2746
2747            let engine = RefactoringEngine::new();
2748            let op = RefactoringType::MoveCode {
2749                source_file: file.path().to_path_buf(),
2750                target_file: file.path().to_path_buf(), // same as source
2751                elements: vec!["some_sub".to_string()],
2752            };
2753
2754            let result = engine.validate_operation(&op, &[]);
2755            assert!(result.is_err());
2756            let err_msg = format!("{:?}", must_err(result));
2757            assert!(err_msg.contains("must be different"));
2758        }
2759
2760        // --- FileSet scope max_files tests ---
2761
2762        #[test]
2763        fn test_fileset_scope_max_files_limit() {
2764            // Create temp files for the test
2765            let files: Vec<tempfile::NamedTempFile> =
2766                (0..5).map(|_| must(tempfile::NamedTempFile::new())).collect();
2767            let paths: Vec<_> = files.iter().map(|f| f.path().to_path_buf()).collect();
2768
2769            // Create engine with low max_files limit
2770            let config =
2771                RefactoringConfig { max_files_per_operation: 3, ..RefactoringConfig::default() };
2772            let engine = RefactoringEngine::with_config(config);
2773
2774            let op = RefactoringType::SymbolRename {
2775                old_name: "old_sub".to_string(),
2776                new_name: "new_sub".to_string(),
2777                scope: RefactoringScope::FileSet(paths), // 5 files, but limit is 3
2778            };
2779
2780            let result = engine.validate_operation(&op, &[]);
2781            assert!(result.is_err());
2782            let err_msg = format!("{:?}", must_err(result));
2783            assert!(err_msg.contains("exceeds maximum file limit"));
2784        }
2785
2786        // --- Backup cleanup tests ---
2787
2788        #[test]
2789        fn test_cleanup_no_backups() {
2790            let temp_dir = must(tempfile::tempdir());
2791            let config = RefactoringConfig {
2792                backup_root: Some(temp_dir.path().to_path_buf()),
2793                ..RefactoringConfig::default()
2794            };
2795            let mut engine = RefactoringEngine::with_config(config);
2796            let result = must(engine.clear_history());
2797            assert_eq!(result.directories_removed, 0);
2798            assert_eq!(result.space_reclaimed, 0);
2799        }
2800
2801        #[test]
2802        #[serial]
2803        fn test_cleanup_backup_directories() {
2804            use std::fs;
2805
2806            let temp_dir = must(tempfile::tempdir());
2807            let backup_root = temp_dir.path().to_path_buf();
2808
2809            // Manually create a backup directory
2810            let backup = backup_root.join("refactor_100_0");
2811            must(fs::create_dir_all(&backup));
2812            must(fs::write(backup.join("file.pl"), "sub test {}"));
2813
2814            let config = RefactoringConfig {
2815                backup_root: Some(backup_root),
2816                max_backup_retention: 0, // Remove all
2817                ..RefactoringConfig::default()
2818            };
2819            let mut engine = RefactoringEngine::with_config(config);
2820            let result = must(engine.clear_history());
2821
2822            // Should have removed at least one directory
2823            assert!(result.directories_removed >= 1);
2824            assert_eq!(engine.operation_history.len(), 0);
2825        }
2826
        #[test]
        #[serial]
        fn test_cleanup_respects_retention_count() {
            // End-to-end retention check: perform several refactors that create
            // backups, then verify clear_history() keeps only the configured
            // number of most-recent backup directories.
            use std::io::Write;

            let config = RefactoringConfig {
                create_backups: true,
                max_backup_retention: 2,
                backup_max_age_seconds: 0, // Disable age-based retention
                ..RefactoringConfig::default()
            };

            let mut engine = RefactoringEngine::with_config(config);

            // Create multiple backups
            for i in 0..4 {
                let mut file: tempfile::NamedTempFile = must(tempfile::NamedTempFile::new());
                must(writeln!(file, "sub test{} {{ }}", i));
                let path = file.path().to_path_buf();

                let op = RefactoringType::SymbolRename {
                    old_name: format!("test{}", i),
                    new_name: format!("renamed_test{}", i),
                    scope: RefactoringScope::File(path.clone()),
                };

                // NOTE(review): the refactor outcome is deliberately ignored —
                // only the backup side effect matters here. Presumably a backup
                // is created even if the rename itself fails; confirm.
                let _ = engine.refactor(op, vec![path]);
                std::thread::sleep(std::time::Duration::from_millis(100)); // Ensure different timestamps
            }

            // Clean up with retention policy
            let result = must(engine.clear_history());

            // Should have removed excess directories (4 created - 2 retained = 2 removed)
            assert!(result.directories_removed >= 2);
        }
2863
        #[test]
        #[serial]
        fn test_cleanup_respects_age_limit() {
            // Age-based retention: a backup directory older than
            // backup_max_age_seconds must be removed by clear_history().
            use std::fs;

            let temp_dir = must(tempfile::tempdir());
            let backup_root = temp_dir.path().to_path_buf();
            must(fs::create_dir_all(&backup_root));

            // Create an old backup directory manually
            let old_backup = backup_root.join("refactor_1000_0");
            must(fs::create_dir_all(&old_backup));

            // Create a test file in the old backup
            let test_file = old_backup.join("file_0.pl");
            must(fs::write(&test_file, "sub old_backup { }"));

            // Wait until filesystem metadata reports the backup is older than the age threshold.
            // Polling (instead of one fixed sleep) tolerates filesystems with coarse
            // mtime resolution; the 5 s deadline bounds worst-case test duration.
            let deadline = std::time::Instant::now() + std::time::Duration::from_secs(5);
            let mut reached_age_limit = false;
            while std::time::Instant::now() < deadline {
                if let Ok(metadata) = fs::metadata(&old_backup)
                    && let Ok(modified) = metadata.modified()
                    && let Ok(age) = std::time::SystemTime::now().duration_since(modified)
                    && age > std::time::Duration::from_secs(1)
                {
                    reached_age_limit = true;
                    break;
                }

                std::thread::sleep(std::time::Duration::from_millis(50));
            }
            assert!(
                reached_age_limit,
                "backup directory did not age past threshold within test timeout"
            );

            let config = RefactoringConfig {
                backup_root: Some(backup_root),
                backup_max_age_seconds: 1, // 1 second age limit
                ..RefactoringConfig::default()
            };

            let mut engine = RefactoringEngine::with_config(config);

            // Run cleanup
            let result = engine.clear_history();
            assert!(result.is_ok());

            // The old backup should be cleaned up
            let cleanup_result = must(result);
            assert!(cleanup_result.directories_removed >= 1);
        }
2917
2918        #[test]
2919        fn test_validate_backup_directory_structure() {
2920            let engine = RefactoringEngine::new();
2921
2922            let backup_root = std::env::temp_dir().join("perl_refactor_backups");
2923            let _ = std::fs::create_dir_all(&backup_root);
2924
2925            // Valid backup directory
2926            let valid_backup = backup_root.join("refactor_123_456");
2927            let _ = std::fs::create_dir_all(&valid_backup);
2928            assert!(must(engine.validate_backup_directory(&valid_backup)));
2929
2930            // Invalid backup directory (wrong prefix)
2931            let invalid_backup = backup_root.join("invalid_backup");
2932            let _ = std::fs::create_dir_all(&invalid_backup);
2933            assert!(!must(engine.validate_backup_directory(&invalid_backup)));
2934
2935            // Cleanup
2936            let _ = std::fs::remove_dir_all(&backup_root);
2937        }
2938
2939        #[test]
2940        fn test_calculate_directory_size() {
2941            let engine = RefactoringEngine::new();
2942
2943            let temp_dir = must(tempfile::tempdir());
2944            let dir_path = temp_dir.path().to_path_buf();
2945
2946            // Create test files with known sizes
2947            let file1 = dir_path.join("file1.txt");
2948            let file2 = dir_path.join("file2.txt");
2949
2950            must(std::fs::write(&file1, "hello")); // 5 bytes
2951            must(std::fs::write(&file2, "world!")); // 6 bytes
2952
2953            let total_size = must(engine.calculate_directory_size(&dir_path));
2954            assert_eq!(total_size, 11);
2955        }
2956
2957        #[test]
2958        #[serial]
2959        fn test_backup_cleanup_result_space_reclaimed() {
2960            use std::fs;
2961
2962            let temp_dir = must(tempfile::tempdir());
2963            let backup_root = temp_dir.path().to_path_buf();
2964
2965            // Create backup directory with files of known size
2966            let backup = backup_root.join("refactor_100_0");
2967            must(fs::create_dir_all(&backup));
2968
2969            let test_content = "sub test { print 'hello world'; }"; // 33 bytes
2970            must(fs::write(backup.join("file.pl"), test_content));
2971
2972            let config = RefactoringConfig {
2973                backup_root: Some(backup_root),
2974                max_backup_retention: 0, // Remove all
2975                ..RefactoringConfig::default()
2976            };
2977            let mut engine = RefactoringEngine::with_config(config);
2978
2979            // Clean up and verify space was reclaimed
2980            let result = must(engine.clear_history());
2981            assert!(result.space_reclaimed > 0);
2982        }
2983
2984        // --- Robust backup cleanup tests (non-flaky) ---
2985
2986        #[test]
2987        #[serial]
2988        fn cleanup_test_identifies_all_backup_directories() {
2989            // AC1: When clear_history() is called, all backup directories are identified
2990            // AC5: Method returns count of backup directories removed
2991            use std::fs;
2992
2993            let temp_dir = must(tempfile::tempdir());
2994            let backup_root = temp_dir.path().to_path_buf();
2995
2996            // Manually create backup directories
2997            let backup1 = backup_root.join("refactor_100_0");
2998            let backup2 = backup_root.join("refactor_200_0");
2999            must(fs::create_dir_all(&backup1));
3000            must(fs::create_dir_all(&backup2));
3001
3002            // Create test files in backups
3003            must(fs::write(backup1.join("file1.pl"), "sub test1 {}"));
3004            must(fs::write(backup2.join("file2.pl"), "sub test2 {}"));
3005
3006            let config = RefactoringConfig {
3007                backup_root: Some(backup_root),
3008                max_backup_retention: 0, // Remove all
3009                ..RefactoringConfig::default()
3010            };
3011            let mut engine = RefactoringEngine::with_config(config);
3012            let result = must(engine.clear_history());
3013
3014            // Should have removed both directories
3015            assert_eq!(result.directories_removed, 2);
3016            assert_eq!(engine.operation_history.len(), 0);
3017
3018            // Verify directories are actually removed
3019            assert!(!backup1.exists());
3020            assert!(!backup2.exists());
3021        }
3022
        #[test]
        #[serial]
        fn cleanup_test_respects_retention_count() {
            // AC2: Backup cleanup removes backup files older than a configurable retention period
            // AC3: Operation provides option to keep recent backups (e.g., last N operations)
            //
            // Non-flaky variant of test_cleanup_respects_retention_count: backups are
            // created directly on disk (no engine refactor calls), so only filesystem
            // mtime ordering matters.
            use std::fs;
            use std::thread;
            use std::time::Duration;

            let temp_dir = must(tempfile::tempdir());
            let backup_root = temp_dir.path().to_path_buf();

            // Manually create 4 backup directories with different timestamps
            let backups = [
                backup_root.join("refactor_100_0"),
                backup_root.join("refactor_200_0"),
                backup_root.join("refactor_300_0"),
                backup_root.join("refactor_400_0"),
            ];

            for (i, backup) in backups.iter().enumerate() {
                must(fs::create_dir_all(backup));
                must(fs::write(backup.join("file.pl"), format!("sub test{} {{}}", i)));
                // Sleep to ensure different modification times
                // NOTE(review): 50 ms assumes the filesystem mtime resolution is
                // finer than 50 ms — confirm on coarse-mtime filesystems.
                thread::sleep(Duration::from_millis(50));
            }

            let config = RefactoringConfig {
                create_backups: true,
                max_backup_retention: 2,
                backup_max_age_seconds: 0, // Disable age-based retention
                backup_root: Some(backup_root),
                ..RefactoringConfig::default()
            };

            let mut engine = RefactoringEngine::with_config(config);
            let result = must(engine.clear_history());

            // Should have removed 2 oldest directories, kept 2 newest
            assert_eq!(result.directories_removed, 2);

            // Verify oldest two are removed
            assert!(!backups[0].exists());
            assert!(!backups[1].exists());
            // temp_dir cleanup is automatic
        }
3069
        #[test]
        #[serial]
        fn cleanup_test_respects_age_limit() {
            // AC2: Backup cleanup removes backup files older than a configurable retention period
            // AC6: Errors during cleanup are logged but don't prevent operation history clearing
            //
            // Non-flaky variant of test_cleanup_respects_age_limit: isolated tempdir,
            // exact removal count, and a verified on-disk removal.
            use std::fs;

            let temp_dir = must(tempfile::tempdir());
            let backup_root = temp_dir.path().to_path_buf();

            // Create an old backup directory manually
            let old_backup = backup_root.join("refactor_1000_0");
            must(fs::create_dir_all(&old_backup));

            // Create a test file in the old backup
            let test_file = old_backup.join("file_0.pl");
            must(fs::write(&test_file, "sub old_backup { }"));

            // Poll filesystem metadata until the backup is older than the age threshold,
            // matching the pattern used by `test_cleanup_respects_age_limit` above.
            // The 5 s deadline bounds worst-case test duration.
            let deadline = std::time::Instant::now() + std::time::Duration::from_secs(5);
            let mut reached_age_limit = false;
            while std::time::Instant::now() < deadline {
                if let Ok(metadata) = fs::metadata(&old_backup)
                    && let Ok(modified) = metadata.modified()
                    && let Ok(age) = std::time::SystemTime::now().duration_since(modified)
                    && age > std::time::Duration::from_secs(1)
                {
                    reached_age_limit = true;
                    break;
                }

                std::thread::sleep(std::time::Duration::from_millis(50));
            }
            assert!(
                reached_age_limit,
                "backup directory did not age past threshold within test timeout"
            );

            let config = RefactoringConfig {
                backup_max_age_seconds: 1, // 1 second age limit
                backup_root: Some(backup_root),
                ..RefactoringConfig::default()
            };

            let mut engine = RefactoringEngine::with_config(config);

            // Run cleanup
            let result = engine.clear_history();
            assert!(result.is_ok());

            // The old backup should be cleaned up
            let cleanup_result = must(result);
            assert_eq!(cleanup_result.directories_removed, 1);

            // Verify directory is actually removed
            assert!(!old_backup.exists());
            // temp_dir cleanup is automatic
        }
3129
3130        #[test]
3131        #[serial]
3132        fn cleanup_test_space_reclaimed() {
3133            // AC5: Method returns count of backup directories removed and total disk space reclaimed
3134            use std::fs;
3135
3136            let temp_dir = must(tempfile::tempdir());
3137            let backup_root = temp_dir.path().to_path_buf();
3138
3139            // Create backup directory with files of known size
3140            let backup = backup_root.join("refactor_100_0");
3141            must(fs::create_dir_all(&backup));
3142
3143            let test_content = "sub test { print 'hello world'; }"; // 33 bytes
3144            must(fs::write(backup.join("file1.pl"), test_content));
3145            must(fs::write(backup.join("file2.pl"), test_content));
3146
3147            let config = RefactoringConfig {
3148                backup_root: Some(backup_root),
3149                max_backup_retention: 0, // Remove all
3150                ..RefactoringConfig::default()
3151            };
3152            let mut engine = RefactoringEngine::with_config(config);
3153
3154            // Clean up and verify space was reclaimed
3155            let result = must(engine.clear_history());
3156            assert_eq!(result.directories_removed, 1);
3157            assert_eq!(result.space_reclaimed, 66); // 33 * 2 bytes
3158
3159            // Verify directory is actually removed
3160            assert!(!backup.exists());
3161        }
3162
3163        #[test]
3164        #[serial]
3165        fn cleanup_test_only_removes_refactor_backups() {
3166            // AC8: Cleanup respects backup directory naming convention and only removes refactoring engine backups
3167            use std::fs;
3168
3169            let temp_dir = must(tempfile::tempdir());
3170            let backup_root = temp_dir.path().to_path_buf();
3171
3172            // Create valid refactor backup
3173            let refactor_backup = backup_root.join("refactor_100_0");
3174            must(fs::create_dir_all(&refactor_backup));
3175            must(fs::write(refactor_backup.join("file.pl"), "test"));
3176
3177            // Create non-refactor directory (should not be removed)
3178            let other_dir = backup_root.join("other_backup");
3179            must(fs::create_dir_all(&other_dir));
3180            must(fs::write(other_dir.join("file.pl"), "test"));
3181
3182            let config = RefactoringConfig {
3183                backup_root: Some(backup_root),
3184                max_backup_retention: 0, // Remove all
3185                ..RefactoringConfig::default()
3186            };
3187            let mut engine = RefactoringEngine::with_config(config);
3188            let result = must(engine.clear_history());
3189
3190            // Should only remove refactor backup, not other directory
3191            assert_eq!(result.directories_removed, 1);
3192            assert!(!refactor_backup.exists());
3193            assert!(other_dir.exists()); // Should still exist
3194            // temp_dir cleanup is automatic
3195        }
3196
3197        #[test]
3198        #[serial]
3199        fn cleanup_test_with_zero_retention_removes_all() {
3200            // AC2: When max_backup_retention is 0, all backups are removed
3201            use std::fs;
3202
3203            let temp_dir = must(tempfile::tempdir());
3204            let backup_root = temp_dir.path().to_path_buf();
3205
3206            // Create multiple backup directories
3207            for i in 0..3 {
3208                let backup = backup_root.join(format!("refactor_{}_0", i * 100));
3209                must(fs::create_dir_all(&backup));
3210                must(fs::write(backup.join("file.pl"), "test"));
3211            }
3212
3213            let config = RefactoringConfig {
3214                max_backup_retention: 0, // Remove all
3215                backup_max_age_seconds: 0,
3216                backup_root: Some(backup_root),
3217                ..RefactoringConfig::default()
3218            };
3219
3220            let mut engine = RefactoringEngine::with_config(config);
3221            let result = must(engine.clear_history());
3222
3223            // All backups should be removed
3224            assert_eq!(result.directories_removed, 3);
3225            // temp_dir cleanup is automatic
3226        }
3227
3228        #[test]
3229        #[serial]
3230        fn comprehensive_backup_cleanup_all_acs() {
3231            // Comprehensive test covering all ACs to avoid race conditions from multiple tests
3232            // AC1: Identifies all backup directories
3233            // AC2: Respects configurable retention period and age limits
3234            // AC3: Keeps recent backups when configured
3235            // AC4: Validates directory structure before deletion
3236            // AC5: Returns count of directories removed and space reclaimed
3237            // AC6: Errors don't prevent history clearing
3238            // AC7: Configuration options work
3239            // AC8: Only removes refactoring engine backups
3240            use std::fs;
3241            use std::thread;
3242            use std::time::Duration;
3243
3244            // Test AC4 & AC8: Validation and selective removal
3245            let temp_dir1 = must(tempfile::tempdir());
3246            let backup_root1 = temp_dir1.path().to_path_buf();
3247
3248            let valid_backup = backup_root1.join("refactor_test_1");
3249            let invalid_backup = backup_root1.join("other_backup");
3250            must(fs::create_dir_all(&valid_backup));
3251            must(fs::create_dir_all(&invalid_backup));
3252            must(fs::write(valid_backup.join("file.pl"), "test"));
3253            must(fs::write(invalid_backup.join("file.pl"), "test"));
3254
3255            let config1 = RefactoringConfig {
3256                backup_root: Some(backup_root1.clone()),
3257                max_backup_retention: 0, // Remove all for this test
3258                ..RefactoringConfig::default()
3259            };
3260            let engine = RefactoringEngine::with_config(config1.clone());
3261            assert!(must(engine.validate_backup_directory(&valid_backup)));
3262            assert!(!must(engine.validate_backup_directory(&invalid_backup)));
3263
3264            // Test AC1 & AC5: Identifies and removes with space calculation
3265            let mut engine2 = RefactoringEngine::with_config(config1);
3266            let result1 = must(engine2.clear_history());
3267            assert_eq!(result1.directories_removed, 1); // Only valid backup removed
3268            assert_eq!(result1.space_reclaimed, 4); // "test" = 4 bytes
3269            assert!(!valid_backup.exists());
3270            assert!(invalid_backup.exists()); // AC8: Other dir still exists
3271
3272            // Test AC2 & AC3: Retention count
3273            let temp_dir2 = must(tempfile::tempdir());
3274            let backup_root2 = temp_dir2.path().to_path_buf();
3275
3276            for i in 0..4 {
3277                let backup = backup_root2.join(format!("refactor_retention_{}", i));
3278                must(fs::create_dir_all(&backup));
3279                must(fs::write(backup.join("file.pl"), "x"));
3280                thread::sleep(Duration::from_millis(50));
3281            }
3282
3283            let config2 = RefactoringConfig {
3284                max_backup_retention: 2,
3285                backup_max_age_seconds: 0,
3286                backup_root: Some(backup_root2),
3287                ..RefactoringConfig::default()
3288            };
3289            let mut engine3 = RefactoringEngine::with_config(config2);
3290            let result2 = must(engine3.clear_history());
3291            assert_eq!(result2.directories_removed, 2); // Oldest 2 removed
3292
3293            // Test AC2: Age-based retention
3294            let temp_dir3 = must(tempfile::tempdir());
3295            let backup_root3 = temp_dir3.path().to_path_buf();
3296
3297            let old_backup = backup_root3.join("refactor_age_test");
3298            must(fs::create_dir_all(&old_backup));
3299            must(fs::write(old_backup.join("file.pl"), "old"));
3300
3301            let config3 = RefactoringConfig {
3302                backup_max_age_seconds: 1,
3303                max_backup_retention: 0,
3304                backup_root: Some(backup_root3),
3305                ..RefactoringConfig::default()
3306            };
3307            let mut engine4 = RefactoringEngine::with_config(config3);
3308            thread::sleep(Duration::from_secs(2));
3309
3310            let result3 = must(engine4.clear_history());
3311            assert_eq!(result3.directories_removed, 1);
3312            assert!(!old_backup.exists());
3313            // temp_dir cleanup is automatic
3314        }
3315    }
3316
3317    // --- Inline all_occurrences routing tests ---
3318
3319    /// AC1: When all_occurrences=true, perform_inline routes to inline_variable_all, which
3320    /// uses the workspace index to find references across ALL indexed files — not just those
3321    /// explicitly passed in `files`. path_b is indexed but intentionally omitted from `files`;
3322    /// the workspace lookup must still find and inline the usage there.
3323    #[cfg(feature = "workspace_refactor")]
3324    #[test]
3325    fn test_inline_all_occurrences_routes_to_workspace_lookup() {
3326        let temp_dir = must(tempfile::tempdir());
3327        let path_a = temp_dir.path().join("a.pl");
3328        let path_b = temp_dir.path().join("b.pl");
3329
3330        // path_a holds the definition; path_b only references $const
3331        let content_a = "my $const = 42;\nprint $const;\n";
3332        let content_b = "print $const;\n";
3333
3334        must(std::fs::write(&path_a, content_a));
3335        must(std::fs::write(&path_b, content_b));
3336
3337        let mut engine = RefactoringEngine::new();
3338        engine.config.safe_mode = false;
3339
3340        // Index both files — workspace index must know about path_b for cross-file lookup
3341        must(engine.index_file(&path_a, content_a));
3342        must(engine.index_file(&path_b, content_b));
3343
3344        // Pass only path_a (definition file); path_b is omitted from `files` intentionally
3345        let result = must(engine.refactor(
3346            RefactoringType::Inline { symbol_name: "$const".to_string(), all_occurrences: true },
3347            vec![path_a.clone()],
3348        ));
3349
3350        assert!(result.success, "expected success, warnings: {:?}", result.warnings);
3351
3352        // path_b must have its $const replaced — inline_variable_all found it via workspace index
3353        let updated_b = must(std::fs::read_to_string(&path_b));
3354        assert!(
3355            !updated_b.contains("$const"),
3356            "expected $const to be inlined in path_b, but found: {:?}",
3357            updated_b
3358        );
3359    }
3360
3361    /// When all_occurrences=false, only the single-file path is taken.
3362    /// The definition file (path_a) contains the symbol; inlining within it should succeed.
3363    #[cfg(feature = "workspace_refactor")]
3364    #[test]
3365    fn test_inline_single_occurrence_stops_after_first_file() {
3366        let temp_dir = must(tempfile::tempdir());
3367        let path_a = temp_dir.path().join("a.pl");
3368
3369        let content_a = "my $x = 99;\nprint $x;\n";
3370        must(std::fs::write(&path_a, content_a));
3371
3372        let mut engine = RefactoringEngine::new();
3373        engine.config.safe_mode = false;
3374
3375        must(engine.index_file(&path_a, content_a));
3376
3377        let result = must(engine.refactor(
3378            RefactoringType::Inline { symbol_name: "$x".to_string(), all_occurrences: false },
3379            vec![path_a.clone()],
3380        ));
3381
3382        assert!(result.success, "expected success, warnings: {:?}", result.warnings);
3383        assert_eq!(result.files_modified, 1);
3384    }
3385}