1use anyhow::Result;
2use std::collections::HashSet;
3use std::path::{Path, PathBuf};
4
5mod code_docstring;
7mod deps;
8mod deps_check;
9mod detector;
10mod dir_context;
11mod extractor;
12mod format_handlers;
13mod generator;
14mod geocoding;
15mod image_ocr;
16mod key_phrases;
17mod location_timestamp;
18mod metadata_cache;
19mod pdf_content;
20mod rename_history;
21mod renamer;
22mod scorer;
23mod series_detector;
24mod stem_analyzer;
25mod text_content;
26mod video_ocr;
27
28pub use deps_check::{detect_needed_dependencies, Dependency, DependencyNeeds};
30pub use detector::FileCategory;
31pub use rename_history::{RenameHistory, RenameOperation};
32
/// Behavior switches for a [`RenameEngine`] run.
#[derive(Debug, Clone)]
pub struct RenameConfig {
    /// Skip files and directories whose names start with a dot during scanning.
    pub skip_hidden: bool,
    /// Include location information in metadata extraction (forwarded to the
    /// extractor; presumably feeds location-based naming — confirm in `extractor`).
    pub include_location: bool,
    /// Include timestamp information in metadata extraction.
    pub include_timestamp: bool,
    /// Enable multi-frame video analysis (forwarded to the extractor).
    pub multiframe_video: bool,
    /// Enable geocoding of extracted coordinates (forwarded to the extractor).
    pub geocode: bool,
    /// Persist extracted metadata in an on-disk cache between runs.
    pub enable_cache: bool,
    /// Cache file location; `None` means `<scanned dir>/.nameback_cache.json`.
    pub cache_path: Option<PathBuf>,
}
52
53impl Default for RenameConfig {
54 fn default() -> Self {
55 Self {
56 skip_hidden: false,
57 include_location: true, include_timestamp: true, multiframe_video: true, geocode: true, enable_cache: true, cache_path: None, }
64 }
65}
66
/// Result of analyzing a single file: what it is called now and what it
/// could be renamed to.
#[derive(Debug, Clone)]
pub struct FileAnalysis {
    /// Full path of the file as found on disk.
    pub original_path: PathBuf,
    /// Current file name; `"unknown"` when the name is not valid UTF-8.
    pub original_name: String,
    /// Suggested replacement name, or `None` when no better name was derived.
    pub proposed_name: Option<String>,
    /// Detected category of the file's content.
    pub file_category: FileCategory,
}
79
/// Outcome of one rename attempt (real or dry-run).
#[derive(Debug, Clone)]
pub struct RenameResult {
    /// Path the file had before the rename attempt.
    pub original_path: PathBuf,
    /// Name the file was (or would have been) renamed to.
    pub new_name: String,
    /// Whether the rename attempt succeeded.
    pub success: bool,
    /// Error message when `success` is `false`; `None` on success.
    pub error: Option<String>,
}
92
/// Orchestrates scanning, per-file analysis, and renaming of a directory tree.
pub struct RenameEngine {
    /// Behavior switches for scanning, metadata extraction, and caching.
    config: RenameConfig,
}
97
98impl RenameEngine {
99 pub fn new(config: RenameConfig) -> Self {
101 Self { config }
102 }
103
104 pub fn default() -> Self {
106 Self::new(RenameConfig::default())
107 }
108
109 pub fn analyze_directory(&self, directory: &Path) -> Result<Vec<FileAnalysis>> {
112 let mut analyses = Vec::new();
113
114 let files = self.scan_files(directory)?;
116
117 let cache_path = self.config.cache_path.clone().unwrap_or_else(|| {
119 directory.join(".nameback_cache.json")
120 });
121
122 let mut cache = if self.config.enable_cache {
123 metadata_cache::MetadataCache::load(cache_path.clone()).unwrap_or_else(|_| {
124 log::debug!("Failed to load cache, creating new one");
125 metadata_cache::MetadataCache::new(cache_path.clone())
126 })
127 } else {
128 metadata_cache::MetadataCache::new(cache_path.clone())
129 };
130
131 if self.config.enable_cache {
133 cache.cleanup_stale_entries(&files);
134 }
135
136 let series_list = series_detector::detect_series(&files);
138 log::info!("Detected {} file series", series_list.len());
139
140 let mut file_series_map = std::collections::HashMap::new();
142 for series in &series_list {
143 for (file_path, _) in &series.files {
144 file_series_map.insert(file_path.clone(), series.clone());
145 }
146 }
147
148 let mut existing_names = HashSet::new();
150 for file_path in &files {
151 if let Some(filename) = file_path.file_name() {
152 if let Some(name) = filename.to_str() {
153 existing_names.insert(name.to_string());
154 }
155 }
156 }
157
158 use rayon::prelude::*;
160 use std::sync::Mutex;
161
162 let existing_names = Mutex::new(existing_names);
164 let cache = Mutex::new(cache);
165
166 analyses = files
168 .par_iter()
169 .filter_map(|file_path| {
170 if self.config.enable_cache {
172 let cache_guard = cache.lock().unwrap();
173 if let Ok(true) = cache_guard.has_valid_entry(file_path) {
174 if let Some(entry) = cache_guard.get(file_path) {
175 log::debug!("Cache hit for {}", file_path.display());
176 let category = match entry.category.as_str() {
177 "Image" => FileCategory::Image,
178 "Document" => FileCategory::Document,
179 "Audio" => FileCategory::Audio,
180 "Video" => FileCategory::Video,
181 "Email" => FileCategory::Email,
182 "Web" => FileCategory::Web,
183 "Archive" => FileCategory::Archive,
184 "SourceCode" => FileCategory::SourceCode,
185 _ => FileCategory::Unknown,
186 };
187
188 let original_name = file_path
189 .file_name()
190 .and_then(|n| n.to_str())
191 .unwrap_or("unknown")
192 .to_string();
193
194 return Some(FileAnalysis {
195 original_path: file_path.clone(),
196 original_name,
197 proposed_name: entry.proposed_name.clone(),
198 file_category: category,
199 });
200 }
201 }
202 drop(cache_guard); }
204
205 match self.analyze_file_parallel(file_path, &existing_names) {
207 Ok(mut analysis) => {
208 if let Some(series) = file_series_map.get(file_path) {
210 if let Some(proposed_name) = &analysis.proposed_name {
212 let base_name = if let Some(pos) = proposed_name.rfind('.') {
214 &proposed_name[..pos]
215 } else {
216 proposed_name
217 };
218
219 if let Some(series_name) = series_detector::apply_series_naming(
221 series,
222 file_path,
223 base_name,
224 ) {
225 analysis.proposed_name = Some(series_name);
226 }
227 }
228 }
229
230 if self.config.enable_cache {
232 let mut cache_guard = cache.lock().unwrap();
233 let category_str = match analysis.file_category {
234 FileCategory::Image => "Image",
235 FileCategory::Document => "Document",
236 FileCategory::Audio => "Audio",
237 FileCategory::Video => "Video",
238 FileCategory::Email => "Email",
239 FileCategory::Web => "Web",
240 FileCategory::Archive => "Archive",
241 FileCategory::SourceCode => "SourceCode",
242 FileCategory::Unknown => "Unknown",
243 };
244
245 if let Err(e) = cache_guard.insert(
246 file_path,
247 analysis.proposed_name.clone(),
248 category_str,
249 ) {
250 log::warn!("Failed to cache entry for {}: {}", file_path.display(), e);
251 }
252 }
253
254 Some(analysis)
255 },
256 Err(e) => {
257 log::warn!("Failed to analyze {}: {}", file_path.display(), e);
258 if let Some(name) = file_path.file_name().and_then(|n| n.to_str()) {
260 Some(FileAnalysis {
261 original_path: file_path.clone(),
262 original_name: name.to_string(),
263 proposed_name: None,
264 file_category: FileCategory::Unknown,
265 })
266 } else {
267 None
268 }
269 }
270 }
271 })
272 .collect();
273
274 if self.config.enable_cache {
276 let cache_guard = cache.lock().unwrap();
277 if let Err(e) = cache_guard.save() {
278 log::warn!("Failed to save cache: {}", e);
279 } else {
280 let stats = cache_guard.stats();
281 log::info!(
282 "Cached {} entries ({} bytes)",
283 stats.total_entries,
284 stats.cache_size_bytes
285 );
286 }
287 }
288
289 Ok(analyses)
290 }
291
292 pub fn rename_files(&self, analyses: &[FileAnalysis], dry_run: bool) -> Vec<RenameResult> {
295 self.rename_files_with_history(analyses, dry_run, None)
296 }
297
298 pub fn rename_files_with_history(
301 &self,
302 analyses: &[FileAnalysis],
303 dry_run: bool,
304 mut history: Option<&mut RenameHistory>,
305 ) -> Vec<RenameResult> {
306 let mut results = Vec::new();
307
308 for analysis in analyses {
309 if let Some(new_name) = &analysis.proposed_name {
310 match renamer::rename_file(&analysis.original_path, new_name, dry_run) {
311 Ok(new_path) => {
312 if let Some(hist) = history.as_deref_mut() {
314 if !dry_run {
315 let operation = RenameOperation::new(
316 analysis.original_path.clone(),
317 new_path.clone(),
318 );
319 hist.add(operation);
320 }
321 }
322
323 results.push(RenameResult {
324 original_path: analysis.original_path.clone(),
325 new_name: new_name.clone(),
326 success: true,
327 error: None,
328 });
329 }
330 Err(e) => {
331 results.push(RenameResult {
332 original_path: analysis.original_path.clone(),
333 new_name: new_name.clone(),
334 success: false,
335 error: Some(e.to_string()),
336 });
337 }
338 }
339 }
340 }
341
342 results
343 }
344
345 pub fn process_directory(&self, directory: &Path, dry_run: bool) -> Result<Vec<RenameResult>> {
347 let analyses = self.analyze_directory(directory)?;
348 Ok(self.rename_files(&analyses, dry_run))
349 }
350
351 fn scan_files(&self, directory: &Path) -> Result<Vec<PathBuf>> {
354 use walkdir::WalkDir;
355
356 let mut files = Vec::new();
357
358 for entry in WalkDir::new(directory)
359 .follow_links(false)
360 .into_iter()
361 .filter_entry(|e| {
362 if self.config.skip_hidden {
363 !e.file_name()
364 .to_str()
365 .map(|s| s.starts_with('.'))
366 .unwrap_or(false)
367 } else {
368 true
369 }
370 })
371 {
372 match entry {
373 Ok(entry) => {
374 if entry.file_type().is_file() {
375 files.push(entry.path().to_path_buf());
376 }
377 }
378 Err(e) => {
379 log::warn!("Failed to access entry: {}", e);
380 }
381 }
382 }
383
384 Ok(files)
385 }
386
387 fn analyze_file(
388 &self,
389 file_path: &Path,
390 existing_names: &mut HashSet<String>,
391 ) -> Result<FileAnalysis> {
392 let file_category = detector::detect_file_type(file_path)?;
394
395 let original_name = file_path
396 .file_name()
397 .and_then(|n| n.to_str())
398 .unwrap_or("unknown")
399 .to_string();
400
401 if file_category == FileCategory::Unknown {
403 return Ok(FileAnalysis {
404 original_path: file_path.to_path_buf(),
405 original_name,
406 proposed_name: None,
407 file_category,
408 });
409 }
410
411 let metadata = match extractor::extract_metadata(file_path, &self.config) {
413 Ok(m) => m,
414 Err(_) => {
415 return Ok(FileAnalysis {
416 original_path: file_path.to_path_buf(),
417 original_name,
418 proposed_name: None,
419 file_category,
420 });
421 }
422 };
423
424 let candidate_name = metadata.extract_name(&file_category, file_path);
426
427 let proposed_name = candidate_name.map(|name| {
428 let extension = file_path.extension();
429 generator::generate_filename_with_metadata(&name, extension, existing_names, Some(&metadata))
430 });
431
432 Ok(FileAnalysis {
433 original_path: file_path.to_path_buf(),
434 original_name,
435 proposed_name,
436 file_category,
437 })
438 }
439
440 fn analyze_file_parallel(
442 &self,
443 file_path: &Path,
444 existing_names: &std::sync::Mutex<HashSet<String>>,
445 ) -> Result<FileAnalysis> {
446 let file_category = detector::detect_file_type(file_path)?;
448
449 let original_name = file_path
450 .file_name()
451 .and_then(|n| n.to_str())
452 .unwrap_or("unknown")
453 .to_string();
454
455 if file_category == FileCategory::Unknown {
457 return Ok(FileAnalysis {
458 original_path: file_path.to_path_buf(),
459 original_name,
460 proposed_name: None,
461 file_category,
462 });
463 }
464
465 let metadata = match extractor::extract_metadata(file_path, &self.config) {
467 Ok(m) => m,
468 Err(_) => {
469 return Ok(FileAnalysis {
470 original_path: file_path.to_path_buf(),
471 original_name,
472 proposed_name: None,
473 file_category,
474 });
475 }
476 };
477
478 let candidate_name = metadata.extract_name(&file_category, file_path);
480
481 let proposed_name = candidate_name.map(|name| {
482 let extension = file_path.extension();
483 let mut names = existing_names.lock().unwrap();
485 generator::generate_filename_with_metadata(&name, extension, &mut names, Some(&metadata))
486 });
487
488 Ok(FileAnalysis {
489 original_path: file_path.to_path_buf(),
490 original_name,
491 proposed_name,
492 file_category,
493 })
494 }
495}
496
/// Reports the status of the external tool dependencies (delegates to
/// `deps::print_dependency_status`).
///
/// Always returns `Ok(())`; the `Result` keeps the signature consistent
/// with the other dependency helpers.
pub fn check_dependencies() -> Result<()> {
    deps::print_dependency_status();
    Ok(())
}
502
/// Runs the dependency installer, converting its error into `anyhow::Error`.
///
/// # Errors
/// Returns whatever error `deps::run_installer` reports, wrapped via `anyhow!`.
pub fn install_dependencies() -> Result<()> {
    deps::run_installer().map_err(|e| anyhow::anyhow!(e))
}
507
/// Runs the dependency installer, forwarding an optional progress callback
/// to `deps::run_installer_with_progress`.
///
/// # Errors
/// Returns whatever error the installer reports, wrapped via `anyhow!`.
pub fn install_dependencies_with_progress(
    progress: Option<deps::ProgressCallback>,
) -> Result<()> {
    deps::run_installer_with_progress(progress).map_err(|e| anyhow::anyhow!(e))
}
514
515pub use deps::ProgressCallback;