1use anyhow::Result;
2use std::collections::HashSet;
3use std::path::{Path, PathBuf};
4
5mod code_docstring;
7mod deps;
8mod deps_check;
9mod detector;
10mod dir_context;
11mod extractor;
12mod format_handlers;
13mod generator;
14mod geocoding;
15mod image_ocr;
16mod key_phrases;
17mod location_timestamp;
18mod metadata_cache;
19mod pdf_content;
20mod rename_history;
21mod renamer;
22mod scorer;
23mod series_detector;
24mod stem_analyzer;
25mod text_content;
26mod video_ocr;
27
28pub use deps_check::{detect_needed_dependencies, Dependency, DependencyNeeds};
30pub use detector::FileCategory;
31pub use rename_history::{RenameHistory, RenameOperation};
32
/// Configuration controlling how the engine scans, analyzes, and renames files.
#[derive(Debug, Clone)]
pub struct RenameConfig {
    // Skip hidden entries (names starting with '.') while scanning directories.
    pub skip_hidden: bool,
    // Include location information when building names — forwarded to the
    // metadata extractor; exact effect depends on the file type (verify in `extractor`).
    pub include_location: bool,
    // Include timestamp information when building names — forwarded to the
    // extractor; TODO confirm exact formatting in `extractor`/`generator`.
    pub include_timestamp: bool,
    // Presumably samples multiple frames when OCR-ing videos — assumption
    // from the field name; verify against the `video_ocr` module.
    pub multiframe_video: bool,
    // Presumably reverse-geocodes GPS coordinates into place names via the
    // `geocoding` module — verify there.
    pub geocode: bool,
    // Persist analysis results between runs so unchanged files are skipped.
    pub enable_cache: bool,
    // Cache file location; when `None`, defaults to `.nameback_cache.json`
    // inside the scanned directory.
    pub cache_path: Option<PathBuf>,
}
52
53impl Default for RenameConfig {
54 fn default() -> Self {
55 Self {
56 skip_hidden: false,
57 include_location: true, include_timestamp: true, multiframe_video: true, geocode: true, enable_cache: true, cache_path: None, }
64 }
65}
66
/// The outcome of analyzing a single file.
#[derive(Debug, Clone)]
pub struct FileAnalysis {
    // Full path of the file as found on disk.
    pub original_path: PathBuf,
    // The file's current name ("unknown" when the name is not valid UTF-8).
    pub original_name: String,
    // Suggested replacement name, or `None` when no better name was found.
    pub proposed_name: Option<String>,
    // Detected category (image, document, audio, ...).
    pub file_category: FileCategory,
}
79
/// The outcome of attempting to rename one file.
#[derive(Debug, Clone)]
pub struct RenameResult {
    // Path of the file before the rename attempt.
    pub original_path: PathBuf,
    // Name the engine tried to apply.
    pub new_name: String,
    // Whether the rename (or dry-run check) succeeded.
    pub success: bool,
    // Human-readable error description when `success` is false.
    pub error: Option<String>,
}
92
/// Orchestrates scanning, analysis, and renaming of files in a directory.
pub struct RenameEngine {
    // Behavior toggles supplied at construction time.
    config: RenameConfig,
}
97
98impl RenameEngine {
99 pub fn new(config: RenameConfig) -> Self {
101 Self { config }
102 }
103
104 pub fn with_defaults() -> Self {
106 Self::new(RenameConfig::default())
107 }
108
109 pub fn analyze_directory(&self, directory: &Path) -> Result<Vec<FileAnalysis>> {
112 let analyses;
113
114 let files = self.scan_files(directory)?;
116
117 let cache_path = self.config.cache_path.clone().unwrap_or_else(|| {
119 directory.join(".nameback_cache.json")
120 });
121
122 let mut cache = if self.config.enable_cache {
123 metadata_cache::MetadataCache::load(cache_path.clone()).unwrap_or_else(|_| {
124 log::debug!("Failed to load cache, creating new one");
125 metadata_cache::MetadataCache::new(cache_path.clone())
126 })
127 } else {
128 metadata_cache::MetadataCache::new(cache_path.clone())
129 };
130
131 if self.config.enable_cache {
133 cache.cleanup_stale_entries(&files);
134 }
135
136 let series_list = series_detector::detect_series(&files);
138 log::info!("Detected {} file series", series_list.len());
139
140 let mut file_series_map = std::collections::HashMap::new();
142 for series in &series_list {
143 for (file_path, _) in &series.files {
144 file_series_map.insert(file_path.clone(), series.clone());
145 }
146 }
147
148 let mut existing_names = HashSet::new();
150 for file_path in &files {
151 if let Some(filename) = file_path.file_name() {
152 if let Some(name) = filename.to_str() {
153 existing_names.insert(name.to_string());
154 }
155 }
156 }
157
158 use rayon::prelude::*;
160 use std::sync::Mutex;
161
162 let existing_names = Mutex::new(existing_names);
164 let cache = Mutex::new(cache);
165
166 analyses = files
168 .par_iter()
169 .filter_map(|file_path| {
170 if self.config.enable_cache {
172 let cache_guard = cache.lock().unwrap();
173 if let Ok(true) = cache_guard.has_valid_entry(file_path) {
174 if let Some(entry) = cache_guard.get(file_path) {
175 log::debug!("Cache hit for {}", file_path.display());
176 let category = match entry.category.as_str() {
177 "Image" => FileCategory::Image,
178 "Document" => FileCategory::Document,
179 "Audio" => FileCategory::Audio,
180 "Video" => FileCategory::Video,
181 "Email" => FileCategory::Email,
182 "Web" => FileCategory::Web,
183 "Archive" => FileCategory::Archive,
184 "SourceCode" => FileCategory::SourceCode,
185 _ => FileCategory::Unknown,
186 };
187
188 let original_name = file_path
189 .file_name()
190 .and_then(|n| n.to_str())
191 .unwrap_or("unknown")
192 .to_string();
193
194 return Some(FileAnalysis {
195 original_path: file_path.clone(),
196 original_name,
197 proposed_name: entry.proposed_name.clone(),
198 file_category: category,
199 });
200 }
201 }
202 drop(cache_guard); }
204
205 match self.analyze_file_parallel(file_path, &existing_names) {
207 Ok(mut analysis) => {
208 if let Some(series) = file_series_map.get(file_path) {
210 if let Some(proposed_name) = &analysis.proposed_name {
212 let base_name = if let Some(pos) = proposed_name.rfind('.') {
214 &proposed_name[..pos]
215 } else {
216 proposed_name
217 };
218
219 if let Some(series_name) = series_detector::apply_series_naming(
221 series,
222 file_path,
223 base_name,
224 ) {
225 analysis.proposed_name = Some(series_name);
226 }
227 }
228 }
229
230 if self.config.enable_cache {
232 let mut cache_guard = cache.lock().unwrap();
233 let category_str = match analysis.file_category {
234 FileCategory::Image => "Image",
235 FileCategory::Document => "Document",
236 FileCategory::Audio => "Audio",
237 FileCategory::Video => "Video",
238 FileCategory::Email => "Email",
239 FileCategory::Web => "Web",
240 FileCategory::Archive => "Archive",
241 FileCategory::SourceCode => "SourceCode",
242 FileCategory::Unknown => "Unknown",
243 };
244
245 if let Err(e) = cache_guard.insert(
246 file_path,
247 analysis.proposed_name.clone(),
248 category_str,
249 ) {
250 log::warn!("Failed to cache entry for {}: {}", file_path.display(), e);
251 }
252 }
253
254 Some(analysis)
255 },
256 Err(e) => {
257 log::warn!("Failed to analyze {}: {}", file_path.display(), e);
258 file_path.file_name().and_then(|n| n.to_str()).map(|name| FileAnalysis {
260 original_path: file_path.clone(),
261 original_name: name.to_string(),
262 proposed_name: None,
263 file_category: FileCategory::Unknown,
264 })
265 }
266 }
267 })
268 .collect();
269
270 if self.config.enable_cache {
272 let cache_guard = cache.lock().unwrap();
273 if let Err(e) = cache_guard.save() {
274 log::warn!("Failed to save cache: {}", e);
275 } else {
276 let stats = cache_guard.stats();
277 log::info!(
278 "Cached {} entries ({} bytes)",
279 stats.total_entries,
280 stats.cache_size_bytes
281 );
282 }
283 }
284
285 Ok(analyses)
286 }
287
288 pub fn rename_files(&self, analyses: &[FileAnalysis], dry_run: bool) -> Vec<RenameResult> {
291 self.rename_files_with_history(analyses, dry_run, None)
292 }
293
294 pub fn rename_files_with_history(
297 &self,
298 analyses: &[FileAnalysis],
299 dry_run: bool,
300 mut history: Option<&mut RenameHistory>,
301 ) -> Vec<RenameResult> {
302 let mut results = Vec::new();
303
304 for analysis in analyses {
305 if let Some(new_name) = &analysis.proposed_name {
306 match renamer::rename_file(&analysis.original_path, new_name, dry_run) {
307 Ok(new_path) => {
308 if let Some(hist) = history.as_deref_mut() {
310 if !dry_run {
311 let operation = RenameOperation::new(
312 analysis.original_path.clone(),
313 new_path.clone(),
314 );
315 hist.add(operation);
316 }
317 }
318
319 results.push(RenameResult {
320 original_path: analysis.original_path.clone(),
321 new_name: new_name.clone(),
322 success: true,
323 error: None,
324 });
325 }
326 Err(e) => {
327 results.push(RenameResult {
328 original_path: analysis.original_path.clone(),
329 new_name: new_name.clone(),
330 success: false,
331 error: Some(e.to_string()),
332 });
333 }
334 }
335 }
336 }
337
338 results
339 }
340
341 pub fn process_directory(&self, directory: &Path, dry_run: bool) -> Result<Vec<RenameResult>> {
343 let analyses = self.analyze_directory(directory)?;
344 Ok(self.rename_files(&analyses, dry_run))
345 }
346
347 fn scan_files(&self, directory: &Path) -> Result<Vec<PathBuf>> {
350 use walkdir::WalkDir;
351
352 let mut files = Vec::new();
353
354 for entry in WalkDir::new(directory)
355 .follow_links(false)
356 .into_iter()
357 .filter_entry(|e| {
358 let filename = e.file_name().to_str().unwrap_or("");
359
360 if filename == ".nameback_cache.json" {
362 return false;
363 }
364
365 if self.config.skip_hidden && filename.starts_with('.') {
367 return false;
368 }
369
370 true
371 })
372 {
373 match entry {
374 Ok(entry) => {
375 if entry.file_type().is_file() {
376 files.push(entry.path().to_path_buf());
377 }
378 }
379 Err(e) => {
380 log::warn!("Failed to access entry: {}", e);
381 }
382 }
383 }
384
385 Ok(files)
386 }
387
388
389 fn analyze_file_parallel(
391 &self,
392 file_path: &Path,
393 existing_names: &std::sync::Mutex<HashSet<String>>,
394 ) -> Result<FileAnalysis> {
395 let file_category = detector::detect_file_type(file_path)?;
397
398 let original_name = file_path
399 .file_name()
400 .and_then(|n| n.to_str())
401 .unwrap_or("unknown")
402 .to_string();
403
404 if file_category == FileCategory::Unknown {
406 return Ok(FileAnalysis {
407 original_path: file_path.to_path_buf(),
408 original_name,
409 proposed_name: None,
410 file_category,
411 });
412 }
413
414 let metadata = match extractor::extract_metadata(file_path, &self.config) {
416 Ok(m) => m,
417 Err(_) => {
418 return Ok(FileAnalysis {
419 original_path: file_path.to_path_buf(),
420 original_name,
421 proposed_name: None,
422 file_category,
423 });
424 }
425 };
426
427 let candidate_name = metadata.extract_name(&file_category, file_path);
429
430 let proposed_name = candidate_name.map(|name| {
431 let extension = file_path.extension();
432 let mut names = existing_names.lock().unwrap();
434 generator::generate_filename_with_metadata(&name, extension, &mut names, Some(&metadata))
435 });
436
437 Ok(FileAnalysis {
438 original_path: file_path.to_path_buf(),
439 original_name,
440 proposed_name,
441 file_category,
442 })
443 }
444}
445
446pub fn check_dependencies() -> Result<()> {
448 deps::print_dependency_status();
449 Ok(())
450}
451
452pub fn install_dependencies() -> Result<()> {
454 deps::run_installer().map_err(|e| anyhow::anyhow!(e))
455}
456
457pub fn install_dependencies_with_progress(
459 progress: Option<deps::ProgressCallback>,
460) -> Result<()> {
461 deps::run_installer_with_progress(progress).map_err(|e| anyhow::anyhow!(e))
462}
463
464pub use deps::ProgressCallback;