heroforge-core 0.2.2

Pure Rust core library for reading and writing Fossil SCM repositories
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
//! High-level filesystem interface for Heroforge repositories.
//!
//! `FsInterface` provides a filesystem-like API with staging directory support.
//! All writes go to staging first, with background commits to the database.
//!
//! # Architecture
//!
//! ```text
//! ┌─────────────────────────────────────────────────────────────────┐
//! │                      FsInterface (sync API)                     │
//! │  - RwLock<StagingState>                                         │
//! │  - Author name (set at initialization)                          │
//! │  - All read/write operations acquire appropriate locks          │
//! └─────────────────────────────────────────────────────────────────┘
//!                               │
//!           ┌───────────────────┴───────────────────┐
//!           │                                       │
//!           ▼                                       ▼
//! ┌─────────────────────┐                 ┌─────────────────────────┐
//! │   Staging Directory │                 │    Commit Thread        │
//! │                     │                 │    (background)         │
//! └─────────────────────┘                 └─────────────────────────┘
//! ```
//!
//! # Example
//!
//! ```no_run
//! use heroforge_core::Repository;
//! use heroforge_core::fs::FsInterface;
//! use std::sync::Arc;
//!
//! let repo = Arc::new(Repository::open_rw("project.forge")?);
//! let fs = FsInterface::new(repo, "developer@example.com")?;
//!
//! // Write goes to staging (fast)
//! fs.write_file("config.json", b"{}")?;
//!
//! // Read checks staging first, then database
//! let content = fs.read_file("config.json")?;
//!
//! // Partial update (read-modify-write pattern)
//! fs.write_at("data.bin", 100, b"updated")?;
//!
//! // Force immediate commit (normally auto-commits every 1 minute)
//! fs.commit()?;
//! # Ok::<(), heroforge_core::FossilError>(())
//! ```

use std::path::PathBuf;
use std::sync::Arc;

use crate::fs::commit_thread::{CommitConfig, CommitTimer, commit_now};
use crate::fs::errors::{FsError, FsResult};
use crate::fs::operations::{DirectoryEntry, FileKind, FileMetadata, FilePermissions, FindResults};
use crate::fs::staging::{MAX_FILE_SIZE, Staging};
use crate::repo::Repository;

/// Status of the FsInterface.
///
/// Reported by [`FsInterface::status`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FsInterfaceStatus {
    /// Active and accepting operations
    Active,
    /// Commit in progress
    Committing,
    /// Closed/disposed
    Closed,
}

/// High-level filesystem interface with staging support.
///
/// This is the recommended way to interact with Heroforge repositories for
/// filesystem-like operations. It provides:
///
/// - Fast writes via staging directory
/// - Layered reads (staging → database)
/// - Automatic background commits
/// - Partial file updates (read-modify-write)
/// - Thread-safe operations via RwLock
///
/// Dropping the interface performs a best-effort final commit and stops the
/// background commit timer (see the `Drop` impl).
pub struct FsInterface {
    /// Reference to the underlying repository
    repo: Arc<Repository>,

    /// Staging state; accessed through its own read/write lock methods
    staging: Staging,

    /// Background commit timer; `None` when created via `without_timer`
    commit_timer: Option<CommitTimer>,

    /// Current status
    status: FsInterfaceStatus,

    /// Current branch name (initialized to "trunk")
    branch: String,
}

impl FsInterface {
    /// Create a new `FsInterface` for a repository.
    ///
    /// Uses the default [`CommitConfig`] for the background commit timer.
    ///
    /// # Arguments
    ///
    /// * `repo` - The repository to operate on
    /// * `author` - Author name recorded on every commit made through this interface
    ///
    /// # Example
    ///
    /// ```no_run
    /// use heroforge_core::Repository;
    /// use heroforge_core::fs::FsInterface;
    /// use std::sync::Arc;
    ///
    /// let repo = Arc::new(Repository::open_rw("project.forge")?);
    /// let fs = FsInterface::new(repo, "developer@example.com")?;
    /// # Ok::<(), heroforge_core::FossilError>(())
    /// ```
    pub fn new(repo: Arc<Repository>, author: &str) -> FsResult<Self> {
        let config = CommitConfig::default();
        Self::with_config(repo, author, config)
    }

    /// Create a new FsInterface with a custom commit-timer configuration.
    pub fn with_config(
        repo: Arc<Repository>,
        author: &str,
        config: CommitConfig,
    ) -> FsResult<Self> {
        // Staging lives in a temp directory keyed by the repository's
        // project code (see get_staging_dir).
        let staging = Staging::new(Self::get_staging_dir(&repo)?, author.to_string())?;

        // Spawn the background commit thread right away.
        let timer = CommitTimer::start(config);

        Ok(Self {
            repo,
            staging,
            commit_timer: Some(timer),
            status: FsInterfaceStatus::Active,
            branch: String::from("trunk"),
        })
    }

    /// Create a new FsInterface with no background commit timer.
    ///
    /// Useful for testing, or when commits should only ever happen through
    /// explicit `commit()` calls.
    pub fn without_timer(repo: Arc<Repository>, author: &str) -> FsResult<Self> {
        let dir = Self::get_staging_dir(&repo)?;

        Ok(Self {
            staging: Staging::new(dir, author.to_string())?,
            repo,
            commit_timer: None,
            status: FsInterfaceStatus::Active,
            branch: String::from("trunk"),
        })
    }

    /// Get the staging directory path for a repository.
    ///
    /// The directory lives under the system temp dir and is keyed by a
    /// short prefix of the project code, so distinct repositories get
    /// distinct staging areas.
    fn get_staging_dir(repo: &Repository) -> FsResult<PathBuf> {
        // Use project code to create unique staging directory
        let project_code = repo
            .project_code()
            .map_err(|e| FsError::DatabaseError(format!("Failed to get project code: {}", e)))?;

        // Take at most the first 8 *characters* of the project code.
        // The previous byte-slice (`&project_code[..8]`) would panic if
        // byte index 8 fell inside a multi-byte UTF-8 sequence; iterating
        // chars() is always boundary-safe and identical for ASCII codes.
        let short: String = project_code.chars().take(8).collect();

        Ok(std::env::temp_dir().join("heroforge-staging").join(short))
    }

    /// Current lifecycle status of this interface.
    pub fn status(&self) -> FsInterfaceStatus {
        self.status
    }

    /// Author name recorded on commits made through this interface.
    pub fn author(&self) -> String {
        let state = self.staging.read();
        state.author().to_owned()
    }

    /// Name of the branch this interface currently targets.
    pub fn branch(&self) -> &str {
        self.branch.as_str()
    }

    /// Returns true when the staging area holds uncommitted changes.
    pub fn has_changes(&self) -> bool {
        let state = self.staging.read();
        state.is_dirty()
    }

    // ========================================================================
    // Read Operations (check staging first, then database)
    // ========================================================================

    /// Check if a path exists (in staging or database).
    ///
    /// A path staged for deletion is reported as non-existent even if the
    /// database still holds it.
    pub fn exists(&self, path: &str) -> FsResult<bool> {
        self.validate_path(path)?;

        let state = self.staging.read();

        // Staging shadows the database: a staged entry decides the answer.
        if state.has_file(path) {
            Ok(!state.is_deleted(path))
        } else {
            self.exists_in_db(path)
        }
    }

    /// Check if a path is a directory.
    ///
    /// A path counts as a directory when at least one live file exists
    /// beneath it, either in staging or in the database.
    pub fn is_dir(&self, path: &str) -> FsResult<bool> {
        self.validate_path(path)?;

        // Build the "children of path" prefix, ensuring a trailing slash.
        let prefix = if path.ends_with('/') {
            path.to_string()
        } else {
            format!("{}/", path)
        };

        // Any non-deleted staged file under the prefix makes it a directory.
        let state = self.staging.read();
        let staged_child = state
            .files()
            .keys()
            .any(|key| key.starts_with(&prefix) && !state.is_deleted(key));

        if staged_child {
            return Ok(true);
        }

        self.is_dir_in_db(path)
    }

    /// Check if a path is a file.
    ///
    /// Staging takes precedence: a staged entry (including a staged
    /// deletion) decides the answer without consulting the database.
    pub fn is_file(&self, path: &str) -> FsResult<bool> {
        self.validate_path(path)?;

        let state = self.staging.read();

        // Single map lookup instead of has_file() followed by
        // get_file().unwrap(): removes the redundant second access and the
        // panic path. (This mirrors the get_file pattern already used by
        // stat(), which assumes the same has_file/get_file equivalence.)
        if let Some(file) = state.get_file(path) {
            return Ok(!file.is_deleted);
        }

        self.is_file_in_db(path)
    }

    /// Get file metadata.
    ///
    /// Staging is consulted first; a staged deletion yields
    /// `FsError::NotFound` even if the database still holds the file.
    pub fn stat(&self, path: &str) -> FsResult<FileMetadata> {
        self.validate_path(path)?;

        let state = self.staging.read();

        // Check staging first
        if let Some(staged) = state.get_file(path) {
            if staged.is_deleted {
                return Err(FsError::NotFound(path.to_string()));
            }

            return Ok(FileMetadata {
                path: path.to_string(),
                is_dir: false,
                size: staged.size,
                permissions: FilePermissions::file(),
                is_symlink: false,
                symlink_target: None,
                // NOTE(review): `staged_at.elapsed()` is the entry's *age* in
                // seconds since it was staged, not a wall-clock modification
                // timestamp — confirm consumers of `modified` expect that.
                modified: staged.staged_at.elapsed().as_secs() as i64,
                // Presumably the hash of the pre-staging (database) content
                // for promoted files, and None for brand-new files — TODO
                // confirm against Staging.
                hash: staged.original_hash.clone(),
                kind: FileKind::File,
            });
        }

        // Check database
        self.stat_from_db(path)
    }

    /// Read the full content of a file as raw bytes.
    ///
    /// The staging directory is consulted first; only paths with no staged
    /// entry fall through to the database.
    pub fn read_file(&self, path: &str) -> FsResult<Vec<u8>> {
        self.validate_path(path)?;

        let state = self.staging.read();

        if !state.has_file(path) {
            // Not staged at all — serve straight from the database.
            return self.read_file_from_db(path);
        }

        if state.is_deleted(path) {
            // A staged deletion shadows any database copy.
            Err(FsError::NotFound(path.to_string()))
        } else {
            state.read_file(path)
        }
    }

    /// Read file content and decode it as UTF-8.
    pub fn read_file_string(&self, path: &str) -> FsResult<String> {
        let bytes = self.read_file(path)?;
        match String::from_utf8(bytes) {
            Ok(text) => Ok(text),
            Err(e) => Err(FsError::Encoding(e.to_string())),
        }
    }

    /// List directory contents.
    ///
    /// Entries are merged from staging and the database. Staging wins on
    /// name collisions, and paths staged for deletion are omitted. The
    /// result is sorted by entry name.
    pub fn list_dir(&self, path: &str) -> FsResult<Vec<DirectoryEntry>> {
        self.validate_path(path)?;

        // Keyed by leaf name so staging entries shadow database entries.
        let mut entries: std::collections::HashMap<String, DirectoryEntry> =
            std::collections::HashMap::new();

        // Get entries from staging
        let state = self.staging.read();
        for name in state.list_dir(path) {
            if let Some(staged) = state.get_file(&name) {
                if !staged.is_deleted {
                    // Reduce the staged path to its leaf name for display.
                    let entry_name = name.rsplit('/').next().unwrap_or(&name).to_string();
                    entries.insert(
                        entry_name.clone(),
                        DirectoryEntry {
                            name: entry_name,
                            is_dir: false,
                            size: staged.size,
                            permissions: FilePermissions::file(),
                            // NOTE(review): age in seconds since staging, not
                            // a wall-clock timestamp — confirm intent.
                            modified: staged.staged_at.elapsed().as_secs() as i64,
                        },
                    );
                }
            }
        }

        // Get entries from database (if not overridden by staging).
        // Database errors are deliberately ignored so staged-only content
        // can still be listed.
        if let Ok(db_entries) = self.list_dir_from_db(path) {
            for entry in db_entries {
                if !entries.contains_key(&entry.name) {
                    // Check if deleted in staging; staging keys are full
                    // paths, so rebuild the entry's full path first.
                    let full_path = if path.is_empty() || path == "/" {
                        entry.name.clone()
                    } else {
                        format!("{}/{}", path.trim_end_matches('/'), entry.name)
                    };
                    if !state.is_deleted(&full_path) {
                        entries.insert(entry.name.clone(), entry);
                    }
                }
            }
        }

        let mut result: Vec<_> = entries.into_values().collect();
        result.sort_by(|a, b| a.name.cmp(&b.name));
        Ok(result)
    }

    /// Find files matching a glob pattern.
    ///
    /// Matches are gathered from staging first, then from the database; a
    /// path staged for deletion never appears. Results are sorted.
    ///
    /// # Errors
    ///
    /// Returns `FsError::PatternError` for an empty or malformed pattern.
    pub fn find(&self, pattern: &str) -> FsResult<FindResults> {
        if pattern.is_empty() {
            return Err(FsError::PatternError("Pattern cannot be empty".to_string()));
        }

        let glob = glob::Pattern::new(pattern).map_err(|e| FsError::PatternError(e.to_string()))?;

        let state = self.staging.read();

        // Deduplicate with a set: the previous `files.contains(&path)` scan
        // per database entry made the merge O(n * m).
        let mut seen: std::collections::HashSet<String> = std::collections::HashSet::new();
        let mut files: Vec<String> = Vec::new();

        // Matches from staging (live entries only).
        for (path, staged) in state.files() {
            if !staged.is_deleted && glob.matches(path) {
                seen.insert(path.clone());
                files.push(path.clone());
            }
        }

        // Matches from the database, skipping staged duplicates/deletions.
        if let Ok(db_files) = self.find_in_db(pattern) {
            for path in db_files {
                if seen.insert(path.clone()) && !state.is_deleted(&path) {
                    files.push(path);
                }
            }
        }

        files.sort();

        Ok(FindResults {
            count: files.len(),
            files,
            dirs_traversed: 0,
        })
    }

    /// Calculate disk usage (in bytes) of all live files under `path`.
    ///
    /// Sums staged files and database files, counting each path once: a
    /// staged entry shadows the database copy of the same path, and staged
    /// deletions are excluded entirely. Database errors are ignored
    /// (best-effort), so staged sizes are still reported.
    pub fn disk_usage(&self, path: &str) -> FsResult<u64> {
        self.validate_path(path)?;

        let state = self.staging.read();

        // Empty or root path means "everything"; otherwise match children
        // of `path` via a trailing-slash prefix.
        let prefix = if path.is_empty() || path == "/" {
            String::new()
        } else {
            format!("{}/", path.trim_end_matches('/'))
        };

        let mut total: u64 = 0;

        // Live staged files under the prefix.
        for (file_path, staged) in state.files() {
            if (prefix.is_empty() || file_path.starts_with(&prefix)) && !staged.is_deleted {
                total += staged.size;
            }
        }

        // Database files under the prefix, skipping any path that has a
        // staged entry (modified or deleted). The previous implementation
        // added the entire database total on top of the staging total,
        // which double-counted staged files and still counted files staged
        // for deletion.
        if let Ok(checkin) = self.get_branch_tip() {
            if let Ok(files) = self.repo.list_files_internal(&checkin) {
                for f in files {
                    if (prefix.is_empty() || f.name.starts_with(&prefix))
                        && !state.has_file(&f.name)
                    {
                        total += f.size.unwrap_or(0) as u64;
                    }
                }
            }
        }

        Ok(total)
    }

    /// Count files matching a glob pattern.
    pub fn count_files(&self, pattern: &str) -> FsResult<usize> {
        self.find(pattern).map(|results| results.count)
    }

    // ========================================================================
    // Write Operations (all go to staging first)
    // ========================================================================

    /// Write file content.
    ///
    /// The bytes land in the staging directory immediately; they reach the
    /// database on the next auto-commit or an explicit `commit()`.
    ///
    /// # Errors
    ///
    /// Returns `FsError::FileTooLarge` when `content` exceeds `MAX_FILE_SIZE`.
    pub fn write_file(&self, path: &str, content: &[u8]) -> FsResult<()> {
        self.validate_path(path)?;
        self.validate_size(path, content.len() as u64)?;

        self.staging.write().stage_file(path, content)
    }

    /// Write string content to a file (as UTF-8 bytes).
    pub fn write_file_string(&self, path: &str, content: &str) -> FsResult<()> {
        let bytes = content.as_bytes();
        self.write_file(path, bytes)
    }

    /// Write at a specific offset in a file (partial update).
    ///
    /// If the file exists only in the database, it is first promoted to
    /// staging (read-modify-write), then patched at `offset`.
    ///
    /// # Errors
    ///
    /// Returns `FsError::NotFound` if the path is staged for deletion or
    /// cannot be read from the database.
    pub fn write_at(&self, path: &str, offset: u64, data: &[u8]) -> FsResult<()> {
        self.validate_path(path)?;

        {
            let mut state = self.staging.write();
            if state.has_file(path) {
                if state.is_deleted(path) {
                    return Err(FsError::NotFound(path.to_string()));
                }
                // Already staged — patch in place.
                state.write_at(path, offset, data)?;
                state.mark_modified(path);
                return Ok(());
            }
        } // Release the lock before the (potentially slow) database read.

        let content = self.read_file_from_db(path)?;
        let hash = self.get_file_hash_from_db(path)?;

        let mut state = self.staging.write();

        // Re-check under the reacquired write lock: another thread may have
        // staged this path while we were reading from the database. The
        // previous code promoted unconditionally here, clobbering any such
        // concurrent write with stale database content (lost update).
        if state.has_file(path) {
            if state.is_deleted(path) {
                return Err(FsError::NotFound(path.to_string()));
            }
        } else {
            state.stage_promoted(path, &content, hash)?;
        }

        // Now write at offset.
        state.write_at(path, offset, data)?;
        state.mark_modified(path);

        Ok(())
    }

    /// Stage a file deletion.
    ///
    /// The file disappears from reads immediately; the database copy is
    /// removed at the next commit.
    pub fn delete_file(&self, path: &str) -> FsResult<()> {
        self.validate_path(path)?;
        self.staging.write().delete_file(path)
    }

    /// Stage a recursive directory deletion.
    pub fn delete_dir(&self, path: &str) -> FsResult<()> {
        self.validate_path(path)?;
        self.staging.write().delete_dir(path)
    }

    /// Copy a file; the destination is written to staging.
    pub fn copy_file(&self, src: &str, dst: &str) -> FsResult<()> {
        self.validate_path(src)?;
        self.validate_path(dst)?;

        // The source may live in staging or the database; read_file
        // resolves either transparently.
        let content = self.read_file(src)?;
        self.staging.write().stage_file(dst, &content)
    }

    /// Move a file: copy to the destination, then stage deletion of the
    /// source. A failed copy leaves the source untouched.
    pub fn move_file(&self, src: &str, dst: &str) -> FsResult<()> {
        self.copy_file(src, dst)?;
        self.delete_file(src)
    }

    /// Copy a directory recursively.
    ///
    /// Every matched file under `src` is re-staged under `dst` with the
    /// same relative path.
    pub fn copy_dir(&self, src: &str, dst: &str) -> FsResult<()> {
        self.validate_path(src)?;
        self.validate_path(dst)?;

        // Find all files in source directory.
        // NOTE(review): assumes `src/**/*` matches files at every depth
        // under `src`, including direct children — confirm against the glob
        // crate's `**` semantics used by find().
        let pattern = format!("{}/**/*", src.trim_end_matches('/'));
        let files = self.find(&pattern)?;

        for file_path in files.files {
            // Rebase the matched path from `src` onto `dst`.
            let rel_path = file_path
                .strip_prefix(src.trim_end_matches('/'))
                .unwrap_or(&file_path)
                .trim_start_matches('/');
            let dst_path = format!("{}/{}", dst.trim_end_matches('/'), rel_path);

            let content = self.read_file(&file_path)?;
            // The write lock is taken per file, so readers can interleave
            // with a long-running copy.
            let mut state = self.staging.write();
            state.stage_file(&dst_path, &content)?;
        }

        Ok(())
    }

    /// Move a directory: recursive copy followed by recursive delete of
    /// the source.
    pub fn move_dir(&self, src: &str, dst: &str) -> FsResult<()> {
        self.copy_dir(src, dst)?;
        self.delete_dir(src)
    }

    // ========================================================================
    // Commit Operations
    // ========================================================================

    /// Force an immediate commit of all staged changes.
    ///
    /// Returns the new commit hash, or `"no-changes"` when staging is clean.
    pub fn commit(&self) -> FsResult<String> {
        commit_now(&self.staging, &self.repo)
    }

    /// Force a commit with a custom message.
    ///
    /// Returns the new commit hash, or `"no-changes"` when nothing is
    /// staged. Holds the staging write lock for the whole operation, so
    /// concurrent filesystem calls block until the commit finishes.
    pub fn commit_with_message(&self, message: &str) -> FsResult<String> {
        let mut state = self.staging.write();

        if !state.is_dirty() {
            return Ok("no-changes".to_string());
        }

        let author = state.author().to_string();
        let branch = state.branch().to_string();

        // Collect staged changes (new/modified files and deletions)
        let mut staged_files: Vec<(String, Vec<u8>)> = Vec::new();
        let mut deletions: std::collections::HashSet<String> = std::collections::HashSet::new();

        for (path, staged_file) in state.files() {
            if staged_file.is_deleted {
                deletions.insert(path.clone());
            } else if staged_file.modified {
                let content = state.read_file(path)?;
                staged_files.push((path.clone(), content));
            }
        }

        // Staged-but-unmodified entries (e.g. promoted files that were never
        // written) produce no changes; reset the dirty flag instead of
        // creating an empty commit.
        if staged_files.is_empty() && deletions.is_empty() {
            state.mark_clean();
            return Ok("no-changes".to_string());
        }

        // Get parent commit hash. A missing branch or tip yields None —
        // presumably commit_internal treats that as an initial (parentless)
        // commit; confirm its contract.
        let parent_hash = self
            .repo
            .branches()
            .get(&branch)
            .ok()
            .and_then(|b| b.tip().ok())
            .map(|c| c.hash);

        // Build complete file list: parent files + staged changes - deletions
        let mut files_to_commit: Vec<(String, Vec<u8>)> = Vec::new();
        let staged_paths: std::collections::HashSet<String> =
            staged_files.iter().map(|(p, _)| p.clone()).collect();

        // First, add files from parent that aren't being modified or deleted
        if let Some(ref parent) = parent_hash {
            if let Ok(parent_files) = self.repo.list_files_internal(parent) {
                for file_info in parent_files {
                    // Skip if this file is being deleted
                    if deletions.contains(&file_info.name) {
                        continue;
                    }
                    // Skip if this file is being replaced with staged content
                    if staged_paths.contains(&file_info.name) {
                        continue;
                    }
                    // Read the file content from parent and include it
                    // (read failures silently drop the file from the new
                    // commit — NOTE(review): confirm this is intended).
                    if let Ok(content) = self.repo.read_file_internal(parent, &file_info.name) {
                        files_to_commit.push((file_info.name, content));
                    }
                }
            }
        }

        // Add all staged files (new and modified)
        files_to_commit.extend(staged_files);

        // Borrow view of the file list in the shape commit_internal expects.
        let files_refs: Vec<(&str, &[u8])> = files_to_commit
            .iter()
            .map(|(p, c)| (p.as_str(), c.as_slice()))
            .collect();

        let commit_hash = self
            .repo
            .commit_internal(
                &files_refs,
                message,
                &author,
                parent_hash.as_deref(),
                Some(&branch),
            )
            .map_err(|e| FsError::DatabaseError(format!("Commit failed: {}", e)))?;

        // Everything is in the database now; discard the staging contents.
        state.clear()?;

        Ok(commit_hash)
    }

    // ========================================================================
    // Branch Operations (force commit before switching)
    // ========================================================================

    /// Switch to a different branch.
    ///
    /// Any staged changes are committed to the current branch first, so a
    /// failed commit aborts the switch.
    pub fn switch_branch(&mut self, branch: &str) -> FsResult<()> {
        // Flush staging onto the branch we are leaving.
        self.commit()?;

        let name = branch.to_string();
        self.staging.write().set_branch(name.clone());
        self.branch = name;

        Ok(())
    }

    // ========================================================================
    // Helper Methods
    // ========================================================================

    /// Validate path format: non-empty and free of NUL bytes.
    fn validate_path(&self, path: &str) -> FsResult<()> {
        if path.is_empty() {
            Err(FsError::InvalidPath("Path cannot be empty".to_string()))
        } else if path.contains('\0') {
            Err(FsError::InvalidPath(
                "Path cannot contain null bytes".to_string(),
            ))
        } else {
            Ok(())
        }
    }

    /// Reject writes larger than `MAX_FILE_SIZE`.
    fn validate_size(&self, path: &str, size: u64) -> FsResult<()> {
        if size <= MAX_FILE_SIZE {
            return Ok(());
        }
        Err(FsError::FileTooLarge {
            path: path.to_string(),
            size,
            max: MAX_FILE_SIZE,
        })
    }

    // Database access helpers

    /// True when `path` is readable at the current branch tip.
    fn exists_in_db(&self, path: &str) -> FsResult<bool> {
        let checkin = self.get_branch_tip()?;
        Ok(self.repo.read_file_internal(&checkin, path).is_ok())
    }

    /// True when the branch tip holds at least one file under `path`.
    fn is_dir_in_db(&self, path: &str) -> FsResult<bool> {
        let checkin = self.get_branch_tip()?;
        let populated = self
            .repo
            .list_directory_internal(&checkin, path)
            .map(|files| !files.is_empty())
            .unwrap_or(false);
        Ok(populated)
    }

    /// True when `path` resolves to a readable file at the branch tip.
    fn is_file_in_db(&self, path: &str) -> FsResult<bool> {
        let checkin = self.get_branch_tip()?;
        Ok(self.repo.read_file_internal(&checkin, path).is_ok())
    }

    /// Build `FileMetadata` for `path` from the branch-tip file listing.
    fn stat_from_db(&self, path: &str) -> FsResult<FileMetadata> {
        let checkin = self.get_branch_tip()?;
        let files = self
            .repo
            .list_files_internal(&checkin)
            .map_err(|e| FsError::DatabaseError(e.to_string()))?;

        files
            .into_iter()
            .find(|file| file.name == path)
            .map(|file| FileMetadata {
                path: path.to_string(),
                is_dir: false,
                size: file.size.unwrap_or(0) as u64,
                permissions: FilePermissions::file(),
                is_symlink: false,
                symlink_target: None,
                modified: 0,
                hash: Some(file.hash),
                kind: FileKind::File,
            })
            .ok_or_else(|| FsError::NotFound(path.to_string()))
    }

    /// Read `path` from the database at the current branch tip.
    fn read_file_from_db(&self, path: &str) -> FsResult<Vec<u8>> {
        let checkin = self.get_branch_tip()?;
        match self.repo.read_file_internal(&checkin, path) {
            Ok(bytes) => Ok(bytes),
            Err(e) => Err(FsError::NotFound(format!("{}: {}", path, e))),
        }
    }

    /// Look up the stored hash for `path` at the branch tip.
    fn get_file_hash_from_db(&self, path: &str) -> FsResult<String> {
        let checkin = self.get_branch_tip()?;
        let files = self
            .repo
            .list_files_internal(&checkin)
            .map_err(|e| FsError::DatabaseError(e.to_string()))?;

        files
            .into_iter()
            .find(|file| file.name == path)
            .map(|file| file.hash)
            .ok_or_else(|| FsError::NotFound(path.to_string()))
    }

    /// List directory entries for `path` from the branch tip.
    fn list_dir_from_db(&self, path: &str) -> FsResult<Vec<DirectoryEntry>> {
        let checkin = self.get_branch_tip()?;
        let files = self
            .repo
            .list_directory_internal(&checkin, path)
            .map_err(|e| FsError::DatabaseError(e.to_string()))?;

        let mut entries = Vec::with_capacity(files.len());
        for f in files {
            // Entries carry only the leaf name, not the full path.
            let leaf = f.name.rsplit('/').next().unwrap_or(&f.name).to_string();
            entries.push(DirectoryEntry {
                name: leaf,
                is_dir: false,
                size: f.size.unwrap_or(0) as u64,
                permissions: FilePermissions::file(),
                modified: 0,
            });
        }
        Ok(entries)
    }

    /// Ask the repository for files matching `pattern` at the branch tip.
    fn find_in_db(&self, pattern: &str) -> FsResult<Vec<String>> {
        let checkin = self.get_branch_tip()?;
        let names = self
            .repo
            .find_files_internal(&checkin, pattern)
            .map_err(|e| FsError::DatabaseError(e.to_string()))?
            .into_iter()
            .map(|f| f.name)
            .collect();
        Ok(names)
    }

    /// Sum database file sizes under `path` at the branch tip.
    fn disk_usage_from_db(&self, path: &str) -> FsResult<u64> {
        let checkin = self.get_branch_tip()?;
        let files = self
            .repo
            .list_files_internal(&checkin)
            .map_err(|e| FsError::DatabaseError(e.to_string()))?;

        // Empty or root path means "everything"; otherwise match children
        // of `path` via a trailing-slash prefix.
        let prefix = if path.is_empty() || path == "/" {
            String::new()
        } else {
            format!("{}/", path.trim_end_matches('/'))
        };

        let mut total: u64 = 0;
        for f in files {
            if prefix.is_empty() || f.name.starts_with(&prefix) {
                total += f.size.unwrap_or(0) as u64;
            }
        }

        Ok(total)
    }

    /// Resolve the current branch to its tip commit hash.
    fn get_branch_tip(&self) -> FsResult<String> {
        let tip = self
            .repo
            .branches()
            .get(&self.branch)
            .map_err(|e| FsError::DatabaseError(format!("Failed to get branch: {}", e)))?
            .tip()
            .map_err(|e| FsError::DatabaseError(format!("Failed to get branch tip: {}", e)))?;

        Ok(tip.hash)
    }
}

impl Drop for FsInterface {
    /// Best-effort shutdown: flush staged changes and stop the timer.
    fn drop(&mut self) {
        // Perform final commit and stop timer. The commit result is
        // deliberately discarded — Drop must not panic and has no way to
        // surface an error to the caller.
        let _ = self.commit();
        if let Some(mut timer) = self.commit_timer.take() {
            timer.stop();
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    // Note: Full integration tests require a real repository
    // These tests focus on unit-level behavior

    #[test]
    fn test_validate_path() {
        // Create a minimal test - actual repo tests would go in integration tests
        // (left empty intentionally: FsInterface cannot be constructed
        // without an open repository, so validation paths are exercised
        // through the integration test suite instead)
    }
}