use std::path::PathBuf;
use std::sync::Arc;
use crate::fs::commit_thread::{CommitConfig, CommitTimer, commit_now};
use crate::fs::errors::{FsError, FsResult};
use crate::fs::operations::{DirectoryEntry, FileKind, FileMetadata, FilePermissions, FindResults};
use crate::fs::staging::{MAX_FILE_SIZE, Staging};
use crate::repo::Repository;
/// Lifecycle state of an [`FsInterface`].
///
/// NOTE(review): within this file only `Active` is ever assigned; the
/// `Committing`/`Closed` variants are presumably driven by code elsewhere
/// (e.g. the commit thread) — confirm before relying on transitions.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FsInterfaceStatus {
    /// Interface is open for reads and writes.
    Active,
    /// A commit is presumably in flight (never set in this file).
    Committing,
    /// Interface has been shut down (never set in this file).
    Closed,
}
/// A filesystem-like view over a [`Repository`], with a write-through
/// staging area that is periodically (or explicitly) committed.
pub struct FsInterface {
    // Shared handle to the backing repository.
    repo: Arc<Repository>,
    // Staged (uncommitted) file state; reads consult this before the repo.
    staging: Staging,
    // Background auto-commit timer; `None` when built via `without_timer`.
    commit_timer: Option<CommitTimer>,
    // Lifecycle flag; initialized to `Active` by all constructors.
    status: FsInterfaceStatus,
    // Branch this interface reads from and commits to (defaults to "trunk").
    branch: String,
}
impl FsInterface {
/// Creates an interface for `repo` with the default background-commit
/// configuration (see [`CommitConfig::default`]).
pub fn new(repo: Arc<Repository>, author: &str) -> FsResult<Self> {
    Self::with_config(repo, author, CommitConfig::default())
}
/// Creates an interface for `repo` with an explicit background-commit
/// configuration; the commit timer is started immediately.
///
/// # Errors
/// Propagates failures from staging-directory resolution or staging setup.
pub fn with_config(
    repo: Arc<Repository>,
    author: &str,
    config: CommitConfig,
) -> FsResult<Self> {
    let staging = Staging::new(Self::get_staging_dir(&repo)?, author.to_string())?;
    Ok(Self {
        repo,
        staging,
        commit_timer: Some(CommitTimer::start(config)),
        status: FsInterfaceStatus::Active,
        branch: String::from("trunk"),
    })
}
/// Creates an interface with no background commit timer: changes are only
/// committed explicitly (or on drop).
pub fn without_timer(repo: Arc<Repository>, author: &str) -> FsResult<Self> {
    let staging_dir = Self::get_staging_dir(&repo)?;
    Ok(Self {
        staging: Staging::new(staging_dir, author.to_string())?,
        repo,
        commit_timer: None,
        status: FsInterfaceStatus::Active,
        branch: String::from("trunk"),
    })
}
/// Derives the on-disk staging directory for `repo`: the system temp dir,
/// namespaced by the first (up to) 8 characters of the project code.
///
/// # Errors
/// Returns [`FsError::DatabaseError`] if the project code cannot be read.
fn get_staging_dir(repo: &Repository) -> FsResult<PathBuf> {
    let project_code = repo
        .project_code()
        .map_err(|e| FsError::DatabaseError(format!("Failed to get project code: {}", e)))?;
    // Truncate by characters, not bytes: `&project_code[..8]` panics when
    // byte index 8 is not a UTF-8 char boundary. For ASCII project codes
    // this is identical to the previous byte-based truncation.
    let short_code: String = project_code.chars().take(8).collect();
    Ok(std::env::temp_dir()
        .join("heroforge-staging")
        .join(short_code))
}
/// Current lifecycle status of the interface.
pub fn status(&self) -> FsInterfaceStatus {
    self.status
}
/// Author recorded on staged changes (owned copy from the staging state).
pub fn author(&self) -> String {
    self.staging.read().author().to_string()
}
/// Name of the branch this interface reads from and commits to.
pub fn branch(&self) -> &str {
    &self.branch
}
/// True when the staging area holds uncommitted changes.
pub fn has_changes(&self) -> bool {
    self.staging.read().is_dirty()
}
/// Returns whether `path` currently exists. The staged state wins over the
/// committed tree: a staged deletion hides a committed file.
pub fn exists(&self, path: &str) -> FsResult<bool> {
    self.validate_path(path)?;
    let state = self.staging.read();
    if !state.has_file(path) {
        return self.exists_in_db(path);
    }
    Ok(!state.is_deleted(path))
}
/// Returns whether `path` is a directory, i.e. whether any live file sits
/// beneath it in staging or in the committed tree.
pub fn is_dir(&self, path: &str) -> FsResult<bool> {
    self.validate_path(path)?;
    let state = self.staging.read();
    let prefix = match path.ends_with('/') {
        true => path.to_string(),
        false => format!("{}/", path),
    };
    // Any non-deleted staged file under the prefix proves the directory.
    let has_staged_child = state
        .files()
        .keys()
        .any(|key| key.starts_with(&prefix) && !state.is_deleted(key));
    if has_staged_child {
        return Ok(true);
    }
    self.is_dir_in_db(path)
}
/// Returns whether `path` is a live file (staged state wins over the
/// committed tree).
pub fn is_file(&self, path: &str) -> FsResult<bool> {
    self.validate_path(path)?;
    let state = self.staging.read();
    // Single lookup instead of `has_file` followed by `get_file(...).unwrap()`:
    // avoids probing the map twice and removes the panic path entirely.
    if let Some(file) = state.get_file(path) {
        return Ok(!file.is_deleted);
    }
    self.is_file_in_db(path)
}
/// Returns metadata for `path`, preferring the staged copy when present.
///
/// # Errors
/// [`FsError::NotFound`] if the file is staged-deleted, or absent from both
/// staging and the committed tree.
pub fn stat(&self, path: &str) -> FsResult<FileMetadata> {
    self.validate_path(path)?;
    let state = self.staging.read();
    if let Some(staged) = state.get_file(path) {
        // A staged deletion masks any committed copy.
        if staged.is_deleted {
            return Err(FsError::NotFound(path.to_string()));
        }
        return Ok(FileMetadata {
            path: path.to_string(),
            is_dir: false,
            size: staged.size,
            permissions: FilePermissions::file(),
            is_symlink: false,
            symlink_target: None,
            // NOTE(review): this is seconds *elapsed since staging*, not a
            // wall-clock mtime; the DB path reports 0. Confirm what callers
            // expect from `modified`.
            modified: staged.staged_at.elapsed().as_secs() as i64,
            // NOTE(review): for a modified staged file this is the hash of
            // the *original* content and may be stale — confirm intended.
            hash: staged.original_hash.clone(),
            kind: FileKind::File,
        });
    }
    self.stat_from_db(path)
}
/// Reads the full contents of `path`, preferring the staged copy.
///
/// # Errors
/// [`FsError::NotFound`] if the file is staged-deleted or absent everywhere.
pub fn read_file(&self, path: &str) -> FsResult<Vec<u8>> {
    self.validate_path(path)?;
    let state = self.staging.read();
    if !state.has_file(path) {
        return self.read_file_from_db(path);
    }
    if state.is_deleted(path) {
        Err(FsError::NotFound(path.to_string()))
    } else {
        state.read_file(path)
    }
}
/// Reads `path` and decodes its contents as UTF-8.
///
/// # Errors
/// [`FsError::Encoding`] when the bytes are not valid UTF-8; otherwise the
/// same errors as [`Self::read_file`].
pub fn read_file_string(&self, path: &str) -> FsResult<String> {
    let raw = self.read_file(path)?;
    match String::from_utf8(raw) {
        Ok(text) => Ok(text),
        Err(e) => Err(FsError::Encoding(e.to_string())),
    }
}
/// Lists directory `path`, merging staged entries with the committed tree.
///
/// Staged (non-deleted) files shadow same-named committed entries, and
/// committed entries masked by a staged deletion are dropped. The result is
/// sorted by entry name.
pub fn list_dir(&self, path: &str) -> FsResult<Vec<DirectoryEntry>> {
    self.validate_path(path)?;
    // Keyed by base name so staged entries take precedence over DB entries.
    let mut entries: std::collections::HashMap<String, DirectoryEntry> =
        std::collections::HashMap::new();
    let state = self.staging.read();
    for name in state.list_dir(path) {
        if let Some(staged) = state.get_file(&name) {
            if !staged.is_deleted {
                // Staging yields full paths; keep only the final component.
                let entry_name = name.rsplit('/').next().unwrap_or(&name).to_string();
                entries.insert(
                    entry_name.clone(),
                    DirectoryEntry {
                        name: entry_name,
                        is_dir: false,
                        size: staged.size,
                        permissions: FilePermissions::file(),
                        // NOTE(review): seconds since staging, not a wall-clock
                        // mtime (DB entries report 0) — confirm intended.
                        modified: staged.staged_at.elapsed().as_secs() as i64,
                    },
                );
            }
        }
    }
    // The DB view is best-effort: a listing error leaves only staged entries.
    if let Ok(db_entries) = self.list_dir_from_db(path) {
        for entry in db_entries {
            if !entries.contains_key(&entry.name) {
                // Rebuild the full path so staged deletions can mask this entry.
                let full_path = if path.is_empty() || path == "/" {
                    entry.name.clone()
                } else {
                    format!("{}/{}", path.trim_end_matches('/'), entry.name)
                };
                if !state.is_deleted(&full_path) {
                    entries.insert(entry.name.clone(), entry);
                }
            }
        }
    }
    let mut result: Vec<_> = entries.into_values().collect();
    result.sort_by(|a, b| a.name.cmp(&b.name));
    Ok(result)
}
/// Finds all live files matching glob `pattern`, merging staged files with
/// the committed tree. Results are sorted and deduplicated.
///
/// # Errors
/// [`FsError::PatternError`] for an empty or malformed glob pattern.
pub fn find(&self, pattern: &str) -> FsResult<FindResults> {
    if pattern.is_empty() {
        return Err(FsError::PatternError("Pattern cannot be empty".to_string()));
    }
    let glob = glob::Pattern::new(pattern).map_err(|e| FsError::PatternError(e.to_string()))?;
    let state = self.staging.read();
    // Track matches in a set: the old `files.contains(&path)` check inside
    // the DB loop was O(n·m); set membership makes the merge linear.
    let mut seen: std::collections::HashSet<String> = std::collections::HashSet::new();
    let mut files: Vec<String> = Vec::new();
    for (path, staged) in state.files() {
        if !staged.is_deleted && glob.matches(path) {
            seen.insert(path.clone());
            files.push(path.clone());
        }
    }
    // DB matches are best-effort; skip staged duplicates and staged deletions.
    if let Ok(db_files) = self.find_in_db(pattern) {
        for path in db_files {
            if !state.is_deleted(&path) && seen.insert(path.clone()) {
                files.push(path);
            }
        }
    }
    files.sort();
    Ok(FindResults {
        count: files.len(),
        files,
        dirs_traversed: 0,
    })
}
/// Total size in bytes of all live files under `path` ("" or "/" means the
/// whole tree).
///
/// Staged files are counted from staging; committed files are counted from
/// the branch tip *unless* they are shadowed by a staged entry. The previous
/// implementation added `disk_usage_from_db` unconditionally, which
/// double-counted staged copies of committed files and still counted files
/// marked deleted in staging.
pub fn disk_usage(&self, path: &str) -> FsResult<u64> {
    self.validate_path(path)?;
    let state = self.staging.read();
    let prefix = if path.is_empty() || path == "/" {
        String::new()
    } else {
        format!("{}/", path.trim_end_matches('/'))
    };
    let in_scope = |name: &str| prefix.is_empty() || name.starts_with(&prefix);
    let mut total: u64 = 0;
    for (file_path, staged) in state.files() {
        if in_scope(file_path) && !staged.is_deleted {
            total += staged.size;
        }
    }
    // Committed files: skip any path present in staging — modified copies
    // were already counted above, and staged deletions must not count.
    // DB access stays best-effort, matching the old `if let Ok` behavior.
    if let Ok(checkin) = self.get_branch_tip() {
        if let Ok(files) = self.repo.list_files_internal(&checkin) {
            for f in files {
                if in_scope(&f.name) && !state.has_file(&f.name) {
                    total += f.size.unwrap_or(0) as u64;
                }
            }
        }
    }
    Ok(total)
}
/// Number of live files matching `pattern` (see [`Self::find`]).
pub fn count_files(&self, pattern: &str) -> FsResult<usize> {
    Ok(self.find(pattern)?.count)
}
/// Stages `content` as the new contents of `path`.
///
/// # Errors
/// [`FsError::InvalidPath`] for bad paths, [`FsError::FileTooLarge`] when
/// `content` exceeds `MAX_FILE_SIZE`.
pub fn write_file(&self, path: &str, content: &[u8]) -> FsResult<()> {
    self.validate_path(path)?;
    self.validate_size(path, content.len() as u64)?;
    self.staging.write().stage_file(path, content)
}
/// Stages UTF-8 `content` at `path` (see [`Self::write_file`]).
pub fn write_file_string(&self, path: &str, content: &str) -> FsResult<()> {
    self.write_file(path, content.as_bytes())
}
/// Writes `data` into `path` at byte `offset`, first promoting the file from
/// the committed tree into staging if it is not staged yet.
///
/// # Errors
/// - [`FsError::NotFound`] if the file is staged-deleted or absent.
/// - [`FsError::FileTooLarge`] if the write would end past `MAX_FILE_SIZE`
///   (previously only `write_file` enforced the size cap, so `write_at`
///   could grow a file past the limit).
pub fn write_at(&self, path: &str, offset: u64, data: &[u8]) -> FsResult<()> {
    self.validate_path(path)?;
    // The write's end offset is a lower bound on the resulting file size;
    // `saturating_add` keeps a huge `offset` from wrapping past the check.
    self.validate_size(path, offset.saturating_add(data.len() as u64))?;
    let mut state = self.staging.write();
    if state.has_file(path) {
        if state.is_deleted(path) {
            return Err(FsError::NotFound(path.to_string()));
        }
        state.write_at(path, offset, data)?;
        state.mark_modified(path);
        return Ok(());
    }
    // Not staged yet: release the lock while reading from the repository,
    // then re-acquire it to stage the promoted copy.
    // NOTE(review): another writer could stage `path` in this unlocked
    // window and be overwritten by the promotion below — confirm callers
    // serialize writes to the same path.
    drop(state);
    let content = self.read_file_from_db(path)?;
    let hash = self.get_file_hash_from_db(path)?;
    let mut state = self.staging.write();
    state.stage_promoted(path, &content, hash)?;
    state.write_at(path, offset, data)?;
    state.mark_modified(path);
    Ok(())
}
/// Marks the file at `path` as deleted in staging.
pub fn delete_file(&self, path: &str) -> FsResult<()> {
    self.validate_path(path)?;
    self.staging.write().delete_file(path)
}
/// Marks every file under `path` as deleted in staging.
pub fn delete_dir(&self, path: &str) -> FsResult<()> {
    self.validate_path(path)?;
    self.staging.write().delete_dir(path)
}
/// Copies `src` (staged or committed) to `dst` as a staged file.
pub fn copy_file(&self, src: &str, dst: &str) -> FsResult<()> {
    self.validate_path(src)?;
    self.validate_path(dst)?;
    // Read first (takes and releases the read lock), then stage the copy.
    let content = self.read_file(src)?;
    self.staging.write().stage_file(dst, &content)
}
/// Moves `src` to `dst`: a copy followed by a staged delete of the source.
pub fn move_file(&self, src: &str, dst: &str) -> FsResult<()> {
    self.copy_file(src, dst)
        .and_then(|()| self.delete_file(src))
}
/// Recursively copies every file under `src` into `dst` (files only — empty
/// directories are not represented).
///
/// NOTE(review): on a mid-way error, files already staged into `dst` stay
/// staged; the copy is not atomic. Confirm callers tolerate partial copies.
pub fn copy_dir(&self, src: &str, dst: &str) -> FsResult<()> {
    self.validate_path(src)?;
    self.validate_path(dst)?;
    // `find` merges staged and committed files, so both sources get copied.
    let pattern = format!("{}/**/*", src.trim_end_matches('/'));
    let files = self.find(&pattern)?;
    for file_path in files.files {
        // Path relative to `src`, used to mirror the layout under `dst`.
        let rel_path = file_path
            .strip_prefix(src.trim_end_matches('/'))
            .unwrap_or(&file_path)
            .trim_start_matches('/');
        let dst_path = format!("{}/{}", dst.trim_end_matches('/'), rel_path);
        let content = self.read_file(&file_path)?;
        // Write lock is taken per file: `read_file` above needs the read
        // lock, so holding the write lock across the loop would conflict.
        let mut state = self.staging.write();
        state.stage_file(&dst_path, &content)?;
    }
    Ok(())
}
/// Moves a directory tree: copy every file under `src`, then stage the
/// deletion of `src`.
pub fn move_dir(&self, src: &str, dst: &str) -> FsResult<()> {
    self.copy_dir(src, dst)
        .and_then(|()| self.delete_dir(src))
}
/// Commits all staged changes through the shared commit path (`commit_now`),
/// returning whatever identifier it produces.
pub fn commit(&self) -> FsResult<String> {
    commit_now(&self.staging, &self.repo)
}
/// Commits staged changes as a full snapshot with a caller-supplied message.
///
/// Returns the new commit hash, or the literal string `"no-changes"` when
/// the staging area is clean or nothing materialized to commit.
///
/// The commit is snapshot-style: every file from the parent commit that was
/// neither deleted nor replaced in staging is re-read and re-submitted
/// alongside the staged files.
///
/// # Errors
/// [`FsError::DatabaseError`] if the underlying commit fails; staged reads
/// and the final staging clear may also fail.
pub fn commit_with_message(&self, message: &str) -> FsResult<String> {
    let mut state = self.staging.write();
    if !state.is_dirty() {
        return Ok("no-changes".to_string());
    }
    let author = state.author().to_string();
    let branch = state.branch().to_string();
    // Partition the staging area into modified contents and deletions.
    let mut staged_files: Vec<(String, Vec<u8>)> = Vec::new();
    let mut deletions: std::collections::HashSet<String> = std::collections::HashSet::new();
    for (path, staged_file) in state.files() {
        if staged_file.is_deleted {
            deletions.insert(path.clone());
        } else if staged_file.modified {
            let content = state.read_file(path)?;
            staged_files.push((path.clone(), content));
        }
    }
    if staged_files.is_empty() && deletions.is_empty() {
        // Dirty flag was set but nothing materialized; reset and bail out.
        state.mark_clean();
        return Ok("no-changes".to_string());
    }
    // Tip of the target branch becomes the parent; `None` for a first
    // commit (missing branch or missing tip is treated the same).
    let parent_hash = self
        .repo
        .branches()
        .get(&branch)
        .ok()
        .and_then(|b| b.tip().ok())
        .map(|c| c.hash);
    // Carry forward every parent file not deleted and not shadowed by a
    // staged copy, then append the staged files last.
    let mut files_to_commit: Vec<(String, Vec<u8>)> = Vec::new();
    let staged_paths: std::collections::HashSet<String> =
        staged_files.iter().map(|(p, _)| p.clone()).collect();
    if let Some(ref parent) = parent_hash {
        if let Ok(parent_files) = self.repo.list_files_internal(parent) {
            for file_info in parent_files {
                if deletions.contains(&file_info.name) {
                    continue;
                }
                if staged_paths.contains(&file_info.name) {
                    continue;
                }
                // NOTE(review): a read error silently drops the file from
                // the snapshot — confirm this best-effort behavior is
                // intended rather than aborting the commit.
                if let Ok(content) = self.repo.read_file_internal(parent, &file_info.name) {
                    files_to_commit.push((file_info.name, content));
                }
            }
        }
    }
    files_to_commit.extend(staged_files);
    // Borrowed view expected by `commit_internal`.
    let files_refs: Vec<(&str, &[u8])> = files_to_commit
        .iter()
        .map(|(p, c)| (p.as_str(), c.as_slice()))
        .collect();
    let commit_hash = self
        .repo
        .commit_internal(
            &files_refs,
            message,
            &author,
            parent_hash.as_deref(),
            Some(&branch),
        )
        .map_err(|e| FsError::DatabaseError(format!("Commit failed: {}", e)))?;
    // Success: staging is now empty relative to the new tip.
    state.clear()?;
    Ok(commit_hash)
}
/// Switches this interface to `branch`, first flushing any staged changes
/// to the *current* branch via [`Self::commit`].
///
/// # Errors
/// If the flush commit fails, the branch is left unchanged.
pub fn switch_branch(&mut self, branch: &str) -> FsResult<()> {
    self.commit()?;
    let name = branch.to_string();
    self.branch = name.clone();
    self.staging.write().set_branch(name);
    Ok(())
}
/// Rejects structurally invalid paths: empty strings and embedded NULs.
/// All other strings pass (no traversal or absolute-path checks here).
fn validate_path(&self, path: &str) -> FsResult<()> {
    if path.is_empty() {
        Err(FsError::InvalidPath("Path cannot be empty".to_string()))
    } else if path.contains('\0') {
        Err(FsError::InvalidPath(
            "Path cannot contain null bytes".to_string(),
        ))
    } else {
        Ok(())
    }
}
/// Rejects writes that would exceed the staging size cap `MAX_FILE_SIZE`.
fn validate_size(&self, path: &str, size: u64) -> FsResult<()> {
    if size <= MAX_FILE_SIZE {
        return Ok(());
    }
    Err(FsError::FileTooLarge {
        path: path.to_string(),
        size,
        max: MAX_FILE_SIZE,
    })
}
/// Whether `path` is readable at the current branch tip. Any read error is
/// treated as "does not exist".
fn exists_in_db(&self, path: &str) -> FsResult<bool> {
    let checkin = self.get_branch_tip()?;
    Ok(self.repo.read_file_internal(&checkin, path).is_ok())
}
/// Whether `path` has at least one entry at the current branch tip. Listing
/// errors are treated as "not a directory".
fn is_dir_in_db(&self, path: &str) -> FsResult<bool> {
    let checkin = self.get_branch_tip()?;
    let non_empty = self
        .repo
        .list_directory_internal(&checkin, path)
        .map(|files| !files.is_empty())
        .unwrap_or(false);
    Ok(non_empty)
}
/// Whether `path` is a readable file at the current branch tip — the same
/// probe as `exists_in_db`; read errors count as "not a file".
fn is_file_in_db(&self, path: &str) -> FsResult<bool> {
    let checkin = self.get_branch_tip()?;
    Ok(self.repo.read_file_internal(&checkin, path).is_ok())
}
/// Builds metadata for `path` from the committed tree at the branch tip.
/// Committed entries report `modified: 0` (no timestamp is stored here).
fn stat_from_db(&self, path: &str) -> FsResult<FileMetadata> {
    let checkin = self.get_branch_tip()?;
    let files = self
        .repo
        .list_files_internal(&checkin)
        .map_err(|e| FsError::DatabaseError(e.to_string()))?;
    files
        .into_iter()
        .find(|f| f.name == path)
        .map(|file| FileMetadata {
            path: path.to_string(),
            is_dir: false,
            size: file.size.unwrap_or(0) as u64,
            permissions: FilePermissions::file(),
            is_symlink: false,
            symlink_target: None,
            modified: 0,
            hash: Some(file.hash),
            kind: FileKind::File,
        })
        .ok_or_else(|| FsError::NotFound(path.to_string()))
}
/// Reads `path` from the committed tree at the branch tip; any repository
/// error is surfaced as `NotFound` with the cause appended.
fn read_file_from_db(&self, path: &str) -> FsResult<Vec<u8>> {
    let checkin = self.get_branch_tip()?;
    match self.repo.read_file_internal(&checkin, path) {
        Ok(bytes) => Ok(bytes),
        Err(e) => Err(FsError::NotFound(format!("{}: {}", path, e))),
    }
}
/// Looks up the committed content hash of `path` at the branch tip.
fn get_file_hash_from_db(&self, path: &str) -> FsResult<String> {
    let checkin = self.get_branch_tip()?;
    self.repo
        .list_files_internal(&checkin)
        .map_err(|e| FsError::DatabaseError(e.to_string()))?
        .into_iter()
        .find(|f| f.name == path)
        .map(|f| f.hash)
        .ok_or_else(|| FsError::NotFound(path.to_string()))
}
/// Lists directory `path` in the committed tree at the branch tip. Entries
/// carry `modified: 0` (no timestamp is available from the listing).
fn list_dir_from_db(&self, path: &str) -> FsResult<Vec<DirectoryEntry>> {
    let checkin = self.get_branch_tip()?;
    let files = self
        .repo
        .list_directory_internal(&checkin, path)
        .map_err(|e| FsError::DatabaseError(e.to_string()))?;
    let mut entries = Vec::with_capacity(files.len());
    for f in files {
        // Keep only the final path component as the entry name.
        let base_name = f.name.rsplit('/').next().unwrap_or(&f.name).to_string();
        entries.push(DirectoryEntry {
            name: base_name,
            is_dir: false,
            size: f.size.unwrap_or(0) as u64,
            permissions: FilePermissions::file(),
            modified: 0,
        });
    }
    Ok(entries)
}
/// Returns paths of committed files matching `pattern` at the branch tip.
fn find_in_db(&self, pattern: &str) -> FsResult<Vec<String>> {
    let checkin = self.get_branch_tip()?;
    match self.repo.find_files_internal(&checkin, pattern) {
        Ok(files) => Ok(files.into_iter().map(|f| f.name).collect()),
        Err(e) => Err(FsError::DatabaseError(e.to_string())),
    }
}
/// Sums committed file sizes under `path` at the branch tip ("" or "/"
/// means the whole tree). Files with unknown size count as 0.
fn disk_usage_from_db(&self, path: &str) -> FsResult<u64> {
    let checkin = self.get_branch_tip()?;
    let files = self
        .repo
        .list_files_internal(&checkin)
        .map_err(|e| FsError::DatabaseError(e.to_string()))?;
    let prefix = match path.is_empty() || path == "/" {
        true => String::new(),
        false => format!("{}/", path.trim_end_matches('/')),
    };
    let mut total: u64 = 0;
    for f in files {
        if prefix.is_empty() || f.name.starts_with(&prefix) {
            total += f.size.unwrap_or(0) as u64;
        }
    }
    Ok(total)
}
/// Resolves the current branch to its tip commit hash.
///
/// # Errors
/// [`FsError::DatabaseError`] when the branch or its tip cannot be read.
fn get_branch_tip(&self) -> FsResult<String> {
    let branch_ref = self
        .repo
        .branches()
        .get(&self.branch)
        .map_err(|e| FsError::DatabaseError(format!("Failed to get branch: {}", e)))?;
    branch_ref
        .tip()
        .map(|commit| commit.hash)
        .map_err(|e| FsError::DatabaseError(format!("Failed to get branch tip: {}", e)))
}
}
impl Drop for FsInterface {
    // Best-effort flush on teardown: commit whatever is staged first
    // (errors are deliberately ignored — failing in `drop` has nowhere to
    // propagate), then stop the background commit timer if one was started.
    fn drop(&mut self) {
        let _ = self.commit();
        if let Some(mut timer) = self.commit_timer.take() {
            timer.stop();
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;
    // TODO(review): this test is an empty stub — it asserts nothing.
    // Constructing an `FsInterface` requires a `Repository`; wire one up
    // via `tempdir()` and cover the cases `validate_path` rejects (empty
    // paths, embedded NUL bytes) plus an accepted path.
    #[test]
    fn test_validate_path() {
    }
}