1use crate::cas::store::{BlobStore, CasError};
25use crate::dag::graph::{DagError, PatchDag};
26use crate::engine::apply::{ApplyError, apply_patch_chain, resolve_payload_to_hash};
27use crate::engine::diff::{DiffEntry, DiffType, diff_trees};
28use crate::engine::tree::FileTree;
29use crate::metadata::MetaError;
30use crate::patch::conflict::Conflict;
31use crate::patch::merge::MergeResult;
32use crate::patch::types::{FileChange, OperationType, Patch, PatchId, TouchSet};
33use serde::{Deserialize, Serialize};
34use std::cell::RefCell;
35use std::collections::{HashMap, HashSet, VecDeque};
36use std::fs;
37use std::io;
38use std::path::{Path, PathBuf};
39use suture_common::{BranchName, CommonError, FileStatus, Hash, RepoPath};
40use thiserror::Error;
41
/// Errors produced by repository operations.
///
/// Subsystem errors (CAS, DAG, metadata, I/O, patch application, common)
/// are wrapped via `#[from]` so `?` propagates them throughout this module.
#[derive(Error, Debug)]
pub enum RepoError {
    /// The path does not contain a `.suture` directory.
    #[error("not a suture repository: {0}")]
    NotARepository(PathBuf),

    /// `init` was called on a path that already has a `.suture` directory.
    #[error("repository already exists: {0}")]
    AlreadyExists(PathBuf),

    /// Content-addressed store failure.
    #[error("CAS error: {0}")]
    Cas(#[from] CasError),

    /// Patch DAG failure.
    #[error("DAG error: {0}")]
    Dag(#[from] DagError),

    /// Metadata (SQLite) store failure.
    #[error("metadata error: {0}")]
    Meta(#[from] MetaError),

    /// Filesystem I/O failure.
    #[error("I/O error: {0}")]
    Io(#[from] std::io::Error),

    /// Failure while replaying/applying patches.
    #[error("patch application error: {0}")]
    Apply(#[from] ApplyError),

    /// Free-form patch-related failure.
    #[error("patch error: {0}")]
    Patch(String),

    /// The working set has no staged changes to commit.
    #[error("nothing to commit")]
    NothingToCommit,

    /// An operation was attempted while a merge is pending resolution.
    #[error("merge in progress — resolve conflicts first")]
    MergeInProgress,

    /// An operation would clobber staged changes; carries the staged count.
    #[error("uncommitted changes would be overwritten (staged: {0})")]
    DirtyWorkingTree(usize),

    /// Named branch does not exist.
    #[error("branch not found: {0}")]
    BranchNotFound(String),

    /// Error from the shared `suture_common` crate.
    #[error("common error: {0}")]
    Common(#[from] CommonError),

    /// Catch-all with a caller-supplied message (used for SQL errors, etc.).
    #[error("{0}")]
    Custom(String),

    /// Operation not supported in the current context.
    #[error("unsupported operation: {0}")]
    Unsupported(String),
}
105
/// Granularity selector for reset operations.
///
/// NOTE(review): no reset implementation is visible in this chunk; the
/// variants presumably follow git's soft/mixed/hard levels (branch pointer
/// only / plus staging area / plus working tree) — confirm at the call site.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ResetMode {
    Soft,
    Mixed,
    Hard,
}
116
/// An opened suture repository: a working directory plus `.suture` state.
pub struct Repository {
    /// Root of the working tree; all `RepoPath`s are joined onto this.
    root: PathBuf,
    /// The `.suture` directory under `root`.
    // NOTE(review): `suture_dir` IS used (read/write_head_branch); the
    // `allow(dead_code)` looks stale — confirm before removing.
    #[allow(dead_code)]
    suture_dir: PathBuf,
    /// Content-addressed blob storage for file contents.
    cas: BlobStore,
    /// In-memory patch DAG, rebuilt from metadata on `open`.
    dag: PatchDag,
    /// SQLite-backed store: patches, branches, config, working set.
    meta: crate::metadata::MetadataStore,
    /// Author string recorded on newly created patches.
    author: String,
    /// Ignore patterns consulted by `add_all`'s directory walk.
    ignore_patterns: Vec<String>,
    /// Parent ids for the next commit when a merge is awaiting resolution.
    pending_merge_parents: Vec<PatchId>,
    // The three caches below use RefCell so `&self` read paths can
    // populate them (interior mutability).
    /// Cached file tree at HEAD.
    cached_head_snapshot: RefCell<Option<FileTree>>,
    /// Cached HEAD patch id.
    cached_head_id: RefCell<Option<PatchId>>,
    /// Cached HEAD branch name.
    cached_head_branch: RefCell<Option<String>>,
    /// Per-repository config file contents (highest-priority config layer).
    repo_config: crate::metadata::repo_config::RepoConfig,
    /// True when this checkout is a linked worktree (HEAD lives in a file).
    is_worktree: bool,
}
147
148impl Repository {
    /// Create a new repository at `path`, failing if one already exists.
    ///
    /// Lays out `.suture/` (object store + metadata DB), seeds the DAG
    /// with an empty "Initial commit" root patch, points a fresh `main`
    /// branch at it, and records `author` in config.
    pub fn init(path: &Path, author: &str) -> Result<Self, RepoError> {
        let suture_dir = path.join(".suture");
        if suture_dir.exists() {
            return Err(RepoError::AlreadyExists(path.to_path_buf()));
        }

        fs::create_dir_all(suture_dir.join("objects"))?;

        let mut cas = BlobStore::new(&suture_dir)?;
        // Trust stored blobs on read; hash verification is disabled here
        // (presumably a perf choice — confirm against BlobStore docs).
        cas.set_verify_on_read(false);

        let meta = crate::metadata::MetadataStore::open(&suture_dir.join("metadata.db"))?;

        let mut dag = PatchDag::new();

        // The root patch is an empty Create with no parents; every later
        // patch ultimately descends from it.
        let root_patch = Patch::new(
            OperationType::Create,
            TouchSet::empty(),
            None,
            vec![],
            vec![],
            author.to_string(),
            "Initial commit".to_string(),
        );
        let root_id = dag.add_patch(root_patch.clone(), vec![])?;

        meta.store_patch(&root_patch)?;

        // Branch state is mirrored in both the DAG and the metadata DB.
        let main_branch = BranchName::new("main").expect("hardcoded 'main' is always valid");
        dag.create_branch(main_branch.clone(), root_id)?;
        meta.set_branch(&main_branch, &root_id)?;

        meta.set_config("author", author)?;

        let ignore_patterns = load_ignore_patterns(path);

        Ok(Self {
            root: path.to_path_buf(),
            suture_dir,
            cas,
            dag,
            meta,
            author: author.to_string(),
            ignore_patterns,
            pending_merge_parents: Vec::new(),
            cached_head_snapshot: RefCell::new(None),
            cached_head_id: RefCell::new(None),
            cached_head_branch: RefCell::new(None),
            repo_config: crate::metadata::repo_config::RepoConfig::default(),
            is_worktree: false,
        })
    }
    /// Open an existing repository at `path`.
    ///
    /// Rebuilds the in-memory `PatchDag` from the metadata DB: patches
    /// are inserted parents-first via a bounded multi-pass loop, then
    /// branches are re-created and any pending merge state is restored.
    pub fn open(path: &Path) -> Result<Self, RepoError> {
        let suture_dir = path.join(".suture");
        if !suture_dir.exists() {
            return Err(RepoError::NotARepository(path.to_path_buf()));
        }

        // A `.suture/worktree` marker distinguishes linked worktrees.
        let is_worktree = suture_dir.join("worktree").exists();

        let mut cas = BlobStore::new(&suture_dir)?;
        cas.set_verify_on_read(false);
        let meta = crate::metadata::MetadataStore::open(&suture_dir.join("metadata.db"))?;

        let mut dag = PatchDag::new();

        // All stored patch ids in hex; unreadable or unparseable rows are
        // silently skipped.
        let all_patch_ids: Vec<PatchId> = {
            let mut stmt = meta
                .conn()
                .prepare("SELECT id FROM patches ORDER BY id")
                .map_err(|e: rusqlite::Error| RepoError::Custom(e.to_string()))?;
            let rows = stmt
                .query_map([], |row: &rusqlite::Row| row.get::<_, String>(0))
                .map_err(|e: rusqlite::Error| RepoError::Custom(e.to_string()))?;
            rows.filter_map(|r: Result<String, _>| r.ok())
                .filter_map(|hex| Hash::from_hex(&hex).ok())
                .collect()
        };

        // Insert patches only once all their parents are in. Repeated
        // passes handle arbitrary row order; the `attempts` bound
        // guarantees termination even if some parent is missing (such
        // patches are simply dropped from the DAG).
        let mut loaded: HashSet<PatchId> = HashSet::new();
        let mut attempts = 0;
        while loaded.len() < all_patch_ids.len() && attempts < all_patch_ids.len() + 1 {
            for patch_id in &all_patch_ids {
                if loaded.contains(patch_id) {
                    continue;
                }
                if let Ok(patch) = meta.get_patch(patch_id) {
                    // A zero parent id marks "no parent" and is always ready.
                    let parents_ready = patch
                        .parent_ids
                        .iter()
                        .all(|pid| loaded.contains(pid) || *pid == Hash::ZERO);
                    if parents_ready {
                        let valid_parents: Vec<PatchId> = patch
                            .parent_ids
                            .iter()
                            .filter(|pid| loaded.contains(pid))
                            .copied()
                            .collect();
                        let _ = dag.add_patch(patch, valid_parents);
                        loaded.insert(*patch_id);
                    }
                }
            }
            attempts += 1;
        }

        // Re-create branches recorded in the metadata store; invalid
        // names are skipped, existing DAG branches are left alone.
        let branches = meta.list_branches()?;
        for (name, target_id) in &branches {
            let branch_name = match BranchName::new(name) {
                Ok(b) => b,
                Err(_) => continue,
            };
            if !dag.branch_exists(&branch_name) {
                let _ = dag.create_branch(branch_name, *target_id);
            }
        }

        // Author: prefer `user.name`, fall back to the legacy `author`
        // key written by `init`, then a placeholder.
        let author = meta
            .get_config("user.name")
            .unwrap_or(None)
            .or_else(|| meta.get_config("author").unwrap_or(None))
            .unwrap_or_else(|| "unknown".to_string());

        // An interrupted merge persists its parents; restore them so the
        // next commit records the merge correctly.
        let restored_parents = restore_pending_merge_parents(&meta);

        let ignore_patterns = load_ignore_patterns(path);

        let repo_config = crate::metadata::repo_config::RepoConfig::load(path);

        Ok(Self {
            root: path.to_path_buf(),
            suture_dir,
            cas,
            dag,
            meta,
            author,
            ignore_patterns,
            pending_merge_parents: restored_parents,
            cached_head_snapshot: RefCell::new(None),
            cached_head_id: RefCell::new(None),
            cached_head_branch: RefCell::new(None),
            repo_config,
            is_worktree,
        })
    }
    /// Create a throwaway repository: temp-dir working tree + in-memory
    /// metadata DB (presumably for tests — the blob store is still on disk).
    ///
    /// NOTE(review): `.keep()` detaches the tempdir from its cleanup
    /// guard, so the directory persists after drop — confirm intended.
    pub fn open_in_memory() -> Result<Self, RepoError> {
        let temp_root = tempfile::tempdir().map_err(RepoError::Io)?.keep();
        let suture_dir = temp_root.join(".suture");
        fs::create_dir_all(&suture_dir)?;

        let mut cas = BlobStore::new(&suture_dir)?;
        cas.set_verify_on_read(false);
        let meta = crate::metadata::MetadataStore::open_in_memory()?;

        // Same bootstrap as `init`: empty root patch + `main` branch,
        // with a fixed "suture" author.
        let mut dag = PatchDag::new();
        let root_patch = Patch::new(
            OperationType::Create,
            TouchSet::empty(),
            None,
            vec![],
            vec![],
            "suture".to_string(),
            "Initial commit".to_string(),
        );
        let root_id = dag.add_patch(root_patch.clone(), vec![])?;
        meta.store_patch(&root_patch)?;

        let main_branch = BranchName::new("main").expect("hardcoded 'main' is always valid");
        dag.create_branch(main_branch.clone(), root_id)?;
        meta.set_branch(&main_branch, &root_id)?;
        meta.set_config("author", "suture")?;

        Ok(Self {
            root: temp_root,
            suture_dir,
            cas,
            dag,
            meta,
            author: "suture".to_string(),
            ignore_patterns: Vec::new(),
            pending_merge_parents: Vec::new(),
            cached_head_snapshot: RefCell::new(None),
            cached_head_id: RefCell::new(None),
            cached_head_branch: RefCell::new(None),
            repo_config: crate::metadata::repo_config::RepoConfig::default(),
            is_worktree: false,
        })
    }
366
367 pub fn create_branch(&mut self, name: &str, target: Option<&str>) -> Result<(), RepoError> {
373 let branch = BranchName::new(name)?;
374 let target_id = match target {
375 Some(t) => {
376 if t == "HEAD" {
378 let head = self
379 .dag
380 .head()
381 .ok_or_else(|| RepoError::Custom("no HEAD".to_string()))?;
382 head.1
383 } else if let Some(rest) = t.strip_prefix("HEAD~") {
384 let n: usize = rest
385 .parse()
386 .map_err(|_| RepoError::Custom(format!("invalid HEAD~N: {}", t)))?;
387 let (_, head_id) = self.head()?;
388 let mut current = head_id;
389 for _ in 0..n {
390 let patch = self.dag.get_patch(¤t).ok_or_else(|| {
391 RepoError::Custom("HEAD ancestor not found".to_string())
392 })?;
393 current = patch
394 .parent_ids
395 .first()
396 .ok_or_else(|| {
397 RepoError::Custom("HEAD has no parent".to_string())
398 })?
399 .to_owned();
400 }
401 current
402 } else if let Ok(bn) = BranchName::new(t) {
403 self.dag
404 .get_branch(&bn)
405 .ok_or_else(|| RepoError::BranchNotFound(t.to_string()))?
406 } else {
407 Hash::from_hex(t)
408 .map_err(|_| RepoError::Custom(format!("invalid target: {}", t)))?
409 }
410 }
411 None => {
412 let head = self
413 .dag
414 .head()
415 .ok_or_else(|| RepoError::Custom("no HEAD branch".to_string()))?;
416 head.1
417 }
418 };
419
420 self.dag.create_branch(branch.clone(), target_id)?;
421 self.meta.set_branch(&branch, &target_id)?;
422 Ok(())
423 }
424
425 pub fn head(&self) -> Result<(String, PatchId), RepoError> {
430 if let Some(ref cached) = *self.cached_head_id.borrow()
431 && let Some(ref branch) = *self.cached_head_branch.borrow()
432 {
433 return Ok((branch.clone(), *cached));
434 }
435 let branch_name = self.read_head_branch()?;
436
437 let bn = BranchName::new(&branch_name)?;
438 let target_id = self
439 .dag
440 .get_branch(&bn)
441 .ok_or_else(|| RepoError::BranchNotFound(branch_name.clone()))?;
442
443 *self.cached_head_branch.borrow_mut() = Some(branch_name.clone());
444 *self.cached_head_id.borrow_mut() = Some(target_id);
445 Ok((branch_name, target_id))
446 }
447
    /// List every branch with the patch id it currently points at.
    pub fn list_branches(&self) -> Vec<(String, PatchId)> {
        self.dag.list_branches()
    }
452
453 pub fn delete_branch(&mut self, name: &str) -> Result<(), RepoError> {
455 let (current_branch, _) = self.head()?;
456 if current_branch == name {
457 return Err(RepoError::Custom(format!(
458 "cannot delete the current branch '{}'",
459 name
460 )));
461 }
462 let branch = BranchName::new(name)?;
463 self.dag.delete_branch(&branch)?;
464 self.meta
466 .conn()
467 .execute(
468 "DELETE FROM branches WHERE name = ?1",
469 rusqlite::params![name],
470 )
471 .map_err(|e| RepoError::Custom(e.to_string()))?;
472 Ok(())
473 }
474
475 pub fn get_config(&self, key: &str) -> Result<Option<String>, RepoError> {
486 if let Some(val) = self.repo_config.get(key) {
488 return Ok(Some(val));
489 }
490 if let Some(val) = self.meta.get_config(key).map_err(RepoError::from)? {
492 return Ok(Some(val));
493 }
494 let global = crate::metadata::global_config::GlobalConfig::load();
496 Ok(global.get(key))
497 }
498
499 pub fn set_config(&mut self, key: &str, value: &str) -> Result<(), RepoError> {
501 self.meta.set_config(key, value).map_err(RepoError::from)
502 }
503
504 pub fn list_config(&self) -> Result<Vec<(String, String)>, RepoError> {
506 self.meta.list_config().map_err(RepoError::from)
507 }
508
509 fn read_head_branch(&self) -> Result<String, RepoError> {
514 if self.is_worktree {
515 let head_path = self.suture_dir.join("HEAD");
516 if head_path.exists() {
517 Ok(fs::read_to_string(&head_path)?.trim().to_string())
518 } else {
519 Ok("main".to_string())
520 }
521 } else {
522 Ok(self
523 .meta
524 .get_config("head_branch")
525 .unwrap_or(None)
526 .unwrap_or_else(|| "main".to_string()))
527 }
528 }
529
530 fn write_head_branch(&self, branch: &str) -> Result<(), RepoError> {
531 if self.is_worktree {
532 fs::write(self.suture_dir.join("HEAD"), branch)?;
533 } else {
534 self.meta
535 .set_config("head_branch", branch)
536 .map_err(RepoError::Meta)?;
537 }
538 Ok(())
539 }
540
541 pub fn create_tag(&mut self, name: &str, target: Option<&str>) -> Result<(), RepoError> {
549 let target_id = match target {
550 Some(t) if t == "HEAD" || t.starts_with("HEAD~") => {
551 let (_, head_id) = self.head()?;
552 let mut current = head_id;
553 if let Some(n_str) = t.strip_prefix("HEAD~") {
554 let n: usize = n_str
555 .parse()
556 .map_err(|_| RepoError::Custom(format!("invalid HEAD~N: {}", n_str)))?;
557 for _ in 0..n {
558 if let Some(patch) = self.dag.get_patch(¤t) {
559 current = *patch
560 .parent_ids
561 .first()
562 .ok_or_else(|| RepoError::Custom("HEAD has no parent".into()))?;
563 } else {
564 return Err(RepoError::Custom(
565 "HEAD ancestor not found".into(),
566 ));
567 }
568 }
569 }
570 current
571 }
572 Some(t) => {
573 if let Ok(bn) = BranchName::new(t) {
574 self.dag
575 .get_branch(&bn)
576 .ok_or_else(|| RepoError::BranchNotFound(t.to_string()))?
577 } else {
578 Hash::from_hex(t)
579 .map_err(|_| RepoError::Custom(format!("invalid target: {}", t)))?
580 }
581 }
582 None => {
583 let (_, head_id) = self.head()?;
584 head_id
585 }
586 };
587 self.set_config(&format!("tag.{name}"), &target_id.to_hex())
588 }
589
590 pub fn delete_tag(&mut self, name: &str) -> Result<(), RepoError> {
592 let key = format!("tag.{name}");
593 let exists: bool = self
594 .meta
595 .conn()
596 .query_row(
597 "SELECT COUNT(*) FROM config WHERE key = ?1",
598 rusqlite::params![key],
599 |row| row.get::<_, i64>(0),
600 )
601 .map(|count| count > 0)
602 .map_err(|e| RepoError::Custom(e.to_string()))?;
603 if !exists {
604 return Err(RepoError::Custom(format!("tag '{}' not found", name)));
605 }
606 self.meta
607 .conn()
608 .execute("DELETE FROM config WHERE key = ?1", rusqlite::params![key])
609 .map_err(|e| RepoError::Custom(e.to_string()))?;
610 Ok(())
611 }
612
613 pub fn list_tags(&self) -> Result<Vec<(String, PatchId)>, RepoError> {
615 let config = self.list_config()?;
616 let mut tags = Vec::new();
617 for (key, value) in config {
618 if let Some(name) = key.strip_prefix("tag.")
619 && let Ok(id) = Hash::from_hex(&value)
620 {
621 tags.push((name.to_string(), id));
622 }
623 }
624 tags.sort_by(|a, b| a.0.cmp(&b.0));
625 Ok(tags)
626 }
627
628 pub fn resolve_tag(&self, name: &str) -> Result<Option<PatchId>, RepoError> {
630 let val = self.get_config(&format!("tag.{name}"))?;
631 match val {
632 Some(hex) => Ok(Some(Hash::from_hex(&hex)?)),
633 None => Ok(None),
634 }
635 }
636
637 pub fn add_note(&self, patch_id: &PatchId, note: &str) -> Result<(), RepoError> {
643 let existing = self.list_notes(patch_id)?;
644 let next_idx = existing.len();
645 let key = format!("note.{}.{}", patch_id, next_idx);
646 self.meta.set_config(&key, note).map_err(RepoError::Meta)
647 }
648
649 pub fn list_notes(&self, patch_id: &PatchId) -> Result<Vec<String>, RepoError> {
651 let prefix = format!("note.{}.", patch_id);
652 let all_config = self.meta.list_config().map_err(RepoError::Meta)?;
653 let mut notes: Vec<(usize, String)> = Vec::new();
654 for (key, value) in &all_config {
655 if let Some(idx_str) = key.strip_prefix(&prefix)
656 && let Ok(idx) = idx_str.parse::<usize>()
657 {
658 notes.push((idx, value.clone()));
659 }
660 }
661 notes.sort_by_key(|(idx, _)| *idx);
662 Ok(notes.into_iter().map(|(_, v)| v).collect())
663 }
664
665 pub fn remove_note(&self, patch_id: &PatchId, index: usize) -> Result<(), RepoError> {
667 let notes = self.list_notes(patch_id)?;
668 if index >= notes.len() {
669 return Err(RepoError::Custom(format!(
670 "note index {} out of range ({} notes for commit)",
671 index,
672 notes.len()
673 )));
674 }
675 let key = format!("note.{}.{}", patch_id, index);
676 self.meta.delete_config(&key).map_err(RepoError::Meta)
677 }
678
    /// Return every patch reachable from any branch tip but NOT reachable
    /// from `since_id` (exclusive), topologically sorted so parents come
    /// before children.
    pub fn patches_since(&self, since_id: &PatchId) -> Vec<Patch> {
        // Everything at or below `since_id` is already known.
        let since_ancestors = self.dag.ancestors(since_id);
        let mut known = since_ancestors;
        known.insert(*since_id);

        // DFS from all branch tips, collecting ids outside `known`.
        let mut new_ids: HashSet<PatchId> = HashSet::new();
        let mut stack: Vec<PatchId> = self.dag.list_branches().iter().map(|(_, id)| *id).collect();

        while let Some(id) = stack.pop() {
            if !known.contains(&id)
                && new_ids.insert(id)
                && let Some(node) = self.dag.get_node(&id)
            {
                for parent in &node.patch.parent_ids {
                    if !known.contains(parent) && !new_ids.contains(parent) {
                        stack.push(*parent);
                    }
                }
            }
        }

        // Materialize the patches; ids missing from the DAG are dropped.
        let patches: HashMap<PatchId, Patch> = new_ids
            .into_iter()
            .filter_map(|id| self.dag.get_patch(&id).map(|p| (id, p.clone())))
            .collect();

        // Kahn's algorithm. In-degree counts only edges internal to the
        // collected set, so patches whose parents are all "known" start
        // at degree zero.
        let mut in_degree: HashMap<PatchId, usize> = HashMap::new();
        let mut children: HashMap<PatchId, Vec<PatchId>> = HashMap::new();
        for (&id, patch) in &patches {
            in_degree.entry(id).or_insert(0);
            for parent_id in &patch.parent_ids {
                if patches.contains_key(parent_id) {
                    children.entry(*parent_id).or_default().push(id);
                    *in_degree.entry(id).or_insert(0) += 1;
                }
            }
        }

        let mut queue: VecDeque<PatchId> = in_degree
            .iter()
            .filter(|&(_, deg)| *deg == 0)
            .map(|(&id, _)| id)
            .collect();
        let mut sorted_ids: Vec<PatchId> = Vec::with_capacity(patches.len());

        while let Some(id) = queue.pop_front() {
            sorted_ids.push(id);
            if let Some(kids) = children.get(&id) {
                for &child in kids {
                    let deg = in_degree
                        .get_mut(&child)
                        .expect("in-degree entry exists for child in topo sort");
                    *deg -= 1;
                    if *deg == 0 {
                        queue.push_back(child);
                    }
                }
            }
        }

        // NOTE(review): tie order follows HashMap iteration, so the result
        // is topologically valid but not fully deterministic across runs.
        sorted_ids
            .into_iter()
            .filter_map(|id| patches.get(&id).cloned())
            .collect()
    }
755
756 pub fn status(&self) -> Result<RepoStatus, RepoError> {
762 let working_set = self.meta.working_set()?;
763 let branches = self.list_branches();
764 let head = self.head()?;
765
766 Ok(RepoStatus {
767 head_branch: Some(head.0),
768 head_patch: Some(head.1),
769 branch_count: branches.len(),
770 staged_files: working_set
771 .iter()
772 .filter(|(_, s)| {
773 matches!(
774 s,
775 FileStatus::Added | FileStatus::Modified | FileStatus::Deleted
776 )
777 })
778 .map(|(p, s)| (p.clone(), *s))
779 .collect(),
780 patch_count: self.dag.patch_count(),
781 })
782 }
783
784 pub fn add(&self, path: &str) -> Result<(), RepoError> {
786 let repo_path = RepoPath::new(path)?;
787 let full_path = self.root.join(path);
788
789 if !full_path.exists() {
790 if self.is_tracked(path)? {
791 self.meta.working_set_add(&repo_path, FileStatus::Deleted)?;
792 return Ok(());
793 }
794 return Err(RepoError::Io(io::Error::new(
795 io::ErrorKind::NotFound,
796 format!("file not found: {}", path),
797 )));
798 }
799
800 let status = if self.is_tracked(path)? {
801 FileStatus::Modified
802 } else {
803 FileStatus::Added
804 };
805
806 self.meta.working_set_add(&repo_path, status)?;
807 Ok(())
808 }
809
810 pub fn add_all(&self) -> Result<usize, RepoError> {
812 let tree = self.snapshot_head()?;
813 let mut count = 0;
814
815 for entry in walk_dir(&self.root, &self.ignore_patterns)? {
816 let rel_path = entry.relative;
817 let full_path = self.root.join(&rel_path);
818
819 let is_tracked = tree.contains(&rel_path);
820
821 if is_tracked
823 && let Ok(data) = fs::read(&full_path)
824 && let Some(old_hash) = tree.get(&rel_path)
825 && Hash::from_data(&data) == *old_hash
826 {
827 continue; }
829
830 let status = if is_tracked {
831 FileStatus::Modified
832 } else {
833 FileStatus::Added
834 };
835
836 let repo_path = RepoPath::new(&rel_path)?;
837 self.meta.working_set_add(&repo_path, status)?;
838 count += 1;
839 }
840
841 Ok(count)
842 }
843
844 fn is_tracked(&self, path: &str) -> Result<bool, RepoError> {
849 if let Some(ref tree) = *self.cached_head_snapshot.borrow() {
851 return Ok(tree.contains(path));
852 }
853 if let Ok((_, head_id)) = self.head()
855 && let Ok(result) = self.meta.file_tree_contains(&head_id, path)
856 {
857 return Ok(result);
858 }
859 for id in self.dag.patch_ids() {
861 if let Some(node) = self.dag.get_node(&id)
862 && node.patch.target_path.as_deref() == Some(path)
863 {
864 return Ok(true);
865 }
866 }
867 Ok(false)
868 }
869
870 pub fn commit(&mut self, message: &str) -> Result<PatchId, RepoError> {
872 let old_head = self.head().map(|(_, id)| id).unwrap_or(Hash::ZERO);
873 let working_set = self.meta.working_set()?;
874
875 let staged: Vec<_> = working_set
876 .iter()
877 .filter(|(_, s)| {
878 matches!(
879 s,
880 FileStatus::Added | FileStatus::Modified | FileStatus::Deleted
881 )
882 })
883 .collect();
884
885 if staged.is_empty() {
886 return Err(RepoError::NothingToCommit);
887 }
888
889 let (branch_name, head_id) = self.head()?;
890 let is_merge_resolution = !self.pending_merge_parents.is_empty();
891
892 let parent_ids = if self.pending_merge_parents.is_empty() {
893 vec![head_id]
894 } else {
895 std::mem::take(&mut self.pending_merge_parents)
896 };
897
898 let _ = self
900 .meta
901 .conn()
902 .execute("DELETE FROM config WHERE key = 'pending_merge_parents'", []);
903
904 let mut file_changes = Vec::new();
906 for (path, status) in &staged {
907 let full_path = self.root.join(path);
908
909 let (op_type, payload) = match status {
910 FileStatus::Added | FileStatus::Modified => {
911 let data = fs::read(&full_path)?;
912 let hash = self.cas.put_blob(&data)?;
913 let payload = hash.to_hex().as_bytes().to_vec();
914 (OperationType::Modify, payload)
915 }
916 FileStatus::Deleted => (OperationType::Delete, Vec::new()),
917 _ => continue,
918 };
919 file_changes.push(FileChange {
920 op: op_type,
921 path: path.clone(),
922 payload,
923 });
924 }
925
926 if file_changes.is_empty() {
927 return Err(RepoError::NothingToCommit);
928 }
929
930 let batch_patch = Patch::new_batch(
932 file_changes,
933 parent_ids.clone(),
934 self.author.clone(),
935 message.to_string(),
936 );
937
938 let patch_id = self.dag.add_patch(batch_patch.clone(), parent_ids)?;
939 self.meta.store_patch(&batch_patch)?;
940
941 for (path, _) in &staged {
943 let repo_path = RepoPath::new(path.clone())?;
944 self.meta.working_set_remove(&repo_path)?;
945 }
946
947 let branch = BranchName::new(&branch_name)?;
948 self.dag.update_branch(&branch, patch_id)?;
949 self.meta.set_branch(&branch, &patch_id)?;
950
951 if let Ok(tree) = self.snapshot_uncached(&patch_id) {
954 let tree_hash = tree.content_hash();
955 let _ = self.meta.set_config("head_tree_hash", &tree_hash.to_hex());
956 let _ = self.meta.store_file_tree(&patch_id, &tree);
957 }
958
959 self.invalidate_head_cache();
960
961 let _ = self.record_reflog(&old_head, &patch_id, &format!("commit: {}", message));
962
963 if is_merge_resolution {
965 }
968
969 Ok(patch_id)
970 }
971
972 pub fn has_uncommitted_changes(&self) -> Result<bool, RepoError> {
977 let working_set = self.meta.working_set()?;
978
979 let has_staged = working_set.iter().any(|(_, s)| {
980 matches!(
981 s,
982 FileStatus::Added | FileStatus::Modified | FileStatus::Deleted
983 )
984 });
985 if has_staged {
986 return Ok(true);
987 }
988
989 if let Ok(head_tree) = self.snapshot_head() {
990 for (path, hash) in head_tree.iter() {
991 let full_path = self.root.join(path);
992 if let Ok(data) = fs::read(&full_path) {
993 let current_hash = Hash::from_data(&data);
994 if ¤t_hash != hash {
995 return Ok(true);
996 }
997 } else {
998 return Ok(true);
999 }
1000 }
1001 }
1002
1003 Ok(false)
1004 }
1005
1006 pub fn stash_push(&mut self, message: Option<&str>) -> Result<usize, RepoError> {
1007 if !self.has_uncommitted_changes()? {
1008 return Err(RepoError::NothingToCommit);
1009 }
1010
1011 let working_set = self.meta.working_set()?;
1012 let mut files: Vec<(String, Option<String>)> = Vec::new();
1013
1014 for (path, status) in &working_set {
1015 match status {
1016 FileStatus::Added | FileStatus::Modified => {
1017 let full_path = self.root.join(path);
1018 if let Ok(data) = fs::read(&full_path) {
1019 let hash = self.cas.put_blob(&data)?;
1020 files.push((path.clone(), Some(hash.to_hex())));
1021 } else {
1022 files.push((path.clone(), None));
1023 }
1024 }
1025 FileStatus::Deleted => {
1026 files.push((path.clone(), None));
1027 }
1028 _ => {}
1029 }
1030 }
1031
1032 if let Ok(head_tree) = self.snapshot_head() {
1033 for (path, _hash) in head_tree.iter() {
1034 let full_path = self.root.join(path);
1035 if let Ok(data) = fs::read(&full_path) {
1036 let current_hash = Hash::from_data(&data);
1037 if ¤t_hash != _hash {
1038 let already = files.iter().any(|(p, _)| p == path);
1039 if !already {
1040 let hash = self.cas.put_blob(&data)?;
1041 files.push((path.clone(), Some(hash.to_hex())));
1042 }
1043 }
1044 }
1045 }
1046 }
1047
1048 let mut index: usize = 0;
1049 loop {
1050 let key = format!("stash.{}.message", index);
1051 if self.meta.get_config(&key)?.is_none() {
1052 break;
1053 }
1054 index += 1;
1055 }
1056
1057 let (branch_name, head_id) = self.head()?;
1058 let msg = message.unwrap_or("WIP").to_string();
1059 let files_json = serde_json::to_string(&files).unwrap_or_else(|_| "[]".to_string());
1060
1061 self.set_config(&format!("stash.{}.message", index), &msg)?;
1062 self.set_config(&format!("stash.{}.head_branch", index), &branch_name)?;
1063 self.set_config(&format!("stash.{}.head_id", index), &head_id.to_hex())?;
1064 self.set_config(&format!("stash.{}.files", index), &files_json)?;
1065
1066 self.meta
1067 .conn()
1068 .execute("DELETE FROM working_set", [])
1069 .map_err(|e| RepoError::Meta(crate::metadata::MetaError::Database(e)))?;
1070
1071 if let Ok(head_tree) = self.snapshot_head() {
1072 let current_tree = head_tree;
1073 for (path, _) in current_tree.iter() {
1074 let full_path = self.root.join(path);
1075 if full_path.exists() {
1076 let _ = fs::remove_file(&full_path);
1077 }
1078 }
1079 for (path, hash) in current_tree.iter() {
1080 let full_path = self.root.join(path);
1081 if let Some(parent) = full_path.parent() {
1082 let _ = fs::create_dir_all(parent);
1083 }
1084 if let Ok(blob) = self.cas.get_blob(hash) {
1085 let _ = fs::write(&full_path, &blob);
1086 }
1087 }
1088 }
1089
1090 Ok(index)
1091 }
1092
1093 pub fn stash_pop(&mut self) -> Result<(), RepoError> {
1094 let stashes = self.stash_list()?;
1095 if stashes.is_empty() {
1096 return Err(RepoError::Custom("No stashes found".to_string()));
1097 }
1098 let highest = stashes
1099 .iter()
1100 .map(|s| s.index)
1101 .max()
1102 .expect("stash list is non-empty (checked above)");
1103 self.stash_apply(highest)?;
1104 self.stash_drop(highest)?;
1105 Ok(())
1106 }
1107
1108 pub fn stash_apply(&mut self, index: usize) -> Result<(), RepoError> {
1109 let files_key = format!("stash.{}.files", index);
1110 let files_json = self
1111 .meta
1112 .get_config(&files_key)?
1113 .ok_or_else(|| RepoError::Custom(format!("stash@{{{}}} not found", index)))?;
1114
1115 let head_id_key = format!("stash.{}.head_id", index);
1116 let stash_head_id = self.meta.get_config(&head_id_key)?.unwrap_or_default();
1117
1118 if let Ok((_, current_head_id)) = self.head()
1119 && current_head_id.to_hex() != stash_head_id
1120 {
1121 tracing::warn!(
1122 "Warning: HEAD has moved since stash@{{{}}} was created",
1123 index
1124 );
1125 }
1126
1127 let files: Vec<(String, Option<String>)> =
1128 serde_json::from_str(&files_json).unwrap_or_default();
1129
1130 for (path, hash_opt) in &files {
1131 let full_path = self.root.join(path);
1132 match hash_opt {
1133 Some(hex_hash) => {
1134 let hash = Hash::from_hex(hex_hash)
1135 .map_err(|e| RepoError::Custom(format!("invalid hash in stash: {}", e)))?;
1136 let blob = self.cas.get_blob(&hash)?;
1137 if let Some(parent) = full_path.parent() {
1138 fs::create_dir_all(parent)?;
1139 }
1140 fs::write(&full_path, &blob)?;
1141 let repo_path = RepoPath::new(path.clone())?;
1142 self.meta
1143 .working_set_add(&repo_path, FileStatus::Modified)?;
1144 }
1145 None => {
1146 if full_path.exists() {
1147 fs::remove_file(&full_path)?;
1148 }
1149 let repo_path = RepoPath::new(path.clone())?;
1150 self.meta.working_set_add(&repo_path, FileStatus::Deleted)?;
1151 }
1152 }
1153 }
1154
1155 Ok(())
1156 }
1157
1158 pub fn stash_list(&self) -> Result<Vec<StashEntry>, RepoError> {
1159 let all_config = self.list_config()?;
1160 let mut entries = Vec::new();
1161
1162 for (key, value) in &all_config {
1163 if let Some(rest) = key.strip_prefix("stash.")
1164 && let Some(idx_str) = rest.strip_suffix(".message")
1165 && let Ok(idx) = idx_str.parse::<usize>()
1166 {
1167 let branch_key = format!("stash.{}.head_branch", idx);
1168 let head_id_key = format!("stash.{}.head_id", idx);
1169 let branch = self.meta.get_config(&branch_key)?.unwrap_or_default();
1170 let head_id = self.meta.get_config(&head_id_key)?.unwrap_or_default();
1171 entries.push(StashEntry {
1172 index: idx,
1173 message: value.clone(),
1174 branch,
1175 head_id,
1176 });
1177 }
1178 }
1179
1180 entries.sort_by_key(|e| e.index);
1181 Ok(entries)
1182 }
1183
1184 pub fn stash_drop(&mut self, index: usize) -> Result<(), RepoError> {
1185 let prefix = format!("stash.{}.", index);
1186 let all_config = self.list_config()?;
1187 let keys_to_delete: Vec<String> = all_config
1188 .iter()
1189 .filter(|(k, _)| k.starts_with(&prefix))
1190 .map(|(k, _)| k.clone())
1191 .collect();
1192
1193 if keys_to_delete.is_empty() {
1194 return Err(RepoError::Custom(format!("stash@{{{}}} not found", index)));
1195 }
1196
1197 for key in &keys_to_delete {
1198 self.meta
1199 .conn()
1200 .execute("DELETE FROM config WHERE key = ?1", rusqlite::params![key])
1201 .map_err(|e| RepoError::Meta(crate::metadata::MetaError::Database(e)))?;
1202 }
1203
1204 Ok(())
1205 }
1206
    /// Materialize the file tree at HEAD, using layered caches.
    ///
    /// Lookup order:
    /// 1. in-memory `cached_head_snapshot`,
    /// 2. the persisted file tree for HEAD, cross-checked against the
    ///    stored `head_tree_hash` config key when present,
    /// 3. full rebuild by replaying the patch chain, then persist.
    pub fn snapshot_head(&self) -> Result<FileTree, RepoError> {
        // Resolve HEAD directly (not via `self.head()`) so the branch
        // name is in hand for the cache refresh below.
        let (branch_name, head_id) = {
            let branch_name = self.read_head_branch()?;
            let bn = BranchName::new(&branch_name)?;
            let target_id = self
                .dag
                .get_branch(&bn)
                .ok_or_else(|| RepoError::BranchNotFound(branch_name.clone()))?;
            (branch_name, target_id)
        };

        // Refresh the HEAD id/branch caches as a side effect.
        *self.cached_head_branch.borrow_mut() = Some(branch_name.clone());
        *self.cached_head_id.borrow_mut() = Some(head_id);

        if let Some(ref tree) = *self.cached_head_snapshot.borrow() {
            return Ok(tree.clone());
        }

        if let Some(tree) = self
            .meta
            .load_file_tree(&head_id)
            .map_err(RepoError::Meta)?
        {
            // Validate the persisted tree against the recorded content
            // hash; a mismatch means it is stale and must be rebuilt.
            let tree_hash = tree.content_hash();
            let stored_hash = self
                .meta
                .get_config("head_tree_hash")
                .ok()
                .flatten()
                .and_then(|h| Hash::from_hex(&h).ok());

            if stored_hash.is_none_or(|h| h == tree_hash) {
                if stored_hash.is_none() {
                    // First sighting: record the hash for later validation.
                    let _ = self.meta.set_config("head_tree_hash", &tree_hash.to_hex());
                }

                *self.cached_head_snapshot.borrow_mut() = Some(tree.clone());
                return Ok(tree);
            }
        }

        // Slow path: replay the full patch chain, then persist both the
        // tree and its hash (best effort).
        let tree = self.snapshot_uncached(&head_id)?;
        let tree_hash = tree.content_hash();

        let _ = self.meta.set_config("head_tree_hash", &tree_hash.to_hex());

        let _ = self.meta.store_file_tree(&head_id, &tree);

        *self.cached_head_snapshot.borrow_mut() = Some(tree.clone());
        Ok(tree)
    }
1276
1277 pub fn invalidate_head_cache(&self) {
1282 *self.cached_head_snapshot.borrow_mut() = None;
1283 *self.cached_head_id.borrow_mut() = None;
1284 *self.cached_head_branch.borrow_mut() = None;
1285 let _ = self
1286 .meta
1287 .conn()
1288 .execute("DELETE FROM config WHERE key = 'head_tree_hash'", []);
1289 }
1290
1291 fn snapshot_uncached(&self, patch_id: &PatchId) -> Result<FileTree, RepoError> {
1293 let mut chain = self.dag.patch_chain(patch_id);
1294 chain.reverse();
1296 let patches: Vec<Patch> = chain
1297 .iter()
1298 .filter_map(|id| self.dag.get_patch(id).cloned())
1299 .collect();
1300
1301 let tree = apply_patch_chain(&patches, resolve_payload_to_hash)?;
1302 Ok(tree)
1303 }
1304
1305 pub fn snapshot(&self, patch_id: &PatchId) -> Result<FileTree, RepoError> {
1309 if let Some(tree) = self
1311 .meta
1312 .load_file_tree(patch_id)
1313 .map_err(RepoError::Meta)?
1314 {
1315 return Ok(tree);
1316 }
1317 let tree = self.snapshot_uncached(patch_id)?;
1319 let _ = self.meta.store_file_tree(patch_id, &tree);
1320 Ok(tree)
1321 }
1322
    /// Bring the on-disk working tree from `old_tree` to the current HEAD
    /// snapshot: fetch changed blobs from the CAS (in parallel), write them,
    /// then apply deletions and renames, and finally sweep any paths that
    /// existed in `old_tree` but not in the new HEAD tree.
    ///
    /// # Errors
    /// Propagates CAS lookup failures and filesystem I/O errors; the final
    /// sweep of stale paths is best-effort and ignores removal failures.
    pub fn sync_working_tree(&self, old_tree: &FileTree) -> Result<(), RepoError> {
        use rayon::prelude::*;

        let new_tree = self.snapshot_head()?;
        let diffs = diff_trees(old_tree, &new_tree);

        // Borrow once so the rayon closures capture plain references.
        let cas = &self.cas;
        let root = &self.root;

        // Phase 1: fetch the blob for every added/modified entry in parallel.
        // The first CAS error short-circuits the collect.
        let blob_results: Result<Vec<(String, Vec<u8>)>, CasError> = diffs
            .par_iter()
            .filter_map(|entry| {
                if let (DiffType::Added | DiffType::Modified, Some(new_hash)) =
                    (&entry.diff_type, &entry.new_hash)
                {
                    Some((entry.path.clone(), *new_hash))
                } else {
                    None
                }
            })
            .map(|(path, hash)| {
                let blob = cas.get_blob(&hash)?;
                Ok((path, blob))
            })
            .collect();

        let blobs: Vec<(String, Vec<u8>)> = blob_results?;

        // Phase 2: create parent directories sequentially (create_dir_all is
        // not worth parallelizing and keeps the writes below conflict-free).
        for (path, _) in &blobs {
            let full_path = root.join(path);
            if let Some(parent) = full_path.parent() {
                fs::create_dir_all(parent)?;
            }
        }

        // Phase 3: write all fetched blobs in parallel; directories already
        // exist from phase 2.
        blobs
            .par_iter()
            .map(|(path, data)| {
                let full_path = root.join(path);
                fs::write(&full_path, data).map_err(RepoError::Io)
            })
            .collect::<Result<Vec<()>, RepoError>>()?;

        // Phase 4: sequential deletions and renames. Added/Modified were
        // handled above, so their arm is intentionally empty.
        for entry in &diffs {
            let full_path = root.join(&entry.path);
            match &entry.diff_type {
                DiffType::Deleted => {
                    if full_path.exists() {
                        fs::remove_file(&full_path)?;
                    }
                }
                DiffType::Renamed { old_path, .. } => {
                    let old_full = root.join(old_path);
                    if old_full.exists() {
                        if let Some(parent) = full_path.parent() {
                            fs::create_dir_all(parent)?;
                        }
                        fs::rename(&old_full, &full_path)?;
                    }
                }
                DiffType::Added | DiffType::Modified => {
                }
            }
        }

        // Phase 5: safety sweep — remove any file tracked by the old tree
        // that the new tree no longer contains (best-effort).
        for (path, _) in old_tree.iter() {
            if !new_tree.contains(path) {
                let full_path = root.join(path);
                if full_path.exists() {
                    let _ = fs::remove_file(&full_path);
                }
            }
        }

        Ok(())
    }
1416
1417 pub fn checkout(&mut self, branch_name: &str) -> Result<FileTree, RepoError> {
1427 let old_head = self.head().map(|(_, id)| id).unwrap_or(Hash::ZERO);
1428 let old_branch = self.head().ok().map(|(n, _)| n);
1429 let target = BranchName::new(branch_name)?;
1430
1431 let target_id = self
1432 .dag
1433 .get_branch(&target)
1434 .ok_or_else(|| RepoError::BranchNotFound(branch_name.to_string()))?;
1435
1436 let has_changes = self.has_uncommitted_changes()?;
1437 if has_changes {
1438 self.stash_push(Some("auto-stash before checkout"))?;
1439 }
1440
1441 let target_tree = self.snapshot(&target_id)?;
1442
1443 let current_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());
1444
1445 let diffs = diff_trees(¤t_tree, &target_tree);
1446
1447 for entry in &diffs {
1448 let full_path = self.root.join(&entry.path);
1449 match &entry.diff_type {
1450 DiffType::Added | DiffType::Modified => {
1451 if let Some(new_hash) = &entry.new_hash {
1452 let blob = self.cas.get_blob(new_hash)?;
1453 if let Some(parent) = full_path.parent() {
1454 fs::create_dir_all(parent)?;
1455 }
1456 fs::write(&full_path, &blob)?;
1457 }
1458 }
1459 DiffType::Deleted => {
1460 if full_path.exists() {
1461 fs::remove_file(&full_path)?;
1462 }
1463 }
1464 DiffType::Renamed { old_path, .. } => {
1465 let old_full = self.root.join(old_path);
1466 if old_full.exists() {
1467 if let Some(parent) = full_path.parent() {
1468 fs::create_dir_all(parent)?;
1469 }
1470 fs::rename(&old_full, &full_path)?;
1471 }
1472 }
1473 }
1474 }
1475
1476 for (path, _) in current_tree.iter() {
1477 if !target_tree.contains(path) {
1478 let full_path = self.root.join(path);
1479 if full_path.exists() {
1480 let _ = fs::remove_file(&full_path);
1481 }
1482 }
1483 }
1484
1485 self.write_head_branch(branch_name)?;
1486
1487 self.invalidate_head_cache();
1488
1489 let _ = self.record_reflog(
1490 &old_head,
1491 &target_id,
1492 &format!(
1493 "checkout: moving from {} to {}",
1494 old_branch.as_deref().unwrap_or("HEAD"),
1495 branch_name
1496 ),
1497 );
1498
1499 if has_changes && let Err(e) = self.stash_pop() {
1500 tracing::warn!("Warning: could not restore stashed changes: {}", e);
1501 }
1502
1503 Ok(target_tree)
1504 }
1505
    /// Diff two refs. With both `from` and `to` absent, diffs HEAD against
    /// the working tree. A ref resolves in this order: `HEAD` / `HEAD~N`,
    /// full patch hash, unique hash prefix, tag, then branch name.
    ///
    /// # Errors
    /// Ambiguous prefixes, unknown refs, and invalid `HEAD~N` forms produce
    /// `RepoError::Custom` / `RepoError::BranchNotFound`.
    pub fn diff(&self, from: Option<&str>, to: Option<&str>) -> Result<Vec<DiffEntry>, RepoError> {
        // Resolve a user-supplied ref string to a concrete patch id.
        let resolve_id = |name: &str| -> Result<PatchId, RepoError> {
            // 1. HEAD / HEAD~N — walk N first-parent links from HEAD.
            if name == "HEAD" || name.starts_with("HEAD~") {
                let (_, head_id) = self.head()?;
                let mut target_id = head_id;
                if let Some(n_str) = name.strip_prefix("HEAD~") {
                    let n: usize = n_str
                        .parse()
                        .map_err(|_| RepoError::Custom(format!("invalid HEAD~N: {}", name)))?;
                    for _ in 0..n {
                        let patch = self.dag.get_patch(&target_id).ok_or_else(|| {
                            RepoError::Custom("HEAD ancestor not found".to_string())
                        })?;
                        target_id = patch
                            .parent_ids
                            .first()
                            .ok_or_else(|| RepoError::Custom("HEAD has no parent".to_string()))?
                            .to_owned();
                    }
                }
                return Ok(target_id);
            }
            // 2. Exact full-length hash of a known patch.
            if let Ok(hash) = Hash::from_hex(name)
                && self.dag.has_patch(&hash)
            {
                return Ok(hash);
            }
            // 3. Unique hash prefix; more than one match is an error.
            let all_patch_ids = self.dag.patch_ids();
            let prefix_matches: Vec<&PatchId> = all_patch_ids
                .iter()
                .filter(|id| id.to_hex().starts_with(name))
                .collect();
            match prefix_matches.len() {
                1 => return Ok(*prefix_matches[0]),
                0 => {}
                n => {
                    return Err(RepoError::Custom(format!(
                        "ambiguous ref '{}' matches {} commits",
                        name, n
                    )));
                }
            }
            // 4. Tag name.
            if let Ok(Some(tag_id)) = self.resolve_tag(name) {
                return Ok(tag_id);
            }
            // 5. Branch name (last resort).
            let bn = BranchName::new(name)?;
            self.dag
                .get_branch(&bn)
                .ok_or_else(|| RepoError::BranchNotFound(name.to_string()))
        };

        // No refs at all: diff HEAD against what is currently on disk.
        if from.is_none() && to.is_none() {
            let head_tree = self.snapshot_head()?;
            let working_tree = self.build_working_tree()?;
            return Ok(diff_trees(&head_tree, &working_tree));
        }

        // Missing `from` means "diff against the empty tree".
        let old_tree = match from {
            Some(f) => self.snapshot(&resolve_id(f)?)?,
            None => FileTree::empty(),
        };

        // Missing `to` defaults to HEAD.
        let new_tree = match to {
            Some(t) => self.snapshot(&resolve_id(t)?)?,
            None => self.snapshot_head()?,
        };

        Ok(diff_trees(&old_tree, &new_tree))
    }
1590
1591 fn build_working_tree(&self) -> Result<FileTree, RepoError> {
1593 let mut tree = FileTree::empty();
1594 let entries = walk_dir(&self.root, &self.ignore_patterns)?;
1595 for entry in &entries {
1596 if let Ok(data) = fs::read(&entry.full_path) {
1597 let hash = Hash::from_data(&data);
1598 tree.insert(entry.relative.clone(), hash);
1599 }
1600 }
1601 Ok(tree)
1602 }
1603
1604 pub fn diff_staged(&self) -> Result<Vec<DiffEntry>, RepoError> {
1606 let head_tree = self.snapshot_head()?;
1607 let mut staged_tree = head_tree.clone();
1610 let working_set = self.meta.working_set()?;
1611 for (path, status) in &working_set {
1612 match status {
1613 FileStatus::Added | FileStatus::Modified => {
1614 let full_path = self.root.join(path);
1615 if let Ok(data) = fs::read(&full_path) {
1616 let hash = Hash::from_data(&data);
1617 staged_tree.insert(path.clone(), hash);
1618 }
1619 }
1620 FileStatus::Deleted => {
1621 staged_tree.remove(path);
1623 }
1624 _ => {}
1625 }
1626 }
1627 Ok(diff_trees(&head_tree, &staged_tree))
1628 }
1629
1630 pub fn reset(&mut self, target: &str, mode: ResetMode) -> Result<PatchId, RepoError> {
1642 let old_head = self.head().map(|(_, id)| id).unwrap_or(Hash::ZERO);
1643 let target_id = if target == "HEAD" {
1644 let (_, id) = self.head()?;
1645 id
1646 } else if let Some(rest) = target.strip_prefix("HEAD~") {
1647 let n: usize = rest
1648 .parse()
1649 .map_err(|_| RepoError::Custom(format!("invalid HEAD~N: {}", target)))?;
1650 let (_, head_id) = self.head()?;
1651 let mut current = head_id;
1652 for _ in 0..n {
1653 let patch = self
1654 .dag
1655 .get_patch(¤t)
1656 .ok_or_else(|| RepoError::Custom("HEAD ancestor not found".to_string()))?;
1657 current = patch
1658 .parent_ids
1659 .first()
1660 .ok_or_else(|| RepoError::Custom("HEAD has no parent".to_string()))?
1661 .to_owned();
1662 }
1663 current
1664 } else if let Ok(hash) = Hash::from_hex(target)
1665 && self.dag.has_patch(&hash)
1666 {
1667 hash
1668 } else {
1669 let bn = BranchName::new(target)?;
1670 self.dag
1671 .get_branch(&bn)
1672 .ok_or_else(|| RepoError::BranchNotFound(target.to_string()))?
1673 };
1674
1675 let (branch_name, _) = self.head()?;
1676 let old_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());
1677
1678 let branch = BranchName::new(&branch_name)?;
1679 self.dag.update_branch(&branch, target_id)?;
1680 self.meta.set_branch(&branch, &target_id)?;
1681 self.invalidate_head_cache();
1682
1683 match mode {
1684 ResetMode::Soft => {
1685 let new_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());
1686 let diffs = diff_trees(&new_tree, &old_tree);
1687 for entry in &diffs {
1688 match &entry.diff_type {
1689 DiffType::Added | DiffType::Modified => {
1690 let repo_path = RepoPath::new(entry.path.clone())?;
1691 self.meta
1692 .working_set_add(&repo_path, FileStatus::Modified)?;
1693 }
1694 DiffType::Deleted => {
1695 let repo_path = RepoPath::new(entry.path.clone())?;
1696 self.meta.working_set_add(&repo_path, FileStatus::Deleted)?;
1697 }
1698 DiffType::Renamed { old_path, .. } => {
1699 let repo_path = RepoPath::new(old_path.clone())?;
1700 self.meta.working_set_add(&repo_path, FileStatus::Deleted)?;
1701 let repo_path = RepoPath::new(entry.path.clone())?;
1702 self.meta.working_set_add(&repo_path, FileStatus::Added)?;
1703 }
1704 }
1705 }
1706 }
1707 ResetMode::Mixed | ResetMode::Hard => {
1708 self.meta
1709 .conn()
1710 .execute("DELETE FROM working_set", [])
1711 .map_err(|e| RepoError::Meta(crate::metadata::MetaError::Database(e)))?;
1712 if mode == ResetMode::Hard {
1713 self.sync_working_tree(&old_tree)?;
1714 }
1715 }
1716 }
1717
1718 let _ = self.record_reflog(
1719 &old_head,
1720 &target_id,
1721 &format!("reset: moving to {}", target),
1722 );
1723
1724 Ok(target_id)
1725 }
1726
    /// Create a new patch on the current branch that undoes `patch_id`.
    ///
    /// - `Batch`: each create/modify becomes a delete; each delete is restored
    ///   from the parent tree (when the old content hash is still known).
    /// - `Create`/`Modify`: reverted by a delete of the same target path.
    /// - `Delete`: restored from the first parent's tree if the old content
    ///   hash can be found there; otherwise an error.
    /// - Other operation types cannot be reverted.
    ///
    /// Uses `message` for the revert commit when given, otherwise
    /// `"Revert <id>"`. Returns the new revert patch's id.
    pub fn revert(
        &mut self,
        patch_id: &PatchId,
        message: Option<&str>,
    ) -> Result<PatchId, RepoError> {
        let patch = self
            .dag
            .get_patch(patch_id)
            .ok_or_else(|| RepoError::Custom(format!("patch not found: {}", patch_id)))?;

        let (branch_name, head_id) = self.head()?;
        let msg = message
            .map(|m| m.to_string())
            .unwrap_or_else(|| format!("Revert {}", patch_id));

        // Captured before moving the branch so sync_working_tree can diff.
        let old_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());

        match &patch.operation_type {
            OperationType::Batch => {
                let changes = patch.file_changes().ok_or_else(|| {
                    RepoError::Custom("batch patch has invalid file changes".into())
                })?;
                if changes.is_empty() {
                    return Err(RepoError::Custom("cannot revert empty batch".into()));
                }
                // Tree *before* the batch, used to resurrect deleted files.
                let parent_tree = patch
                    .parent_ids
                    .first()
                    .map(|pid| self.snapshot(pid).unwrap_or_else(|_| FileTree::empty()))
                    .unwrap_or_else(FileTree::empty);
                let mut revert_changes = Vec::new();
                for change in &changes {
                    match change.op {
                        OperationType::Create | OperationType::Modify => {
                            // Undo a create/modify by deleting the path.
                            revert_changes.push(FileChange {
                                op: OperationType::Delete,
                                path: change.path.clone(),
                                payload: Vec::new(),
                            });
                        }
                        OperationType::Delete => {
                            // Undo a delete by restoring the pre-batch hash;
                            // silently skipped if the parent tree lacks it.
                            if let Some(hash) = parent_tree.get(&change.path) {
                                revert_changes.push(FileChange {
                                    op: OperationType::Modify,
                                    path: change.path.clone(),
                                    // Payload carries the content hash as hex.
                                    payload: hash.to_hex().as_bytes().to_vec(),
                                });
                            }
                        }
                        _ => {}
                    }
                }
                if revert_changes.is_empty() {
                    return Err(RepoError::Custom("nothing to revert in batch".into()));
                }
                let revert_patch =
                    Patch::new_batch(revert_changes, vec![head_id], self.author.clone(), msg);
                let revert_id = self.dag.add_patch(revert_patch.clone(), vec![head_id])?;
                self.meta.store_patch(&revert_patch)?;

                // Advance the branch in both DAG and metadata.
                let branch = BranchName::new(&branch_name)?;
                self.dag.update_branch(&branch, revert_id)?;
                self.meta.set_branch(&branch, &revert_id)?;

                self.invalidate_head_cache();

                self.sync_working_tree(&old_tree)?;
                Ok(revert_id)
            }
            OperationType::Create | OperationType::Modify => {
                // A create/modify is reverted with a delete patch on the same
                // touch set / target path.
                let revert_patch = Patch::new(
                    OperationType::Delete,
                    patch.touch_set.clone(),
                    patch.target_path.clone(),
                    vec![],
                    vec![head_id],
                    self.author.clone(),
                    msg,
                );

                let revert_id = self.dag.add_patch(revert_patch.clone(), vec![head_id])?;
                self.meta.store_patch(&revert_patch)?;

                let branch = BranchName::new(&branch_name)?;
                self.dag.update_branch(&branch, revert_id)?;
                self.meta.set_branch(&branch, &revert_id)?;

                self.invalidate_head_cache();

                self.sync_working_tree(&old_tree)?;
                Ok(revert_id)
            }
            OperationType::Delete => {
                // Restore the deleted file from the tree of the patch's first
                // parent, if that tree still records the content hash.
                if let Some(parent_id) = patch.parent_ids.first() {
                    let parent_tree = self.snapshot(parent_id)?;
                    if let Some(path) = &patch.target_path
                        && let Some(hash) = parent_tree.get(path)
                    {
                        // Payload carries the restored content hash as hex.
                        let payload = hash.to_hex().as_bytes().to_vec();
                        let revert_patch = Patch::new(
                            OperationType::Modify,
                            patch.touch_set.clone(),
                            patch.target_path.clone(),
                            payload,
                            vec![head_id],
                            self.author.clone(),
                            msg,
                        );

                        let revert_id = self.dag.add_patch(revert_patch.clone(), vec![head_id])?;
                        self.meta.store_patch(&revert_patch)?;

                        let branch = BranchName::new(&branch_name)?;
                        self.dag.update_branch(&branch, revert_id)?;
                        self.meta.set_branch(&branch, &revert_id)?;

                        self.invalidate_head_cache();

                        self.sync_working_tree(&old_tree)?;
                        return Ok(revert_id);
                    }
                }
                Err(RepoError::Custom(
                    "cannot revert delete: original file content not found".into(),
                ))
            }
            _ => Err(RepoError::Custom(format!(
                "cannot revert {:?} patches",
                patch.operation_type
            ))),
        }
    }
1867
    /// Squash the newest `count` patches on the current branch into a single
    /// composed patch carrying `message`, and point the branch at it.
    ///
    /// Requires `count >= 2` and at least `count + 1` patches on the branch
    /// (the patch before the squashed range becomes the new patch's parent).
    /// Returns the id of the composed patch.
    pub fn squash(&mut self, count: usize, message: &str) -> Result<PatchId, RepoError> {
        if count < 2 {
            return Err(RepoError::Custom(
                "need at least 2 patches to squash".into(),
            ));
        }

        let (branch_name, tip_id) = self.head()?;
        let chain = self.dag().patch_chain(&tip_id);

        // Need one extra patch beyond `count` to serve as the new parent.
        if chain.len() < count + 1 {
            return Err(RepoError::Custom(format!(
                "only {} patches on branch, cannot squash {}",
                chain.len(),
                count
            )));
        }

        // Collect the newest `count` patches; the reversed index walk puts
        // them oldest-first, the order compose_chain receives them in.
        let mut to_squash = Vec::new();
        for i in (0..count).rev() {
            let pid = &chain[i];
            let patch = self
                .dag()
                .get_patch(pid)
                .ok_or_else(|| RepoError::Custom(format!("patch not found: {}", pid.to_hex())))?;
            to_squash.push(patch.clone());
        }

        // Parent of the oldest squashed patch; a root patch has none and
        // cannot be squashed.
        let parent_of_first = *to_squash[0]
            .parent_ids
            .first()
            .ok_or_else(|| RepoError::Custom("cannot squash root patch".into()))?;

        let result = crate::patch::compose::compose_chain(&to_squash, &self.author, message)
            .map_err(|e| RepoError::Custom(e.to_string()))?;

        let new_id = self
            .dag_mut()
            .add_patch(result.patch.clone(), vec![parent_of_first])?;
        self.meta().store_patch(&result.patch)?;

        // Move the branch pointer in both DAG and metadata.
        let branch = BranchName::new(&branch_name).map_err(|e| RepoError::Custom(e.to_string()))?;
        self.dag_mut().update_branch(&branch, new_id)?;
        self.meta().set_branch(&branch, &new_id)?;

        // Record the move from the newest squashed patch to the new patch.
        self.record_reflog(
            to_squash.last().map(|p| &p.id).unwrap_or(&parent_of_first),
            &new_id,
            &format!("squash: {} patches into one", count),
        )?;

        self.invalidate_head_cache();

        Ok(new_id)
    }
1932
1933 pub fn merge_plan(&self, branch_a: &str, branch_b: &str) -> Result<MergeResult, RepoError> {
1939 let ba = BranchName::new(branch_a)?;
1940 let bb = BranchName::new(branch_b)?;
1941 self.dag.merge_branches(&ba, &bb).map_err(RepoError::Dag)
1942 }
1943
    /// Dry-run merge of `source_branch` into HEAD: computes the merged tree
    /// or the set of conflicts, but mutates neither the DAG, the metadata,
    /// nor the working tree.
    ///
    /// Returns `patches_applied == 0` with a clean result when the branches
    /// already share a tip or neither side has unique patches. Fails with
    /// [`RepoError::MergeInProgress`] if a merge is pending.
    pub fn preview_merge(
        &self,
        source_branch: &str,
    ) -> Result<MergeExecutionResult, RepoError> {
        if !self.pending_merge_parents.is_empty() {
            return Err(RepoError::MergeInProgress);
        }

        let (head_branch, head_id) = self.head()?;
        let source_bn = BranchName::new(source_branch)?;
        let source_tip = self
            .dag
            .get_branch(&source_bn)
            .ok_or_else(|| RepoError::BranchNotFound(source_branch.to_string()))?;

        let head_bn = BranchName::new(&head_branch)?;

        let merge_result = self.dag.merge_branches(&head_bn, &source_bn)?;

        // Same tip: nothing to merge.
        if head_id == source_tip {
            return Ok(MergeExecutionResult {
                is_clean: true,
                merged_tree: self.snapshot_head()?,
                merge_patch_id: None,
                unresolved_conflicts: Vec::new(),
                patches_applied: 0,
            });
        }

        // No unique patches on either side: also nothing to merge.
        if merge_result.patches_b_only.is_empty() && merge_result.patches_a_only.is_empty() {
            return Ok(MergeExecutionResult {
                is_clean: true,
                merged_tree: self.snapshot_head()?,
                merge_patch_id: None,
                unresolved_conflicts: Vec::new(),
                patches_applied: 0,
            });
        }

        let patches_applied = merge_result.patches_b_only.len();
        let is_clean = merge_result.is_clean;

        if is_clean {
            // Clean preview: overlay the source side's changes (relative to
            // the common ancestor) onto HEAD's tree, in memory only.
            let source_tree = self.snapshot(&source_tip).unwrap_or_else(|_| FileTree::empty());
            let lca_id = self
                .dag
                .lca(&head_id, &source_tip)
                .ok_or_else(|| RepoError::Custom("no common ancestor found".to_string()))?;
            let lca_tree = self.snapshot(&lca_id).unwrap_or_else(|_| FileTree::empty());
            let head_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());

            let source_diffs = diff_trees(&lca_tree, &source_tree);
            let mut merged_tree = head_tree.clone();
            for entry in &source_diffs {
                match &entry.diff_type {
                    DiffType::Added | DiffType::Modified => {
                        if let Some(new_hash) = &entry.new_hash {
                            merged_tree.insert(entry.path.clone(), *new_hash);
                        }
                    }
                    DiffType::Deleted => {
                        merged_tree.remove(&entry.path);
                    }
                    DiffType::Renamed { old_path, .. } => {
                        // A rename keeps the old content hash at the new path.
                        if let Some(old_hash) = entry.old_hash {
                            merged_tree.remove(old_path);
                            merged_tree.insert(entry.path.clone(), old_hash);
                        }
                    }
                }
            }

            Ok(MergeExecutionResult {
                is_clean: true,
                merged_tree,
                merge_patch_id: None, unresolved_conflicts: Vec::new(),
                patches_applied,
            })
        } else {
            // Conflicting preview: do a three-way hash comparison per path to
            // list the conflicts without touching anything.
            let head_tree = self.snapshot(&head_id).unwrap_or_else(|_| FileTree::empty());
            let source_tree = self.snapshot(&source_tip).unwrap_or_else(|_| FileTree::empty());
            let lca_id = self
                .dag
                .lca(&head_id, &source_tip)
                .ok_or_else(|| RepoError::Custom("no common ancestor found".to_string()))?;
            let lca_tree = self.snapshot(&lca_id).unwrap_or_else(|_| FileTree::empty());

            // Union of paths across ours, theirs, and base.
            let mut all_paths = std::collections::HashSet::new();
            for path in head_tree.paths() {
                all_paths.insert(path);
            }
            for path in source_tree.paths() {
                all_paths.insert(path);
            }
            for path in lca_tree.paths() {
                all_paths.insert(path);
            }

            let mut unresolved_conflicts: Vec<ConflictInfo> = Vec::new();
            for path in &all_paths {
                let lca_hash = lca_tree.get(path).copied();
                let ours_hash = head_tree.get(path).copied();
                let theirs_hash = source_tree.get(path).copied();

                // Both sides agree (including both-absent): no conflict.
                if ours_hash == theirs_hash {
                    continue;
                }
                // Only one side diverged from the base: trivially resolvable.
                if ours_hash == lca_hash || theirs_hash == lca_hash {
                    continue;
                }
                // Both sides changed the path differently: real conflict.
                unresolved_conflicts.push(ConflictInfo {
                    path: path.to_string(),
                    our_patch_id: head_id,
                    their_patch_id: source_tip,
                    our_content_hash: ours_hash,
                    their_content_hash: theirs_hash,
                    base_content_hash: lca_hash,
                });
            }

            Ok(MergeExecutionResult {
                is_clean: false,
                merged_tree: self.snapshot_head()?,
                merge_patch_id: None,
                unresolved_conflicts,
                patches_applied,
            })
        }
    }
2100
2101 pub fn execute_merge(
2103 &mut self,
2104 source_branch: &str,
2105 ) -> Result<MergeExecutionResult, RepoError> {
2106 if !self.pending_merge_parents.is_empty() {
2107 return Err(RepoError::MergeInProgress);
2108 }
2109
2110 let (head_branch, head_id) = self.head()?;
2111 let source_bn = BranchName::new(source_branch)?;
2112 let source_tip = self
2113 .dag
2114 .get_branch(&source_bn)
2115 .ok_or_else(|| RepoError::BranchNotFound(source_branch.to_string()))?;
2116
2117 let head_bn = BranchName::new(&head_branch)?;
2118
2119 let merge_result = self.dag.merge_branches(&head_bn, &source_bn)?;
2120
2121 if head_id == source_tip {
2122 return Ok(MergeExecutionResult {
2123 is_clean: true,
2124 merged_tree: self.snapshot_head()?,
2125 merge_patch_id: None,
2126 unresolved_conflicts: Vec::new(),
2127 patches_applied: 0,
2128 });
2129 }
2130
2131 if merge_result.patches_b_only.is_empty() && merge_result.patches_a_only.is_empty() {
2132 return Ok(MergeExecutionResult {
2133 is_clean: true,
2134 merged_tree: self.snapshot_head()?,
2135 merge_patch_id: None,
2136 unresolved_conflicts: Vec::new(),
2137 patches_applied: 0,
2138 });
2139 }
2140
2141 if merge_result.is_clean {
2142 self.execute_clean_merge(&head_id, &source_tip, &head_branch, &merge_result)
2143 } else {
2144 self.execute_conflicting_merge(
2145 &head_id,
2146 &source_tip,
2147 source_branch,
2148 &head_branch,
2149 &merge_result,
2150 )
2151 }
2152 }
2153
    /// Apply a conflict-free merge: replay the source side's diffs (relative
    /// to the common ancestor) onto disk and the in-memory tree, then commit
    /// a `Merge` patch with both tips as parents and advance the branch.
    fn execute_clean_merge(
        &mut self,
        head_id: &PatchId,
        source_tip: &PatchId,
        head_branch: &str,
        merge_result: &MergeResult,
    ) -> Result<MergeExecutionResult, RepoError> {
        let head_tree = self.snapshot(head_id)?;
        let source_tree = self.snapshot(source_tip)?;
        let lca_id = self
            .dag
            .lca(head_id, source_tip)
            .ok_or_else(|| RepoError::Custom("no common ancestor found".to_string()))?;
        let lca_tree = self.snapshot(&lca_id).unwrap_or_else(|_| FileTree::empty());

        // Changes the source side made since the common ancestor.
        let source_diffs = diff_trees(&lca_tree, &source_tree);
        let mut merged_tree = head_tree.clone();

        // Apply each source-side change to both disk and the merged tree.
        for entry in &source_diffs {
            let full_path = self.root.join(&entry.path);
            match &entry.diff_type {
                DiffType::Added | DiffType::Modified => {
                    if let Some(new_hash) = &entry.new_hash {
                        let blob = self.cas.get_blob(new_hash)?;
                        if let Some(parent) = full_path.parent() {
                            fs::create_dir_all(parent)?;
                        }
                        fs::write(&full_path, &blob)?;
                        merged_tree.insert(entry.path.clone(), *new_hash);
                    }
                }
                DiffType::Deleted => {
                    if full_path.exists() {
                        fs::remove_file(&full_path)?;
                    }
                    merged_tree.remove(&entry.path);
                }
                DiffType::Renamed { old_path, .. } => {
                    let old_full = self.root.join(old_path);
                    if old_full.exists() {
                        if let Some(parent) = full_path.parent() {
                            fs::create_dir_all(parent)?;
                        }
                        fs::rename(&old_full, &full_path)?;
                    }
                    // A rename keeps the old content hash at the new path.
                    if let Some(old_hash) = entry.old_hash {
                        merged_tree.remove(old_path);
                        merged_tree.insert(entry.path.clone(), old_hash);
                    }
                }
            }
        }

        // NOTE(review): the message interpolates `source_tip` (a patch id),
        // so it reads "Merge branch '<hash>' into ..." — the source branch
        // *name* was likely intended; confirm before changing.
        let merge_patch = Patch::new(
            OperationType::Merge,
            TouchSet::empty(),
            None,
            vec![],
            vec![*head_id, *source_tip],
            self.author.clone(),
            format!("Merge branch '{}' into {}", source_tip, head_branch),
        );

        let merge_id = self
            .dag
            .add_patch(merge_patch.clone(), vec![*head_id, *source_tip])?;
        self.meta.store_patch(&merge_patch)?;

        // Advance the HEAD branch to the merge patch.
        let branch = BranchName::new(head_branch)?;
        self.dag.update_branch(&branch, merge_id)?;
        self.meta.set_branch(&branch, &merge_id)?;

        self.invalidate_head_cache();

        Ok(MergeExecutionResult {
            is_clean: true,
            merged_tree,
            merge_patch_id: Some(merge_id),
            unresolved_conflicts: Vec::new(),
            patches_applied: merge_result.patches_b_only.len(),
        })
    }
2236
    /// Apply a merge that has conflicts: non-conflicting source-only patches
    /// are applied to disk, conflicting files get conflict-marker content
    /// written, and the merge is left *pending* (no merge patch committed)
    /// until the conflicts are resolved.
    fn execute_conflicting_merge(
        &mut self,
        head_id: &PatchId,
        source_tip: &PatchId,
        source_branch: &str,
        head_branch: &str,
        merge_result: &MergeResult,
    ) -> Result<MergeExecutionResult, RepoError> {
        let head_tree = self.snapshot(head_id)?;
        let source_tree = self.snapshot(source_tip)?;

        let lca_id = self
            .dag
            .lca(head_id, source_tip)
            .ok_or_else(|| RepoError::Custom("no common ancestor found".to_string()))?;
        let lca_tree = self.snapshot(&lca_id).unwrap_or_else(|_| FileTree::empty());

        // Every patch involved in any conflict, on either side.
        let conflicting_patch_ids: HashSet<PatchId> = merge_result
            .conflicts
            .iter()
            .flat_map(|c| [c.patch_a_id, c.patch_b_id])
            .collect();

        let mut merged_tree = head_tree.clone();
        let mut patches_applied = 0;

        // Apply source-only patches that are not part of any conflict.
        for entry in &merge_result.patches_b_only {
            if conflicting_patch_ids.contains(entry) {
                continue;
            }
            if let Some(patch) = self.dag.get_patch(entry) {
                // Identity and merge patches carry no file content to apply.
                if patch.is_identity() || patch.operation_type == OperationType::Merge {
                    continue;
                }
                if let Some(path) = &patch.target_path {
                    let full_path = self.root.join(path);
                    match patch.operation_type {
                        OperationType::Create | OperationType::Modify => {
                            // Only apply when the payload resolves to a blob
                            // actually present in the CAS.
                            if let Some(blob_hash) = resolve_payload_to_hash(patch)
                                && self.cas.has_blob(&blob_hash)
                            {
                                let blob = self.cas.get_blob(&blob_hash)?;
                                if let Some(parent) = full_path.parent() {
                                    fs::create_dir_all(parent)?;
                                }
                                fs::write(&full_path, &blob)?;
                                merged_tree.insert(path.clone(), blob_hash);
                            }
                        }
                        OperationType::Delete => {
                            if full_path.exists() {
                                fs::remove_file(&full_path)?;
                            }
                            merged_tree.remove(path);
                        }
                        _ => {}
                    }
                }
                // Counted even when the arm above wrote nothing (e.g. blob
                // missing from the CAS or no target path).
                patches_applied += 1;
            }
        }

        let mut unresolved_conflicts = Vec::new();

        // Write conflict-marker content for each unresolved conflict and
        // store the marker blob so the merged tree references it.
        for conflict in &merge_result.conflicts {
            let conflict_info =
                self.build_conflict_info(conflict, &head_tree, &source_tree, &lca_tree);
            if let Some(info) = conflict_info {
                let full_path = self.root.join(&info.path);
                if let Some(parent) = full_path.parent() {
                    fs::create_dir_all(parent)?;
                }
                let conflict_content =
                    self.write_conflict_markers(&info, source_branch, head_branch)?;
                fs::write(&full_path, conflict_content.as_bytes())?;
                let hash = self.cas.put_blob(conflict_content.as_bytes())?;
                merged_tree.insert(info.path.clone(), hash);
                unresolved_conflicts.push(info);
            }
        }

        // Mark the merge as pending; the eventual resolution commit will use
        // both tips as parents. Persisting the parents is best-effort.
        self.pending_merge_parents = vec![*head_id, *source_tip];

        let parents_json = serde_json::to_string(&self.pending_merge_parents).unwrap_or_default();
        let _ = self.meta.set_config("pending_merge_parents", &parents_json);

        Ok(MergeExecutionResult {
            is_clean: false,
            merged_tree,
            merge_patch_id: None,
            unresolved_conflicts,
            patches_applied,
        })
    }
2332
2333 fn build_conflict_info(
2334 &self,
2335 conflict: &Conflict,
2336 head_tree: &FileTree,
2337 source_tree: &FileTree,
2338 lca_tree: &FileTree,
2339 ) -> Option<ConflictInfo> {
2340 let patch_a = self.dag.get_patch(&conflict.patch_a_id)?;
2341 let patch_b = self.dag.get_patch(&conflict.patch_b_id)?;
2342
2343 let path = patch_a
2344 .target_path
2345 .clone()
2346 .or_else(|| patch_b.target_path.clone())
2347 .or_else(|| {
2348 conflict.conflict_addresses.first().cloned()
2350 })?;
2351
2352 let our_content_hash = head_tree.get(&path).copied();
2353 let their_content_hash = source_tree.get(&path).copied();
2354 let base_content_hash = lca_tree.get(&path).copied();
2355
2356 Some(ConflictInfo {
2357 path,
2358 our_patch_id: conflict.patch_a_id,
2359 their_patch_id: conflict.patch_b_id,
2360 our_content_hash,
2361 their_content_hash,
2362 base_content_hash,
2363 })
2364 }
2365
2366 fn write_conflict_markers(
2367 &self,
2368 info: &ConflictInfo,
2369 source_branch: &str,
2370 head_branch: &str,
2371 ) -> Result<String, RepoError> {
2372 let our_content = match info.our_content_hash {
2373 Some(hash) => String::from_utf8(self.cas.get_blob(&hash)?).unwrap_or_default(),
2374 None => String::new(),
2375 };
2376
2377 let their_content = match info.their_content_hash {
2378 Some(hash) => String::from_utf8(self.cas.get_blob(&hash)?).unwrap_or_default(),
2379 None => String::new(),
2380 };
2381
2382 let base_content = match info.base_content_hash {
2383 Some(hash) => Some(String::from_utf8(self.cas.get_blob(&hash)?).unwrap_or_default()),
2384 None => None,
2385 };
2386
2387 let merged = three_way_merge(
2388 base_content.as_deref(),
2389 &our_content,
2390 &their_content,
2391 head_branch,
2392 source_branch,
2393 );
2394
2395 match merged {
2396 Ok(content) => Ok(content),
2397 Err(conflict_lines) => {
2398 let mut result = String::new();
2399 for line in conflict_lines {
2400 result.push_str(&line);
2401 result.push('\n');
2402 }
2403 Ok(result)
2404 }
2405 }
2406 }
2407
    /// Re-apply `patch_id` on top of the current HEAD as a fresh patch (new
    /// parent, current author) and advance the branch to it.
    ///
    /// `Identity`, `Merge`, and `Create` patches are rejected. If the
    /// identical patch already exists in the DAG, it is accepted only when
    /// it is already an ancestor of HEAD. Returns the new patch's id.
    pub fn cherry_pick(&mut self, patch_id: &PatchId) -> Result<PatchId, RepoError> {
        let old_head = self.head().map(|(_, id)| id).unwrap_or(Hash::ZERO);
        let patch = self
            .dag
            .get_patch(patch_id)
            .ok_or_else(|| RepoError::Custom(format!("patch not found: {}", patch_id)))?;

        if patch.operation_type == OperationType::Identity
            || patch.operation_type == OperationType::Merge
            || patch.operation_type == OperationType::Create
        {
            return Err(RepoError::Custom(format!(
                "cannot cherry-pick {:?} patches",
                patch.operation_type
            )));
        }

        let (branch_name, head_id) = self.head()?;

        // Rebuild the patch with HEAD as its sole parent; batch patches keep
        // their file changes, others keep touch set/target/payload.
        let new_patch = if patch.operation_type == OperationType::Batch {
            let changes = patch
                .file_changes()
                .ok_or_else(|| RepoError::Custom("batch patch has invalid file changes".into()))?;
            Patch::new_batch(
                changes,
                vec![head_id],
                self.author.clone(),
                patch.message.clone(),
            )
        } else {
            Patch::new(
                patch.operation_type.clone(),
                patch.touch_set.clone(),
                patch.target_path.clone(),
                patch.payload.clone(),
                vec![head_id],
                self.author.clone(),
                patch.message.clone(),
            )
        };

        let new_id = match self.dag.add_patch(new_patch.clone(), vec![head_id]) {
            Ok(id) => id,
            Err(DagError::DuplicatePatch(_)) => {
                // Duplicate is fine only if HEAD already contains it.
                let head_ancestors = self.dag.ancestors(&head_id);
                let new_patch_id = new_patch.id;
                if head_ancestors.contains(&new_patch_id) {
                    return Ok(new_patch_id);
                }
                return Err(RepoError::Custom(
                    "patch already exists in DAG and is not reachable from HEAD".to_string(),
                ));
            }
            Err(e) => return Err(RepoError::Dag(e)),
        };
        self.meta.store_patch(&new_patch)?;

        // Snapshot the pre-move tree, then advance the branch pointer.
        let branch = BranchName::new(&branch_name)?;
        let old_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());
        self.dag.update_branch(&branch, new_id)?;
        self.meta.set_branch(&branch, &new_id)?;

        self.invalidate_head_cache();

        // Reflog recording is best-effort.
        let _ = self.record_reflog(&old_head, &new_id, &format!("cherry-pick: {}", patch_id));

        self.sync_working_tree(&old_tree)?;

        Ok(new_id)
    }
2486
    /// Rebases the current branch onto the tip of `target_branch`.
    ///
    /// Fast cases: if HEAD already equals the target tip, nothing changes; if
    /// HEAD is an ancestor of the target tip, the branch is fast-forwarded.
    /// Otherwise every patch unique to HEAD (excluding Merge/Identity/Create
    /// patches) is re-created on top of the target tip in timestamp order,
    /// re-authored as `self.author`, and the working tree is synced.
    ///
    /// # Errors
    /// Returns `BranchNotFound` for an unknown target, `Custom` when the two
    /// tips share no common ancestor, and propagates DAG/metadata/apply errors.
    pub fn rebase(&mut self, target_branch: &str) -> Result<RebaseResult, RepoError> {
        // `old_head` is only for the reflog; tolerate an unborn HEAD.
        let old_head = self.head().map(|(_, id)| id).unwrap_or(Hash::ZERO);
        let (head_branch, head_id) = self.head()?;
        let target_bn = BranchName::new(target_branch)?;
        let target_tip = self
            .dag
            .get_branch(&target_bn)
            .ok_or_else(|| RepoError::BranchNotFound(target_branch.to_string()))?;

        // Already at the target tip: nothing to do.
        if head_id == target_tip {
            return Ok(RebaseResult {
                patches_replayed: 0,
                new_tip: head_id,
            });
        }

        let lca_id = self
            .dag
            .lca(&head_id, &target_tip)
            .ok_or_else(|| RepoError::Custom("no common ancestor found".to_string()))?;

        // Fast-forward: HEAD is an ancestor of the target tip, so simply move
        // the branch pointer and sync the working tree.
        if lca_id == head_id {
            let branch = BranchName::new(&head_branch)?;
            let old_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());
            self.dag.update_branch(&branch, target_tip)?;
            self.meta.set_branch(&branch, &target_tip)?;
            self.invalidate_head_cache();

            self.sync_working_tree(&old_tree)?;

            return Ok(RebaseResult {
                patches_replayed: 0,
                new_tip: target_tip,
            });
        }

        // NOTE(review): despite the name, `head_ancestors` holds the *shared*
        // history (the LCA and its ancestors) used as the exclusion set below.
        let mut head_ancestors = self.dag.ancestors(&lca_id);
        head_ancestors.insert(lca_id);

        // DFS from HEAD, collecting patches not reachable from the LCA —
        // i.e. the history unique to this branch that must be replayed.
        let mut to_replay: Vec<Patch> = Vec::new();
        let mut visited = HashSet::new();
        let mut stack = vec![head_id];

        while let Some(id) = stack.pop() {
            if visited.contains(&id) || head_ancestors.contains(&id) {
                continue;
            }
            visited.insert(id);
            if let Some(patch) = self.dag.get_patch(&id) {
                to_replay.push(patch.clone());
                for parent_id in &patch.parent_ids {
                    if !visited.contains(parent_id) {
                        stack.push(*parent_id);
                    }
                }
            }
        }

        // Replay oldest-first so parent links follow original chronology.
        to_replay.sort_by_key(|p| p.timestamp);

        // Move the branch to the target tip first; it is advanced again after
        // replay. NOTE(review): a failure mid-replay leaves the branch moved —
        // this operation is not atomic.
        let branch = BranchName::new(&head_branch)?;
        let old_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());
        self.dag.update_branch(&branch, target_tip)?;
        self.meta.set_branch(&branch, &target_tip)?;
        self.invalidate_head_cache();

        let mut current_parent = target_tip;
        let mut last_new_id = target_tip;
        let mut replayed = 0usize;

        for patch in &to_replay {
            // Merge/Identity/Create patches are skipped during replay —
            // presumably structural patches that must not be duplicated.
            if patch.operation_type == OperationType::Merge
                || patch.operation_type == OperationType::Identity
                || patch.operation_type == OperationType::Create
            {
                continue;
            }

            // Re-create the patch with the new parent; batch patches carry
            // their file-change list, others their single payload.
            let new_patch = if patch.operation_type == OperationType::Batch {
                let changes = patch.file_changes().unwrap_or_default();
                Patch::new_batch(
                    changes,
                    vec![current_parent],
                    self.author.clone(),
                    patch.message.clone(),
                )
            } else {
                Patch::new(
                    patch.operation_type.clone(),
                    patch.touch_set.clone(),
                    patch.target_path.clone(),
                    patch.payload.clone(),
                    vec![current_parent],
                    self.author.clone(),
                    patch.message.clone(),
                )
            };

            let new_id = self
                .dag
                .add_patch(new_patch.clone(), vec![current_parent])?;
            self.meta.store_patch(&new_patch)?;

            last_new_id = new_id;
            current_parent = new_id;
            replayed += 1;
        }

        // Advance the branch to the last replayed patch and refresh the tree.
        self.dag.update_branch(&branch, last_new_id)?;
        self.meta.set_branch(&branch, &last_new_id)?;
        self.invalidate_head_cache();

        self.sync_working_tree(&old_tree)?;

        // Reflog failures are deliberately non-fatal.
        let _ = self.record_reflog(
            &old_head,
            &last_new_id,
            &format!("rebase onto {}", target_branch),
        );

        Ok(RebaseResult {
            patches_replayed: replayed,
            new_tip: last_new_id,
        })
    }
2621
2622 pub fn commit_groups(&self, patches: &[Patch]) -> Vec<Vec<Patch>> {
2631 if patches.is_empty() {
2632 return Vec::new();
2633 }
2634
2635 let mut sorted: Vec<Patch> = patches.to_vec();
2637 sorted.sort_by_key(|p| p.timestamp);
2638
2639 let mut groups: Vec<Vec<Patch>> = Vec::new();
2640 let mut current_group: Vec<Patch> = Vec::new();
2641 let mut current_message: Option<String> = None;
2642
2643 for patch in &sorted {
2644 if patch.operation_type == OperationType::Merge
2646 || patch.operation_type == OperationType::Identity
2647 || patch.operation_type == OperationType::Create
2648 {
2649 continue;
2650 }
2651
2652 match ¤t_message {
2653 None => {
2654 current_message = Some(patch.message.clone());
2655 current_group.push(patch.clone());
2656 }
2657 Some(msg) if msg == &patch.message => {
2658 current_group.push(patch.clone());
2660 }
2661 Some(_) => {
2662 if !current_group.is_empty() {
2664 groups.push(std::mem::take(&mut current_group));
2665 }
2666 current_message = Some(patch.message.clone());
2667 current_group.push(patch.clone());
2668 }
2669 }
2670 }
2671
2672 if !current_group.is_empty() {
2673 groups.push(current_group);
2674 }
2675
2676 groups
2677 }
2678
2679 pub fn patches_since_base(&self, base: &PatchId) -> Vec<Patch> {
2684 let base_ancestors = self.dag.ancestors(base);
2685 let mut exclusion = base_ancestors;
2686 exclusion.insert(*base);
2687
2688 let (_, head_id) = self
2689 .head()
2690 .unwrap_or_else(|_| ("main".to_string(), Hash::ZERO));
2691 let chain = self.dag.patch_chain(&head_id);
2692
2693 chain
2694 .into_iter()
2695 .filter(|id| !exclusion.contains(id))
2696 .filter_map(|id| self.dag.get_patch(&id).cloned())
2697 .collect()
2698 }
2699
2700 pub fn generate_rebase_todo(&self, base: &PatchId) -> Result<String, RepoError> {
2704 let patches = self.patches_since_base(base);
2705 let groups = self.commit_groups(&patches);
2706
2707 let mut lines = vec![
2708 String::new(),
2709 "# Interactive Rebase TODO".to_string(),
2710 "#".to_string(),
2711 "# Commands:".to_string(),
2712 "# pick = use commit".to_string(),
2713 "# reword = use commit, but edit the commit message".to_string(),
2714 "# edit = use commit, but stop for amending".to_string(),
2715 "# squash = use commit, but meld into previous commit".to_string(),
2716 "# drop = remove commit".to_string(),
2717 String::new(),
2718 ];
2719
2720 for group in &groups {
2721 if let Some(patch) = group.first() {
2722 let short_hash = patch.id.to_hex().chars().take(8).collect::<String>();
2723 lines.push(format!("pick {} {}", short_hash, patch.message));
2724 }
2725 }
2726
2727 lines.push(String::new());
2728 Ok(lines.join("\n"))
2729 }
2730
2731 pub fn parse_rebase_todo(
2733 &self,
2734 todo_content: &str,
2735 base: &PatchId,
2736 ) -> Result<RebasePlan, RepoError> {
2737 let patches = self.patches_since_base(base);
2738 let groups = self.commit_groups(&patches);
2739
2740 let mut group_map: HashMap<String, (String, Vec<PatchId>)> = HashMap::new();
2742 for group in &groups {
2743 if let Some(first) = group.first() {
2744 let short_hash = first.id.to_hex().chars().take(8).collect::<String>();
2745 let patch_ids: Vec<PatchId> = group.iter().map(|p| p.id).collect();
2746 group_map.insert(short_hash, (first.message.clone(), patch_ids));
2747 }
2748 }
2749
2750 let mut entries = Vec::new();
2751
2752 for line in todo_content.lines() {
2753 let line = line.trim();
2754 if line.is_empty() || line.starts_with('#') {
2755 continue;
2756 }
2757
2758 let mut parts = line.splitn(3, ' ');
2759 let action_str = match parts.next() {
2760 Some(a) => a,
2761 None => continue,
2762 };
2763 let short_hash = match parts.next() {
2764 Some(h) => h,
2765 None => continue,
2766 };
2767 let message = parts.next().unwrap_or("").to_string();
2768
2769 let action = match action_str {
2770 "pick" | "p" => RebaseAction::Pick,
2771 "reword" | "r" => RebaseAction::Reword,
2772 "edit" | "e" => RebaseAction::Edit,
2773 "squash" | "s" => RebaseAction::Squash,
2774 "drop" | "d" => RebaseAction::Drop,
2775 _ => continue, };
2777
2778 let (group_message, patch_ids) = group_map
2780 .get(short_hash)
2781 .cloned()
2782 .unwrap_or_else(|| (message.clone(), Vec::new()));
2783
2784 let effective_message = if action == RebaseAction::Reword {
2786 message
2787 } else {
2788 group_message
2789 };
2790
2791 let commit_tip = patch_ids.last().copied().unwrap_or(Hash::ZERO);
2792
2793 entries.push(RebasePlanEntry {
2794 action,
2795 commit_tip,
2796 message: effective_message,
2797 patch_ids,
2798 });
2799 }
2800
2801 Ok(RebasePlan { entries })
2802 }
2803
    /// Executes an interactive rebase `plan` on top of patch `onto`, replaying
    /// each entry's patches with re-written parents and messages.
    ///
    /// Returns the id of the last replayed patch (or `onto` if nothing was
    /// replayed). An `edit` entry stops early: the partial state is saved via
    /// `save_rebase_state` and the method returns immediately.
    ///
    /// NOTE(review): `squash` accumulates its message and skips its patches,
    /// folding the message into the *next* non-squash entry — git instead
    /// melds a squash into the *previous* commit. Confirm this is intended.
    pub fn rebase_interactive(
        &mut self,
        plan: &RebasePlan,
        onto: &PatchId,
    ) -> Result<PatchId, RepoError> {
        // `old_head` is only for the reflog/saved state; tolerate unborn HEAD.
        let old_head = self.head().map(|(_, id)| id).unwrap_or(Hash::ZERO);
        let (head_branch, _head_id) = self.head()?;

        // Move the branch to `onto` up-front; it is advanced again after the
        // replay. NOTE(review): not atomic — a mid-replay failure leaves the
        // branch moved.
        let branch = BranchName::new(&head_branch)?;
        let old_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());
        self.dag.update_branch(&branch, *onto)?;
        self.meta.set_branch(&branch, onto)?;
        self.invalidate_head_cache();

        let mut current_parent = *onto;
        let mut last_new_id = *onto;
        // Pending message text accumulated from preceding squash entries.
        let mut squash_message_acc: Option<String> = None;

        for entry in &plan.entries {
            match entry.action {
                // Dropped entries are simply skipped.
                RebaseAction::Drop => {
                    continue;
                }
                RebaseAction::Pick
                | RebaseAction::Reword
                | RebaseAction::Edit
                | RebaseAction::Squash => {
                    let patches: Vec<Patch> = entry
                        .patch_ids
                        .iter()
                        .filter_map(|id| self.dag.get_patch(id).cloned())
                        .collect();

                    if patches.is_empty() {
                        continue;
                    }

                    let message = if entry.action == RebaseAction::Squash {
                        // Squash: append this entry's message to the pending
                        // accumulator and do NOT replay its patches here.
                        let mut msg = squash_message_acc.take().unwrap_or_default();
                        if !msg.is_empty() {
                            msg.push('\n');
                        }
                        msg.push_str(&entry.message);
                        squash_message_acc = Some(msg);
                        continue;
                    } else {
                        // Non-squash: prepend any accumulated squash text to
                        // this entry's message.
                        if let Some(sq_msg) = squash_message_acc.take() {
                            let mut combined = sq_msg;
                            if !combined.is_empty() && !entry.message.is_empty() {
                                combined.push('\n');
                            }
                            combined.push_str(&entry.message);
                            combined
                        } else {
                            entry.message.clone()
                        }
                    };

                    // Replay this entry's patches on top of `current_parent`.
                    for patch in &patches {
                        // Structural patches are never replayed.
                        if patch.operation_type == OperationType::Merge
                            || patch.operation_type == OperationType::Identity
                            || patch.operation_type == OperationType::Create
                        {
                            continue;
                        }

                        let new_patch = if patch.operation_type == OperationType::Batch {
                            let changes = patch.file_changes().unwrap_or_default();
                            Patch::new_batch(
                                changes,
                                vec![current_parent],
                                self.author.clone(),
                                message.clone(),
                            )
                        } else {
                            Patch::new(
                                patch.operation_type.clone(),
                                patch.touch_set.clone(),
                                patch.target_path.clone(),
                                patch.payload.clone(),
                                vec![current_parent],
                                self.author.clone(),
                                message.clone(),
                            )
                        };

                        let new_id = self
                            .dag
                            .add_patch(new_patch.clone(), vec![current_parent])?;
                        self.meta.store_patch(&new_patch)?;

                        last_new_id = new_id;
                        current_parent = new_id;
                    }

                    // `edit` stops the rebase after replaying its patches so
                    // the user can amend; the branch is finalized and state
                    // persisted for a later continue/abort.
                    if entry.action == RebaseAction::Edit {
                        // NOTE(review): `next_entry` is always 0 and `plan` is
                        // empty, so the saved state cannot actually resume the
                        // remaining entries — confirm whether continue is
                        // implemented elsewhere or this is a known gap.
                        let state = RebaseState {
                            original_head: old_head,
                            original_branch: head_branch.clone(),
                            onto: *onto,
                            next_entry: 0,
                            current_parent,
                            squash_message: None,
                            plan: Vec::new(),
                        };
                        let _ = self.save_rebase_state(&state);
                        self.dag.update_branch(&branch, last_new_id)?;
                        self.meta.set_branch(&branch, &last_new_id)?;
                        self.invalidate_head_cache();
                        self.sync_working_tree(&old_tree)?;
                        return Ok(last_new_id);
                    }
                }
            }
        }

        // Finalize: point the branch at the last replayed patch, sync files,
        // record the reflog (best-effort), and clear any saved rebase state.
        self.dag.update_branch(&branch, last_new_id)?;
        self.meta.set_branch(&branch, &last_new_id)?;
        self.invalidate_head_cache();
        self.sync_working_tree(&old_tree)?;

        let _ = self.record_reflog(&old_head, &last_new_id, "interactive rebase");

        let _ = self.clear_rebase_state();

        Ok(last_new_id)
    }
2949
2950 fn save_rebase_state(&self, state: &RebaseState) -> Result<(), RepoError> {
2952 let serialized = serde_json::to_string(state)
2953 .map_err(|e| RepoError::Custom(format!("failed to serialize rebase state: {}", e)))?;
2954 self.meta
2955 .set_config("rebase_state", &serialized)
2956 .map_err(RepoError::Meta)?;
2957 Ok(())
2958 }
2959
2960 pub fn load_rebase_state(&self) -> Result<Option<RebaseState>, RepoError> {
2962 match self
2963 .meta
2964 .get_config("rebase_state")
2965 .map_err(RepoError::Meta)?
2966 {
2967 Some(json) => {
2968 let state: RebaseState = serde_json::from_str(&json).map_err(|e| {
2969 RepoError::Custom(format!("failed to parse rebase state: {}", e))
2970 })?;
2971 Ok(Some(state))
2972 }
2973 None => Ok(None),
2974 }
2975 }
2976
2977 fn clear_rebase_state(&self) -> Result<(), RepoError> {
2979 let _ = self
2980 .meta
2981 .conn()
2982 .execute("DELETE FROM config WHERE key = 'rebase_state'", []);
2983 Ok(())
2984 }
2985
2986 pub fn rebase_abort(&mut self) -> Result<(), RepoError> {
2990 let state = self
2991 .load_rebase_state()?
2992 .ok_or_else(|| RepoError::Custom("no rebase in progress".to_string()))?;
2993
2994 let branch = BranchName::new(&state.original_branch)?;
2995 let old_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());
2996 self.dag.update_branch(&branch, state.original_head)?;
2997 self.meta.set_branch(&branch, &state.original_head)?;
2998 self.invalidate_head_cache();
2999 self.sync_working_tree(&old_tree)?;
3000
3001 let _ = self.record_reflog(
3002 &state.current_parent,
3003 &state.original_head,
3004 "rebase --abort",
3005 );
3006
3007 self.clear_rebase_state()?;
3008 Ok(())
3009 }
3010
3011 pub fn blame(&self, path: &str) -> Result<Vec<BlameEntry>, RepoError> {
3019 let head_tree = self.snapshot_head()?;
3020 let hash = head_tree
3021 .get(path)
3022 .ok_or_else(|| RepoError::Custom(format!("file not found in HEAD: {}", path)))?;
3023
3024 let blob = self.cas.get_blob(hash)?;
3025 let content = String::from_utf8_lossy(&blob);
3026 let lines: Vec<&str> = content.lines().collect();
3027
3028 let (_, head_id) = self.head()?;
3029 let chain = self.dag.patch_chain(&head_id);
3030
3031 let mut patches: Vec<Patch> = chain
3032 .iter()
3033 .filter_map(|id| self.dag.get_patch(id).cloned())
3034 .collect();
3035 patches.reverse();
3036
3037 let mut line_author: Vec<Option<(PatchId, String, String)>> = vec![None; lines.len()];
3038 let mut current_lines: Vec<String> = Vec::new();
3039
3040 for patch in &patches {
3041 match &patch.operation_type {
3042 OperationType::Batch => {
3043 if let Some(changes) = patch.file_changes()
3044 && let Some(change) = changes.iter().find(|c| c.path == path)
3045 {
3046 match change.op {
3047 OperationType::Create | OperationType::Modify => {
3048 let payload_hex = String::from_utf8_lossy(&change.payload);
3049 let new_content =
3050 if let Ok(blob_hash) = Hash::from_hex(&payload_hex) {
3051 if let Ok(blob_data) = self.cas.get_blob(&blob_hash) {
3052 String::from_utf8_lossy(&blob_data).to_string()
3053 } else {
3054 continue;
3055 }
3056 } else {
3057 continue;
3058 };
3059
3060 let old_refs: Vec<&str> =
3061 current_lines.iter().map(|s| s.as_str()).collect();
3062 let new_refs: Vec<&str> = new_content.lines().collect();
3063 let changes_diff =
3064 crate::engine::merge::diff_lines(&old_refs, &new_refs);
3065
3066 let mut new_line_author: Vec<Option<(PatchId, String, String)>> =
3067 Vec::new();
3068 let mut old_idx = 0usize;
3069
3070 for change_diff in &changes_diff {
3071 match change_diff {
3072 crate::engine::merge::LineChange::Unchanged(clines) => {
3073 for i in 0..clines.len() {
3074 if old_idx + i < line_author.len() {
3075 new_line_author
3076 .push(line_author[old_idx + i].clone());
3077 } else {
3078 new_line_author.push(None);
3079 }
3080 }
3081 old_idx += clines.len();
3082 }
3083 crate::engine::merge::LineChange::Deleted(clines) => {
3084 old_idx += clines.len();
3085 }
3086 crate::engine::merge::LineChange::Inserted(clines) => {
3087 for _ in 0..clines.len() {
3088 new_line_author.push(Some((
3089 patch.id,
3090 patch.message.clone(),
3091 patch.author.clone(),
3092 )));
3093 }
3094 }
3095 }
3096 }
3097
3098 line_author = new_line_author;
3099 current_lines =
3100 new_content.lines().map(|s| s.to_string()).collect();
3101 }
3102 OperationType::Delete => {
3103 line_author.clear();
3104 current_lines.clear();
3105 break;
3106 }
3107 _ => {}
3108 }
3109 }
3110 }
3111 _ => {
3112 let targets_file = patch.target_path.as_deref() == Some(path);
3113
3114 match patch.operation_type {
3115 OperationType::Create | OperationType::Modify if targets_file => {
3116 let new_content = if !patch.payload.is_empty() {
3117 let payload_hex = String::from_utf8_lossy(&patch.payload);
3118 if let Ok(blob_hash) = Hash::from_hex(&payload_hex) {
3119 if let Ok(blob_data) = self.cas.get_blob(&blob_hash) {
3120 String::from_utf8_lossy(&blob_data).to_string()
3121 } else {
3122 continue;
3123 }
3124 } else {
3125 continue;
3126 }
3127 } else {
3128 continue;
3129 };
3130
3131 let old_refs: Vec<&str> =
3132 current_lines.iter().map(|s| s.as_str()).collect();
3133 let new_refs: Vec<&str> = new_content.lines().collect();
3134 let changes = crate::engine::merge::diff_lines(&old_refs, &new_refs);
3135
3136 let mut new_line_author: Vec<Option<(PatchId, String, String)>> =
3137 Vec::new();
3138 let mut old_idx = 0usize;
3139
3140 for change in &changes {
3141 match change {
3142 crate::engine::merge::LineChange::Unchanged(clines) => {
3143 for i in 0..clines.len() {
3144 if old_idx + i < line_author.len() {
3145 new_line_author
3146 .push(line_author[old_idx + i].clone());
3147 } else {
3148 new_line_author.push(None);
3149 }
3150 }
3151 old_idx += clines.len();
3152 }
3153 crate::engine::merge::LineChange::Deleted(clines) => {
3154 old_idx += clines.len();
3155 }
3156 crate::engine::merge::LineChange::Inserted(clines) => {
3157 for _ in 0..clines.len() {
3158 new_line_author.push(Some((
3159 patch.id,
3160 patch.message.clone(),
3161 patch.author.clone(),
3162 )));
3163 }
3164 }
3165 }
3166 }
3167
3168 line_author = new_line_author;
3169 current_lines = new_content.lines().map(|s| s.to_string()).collect();
3170 }
3171 OperationType::Delete if targets_file => {
3172 line_author.clear();
3173 current_lines.clear();
3174 break;
3175 }
3176 _ => {}
3177 }
3178 }
3179 }
3180 }
3181
3182 let mut result = Vec::new();
3183 for (i, entry) in line_author.iter().enumerate() {
3184 let line_content = lines.get(i).unwrap_or(&"").to_string();
3185 if let Some((pid, msg, author)) = entry {
3186 result.push(BlameEntry {
3187 patch_id: *pid,
3188 message: msg.clone(),
3189 author: author.clone(),
3190 line: line_content,
3191 line_number: i + 1,
3192 });
3193 } else {
3194 result.push(BlameEntry {
3195 patch_id: Hash::ZERO,
3196 message: String::new(),
3197 author: String::new(),
3198 line: line_content,
3199 line_number: i + 1,
3200 });
3201 }
3202 }
3203
3204 Ok(result)
3205 }
3206
3207 pub fn log(&self, branch: Option<&str>) -> Result<Vec<Patch>, RepoError> {
3213 let target_id = match branch {
3214 Some(name) => {
3215 let bn = BranchName::new(name)?;
3216 self.dag
3217 .get_branch(&bn)
3218 .ok_or_else(|| RepoError::BranchNotFound(name.to_string()))?
3219 }
3220 None => {
3221 let (_, id) = self.head()?;
3222 id
3223 }
3224 };
3225
3226 let chain = self.dag.patch_chain(&target_id);
3227 let mut patches = Vec::new();
3228 for id in chain {
3229 if let Some(node) = self.dag.get_node(&id) {
3230 patches.push(node.patch.clone());
3231 }
3232 }
3233 Ok(patches)
3234 }
3235
3236 pub fn log_all(&self, branch: Option<&str>) -> Result<Vec<Patch>, RepoError> {
3239 let target_id = match branch {
3240 Some(name) => {
3241 let bn = BranchName::new(name)?;
3242 self.dag
3243 .get_branch(&bn)
3244 .ok_or_else(|| RepoError::BranchNotFound(name.to_string()))?
3245 }
3246 None => {
3247 let (_, id) = self.head()?;
3248 id
3249 }
3250 };
3251
3252 let mut patches = self.dag.reachable_patches(&target_id);
3253 patches.sort_by(|a, b| b.timestamp.cmp(&a.timestamp).then_with(|| a.id.cmp(&b.id)));
3254 Ok(patches)
3255 }
3256
    /// Path of the repository's working-tree root.
    pub fn root(&self) -> &Path {
        &self.root
    }

    /// Read-only access to the patch DAG.
    pub fn dag(&self) -> &PatchDag {
        &self.dag
    }

    /// Mutable access to the patch DAG.
    pub fn dag_mut(&mut self) -> &mut PatchDag {
        &mut self.dag
    }

    /// The metadata store (SQLite-backed — see `meta.conn()` usages).
    pub fn meta(&self) -> &crate::metadata::MetadataStore {
        &self.meta
    }

    /// The content-addressed blob store.
    pub fn cas(&self) -> &BlobStore {
        &self.cas
    }
3285
3286 pub fn add_remote(&self, name: &str, url: &str) -> Result<(), RepoError> {
3293 let key = format!("remote.{}.url", name);
3294 self.meta.set_config(&key, url).map_err(RepoError::Meta)
3295 }
3296
3297 pub fn list_remotes(&self) -> Result<Vec<(String, String)>, RepoError> {
3299 let mut remotes = Vec::new();
3300 for (key, value) in self.meta.list_config()? {
3301 if let Some(name) = key
3302 .strip_prefix("remote.")
3303 .and_then(|n| n.strip_suffix(".url"))
3304 {
3305 remotes.push((name.to_string(), value));
3306 }
3307 }
3308 Ok(remotes)
3309 }
3310
3311 pub fn remove_remote(&self, name: &str) -> Result<(), RepoError> {
3313 let key = format!("remote.{}.url", name);
3314 if self.meta.get_config(&key)?.is_none() {
3315 return Err(RepoError::Custom(format!("remote '{}' not found", name)));
3316 }
3317 self.meta.delete_config(&key)?;
3318 if let Ok(Some(_)) = self
3319 .meta
3320 .get_config(&format!("remote.{}.last_pushed", name))
3321 {
3322 self.meta
3323 .delete_config(&format!("remote.{}.last_pushed", name))?;
3324 }
3325 Ok(())
3326 }
3327
    /// Whether this handle was opened as a linked worktree rather than the
    /// main repository (flag set at open time; see `add_worktree`).
    pub fn is_worktree(&self) -> bool {
        self.is_worktree
    }
3336
    /// Creates a linked worktree named `name` at `path`, checked out to
    /// `branch` (default "main").
    ///
    /// The new directory gets its own `.suture` dir whose metadata DB — and
    /// object store / keys, when present — are symlinks back into the main
    /// repository, so all worktrees share one history. Unix-only: on other
    /// platforms `Unsupported` is returned.
    ///
    /// # Errors
    /// `Custom` for an invalid name, an existing target path, or when called
    /// from a linked worktree; I/O errors from directory/symlink creation.
    pub fn add_worktree(
        &mut self,
        name: &str,
        path: &Path,
        branch: Option<&str>,
    ) -> Result<(), RepoError> {
        // Reject names that could escape the config namespace or filesystem.
        if name.is_empty()
            || name.contains('/')
            || name.contains('\\')
            || name.contains("..")
            || name.contains('\0')
        {
            return Err(RepoError::Custom("invalid worktree name".into()));
        }
        if path.exists() {
            return Err(RepoError::Custom(format!(
                "path '{}' already exists",
                path.display()
            )));
        }
        // Worktrees may only be created from the main repository.
        if self.is_worktree {
            return Err(RepoError::Custom(
                "cannot add worktree from a linked worktree; use the main repo".into(),
            ));
        }

        // Resolve to an absolute path relative to the current directory.
        let abs_path = if path.is_relative() {
            std::env::current_dir()?.join(path)
        } else {
            path.to_path_buf()
        };

        fs::create_dir_all(&abs_path)?;
        let new_suture_dir = abs_path.join(".suture");
        fs::create_dir_all(&new_suture_dir)?;

        // Share state with the main repo via symlinks.
        #[cfg(unix)]
        {
            std::os::unix::fs::symlink(
                self.suture_dir.join("metadata.db"),
                new_suture_dir.join("metadata.db"),
            )?;
            if self.suture_dir.join("objects").exists() {
                std::os::unix::fs::symlink(
                    self.suture_dir.join("objects"),
                    new_suture_dir.join("objects"),
                )?;
            }
            if self.suture_dir.join("keys").exists() {
                std::os::unix::fs::symlink(
                    self.suture_dir.join("keys"),
                    new_suture_dir.join("keys"),
                )?;
            }
        }
        #[cfg(not(unix))]
        {
            return Err(RepoError::Unsupported(
                "worktrees require symlink support (Unix only)".into(),
            ));
        }

        // Marker file pointing back at the main repository root.
        fs::write(
            new_suture_dir.join("worktree"),
            self.root.to_string_lossy().as_ref(),
        )?;

        let branch_name = branch.unwrap_or("main");
        fs::write(new_suture_dir.join("HEAD"), branch_name)?;

        // Register the worktree in the shared config.
        self.set_config(
            &format!("worktree.{}.path", name),
            &abs_path.to_string_lossy(),
        )?;
        self.set_config(&format!("worktree.{}.branch", name), branch_name)?;

        // Open the new worktree and materialize the branch's files into it.
        let mut wt_repo = Repository::open(&abs_path)?;
        wt_repo.checkout(branch_name)?;

        Ok(())
    }
3419
3420 pub fn list_worktrees(&self) -> Result<Vec<WorktreeEntry>, RepoError> {
3422 let mut worktrees = Vec::new();
3423
3424 let main_branch = self
3425 .head()
3426 .map(|(n, _)| n)
3427 .unwrap_or_else(|_| "main".to_string());
3428 worktrees.push(WorktreeEntry {
3429 name: String::new(),
3430 path: self.root.to_string_lossy().to_string(),
3431 branch: main_branch,
3432 is_main: true,
3433 });
3434
3435 let config = self.list_config()?;
3436 let mut names: Vec<&str> = Vec::new();
3437 for (key, _value) in &config {
3438 if let Some(n) = key
3439 .strip_prefix("worktree.")
3440 .and_then(|n| n.strip_suffix(".path"))
3441 {
3442 names.push(n);
3443 }
3444 }
3445 names.sort();
3446
3447 for name in names {
3448 let path_key = format!("worktree.{}.path", name);
3449 let branch_key = format!("worktree.{}.branch", name);
3450 let path_val = self
3451 .meta
3452 .get_config(&path_key)
3453 .unwrap_or(None)
3454 .unwrap_or_default();
3455 let branch_val = self
3456 .meta
3457 .get_config(&branch_key)
3458 .unwrap_or(None)
3459 .unwrap_or_default();
3460 worktrees.push(WorktreeEntry {
3461 name: name.to_string(),
3462 path: path_val,
3463 branch: branch_val,
3464 is_main: false,
3465 });
3466 }
3467
3468 Ok(worktrees)
3469 }
3470
3471 pub fn remove_worktree(&mut self, name: &str) -> Result<(), RepoError> {
3474 let path_key = format!("worktree.{}.path", name);
3475 let path_val = self
3476 .meta
3477 .get_config(&path_key)?
3478 .ok_or_else(|| RepoError::Custom(format!("worktree '{}' not found", name)))?;
3479
3480 let wt_path = Path::new(&path_val);
3481 if wt_path.exists() {
3482 fs::remove_dir_all(wt_path)?;
3483 }
3484
3485 self.meta.delete_config(&path_key)?;
3486 self.meta
3487 .delete_config(&format!("worktree.{}.branch", name))?;
3488
3489 Ok(())
3490 }
3491
3492 pub fn rename_file(&self, old_path: &str, new_path: &str) -> Result<(), RepoError> {
3495 let old = self.root.join(old_path);
3496 let new = self.root.join(new_path);
3497
3498 if !old.exists() {
3499 return Err(RepoError::Custom(format!("path not found: {}", old_path)));
3500 }
3501
3502 if new.exists() {
3503 return Err(RepoError::Custom(format!(
3504 "path already exists: {}",
3505 new_path
3506 )));
3507 }
3508
3509 fs::rename(old, new).map_err(|e| RepoError::Custom(format!("rename failed: {}", e)))?;
3510
3511 self.add(old_path)?;
3512 self.add(new_path)?;
3513
3514 Ok(())
3515 }
3516
3517 pub fn get_remote_url(&self, name: &str) -> Result<String, RepoError> {
3519 let key = format!("remote.{}.url", name);
3520 self.meta
3521 .get_config(&key)
3522 .unwrap_or(None)
3523 .ok_or_else(|| RepoError::Custom(format!("remote '{}' not found", name)))
3524 }
3525
3526 pub fn all_patches(&self) -> Vec<Patch> {
3528 self.dag
3529 .patch_ids()
3530 .iter()
3531 .filter_map(|id| self.dag.get_patch(id).cloned())
3532 .collect()
3533 }
3534
3535 pub fn gc(&self) -> Result<GcResult, RepoError> {
3545 let branches = self.dag.list_branches();
3546 let all_ids: HashSet<PatchId> = self.dag.patch_ids().into_iter().collect();
3547
3548 let mut reachable: HashSet<PatchId> = HashSet::new();
3549 for (_name, tip_id) in &branches {
3550 reachable.insert(*tip_id);
3551 for anc in self.dag.ancestors(tip_id) {
3552 reachable.insert(anc);
3553 }
3554 }
3555
3556 let unreachable: Vec<&PatchId> = all_ids
3557 .iter()
3558 .filter(|id| !reachable.contains(id))
3559 .collect();
3560 let conn = self.meta().conn();
3561
3562 for id in &unreachable {
3563 let hex = id.to_hex();
3564 conn.execute(
3565 "DELETE FROM signatures WHERE patch_id = ?1",
3566 rusqlite::params![hex],
3567 )
3568 .map_err(|e| RepoError::Custom(e.to_string()))?;
3569 conn.execute(
3570 "DELETE FROM edges WHERE parent_id = ?1 OR child_id = ?1",
3571 rusqlite::params![hex],
3572 )
3573 .map_err(|e| RepoError::Custom(e.to_string()))?;
3574 conn.execute("DELETE FROM patches WHERE id = ?1", rusqlite::params![hex])
3575 .map_err(|e| RepoError::Custom(e.to_string()))?;
3576 }
3577
3578 Ok(GcResult {
3579 patches_removed: unreachable.len(),
3580 })
3581 }
3582
3583 pub fn fsck(&self) -> Result<FsckResult, RepoError> {
3593 let mut checks_passed = 0usize;
3594 let mut warnings = Vec::new();
3595 let mut errors = Vec::new();
3596
3597 let all_ids: HashSet<PatchId> = self.dag.patch_ids().into_iter().collect();
3599 let mut parent_ok = true;
3600 for id in &all_ids {
3601 if let Some(node) = self.dag.get_node(id) {
3602 for parent_id in &node.parent_ids {
3603 if !all_ids.contains(parent_id) {
3604 errors.push(format!(
3605 "patch {} references missing parent {}",
3606 id.to_hex(),
3607 parent_id.to_hex()
3608 ));
3609 parent_ok = false;
3610 }
3611 }
3612 }
3613 }
3614 if parent_ok {
3615 checks_passed += 1;
3616 }
3617
3618 let branches = self.dag.list_branches();
3620 let mut branch_ok = true;
3621 for (name, target_id) in &branches {
3622 if !all_ids.contains(target_id) {
3623 errors.push(format!(
3624 "branch '{}' targets non-existent patch {}",
3625 name,
3626 target_id.to_hex()
3627 ));
3628 branch_ok = false;
3629 }
3630 }
3631 if branch_ok {
3632 checks_passed += 1;
3633 }
3634
3635 let mut blob_ok = true;
3637 let all_patches = self.all_patches();
3638 for patch in &all_patches {
3639 if patch.is_batch() {
3640 if let Some(changes) = patch.file_changes() {
3641 for change in &changes {
3642 if change.payload.is_empty() {
3643 continue;
3644 }
3645 let hex = String::from_utf8_lossy(&change.payload);
3646 if let Ok(hash) = Hash::from_hex(&hex)
3647 && !self.cas().has_blob(&hash)
3648 {
3649 warnings.push(format!(
3650 "batch patch {} references missing blob {} for path {}",
3651 patch.id.to_hex(),
3652 hash.to_hex(),
3653 change.path
3654 ));
3655 blob_ok = false;
3656 }
3657 }
3658 }
3659 continue;
3660 }
3661 if patch.payload.is_empty() {
3662 continue;
3663 }
3664 if let Some(hash) = resolve_payload_to_hash(patch) {
3665 if !self.cas().has_blob(&hash) {
3666 warnings.push(format!(
3667 "patch {} references missing blob {}",
3668 patch.id.to_hex(),
3669 hash.to_hex()
3670 ));
3671 blob_ok = false;
3672 }
3673 } else {
3674 warnings.push(format!(
3675 "patch {} has non-UTF-8 payload, cannot verify blob reference",
3676 patch.id.to_hex()
3677 ));
3678 blob_ok = false;
3679 }
3680 }
3681 if blob_ok {
3682 checks_passed += 1;
3683 }
3684
3685 let mut head_ok = false;
3687 match self.head() {
3688 Ok((branch_name, _target_id)) => {
3689 if branches.iter().any(|(n, _)| n == &branch_name) {
3690 head_ok = true;
3691 checks_passed += 1;
3692 } else {
3693 errors.push(format!(
3694 "HEAD branch '{}' does not exist in branch list",
3695 branch_name
3696 ));
3697 }
3698 }
3699 Err(e) => {
3700 errors.push(format!("HEAD is invalid: {}", e));
3701 }
3702 }
3703 if head_ok {
3704 checks_passed += 1;
3705 }
3706
3707 Ok(FsckResult {
3708 checks_passed,
3709 warnings,
3710 errors,
3711 })
3712 }
3713
3714 fn record_reflog(
3719 &self,
3720 old_head: &PatchId,
3721 new_head: &PatchId,
3722 message: &str,
3723 ) -> Result<(), RepoError> {
3724 self.meta
3726 .reflog_push(old_head, new_head, message)
3727 .map_err(RepoError::Meta)?;
3728 Ok(())
3729 }
3730
    /// Returns reflog entries as `(new_head, "ts:old_head:message")` pairs.
    ///
    /// The SQLite reflog table is the primary source. When it is empty, any
    /// legacy JSON reflog stored under the "reflog" config key is migrated
    /// into SQLite (and the legacy key deleted), then returned with a zero
    /// timestamp.
    pub fn reflog_entries(&self) -> Result<Vec<(String, String)>, RepoError> {
        let sqlite_entries = self.meta.reflog_list().map_err(RepoError::Meta)?;

        if !sqlite_entries.is_empty() {
            let entries: Vec<(String, String)> = sqlite_entries
                .into_iter()
                .map(|(old_head, new_head, message)| {
                    // NOTE(review): the timestamp is the *current* wall-clock
                    // time, not when the entry was recorded — every entry gets
                    // the same ts. Confirm this is intended.
                    let ts = std::time::SystemTime::now()
                        .duration_since(std::time::UNIX_EPOCH)
                        .unwrap_or_default()
                        .as_secs();
                    (new_head, format!("{}:{}:{}", ts, old_head, message))
                })
                .collect();
            return Ok(entries);
        }

        // Fallback: migrate a legacy JSON reflog into the SQLite table.
        match self.meta.get_config("reflog").map_err(RepoError::Meta)? {
            Some(json) => {
                let legacy: Vec<(String, String)> = serde_json::from_str(&json).unwrap_or_default();
                for (new_head, entry) in &legacy {
                    // Legacy value format: "ts:old_head:message".
                    let parts: Vec<&str> = entry.splitn(3, ':').collect();
                    if parts.len() >= 3 {
                        let old_head = parts[1];
                        let msg = parts[2];
                        // Unparsable hashes are silently skipped (best-effort).
                        if let (Ok(old), Ok(new)) =
                            (Hash::from_hex(old_head), Hash::from_hex(new_head))
                        {
                            let _ = self.meta.reflog_push(&old, &new, msg);
                        }
                    }
                }
                // Drop the legacy blob, then re-read from SQLite; migrated
                // entries carry a zero timestamp.
                let _ = self.meta.delete_config("reflog");
                let sqlite_entries = self.meta.reflog_list().map_err(RepoError::Meta)?;
                let entries: Vec<(String, String)> = sqlite_entries
                    .into_iter()
                    .map(|(old_head, new_head, message)| {
                        (new_head, format!("{}:{}:{}", 0, old_head, message))
                    })
                    .collect();
                Ok(entries)
            }
            None => Ok(Vec::new()),
        }
    }
3783}
3784
/// Reads `.sutureignore` from `root` and returns its patterns: one per line,
/// trimmed, skipping blanks and `#` comments. A missing or unreadable file
/// yields an empty list.
fn load_ignore_patterns(root: &Path) -> Vec<String> {
    let Ok(raw) = fs::read_to_string(root.join(".sutureignore")) else {
        return Vec::new();
    };
    raw.lines()
        .map(str::trim)
        .filter(|line| !line.is_empty() && !line.starts_with('#'))
        .map(String::from)
        .collect()
}
3803
/// Returns true when `rel_path` matches any ignore pattern.
///
/// Pattern forms: a leading `*` matches by suffix (e.g. `*.log`); a trailing
/// `/` matches any path under that directory prefix; otherwise the pattern
/// matches the exact path or any path nested beneath it.
fn is_ignored(rel_path: &str, patterns: &[String]) -> bool {
    patterns.iter().any(|pattern| {
        if let Some(suffix) = pattern.strip_prefix('*') {
            rel_path.ends_with(suffix)
        } else if pattern.ends_with('/') {
            rel_path.starts_with(pattern.as_str())
        } else {
            rel_path == pattern || rel_path.starts_with(&format!("{}/", pattern))
        }
    })
}
3826
/// A file discovered by `walk_dir`.
struct WalkEntry {
    // Path relative to the walk root, with `/` separators on all platforms.
    relative: String,
    // Absolute/full path of the file; currently unused by some callers.
    #[allow(dead_code)]
    full_path: PathBuf,
}
3833
3834fn walk_dir(root: &Path, ignore_patterns: &[String]) -> Result<Vec<WalkEntry>, io::Error> {
3836 let mut entries = Vec::new();
3837 walk_dir_recursive(root, root, ignore_patterns, &mut entries)?;
3838 Ok(entries)
3839}
3840
3841fn walk_dir_recursive(
3842 root: &Path,
3843 current: &Path,
3844 ignore_patterns: &[String],
3845 entries: &mut Vec<WalkEntry>,
3846) -> Result<(), io::Error> {
3847 if !current.is_dir() {
3848 return Ok(());
3849 }
3850
3851 let mut dir_entries: Vec<_> = fs::read_dir(current)?
3852 .filter_map(|e| e.ok())
3853 .filter(|e| {
3854 let name = e.file_name();
3856 name != ".suture"
3857 })
3858 .collect();
3859
3860 dir_entries.sort_by_key(|e| e.file_name());
3861
3862 for entry in dir_entries {
3863 let path = entry.path();
3864 let rel = path
3865 .strip_prefix(root)
3866 .unwrap_or(&path)
3867 .to_string_lossy()
3868 .replace('\\', "/");
3869
3870 if is_ignored(&rel, ignore_patterns) {
3872 continue;
3873 }
3874
3875 if path.is_dir() {
3876 walk_dir_recursive(root, &path, ignore_patterns, entries)?;
3877 } else if path.is_file() {
3878 entries.push(WalkEntry {
3879 relative: rel,
3880 full_path: path,
3881 });
3882 }
3883 }
3884
3885 Ok(())
3886}
3887
3888fn restore_pending_merge_parents(meta: &crate::metadata::MetadataStore) -> Vec<PatchId> {
3890 let Ok(Some(json)) = meta.get_config("pending_merge_parents") else {
3891 return Vec::new();
3892 };
3893 serde_json::from_str::<Vec<PatchId>>(&json).unwrap_or_default()
3894}
3895
/// A saved stash entry, as reported by `stash_list`.
#[derive(Debug, Clone)]
pub struct StashEntry {
    // Position in the stash stack; 0 is the oldest entry (see test_stash_list).
    pub index: usize,
    // User-supplied message given at `stash_push` time.
    pub message: String,
    // Branch the stash was created on.
    pub branch: String,
    // Head patch identifier at stash time — presumably hex-encoded; confirm.
    pub head_id: String,
}
3908
/// A registered worktree attached to this repository.
#[derive(Debug, Clone)]
pub struct WorktreeEntry {
    // Worktree name.
    pub name: String,
    // Filesystem path of the worktree.
    pub path: String,
    // Branch checked out in this worktree.
    pub branch: String,
    // True for the primary (original) worktree.
    pub is_main: bool,
}
3917
/// One line of blame output: which patch last touched a given line.
#[derive(Debug, Clone)]
pub struct BlameEntry {
    // Patch that introduced or last modified this line.
    pub patch_id: PatchId,
    // Commit message of that patch.
    pub message: String,
    // Author recorded on that patch.
    pub author: String,
    // Current content of the line.
    pub line: String,
    // Line number in the file — NOTE(review): 0- vs 1-based not visible
    // here; confirm against the blame implementation.
    pub line_number: usize,
}
3932
/// Summary of a completed rebase.
#[derive(Debug, Clone)]
pub struct RebaseResult {
    // Number of patches replayed onto the new base.
    pub patches_replayed: usize,
    // Tip patch id of the rebased branch after replay.
    pub new_tip: PatchId,
}
3941
/// Per-entry action for an interactive rebase plan (git-style semantics,
/// judging by the variant names — confirm against the rebase executor).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum RebaseAction {
    // Replay the commit as-is.
    Pick,
    // Replay the commit with an edited message.
    Reword,
    // Stop after replaying to let the user amend.
    Edit,
    // Fold this commit into the previous one.
    Squash,
    // Skip the commit entirely.
    Drop,
}
3956
/// One step of an interactive rebase plan.
#[derive(Debug, Clone)]
pub struct RebasePlanEntry {
    // What to do with this commit.
    pub action: RebaseAction,
    // Tip patch identifying the commit this entry refers to.
    pub commit_tip: PatchId,
    // Commit message (the replacement message for `Reword`/`Squash`).
    pub message: String,
    // All patch ids that make up this commit.
    pub patch_ids: Vec<PatchId>,
}
3969
/// An ordered interactive rebase plan (oldest entry first — TODO confirm).
#[derive(Debug, Clone)]
pub struct RebasePlan {
    pub entries: Vec<RebasePlanEntry>,
}
3975
/// Persistable state of an in-progress rebase, so it can be resumed or
/// aborted across process restarts (hence `Serialize`/`Deserialize`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RebaseState {
    // Head patch before the rebase started (restore target for abort).
    pub original_head: PatchId,
    // Branch being rebased.
    pub original_branch: String,
    // Patch the branch is being rebased onto.
    pub onto: PatchId,
    // Index into `plan` of the next entry to process.
    pub next_entry: usize,
    // The full plan, in its string-serialized form.
    pub plan: Vec<RebasePlanEntrySerialized>,
    // Parent to attach the next replayed patch to.
    pub current_parent: PatchId,
    // Accumulated message for an in-flight squash, if any.
    pub squash_message: Option<String>,
}
3994
/// Serde-friendly form of [`RebasePlanEntry`]: action and ids are stored as
/// strings for stable on-disk persistence in [`RebaseState`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RebasePlanEntrySerialized {
    // RebaseAction as a string (e.g. "pick" — exact spelling set elsewhere).
    pub action: String,
    // Commit tip patch id, stringified.
    pub commit_tip: String,
    pub message: String,
    // Constituent patch ids, stringified.
    pub patch_ids: Vec<String>,
}
4003
/// Snapshot of repository state, as returned by `Repository::status`.
#[derive(Debug, Clone)]
pub struct RepoStatus {
    // Currently checked-out branch, if HEAD is on a branch.
    pub head_branch: Option<String>,
    // Patch HEAD points at, if any.
    pub head_patch: Option<PatchId>,
    // Total number of branches.
    pub branch_count: usize,
    // Staged paths with their status (Added/Modified/…); repo-relative.
    pub staged_files: Vec<(String, FileStatus)>,
    // Total number of patches in the DAG.
    pub patch_count: usize,
}
4018
/// Outcome of `Repository::execute_merge`.
#[derive(Debug, Clone)]
pub struct MergeExecutionResult {
    // True when the merge completed without conflicts.
    pub is_clean: bool,
    // Resulting merged file tree.
    pub merged_tree: FileTree,
    // Id of the recorded merge patch; `Some` only for clean merges
    // (conflicting merges leave no merge patch — see the merge tests).
    pub merge_patch_id: Option<PatchId>,
    // Conflicts the caller must resolve before committing.
    pub unresolved_conflicts: Vec<ConflictInfo>,
    // Number of patches applied during the merge.
    pub patches_applied: usize,
}
4037
/// A single unresolved merge conflict on one path.
#[derive(Debug, Clone)]
pub struct ConflictInfo {
    // Repo-relative path of the conflicted file.
    pub path: String,
    // Patch on our side (HEAD) that touched the path.
    pub our_patch_id: PatchId,
    // Patch on the incoming side that touched the path.
    pub their_patch_id: PatchId,
    // Content hashes for each version; `None` presumably means the file is
    // absent on that side / at the base — confirm against the merge code.
    pub our_content_hash: Option<Hash>,
    pub their_content_hash: Option<Hash>,
    pub base_content_hash: Option<Hash>,
}
4054
/// Outcome of a garbage-collection pass.
#[derive(Debug, Clone)]
pub struct GcResult {
    // Number of unreachable patches removed.
    pub patches_removed: usize,
}
4061
/// Outcome of a repository integrity check (fsck).
#[derive(Debug, Clone)]
pub struct FsckResult {
    // Number of checks that passed.
    pub checks_passed: usize,
    // Non-fatal findings.
    pub warnings: Vec<String>,
    // Integrity violations found.
    pub errors: Vec<String>,
}
4072
4073fn three_way_merge(
4077 base: Option<&str>,
4078 ours: &str,
4079 theirs: &str,
4080 head_branch: &str,
4081 source_branch: &str,
4082) -> Result<String, Vec<String>> {
4083 use crate::engine::merge::three_way_merge_lines;
4084
4085 let base_lines: Vec<&str> = base.map(|s| s.lines().collect()).unwrap_or_default();
4086 let ours_lines: Vec<&str> = ours.lines().collect();
4087 let theirs_lines: Vec<&str> = theirs.lines().collect();
4088
4089 let ours_label = if head_branch.is_empty() {
4090 "HEAD".to_string()
4091 } else {
4092 format!("{head_branch} (HEAD)")
4093 };
4094 let theirs_label = if source_branch.is_empty() {
4095 "theirs".to_string()
4096 } else {
4097 source_branch.to_string()
4098 };
4099
4100 let result = three_way_merge_lines(
4101 &base_lines,
4102 &ours_lines,
4103 &theirs_lines,
4104 &ours_label,
4105 &theirs_label,
4106 );
4107
4108 if result.is_clean {
4109 Ok(result.lines.join("\n"))
4110 } else {
4111 Err(result.lines)
4112 }
4113}
4114
4115#[cfg(test)]
4120mod tests {
4121 use super::*;
4122
    // init creates the on-disk layout; open restores the default branch.
    #[test]
    fn test_init_and_open() {
        let dir = tempfile::tempdir().unwrap();
        let repo_path = dir.path();

        let _repo = Repository::init(repo_path, "alice").unwrap();
        assert!(repo_path.join(".suture").exists());
        assert!(repo_path.join(".suture/metadata.db").exists());

        let repo2 = Repository::open(repo_path).unwrap();
        assert_eq!(repo2.list_branches().len(), 1);
    }
4136
    // Re-initializing an existing repo fails with AlreadyExists.
    #[test]
    fn test_init_already_exists() {
        let dir = tempfile::tempdir().unwrap();
        Repository::init(dir.path(), "alice").unwrap();
        let result = Repository::init(dir.path(), "alice");
        assert!(matches!(result, Err(RepoError::AlreadyExists(_))));
    }
4144
    // Creating a branch adds it; duplicate names are rejected.
    #[test]
    fn test_create_branch() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        repo.create_branch("feature", None).unwrap();
        assert_eq!(repo.list_branches().len(), 2);

        let result = repo.create_branch("feature", None);
        assert!(result.is_err());
    }
4156
    // A newly added file shows up staged with status Added.
    #[test]
    fn test_add_and_status() {
        let dir = tempfile::tempdir().unwrap();
        let repo = Repository::init(dir.path(), "alice").unwrap();

        let test_file = dir.path().join("hello.txt");
        fs::write(&test_file, "hello, suture!").unwrap();

        repo.add("hello.txt").unwrap();
        let status = repo.status().unwrap();
        assert_eq!(status.staged_files.len(), 1);
        assert_eq!(status.staged_files[0].0, "hello.txt");
        assert_eq!(status.staged_files[0].1, FileStatus::Added);
    }
4171
    // Adding a path that does not exist on disk is an error.
    #[test]
    fn test_add_nonexistent_file() {
        let dir = tempfile::tempdir().unwrap();
        let repo = Repository::init(dir.path(), "alice").unwrap();
        let result = repo.add("does_not_exist.txt");
        assert!(result.is_err());
    }
4179
    // Committing clears the staging area and records the patch in the DAG.
    #[test]
    fn test_commit() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        let test_file = dir.path().join("test.txt");
        fs::write(&test_file, "test content").unwrap();
        repo.add("test.txt").unwrap();

        let patch_id = repo.commit("initial file").unwrap();

        let status = repo.status().unwrap();
        assert!(status.staged_files.is_empty());
        assert!(repo.dag.has_patch(&patch_id));
        // Count is 2: the commit plus the root patch created at init.
        assert_eq!(repo.dag.patch_count(), 2);
    }
4196
    // Committing with an empty staging area fails with NothingToCommit.
    #[test]
    fn test_commit_nothing() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();
        let result = repo.commit("empty commit");
        assert!(matches!(result, Err(RepoError::NothingToCommit)));
    }
4204
    // log returns all patches reachable from HEAD.
    #[test]
    fn test_log() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        let test_file = dir.path().join("test.txt");
        fs::write(&test_file, "v1").unwrap();
        repo.add("test.txt").unwrap();
        repo.commit("first commit").unwrap();

        fs::write(&test_file, "v2").unwrap();
        repo.add("test.txt").unwrap();
        repo.commit("second commit").unwrap();

        let log = repo.log(None).unwrap();
        // Two commits plus the root patch created at init.
        assert_eq!(log.len(), 3);
    }
4222
    // snapshot_head reflects committed content, keyed by content hash.
    #[test]
    fn test_snapshot_head() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        let test_file = dir.path().join("test.txt");
        fs::write(&test_file, "hello world").unwrap();
        repo.add("test.txt").unwrap();
        repo.commit("add test.txt").unwrap();

        let tree = repo.snapshot_head().unwrap();
        assert!(tree.contains("test.txt"));
        assert_eq!(tree.get("test.txt"), Some(&Hash::from_data(b"hello world")));
    }
4237
    // A freshly initialized repo snapshots to an empty tree.
    #[test]
    fn test_snapshot_empty_repo() {
        let dir = tempfile::tempdir().unwrap();
        let repo = Repository::init(dir.path(), "alice").unwrap();

        let tree = repo.snapshot_head().unwrap();
        assert!(tree.is_empty());
    }
4246
    // checkout materializes only the target branch's tree: a patch inserted
    // directly into the DAG/metadata (bypassing commit) must not leak its
    // file into the working tree after checking out "main".
    #[test]
    fn test_checkout() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        let main_file = dir.path().join("main.txt");
        fs::write(&main_file, "main content").unwrap();
        repo.add("main.txt").unwrap();
        repo.commit("add main.txt").unwrap();

        let (_, head_id) = repo.head().unwrap();
        // Hand-built patch whose payload is the hex of the content hash,
        // parented on the current head.
        let feat_patch = Patch::new(
            OperationType::Modify,
            TouchSet::single("feature.txt"),
            Some("feature.txt".to_string()),
            Hash::from_data(b"feature content")
                .to_hex()
                .as_bytes()
                .to_vec(),
            vec![head_id],
            "alice".to_string(),
            "add feature.txt".to_string(),
        );
        let _feat_id = repo
            .dag_mut()
            .add_patch(feat_patch.clone(), vec![head_id])
            .unwrap();
        repo.meta.store_patch(&feat_patch).unwrap();

        repo.checkout("main").unwrap();
        assert!(!dir.path().join("feature.txt").exists());
        assert!(dir.path().join("main.txt").exists());
    }
4283
    // NOTE(review): despite the name, current behavior allows checking out
    // the same branch with staged changes, and the staging area survives.
    #[test]
    fn test_checkout_refuses_dirty() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        let staged = dir.path().join("staged.txt");
        fs::write(&staged, "staged").unwrap();
        repo.add("staged.txt").unwrap();

        let result = repo.checkout("main");
        assert!(result.is_ok());

        let working_set = repo.meta.working_set().unwrap();
        assert!(working_set.iter().any(|(p, _)| p == "staged.txt"));
    }
4302
    // diff between the first commit and HEAD reports one modified file.
    #[test]
    fn test_diff() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        let test_file = dir.path().join("test.txt");
        fs::write(&test_file, "v1").unwrap();
        repo.add("test.txt").unwrap();
        let first_commit = repo.commit("first").unwrap();

        fs::write(&test_file, "v2").unwrap();
        repo.add("test.txt").unwrap();
        repo.commit("second").unwrap();

        let diffs = repo.diff(Some(&first_commit.to_hex()), None).unwrap();
        assert_eq!(diffs.len(), 1);
        assert_eq!(diffs[0].diff_type, DiffType::Modified);
    }
4322
    // Reverting a commit that added a file removes it from both the
    // snapshot and the working tree.
    #[test]
    fn test_revert() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        let test_file = dir.path().join("test.txt");
        fs::write(&test_file, "original").unwrap();
        repo.add("test.txt").unwrap();
        let commit_id = repo.commit("add file").unwrap();

        repo.revert(&commit_id, None).unwrap();

        let tree = repo.snapshot_head().unwrap();
        assert!(!tree.contains("test.txt"));
        assert!(
            !test_file.exists(),
            "revert should remove the file from the working tree"
        );
    }
4343
    // Reopening the repo rebuilds the complete patch DAG from storage.
    #[test]
    fn test_open_reconstructs_full_dag() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        let f = dir.path().join("f.txt");
        fs::write(&f, "v1").unwrap();
        repo.add("f.txt").unwrap();
        repo.commit("first").unwrap();

        fs::write(&f, "v2").unwrap();
        repo.add("f.txt").unwrap();
        repo.commit("second").unwrap();

        fs::write(&f, "v3").unwrap();
        repo.add("f.txt").unwrap();
        repo.commit("third").unwrap();

        let original_count = repo.dag.patch_count();

        let repo2 = Repository::open(dir.path()).unwrap();
        assert_eq!(repo2.dag.patch_count(), original_count);

        let log = repo2.log(None).unwrap();
        // Three commits plus the root patch created at init.
        assert_eq!(log.len(), 4);
    }
4372
    // Covers the three pattern forms: directory prefix, *suffix, bare name.
    #[test]
    fn test_ignore_patterns() {
        let patterns = vec![
            "target/".to_string(),
            "*.o".to_string(),
            "build".to_string(),
        ];

        assert!(is_ignored("target/debug/main", &patterns));
        assert!(is_ignored("foo.o", &patterns));
        assert!(is_ignored("build/output", &patterns));
        assert!(is_ignored("build", &patterns));
        assert!(!is_ignored("src/main.rs", &patterns));
        assert!(!is_ignored("main.rs", &patterns));
    }
4388
    // End-to-end: branch off, commit on main, then checkout restores the
    // branch-point state (old content, no b.txt).
    #[test]
    fn test_full_workflow_with_checkout() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("a.txt"), "version 1")?;
        repo.add("a.txt")?;
        repo.commit("add a.txt v1")?;

        repo.create_branch("feature", None)?;

        fs::write(dir.path().join("a.txt"), "version 2")?;
        fs::write(dir.path().join("b.txt"), "new file")?;
        repo.add("a.txt")?;
        repo.add("b.txt")?;
        repo.commit("modify a, add b")?;

        repo.checkout("feature")?;
        let content = fs::read_to_string(dir.path().join("a.txt"))?;
        assert_eq!(content, "version 1");
        assert!(!dir.path().join("b.txt").exists());

        Ok(())
    }
4417
    // add_all stages every untracked file and reports the count.
    #[test]
    fn test_add_all() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let repo = Repository::init(dir.path(), "alice").unwrap();

        fs::write(dir.path().join("a.txt"), "a")?;
        fs::write(dir.path().join("b.txt"), "b")?;
        let count = repo.add_all().unwrap();
        assert_eq!(count, 2);
        Ok(())
    }
4430
    // Non-overlapping changes merge cleanly and record a two-parent
    // merge patch.
    #[test]
    fn test_execute_merge_clean() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        fs::write(dir.path().join("base.txt"), "base").unwrap();
        repo.add("base.txt").unwrap();
        repo.commit("add base").unwrap();

        repo.create_branch("feature", None).unwrap();

        fs::write(dir.path().join("main_file.txt"), "main content").unwrap();
        repo.add("main_file.txt").unwrap();
        repo.commit("add main file").unwrap();

        repo.checkout("feature").unwrap();

        fs::write(dir.path().join("feat_file.txt"), "feature content").unwrap();
        repo.add("feat_file.txt").unwrap();
        repo.commit("add feature file").unwrap();

        let result = repo.execute_merge("main").unwrap();
        assert!(result.is_clean);
        assert!(result.merge_patch_id.is_some());
        assert!(result.unresolved_conflicts.is_empty());
        assert!(dir.path().join("main_file.txt").exists());
        assert!(dir.path().join("feat_file.txt").exists());
        assert!(dir.path().join("base.txt").exists());

        let log = repo.log(None).unwrap();
        let merge_patch = log
            .iter()
            .find(|p| p.operation_type == OperationType::Merge);
        assert!(merge_patch.is_some());
        assert_eq!(merge_patch.unwrap().parent_ids.len(), 2);
    }
4467
    // Divergent edits to the same file produce a conflict: no merge patch,
    // and conflict markers written into the working-tree file.
    #[test]
    fn test_execute_merge_conflicting() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        fs::write(dir.path().join("shared.txt"), "original").unwrap();
        repo.add("shared.txt").unwrap();
        repo.commit("add shared").unwrap();

        repo.create_branch("feature", None).unwrap();

        fs::write(dir.path().join("shared.txt"), "main version").unwrap();
        repo.add("shared.txt").unwrap();
        repo.commit("modify on main").unwrap();

        repo.checkout("feature").unwrap();

        fs::write(dir.path().join("shared.txt"), "feature version").unwrap();
        repo.add("shared.txt").unwrap();
        repo.commit("modify on feature").unwrap();

        let result = repo.execute_merge("main").unwrap();
        assert!(!result.is_clean);
        assert!(result.merge_patch_id.is_none());
        assert_eq!(result.unresolved_conflicts.len(), 1);
        assert_eq!(result.unresolved_conflicts[0].path, "shared.txt");

        // Markers are labeled with branch names (HEAD side first).
        let content = fs::read_to_string(dir.path().join("shared.txt")).unwrap();
        assert!(content.contains("<<<<<<< feature (HEAD)"));
        assert!(content.contains("main version"));
        assert!(content.contains("feature version"));
        assert!(content.contains(">>>>>>> main"));
    }
4501
    // Merging a strictly-ahead branch is clean and brings in its files.
    #[test]
    fn test_execute_merge_fast_forward() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        fs::write(dir.path().join("base.txt"), "base").unwrap();
        repo.add("base.txt").unwrap();
        repo.commit("add base").unwrap();

        repo.create_branch("feature", None).unwrap();

        repo.checkout("feature").unwrap();
        fs::write(dir.path().join("new_file.txt"), "new content").unwrap();
        repo.add("new_file.txt").unwrap();
        repo.commit("add new file on feature").unwrap();

        repo.checkout("main").unwrap();

        let result = repo.execute_merge("feature").unwrap();
        assert!(result.is_clean);
        assert!(dir.path().join("new_file.txt").exists());
    }
4524
    // Committing after a conflicting merge records a two-parent resolution
    // patch and clears the pending merge state.
    #[test]
    fn test_resolve_merge_conflict() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        fs::write(dir.path().join("shared.txt"), "original").unwrap();
        repo.add("shared.txt").unwrap();
        repo.commit("add shared").unwrap();

        repo.create_branch("feature", None).unwrap();

        fs::write(dir.path().join("shared.txt"), "main version").unwrap();
        repo.add("shared.txt").unwrap();
        repo.commit("modify on main").unwrap();

        repo.checkout("feature").unwrap();

        fs::write(dir.path().join("shared.txt"), "feature version").unwrap();
        repo.add("shared.txt").unwrap();
        repo.commit("modify on feature").unwrap();

        let _result = repo.execute_merge("main").unwrap();

        fs::write(dir.path().join("shared.txt"), "resolved content").unwrap();
        repo.add("shared.txt").unwrap();
        let commit_id = repo.commit("resolve merge conflict").unwrap();

        assert!(repo.pending_merge_parents.is_empty());

        let log = repo.log(None).unwrap();
        let resolve_patch = log.iter().find(|p| p.id == commit_id).unwrap();
        assert_eq!(resolve_patch.parent_ids.len(), 2);
    }
4558
    // Identical edits and one-sided edits merge cleanly; with no base,
    // differing content conflicts and yields marker lines.
    #[test]
    fn test_three_way_merge() {
        let ours = "line1\nline2-modified\nline3";
        let theirs = "line1\nline2-modified\nline3";
        let result = three_way_merge(Some("line1\nline2\nline3"), ours, theirs, "main", "feature");
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), ours);

        let result = three_way_merge(Some("base"), "base", "changed", "main", "feature");
        assert_eq!(result.unwrap(), "changed");

        let result = three_way_merge(Some("base"), "changed", "base", "main", "feature");
        assert_eq!(result.unwrap(), "changed");

        let result = three_way_merge(None, "ours content", "theirs content", "main", "feature");
        assert!(result.is_err());
        let lines = result.unwrap_err();
        assert!(lines[0].contains("<<<<<<<"));
        assert!(lines.last().unwrap().contains(">>>>>>>"));
    }
4579
    // Config round-trips; init seeds an "author" key alongside user keys.
    #[test]
    fn test_config_get_set() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        assert!(repo.get_config("user.name")?.is_none());
        assert!(repo.get_config("user.email")?.is_none());

        repo.set_config("user.name", "Alice")?;
        repo.set_config("user.email", "alice@example.com")?;

        assert_eq!(repo.get_config("user.name")?.unwrap(), "Alice");
        assert_eq!(repo.get_config("user.email")?.unwrap(), "alice@example.com");

        let config = repo.list_config()?;
        assert!(config.iter().any(|(k, v)| k == "user.name" && v == "Alice"));
        assert!(
            config
                .iter()
                .any(|(k, v)| k == "user.email" && v == "alice@example.com")
        );
        assert!(config.iter().any(|(k, _)| k == "author"));

        Ok(())
    }
4607
    // Branches can be deleted, but deleting the current branch ("main")
    // is refused.
    #[test]
    fn test_delete_branch() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        repo.create_branch("feature", None)?;
        repo.create_branch("develop", None)?;
        assert_eq!(repo.list_branches().len(), 3);

        let result = repo.delete_branch("main");
        assert!(result.is_err());

        repo.delete_branch("feature")?;
        assert_eq!(repo.list_branches().len(), 2);

        repo.delete_branch("develop")?;
        assert_eq!(repo.list_branches().len(), 1);

        Ok(())
    }
4630
    // A tag created at HEAD appears in list_tags.
    #[test]
    fn test_tags() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("a.txt"), "v1")?;
        repo.add("a.txt")?;
        let _commit_id = repo.commit("first commit")?;

        repo.create_tag("v1.0", None)?;
        let tags = repo.list_tags()?;
        assert_eq!(tags.len(), 1);

        Ok(())
    }
4647
    // patches_since is exclusive of the given patch and ordered oldest
    // first; from the root patch it returns all three commits.
    #[test]
    fn test_patches_since() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("a.txt"), "v1")?;
        repo.add("a.txt")?;
        let id1 = repo.commit("first")?;

        fs::write(dir.path().join("a.txt"), "v2")?;
        repo.add("a.txt")?;
        let id2 = repo.commit("second")?;

        fs::write(dir.path().join("b.txt"), "new")?;
        repo.add("b.txt")?;
        let id3 = repo.commit("third")?;

        let since = repo.patches_since(&id1);
        assert_eq!(since.len(), 2);
        assert_eq!(since[0].id, id2);
        assert_eq!(since[1].id, id3);

        let since = repo.patches_since(&id3);
        assert!(since.is_empty());

        // log is newest-first, so last() is the root patch.
        let root_id = repo.log(None)?.last().unwrap().id;
        let since = repo.patches_since(&root_id);
        assert_eq!(since.len(), 3);
        assert_eq!(since[0].id, id1);
        assert_eq!(since[1].id, id2);
        assert_eq!(since[2].id, id3);

        Ok(())
    }
4689
    // Pending merge parents survive a close/reopen, and the resolution
    // commit still gets both parents afterwards.
    #[test]
    fn test_pending_merge_persistence() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("shared.txt"), "original")?;
        repo.add("shared.txt")?;
        repo.commit("add shared")?;

        repo.create_branch("feature", None)?;

        fs::write(dir.path().join("shared.txt"), "main version")?;
        repo.add("shared.txt")?;
        repo.commit("modify on main")?;

        repo.checkout("feature")?;

        fs::write(dir.path().join("shared.txt"), "feature version")?;
        repo.add("shared.txt")?;
        repo.commit("modify on feature")?;

        let _ = repo.execute_merge("main")?;
        assert_eq!(repo.pending_merge_parents.len(), 2);

        drop(repo);
        let mut repo2 = Repository::open(dir.path())?;
        assert_eq!(repo2.pending_merge_parents.len(), 2);

        fs::write(dir.path().join("shared.txt"), "resolved")?;
        repo2.add("shared.txt")?;
        let resolve_id = repo2.commit("resolve")?;
        assert!(repo2.pending_merge_parents.is_empty());

        let patch = repo2
            .log(None)?
            .into_iter()
            .find(|p| p.id == resolve_id)
            .unwrap();
        assert_eq!(patch.parent_ids.len(), 2);

        Ok(())
    }
4736
    // A fresh repo reports no uncommitted changes.
    #[test]
    fn test_has_uncommitted_changes_clean() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let repo = Repository::init(dir.path(), "alice")?;

        assert!(!repo.has_uncommitted_changes()?);

        Ok(())
    }
4746
    // Staged (but uncommitted) files count as uncommitted changes.
    #[test]
    fn test_has_uncommitted_changes_staged() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("a.txt"), "content")?;
        repo.add("a.txt")?;

        assert!(repo.has_uncommitted_changes()?);

        Ok(())
    }
4759
    // On-disk modifications that were never staged are also detected.
    #[test]
    fn test_has_uncommitted_changes_unstaged() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("a.txt"), "original")?;
        repo.add("a.txt")?;
        repo.commit("initial")?;

        fs::write(dir.path().join("a.txt"), "modified on disk")?;

        assert!(repo.has_uncommitted_changes()?);

        Ok(())
    }
4775
    // stash_push restores committed content and clears the staging area;
    // stash_pop re-applies the changes and re-stages them as Modified.
    #[test]
    fn test_stash_push_pop() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("a.txt"), "original")?;
        repo.add("a.txt")?;
        repo.commit("initial")?;

        fs::write(dir.path().join("a.txt"), "staged changes")?;
        repo.add("a.txt")?;

        let stash_index = repo.stash_push(Some("my stash"))?;
        assert_eq!(stash_index, 0);

        assert!(repo.meta.working_set()?.is_empty());
        let on_disk = fs::read_to_string(dir.path().join("a.txt"))?;
        assert_eq!(on_disk, "original");

        repo.stash_pop()?;

        let on_disk = fs::read_to_string(dir.path().join("a.txt"))?;
        assert_eq!(on_disk, "staged changes");

        let ws = repo.meta.working_set()?;
        assert_eq!(ws.len(), 1);
        assert_eq!(ws[0].0, "a.txt");
        assert_eq!(ws[0].1, FileStatus::Modified);

        Ok(())
    }
4807
    // stash_list reports entries in push order with stable indices.
    #[test]
    fn test_stash_list() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("a.txt"), "original")?;
        repo.add("a.txt")?;
        repo.commit("initial")?;

        fs::write(dir.path().join("a.txt"), "change 1")?;
        repo.add("a.txt")?;
        let idx0 = repo.stash_push(Some("first stash"))?;
        assert_eq!(idx0, 0);

        fs::write(dir.path().join("a.txt"), "change 2")?;
        repo.add("a.txt")?;
        let idx1 = repo.stash_push(Some("second stash"))?;
        assert_eq!(idx1, 1);

        let list = repo.stash_list()?;
        assert_eq!(list.len(), 2);
        assert_eq!(list[0].index, 0);
        assert_eq!(list[0].message, "first stash");
        assert_eq!(list[1].index, 1);
        assert_eq!(list[1].message, "second stash");

        Ok(())
    }
4836
    // stash_apply restores the content but, unlike pop, keeps the entry.
    #[test]
    fn test_stash_apply_keeps_entry() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("a.txt"), "original")?;
        repo.add("a.txt")?;
        repo.commit("initial")?;

        fs::write(dir.path().join("a.txt"), "changes to apply")?;
        repo.add("a.txt")?;
        let idx = repo.stash_push(Some("keep me"))?;
        assert_eq!(idx, 0);

        repo.stash_apply(0)?;

        let on_disk = fs::read_to_string(dir.path().join("a.txt"))?;
        assert_eq!(on_disk, "changes to apply");

        let list = repo.stash_list()?;
        assert_eq!(list.len(), 1);
        assert_eq!(list[0].index, 0);
        assert_eq!(list[0].message, "keep me");

        Ok(())
    }
4863
    // stash_drop removes the entry; dropping a missing index errors.
    #[test]
    fn test_stash_drop() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("a.txt"), "original")?;
        repo.add("a.txt")?;
        repo.commit("initial")?;

        fs::write(dir.path().join("a.txt"), "stashed content")?;
        repo.add("a.txt")?;
        repo.stash_push(Some("droppable"))?;

        repo.stash_drop(0)?;

        let list = repo.stash_list()?;
        assert!(list.is_empty());

        let result = repo.stash_drop(0);
        assert!(result.is_err());

        Ok(())
    }
4887
    // Popping with an empty stash stack is an error.
    #[test]
    fn test_stash_pop_empty() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        let result = repo.stash_pop();
        assert!(result.is_err());

        Ok(())
    }
4898
    // Pushing with nothing staged surfaces the "nothing to commit" error.
    #[test]
    fn test_stash_push_nothing() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        let result = repo.stash_push(None);
        assert!(result.is_err());
        let err = result.unwrap_err().to_string();
        assert!(err.contains("nothing to commit"));

        Ok(())
    }
4911
    // Soft reset moves HEAD only: working tree and staging area untouched.
    #[test]
    fn test_reset_soft() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("file1.txt"), "first content")?;
        repo.add("file1.txt")?;
        let first_commit = repo.commit("first commit")?;

        fs::write(dir.path().join("file2.txt"), "second content")?;
        repo.add("file2.txt")?;
        repo.commit("second commit")?;

        fs::write(dir.path().join("file2.txt"), "modified second")?;
        repo.add("file2.txt")?;

        let result = repo.reset(&first_commit.to_hex(), ResetMode::Soft)?;
        assert_eq!(result, first_commit);

        let (_, head_id) = repo.head()?;
        assert_eq!(head_id, first_commit);

        // Working tree untouched.
        assert!(dir.path().join("file2.txt").exists());
        assert_eq!(
            fs::read_to_string(dir.path().join("file2.txt"))?,
            "modified second"
        );

        // Staging area preserved.
        let status = repo.status()?;
        assert_eq!(status.staged_files.len(), 1);
        assert_eq!(status.staged_files[0].0, "file2.txt");

        Ok(())
    }
4950
    // Mixed reset moves HEAD and clears staging, but keeps the working tree.
    #[test]
    fn test_reset_mixed() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("file1.txt"), "first content")?;
        repo.add("file1.txt")?;
        let first_commit = repo.commit("first commit")?;

        fs::write(dir.path().join("file2.txt"), "second content")?;
        repo.add("file2.txt")?;
        repo.commit("second commit")?;

        fs::write(dir.path().join("file2.txt"), "modified second")?;
        repo.add("file2.txt")?;

        let result = repo.reset(&first_commit.to_hex(), ResetMode::Mixed)?;
        assert_eq!(result, first_commit);

        let (_, head_id) = repo.head()?;
        assert_eq!(head_id, first_commit);

        // Working tree untouched.
        assert!(dir.path().join("file2.txt").exists());
        assert_eq!(
            fs::read_to_string(dir.path().join("file2.txt"))?,
            "modified second"
        );

        // Staging area cleared.
        let status = repo.status()?;
        assert!(status.staged_files.is_empty());

        Ok(())
    }
4988
    // Hard reset moves HEAD and rewrites the working tree to match it.
    #[test]
    fn test_reset_hard() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("file1.txt"), "first content")?;
        repo.add("file1.txt")?;
        let first_commit = repo.commit("first commit")?;

        fs::write(dir.path().join("file2.txt"), "second content")?;
        repo.add("file2.txt")?;
        repo.commit("second commit")?;

        let result = repo.reset(&first_commit.to_hex(), ResetMode::Hard)?;
        assert_eq!(result, first_commit);

        let (_, head_id) = repo.head()?;
        assert_eq!(head_id, first_commit);

        // file2.txt belongs to the abandoned second commit and is removed.
        assert!(dir.path().join("file1.txt").exists());
        assert!(!dir.path().join("file2.txt").exists());

        let tree = repo.snapshot_head()?;
        assert!(tree.contains("file1.txt"));
        assert!(!tree.contains("file2.txt"));

        Ok(())
    }
5019
5020 #[test]
5021 fn test_cherry_pick() -> Result<(), Box<dyn std::error::Error>> {
5022 let dir = tempfile::tempdir().unwrap();
5023 let mut repo = Repository::init(dir.path(), "alice")?;
5024
5025 fs::write(dir.path().join("a.txt"), "content of a")?;
5026 repo.add("a.txt")?;
5027 repo.commit("add a.txt")?;
5028
5029 repo.create_branch("feature", None)?;
5030
5031 fs::write(dir.path().join("b.txt"), "content of b")?;
5032 repo.add("b.txt")?;
5033 let b_commit = repo.commit("add b.txt")?;
5034
5035 repo.checkout("feature")?;
5036
5037 fs::write(dir.path().join("c.txt"), "content of c")?;
5039 repo.add("c.txt")?;
5040 repo.commit("add c.txt on feature")?;
5041
5042 repo.cherry_pick(&b_commit)?;
5043
5044 assert!(dir.path().join("b.txt").exists());
5045 let content = fs::read_to_string(dir.path().join("b.txt"))?;
5046 assert_eq!(content, "content of b");
5047
5048 let log = repo.log(None)?;
5049 assert!(log.iter().any(|p| p.message == "add b.txt"));
5050
5051 Ok(())
5052 }
5053
5054 #[test]
5055 fn test_cherry_pick_nonexistent() {
5056 let dir = tempfile::tempdir().unwrap();
5057 let mut repo = Repository::init(dir.path(), "alice").unwrap();
5058
5059 let fake_hash = Hash::from_data(b"nonexistent");
5060 let result = repo.cherry_pick(&fake_hash);
5061 assert!(result.is_err());
5062 }
5063
5064 #[test]
5065 fn test_rebase() -> Result<(), Box<dyn std::error::Error>> {
5066 let dir = tempfile::tempdir().unwrap();
5067 let mut repo = Repository::init(dir.path(), "alice")?;
5068
5069 fs::write(dir.path().join("a.txt"), "content of a")?;
5070 repo.add("a.txt")?;
5071 repo.commit("add a.txt")?;
5072
5073 repo.create_branch("feature", None)?;
5074
5075 repo.checkout("feature")?;
5076 fs::write(dir.path().join("b.txt"), "content of b")?;
5077 repo.add("b.txt")?;
5078 repo.commit("add b.txt on feature")?;
5079
5080 repo.checkout("main")?;
5081 fs::write(dir.path().join("c.txt"), "content of c")?;
5082 repo.add("c.txt")?;
5083 repo.commit("add c.txt on main")?;
5084
5085 repo.checkout("feature")?;
5086
5087 let result = repo.rebase("main")?;
5088 assert!(result.patches_replayed > 0);
5089
5090 assert!(dir.path().join("b.txt").exists());
5091 assert!(dir.path().join("c.txt").exists());
5092
5093 let log = repo.log(None)?;
5094 assert!(log.iter().any(|p| p.message == "add b.txt on feature"));
5095 assert!(log.iter().any(|p| p.message == "add c.txt on main"));
5096
5097 Ok(())
5098 }
5099
5100 #[test]
5101 fn test_rebase_fast_forward() -> Result<(), Box<dyn std::error::Error>> {
5102 let dir = tempfile::tempdir().unwrap();
5103 let mut repo = Repository::init(dir.path(), "alice")?;
5104
5105 fs::write(dir.path().join("a.txt"), "content of a")?;
5106 repo.add("a.txt")?;
5107 repo.commit("add a.txt")?;
5108
5109 repo.create_branch("feature", None)?;
5110
5111 fs::write(dir.path().join("b.txt"), "content of b")?;
5112 repo.add("b.txt")?;
5113 repo.commit("add b.txt")?;
5114
5115 repo.checkout("feature")?;
5116
5117 let result = repo.rebase("main")?;
5118 assert_eq!(result.patches_replayed, 0);
5119
5120 assert!(dir.path().join("b.txt").exists());
5121
5122 Ok(())
5123 }
5124
5125 #[test]
5126 fn test_blame() -> Result<(), Box<dyn std::error::Error>> {
5127 let dir = tempfile::tempdir().unwrap();
5128 let mut repo = Repository::init(dir.path(), "alice")?;
5129
5130 fs::write(dir.path().join("test.txt"), "line1\nline2\nline3")?;
5131 repo.add("test.txt")?;
5132 let first_commit = repo.commit("initial content")?;
5133
5134 fs::write(dir.path().join("test.txt"), "line1\nline2-modified\nline3")?;
5135 repo.add("test.txt")?;
5136 let second_commit = repo.commit("modify line2")?;
5137
5138 let blame = repo.blame("test.txt")?;
5139
5140 assert_eq!(blame.len(), 3);
5141 assert_eq!(blame[0].line, "line1");
5142 assert_eq!(blame[0].patch_id, first_commit);
5143
5144 assert_eq!(blame[1].line, "line2-modified");
5145 assert_eq!(blame[1].patch_id, second_commit);
5146
5147 assert_eq!(blame[2].line, "line3");
5148 assert_eq!(blame[2].patch_id, first_commit);
5149
5150 Ok(())
5151 }
5152
5153 #[test]
5154 fn test_blame_nonexistent_file() {
5155 let dir = tempfile::tempdir().unwrap();
5156 let repo = Repository::init(dir.path(), "alice").unwrap();
5157
5158 let result = repo.blame("nonexistent.txt");
5159 assert!(result.is_err());
5160 }
5161
5162 #[test]
5163 fn test_rm_file() -> Result<(), Box<dyn std::error::Error>> {
5164 let dir = tempfile::tempdir().unwrap();
5165 let mut repo = Repository::init(dir.path(), "alice")?;
5166
5167 fs::write(dir.path().join("test.txt"), "content")?;
5168 repo.add("test.txt")?;
5169 repo.commit("initial")?;
5170
5171 fs::remove_file(dir.path().join("test.txt"))?;
5172 repo.add("test.txt")?;
5173
5174 assert!(!dir.path().join("test.txt").exists());
5175
5176 let ws = repo.meta.working_set()?;
5177 assert_eq!(ws.len(), 1);
5178 assert_eq!(ws[0].0, "test.txt");
5179 assert_eq!(ws[0].1, FileStatus::Deleted);
5180
5181 Ok(())
5182 }
5183
5184 #[test]
5185 fn test_rm_cached() -> Result<(), Box<dyn std::error::Error>> {
5186 let dir = tempfile::tempdir().unwrap();
5187 let mut repo = Repository::init(dir.path(), "alice")?;
5188
5189 fs::write(dir.path().join("test.txt"), "content")?;
5190 repo.add("test.txt")?;
5191 repo.commit("initial")?;
5192
5193 let repo_path = RepoPath::new("test.txt")?;
5194 repo.meta.working_set_add(&repo_path, FileStatus::Deleted)?;
5195
5196 assert!(dir.path().join("test.txt").exists());
5197
5198 let ws = repo.meta.working_set()?;
5199 assert_eq!(ws.len(), 1);
5200 assert_eq!(ws[0].0, "test.txt");
5201 assert_eq!(ws[0].1, FileStatus::Deleted);
5202
5203 Ok(())
5204 }
5205
5206 #[test]
5207 fn test_mv_file() -> Result<(), Box<dyn std::error::Error>> {
5208 let dir = tempfile::tempdir().unwrap();
5209 let mut repo = Repository::init(dir.path(), "alice")?;
5210
5211 fs::write(dir.path().join("old.txt"), "content")?;
5212 repo.add("old.txt")?;
5213 repo.commit("initial")?;
5214
5215 repo.rename_file("old.txt", "new.txt")?;
5216
5217 assert!(!dir.path().join("old.txt").exists());
5218 assert!(dir.path().join("new.txt").exists());
5219
5220 let ws = repo.meta.working_set()?;
5221 assert!(
5222 ws.iter()
5223 .any(|(p, s)| p == "old.txt" && *s == FileStatus::Deleted)
5224 );
5225 assert!(
5226 ws.iter()
5227 .any(|(p, s)| p == "new.txt" && *s == FileStatus::Added)
5228 );
5229
5230 Ok(())
5231 }
5232
5233 #[test]
5234 fn test_mv_nonexistent() {
5235 let dir = tempfile::tempdir().unwrap();
5236 let repo = Repository::init(dir.path(), "alice").unwrap();
5237
5238 let result = repo.rename_file("nonexistent.txt", "new.txt");
5239 assert!(result.is_err());
5240 let err = result.unwrap_err().to_string();
5241 assert!(err.contains("path not found"));
5242 }
5243
5244 #[test]
5245 fn test_remove_remote() -> Result<(), Box<dyn std::error::Error>> {
5246 let dir = tempfile::tempdir().unwrap();
5247 let repo = Repository::init(dir.path(), "alice")?;
5248
5249 repo.add_remote("origin", "http://example.com")?;
5250
5251 let remotes = repo.list_remotes()?;
5252 assert_eq!(remotes.len(), 1);
5253 assert_eq!(remotes[0].0, "origin");
5254
5255 repo.remove_remote("origin")?;
5256
5257 let remotes = repo.list_remotes()?;
5258 assert!(remotes.is_empty());
5259
5260 let result = repo.remove_remote("nonexistent");
5261 assert!(result.is_err());
5262
5263 Ok(())
5264 }
5265}