1use crate::cas::store::{BlobStore, CasError};
25use crate::dag::graph::{DagError, PatchDag};
26use crate::engine::apply::{ApplyError, apply_patch_chain, resolve_payload_to_hash};
27use crate::engine::diff::{DiffEntry, DiffType, diff_trees};
28use crate::engine::tree::FileTree;
29use crate::metadata::MetaError;
30use crate::patch::conflict::Conflict;
31use crate::patch::merge::MergeResult;
32use crate::patch::types::{FileChange, OperationType, Patch, PatchId, TouchSet};
33use serde::{Deserialize, Serialize};
34use std::cell::RefCell;
35use std::collections::{HashMap, HashSet, VecDeque};
36use std::fs;
37use std::io;
38use std::path::{Path, PathBuf};
39use suture_common::{BranchName, CommonError, FileStatus, Hash, RepoPath};
40use thiserror::Error;
41
/// Errors returned by repository-level operations.
///
/// Layer errors (CAS, DAG, metadata, I/O, patch application, common types)
/// are wrapped via `#[from]`, so `?` converts them automatically; the
/// remaining variants describe repository-state conditions.
#[derive(Error, Debug)]
pub enum RepoError {
    /// The path has no `.suture` directory.
    #[error("not a suture repository: {0}")]
    NotARepository(PathBuf),

    /// `init` was called on a path that already has a `.suture` directory.
    #[error("repository already exists: {0}")]
    AlreadyExists(PathBuf),

    /// Error from the content-addressed blob store.
    #[error("CAS error: {0}")]
    Cas(#[from] CasError),

    /// Error from the in-memory patch DAG.
    #[error("DAG error: {0}")]
    Dag(#[from] DagError),

    /// Error from the SQLite-backed metadata store.
    #[error("metadata error: {0}")]
    Meta(#[from] MetaError),

    /// Filesystem-level failure.
    #[error("I/O error: {0}")]
    Io(#[from] std::io::Error),

    /// Failure while replaying a patch chain into a file tree.
    #[error("patch application error: {0}")]
    Apply(#[from] ApplyError),

    /// Free-form patch-related failure.
    #[error("patch error: {0}")]
    Patch(String),

    /// The working set has no staged changes.
    #[error("nothing to commit")]
    NothingToCommit,

    /// An operation was attempted while a merge awaits conflict resolution.
    #[error("merge in progress — resolve conflicts first")]
    MergeInProgress,

    /// Staged changes would be clobbered; carries the staged-file count.
    #[error("uncommitted changes would be overwritten (staged: {0})")]
    DirtyWorkingTree(usize),

    /// Named branch does not exist in the DAG.
    #[error("branch not found: {0}")]
    BranchNotFound(String),

    /// Error from shared `suture_common` types (e.g. hash or name parsing).
    #[error("common error: {0}")]
    Common(#[from] CommonError),

    /// Catch-all with a caller-supplied message.
    #[error("{0}")]
    Custom(String),

    /// Operation not supported in the current configuration.
    #[error("unsupported operation: {0}")]
    Unsupported(String),
}
105
/// Mode selector for `reset`-style operations.
///
/// NOTE(review): the consuming implementation is outside this view; the
/// variants presumably mirror git's soft/mixed/hard semantics — confirm
/// against the reset implementation before documenting further.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ResetMode {
    Soft,
    Mixed,
    Hard,
}
116
/// A suture repository: working directory plus `.suture` state.
pub struct Repository {
    // Working-directory root (parent of `.suture`).
    root: PathBuf,
    #[allow(dead_code)]
    // Path to the `.suture` directory (also holds the worktree HEAD file).
    suture_dir: PathBuf,
    // Content-addressed blob storage for file contents.
    cas: BlobStore,
    // In-memory patch graph; rebuilt from metadata on `open`.
    dag: PatchDag,
    // SQLite-backed store for patches, branches, config, and working set.
    meta: crate::metadata::MetadataStore,
    // Author recorded on new patches.
    author: String,
    // Glob-like patterns excluded from `add_all` walks.
    ignore_patterns: Vec<String>,
    // Parents for the commit that concludes an in-progress merge; empty
    // when no merge is pending.
    pending_merge_parents: Vec<PatchId>,
    // Lazily-populated caches of the HEAD snapshot/id/branch; cleared by
    // `invalidate_head_cache`.
    cached_head_snapshot: RefCell<Option<FileTree>>,
    cached_head_id: RefCell<Option<PatchId>>,
    cached_head_branch: RefCell<Option<String>>,
    // Repo-local config file; consulted before the metadata store.
    repo_config: crate::metadata::repo_config::RepoConfig,
    // True when this checkout is a linked worktree (HEAD lives in a file
    // rather than in the metadata store).
    is_worktree: bool,
}
147
148impl Repository {
    /// Creates a new repository at `path`.
    ///
    /// Lays out `.suture/objects`, opens the blob store and metadata
    /// database, seeds the DAG with an empty root "Initial commit" patch,
    /// points a fresh `main` branch at it, and records `author` in config.
    ///
    /// # Errors
    /// `RepoError::AlreadyExists` if `.suture` already exists, plus any
    /// CAS/DAG/metadata/I/O failure from the setup steps.
    pub fn init(path: &Path, author: &str) -> Result<Self, RepoError> {
        let suture_dir = path.join(".suture");
        if suture_dir.exists() {
            return Err(RepoError::AlreadyExists(path.to_path_buf()));
        }

        fs::create_dir_all(suture_dir.join("objects"))?;

        let mut cas = BlobStore::new(&suture_dir)?;
        // Skip per-read hash verification — presumably a performance
        // trade-off; writes are unaffected.
        cas.set_verify_on_read(false);

        let meta = crate::metadata::MetadataStore::open(&suture_dir.join("metadata.db"))?;

        let mut dag = PatchDag::new();

        // Every repository starts from a parentless root patch so later
        // patches always have an ancestor to chain from.
        let root_patch = Patch::new(
            OperationType::Create,
            TouchSet::empty(),
            None,
            vec![],
            vec![],
            author.to_string(),
            "Initial commit".to_string(),
        );
        let root_id = dag.add_patch(root_patch.clone(), vec![])?;

        meta.store_patch(&root_patch)?;

        let main_branch = BranchName::new("main").expect("hardcoded 'main' is always valid");
        dag.create_branch(main_branch.clone(), root_id)?;
        meta.set_branch(&main_branch, &root_id)?;

        meta.set_config("author", author)?;

        let ignore_patterns = load_ignore_patterns(path);

        Ok(Self {
            root: path.to_path_buf(),
            suture_dir,
            cas,
            dag,
            meta,
            author: author.to_string(),
            ignore_patterns,
            pending_merge_parents: Vec::new(),
            cached_head_snapshot: RefCell::new(None),
            cached_head_id: RefCell::new(None),
            cached_head_branch: RefCell::new(None),
            repo_config: crate::metadata::repo_config::RepoConfig::default(),
            is_worktree: false,
        })
    }
    /// Opens an existing repository at `path`, rebuilding the in-memory DAG
    /// from the metadata database.
    ///
    /// Patches are loaded in repeated passes so a patch is only inserted
    /// once all of its parents are present; the pass count is bounded by
    /// `len + 1` to guarantee termination even on corrupt parent links.
    ///
    /// # Errors
    /// `RepoError::NotARepository` when `.suture` is missing, plus any
    /// store/DAG error from the reload.
    pub fn open(path: &Path) -> Result<Self, RepoError> {
        let suture_dir = path.join(".suture");
        if !suture_dir.exists() {
            return Err(RepoError::NotARepository(path.to_path_buf()));
        }

        // A `.suture/worktree` marker distinguishes linked worktrees, whose
        // HEAD lives in a file instead of the metadata store.
        let is_worktree = suture_dir.join("worktree").exists();

        let mut cas = BlobStore::new(&suture_dir)?;
        cas.set_verify_on_read(false);
        let meta = crate::metadata::MetadataStore::open(&suture_dir.join("metadata.db"))?;

        let mut dag = PatchDag::new();

        // Collect every stored patch id; rows with unparseable ids or
        // hashes are silently skipped.
        let all_patch_ids: Vec<PatchId> = {
            let mut stmt = meta
                .conn()
                .prepare("SELECT id FROM patches ORDER BY id")
                .map_err(|e: rusqlite::Error| RepoError::Custom(e.to_string()))?;
            let rows = stmt
                .query_map([], |row: &rusqlite::Row| row.get::<_, String>(0))
                .map_err(|e: rusqlite::Error| RepoError::Custom(e.to_string()))?;
            rows.filter_map(|r: Result<String, _>| r.ok())
                .filter_map(|hex| Hash::from_hex(&hex).ok())
                .collect()
        };

        // Multi-pass topological load: each pass inserts patches whose
        // parents are already loaded (or are the zero hash). The attempt
        // bound prevents spinning forever on broken parent references.
        let mut loaded: HashSet<PatchId> = HashSet::new();
        let mut attempts = 0;
        while loaded.len() < all_patch_ids.len() && attempts < all_patch_ids.len() + 1 {
            for patch_id in &all_patch_ids {
                if loaded.contains(patch_id) {
                    continue;
                }
                if let Ok(patch) = meta.get_patch(patch_id) {
                    let parents_ready = patch
                        .parent_ids
                        .iter()
                        .all(|pid| loaded.contains(pid) || *pid == Hash::ZERO);
                    if parents_ready {
                        // Only link parents actually present in the DAG
                        // (the zero hash is a sentinel, not a real node).
                        let valid_parents: Vec<PatchId> = patch
                            .parent_ids
                            .iter()
                            .filter(|pid| loaded.contains(pid))
                            .copied()
                            .collect();
                        let _ = dag.add_patch(patch, valid_parents);
                        loaded.insert(*patch_id);
                    }
                }
            }
            attempts += 1;
        }

        // Recreate branch heads from the metadata store; invalid names are
        // skipped rather than failing the whole open.
        let branches = meta.list_branches()?;
        for (name, target_id) in &branches {
            let branch_name = match BranchName::new(name) {
                Ok(b) => b,
                Err(_) => continue,
            };
            if !dag.branch_exists(&branch_name) {
                let _ = dag.create_branch(branch_name, *target_id);
            }
        }

        // Author resolution: `user.name`, then legacy `author`, then a
        // placeholder.
        let author = meta
            .get_config("user.name")
            .unwrap_or(None)
            .or_else(|| meta.get_config("author").unwrap_or(None))
            .unwrap_or_else(|| "unknown".to_string());

        // Resume an interrupted merge if its parents were persisted.
        let restored_parents = restore_pending_merge_parents(&meta);

        let ignore_patterns = load_ignore_patterns(path);

        let repo_config = crate::metadata::repo_config::RepoConfig::load(path);

        Ok(Self {
            root: path.to_path_buf(),
            suture_dir,
            cas,
            dag,
            meta,
            author,
            ignore_patterns,
            pending_merge_parents: restored_parents,
            cached_head_snapshot: RefCell::new(None),
            cached_head_id: RefCell::new(None),
            cached_head_branch: RefCell::new(None),
            repo_config,
            is_worktree,
        })
    }
    /// Creates a throwaway repository for tests: a temp-dir blob store plus
    /// an in-memory metadata database, seeded like `init` with a root patch
    /// and a `main` branch under the author name "suture".
    ///
    /// NOTE(review): `.keep()` disables the temp dir's automatic cleanup,
    /// so the on-disk blob store is intentionally left behind — confirm
    /// callers rely on this (the in-memory DB vanishes regardless).
    pub fn open_in_memory() -> Result<Self, RepoError> {
        let temp_root = tempfile::tempdir().map_err(RepoError::Io)?.keep();
        let suture_dir = temp_root.join(".suture");
        fs::create_dir_all(&suture_dir)?;

        let mut cas = BlobStore::new(&suture_dir)?;
        cas.set_verify_on_read(false);
        let meta = crate::metadata::MetadataStore::open_in_memory()?;

        let mut dag = PatchDag::new();
        let root_patch = Patch::new(
            OperationType::Create,
            TouchSet::empty(),
            None,
            vec![],
            vec![],
            "suture".to_string(),
            "Initial commit".to_string(),
        );
        let root_id = dag.add_patch(root_patch.clone(), vec![])?;
        meta.store_patch(&root_patch)?;

        let main_branch = BranchName::new("main").expect("hardcoded 'main' is always valid");
        dag.create_branch(main_branch.clone(), root_id)?;
        meta.set_branch(&main_branch, &root_id)?;
        meta.set_config("author", "suture")?;

        Ok(Self {
            root: temp_root,
            suture_dir,
            cas,
            dag,
            meta,
            author: "suture".to_string(),
            ignore_patterns: Vec::new(),
            pending_merge_parents: Vec::new(),
            cached_head_snapshot: RefCell::new(None),
            cached_head_id: RefCell::new(None),
            cached_head_branch: RefCell::new(None),
            repo_config: crate::metadata::repo_config::RepoConfig::default(),
            is_worktree: false,
        })
    }
366
    /// Creates a branch pointing at `target`.
    ///
    /// `target` may be `None` (current DAG head), `"HEAD"`, `"HEAD~N"`
    /// (N-th first-parent ancestor of the current branch head), a branch
    /// name, or a patch-id hex string — tried in that order.
    ///
    /// NOTE(review): the `"HEAD"` arm and the `None` arm use `dag.head()`
    /// while `"HEAD~N"` uses `self.head()` (branch-resolved); confirm the
    /// two agree in all states — they look inconsistent from here.
    pub fn create_branch(&mut self, name: &str, target: Option<&str>) -> Result<(), RepoError> {
        let branch = BranchName::new(name)?;
        let target_id = match target {
            Some(t) => {
                if t == "HEAD" {
                    let head = self
                        .dag
                        .head()
                        .ok_or_else(|| RepoError::Custom("no HEAD".to_string()))?;
                    head.1
                } else if let Some(rest) = t.strip_prefix("HEAD~") {
                    let n: usize = rest
                        .parse()
                        .map_err(|_| RepoError::Custom(format!("invalid HEAD~N: {}", t)))?;
                    let (_, head_id) = self.head()?;
                    // Walk first parents N times from the branch head.
                    let mut current = head_id;
                    for _ in 0..n {
                        let patch = self.dag.get_patch(&current).ok_or_else(|| {
                            RepoError::Custom("HEAD ancestor not found".to_string())
                        })?;
                        current = patch
                            .parent_ids
                            .first()
                            .ok_or_else(|| {
                                RepoError::Custom("HEAD has no parent".to_string())
                            })?
                            .to_owned();
                    }
                    current
                } else if let Ok(bn) = BranchName::new(t) {
                    self.dag
                        .get_branch(&bn)
                        .ok_or_else(|| RepoError::BranchNotFound(t.to_string()))?
                } else {
                    Hash::from_hex(t)
                        .map_err(|_| RepoError::Custom(format!("invalid target: {}", t)))?
                }
            }
            None => {
                let head = self
                    .dag
                    .head()
                    .ok_or_else(|| RepoError::Custom("no HEAD branch".to_string()))?;
                head.1
            }
        };

        // Record the branch both in the DAG and durably in metadata.
        self.dag.create_branch(branch.clone(), target_id)?;
        self.meta.set_branch(&branch, &target_id)?;
        Ok(())
    }
424
    /// Returns the current `(branch name, branch tip)` pair.
    ///
    /// Served from the `RefCell` caches when both are populated; otherwise
    /// resolves the HEAD branch name, looks up its tip in the DAG, and
    /// refills the caches.
    pub fn head(&self) -> Result<(String, PatchId), RepoError> {
        if let Some(ref cached) = *self.cached_head_id.borrow()
            && let Some(ref branch) = *self.cached_head_branch.borrow()
        {
            return Ok((branch.clone(), *cached));
        }
        let branch_name = self.read_head_branch()?;

        let bn = BranchName::new(&branch_name)?;
        let target_id = self
            .dag
            .get_branch(&bn)
            .ok_or_else(|| RepoError::BranchNotFound(branch_name.clone()))?;

        // Populate the caches for subsequent calls.
        *self.cached_head_branch.borrow_mut() = Some(branch_name.clone());
        *self.cached_head_id.borrow_mut() = Some(target_id);
        Ok((branch_name, target_id))
    }
447
    /// Lists all branches known to the in-memory DAG as `(name, tip)` pairs.
    pub fn list_branches(&self) -> Vec<(String, PatchId)> {
        self.dag.list_branches()
    }
452
453 pub fn delete_branch(&mut self, name: &str) -> Result<(), RepoError> {
455 let (current_branch, _) = self.head()?;
456 if current_branch == name {
457 return Err(RepoError::Custom(format!(
458 "cannot delete the current branch '{}'",
459 name
460 )));
461 }
462 let branch = BranchName::new(name)?;
463 self.dag.delete_branch(&branch)?;
464 self.meta
466 .conn()
467 .execute(
468 "DELETE FROM branches WHERE name = ?1",
469 rusqlite::params![name],
470 )
471 .map_err(|e| RepoError::Custom(e.to_string()))?;
472 Ok(())
473 }
474
475 pub fn get_config(&self, key: &str) -> Result<Option<String>, RepoError> {
486 if let Some(val) = self.repo_config.get(key) {
488 return Ok(Some(val));
489 }
490 if let Some(val) = self.meta.get_config(key).map_err(RepoError::from)? {
492 return Ok(Some(val));
493 }
494 let global = crate::metadata::global_config::GlobalConfig::load();
496 Ok(global.get(key))
497 }
498
499 pub fn set_config(&mut self, key: &str, value: &str) -> Result<(), RepoError> {
501 self.meta.set_config(key, value).map_err(RepoError::from)
502 }
503
504 pub fn list_config(&self) -> Result<Vec<(String, String)>, RepoError> {
506 self.meta.list_config().map_err(RepoError::from)
507 }
508
509 fn read_head_branch(&self) -> Result<String, RepoError> {
514 if self.is_worktree {
515 let head_path = self.suture_dir.join("HEAD");
516 if head_path.exists() {
517 Ok(fs::read_to_string(&head_path)?.trim().to_string())
518 } else {
519 Ok("main".to_string())
520 }
521 } else {
522 Ok(self
523 .meta
524 .get_config("head_branch")
525 .unwrap_or(None)
526 .unwrap_or_else(|| "main".to_string()))
527 }
528 }
529
530 fn write_head_branch(&self, branch: &str) -> Result<(), RepoError> {
531 if self.is_worktree {
532 fs::write(self.suture_dir.join("HEAD"), branch)?;
533 } else {
534 self.meta
535 .set_config("head_branch", branch)
536 .map_err(RepoError::Meta)?;
537 }
538 Ok(())
539 }
540
    /// Creates (or overwrites) a tag stored as config key `tag.<name>`.
    ///
    /// `target` accepts `"HEAD"`, `"HEAD~N"`, a branch name, or a patch-id
    /// hex string; `None` tags the current branch head.
    pub fn create_tag(&mut self, name: &str, target: Option<&str>) -> Result<(), RepoError> {
        let target_id = match target {
            Some(t) if t == "HEAD" || t.starts_with("HEAD~") => {
                let (_, head_id) = self.head()?;
                let mut current = head_id;
                // For plain "HEAD" the strip_prefix fails and the loop is
                // skipped, leaving `current` at the branch head.
                if let Some(n_str) = t.strip_prefix("HEAD~") {
                    let n: usize = n_str
                        .parse()
                        .map_err(|_| RepoError::Custom(format!("invalid HEAD~N: {}", n_str)))?;
                    // Walk first parents N times.
                    for _ in 0..n {
                        if let Some(patch) = self.dag.get_patch(&current) {
                            current = *patch
                                .parent_ids
                                .first()
                                .ok_or_else(|| RepoError::Custom("HEAD has no parent".into()))?;
                        } else {
                            return Err(RepoError::Custom(
                                "HEAD ancestor not found".into(),
                            ));
                        }
                    }
                }
                current
            }
            Some(t) => {
                // Try a branch name first, then a raw patch-id hex.
                if let Ok(bn) = BranchName::new(t) {
                    self.dag
                        .get_branch(&bn)
                        .ok_or_else(|| RepoError::BranchNotFound(t.to_string()))?
                } else {
                    Hash::from_hex(t)
                        .map_err(|_| RepoError::Custom(format!("invalid target: {}", t)))?
                }
            }
            None => {
                let (_, head_id) = self.head()?;
                head_id
            }
        };
        self.set_config(&format!("tag.{name}"), &target_id.to_hex())
    }
589
590 pub fn delete_tag(&mut self, name: &str) -> Result<(), RepoError> {
592 let key = format!("tag.{name}");
593 let exists: bool = self
594 .meta
595 .conn()
596 .query_row(
597 "SELECT COUNT(*) FROM config WHERE key = ?1",
598 rusqlite::params![key],
599 |row| row.get::<_, i64>(0),
600 )
601 .map(|count| count > 0)
602 .map_err(|e| RepoError::Custom(e.to_string()))?;
603 if !exists {
604 return Err(RepoError::Custom(format!("tag '{}' not found", name)));
605 }
606 self.meta
607 .conn()
608 .execute("DELETE FROM config WHERE key = ?1", rusqlite::params![key])
609 .map_err(|e| RepoError::Custom(e.to_string()))?;
610 Ok(())
611 }
612
613 pub fn list_tags(&self) -> Result<Vec<(String, PatchId)>, RepoError> {
615 let config = self.list_config()?;
616 let mut tags = Vec::new();
617 for (key, value) in config {
618 if let Some(name) = key.strip_prefix("tag.")
619 && let Ok(id) = Hash::from_hex(&value)
620 {
621 tags.push((name.to_string(), id));
622 }
623 }
624 tags.sort_by(|a, b| a.0.cmp(&b.0));
625 Ok(tags)
626 }
627
628 pub fn resolve_tag(&self, name: &str) -> Result<Option<PatchId>, RepoError> {
630 let val = self.get_config(&format!("tag.{name}"))?;
631 match val {
632 Some(hex) => Ok(Some(Hash::from_hex(&hex)?)),
633 None => Ok(None),
634 }
635 }
636
637 pub fn add_note(&self, patch_id: &PatchId, note: &str) -> Result<(), RepoError> {
643 let existing = self.list_notes(patch_id)?;
644 let next_idx = existing.len();
645 let key = format!("note.{}.{}", patch_id, next_idx);
646 self.meta.set_config(&key, note).map_err(RepoError::Meta)
647 }
648
649 pub fn list_notes(&self, patch_id: &PatchId) -> Result<Vec<String>, RepoError> {
651 let prefix = format!("note.{}.", patch_id);
652 let all_config = self.meta.list_config().map_err(RepoError::Meta)?;
653 let mut notes: Vec<(usize, String)> = Vec::new();
654 for (key, value) in &all_config {
655 if let Some(idx_str) = key.strip_prefix(&prefix)
656 && let Ok(idx) = idx_str.parse::<usize>()
657 {
658 notes.push((idx, value.clone()));
659 }
660 }
661 notes.sort_by_key(|(idx, _)| *idx);
662 Ok(notes.into_iter().map(|(_, v)| v).collect())
663 }
664
665 pub fn remove_note(&self, patch_id: &PatchId, index: usize) -> Result<(), RepoError> {
667 let notes = self.list_notes(patch_id)?;
668 if index >= notes.len() {
669 return Err(RepoError::Custom(format!(
670 "note index {} out of range ({} notes for commit)",
671 index,
672 notes.len()
673 )));
674 }
675 let key = format!("note.{}.{}", patch_id, index);
676 self.meta.delete_config(&key).map_err(RepoError::Meta)
677 }
678
    /// Returns every patch reachable from any branch tip that is NOT an
    /// ancestor of (or equal to) `since_id`, topologically sorted so
    /// parents precede children.
    pub fn patches_since(&self, since_id: &PatchId) -> Vec<Patch> {
        // Everything at or below `since_id` is already known to the peer.
        let since_ancestors = self.dag.ancestors(since_id);
        let mut known = since_ancestors;
        known.insert(*since_id);

        // DFS from all branch tips, stopping at known patches, to collect
        // the "new" frontier.
        let mut new_ids: HashSet<PatchId> = HashSet::new();
        let mut stack: Vec<PatchId> = self.dag.list_branches().iter().map(|(_, id)| *id).collect();

        while let Some(id) = stack.pop() {
            if !known.contains(&id)
                && new_ids.insert(id)
                && let Some(node) = self.dag.get_node(&id)
            {
                for parent in &node.patch.parent_ids {
                    if !known.contains(parent) && !new_ids.contains(parent) {
                        stack.push(*parent);
                    }
                }
            }
        }

        let patches: HashMap<PatchId, Patch> = new_ids
            .into_iter()
            .filter_map(|id| self.dag.get_patch(&id).map(|p| (id, p.clone())))
            .collect();

        // Kahn's algorithm over the subgraph of new patches: in-degree
        // counts only parents that are themselves in the new set.
        let mut in_degree: HashMap<PatchId, usize> = HashMap::new();
        let mut children: HashMap<PatchId, Vec<PatchId>> = HashMap::new();
        for (&id, patch) in &patches {
            in_degree.entry(id).or_insert(0);
            for parent_id in &patch.parent_ids {
                if patches.contains_key(parent_id) {
                    children.entry(*parent_id).or_default().push(id);
                    *in_degree.entry(id).or_insert(0) += 1;
                }
            }
        }

        // Seed with patches whose parents are all outside the new set.
        let mut queue: VecDeque<PatchId> = in_degree
            .iter()
            .filter(|&(_, deg)| *deg == 0)
            .map(|(&id, _)| id)
            .collect();
        let mut sorted_ids: Vec<PatchId> = Vec::with_capacity(patches.len());

        while let Some(id) = queue.pop_front() {
            sorted_ids.push(id);
            if let Some(kids) = children.get(&id) {
                for &child in kids {
                    let deg = in_degree
                        .get_mut(&child)
                        .expect("in-degree entry exists for child in topo sort");
                    *deg -= 1;
                    if *deg == 0 {
                        queue.push_back(child);
                    }
                }
            }
        }

        sorted_ids
            .into_iter()
            .filter_map(|id| patches.get(&id).cloned())
            .collect()
    }
755
756 pub fn status(&self) -> Result<RepoStatus, RepoError> {
762 let working_set = self.meta.working_set()?;
763 let branches = self.list_branches();
764 let head = self.head()?;
765
766 Ok(RepoStatus {
767 head_branch: Some(head.0),
768 head_patch: Some(head.1),
769 branch_count: branches.len(),
770 staged_files: working_set
771 .iter()
772 .filter(|(_, s)| {
773 matches!(
774 s,
775 FileStatus::Added | FileStatus::Modified | FileStatus::Deleted
776 )
777 })
778 .map(|(p, s)| (p.clone(), *s))
779 .collect(),
780 patch_count: self.dag.patch_count(),
781 })
782 }
783
784 pub fn add(&self, path: &str) -> Result<(), RepoError> {
786 let repo_path = RepoPath::new(path)?;
787 let full_path = self.root.join(path);
788
789 if !full_path.exists() {
790 if self.is_tracked(path)? {
791 self.meta.working_set_add(&repo_path, FileStatus::Deleted)?;
792 return Ok(());
793 }
794 return Err(RepoError::Io(io::Error::new(
795 io::ErrorKind::NotFound,
796 format!("file not found: {}", path),
797 )));
798 }
799
800 let status = if self.is_tracked(path)? {
801 FileStatus::Modified
802 } else {
803 FileStatus::Added
804 };
805
806 self.meta.working_set_add(&repo_path, status)?;
807 Ok(())
808 }
809
810 pub fn add_all(&self) -> Result<usize, RepoError> {
812 let tree = self.snapshot_head()?;
813 let mut count = 0;
814
815 for entry in walk_dir(&self.root, &self.ignore_patterns)? {
816 let rel_path = entry.relative;
817 let full_path = self.root.join(&rel_path);
818
819 let is_tracked = tree.contains(&rel_path);
820
821 if is_tracked
823 && let Ok(data) = fs::read(&full_path)
824 && let Some(old_hash) = tree.get(&rel_path)
825 && Hash::from_data(&data) == *old_hash
826 {
827 continue; }
829
830 let status = if is_tracked {
831 FileStatus::Modified
832 } else {
833 FileStatus::Added
834 };
835
836 let repo_path = RepoPath::new(&rel_path)?;
837 self.meta.working_set_add(&repo_path, status)?;
838 count += 1;
839 }
840
841 Ok(count)
842 }
843
    /// Reports whether `path` exists in the HEAD tree.
    ///
    /// Tries three sources in order of cost: the in-memory HEAD snapshot
    /// cache, the metadata store's per-patch tree index, and finally a
    /// linear scan of all patches' `target_path` fields as a last resort.
    fn is_tracked(&self, path: &str) -> Result<bool, RepoError> {
        if let Some(ref tree) = *self.cached_head_snapshot.borrow() {
            return Ok(tree.contains(path));
        }
        if let Ok((_, head_id)) = self.head()
            && let Ok(result) = self.meta.file_tree_contains(&head_id, path)
        {
            return Ok(result);
        }
        // Fallback: any patch that ever targeted this path counts as
        // tracked. NOTE(review): this ignores later deletions — presumably
        // acceptable as a heuristic; confirm callers tolerate false
        // positives here.
        for id in self.dag.patch_ids() {
            if let Some(node) = self.dag.get_node(&id)
                && node.patch.target_path.as_deref() == Some(path)
            {
                return Ok(true);
            }
        }
        Ok(false)
    }
869
870 pub fn commit(&mut self, message: &str) -> Result<PatchId, RepoError> {
872 let old_head = self.head().map(|(_, id)| id).unwrap_or(Hash::ZERO);
873 let working_set = self.meta.working_set()?;
874
875 let staged: Vec<_> = working_set
876 .iter()
877 .filter(|(_, s)| {
878 matches!(
879 s,
880 FileStatus::Added | FileStatus::Modified | FileStatus::Deleted
881 )
882 })
883 .collect();
884
885 if staged.is_empty() {
886 return Err(RepoError::NothingToCommit);
887 }
888
889 let (branch_name, head_id) = self.head()?;
890 let is_merge_resolution = !self.pending_merge_parents.is_empty();
891
892 let parent_ids = if self.pending_merge_parents.is_empty() {
893 vec![head_id]
894 } else {
895 std::mem::take(&mut self.pending_merge_parents)
896 };
897
898 let _ = self
900 .meta
901 .conn()
902 .execute("DELETE FROM config WHERE key = 'pending_merge_parents'", []);
903
904 let mut file_changes = Vec::new();
906 for (path, status) in &staged {
907 let full_path = self.root.join(path);
908
909 let (op_type, payload) = match status {
910 FileStatus::Added => {
911 let data = fs::read(&full_path)?;
912 let hash = self.cas.put_blob(&data)?;
913 let payload = hash.to_hex().as_bytes().to_vec();
914 (OperationType::Create, payload)
915 }
916 FileStatus::Modified => {
917 let data = fs::read(&full_path)?;
918 let hash = self.cas.put_blob(&data)?;
919 let payload = hash.to_hex().as_bytes().to_vec();
920 (OperationType::Modify, payload)
921 }
922 FileStatus::Deleted => (OperationType::Delete, Vec::new()),
923 _ => continue,
924 };
925 file_changes.push(FileChange {
926 op: op_type,
927 path: path.clone(),
928 payload,
929 });
930 }
931
932 if file_changes.is_empty() {
933 return Err(RepoError::NothingToCommit);
934 }
935
936 let batch_patch = Patch::new_batch(
938 file_changes,
939 parent_ids.clone(),
940 self.author.clone(),
941 message.to_string(),
942 );
943
944 let patch_id = self.dag.add_patch(batch_patch.clone(), parent_ids)?;
945 self.meta.store_patch(&batch_patch)?;
946
947 for (path, _) in &staged {
949 let repo_path = RepoPath::new(path.clone())?;
950 self.meta.working_set_remove(&repo_path)?;
951 }
952
953 let branch = BranchName::new(&branch_name)?;
954 self.dag.update_branch(&branch, patch_id)?;
955 self.meta.set_branch(&branch, &patch_id)?;
956
957 if let Ok(tree) = self.snapshot_uncached(&patch_id) {
960 let tree_hash = tree.content_hash();
961 let _ = self.meta.set_config("head_tree_hash", &tree_hash.to_hex());
962 let _ = self.meta.store_file_tree(&patch_id, &tree);
963 }
964
965 self.invalidate_head_cache();
966
967 let _ = self.record_reflog(&old_head, &patch_id, &format!("commit: {}", message));
968
969 if is_merge_resolution {
971 }
974
975 Ok(patch_id)
976 }
977
978 pub fn has_uncommitted_changes(&self) -> Result<bool, RepoError> {
983 let working_set = self.meta.working_set()?;
984
985 let has_staged = working_set.iter().any(|(_, s)| {
986 matches!(
987 s,
988 FileStatus::Added | FileStatus::Modified | FileStatus::Deleted
989 )
990 });
991 if has_staged {
992 return Ok(true);
993 }
994
995 if let Ok(head_tree) = self.snapshot_head() {
996 for (path, hash) in head_tree.iter() {
997 let full_path = self.root.join(path);
998 if let Ok(data) = fs::read(&full_path) {
999 let current_hash = Hash::from_data(&data);
1000 if ¤t_hash != hash {
1001 return Ok(true);
1002 }
1003 } else {
1004 return Ok(true);
1005 }
1006 }
1007 }
1008
1009 Ok(false)
1010 }
1011
    /// Saves all uncommitted changes into a new stash entry and restores
    /// the working tree to HEAD. Returns the new stash index.
    ///
    /// Stash contents are stored as config keys `stash.<idx>.{message,
    /// head_branch, head_id, files}`, where `files` is a JSON list of
    /// `(path, Option<blob hex>)` pairs (None = deletion).
    pub fn stash_push(&mut self, message: Option<&str>) -> Result<usize, RepoError> {
        if !self.has_uncommitted_changes()? {
            return Err(RepoError::NothingToCommit);
        }

        let working_set = self.meta.working_set()?;
        let mut files: Vec<(String, Option<String>)> = Vec::new();

        // Capture explicitly staged entries: contents go to the CAS,
        // deletions (or unreadable files) are recorded as None.
        for (path, status) in &working_set {
            match status {
                FileStatus::Added | FileStatus::Modified => {
                    let full_path = self.root.join(path);
                    if let Ok(data) = fs::read(&full_path) {
                        let hash = self.cas.put_blob(&data)?;
                        files.push((path.clone(), Some(hash.to_hex())));
                    } else {
                        files.push((path.clone(), None));
                    }
                }
                FileStatus::Deleted => {
                    files.push((path.clone(), None));
                }
                _ => {}
            }
        }

        // Also capture unstaged modifications to tracked files that the
        // working set didn't mention.
        if let Ok(head_tree) = self.snapshot_head() {
            for (path, _hash) in head_tree.iter() {
                let full_path = self.root.join(path);
                if let Ok(data) = fs::read(&full_path) {
                    let current_hash = Hash::from_data(&data);
                    if &current_hash != _hash {
                        let already = files.iter().any(|(p, _)| p == path);
                        if !already {
                            let hash = self.cas.put_blob(&data)?;
                            files.push((path.clone(), Some(hash.to_hex())));
                        }
                    }
                }
            }
        }

        // Next free stash slot: first index whose message key is absent.
        let mut index: usize = 0;
        loop {
            let key = format!("stash.{}.message", index);
            if self.meta.get_config(&key)?.is_none() {
                break;
            }
            index += 1;
        }

        let (branch_name, head_id) = self.head()?;
        let msg = message.unwrap_or("WIP").to_string();
        let files_json = serde_json::to_string(&files).unwrap_or_else(|_| "[]".to_string());

        self.set_config(&format!("stash.{}.message", index), &msg)?;
        self.set_config(&format!("stash.{}.head_branch", index), &branch_name)?;
        self.set_config(&format!("stash.{}.head_id", index), &head_id.to_hex())?;
        self.set_config(&format!("stash.{}.files", index), &files_json)?;

        // Clear the staging area now that everything is captured.
        self.meta
            .conn()
            .execute("DELETE FROM working_set", [])
            .map_err(|e| RepoError::Meta(crate::metadata::MetaError::Database(e)))?;

        // Restore tracked files to their HEAD contents (best-effort).
        // NOTE(review): stashed Added files that are NOT in the HEAD tree
        // are left on disk here — presumably intentional, but it means the
        // working tree is not exactly HEAD after a push; confirm.
        if let Ok(head_tree) = self.snapshot_head() {
            let current_tree = head_tree;
            for (path, _) in current_tree.iter() {
                let full_path = self.root.join(path);
                if full_path.exists() {
                    let _ = fs::remove_file(&full_path);
                }
            }
            for (path, hash) in current_tree.iter() {
                let full_path = self.root.join(path);
                if let Some(parent) = full_path.parent() {
                    let _ = fs::create_dir_all(parent);
                }
                if let Ok(blob) = self.cas.get_blob(hash) {
                    let _ = fs::write(&full_path, &blob);
                }
            }
        }

        Ok(index)
    }
1098
1099 pub fn stash_pop(&mut self) -> Result<(), RepoError> {
1100 let stashes = self.stash_list()?;
1101 if stashes.is_empty() {
1102 return Err(RepoError::Custom("No stashes found".to_string()));
1103 }
1104 let highest = stashes
1105 .iter()
1106 .map(|s| s.index)
1107 .max()
1108 .expect("stash list is non-empty (checked above)");
1109 self.stash_apply(highest)?;
1110 self.stash_drop(highest)?;
1111 Ok(())
1112 }
1113
    /// Re-applies stash `index` to the working tree without dropping it.
    ///
    /// Restores each stashed file from the CAS (or deletes it when the
    /// stash recorded `None`) and re-stages it in the working set. Warns,
    /// but proceeds, if HEAD has moved since the stash was taken.
    pub fn stash_apply(&mut self, index: usize) -> Result<(), RepoError> {
        let files_key = format!("stash.{}.files", index);
        let files_json = self
            .meta
            .get_config(&files_key)?
            .ok_or_else(|| RepoError::Custom(format!("stash@{{{}}} not found", index)))?;

        let head_id_key = format!("stash.{}.head_id", index);
        let stash_head_id = self.meta.get_config(&head_id_key)?.unwrap_or_default();

        // Non-fatal staleness check against the HEAD recorded at push time.
        if let Ok((_, current_head_id)) = self.head()
            && current_head_id.to_hex() != stash_head_id
        {
            tracing::warn!(
                "Warning: HEAD has moved since stash@{{{}}} was created",
                index
            );
        }

        // Malformed JSON degrades to an empty list rather than an error.
        let files: Vec<(String, Option<String>)> =
            serde_json::from_str(&files_json).unwrap_or_default();

        for (path, hash_opt) in &files {
            let full_path = self.root.join(path);
            match hash_opt {
                // Stored content: write it back and stage as Modified.
                Some(hex_hash) => {
                    let hash = Hash::from_hex(hex_hash)
                        .map_err(|e| RepoError::Custom(format!("invalid hash in stash: {}", e)))?;
                    let blob = self.cas.get_blob(&hash)?;
                    if let Some(parent) = full_path.parent() {
                        fs::create_dir_all(parent)?;
                    }
                    fs::write(&full_path, &blob)?;
                    let repo_path = RepoPath::new(path.clone())?;
                    self.meta
                        .working_set_add(&repo_path, FileStatus::Modified)?;
                }
                // Recorded deletion: remove the file and stage as Deleted.
                None => {
                    if full_path.exists() {
                        fs::remove_file(&full_path)?;
                    }
                    let repo_path = RepoPath::new(path.clone())?;
                    self.meta.working_set_add(&repo_path, FileStatus::Deleted)?;
                }
            }
        }

        Ok(())
    }
1163
1164 pub fn stash_list(&self) -> Result<Vec<StashEntry>, RepoError> {
1165 let all_config = self.list_config()?;
1166 let mut entries = Vec::new();
1167
1168 for (key, value) in &all_config {
1169 if let Some(rest) = key.strip_prefix("stash.")
1170 && let Some(idx_str) = rest.strip_suffix(".message")
1171 && let Ok(idx) = idx_str.parse::<usize>()
1172 {
1173 let branch_key = format!("stash.{}.head_branch", idx);
1174 let head_id_key = format!("stash.{}.head_id", idx);
1175 let branch = self.meta.get_config(&branch_key)?.unwrap_or_default();
1176 let head_id = self.meta.get_config(&head_id_key)?.unwrap_or_default();
1177 entries.push(StashEntry {
1178 index: idx,
1179 message: value.clone(),
1180 branch,
1181 head_id,
1182 });
1183 }
1184 }
1185
1186 entries.sort_by_key(|e| e.index);
1187 Ok(entries)
1188 }
1189
1190 pub fn stash_drop(&mut self, index: usize) -> Result<(), RepoError> {
1191 let prefix = format!("stash.{}.", index);
1192 let all_config = self.list_config()?;
1193 let keys_to_delete: Vec<String> = all_config
1194 .iter()
1195 .filter(|(k, _)| k.starts_with(&prefix))
1196 .map(|(k, _)| k.clone())
1197 .collect();
1198
1199 if keys_to_delete.is_empty() {
1200 return Err(RepoError::Custom(format!("stash@{{{}}} not found", index)));
1201 }
1202
1203 for key in &keys_to_delete {
1204 self.meta
1205 .conn()
1206 .execute("DELETE FROM config WHERE key = ?1", rusqlite::params![key])
1207 .map_err(|e| RepoError::Meta(crate::metadata::MetaError::Database(e)))?;
1208 }
1209
1210 Ok(())
1211 }
1212
    /// Returns the file tree at HEAD, using a two-level cache.
    ///
    /// Resolution order: the in-memory `RefCell` snapshot; a tree persisted
    /// in the metadata store, accepted only when its content hash matches
    /// the stored `head_tree_hash` (or none is stored); finally a full
    /// rebuild via `snapshot_uncached`, which repopulates both caches.
    pub fn snapshot_head(&self) -> Result<FileTree, RepoError> {
        // Resolve HEAD directly (not via `head()`) and refresh the id/
        // branch caches as a side effect.
        let (branch_name, head_id) = {
            let branch_name = self.read_head_branch()?;
            let bn = BranchName::new(&branch_name)?;
            let target_id = self
                .dag
                .get_branch(&bn)
                .ok_or_else(|| RepoError::BranchNotFound(branch_name.clone()))?;
            (branch_name, target_id)
        };

        *self.cached_head_branch.borrow_mut() = Some(branch_name.clone());
        *self.cached_head_id.borrow_mut() = Some(head_id);

        if let Some(ref tree) = *self.cached_head_snapshot.borrow() {
            return Ok(tree.clone());
        }

        // Second level: tree persisted in the metadata store, validated
        // against the recorded content hash to detect staleness.
        if let Some(tree) = self
            .meta
            .load_file_tree(&head_id)
            .map_err(RepoError::Meta)?
        {
            let tree_hash = tree.content_hash();
            let stored_hash = self
                .meta
                .get_config("head_tree_hash")
                .ok()
                .flatten()
                .and_then(|h| Hash::from_hex(&h).ok());

            if stored_hash.is_none_or(|h| h == tree_hash) {
                if stored_hash.is_none() {
                    // Backfill the hash so future loads can validate.
                    let _ = self.meta.set_config("head_tree_hash", &tree_hash.to_hex());
                }

                *self.cached_head_snapshot.borrow_mut() = Some(tree.clone());
                return Ok(tree);
            }
        }

        // Cache miss or stale persisted tree: rebuild from the patch chain
        // and re-persist (best-effort).
        let tree = self.snapshot_uncached(&head_id)?;
        let tree_hash = tree.content_hash();

        let _ = self.meta.set_config("head_tree_hash", &tree_hash.to_hex());

        let _ = self.meta.store_file_tree(&head_id, &tree);

        *self.cached_head_snapshot.borrow_mut() = Some(tree.clone());
        Ok(tree)
    }
1282
1283 pub fn invalidate_head_cache(&self) {
1288 *self.cached_head_snapshot.borrow_mut() = None;
1289 *self.cached_head_id.borrow_mut() = None;
1290 *self.cached_head_branch.borrow_mut() = None;
1291 let _ = self
1292 .meta
1293 .conn()
1294 .execute("DELETE FROM config WHERE key = 'head_tree_hash'", []);
1295 }
1296
1297 fn snapshot_uncached(&self, patch_id: &PatchId) -> Result<FileTree, RepoError> {
1299 let mut chain = self.dag.patch_chain(patch_id);
1300 chain.reverse();
1302 let patches: Vec<Patch> = chain
1303 .iter()
1304 .filter_map(|id| self.dag.get_patch(id).cloned())
1305 .collect();
1306
1307 let tree = apply_patch_chain(&patches, resolve_payload_to_hash)?;
1308 Ok(tree)
1309 }
1310
1311 pub fn snapshot(&self, patch_id: &PatchId) -> Result<FileTree, RepoError> {
1315 if let Some(tree) = self
1317 .meta
1318 .load_file_tree(patch_id)
1319 .map_err(RepoError::Meta)?
1320 {
1321 return Ok(tree);
1322 }
1323 let tree = self.snapshot_uncached(patch_id)?;
1325 let _ = self.meta.store_file_tree(patch_id, &tree);
1326 Ok(tree)
1327 }
1328
    /// Bring the on-disk working directory from `old_tree` to the current
    /// HEAD snapshot: fetch added/modified blobs from the CAS (in parallel),
    /// write them out, apply deletes and renames, then prune files that
    /// vanished from the tree entirely.
    ///
    /// # Errors
    /// Propagates CAS lookup failures and filesystem I/O errors; the final
    /// prune pass is best-effort and ignores delete failures.
    pub fn sync_working_tree(&self, old_tree: &FileTree) -> Result<(), RepoError> {
        use rayon::prelude::*;

        let new_tree = self.snapshot_head()?;
        let diffs = diff_trees(old_tree, &new_tree);

        // Local borrows so the rayon closures below don't capture `self`.
        let cas = &self.cas;
        let root = &self.root;

        // Phase 1: fetch every added/modified blob from the CAS in parallel.
        // `collect` into Result short-circuits on the first CAS failure.
        let blob_results: Result<Vec<(String, Vec<u8>)>, CasError> = diffs
            .par_iter()
            .filter_map(|entry| {
                if let (DiffType::Added | DiffType::Modified, Some(new_hash)) =
                    (&entry.diff_type, &entry.new_hash)
                {
                    Some((entry.path.clone(), *new_hash))
                } else {
                    None
                }
            })
            .map(|(path, hash)| {
                let blob = cas.get_blob(&hash)?;
                Ok((path, blob))
            })
            .collect();

        let blobs: Vec<(String, Vec<u8>)> = blob_results?;

        // Phase 2: create parent directories serially first, so the
        // parallel writes below never race on directory creation.
        for (path, _) in &blobs {
            let full_path = root.join(path);
            if let Some(parent) = full_path.parent() {
                fs::create_dir_all(parent)?;
            }
        }

        // Phase 3: write all blobs to disk in parallel (paths are distinct,
        // so the writes are independent).
        blobs
            .par_iter()
            .map(|(path, data)| {
                let full_path = root.join(path);
                fs::write(&full_path, data).map_err(RepoError::Io)
            })
            .collect::<Result<Vec<()>, RepoError>>()?;

        // Phase 4: apply deletes and renames serially, after the writes.
        for entry in &diffs {
            let full_path = root.join(&entry.path);
            match &entry.diff_type {
                DiffType::Deleted => {
                    if full_path.exists() {
                        fs::remove_file(&full_path)?;
                    }
                }
                DiffType::Renamed { old_path, .. } => {
                    let old_full = root.join(old_path);
                    if old_full.exists() {
                        if let Some(parent) = full_path.parent() {
                            fs::create_dir_all(parent)?;
                        }
                        fs::rename(&old_full, &full_path)?;
                    }
                }
                DiffType::Added | DiffType::Modified => {
                    // Already handled by the parallel write phase above.
                }
            }
        }

        // Phase 5: best-effort prune of files the new tree no longer tracks.
        for (path, _) in old_tree.iter() {
            if !new_tree.contains(path) {
                let full_path = root.join(path);
                if full_path.exists() {
                    let _ = fs::remove_file(&full_path);
                }
            }
        }

        Ok(())
    }
1422
1423 pub fn checkout(&mut self, branch_name: &str) -> Result<FileTree, RepoError> {
1433 let old_head = self.head().map(|(_, id)| id).unwrap_or(Hash::ZERO);
1434 let old_branch = self.head().ok().map(|(n, _)| n);
1435 let target = BranchName::new(branch_name)?;
1436
1437 let target_id = self
1438 .dag
1439 .get_branch(&target)
1440 .ok_or_else(|| RepoError::BranchNotFound(branch_name.to_string()))?;
1441
1442 let has_changes = self.has_uncommitted_changes()?;
1443 if has_changes {
1444 self.stash_push(Some("auto-stash before checkout"))?;
1445 }
1446
1447 let target_tree = self.snapshot(&target_id)?;
1448
1449 let current_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());
1450
1451 let diffs = diff_trees(¤t_tree, &target_tree);
1452
1453 for entry in &diffs {
1454 let full_path = self.root.join(&entry.path);
1455 match &entry.diff_type {
1456 DiffType::Added | DiffType::Modified => {
1457 if let Some(new_hash) = &entry.new_hash {
1458 let blob = self.cas.get_blob(new_hash)?;
1459 if let Some(parent) = full_path.parent() {
1460 fs::create_dir_all(parent)?;
1461 }
1462 fs::write(&full_path, &blob)?;
1463 }
1464 }
1465 DiffType::Deleted => {
1466 if full_path.exists() {
1467 fs::remove_file(&full_path)?;
1468 }
1469 }
1470 DiffType::Renamed { old_path, .. } => {
1471 let old_full = self.root.join(old_path);
1472 if old_full.exists() {
1473 if let Some(parent) = full_path.parent() {
1474 fs::create_dir_all(parent)?;
1475 }
1476 fs::rename(&old_full, &full_path)?;
1477 }
1478 }
1479 }
1480 }
1481
1482 for (path, _) in current_tree.iter() {
1483 if !target_tree.contains(path) {
1484 let full_path = self.root.join(path);
1485 if full_path.exists() {
1486 let _ = fs::remove_file(&full_path);
1487 }
1488 }
1489 }
1490
1491 self.write_head_branch(branch_name)?;
1492
1493 self.invalidate_head_cache();
1494
1495 let _ = self.record_reflog(
1496 &old_head,
1497 &target_id,
1498 &format!(
1499 "checkout: moving from {} to {}",
1500 old_branch.as_deref().unwrap_or("HEAD"),
1501 branch_name
1502 ),
1503 );
1504
1505 if has_changes && let Err(e) = self.stash_pop() {
1506 tracing::warn!("Warning: could not restore stashed changes: {}", e);
1507 }
1508
1509 Ok(target_tree)
1510 }
1511
    /// Diff two revisions. With both `from` and `to` absent, diffs HEAD
    /// against the working directory. A `None` `from` means the empty tree;
    /// a `None` `to` means HEAD.
    ///
    /// Revision names are resolved in order: `HEAD` / `HEAD~N`, exact patch
    /// hash, unique hash prefix, tag, then branch name.
    pub fn diff(&self, from: Option<&str>, to: Option<&str>) -> Result<Vec<DiffEntry>, RepoError> {
        // Resolve a user-supplied revision string to a concrete patch id.
        let resolve_id = |name: &str| -> Result<PatchId, RepoError> {
            if name == "HEAD" || name.starts_with("HEAD~") {
                let (_, head_id) = self.head()?;
                let mut target_id = head_id;
                if let Some(n_str) = name.strip_prefix("HEAD~") {
                    let n: usize = n_str
                        .parse()
                        .map_err(|_| RepoError::Custom(format!("invalid HEAD~N: {}", name)))?;
                    // Walk N first-parent steps back from HEAD.
                    for _ in 0..n {
                        let patch = self.dag.get_patch(&target_id).ok_or_else(|| {
                            RepoError::Custom("HEAD ancestor not found".to_string())
                        })?;
                        target_id = patch
                            .parent_ids
                            .first()
                            .ok_or_else(|| RepoError::Custom("HEAD has no parent".to_string()))?
                            .to_owned();
                    }
                }
                return Ok(target_id);
            }
            // Exact full-hash match.
            if let Ok(hash) = Hash::from_hex(name)
                && self.dag.has_patch(&hash)
            {
                return Ok(hash);
            }
            // Unique hash-prefix match; more than one hit is an error,
            // zero hits falls through to tag/branch resolution.
            let all_patch_ids = self.dag.patch_ids();
            let prefix_matches: Vec<&PatchId> = all_patch_ids
                .iter()
                .filter(|id| id.to_hex().starts_with(name))
                .collect();
            match prefix_matches.len() {
                1 => return Ok(*prefix_matches[0]),
                0 => {}
                n => {
                    return Err(RepoError::Custom(format!(
                        "ambiguous ref '{}' matches {} commits",
                        name, n
                    )));
                }
            }
            // Tag lookup, then branch lookup as the last resort.
            if let Ok(Some(tag_id)) = self.resolve_tag(name) {
                return Ok(tag_id);
            }
            let bn = BranchName::new(name)?;
            self.dag
                .get_branch(&bn)
                .ok_or_else(|| RepoError::BranchNotFound(name.to_string()))
        };

        if from.is_none() && to.is_none() {
            // Default: HEAD vs. what is currently on disk.
            let head_tree = self.snapshot_head()?;
            let working_tree = self.build_working_tree()?;
            return Ok(diff_trees(&head_tree, &working_tree));
        }

        let old_tree = match from {
            Some(f) => self.snapshot(&resolve_id(f)?)?,
            None => FileTree::empty(),
        };

        let new_tree = match to {
            Some(t) => self.snapshot(&resolve_id(t)?)?,
            None => self.snapshot_head()?,
        };

        Ok(diff_trees(&old_tree, &new_tree))
    }
1596
1597 fn build_working_tree(&self) -> Result<FileTree, RepoError> {
1599 let mut tree = FileTree::empty();
1600 let entries = walk_dir(&self.root, &self.ignore_patterns)?;
1601 for entry in &entries {
1602 if let Ok(data) = fs::read(&entry.full_path) {
1603 let hash = Hash::from_data(&data);
1604 tree.insert(entry.relative.clone(), hash);
1605 }
1606 }
1607 Ok(tree)
1608 }
1609
1610 pub fn diff_staged(&self) -> Result<Vec<DiffEntry>, RepoError> {
1612 let head_tree = self.snapshot_head()?;
1613 let mut staged_tree = head_tree.clone();
1616 let working_set = self.meta.working_set()?;
1617 for (path, status) in &working_set {
1618 match status {
1619 FileStatus::Added | FileStatus::Modified => {
1620 let full_path = self.root.join(path);
1621 if let Ok(data) = fs::read(&full_path) {
1622 let hash = Hash::from_data(&data);
1623 staged_tree.insert(path.clone(), hash);
1624 }
1625 }
1626 FileStatus::Deleted => {
1627 staged_tree.remove(path);
1629 }
1630 _ => {}
1631 }
1632 }
1633 Ok(diff_trees(&head_tree, &staged_tree))
1634 }
1635
1636 pub fn reset(&mut self, target: &str, mode: ResetMode) -> Result<PatchId, RepoError> {
1648 let old_head = self.head().map(|(_, id)| id).unwrap_or(Hash::ZERO);
1649 let target_id = if target == "HEAD" {
1650 let (_, id) = self.head()?;
1651 id
1652 } else if let Some(rest) = target.strip_prefix("HEAD~") {
1653 let n: usize = rest
1654 .parse()
1655 .map_err(|_| RepoError::Custom(format!("invalid HEAD~N: {}", target)))?;
1656 let (_, head_id) = self.head()?;
1657 let mut current = head_id;
1658 for _ in 0..n {
1659 let patch = self
1660 .dag
1661 .get_patch(¤t)
1662 .ok_or_else(|| RepoError::Custom("HEAD ancestor not found".to_string()))?;
1663 current = patch
1664 .parent_ids
1665 .first()
1666 .ok_or_else(|| RepoError::Custom("HEAD has no parent".to_string()))?
1667 .to_owned();
1668 }
1669 current
1670 } else if let Ok(hash) = Hash::from_hex(target)
1671 && self.dag.has_patch(&hash)
1672 {
1673 hash
1674 } else {
1675 let bn = BranchName::new(target)?;
1676 self.dag
1677 .get_branch(&bn)
1678 .ok_or_else(|| RepoError::BranchNotFound(target.to_string()))?
1679 };
1680
1681 let (branch_name, _) = self.head()?;
1682 let old_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());
1683
1684 let branch = BranchName::new(&branch_name)?;
1685 self.dag.update_branch(&branch, target_id)?;
1686 self.meta.set_branch(&branch, &target_id)?;
1687 self.invalidate_head_cache();
1688
1689 match mode {
1690 ResetMode::Soft => {
1691 let new_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());
1692 let diffs = diff_trees(&new_tree, &old_tree);
1693 for entry in &diffs {
1694 match &entry.diff_type {
1695 DiffType::Added | DiffType::Modified => {
1696 let repo_path = RepoPath::new(entry.path.clone())?;
1697 self.meta
1698 .working_set_add(&repo_path, FileStatus::Modified)?;
1699 }
1700 DiffType::Deleted => {
1701 let repo_path = RepoPath::new(entry.path.clone())?;
1702 self.meta.working_set_add(&repo_path, FileStatus::Deleted)?;
1703 }
1704 DiffType::Renamed { old_path, .. } => {
1705 let repo_path = RepoPath::new(old_path.clone())?;
1706 self.meta.working_set_add(&repo_path, FileStatus::Deleted)?;
1707 let repo_path = RepoPath::new(entry.path.clone())?;
1708 self.meta.working_set_add(&repo_path, FileStatus::Added)?;
1709 }
1710 }
1711 }
1712 }
1713 ResetMode::Mixed | ResetMode::Hard => {
1714 self.meta
1715 .conn()
1716 .execute("DELETE FROM working_set", [])
1717 .map_err(|e| RepoError::Meta(crate::metadata::MetaError::Database(e)))?;
1718 if mode == ResetMode::Hard {
1719 self.sync_working_tree(&old_tree)?;
1720 }
1721 }
1722 }
1723
1724 let _ = self.record_reflog(
1725 &old_head,
1726 &target_id,
1727 &format!("reset: moving to {}", target),
1728 );
1729
1730 Ok(target_id)
1731 }
1732
    /// Create and apply a new patch that undoes `patch_id` on top of HEAD.
    ///
    /// Creates/modifies are reverted as deletes; deletes are reverted as
    /// modifies restoring the content hash from the patch's parent tree;
    /// batch patches revert each contained change the same way. Other
    /// operation types (identity, merge, …) cannot be reverted.
    ///
    /// # Errors
    /// Fails if the patch is missing, a batch is empty or fully
    /// unrevertable, or a delete's original content cannot be found.
    pub fn revert(
        &mut self,
        patch_id: &PatchId,
        message: Option<&str>,
    ) -> Result<PatchId, RepoError> {
        let patch = self
            .dag
            .get_patch(patch_id)
            .ok_or_else(|| RepoError::Custom(format!("patch not found: {}", patch_id)))?;

        let (branch_name, head_id) = self.head()?;
        let msg = message
            .map(|m| m.to_string())
            .unwrap_or_else(|| format!("Revert {}", patch_id));

        // Capture the pre-revert tree so sync_working_tree can diff later.
        let old_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());

        match &patch.operation_type {
            OperationType::Batch => {
                let changes = patch.file_changes().ok_or_else(|| {
                    RepoError::Custom("batch patch has invalid file changes".into())
                })?;
                if changes.is_empty() {
                    return Err(RepoError::Custom("cannot revert empty batch".into()));
                }
                // Parent tree supplies the original content for reverting
                // deletes; a root patch degrades to the empty tree.
                let parent_tree = patch
                    .parent_ids
                    .first()
                    .map(|pid| self.snapshot(pid).unwrap_or_else(|_| FileTree::empty()))
                    .unwrap_or_else(FileTree::empty);
                let mut revert_changes = Vec::new();
                for change in &changes {
                    match change.op {
                        OperationType::Create | OperationType::Modify => {
                            // Undo a create/modify by deleting the file.
                            revert_changes.push(FileChange {
                                op: OperationType::Delete,
                                path: change.path.clone(),
                                payload: Vec::new(),
                            });
                        }
                        OperationType::Delete => {
                            // Undo a delete by restoring the parent's blob
                            // (payload carries the hex content hash).
                            if let Some(hash) = parent_tree.get(&change.path) {
                                revert_changes.push(FileChange {
                                    op: OperationType::Modify,
                                    path: change.path.clone(),
                                    payload: hash.to_hex().as_bytes().to_vec(),
                                });
                            }
                        }
                        _ => {}
                    }
                }
                if revert_changes.is_empty() {
                    return Err(RepoError::Custom("nothing to revert in batch".into()));
                }
                let revert_patch =
                    Patch::new_batch(revert_changes, vec![head_id], self.author.clone(), msg);
                let revert_id = self.dag.add_patch(revert_patch.clone(), vec![head_id])?;
                self.meta.store_patch(&revert_patch)?;

                let branch = BranchName::new(&branch_name)?;
                self.dag.update_branch(&branch, revert_id)?;
                self.meta.set_branch(&branch, &revert_id)?;

                self.invalidate_head_cache();

                self.sync_working_tree(&old_tree)?;
                Ok(revert_id)
            }
            OperationType::Create | OperationType::Modify => {
                // Single-file create/modify reverts to a delete patch.
                let revert_patch = Patch::new(
                    OperationType::Delete,
                    patch.touch_set.clone(),
                    patch.target_path.clone(),
                    vec![],
                    vec![head_id],
                    self.author.clone(),
                    msg,
                );

                let revert_id = self.dag.add_patch(revert_patch.clone(), vec![head_id])?;
                self.meta.store_patch(&revert_patch)?;

                let branch = BranchName::new(&branch_name)?;
                self.dag.update_branch(&branch, revert_id)?;
                self.meta.set_branch(&branch, &revert_id)?;

                self.invalidate_head_cache();

                self.sync_working_tree(&old_tree)?;
                Ok(revert_id)
            }
            OperationType::Delete => {
                // Undo a delete by re-introducing the content hash taken
                // from the deleted patch's parent snapshot.
                if let Some(parent_id) = patch.parent_ids.first() {
                    let parent_tree = self.snapshot(parent_id)?;
                    if let Some(path) = &patch.target_path
                        && let Some(hash) = parent_tree.get(path)
                    {
                        let payload = hash.to_hex().as_bytes().to_vec();
                        let revert_patch = Patch::new(
                            OperationType::Modify,
                            patch.touch_set.clone(),
                            patch.target_path.clone(),
                            payload,
                            vec![head_id],
                            self.author.clone(),
                            msg,
                        );

                        let revert_id = self.dag.add_patch(revert_patch.clone(), vec![head_id])?;
                        self.meta.store_patch(&revert_patch)?;

                        let branch = BranchName::new(&branch_name)?;
                        self.dag.update_branch(&branch, revert_id)?;
                        self.meta.set_branch(&branch, &revert_id)?;

                        self.invalidate_head_cache();

                        self.sync_working_tree(&old_tree)?;
                        return Ok(revert_id);
                    }
                }
                Err(RepoError::Custom(
                    "cannot revert delete: original file content not found".into(),
                ))
            }
            _ => Err(RepoError::Custom(format!(
                "cannot revert {:?} patches",
                patch.operation_type
            ))),
        }
    }
1873
    /// Squash the most recent `count` patches on the current branch into a
    /// single composed patch with `message`, moving the branch to it.
    ///
    /// Requires `count >= 2` and at least `count + 1` patches on the branch
    /// (the root patch cannot be squashed away). Returns the new patch id.
    pub fn squash(&mut self, count: usize, message: &str) -> Result<PatchId, RepoError> {
        if count < 2 {
            return Err(RepoError::Custom(
                "need at least 2 patches to squash".into(),
            ));
        }

        let (branch_name, tip_id) = self.head()?;
        // patch_chain yields tip-first: chain[0] is the branch tip.
        let chain = self.dag().patch_chain(&tip_id);

        if chain.len() < count + 1 {
            return Err(RepoError::Custom(format!(
                "only {} patches on branch, cannot squash {}",
                chain.len(),
                count
            )));
        }

        // Collect the window oldest-first (reverse of tip-first order),
        // which is the order compose_chain expects.
        let mut to_squash = Vec::new();
        for i in (0..count).rev() {
            let pid = &chain[i];
            let patch = self
                .dag()
                .get_patch(pid)
                .ok_or_else(|| RepoError::Custom(format!("patch not found: {}", pid.to_hex())))?;
            to_squash.push(patch.clone());
        }

        // The composed patch is re-parented onto the parent of the oldest
        // squashed patch.
        let parent_of_first = *to_squash[0]
            .parent_ids
            .first()
            .ok_or_else(|| RepoError::Custom("cannot squash root patch".into()))?;

        let result = crate::patch::compose::compose_chain(&to_squash, &self.author, message)
            .map_err(|e| RepoError::Custom(e.to_string()))?;

        let new_id = self
            .dag_mut()
            .add_patch(result.patch.clone(), vec![parent_of_first])?;
        self.meta().store_patch(&result.patch)?;

        let branch = BranchName::new(&branch_name).map_err(|e| RepoError::Custom(e.to_string()))?;
        self.dag_mut().update_branch(&branch, new_id)?;
        self.meta().set_branch(&branch, &new_id)?;

        self.record_reflog(
            to_squash.last().map(|p| &p.id).unwrap_or(&parent_of_first),
            &new_id,
            &format!("squash: {} patches into one", count),
        )?;

        self.invalidate_head_cache();

        Ok(new_id)
    }
1938
1939 pub fn merge_plan(&self, branch_a: &str, branch_b: &str) -> Result<MergeResult, RepoError> {
1945 let ba = BranchName::new(branch_a)?;
1946 let bb = BranchName::new(branch_b)?;
1947 self.dag.merge_branches(&ba, &bb).map_err(RepoError::Dag)
1948 }
1949
    /// Dry-run a merge of `source_branch` into the current branch: compute
    /// the merged tree and any conflicts without touching the working
    /// directory, the DAG, or the branch pointers (`merge_patch_id` is
    /// always `None`).
    ///
    /// # Errors
    /// `MergeInProgress` if a conflicted merge is pending, `BranchNotFound`
    /// for an unknown source, or a custom error when no common ancestor
    /// exists.
    pub fn preview_merge(
        &self,
        source_branch: &str,
    ) -> Result<MergeExecutionResult, RepoError> {
        if !self.pending_merge_parents.is_empty() {
            return Err(RepoError::MergeInProgress);
        }

        let (head_branch, head_id) = self.head()?;
        let source_bn = BranchName::new(source_branch)?;
        let source_tip = self
            .dag
            .get_branch(&source_bn)
            .ok_or_else(|| RepoError::BranchNotFound(source_branch.to_string()))?;

        let head_bn = BranchName::new(&head_branch)?;

        let merge_result = self.dag.merge_branches(&head_bn, &source_bn)?;

        // Same tip on both sides: trivially clean, nothing to apply.
        if head_id == source_tip {
            return Ok(MergeExecutionResult {
                is_clean: true,
                merged_tree: self.snapshot_head()?,
                merge_patch_id: None,
                unresolved_conflicts: Vec::new(),
                patches_applied: 0,
            });
        }

        // No divergence on either side: also trivially clean.
        if merge_result.patches_b_only.is_empty() && merge_result.patches_a_only.is_empty() {
            return Ok(MergeExecutionResult {
                is_clean: true,
                merged_tree: self.snapshot_head()?,
                merge_patch_id: None,
                unresolved_conflicts: Vec::new(),
                patches_applied: 0,
            });
        }

        let patches_applied = merge_result.patches_b_only.len();
        let is_clean = merge_result.is_clean;

        if is_clean {
            // Clean merge preview: overlay the source side's diff
            // (relative to the common ancestor) onto HEAD's tree.
            let source_tree = self.snapshot(&source_tip).unwrap_or_else(|_| FileTree::empty());
            let lca_id = self
                .dag
                .lca(&head_id, &source_tip)
                .ok_or_else(|| RepoError::Custom("no common ancestor found".to_string()))?;
            let lca_tree = self.snapshot(&lca_id).unwrap_or_else(|_| FileTree::empty());
            let head_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());

            let source_diffs = diff_trees(&lca_tree, &source_tree);
            let mut merged_tree = head_tree.clone();
            for entry in &source_diffs {
                match &entry.diff_type {
                    DiffType::Added | DiffType::Modified => {
                        if let Some(new_hash) = &entry.new_hash {
                            merged_tree.insert(entry.path.clone(), *new_hash);
                        }
                    }
                    DiffType::Deleted => {
                        merged_tree.remove(&entry.path);
                    }
                    DiffType::Renamed { old_path, .. } => {
                        // NOTE(review): a rename carries the content over
                        // under `old_hash` — confirm `new_hash` isn't meant.
                        if let Some(old_hash) = entry.old_hash {
                            merged_tree.remove(old_path);
                            merged_tree.insert(entry.path.clone(), old_hash);
                        }
                    }
                }
            }

            Ok(MergeExecutionResult {
                is_clean: true,
                merged_tree,
                merge_patch_id: None, unresolved_conflicts: Vec::new(),
                patches_applied,
            })
        } else {
            // Conflicting preview: run a three-way hash comparison over
            // every path in HEAD, source, and ancestor trees.
            let head_tree = self.snapshot(&head_id).unwrap_or_else(|_| FileTree::empty());
            let source_tree = self.snapshot(&source_tip).unwrap_or_else(|_| FileTree::empty());
            let lca_id = self
                .dag
                .lca(&head_id, &source_tip)
                .ok_or_else(|| RepoError::Custom("no common ancestor found".to_string()))?;
            let lca_tree = self.snapshot(&lca_id).unwrap_or_else(|_| FileTree::empty());

            let mut all_paths = std::collections::HashSet::new();
            for path in head_tree.paths() {
                all_paths.insert(path);
            }
            for path in source_tree.paths() {
                all_paths.insert(path);
            }
            for path in lca_tree.paths() {
                all_paths.insert(path);
            }

            let mut unresolved_conflicts: Vec<ConflictInfo> = Vec::new();
            for path in &all_paths {
                let lca_hash = lca_tree.get(path).copied();
                let ours_hash = head_tree.get(path).copied();
                let theirs_hash = source_tree.get(path).copied();

                // Both sides agree: no conflict.
                if ours_hash == theirs_hash {
                    continue;
                }
                // Only one side changed relative to the ancestor: that
                // side wins cleanly, no conflict.
                if ours_hash == lca_hash || theirs_hash == lca_hash {
                    continue;
                }
                // Both sides diverged from the ancestor: real conflict.
                unresolved_conflicts.push(ConflictInfo {
                    path: path.to_string(),
                    our_patch_id: head_id,
                    their_patch_id: source_tip,
                    our_content_hash: ours_hash,
                    their_content_hash: theirs_hash,
                    base_content_hash: lca_hash,
                });
            }

            Ok(MergeExecutionResult {
                is_clean: false,
                merged_tree: self.snapshot_head()?,
                merge_patch_id: None,
                unresolved_conflicts,
                patches_applied,
            })
        }
    }
2106
    /// Merge `source_branch` into the current branch, mutating the DAG,
    /// branch pointers, and working directory.
    ///
    /// Trivial cases (same tip, or no divergence on either side) return a
    /// clean no-op result; otherwise dispatches to the clean or
    /// conflicting merge executor.
    ///
    /// # Errors
    /// `MergeInProgress` if a conflicted merge is already pending,
    /// `BranchNotFound` for an unknown source branch.
    pub fn execute_merge(
        &mut self,
        source_branch: &str,
    ) -> Result<MergeExecutionResult, RepoError> {
        if !self.pending_merge_parents.is_empty() {
            return Err(RepoError::MergeInProgress);
        }

        let (head_branch, head_id) = self.head()?;
        let source_bn = BranchName::new(source_branch)?;
        let source_tip = self
            .dag
            .get_branch(&source_bn)
            .ok_or_else(|| RepoError::BranchNotFound(source_branch.to_string()))?;

        let head_bn = BranchName::new(&head_branch)?;

        let merge_result = self.dag.merge_branches(&head_bn, &source_bn)?;

        // Same tip on both sides: nothing to merge.
        if head_id == source_tip {
            return Ok(MergeExecutionResult {
                is_clean: true,
                merged_tree: self.snapshot_head()?,
                merge_patch_id: None,
                unresolved_conflicts: Vec::new(),
                patches_applied: 0,
            });
        }

        // No divergence on either side: also a no-op.
        if merge_result.patches_b_only.is_empty() && merge_result.patches_a_only.is_empty() {
            return Ok(MergeExecutionResult {
                is_clean: true,
                merged_tree: self.snapshot_head()?,
                merge_patch_id: None,
                unresolved_conflicts: Vec::new(),
                patches_applied: 0,
            });
        }

        if merge_result.is_clean {
            self.execute_clean_merge(&head_id, &source_tip, &head_branch, &merge_result)
        } else {
            self.execute_conflicting_merge(
                &head_id,
                &source_tip,
                source_branch,
                &head_branch,
                &merge_result,
            )
        }
    }
2159
    /// Apply a conflict-free merge: replay the source side's diff (vs. the
    /// common ancestor) onto the working directory and HEAD tree, then
    /// record a two-parent merge patch and advance the branch.
    fn execute_clean_merge(
        &mut self,
        head_id: &PatchId,
        source_tip: &PatchId,
        head_branch: &str,
        merge_result: &MergeResult,
    ) -> Result<MergeExecutionResult, RepoError> {
        let head_tree = self.snapshot(head_id)?;
        let source_tree = self.snapshot(source_tip)?;
        let lca_id = self
            .dag
            .lca(head_id, source_tip)
            .ok_or_else(|| RepoError::Custom("no common ancestor found".to_string()))?;
        let lca_tree = self.snapshot(&lca_id).unwrap_or_else(|_| FileTree::empty());

        // Only the source side's changes need applying; HEAD's are already
        // on disk.
        let source_diffs = diff_trees(&lca_tree, &source_tree);
        let mut merged_tree = head_tree.clone();

        for entry in &source_diffs {
            let full_path = self.root.join(&entry.path);
            match &entry.diff_type {
                DiffType::Added | DiffType::Modified => {
                    if let Some(new_hash) = &entry.new_hash {
                        let blob = self.cas.get_blob(new_hash)?;
                        if let Some(parent) = full_path.parent() {
                            fs::create_dir_all(parent)?;
                        }
                        fs::write(&full_path, &blob)?;
                        merged_tree.insert(entry.path.clone(), *new_hash);
                    }
                }
                DiffType::Deleted => {
                    if full_path.exists() {
                        fs::remove_file(&full_path)?;
                    }
                    merged_tree.remove(&entry.path);
                }
                DiffType::Renamed { old_path, .. } => {
                    let old_full = self.root.join(old_path);
                    if old_full.exists() {
                        if let Some(parent) = full_path.parent() {
                            fs::create_dir_all(parent)?;
                        }
                        fs::rename(&old_full, &full_path)?;
                    }
                    // NOTE(review): the renamed entry is recorded under
                    // `old_hash` — confirm `new_hash` isn't intended here.
                    if let Some(old_hash) = entry.old_hash {
                        merged_tree.remove(old_path);
                        merged_tree.insert(entry.path.clone(), old_hash);
                    }
                }
            }
        }

        // Record the merge as an empty two-parent patch.
        // NOTE(review): the message interpolates `source_tip` (a patch id),
        // not the source branch name — confirm that's intentional.
        let merge_patch = Patch::new(
            OperationType::Merge,
            TouchSet::empty(),
            None,
            vec![],
            vec![*head_id, *source_tip],
            self.author.clone(),
            format!("Merge branch '{}' into {}", source_tip, head_branch),
        );

        let merge_id = self
            .dag
            .add_patch(merge_patch.clone(), vec![*head_id, *source_tip])?;
        self.meta.store_patch(&merge_patch)?;

        let branch = BranchName::new(head_branch)?;
        self.dag.update_branch(&branch, merge_id)?;
        self.meta.set_branch(&branch, &merge_id)?;

        self.invalidate_head_cache();

        Ok(MergeExecutionResult {
            is_clean: true,
            merged_tree,
            merge_patch_id: Some(merge_id),
            unresolved_conflicts: Vec::new(),
            patches_applied: merge_result.patches_b_only.len(),
        })
    }
2242
    /// Apply a conflicting merge: replay the source side's non-conflicting
    /// patches onto the working directory, write conflict-marker files for
    /// each conflicted path, and record the merge as pending (no merge
    /// patch is created until conflicts are resolved).
    fn execute_conflicting_merge(
        &mut self,
        head_id: &PatchId,
        source_tip: &PatchId,
        source_branch: &str,
        head_branch: &str,
        merge_result: &MergeResult,
    ) -> Result<MergeExecutionResult, RepoError> {
        let head_tree = self.snapshot(head_id)?;
        let source_tree = self.snapshot(source_tip)?;

        let lca_id = self
            .dag
            .lca(head_id, source_tip)
            .ok_or_else(|| RepoError::Custom("no common ancestor found".to_string()))?;
        let lca_tree = self.snapshot(&lca_id).unwrap_or_else(|_| FileTree::empty());

        // Patches involved in any conflict are excluded from auto-apply.
        let conflicting_patch_ids: HashSet<PatchId> = merge_result
            .conflicts
            .iter()
            .flat_map(|c| [c.patch_a_id, c.patch_b_id])
            .collect();

        let mut merged_tree = head_tree.clone();
        let mut patches_applied = 0;

        // Replay source-only, non-conflicting, non-identity/merge patches
        // directly onto disk and into the merged tree.
        for entry in &merge_result.patches_b_only {
            if conflicting_patch_ids.contains(entry) {
                continue;
            }
            if let Some(patch) = self.dag.get_patch(entry) {
                if patch.is_identity() || patch.operation_type == OperationType::Merge {
                    continue;
                }
                if let Some(path) = &patch.target_path {
                    let full_path = self.root.join(path);
                    match patch.operation_type {
                        OperationType::Create | OperationType::Modify => {
                            // Only apply if the payload resolves to a blob
                            // we actually have in the CAS.
                            if let Some(blob_hash) = resolve_payload_to_hash(patch)
                                && self.cas.has_blob(&blob_hash)
                            {
                                let blob = self.cas.get_blob(&blob_hash)?;
                                if let Some(parent) = full_path.parent() {
                                    fs::create_dir_all(parent)?;
                                }
                                fs::write(&full_path, &blob)?;
                                merged_tree.insert(path.clone(), blob_hash);
                            }
                        }
                        OperationType::Delete => {
                            if full_path.exists() {
                                fs::remove_file(&full_path)?;
                            }
                            merged_tree.remove(path);
                        }
                        _ => {}
                    }
                }
                patches_applied += 1;
            }
        }

        let mut unresolved_conflicts = Vec::new();

        // Write each conflicted file to disk with three-way merge markers
        // and store the marked-up content back into the CAS.
        for conflict in &merge_result.conflicts {
            let conflict_info =
                self.build_conflict_info(conflict, &head_tree, &source_tree, &lca_tree);
            if let Some(info) = conflict_info {
                let full_path = self.root.join(&info.path);
                if let Some(parent) = full_path.parent() {
                    fs::create_dir_all(parent)?;
                }
                let conflict_content =
                    self.write_conflict_markers(&info, source_branch, head_branch)?;
                fs::write(&full_path, conflict_content.as_bytes())?;
                let hash = self.cas.put_blob(conflict_content.as_bytes())?;
                merged_tree.insert(info.path.clone(), hash);
                unresolved_conflicts.push(info);
            }
        }

        // Mark the merge as in-progress; commit resolution will use these
        // parents. Persisting to config is best-effort.
        self.pending_merge_parents = vec![*head_id, *source_tip];

        let parents_json = serde_json::to_string(&self.pending_merge_parents).unwrap_or_default();
        let _ = self.meta.set_config("pending_merge_parents", &parents_json);

        Ok(MergeExecutionResult {
            is_clean: false,
            merged_tree,
            merge_patch_id: None,
            unresolved_conflicts,
            patches_applied,
        })
    }
2338
2339 fn build_conflict_info(
2340 &self,
2341 conflict: &Conflict,
2342 head_tree: &FileTree,
2343 source_tree: &FileTree,
2344 lca_tree: &FileTree,
2345 ) -> Option<ConflictInfo> {
2346 let patch_a = self.dag.get_patch(&conflict.patch_a_id)?;
2347 let patch_b = self.dag.get_patch(&conflict.patch_b_id)?;
2348
2349 let path = patch_a
2350 .target_path
2351 .clone()
2352 .or_else(|| patch_b.target_path.clone())
2353 .or_else(|| {
2354 conflict.conflict_addresses.first().cloned()
2356 })?;
2357
2358 let our_content_hash = head_tree.get(&path).copied();
2359 let their_content_hash = source_tree.get(&path).copied();
2360 let base_content_hash = lca_tree.get(&path).copied();
2361
2362 Some(ConflictInfo {
2363 path,
2364 our_patch_id: conflict.patch_a_id,
2365 their_patch_id: conflict.patch_b_id,
2366 our_content_hash,
2367 their_content_hash,
2368 base_content_hash,
2369 })
2370 }
2371
2372 fn write_conflict_markers(
2373 &self,
2374 info: &ConflictInfo,
2375 source_branch: &str,
2376 head_branch: &str,
2377 ) -> Result<String, RepoError> {
2378 let our_content = match info.our_content_hash {
2379 Some(hash) => String::from_utf8(self.cas.get_blob(&hash)?).unwrap_or_default(),
2380 None => String::new(),
2381 };
2382
2383 let their_content = match info.their_content_hash {
2384 Some(hash) => String::from_utf8(self.cas.get_blob(&hash)?).unwrap_or_default(),
2385 None => String::new(),
2386 };
2387
2388 let base_content = match info.base_content_hash {
2389 Some(hash) => Some(String::from_utf8(self.cas.get_blob(&hash)?).unwrap_or_default()),
2390 None => None,
2391 };
2392
2393 let merged = three_way_merge(
2394 base_content.as_deref(),
2395 &our_content,
2396 &their_content,
2397 head_branch,
2398 source_branch,
2399 );
2400
2401 match merged {
2402 Ok(content) => Ok(content),
2403 Err(conflict_lines) => {
2404 let mut result = String::new();
2405 for line in conflict_lines {
2406 result.push_str(&line);
2407 result.push('\n');
2408 }
2409 Ok(result)
2410 }
2411 }
2412 }
2413
    /// Re-apply `patch_id` on top of the current HEAD as a new patch,
    /// advancing the branch and syncing the working directory.
    ///
    /// Identity, merge, and create patches cannot be cherry-picked. If the
    /// re-parented patch already exists in the DAG, it is accepted only
    /// when it is already reachable from HEAD (idempotent pick).
    pub fn cherry_pick(&mut self, patch_id: &PatchId) -> Result<PatchId, RepoError> {
        let old_head = self.head().map(|(_, id)| id).unwrap_or(Hash::ZERO);
        let patch = self
            .dag
            .get_patch(patch_id)
            .ok_or_else(|| RepoError::Custom(format!("patch not found: {}", patch_id)))?;

        if patch.operation_type == OperationType::Identity
            || patch.operation_type == OperationType::Merge
            || patch.operation_type == OperationType::Create
        {
            return Err(RepoError::Custom(format!(
                "cannot cherry-pick {:?} patches",
                patch.operation_type
            )));
        }

        let (branch_name, head_id) = self.head()?;

        // Clone the patch's content with HEAD as the sole new parent.
        let new_patch = if patch.operation_type == OperationType::Batch {
            let changes = patch
                .file_changes()
                .ok_or_else(|| RepoError::Custom("batch patch has invalid file changes".into()))?;
            Patch::new_batch(
                changes,
                vec![head_id],
                self.author.clone(),
                patch.message.clone(),
            )
        } else {
            Patch::new(
                patch.operation_type.clone(),
                patch.touch_set.clone(),
                patch.target_path.clone(),
                patch.payload.clone(),
                vec![head_id],
                self.author.clone(),
                patch.message.clone(),
            )
        };

        let new_id = match self.dag.add_patch(new_patch.clone(), vec![head_id]) {
            Ok(id) => id,
            Err(DagError::DuplicatePatch(_)) => {
                // Duplicate: tolerate only if it's already an ancestor of
                // HEAD (the pick is then a no-op).
                let head_ancestors = self.dag.ancestors(&head_id);
                let new_patch_id = new_patch.id;
                if head_ancestors.contains(&new_patch_id) {
                    return Ok(new_patch_id);
                }
                return Err(RepoError::Custom(
                    "patch already exists in DAG and is not reachable from HEAD".to_string(),
                ));
            }
            Err(e) => return Err(RepoError::Dag(e)),
        };
        self.meta.store_patch(&new_patch)?;

        let branch = BranchName::new(&branch_name)?;
        // Capture the pre-pick tree before moving the branch pointer, so
        // sync_working_tree can diff old vs. new HEAD.
        let old_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());
        self.dag.update_branch(&branch, new_id)?;
        self.meta.set_branch(&branch, &new_id)?;

        self.invalidate_head_cache();

        // Reflog failures are non-fatal.
        let _ = self.record_reflog(&old_head, &new_id, &format!("cherry-pick: {}", patch_id));

        self.sync_working_tree(&old_tree)?;

        Ok(new_id)
    }
2492
    /// Rebase the current branch onto the tip of `target_branch`.
    ///
    /// Patches unique to the current branch (not reachable from the common
    /// ancestor) are replayed, oldest-first by timestamp, on top of the
    /// target tip. Merge/identity/create patches are skipped during replay.
    /// The working tree is refreshed afterwards and the move is recorded in
    /// the reflog (best-effort).
    pub fn rebase(&mut self, target_branch: &str) -> Result<RebaseResult, RepoError> {
        // Remember the pre-rebase HEAD for the reflog (ZERO when unset).
        let old_head = self.head().map(|(_, id)| id).unwrap_or(Hash::ZERO);
        let (head_branch, head_id) = self.head()?;
        let target_bn = BranchName::new(target_branch)?;
        let target_tip = self
            .dag
            .get_branch(&target_bn)
            .ok_or_else(|| RepoError::BranchNotFound(target_branch.to_string()))?;

        // Already at the target tip: nothing to do.
        if head_id == target_tip {
            return Ok(RebaseResult {
                patches_replayed: 0,
                new_tip: head_id,
            });
        }

        let lca_id = self
            .dag
            .lca(&head_id, &target_tip)
            .ok_or_else(|| RepoError::Custom("no common ancestor found".to_string()))?;

        // Fast-forward: HEAD is an ancestor of the target, so just move the
        // branch pointer and refresh the working tree.
        if lca_id == head_id {
            let branch = BranchName::new(&head_branch)?;
            let old_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());
            self.dag.update_branch(&branch, target_tip)?;
            self.meta.set_branch(&branch, &target_tip)?;
            self.invalidate_head_cache();

            self.sync_working_tree(&old_tree)?;

            return Ok(RebaseResult {
                patches_replayed: 0,
                new_tip: target_tip,
            });
        }

        // Everything at or below the LCA is shared history and must not be
        // replayed. (Despite the name, this set holds the LCA's ancestors.)
        let mut head_ancestors = self.dag.ancestors(&lca_id);
        head_ancestors.insert(lca_id);

        // Walk back from HEAD, collecting the patches unique to this branch.
        let mut to_replay: Vec<Patch> = Vec::new();
        let mut visited = HashSet::new();
        let mut stack = vec![head_id];

        while let Some(id) = stack.pop() {
            if visited.contains(&id) || head_ancestors.contains(&id) {
                continue;
            }
            visited.insert(id);
            if let Some(patch) = self.dag.get_patch(&id) {
                to_replay.push(patch.clone());
                for parent_id in &patch.parent_ids {
                    if !visited.contains(parent_id) {
                        stack.push(*parent_id);
                    }
                }
            }
        }

        // Replay oldest-first so history order is preserved.
        to_replay.sort_by_key(|p| p.timestamp);

        // Move the branch to the target tip first; replayed patches are then
        // stacked on top of it.
        let branch = BranchName::new(&head_branch)?;
        let old_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());
        self.dag.update_branch(&branch, target_tip)?;
        self.meta.set_branch(&branch, &target_tip)?;
        self.invalidate_head_cache();

        let mut current_parent = target_tip;
        let mut last_new_id = target_tip;
        let mut replayed = 0usize;

        for patch in &to_replay {
            // Structural patches (merge/identity/create) are not replayed.
            if patch.operation_type == OperationType::Merge
                || patch.operation_type == OperationType::Identity
                || patch.operation_type == OperationType::Create
            {
                continue;
            }

            // Re-create the patch on the new parent; batch patches keep their
            // file-change list, others keep their single payload.
            let new_patch = if patch.operation_type == OperationType::Batch {
                let changes = patch.file_changes().unwrap_or_default();
                Patch::new_batch(
                    changes,
                    vec![current_parent],
                    self.author.clone(),
                    patch.message.clone(),
                )
            } else {
                Patch::new(
                    patch.operation_type.clone(),
                    patch.touch_set.clone(),
                    patch.target_path.clone(),
                    patch.payload.clone(),
                    vec![current_parent],
                    self.author.clone(),
                    patch.message.clone(),
                )
            };

            let new_id = self
                .dag
                .add_patch(new_patch.clone(), vec![current_parent])?;
            self.meta.store_patch(&new_patch)?;

            last_new_id = new_id;
            current_parent = new_id;
            replayed += 1;
        }

        // Point the branch at the last replayed patch (or the target tip when
        // nothing was replayed) and refresh the working tree.
        self.dag.update_branch(&branch, last_new_id)?;
        self.meta.set_branch(&branch, &last_new_id)?;
        self.invalidate_head_cache();

        self.sync_working_tree(&old_tree)?;

        // Best-effort reflog entry; failure here is not fatal.
        let _ = self.record_reflog(
            &old_head,
            &last_new_id,
            &format!("rebase onto {}", target_branch),
        );

        Ok(RebaseResult {
            patches_replayed: replayed,
            new_tip: last_new_id,
        })
    }
2627
2628 pub fn commit_groups(&self, patches: &[Patch]) -> Vec<Vec<Patch>> {
2637 if patches.is_empty() {
2638 return Vec::new();
2639 }
2640
2641 let mut sorted: Vec<Patch> = patches.to_vec();
2643 sorted.sort_by_key(|p| p.timestamp);
2644
2645 let mut groups: Vec<Vec<Patch>> = Vec::new();
2646 let mut current_group: Vec<Patch> = Vec::new();
2647 let mut current_message: Option<String> = None;
2648
2649 for patch in &sorted {
2650 if patch.operation_type == OperationType::Merge
2652 || patch.operation_type == OperationType::Identity
2653 || patch.operation_type == OperationType::Create
2654 {
2655 continue;
2656 }
2657
2658 match ¤t_message {
2659 None => {
2660 current_message = Some(patch.message.clone());
2661 current_group.push(patch.clone());
2662 }
2663 Some(msg) if msg == &patch.message => {
2664 current_group.push(patch.clone());
2666 }
2667 Some(_) => {
2668 if !current_group.is_empty() {
2670 groups.push(std::mem::take(&mut current_group));
2671 }
2672 current_message = Some(patch.message.clone());
2673 current_group.push(patch.clone());
2674 }
2675 }
2676 }
2677
2678 if !current_group.is_empty() {
2679 groups.push(current_group);
2680 }
2681
2682 groups
2683 }
2684
2685 pub fn patches_since_base(&self, base: &PatchId) -> Vec<Patch> {
2690 let base_ancestors = self.dag.ancestors(base);
2691 let mut exclusion = base_ancestors;
2692 exclusion.insert(*base);
2693
2694 let (_, head_id) = self
2695 .head()
2696 .unwrap_or_else(|_| ("main".to_string(), Hash::ZERO));
2697 let chain = self.dag.patch_chain(&head_id);
2698
2699 chain
2700 .into_iter()
2701 .filter(|id| !exclusion.contains(id))
2702 .filter_map(|id| self.dag.get_patch(&id).cloned())
2703 .collect()
2704 }
2705
2706 pub fn generate_rebase_todo(&self, base: &PatchId) -> Result<String, RepoError> {
2710 let patches = self.patches_since_base(base);
2711 let groups = self.commit_groups(&patches);
2712
2713 let mut lines = vec![
2714 String::new(),
2715 "# Interactive Rebase TODO".to_string(),
2716 "#".to_string(),
2717 "# Commands:".to_string(),
2718 "# pick = use commit".to_string(),
2719 "# reword = use commit, but edit the commit message".to_string(),
2720 "# edit = use commit, but stop for amending".to_string(),
2721 "# squash = use commit, but meld into previous commit".to_string(),
2722 "# drop = remove commit".to_string(),
2723 String::new(),
2724 ];
2725
2726 for group in &groups {
2727 if let Some(patch) = group.first() {
2728 let short_hash = patch.id.to_hex().chars().take(8).collect::<String>();
2729 lines.push(format!("pick {} {}", short_hash, patch.message));
2730 }
2731 }
2732
2733 lines.push(String::new());
2734 Ok(lines.join("\n"))
2735 }
2736
2737 pub fn parse_rebase_todo(
2739 &self,
2740 todo_content: &str,
2741 base: &PatchId,
2742 ) -> Result<RebasePlan, RepoError> {
2743 let patches = self.patches_since_base(base);
2744 let groups = self.commit_groups(&patches);
2745
2746 let mut group_map: HashMap<String, (String, Vec<PatchId>)> = HashMap::new();
2748 for group in &groups {
2749 if let Some(first) = group.first() {
2750 let short_hash = first.id.to_hex().chars().take(8).collect::<String>();
2751 let patch_ids: Vec<PatchId> = group.iter().map(|p| p.id).collect();
2752 group_map.insert(short_hash, (first.message.clone(), patch_ids));
2753 }
2754 }
2755
2756 let mut entries = Vec::new();
2757
2758 for line in todo_content.lines() {
2759 let line = line.trim();
2760 if line.is_empty() || line.starts_with('#') {
2761 continue;
2762 }
2763
2764 let mut parts = line.splitn(3, ' ');
2765 let action_str = match parts.next() {
2766 Some(a) => a,
2767 None => continue,
2768 };
2769 let short_hash = match parts.next() {
2770 Some(h) => h,
2771 None => continue,
2772 };
2773 let message = parts.next().unwrap_or("").to_string();
2774
2775 let action = match action_str {
2776 "pick" | "p" => RebaseAction::Pick,
2777 "reword" | "r" => RebaseAction::Reword,
2778 "edit" | "e" => RebaseAction::Edit,
2779 "squash" | "s" => RebaseAction::Squash,
2780 "drop" | "d" => RebaseAction::Drop,
2781 _ => continue, };
2783
2784 let (group_message, patch_ids) = group_map
2786 .get(short_hash)
2787 .cloned()
2788 .unwrap_or_else(|| (message.clone(), Vec::new()));
2789
2790 let effective_message = if action == RebaseAction::Reword {
2792 message
2793 } else {
2794 group_message
2795 };
2796
2797 let commit_tip = patch_ids.last().copied().unwrap_or(Hash::ZERO);
2798
2799 entries.push(RebasePlanEntry {
2800 action,
2801 commit_tip,
2802 message: effective_message,
2803 patch_ids,
2804 });
2805 }
2806
2807 Ok(RebasePlan { entries })
2808 }
2809
    /// Execute a parsed interactive-rebase plan on top of `onto`.
    ///
    /// Entries are replayed in order. `drop` skips a commit entirely;
    /// `squash` accumulates the entry's message for the next replayed
    /// entry; `edit` stops after replaying the entry, persisting a rebase
    /// state so the rebase can later be aborted.
    ///
    /// NOTE(review): a `squash` entry's patches are *not* replayed — only
    /// its message is merged into the next entry. That drops the squashed
    /// commit's changes and melds forward rather than into the previous
    /// commit; confirm this is the intended semantics.
    pub fn rebase_interactive(
        &mut self,
        plan: &RebasePlan,
        onto: &PatchId,
    ) -> Result<PatchId, RepoError> {
        let old_head = self.head().map(|(_, id)| id).unwrap_or(Hash::ZERO);
        let (head_branch, _head_id) = self.head()?;

        // Move the branch to the base commit before replaying entries.
        let branch = BranchName::new(&head_branch)?;
        let old_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());
        self.dag.update_branch(&branch, *onto)?;
        self.meta.set_branch(&branch, onto)?;
        self.invalidate_head_cache();

        let mut current_parent = *onto;
        let mut last_new_id = *onto;
        // Message text carried over from preceding `squash` entries.
        let mut squash_message_acc: Option<String> = None;

        for entry in &plan.entries {
            match entry.action {
                RebaseAction::Drop => {
                    continue;
                }
                RebaseAction::Pick
                | RebaseAction::Reword
                | RebaseAction::Edit
                | RebaseAction::Squash => {
                    // Resolve the entry's patch ids; unknown ids are skipped.
                    let patches: Vec<Patch> = entry
                        .patch_ids
                        .iter()
                        .filter_map(|id| self.dag.get_patch(id).cloned())
                        .collect();

                    if patches.is_empty() {
                        continue;
                    }

                    let message = if entry.action == RebaseAction::Squash {
                        // Accumulate the message and move on without
                        // replaying this entry's patches (see NOTE above).
                        let mut msg = squash_message_acc.take().unwrap_or_default();
                        if !msg.is_empty() {
                            msg.push('\n');
                        }
                        msg.push_str(&entry.message);
                        squash_message_acc = Some(msg);
                        continue;
                    } else {
                        // Prepend any accumulated squash message.
                        if let Some(sq_msg) = squash_message_acc.take() {
                            let mut combined = sq_msg;
                            if !combined.is_empty() && !entry.message.is_empty() {
                                combined.push('\n');
                            }
                            combined.push_str(&entry.message);
                            combined
                        } else {
                            entry.message.clone()
                        }
                    };

                    // Replay each patch onto the new parent with the
                    // effective message; structural patches are skipped.
                    for patch in &patches {
                        if patch.operation_type == OperationType::Merge
                            || patch.operation_type == OperationType::Identity
                            || patch.operation_type == OperationType::Create
                        {
                            continue;
                        }

                        let new_patch = if patch.operation_type == OperationType::Batch {
                            let changes = patch.file_changes().unwrap_or_default();
                            Patch::new_batch(
                                changes,
                                vec![current_parent],
                                self.author.clone(),
                                message.clone(),
                            )
                        } else {
                            Patch::new(
                                patch.operation_type.clone(),
                                patch.touch_set.clone(),
                                patch.target_path.clone(),
                                patch.payload.clone(),
                                vec![current_parent],
                                self.author.clone(),
                                message.clone(),
                            )
                        };

                        let new_id = self
                            .dag
                            .add_patch(new_patch.clone(), vec![current_parent])?;
                        self.meta.store_patch(&new_patch)?;

                        last_new_id = new_id;
                        current_parent = new_id;
                    }

                    if entry.action == RebaseAction::Edit {
                        // Persist enough state to abort later, then stop so
                        // the user can amend.
                        // NOTE(review): `next_entry` and `plan` are stored
                        // as placeholders (0 / empty) — a resumed rebase
                        // cannot continue from here; confirm.
                        let state = RebaseState {
                            original_head: old_head,
                            original_branch: head_branch.clone(),
                            onto: *onto,
                            next_entry: 0,
                            current_parent,
                            squash_message: None,
                            plan: Vec::new(),
                        };
                        let _ = self.save_rebase_state(&state);
                        self.dag.update_branch(&branch, last_new_id)?;
                        self.meta.set_branch(&branch, &last_new_id)?;
                        self.invalidate_head_cache();
                        self.sync_working_tree(&old_tree)?;
                        return Ok(last_new_id);
                    }
                }
            }
        }

        // Finalize: point the branch at the last replayed patch and sync.
        self.dag.update_branch(&branch, last_new_id)?;
        self.meta.set_branch(&branch, &last_new_id)?;
        self.invalidate_head_cache();
        self.sync_working_tree(&old_tree)?;

        let _ = self.record_reflog(&old_head, &last_new_id, "interactive rebase");

        let _ = self.clear_rebase_state();

        Ok(last_new_id)
    }
2955
2956 fn save_rebase_state(&self, state: &RebaseState) -> Result<(), RepoError> {
2958 let serialized = serde_json::to_string(state)
2959 .map_err(|e| RepoError::Custom(format!("failed to serialize rebase state: {}", e)))?;
2960 self.meta
2961 .set_config("rebase_state", &serialized)
2962 .map_err(RepoError::Meta)?;
2963 Ok(())
2964 }
2965
2966 pub fn load_rebase_state(&self) -> Result<Option<RebaseState>, RepoError> {
2968 match self
2969 .meta
2970 .get_config("rebase_state")
2971 .map_err(RepoError::Meta)?
2972 {
2973 Some(json) => {
2974 let state: RebaseState = serde_json::from_str(&json).map_err(|e| {
2975 RepoError::Custom(format!("failed to parse rebase state: {}", e))
2976 })?;
2977 Ok(Some(state))
2978 }
2979 None => Ok(None),
2980 }
2981 }
2982
2983 fn clear_rebase_state(&self) -> Result<(), RepoError> {
2985 let _ = self
2986 .meta
2987 .conn()
2988 .execute("DELETE FROM config WHERE key = 'rebase_state'", []);
2989 Ok(())
2990 }
2991
    /// Abort an in-progress interactive rebase, restoring the branch to the
    /// head recorded when the rebase started.
    pub fn rebase_abort(&mut self) -> Result<(), RepoError> {
        let state = self
            .load_rebase_state()?
            .ok_or_else(|| RepoError::Custom("no rebase in progress".to_string()))?;

        // Move the original branch back to its pre-rebase head and refresh
        // the working tree from the current (partial) state.
        let branch = BranchName::new(&state.original_branch)?;
        let old_tree = self.snapshot_head().unwrap_or_else(|_| FileTree::empty());
        self.dag.update_branch(&branch, state.original_head)?;
        self.meta.set_branch(&branch, &state.original_head)?;
        self.invalidate_head_cache();
        self.sync_working_tree(&old_tree)?;

        // Best-effort reflog entry; failure to record is not fatal.
        let _ = self.record_reflog(
            &state.current_parent,
            &state.original_head,
            "rebase --abort",
        );

        self.clear_rebase_state()?;
        Ok(())
    }
3016
3017 pub fn blame(&self, path: &str) -> Result<Vec<BlameEntry>, RepoError> {
3025 let head_tree = self.snapshot_head()?;
3026 let hash = head_tree
3027 .get(path)
3028 .ok_or_else(|| RepoError::Custom(format!("file not found in HEAD: {}", path)))?;
3029
3030 let blob = self.cas.get_blob(hash)?;
3031 let content = String::from_utf8_lossy(&blob);
3032 let lines: Vec<&str> = content.lines().collect();
3033
3034 let (_, head_id) = self.head()?;
3035 let chain = self.dag.patch_chain(&head_id);
3036
3037 let mut patches: Vec<Patch> = chain
3038 .iter()
3039 .filter_map(|id| self.dag.get_patch(id).cloned())
3040 .collect();
3041 patches.reverse();
3042
3043 let mut line_author: Vec<Option<(PatchId, String, String)>> = vec![None; lines.len()];
3044 let mut current_lines: Vec<String> = Vec::new();
3045
3046 for patch in &patches {
3047 match &patch.operation_type {
3048 OperationType::Batch => {
3049 if let Some(changes) = patch.file_changes()
3050 && let Some(change) = changes.iter().find(|c| c.path == path)
3051 {
3052 match change.op {
3053 OperationType::Create | OperationType::Modify => {
3054 let payload_hex = String::from_utf8_lossy(&change.payload);
3055 let new_content =
3056 if let Ok(blob_hash) = Hash::from_hex(&payload_hex) {
3057 if let Ok(blob_data) = self.cas.get_blob(&blob_hash) {
3058 String::from_utf8_lossy(&blob_data).to_string()
3059 } else {
3060 continue;
3061 }
3062 } else {
3063 continue;
3064 };
3065
3066 let old_refs: Vec<&str> =
3067 current_lines.iter().map(|s| s.as_str()).collect();
3068 let new_refs: Vec<&str> = new_content.lines().collect();
3069 let changes_diff =
3070 crate::engine::merge::diff_lines(&old_refs, &new_refs);
3071
3072 let mut new_line_author: Vec<Option<(PatchId, String, String)>> =
3073 Vec::new();
3074 let mut old_idx = 0usize;
3075
3076 for change_diff in &changes_diff {
3077 match change_diff {
3078 crate::engine::merge::LineChange::Unchanged(clines) => {
3079 for i in 0..clines.len() {
3080 if old_idx + i < line_author.len() {
3081 new_line_author
3082 .push(line_author[old_idx + i].clone());
3083 } else {
3084 new_line_author.push(None);
3085 }
3086 }
3087 old_idx += clines.len();
3088 }
3089 crate::engine::merge::LineChange::Deleted(clines) => {
3090 old_idx += clines.len();
3091 }
3092 crate::engine::merge::LineChange::Inserted(clines) => {
3093 for _ in 0..clines.len() {
3094 new_line_author.push(Some((
3095 patch.id,
3096 patch.message.clone(),
3097 patch.author.clone(),
3098 )));
3099 }
3100 }
3101 }
3102 }
3103
3104 line_author = new_line_author;
3105 current_lines =
3106 new_content.lines().map(|s| s.to_string()).collect();
3107 }
3108 OperationType::Delete => {
3109 line_author.clear();
3110 current_lines.clear();
3111 break;
3112 }
3113 _ => {}
3114 }
3115 }
3116 }
3117 _ => {
3118 let targets_file = patch.target_path.as_deref() == Some(path);
3119
3120 match patch.operation_type {
3121 OperationType::Create | OperationType::Modify if targets_file => {
3122 let new_content = if !patch.payload.is_empty() {
3123 let payload_hex = String::from_utf8_lossy(&patch.payload);
3124 if let Ok(blob_hash) = Hash::from_hex(&payload_hex) {
3125 if let Ok(blob_data) = self.cas.get_blob(&blob_hash) {
3126 String::from_utf8_lossy(&blob_data).to_string()
3127 } else {
3128 continue;
3129 }
3130 } else {
3131 continue;
3132 }
3133 } else {
3134 continue;
3135 };
3136
3137 let old_refs: Vec<&str> =
3138 current_lines.iter().map(|s| s.as_str()).collect();
3139 let new_refs: Vec<&str> = new_content.lines().collect();
3140 let changes = crate::engine::merge::diff_lines(&old_refs, &new_refs);
3141
3142 let mut new_line_author: Vec<Option<(PatchId, String, String)>> =
3143 Vec::new();
3144 let mut old_idx = 0usize;
3145
3146 for change in &changes {
3147 match change {
3148 crate::engine::merge::LineChange::Unchanged(clines) => {
3149 for i in 0..clines.len() {
3150 if old_idx + i < line_author.len() {
3151 new_line_author
3152 .push(line_author[old_idx + i].clone());
3153 } else {
3154 new_line_author.push(None);
3155 }
3156 }
3157 old_idx += clines.len();
3158 }
3159 crate::engine::merge::LineChange::Deleted(clines) => {
3160 old_idx += clines.len();
3161 }
3162 crate::engine::merge::LineChange::Inserted(clines) => {
3163 for _ in 0..clines.len() {
3164 new_line_author.push(Some((
3165 patch.id,
3166 patch.message.clone(),
3167 patch.author.clone(),
3168 )));
3169 }
3170 }
3171 }
3172 }
3173
3174 line_author = new_line_author;
3175 current_lines = new_content.lines().map(|s| s.to_string()).collect();
3176 }
3177 OperationType::Delete if targets_file => {
3178 line_author.clear();
3179 current_lines.clear();
3180 break;
3181 }
3182 _ => {}
3183 }
3184 }
3185 }
3186 }
3187
3188 let mut result = Vec::new();
3189 for (i, entry) in line_author.iter().enumerate() {
3190 let line_content = lines.get(i).unwrap_or(&"").to_string();
3191 if let Some((pid, msg, author)) = entry {
3192 result.push(BlameEntry {
3193 patch_id: *pid,
3194 message: msg.clone(),
3195 author: author.clone(),
3196 line: line_content,
3197 line_number: i + 1,
3198 });
3199 } else {
3200 result.push(BlameEntry {
3201 patch_id: Hash::ZERO,
3202 message: String::new(),
3203 author: String::new(),
3204 line: line_content,
3205 line_number: i + 1,
3206 });
3207 }
3208 }
3209
3210 Ok(result)
3211 }
3212
3213 pub fn log(&self, branch: Option<&str>) -> Result<Vec<Patch>, RepoError> {
3219 let target_id = match branch {
3220 Some(name) => {
3221 let bn = BranchName::new(name)?;
3222 self.dag
3223 .get_branch(&bn)
3224 .ok_or_else(|| RepoError::BranchNotFound(name.to_string()))?
3225 }
3226 None => {
3227 let (_, id) = self.head()?;
3228 id
3229 }
3230 };
3231
3232 let chain = self.dag.patch_chain(&target_id);
3233 let mut patches = Vec::new();
3234 for id in chain {
3235 if let Some(node) = self.dag.get_node(&id) {
3236 patches.push(node.patch.clone());
3237 }
3238 }
3239 Ok(patches)
3240 }
3241
3242 pub fn log_all(&self, branch: Option<&str>) -> Result<Vec<Patch>, RepoError> {
3245 let target_id = match branch {
3246 Some(name) => {
3247 let bn = BranchName::new(name)?;
3248 self.dag
3249 .get_branch(&bn)
3250 .ok_or_else(|| RepoError::BranchNotFound(name.to_string()))?
3251 }
3252 None => {
3253 let (_, id) = self.head()?;
3254 id
3255 }
3256 };
3257
3258 let mut patches = self.dag.reachable_patches(&target_id);
3259 patches.sort_by(|a, b| b.timestamp.cmp(&a.timestamp).then_with(|| a.id.cmp(&b.id)));
3260 Ok(patches)
3261 }
3262
    /// Filesystem root of this repository's working tree.
    pub fn root(&self) -> &Path {
        &self.root
    }
3271
    /// Shared access to the patch DAG.
    pub fn dag(&self) -> &PatchDag {
        &self.dag
    }
3276
    /// Mutable access to the patch DAG.
    pub fn dag_mut(&mut self) -> &mut PatchDag {
        &mut self.dag
    }
3281
    /// Access to the metadata store backing this repository.
    pub fn meta(&self) -> &crate::metadata::MetadataStore {
        &self.meta
    }
3286
    /// Access to the content-addressed blob store.
    pub fn cas(&self) -> &BlobStore {
        &self.cas
    }
3291
3292 pub fn add_remote(&self, name: &str, url: &str) -> Result<(), RepoError> {
3299 let key = format!("remote.{}.url", name);
3300 self.meta.set_config(&key, url).map_err(RepoError::Meta)
3301 }
3302
3303 pub fn list_remotes(&self) -> Result<Vec<(String, String)>, RepoError> {
3305 let mut remotes = Vec::new();
3306 for (key, value) in self.meta.list_config()? {
3307 if let Some(name) = key
3308 .strip_prefix("remote.")
3309 .and_then(|n| n.strip_suffix(".url"))
3310 {
3311 remotes.push((name.to_string(), value));
3312 }
3313 }
3314 Ok(remotes)
3315 }
3316
3317 pub fn remove_remote(&self, name: &str) -> Result<(), RepoError> {
3319 let key = format!("remote.{}.url", name);
3320 if self.meta.get_config(&key)?.is_none() {
3321 return Err(RepoError::Custom(format!("remote '{}' not found", name)));
3322 }
3323 self.meta.delete_config(&key)?;
3324 if let Ok(Some(_)) = self
3325 .meta
3326 .get_config(&format!("remote.{}.last_pushed", name))
3327 {
3328 self.meta
3329 .delete_config(&format!("remote.{}.last_pushed", name))?;
3330 }
3331 Ok(())
3332 }
3333
    /// Whether this instance is a linked worktree (as opposed to the main
    /// repository).
    pub fn is_worktree(&self) -> bool {
        self.is_worktree
    }
3342
    /// Create a linked worktree named `name` at `path`, checked out to
    /// `branch` (defaults to "main").
    ///
    /// The new worktree shares the main repository's metadata DB, object
    /// store, and keys via symlinks, so this is only supported on Unix.
    pub fn add_worktree(
        &mut self,
        name: &str,
        path: &Path,
        branch: Option<&str>,
    ) -> Result<(), RepoError> {
        // Reject names that could escape the config namespace or the fs.
        if name.is_empty()
            || name.contains('/')
            || name.contains('\\')
            || name.contains("..")
            || name.contains('\0')
        {
            return Err(RepoError::Custom("invalid worktree name".into()));
        }
        if path.exists() {
            return Err(RepoError::Custom(format!(
                "path '{}' already exists",
                path.display()
            )));
        }
        // Worktrees must be created from the main repository.
        if self.is_worktree {
            return Err(RepoError::Custom(
                "cannot add worktree from a linked worktree; use the main repo".into(),
            ));
        }

        let abs_path = if path.is_relative() {
            std::env::current_dir()?.join(path)
        } else {
            path.to_path_buf()
        };

        fs::create_dir_all(&abs_path)?;
        let new_suture_dir = abs_path.join(".suture");
        fs::create_dir_all(&new_suture_dir)?;

        // Share state with the main repository via symlinks (Unix only).
        #[cfg(unix)]
        {
            std::os::unix::fs::symlink(
                self.suture_dir.join("metadata.db"),
                new_suture_dir.join("metadata.db"),
            )?;
            if self.suture_dir.join("objects").exists() {
                std::os::unix::fs::symlink(
                    self.suture_dir.join("objects"),
                    new_suture_dir.join("objects"),
                )?;
            }
            if self.suture_dir.join("keys").exists() {
                std::os::unix::fs::symlink(
                    self.suture_dir.join("keys"),
                    new_suture_dir.join("keys"),
                )?;
            }
        }
        #[cfg(not(unix))]
        {
            // NOTE(review): the directories created above are left behind on
            // this early return — consider cleaning them up.
            return Err(RepoError::Unsupported(
                "worktrees require symlink support (Unix only)".into(),
            ));
        }

        // Marker file pointing back at the main repository root.
        fs::write(
            new_suture_dir.join("worktree"),
            self.root.to_string_lossy().as_ref(),
        )?;

        let branch_name = branch.unwrap_or("main");
        fs::write(new_suture_dir.join("HEAD"), branch_name)?;

        // Register the worktree in the shared config.
        self.set_config(
            &format!("worktree.{}.path", name),
            &abs_path.to_string_lossy(),
        )?;
        self.set_config(&format!("worktree.{}.branch", name), branch_name)?;

        // Materialize the worktree's files by checking out the branch.
        let mut wt_repo = Repository::open(&abs_path)?;
        wt_repo.checkout(branch_name)?;

        Ok(())
    }
3425
3426 pub fn list_worktrees(&self) -> Result<Vec<WorktreeEntry>, RepoError> {
3428 let mut worktrees = Vec::new();
3429
3430 let main_branch = self
3431 .head()
3432 .map(|(n, _)| n)
3433 .unwrap_or_else(|_| "main".to_string());
3434 worktrees.push(WorktreeEntry {
3435 name: String::new(),
3436 path: self.root.to_string_lossy().to_string(),
3437 branch: main_branch,
3438 is_main: true,
3439 });
3440
3441 let config = self.list_config()?;
3442 let mut names: Vec<&str> = Vec::new();
3443 for (key, _value) in &config {
3444 if let Some(n) = key
3445 .strip_prefix("worktree.")
3446 .and_then(|n| n.strip_suffix(".path"))
3447 {
3448 names.push(n);
3449 }
3450 }
3451 names.sort();
3452
3453 for name in names {
3454 let path_key = format!("worktree.{}.path", name);
3455 let branch_key = format!("worktree.{}.branch", name);
3456 let path_val = self
3457 .meta
3458 .get_config(&path_key)
3459 .unwrap_or(None)
3460 .unwrap_or_default();
3461 let branch_val = self
3462 .meta
3463 .get_config(&branch_key)
3464 .unwrap_or(None)
3465 .unwrap_or_default();
3466 worktrees.push(WorktreeEntry {
3467 name: name.to_string(),
3468 path: path_val,
3469 branch: branch_val,
3470 is_main: false,
3471 });
3472 }
3473
3474 Ok(worktrees)
3475 }
3476
3477 pub fn remove_worktree(&mut self, name: &str) -> Result<(), RepoError> {
3480 let path_key = format!("worktree.{}.path", name);
3481 let path_val = self
3482 .meta
3483 .get_config(&path_key)?
3484 .ok_or_else(|| RepoError::Custom(format!("worktree '{}' not found", name)))?;
3485
3486 let wt_path = Path::new(&path_val);
3487 if wt_path.exists() {
3488 fs::remove_dir_all(wt_path)?;
3489 }
3490
3491 self.meta.delete_config(&path_key)?;
3492 self.meta
3493 .delete_config(&format!("worktree.{}.branch", name))?;
3494
3495 Ok(())
3496 }
3497
3498 pub fn rename_file(&self, old_path: &str, new_path: &str) -> Result<(), RepoError> {
3501 let old = self.root.join(old_path);
3502 let new = self.root.join(new_path);
3503
3504 if !old.exists() {
3505 return Err(RepoError::Custom(format!("path not found: {}", old_path)));
3506 }
3507
3508 if new.exists() {
3509 return Err(RepoError::Custom(format!(
3510 "path already exists: {}",
3511 new_path
3512 )));
3513 }
3514
3515 fs::rename(old, new).map_err(|e| RepoError::Custom(format!("rename failed: {}", e)))?;
3516
3517 self.add(old_path)?;
3518 self.add(new_path)?;
3519
3520 Ok(())
3521 }
3522
3523 pub fn get_remote_url(&self, name: &str) -> Result<String, RepoError> {
3525 let key = format!("remote.{}.url", name);
3526 self.meta
3527 .get_config(&key)
3528 .unwrap_or(None)
3529 .ok_or_else(|| RepoError::Custom(format!("remote '{}' not found", name)))
3530 }
3531
3532 pub fn all_patches(&self) -> Vec<Patch> {
3534 self.dag
3535 .patch_ids()
3536 .iter()
3537 .filter_map(|id| self.dag.get_patch(id).cloned())
3538 .collect()
3539 }
3540
    /// Garbage-collect patches unreachable from any branch tip.
    ///
    /// Reachability is computed from every branch head; unreachable patch
    /// rows (and their signature and edge rows) are deleted from SQLite.
    ///
    /// NOTE(review): this prunes only the metadata DB — the in-memory DAG
    /// and any CAS blobs referenced solely by removed patches are left
    /// untouched; confirm that is intended.
    pub fn gc(&self) -> Result<GcResult, RepoError> {
        let branches = self.dag.list_branches();
        let all_ids: HashSet<PatchId> = self.dag.patch_ids().into_iter().collect();

        // Mark phase: a patch survives if any branch tip can reach it.
        let mut reachable: HashSet<PatchId> = HashSet::new();
        for (_name, tip_id) in &branches {
            reachable.insert(*tip_id);
            for anc in self.dag.ancestors(tip_id) {
                reachable.insert(anc);
            }
        }

        let unreachable: Vec<&PatchId> = all_ids
            .iter()
            .filter(|id| !reachable.contains(id))
            .collect();
        let conn = self.meta().conn();

        // Sweep phase: delete dependent rows before the patch row itself.
        for id in &unreachable {
            let hex = id.to_hex();
            conn.execute(
                "DELETE FROM signatures WHERE patch_id = ?1",
                rusqlite::params![hex],
            )
            .map_err(|e| RepoError::Custom(e.to_string()))?;
            conn.execute(
                "DELETE FROM edges WHERE parent_id = ?1 OR child_id = ?1",
                rusqlite::params![hex],
            )
            .map_err(|e| RepoError::Custom(e.to_string()))?;
            conn.execute("DELETE FROM patches WHERE id = ?1", rusqlite::params![hex])
                .map_err(|e| RepoError::Custom(e.to_string()))?;
        }

        Ok(GcResult {
            patches_removed: unreachable.len(),
        })
    }
3588
3589 pub fn fsck(&self) -> Result<FsckResult, RepoError> {
3599 let mut checks_passed = 0usize;
3600 let mut warnings = Vec::new();
3601 let mut errors = Vec::new();
3602
3603 let all_ids: HashSet<PatchId> = self.dag.patch_ids().into_iter().collect();
3605 let mut parent_ok = true;
3606 for id in &all_ids {
3607 if let Some(node) = self.dag.get_node(id) {
3608 for parent_id in &node.parent_ids {
3609 if !all_ids.contains(parent_id) {
3610 errors.push(format!(
3611 "patch {} references missing parent {}",
3612 id.to_hex(),
3613 parent_id.to_hex()
3614 ));
3615 parent_ok = false;
3616 }
3617 }
3618 }
3619 }
3620 if parent_ok {
3621 checks_passed += 1;
3622 }
3623
3624 let branches = self.dag.list_branches();
3626 let mut branch_ok = true;
3627 for (name, target_id) in &branches {
3628 if !all_ids.contains(target_id) {
3629 errors.push(format!(
3630 "branch '{}' targets non-existent patch {}",
3631 name,
3632 target_id.to_hex()
3633 ));
3634 branch_ok = false;
3635 }
3636 }
3637 if branch_ok {
3638 checks_passed += 1;
3639 }
3640
3641 let mut blob_ok = true;
3643 let all_patches = self.all_patches();
3644 for patch in &all_patches {
3645 if patch.is_batch() {
3646 if let Some(changes) = patch.file_changes() {
3647 for change in &changes {
3648 if change.payload.is_empty() {
3649 continue;
3650 }
3651 let hex = String::from_utf8_lossy(&change.payload);
3652 if let Ok(hash) = Hash::from_hex(&hex)
3653 && !self.cas().has_blob(&hash)
3654 {
3655 warnings.push(format!(
3656 "batch patch {} references missing blob {} for path {}",
3657 patch.id.to_hex(),
3658 hash.to_hex(),
3659 change.path
3660 ));
3661 blob_ok = false;
3662 }
3663 }
3664 }
3665 continue;
3666 }
3667 if patch.payload.is_empty() {
3668 continue;
3669 }
3670 if let Some(hash) = resolve_payload_to_hash(patch) {
3671 if !self.cas().has_blob(&hash) {
3672 warnings.push(format!(
3673 "patch {} references missing blob {}",
3674 patch.id.to_hex(),
3675 hash.to_hex()
3676 ));
3677 blob_ok = false;
3678 }
3679 } else {
3680 warnings.push(format!(
3681 "patch {} has non-UTF-8 payload, cannot verify blob reference",
3682 patch.id.to_hex()
3683 ));
3684 blob_ok = false;
3685 }
3686 }
3687 if blob_ok {
3688 checks_passed += 1;
3689 }
3690
3691 let mut head_ok = false;
3693 match self.head() {
3694 Ok((branch_name, _target_id)) => {
3695 if branches.iter().any(|(n, _)| n == &branch_name) {
3696 head_ok = true;
3697 checks_passed += 1;
3698 } else {
3699 errors.push(format!(
3700 "HEAD branch '{}' does not exist in branch list",
3701 branch_name
3702 ));
3703 }
3704 }
3705 Err(e) => {
3706 errors.push(format!("HEAD is invalid: {}", e));
3707 }
3708 }
3709 if head_ok {
3710 checks_passed += 1;
3711 }
3712
3713 Ok(FsckResult {
3714 checks_passed,
3715 warnings,
3716 errors,
3717 })
3718 }
3719
3720 fn record_reflog(
3725 &self,
3726 old_head: &PatchId,
3727 new_head: &PatchId,
3728 message: &str,
3729 ) -> Result<(), RepoError> {
3730 self.meta
3732 .reflog_push(old_head, new_head, message)
3733 .map_err(RepoError::Meta)?;
3734 Ok(())
3735 }
3736
    /// Return reflog entries as `(new_head, "ts:old_head:message")` pairs.
    ///
    /// Prefers the SQLite-backed reflog; when that is empty, any legacy
    /// JSON reflog stored under the "reflog" config key is migrated into
    /// SQLite and returned (with a zero timestamp).
    ///
    /// NOTE(review): entries from the SQLite path are stamped with the
    /// *current* time, not the time of the transition — the store
    /// apparently does not persist timestamps; confirm.
    pub fn reflog_entries(&self) -> Result<Vec<(String, String)>, RepoError> {
        let sqlite_entries = self.meta.reflog_list().map_err(RepoError::Meta)?;

        if !sqlite_entries.is_empty() {
            let entries: Vec<(String, String)> = sqlite_entries
                .into_iter()
                .map(|(old_head, new_head, message)| {
                    let ts = std::time::SystemTime::now()
                        .duration_since(std::time::UNIX_EPOCH)
                        .unwrap_or_default()
                        .as_secs();
                    (new_head, format!("{}:{}:{}", ts, old_head, message))
                })
                .collect();
            return Ok(entries);
        }

        // Legacy path: migrate the old JSON-in-config reflog into SQLite.
        match self.meta.get_config("reflog").map_err(RepoError::Meta)? {
            Some(json) => {
                let legacy: Vec<(String, String)> = serde_json::from_str(&json).unwrap_or_default();
                for (new_head, entry) in &legacy {
                    // Legacy entry format: "ts:old_head:message".
                    let parts: Vec<&str> = entry.splitn(3, ':').collect();
                    if parts.len() >= 3 {
                        let old_head = parts[1];
                        let msg = parts[2];
                        if let (Ok(old), Ok(new)) =
                            (Hash::from_hex(old_head), Hash::from_hex(new_head))
                        {
                            let _ = self.meta.reflog_push(&old, &new, msg);
                        }
                    }
                }
                let _ = self.meta.delete_config("reflog");
                // Re-read the migrated entries; timestamps are zeroed.
                let sqlite_entries = self.meta.reflog_list().map_err(RepoError::Meta)?;
                let entries: Vec<(String, String)> = sqlite_entries
                    .into_iter()
                    .map(|(old_head, new_head, message)| {
                        (new_head, format!("{}:{}:{}", 0, old_head, message))
                    })
                    .collect();
                Ok(entries)
            }
            None => Ok(Vec::new()),
        }
    }
3789}
3790
/// Read `.sutureignore` from `root` and return its trimmed, non-empty,
/// non-comment lines. A missing or unreadable file yields no patterns.
fn load_ignore_patterns(root: &Path) -> Vec<String> {
    let ignore_file = root.join(".sutureignore");
    if !ignore_file.exists() {
        return Vec::new();
    }

    let content = fs::read_to_string(&ignore_file).unwrap_or_default();
    let mut patterns = Vec::new();
    for line in content.lines() {
        let trimmed = line.trim();
        if trimmed.is_empty() || trimmed.starts_with('#') {
            continue;
        }
        patterns.push(trimmed.to_string());
    }
    patterns
}
3809
/// Decide whether `rel_path` matches any ignore pattern.
///
/// Supported forms: `*suffix` (suffix match, e.g. `*.log`), `dir/`
/// (directory-prefix match), and plain names (exact match or any path
/// under a directory of that name).
fn is_ignored(rel_path: &str, patterns: &[String]) -> bool {
    patterns.iter().any(|pattern| {
        if let Some(suffix) = pattern.strip_prefix('*') {
            rel_path.ends_with(suffix)
        } else if pattern.ends_with('/') {
            rel_path.starts_with(pattern.as_str())
        } else {
            rel_path == pattern || rel_path.starts_with(&format!("{}/", pattern))
        }
    })
}
3832
/// A single regular file discovered by `walk_dir`.
struct WalkEntry {
    // Path relative to the walk root, `/`-separated (backslashes are
    // normalized by the walker).
    relative: String,
    // Absolute path on disk; currently unread by callers.
    #[allow(dead_code)]
    full_path: PathBuf,
}
3839
3840fn walk_dir(root: &Path, ignore_patterns: &[String]) -> Result<Vec<WalkEntry>, io::Error> {
3842 let mut entries = Vec::new();
3843 walk_dir_recursive(root, root, ignore_patterns, &mut entries)?;
3844 Ok(entries)
3845}
3846
3847fn walk_dir_recursive(
3848 root: &Path,
3849 current: &Path,
3850 ignore_patterns: &[String],
3851 entries: &mut Vec<WalkEntry>,
3852) -> Result<(), io::Error> {
3853 if !current.is_dir() {
3854 return Ok(());
3855 }
3856
3857 let mut dir_entries: Vec<_> = fs::read_dir(current)?
3858 .filter_map(|e| e.ok())
3859 .filter(|e| {
3860 let name = e.file_name();
3862 name != ".suture"
3863 })
3864 .collect();
3865
3866 dir_entries.sort_by_key(|e| e.file_name());
3867
3868 for entry in dir_entries {
3869 let path = entry.path();
3870 let rel = path
3871 .strip_prefix(root)
3872 .unwrap_or(&path)
3873 .to_string_lossy()
3874 .replace('\\', "/");
3875
3876 if is_ignored(&rel, ignore_patterns) {
3878 continue;
3879 }
3880
3881 if path.is_dir() {
3882 walk_dir_recursive(root, &path, ignore_patterns, entries)?;
3883 } else if path.is_file() {
3884 entries.push(WalkEntry {
3885 relative: rel,
3886 full_path: path,
3887 });
3888 }
3889 }
3890
3891 Ok(())
3892}
3893
3894fn restore_pending_merge_parents(meta: &crate::metadata::MetadataStore) -> Vec<PatchId> {
3896 let Ok(Some(json)) = meta.get_config("pending_merge_parents") else {
3897 return Vec::new();
3898 };
3899 serde_json::from_str::<Vec<PatchId>>(&json).unwrap_or_default()
3900}
3901
/// A saved stash, as reported by stash listing.
#[derive(Debug, Clone)]
pub struct StashEntry {
    // Position in the stash stack; 0 is the oldest entry.
    pub index: usize,
    // User-provided (or generated) stash message.
    pub message: String,
    // Branch that was checked out when the stash was created.
    pub branch: String,
    // HEAD patch identifier at stash time, stored as a string
    // (presumably hex — confirm against the stash implementation).
    pub head_id: String,
}
3914
/// A worktree, as reported by worktree listing.
#[derive(Debug, Clone)]
pub struct WorktreeEntry {
    // Worktree name.
    pub name: String,
    // Filesystem path of the worktree.
    pub path: String,
    // Branch checked out in this worktree.
    pub branch: String,
    // `true` for the primary worktree (the original repository).
    pub is_main: bool,
}
3923
/// Per-line attribution produced by blame.
#[derive(Debug, Clone)]
pub struct BlameEntry {
    // Patch that last touched this line.
    pub patch_id: PatchId,
    // Commit message of that patch.
    pub message: String,
    // Author of that patch.
    pub author: String,
    // The line's content.
    pub line: String,
    // Line number within the file (0- vs 1-based not determinable from
    // this chunk — confirm against the blame implementation).
    pub line_number: usize,
}
3938
/// Outcome of a completed rebase.
#[derive(Debug, Clone)]
pub struct RebaseResult {
    // Number of patches replayed onto the new base.
    pub patches_replayed: usize,
    // Tip of the rebased branch after replay.
    pub new_tip: PatchId,
}
3947
/// Action for one entry of an interactive rebase plan
/// (mirrors git's `pick`/`reword`/`edit`/`squash`/`drop`).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum RebaseAction {
    // Replay the commit as-is.
    Pick,
    // Replay the commit with a changed message.
    Reword,
    // Replay the commit, then pause for amending.
    Edit,
    // Fold the commit into the previous one.
    Squash,
    // Skip the commit entirely.
    Drop,
}
3962
/// One step of an interactive rebase plan.
#[derive(Debug, Clone)]
pub struct RebasePlanEntry {
    // What to do with this commit.
    pub action: RebaseAction,
    // Tip patch of the commit this step replays (presumably the last
    // patch when a commit spans several — confirm against the planner).
    pub commit_tip: PatchId,
    // Commit message for this step.
    pub message: String,
    // All patch ids belonging to this step.
    pub patch_ids: Vec<PatchId>,
}
3975
/// An ordered interactive rebase plan.
#[derive(Debug, Clone)]
pub struct RebasePlan {
    // Steps, applied in order.
    pub entries: Vec<RebasePlanEntry>,
}
3981
/// Persistable state of an in-progress rebase, so a paused or interrupted
/// rebase can be resumed or aborted.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RebaseState {
    // HEAD before the rebase began (restore target for abort).
    pub original_head: PatchId,
    // Branch name before the rebase began.
    pub original_branch: String,
    // Patch the work is being replayed onto.
    pub onto: PatchId,
    // Index of the next plan entry to execute.
    pub next_entry: usize,
    // The full plan, in its string-serialized form.
    pub plan: Vec<RebasePlanEntrySerialized>,
    // Parent to attach the next replayed patch to.
    pub current_parent: PatchId,
    // Accumulated message while a squash is in progress, if any.
    pub squash_message: Option<String>,
}
4000
/// String-encoded form of a rebase plan entry used for persistence
/// (patch ids as strings, the action as its name).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RebasePlanEntrySerialized {
    // Action name — presumably matches `RebaseAction` variants;
    // verify against the (de)serialization code.
    pub action: String,
    // Commit-tip patch id, as a string.
    pub commit_tip: String,
    // Commit message for this step.
    pub message: String,
    // Ids of the patches in this step, as strings.
    pub patch_ids: Vec<String>,
}
4009
/// Snapshot of repository state, as returned by `Repository::status`.
#[derive(Debug, Clone)]
pub struct RepoStatus {
    // Currently checked-out branch, if HEAD is on a branch.
    pub head_branch: Option<String>,
    // Patch HEAD points at, if any.
    pub head_patch: Option<PatchId>,
    // Total number of branches.
    pub branch_count: usize,
    // Staged paths and their per-file statuses.
    pub staged_files: Vec<(String, FileStatus)>,
    // Total number of patches known to the repository.
    pub patch_count: usize,
}
4024
/// Result of `Repository::execute_merge`.
#[derive(Debug, Clone)]
pub struct MergeExecutionResult {
    // `true` when the merge completed without unresolved conflicts.
    pub is_clean: bool,
    // The merged file tree.
    pub merged_tree: FileTree,
    // Id of the created merge patch; `None` when conflicts remain
    // (the merge commit is deferred until resolution).
    pub merge_patch_id: Option<PatchId>,
    // Conflicts the user still has to resolve.
    pub unresolved_conflicts: Vec<ConflictInfo>,
    // Number of patches applied during the merge.
    pub patches_applied: usize,
}
4043
/// One unresolved merge conflict on a single path.
#[derive(Debug, Clone)]
pub struct ConflictInfo {
    // Repository-relative path of the conflicted file.
    pub path: String,
    // Conflicting patch on our side (HEAD).
    pub our_patch_id: PatchId,
    // Conflicting patch on their side (the merge source).
    pub their_patch_id: PatchId,
    // Content hash of our version, if the file exists on our side.
    pub our_content_hash: Option<Hash>,
    // Content hash of their version, if the file exists on their side.
    pub their_content_hash: Option<Hash>,
    // Content hash of the common-ancestor version, if one exists.
    pub base_content_hash: Option<Hash>,
}
4060
/// Result of a garbage-collection pass.
#[derive(Debug, Clone)]
pub struct GcResult {
    // Number of patches removed by the pass.
    pub patches_removed: usize,
}
4067
/// Result of a repository consistency check (fsck).
#[derive(Debug, Clone)]
pub struct FsckResult {
    // Number of checks that passed.
    pub checks_passed: usize,
    // Non-fatal findings.
    pub warnings: Vec<String>,
    // Integrity problems that indicate corruption.
    pub errors: Vec<String>,
}
4078
4079fn three_way_merge(
4083 base: Option<&str>,
4084 ours: &str,
4085 theirs: &str,
4086 head_branch: &str,
4087 source_branch: &str,
4088) -> Result<String, Vec<String>> {
4089 use crate::engine::merge::three_way_merge_lines;
4090
4091 let base_lines: Vec<&str> = base.map(|s| s.lines().collect()).unwrap_or_default();
4092 let ours_lines: Vec<&str> = ours.lines().collect();
4093 let theirs_lines: Vec<&str> = theirs.lines().collect();
4094
4095 let ours_label = if head_branch.is_empty() {
4096 "HEAD".to_string()
4097 } else {
4098 format!("{head_branch} (HEAD)")
4099 };
4100 let theirs_label = if source_branch.is_empty() {
4101 "theirs".to_string()
4102 } else {
4103 source_branch.to_string()
4104 };
4105
4106 let result = three_way_merge_lines(
4107 &base_lines,
4108 &ours_lines,
4109 &theirs_lines,
4110 &ours_label,
4111 &theirs_label,
4112 );
4113
4114 if result.is_clean {
4115 Ok(result.lines.join("\n"))
4116 } else {
4117 Err(result.lines)
4118 }
4119}
4120
4121#[cfg(test)]
4126mod tests {
4127 use super::*;
4128
    // init creates the .suture directory and metadata DB; open on an
    // existing repo sees the single default branch.
    #[test]
    fn test_init_and_open() {
        let dir = tempfile::tempdir().unwrap();
        let repo_path = dir.path();

        let _repo = Repository::init(repo_path, "alice").unwrap();
        assert!(repo_path.join(".suture").exists());
        assert!(repo_path.join(".suture/metadata.db").exists());

        let repo2 = Repository::open(repo_path).unwrap();
        assert_eq!(repo2.list_branches().len(), 1);
    }
4142
    // A second init in the same directory fails with AlreadyExists.
    #[test]
    fn test_init_already_exists() {
        let dir = tempfile::tempdir().unwrap();
        Repository::init(dir.path(), "alice").unwrap();
        let result = Repository::init(dir.path(), "alice");
        assert!(matches!(result, Err(RepoError::AlreadyExists(_))));
    }
4150
    // Branch creation succeeds once and rejects a duplicate name.
    #[test]
    fn test_create_branch() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        repo.create_branch("feature", None).unwrap();
        assert_eq!(repo.list_branches().len(), 2);

        let result = repo.create_branch("feature", None);
        assert!(result.is_err());
    }
4162
    // Staging a new file shows it as Added in status.
    #[test]
    fn test_add_and_status() {
        let dir = tempfile::tempdir().unwrap();
        let repo = Repository::init(dir.path(), "alice").unwrap();

        let test_file = dir.path().join("hello.txt");
        fs::write(&test_file, "hello, suture!").unwrap();

        repo.add("hello.txt").unwrap();
        let status = repo.status().unwrap();
        assert_eq!(status.staged_files.len(), 1);
        assert_eq!(status.staged_files[0].0, "hello.txt");
        assert_eq!(status.staged_files[0].1, FileStatus::Added);
    }
4177
    // Adding a path that does not exist on disk is an error.
    #[test]
    fn test_add_nonexistent_file() {
        let dir = tempfile::tempdir().unwrap();
        let repo = Repository::init(dir.path(), "alice").unwrap();
        let result = repo.add("does_not_exist.txt");
        assert!(result.is_err());
    }
4185
    // Commit clears the staging area and records the patch in the DAG;
    // count 2 = this commit plus the patch created at init.
    #[test]
    fn test_commit() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        let test_file = dir.path().join("test.txt");
        fs::write(&test_file, "test content").unwrap();
        repo.add("test.txt").unwrap();

        let patch_id = repo.commit("initial file").unwrap();

        let status = repo.status().unwrap();
        assert!(status.staged_files.is_empty());
        assert!(repo.dag.has_patch(&patch_id));
        assert_eq!(repo.dag.patch_count(), 2);
    }
4202
    // Committing with nothing staged returns NothingToCommit.
    #[test]
    fn test_commit_nothing() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();
        let result = repo.commit("empty commit");
        assert!(matches!(result, Err(RepoError::NothingToCommit)));
    }
4210
    // log returns all patches: two commits plus the patch from init.
    #[test]
    fn test_log() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        let test_file = dir.path().join("test.txt");
        fs::write(&test_file, "v1").unwrap();
        repo.add("test.txt").unwrap();
        repo.commit("first commit").unwrap();

        fs::write(&test_file, "v2").unwrap();
        repo.add("test.txt").unwrap();
        repo.commit("second commit").unwrap();

        let log = repo.log(None).unwrap();
        assert_eq!(log.len(), 3); // two commits + init patch
    }
4228
    // snapshot_head reconstructs the committed tree, keyed by path with
    // content hashes as values.
    #[test]
    fn test_snapshot_head() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        let test_file = dir.path().join("test.txt");
        fs::write(&test_file, "hello world").unwrap();
        repo.add("test.txt").unwrap();
        repo.commit("add test.txt").unwrap();

        let tree = repo.snapshot_head().unwrap();
        assert!(tree.contains("test.txt"));
        assert_eq!(tree.get("test.txt"), Some(&Hash::from_data(b"hello world")));
    }
4243
    // A freshly initialized repo snapshots to an empty tree.
    #[test]
    fn test_snapshot_empty_repo() {
        let dir = tempfile::tempdir().unwrap();
        let repo = Repository::init(dir.path(), "alice").unwrap();

        let tree = repo.snapshot_head().unwrap();
        assert!(tree.is_empty());
    }
4252
    // Checkout materializes the target branch's tree and removes files
    // that are not part of it.
    #[test]
    fn test_checkout() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        let main_file = dir.path().join("main.txt");
        fs::write(&main_file, "main content").unwrap();
        repo.add("main.txt").unwrap();
        repo.commit("add main.txt").unwrap();

        let (_, head_id) = repo.head().unwrap();
        // Build a patch by hand (rather than committing) so it exists in
        // the DAG/metadata without moving the current branch head.
        let feat_patch = Patch::new(
            OperationType::Modify,
            TouchSet::single("feature.txt"),
            Some("feature.txt".to_string()),
            Hash::from_data(b"feature content")
                .to_hex()
                .as_bytes()
                .to_vec(),
            vec![head_id],
            "alice".to_string(),
            "add feature.txt".to_string(),
        );
        let _feat_id = repo
            .dag_mut()
            .add_patch(feat_patch.clone(), vec![head_id])
            .unwrap();
        repo.meta.store_patch(&feat_patch).unwrap();

        repo.checkout("main").unwrap();
        assert!(!dir.path().join("feature.txt").exists());
        assert!(dir.path().join("main.txt").exists());
    }
4289
    // Checking out the already-current branch with staged changes is
    // allowed, and the staged working set is preserved.
    #[test]
    fn test_checkout_refuses_dirty() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        let staged = dir.path().join("staged.txt");
        fs::write(&staged, "staged").unwrap();
        repo.add("staged.txt").unwrap();

        let result = repo.checkout("main");
        assert!(result.is_ok());

        let working_set = repo.meta.working_set().unwrap();
        assert!(working_set.iter().any(|(p, _)| p == "staged.txt"));
    }
4308
    // diff between an earlier commit and HEAD reports the file as
    // Modified.
    #[test]
    fn test_diff() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        let test_file = dir.path().join("test.txt");
        fs::write(&test_file, "v1").unwrap();
        repo.add("test.txt").unwrap();
        let first_commit = repo.commit("first").unwrap();

        fs::write(&test_file, "v2").unwrap();
        repo.add("test.txt").unwrap();
        repo.commit("second").unwrap();

        let diffs = repo.diff(Some(&first_commit.to_hex()), None).unwrap();
        assert_eq!(diffs.len(), 1);
        assert_eq!(diffs[0].diff_type, DiffType::Modified);
    }
4328
    // Reverting a commit that added a file removes it from both the HEAD
    // tree and the working directory.
    #[test]
    fn test_revert() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        let test_file = dir.path().join("test.txt");
        fs::write(&test_file, "original").unwrap();
        repo.add("test.txt").unwrap();
        let commit_id = repo.commit("add file").unwrap();

        repo.revert(&commit_id, None).unwrap();

        let tree = repo.snapshot_head().unwrap();
        assert!(!tree.contains("test.txt"));
        assert!(
            !test_file.exists(),
            "revert should remove the file from the working tree"
        );
    }
4349
    // Reopening a repository rebuilds the complete patch DAG from
    // persisted metadata.
    #[test]
    fn test_open_reconstructs_full_dag() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        let f = dir.path().join("f.txt");
        fs::write(&f, "v1").unwrap();
        repo.add("f.txt").unwrap();
        repo.commit("first").unwrap();

        fs::write(&f, "v2").unwrap();
        repo.add("f.txt").unwrap();
        repo.commit("second").unwrap();

        fs::write(&f, "v3").unwrap();
        repo.add("f.txt").unwrap();
        repo.commit("third").unwrap();

        let original_count = repo.dag.patch_count();

        let repo2 = Repository::open(dir.path()).unwrap();
        assert_eq!(repo2.dag.patch_count(), original_count);

        let log = repo2.log(None).unwrap();
        assert_eq!(log.len(), 4); // three commits + init patch
    }
4378
    // Covers all three ignore-pattern forms: "dir/", "*suffix", "name".
    #[test]
    fn test_ignore_patterns() {
        let patterns = vec![
            "target/".to_string(),
            "*.o".to_string(),
            "build".to_string(),
        ];

        assert!(is_ignored("target/debug/main", &patterns));
        assert!(is_ignored("foo.o", &patterns));
        assert!(is_ignored("build/output", &patterns));
        assert!(is_ignored("build", &patterns));
        assert!(!is_ignored("src/main.rs", &patterns));
        assert!(!is_ignored("main.rs", &patterns));
    }
4394
    // End-to-end: a branch created before further commits still sees the
    // older content after checkout.
    #[test]
    fn test_full_workflow_with_checkout() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("a.txt"), "version 1")?;
        repo.add("a.txt")?;
        repo.commit("add a.txt v1")?;

        repo.create_branch("feature", None)?;

        fs::write(dir.path().join("a.txt"), "version 2")?;
        fs::write(dir.path().join("b.txt"), "new file")?;
        repo.add("a.txt")?;
        repo.add("b.txt")?;
        repo.commit("modify a, add b")?;

        repo.checkout("feature")?;
        let content = fs::read_to_string(dir.path().join("a.txt"))?;
        assert_eq!(content, "version 1");
        assert!(!dir.path().join("b.txt").exists());

        Ok(())
    }
4423
    // add_all stages every file found in the working directory.
    #[test]
    fn test_add_all() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let repo = Repository::init(dir.path(), "alice").unwrap();

        fs::write(dir.path().join("a.txt"), "a")?;
        fs::write(dir.path().join("b.txt"), "b")?;
        let count = repo.add_all().unwrap();
        assert_eq!(count, 2);
        Ok(())
    }
4436
    // Non-overlapping changes on two branches merge cleanly, keep all
    // files on disk, and produce a two-parent Merge patch.
    #[test]
    fn test_execute_merge_clean() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        fs::write(dir.path().join("base.txt"), "base").unwrap();
        repo.add("base.txt").unwrap();
        repo.commit("add base").unwrap();

        repo.create_branch("feature", None).unwrap();

        fs::write(dir.path().join("main_file.txt"), "main content").unwrap();
        repo.add("main_file.txt").unwrap();
        repo.commit("add main file").unwrap();

        repo.checkout("feature").unwrap();

        fs::write(dir.path().join("feat_file.txt"), "feature content").unwrap();
        repo.add("feat_file.txt").unwrap();
        repo.commit("add feature file").unwrap();

        let result = repo.execute_merge("main").unwrap();
        assert!(result.is_clean);
        assert!(result.merge_patch_id.is_some());
        assert!(result.unresolved_conflicts.is_empty());
        assert!(dir.path().join("main_file.txt").exists());
        assert!(dir.path().join("feat_file.txt").exists());
        assert!(dir.path().join("base.txt").exists());

        let log = repo.log(None).unwrap();
        let merge_patch = log
            .iter()
            .find(|p| p.operation_type == OperationType::Merge);
        assert!(merge_patch.is_some());
        assert_eq!(merge_patch.unwrap().parent_ids.len(), 2);
    }
4473
    // Conflicting edits to the same file leave conflict markers on disk,
    // report the conflict, and defer the merge patch.
    #[test]
    fn test_execute_merge_conflicting() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        fs::write(dir.path().join("shared.txt"), "original").unwrap();
        repo.add("shared.txt").unwrap();
        repo.commit("add shared").unwrap();

        repo.create_branch("feature", None).unwrap();

        fs::write(dir.path().join("shared.txt"), "main version").unwrap();
        repo.add("shared.txt").unwrap();
        repo.commit("modify on main").unwrap();

        repo.checkout("feature").unwrap();

        fs::write(dir.path().join("shared.txt"), "feature version").unwrap();
        repo.add("shared.txt").unwrap();
        repo.commit("modify on feature").unwrap();

        let result = repo.execute_merge("main").unwrap();
        assert!(!result.is_clean);
        assert!(result.merge_patch_id.is_none());
        assert_eq!(result.unresolved_conflicts.len(), 1);
        assert_eq!(result.unresolved_conflicts[0].path, "shared.txt");

        // Markers are labelled "<branch> (HEAD)" vs the source branch.
        let content = fs::read_to_string(dir.path().join("shared.txt")).unwrap();
        assert!(content.contains("<<<<<<< feature (HEAD)"));
        assert!(content.contains("main version"));
        assert!(content.contains("feature version"));
        assert!(content.contains(">>>>>>> main"));
    }
4507
    // Merging a branch that is strictly ahead of HEAD completes cleanly
    // (fast-forward case) and brings its files onto disk.
    #[test]
    fn test_execute_merge_fast_forward() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        fs::write(dir.path().join("base.txt"), "base").unwrap();
        repo.add("base.txt").unwrap();
        repo.commit("add base").unwrap();

        repo.create_branch("feature", None).unwrap();

        repo.checkout("feature").unwrap();
        fs::write(dir.path().join("new_file.txt"), "new content").unwrap();
        repo.add("new_file.txt").unwrap();
        repo.commit("add new file on feature").unwrap();

        repo.checkout("main").unwrap();

        let result = repo.execute_merge("feature").unwrap();
        assert!(result.is_clean);
        assert!(dir.path().join("new_file.txt").exists());
    }
4530
    // Committing after a conflicted merge consumes the pending merge
    // parents and records a two-parent resolution patch.
    #[test]
    fn test_resolve_merge_conflict() {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice").unwrap();

        fs::write(dir.path().join("shared.txt"), "original").unwrap();
        repo.add("shared.txt").unwrap();
        repo.commit("add shared").unwrap();

        repo.create_branch("feature", None).unwrap();

        fs::write(dir.path().join("shared.txt"), "main version").unwrap();
        repo.add("shared.txt").unwrap();
        repo.commit("modify on main").unwrap();

        repo.checkout("feature").unwrap();

        fs::write(dir.path().join("shared.txt"), "feature version").unwrap();
        repo.add("shared.txt").unwrap();
        repo.commit("modify on feature").unwrap();

        let _result = repo.execute_merge("main").unwrap();

        // Resolve by writing final content and committing normally.
        fs::write(dir.path().join("shared.txt"), "resolved content").unwrap();
        repo.add("shared.txt").unwrap();
        let commit_id = repo.commit("resolve merge conflict").unwrap();

        assert!(repo.pending_merge_parents.is_empty());

        let log = repo.log(None).unwrap();
        let resolve_patch = log.iter().find(|p| p.id == commit_id).unwrap();
        assert_eq!(resolve_patch.parent_ids.len(), 2);
    }
4564
    // three_way_merge: identical edits and one-sided edits merge cleanly;
    // divergent content with no base yields markered conflict lines.
    #[test]
    fn test_three_way_merge() {
        let ours = "line1\nline2-modified\nline3";
        let theirs = "line1\nline2-modified\nline3";
        let result = three_way_merge(Some("line1\nline2\nline3"), ours, theirs, "main", "feature");
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), ours);

        let result = three_way_merge(Some("base"), "base", "changed", "main", "feature");
        assert_eq!(result.unwrap(), "changed");

        let result = three_way_merge(Some("base"), "changed", "base", "main", "feature");
        assert_eq!(result.unwrap(), "changed");

        let result = three_way_merge(None, "ours content", "theirs content", "main", "feature");
        assert!(result.is_err());
        let lines = result.unwrap_err();
        assert!(lines[0].contains("<<<<<<<"));
        assert!(lines.last().unwrap().contains(">>>>>>>"));
    }
4585
    // Config values round-trip through set/get/list; init pre-seeds an
    // "author" key.
    #[test]
    fn test_config_get_set() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        assert!(repo.get_config("user.name")?.is_none());
        assert!(repo.get_config("user.email")?.is_none());

        repo.set_config("user.name", "Alice")?;
        repo.set_config("user.email", "alice@example.com")?;

        assert_eq!(repo.get_config("user.name")?.unwrap(), "Alice");
        assert_eq!(repo.get_config("user.email")?.unwrap(), "alice@example.com");

        let config = repo.list_config()?;
        assert!(config.iter().any(|(k, v)| k == "user.name" && v == "Alice"));
        assert!(
            config
                .iter()
                .any(|(k, v)| k == "user.email" && v == "alice@example.com")
        );
        assert!(config.iter().any(|(k, _)| k == "author"));

        Ok(())
    }
4613
    // Branches can be deleted, but deleting "main" (the branch currently
    // checked out) fails.
    #[test]
    fn test_delete_branch() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        repo.create_branch("feature", None)?;
        repo.create_branch("develop", None)?;
        assert_eq!(repo.list_branches().len(), 3);

        let result = repo.delete_branch("main");
        assert!(result.is_err());

        repo.delete_branch("feature")?;
        assert_eq!(repo.list_branches().len(), 2);

        repo.delete_branch("develop")?;
        assert_eq!(repo.list_branches().len(), 1);

        Ok(())
    }
4636
    // A created tag (at HEAD, no explicit target) appears in list_tags.
    #[test]
    fn test_tags() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("a.txt"), "v1")?;
        repo.add("a.txt")?;
        let _commit_id = repo.commit("first commit")?;

        repo.create_tag("v1.0", None)?;
        let tags = repo.list_tags()?;
        assert_eq!(tags.len(), 1);

        Ok(())
    }
4653
    // patches_since returns the patches strictly after the given id, in
    // commit order; since the tip it is empty.
    #[test]
    fn test_patches_since() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("a.txt"), "v1")?;
        repo.add("a.txt")?;
        let id1 = repo.commit("first")?;

        fs::write(dir.path().join("a.txt"), "v2")?;
        repo.add("a.txt")?;
        let id2 = repo.commit("second")?;

        fs::write(dir.path().join("b.txt"), "new")?;
        repo.add("b.txt")?;
        let id3 = repo.commit("third")?;

        let since = repo.patches_since(&id1);
        assert_eq!(since.len(), 2);
        assert_eq!(since[0].id, id2);
        assert_eq!(since[1].id, id3);

        let since = repo.patches_since(&id3);
        assert!(since.is_empty());

        // The oldest log entry is the init patch; everything after it is
        // the three user commits.
        let root_id = repo.log(None)?.last().unwrap().id;
        let since = repo.patches_since(&root_id);
        assert_eq!(since.len(), 3);
        assert_eq!(since[0].id, id1);
        assert_eq!(since[1].id, id2);
        assert_eq!(since[2].id, id3);

        Ok(())
    }
4695
    // Pending merge parents survive closing and reopening the repository,
    // and are consumed by the resolving commit.
    #[test]
    fn test_pending_merge_persistence() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("shared.txt"), "original")?;
        repo.add("shared.txt")?;
        repo.commit("add shared")?;

        repo.create_branch("feature", None)?;

        fs::write(dir.path().join("shared.txt"), "main version")?;
        repo.add("shared.txt")?;
        repo.commit("modify on main")?;

        repo.checkout("feature")?;

        fs::write(dir.path().join("shared.txt"), "feature version")?;
        repo.add("shared.txt")?;
        repo.commit("modify on feature")?;

        let _ = repo.execute_merge("main")?;
        assert_eq!(repo.pending_merge_parents.len(), 2);

        // Reopen: pending state must be restored from metadata.
        drop(repo);
        let mut repo2 = Repository::open(dir.path())?;
        assert_eq!(repo2.pending_merge_parents.len(), 2);

        fs::write(dir.path().join("shared.txt"), "resolved")?;
        repo2.add("shared.txt")?;
        let resolve_id = repo2.commit("resolve")?;
        assert!(repo2.pending_merge_parents.is_empty());

        let patch = repo2
            .log(None)?
            .into_iter()
            .find(|p| p.id == resolve_id)
            .unwrap();
        assert_eq!(patch.parent_ids.len(), 2);

        Ok(())
    }
4742
    // A fresh repository reports no uncommitted changes.
    #[test]
    fn test_has_uncommitted_changes_clean() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let repo = Repository::init(dir.path(), "alice")?;

        assert!(!repo.has_uncommitted_changes()?);

        Ok(())
    }
4752
    // Staged-but-uncommitted files count as uncommitted changes.
    #[test]
    fn test_has_uncommitted_changes_staged() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("a.txt"), "content")?;
        repo.add("a.txt")?;

        assert!(repo.has_uncommitted_changes()?);

        Ok(())
    }
4765
    // On-disk edits that were never staged also count as uncommitted
    // changes.
    #[test]
    fn test_has_uncommitted_changes_unstaged() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("a.txt"), "original")?;
        repo.add("a.txt")?;
        repo.commit("initial")?;

        fs::write(dir.path().join("a.txt"), "modified on disk")?;

        assert!(repo.has_uncommitted_changes()?);

        Ok(())
    }
4781
    // stash push restores the committed content on disk and clears the
    // working set; pop brings the stashed change back as Modified.
    #[test]
    fn test_stash_push_pop() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("a.txt"), "original")?;
        repo.add("a.txt")?;
        repo.commit("initial")?;

        fs::write(dir.path().join("a.txt"), "staged changes")?;
        repo.add("a.txt")?;

        let stash_index = repo.stash_push(Some("my stash"))?;
        assert_eq!(stash_index, 0);

        assert!(repo.meta.working_set()?.is_empty());
        let on_disk = fs::read_to_string(dir.path().join("a.txt"))?;
        assert_eq!(on_disk, "original");

        repo.stash_pop()?;

        let on_disk = fs::read_to_string(dir.path().join("a.txt"))?;
        assert_eq!(on_disk, "staged changes");

        let ws = repo.meta.working_set()?;
        assert_eq!(ws.len(), 1);
        assert_eq!(ws[0].0, "a.txt");
        assert_eq!(ws[0].1, FileStatus::Modified);

        Ok(())
    }
4813
    // stash_list returns entries in push order with sequential indices.
    #[test]
    fn test_stash_list() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("a.txt"), "original")?;
        repo.add("a.txt")?;
        repo.commit("initial")?;

        fs::write(dir.path().join("a.txt"), "change 1")?;
        repo.add("a.txt")?;
        let idx0 = repo.stash_push(Some("first stash"))?;
        assert_eq!(idx0, 0);

        fs::write(dir.path().join("a.txt"), "change 2")?;
        repo.add("a.txt")?;
        let idx1 = repo.stash_push(Some("second stash"))?;
        assert_eq!(idx1, 1);

        let list = repo.stash_list()?;
        assert_eq!(list.len(), 2);
        assert_eq!(list[0].index, 0);
        assert_eq!(list[0].message, "first stash");
        assert_eq!(list[1].index, 1);
        assert_eq!(list[1].message, "second stash");

        Ok(())
    }
4842
    // stash_apply restores the stashed content but, unlike pop, leaves
    // the stash entry in place.
    #[test]
    fn test_stash_apply_keeps_entry() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("a.txt"), "original")?;
        repo.add("a.txt")?;
        repo.commit("initial")?;

        fs::write(dir.path().join("a.txt"), "changes to apply")?;
        repo.add("a.txt")?;
        let idx = repo.stash_push(Some("keep me"))?;
        assert_eq!(idx, 0);

        repo.stash_apply(0)?;

        let on_disk = fs::read_to_string(dir.path().join("a.txt"))?;
        assert_eq!(on_disk, "changes to apply");

        let list = repo.stash_list()?;
        assert_eq!(list.len(), 1);
        assert_eq!(list[0].index, 0);
        assert_eq!(list[0].message, "keep me");

        Ok(())
    }
4869
    // stash_drop removes the entry; dropping an index that no longer
    // exists is an error.
    #[test]
    fn test_stash_drop() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("a.txt"), "original")?;
        repo.add("a.txt")?;
        repo.commit("initial")?;

        fs::write(dir.path().join("a.txt"), "stashed content")?;
        repo.add("a.txt")?;
        repo.stash_push(Some("droppable"))?;

        repo.stash_drop(0)?;

        let list = repo.stash_list()?;
        assert!(list.is_empty());

        let result = repo.stash_drop(0);
        assert!(result.is_err());

        Ok(())
    }
4893
    // Popping with an empty stash stack is an error.
    #[test]
    fn test_stash_pop_empty() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        let result = repo.stash_pop();
        assert!(result.is_err());

        Ok(())
    }
4904
    // Pushing a stash with nothing staged fails with the
    // "nothing to commit" error.
    #[test]
    fn test_stash_push_nothing() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        let result = repo.stash_push(None);
        assert!(result.is_err());
        let err = result.unwrap_err().to_string();
        assert!(err.contains("nothing to commit"));

        Ok(())
    }
4917
    // Soft reset moves HEAD only: working files and the staging area are
    // both preserved.
    #[test]
    fn test_reset_soft() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("file1.txt"), "first content")?;
        repo.add("file1.txt")?;
        let first_commit = repo.commit("first commit")?;

        fs::write(dir.path().join("file2.txt"), "second content")?;
        repo.add("file2.txt")?;
        repo.commit("second commit")?;

        fs::write(dir.path().join("file2.txt"), "modified second")?;
        repo.add("file2.txt")?;

        let result = repo.reset(&first_commit.to_hex(), ResetMode::Soft)?;
        assert_eq!(result, first_commit);

        let (_, head_id) = repo.head()?;
        assert_eq!(head_id, first_commit);

        assert!(dir.path().join("file2.txt").exists());
        assert_eq!(
            fs::read_to_string(dir.path().join("file2.txt"))?,
            "modified second"
        );

        let status = repo.status()?;
        assert_eq!(status.staged_files.len(), 1);
        assert_eq!(status.staged_files[0].0, "file2.txt");

        Ok(())
    }
4956
    // Mixed reset moves HEAD and clears the staging area, but leaves
    // working files untouched on disk.
    #[test]
    fn test_reset_mixed() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("file1.txt"), "first content")?;
        repo.add("file1.txt")?;
        let first_commit = repo.commit("first commit")?;

        fs::write(dir.path().join("file2.txt"), "second content")?;
        repo.add("file2.txt")?;
        repo.commit("second commit")?;

        fs::write(dir.path().join("file2.txt"), "modified second")?;
        repo.add("file2.txt")?;

        let result = repo.reset(&first_commit.to_hex(), ResetMode::Mixed)?;
        assert_eq!(result, first_commit);

        let (_, head_id) = repo.head()?;
        assert_eq!(head_id, first_commit);

        assert!(dir.path().join("file2.txt").exists());
        assert_eq!(
            fs::read_to_string(dir.path().join("file2.txt"))?,
            "modified second"
        );

        let status = repo.status()?;
        assert!(status.staged_files.is_empty());

        Ok(())
    }
4994
    // Hard reset moves HEAD and restores the working directory to the
    // target commit's tree, deleting files added since.
    #[test]
    fn test_reset_hard() -> Result<(), Box<dyn std::error::Error>> {
        let dir = tempfile::tempdir().unwrap();
        let mut repo = Repository::init(dir.path(), "alice")?;

        fs::write(dir.path().join("file1.txt"), "first content")?;
        repo.add("file1.txt")?;
        let first_commit = repo.commit("first commit")?;

        fs::write(dir.path().join("file2.txt"), "second content")?;
        repo.add("file2.txt")?;
        repo.commit("second commit")?;

        let result = repo.reset(&first_commit.to_hex(), ResetMode::Hard)?;
        assert_eq!(result, first_commit);

        let (_, head_id) = repo.head()?;
        assert_eq!(head_id, first_commit);

        assert!(dir.path().join("file1.txt").exists());
        assert!(!dir.path().join("file2.txt").exists());

        let tree = repo.snapshot_head()?;
        assert!(tree.contains("file1.txt"));
        assert!(!tree.contains("file2.txt"));

        Ok(())
    }
5025
5026 #[test]
5027 fn test_cherry_pick() -> Result<(), Box<dyn std::error::Error>> {
5028 let dir = tempfile::tempdir().unwrap();
5029 let mut repo = Repository::init(dir.path(), "alice")?;
5030
5031 fs::write(dir.path().join("a.txt"), "content of a")?;
5032 repo.add("a.txt")?;
5033 repo.commit("add a.txt")?;
5034
5035 repo.create_branch("feature", None)?;
5036
5037 fs::write(dir.path().join("b.txt"), "content of b")?;
5038 repo.add("b.txt")?;
5039 let b_commit = repo.commit("add b.txt")?;
5040
5041 repo.checkout("feature")?;
5042
5043 fs::write(dir.path().join("c.txt"), "content of c")?;
5045 repo.add("c.txt")?;
5046 repo.commit("add c.txt on feature")?;
5047
5048 repo.cherry_pick(&b_commit)?;
5049
5050 assert!(dir.path().join("b.txt").exists());
5051 let content = fs::read_to_string(dir.path().join("b.txt"))?;
5052 assert_eq!(content, "content of b");
5053
5054 let log = repo.log(None)?;
5055 assert!(log.iter().any(|p| p.message == "add b.txt"));
5056
5057 Ok(())
5058 }
5059
5060 #[test]
5061 fn test_cherry_pick_nonexistent() {
5062 let dir = tempfile::tempdir().unwrap();
5063 let mut repo = Repository::init(dir.path(), "alice").unwrap();
5064
5065 let fake_hash = Hash::from_data(b"nonexistent");
5066 let result = repo.cherry_pick(&fake_hash);
5067 assert!(result.is_err());
5068 }
5069
5070 #[test]
5071 fn test_rebase() -> Result<(), Box<dyn std::error::Error>> {
5072 let dir = tempfile::tempdir().unwrap();
5073 let mut repo = Repository::init(dir.path(), "alice")?;
5074
5075 fs::write(dir.path().join("a.txt"), "content of a")?;
5076 repo.add("a.txt")?;
5077 repo.commit("add a.txt")?;
5078
5079 repo.create_branch("feature", None)?;
5080
5081 repo.checkout("feature")?;
5082 fs::write(dir.path().join("b.txt"), "content of b")?;
5083 repo.add("b.txt")?;
5084 repo.commit("add b.txt on feature")?;
5085
5086 repo.checkout("main")?;
5087 fs::write(dir.path().join("c.txt"), "content of c")?;
5088 repo.add("c.txt")?;
5089 repo.commit("add c.txt on main")?;
5090
5091 repo.checkout("feature")?;
5092
5093 let result = repo.rebase("main")?;
5094 assert!(result.patches_replayed > 0);
5095
5096 assert!(dir.path().join("b.txt").exists());
5097 assert!(dir.path().join("c.txt").exists());
5098
5099 let log = repo.log(None)?;
5100 assert!(log.iter().any(|p| p.message == "add b.txt on feature"));
5101 assert!(log.iter().any(|p| p.message == "add c.txt on main"));
5102
5103 Ok(())
5104 }
5105
5106 #[test]
5107 fn test_rebase_fast_forward() -> Result<(), Box<dyn std::error::Error>> {
5108 let dir = tempfile::tempdir().unwrap();
5109 let mut repo = Repository::init(dir.path(), "alice")?;
5110
5111 fs::write(dir.path().join("a.txt"), "content of a")?;
5112 repo.add("a.txt")?;
5113 repo.commit("add a.txt")?;
5114
5115 repo.create_branch("feature", None)?;
5116
5117 fs::write(dir.path().join("b.txt"), "content of b")?;
5118 repo.add("b.txt")?;
5119 repo.commit("add b.txt")?;
5120
5121 repo.checkout("feature")?;
5122
5123 let result = repo.rebase("main")?;
5124 assert_eq!(result.patches_replayed, 0);
5125
5126 assert!(dir.path().join("b.txt").exists());
5127
5128 Ok(())
5129 }
5130
5131 #[test]
5132 fn test_blame() -> Result<(), Box<dyn std::error::Error>> {
5133 let dir = tempfile::tempdir().unwrap();
5134 let mut repo = Repository::init(dir.path(), "alice")?;
5135
5136 fs::write(dir.path().join("test.txt"), "line1\nline2\nline3")?;
5137 repo.add("test.txt")?;
5138 let first_commit = repo.commit("initial content")?;
5139
5140 fs::write(dir.path().join("test.txt"), "line1\nline2-modified\nline3")?;
5141 repo.add("test.txt")?;
5142 let second_commit = repo.commit("modify line2")?;
5143
5144 let blame = repo.blame("test.txt")?;
5145
5146 assert_eq!(blame.len(), 3);
5147 assert_eq!(blame[0].line, "line1");
5148 assert_eq!(blame[0].patch_id, first_commit);
5149
5150 assert_eq!(blame[1].line, "line2-modified");
5151 assert_eq!(blame[1].patch_id, second_commit);
5152
5153 assert_eq!(blame[2].line, "line3");
5154 assert_eq!(blame[2].patch_id, first_commit);
5155
5156 Ok(())
5157 }
5158
5159 #[test]
5160 fn test_blame_nonexistent_file() {
5161 let dir = tempfile::tempdir().unwrap();
5162 let repo = Repository::init(dir.path(), "alice").unwrap();
5163
5164 let result = repo.blame("nonexistent.txt");
5165 assert!(result.is_err());
5166 }
5167
5168 #[test]
5169 fn test_rm_file() -> Result<(), Box<dyn std::error::Error>> {
5170 let dir = tempfile::tempdir().unwrap();
5171 let mut repo = Repository::init(dir.path(), "alice")?;
5172
5173 fs::write(dir.path().join("test.txt"), "content")?;
5174 repo.add("test.txt")?;
5175 repo.commit("initial")?;
5176
5177 fs::remove_file(dir.path().join("test.txt"))?;
5178 repo.add("test.txt")?;
5179
5180 assert!(!dir.path().join("test.txt").exists());
5181
5182 let ws = repo.meta.working_set()?;
5183 assert_eq!(ws.len(), 1);
5184 assert_eq!(ws[0].0, "test.txt");
5185 assert_eq!(ws[0].1, FileStatus::Deleted);
5186
5187 Ok(())
5188 }
5189
5190 #[test]
5191 fn test_rm_cached() -> Result<(), Box<dyn std::error::Error>> {
5192 let dir = tempfile::tempdir().unwrap();
5193 let mut repo = Repository::init(dir.path(), "alice")?;
5194
5195 fs::write(dir.path().join("test.txt"), "content")?;
5196 repo.add("test.txt")?;
5197 repo.commit("initial")?;
5198
5199 let repo_path = RepoPath::new("test.txt")?;
5200 repo.meta.working_set_add(&repo_path, FileStatus::Deleted)?;
5201
5202 assert!(dir.path().join("test.txt").exists());
5203
5204 let ws = repo.meta.working_set()?;
5205 assert_eq!(ws.len(), 1);
5206 assert_eq!(ws[0].0, "test.txt");
5207 assert_eq!(ws[0].1, FileStatus::Deleted);
5208
5209 Ok(())
5210 }
5211
5212 #[test]
5213 fn test_mv_file() -> Result<(), Box<dyn std::error::Error>> {
5214 let dir = tempfile::tempdir().unwrap();
5215 let mut repo = Repository::init(dir.path(), "alice")?;
5216
5217 fs::write(dir.path().join("old.txt"), "content")?;
5218 repo.add("old.txt")?;
5219 repo.commit("initial")?;
5220
5221 repo.rename_file("old.txt", "new.txt")?;
5222
5223 assert!(!dir.path().join("old.txt").exists());
5224 assert!(dir.path().join("new.txt").exists());
5225
5226 let ws = repo.meta.working_set()?;
5227 assert!(
5228 ws.iter()
5229 .any(|(p, s)| p == "old.txt" && *s == FileStatus::Deleted)
5230 );
5231 assert!(
5232 ws.iter()
5233 .any(|(p, s)| p == "new.txt" && *s == FileStatus::Added)
5234 );
5235
5236 Ok(())
5237 }
5238
5239 #[test]
5240 fn test_mv_nonexistent() {
5241 let dir = tempfile::tempdir().unwrap();
5242 let repo = Repository::init(dir.path(), "alice").unwrap();
5243
5244 let result = repo.rename_file("nonexistent.txt", "new.txt");
5245 assert!(result.is_err());
5246 let err = result.unwrap_err().to_string();
5247 assert!(err.contains("path not found"));
5248 }
5249
5250 #[test]
5251 fn test_remove_remote() -> Result<(), Box<dyn std::error::Error>> {
5252 let dir = tempfile::tempdir().unwrap();
5253 let repo = Repository::init(dir.path(), "alice")?;
5254
5255 repo.add_remote("origin", "http://example.com")?;
5256
5257 let remotes = repo.list_remotes()?;
5258 assert_eq!(remotes.len(), 1);
5259 assert_eq!(remotes[0].0, "origin");
5260
5261 repo.remove_remote("origin")?;
5262
5263 let remotes = repo.list_remotes()?;
5264 assert!(remotes.is_empty());
5265
5266 let result = repo.remove_remote("nonexistent");
5267 assert!(result.is_err());
5268
5269 Ok(())
5270 }
5271}