// aida_core/db/git_backend.rs
// trace:ARCH-distributed-git-backend | ai:claude
//! Git-backed storage backend for distributed AIDA.
//!
//! Stores requirements as individual YAML files in a sharded directory layout
//! within a git repository. Metadata (store name, features, ID config, etc.)
//! is stored in a separate `metadata.yaml` file.
//!
//! This backend implements the `DatabaseBackend` trait, allowing it to be
//! used as a drop-in replacement for the YAML/SQLite/PostgreSQL backends.
//!
//! Git operations (commit, push, pull) are NOT automatic — the caller decides
//! when to sync. This backend handles only local file I/O.

14use anyhow::{Context, Result};
15use std::path::{Path, PathBuf};
16
17use super::traits::{BackendType, DatabaseBackend, UpdateResult, VersionConflict};
18use crate::models::{
19    DispenserHandle, QueueEntry, Requirement, RequirementsStore,
20};
21use crate::object_store;
22
23/// Git-backed storage backend.
24///
25/// Directory layout:
26/// ```text
27/// {root}/
28///   metadata.yaml          — store name, features, ID config, etc.
29///   objects/
30///     FR/000/FR-001.yaml   — individual requirement files (sharded)
31///     BUG/000/BUG-001.yaml
32///   relations/             — (future: append-only relation log)
33///   registry/              — (future: node/user registries)
34/// ```
35pub struct GitBackend {
36    /// Root directory of the git-backed store
37    root: PathBuf,
38    /// Path to the objects directory
39    objects_root: PathBuf,
40    /// Path to the metadata file
41    metadata_path: PathBuf,
42    /// Optional dispenser for ID generation
43    dispenser: Option<DispenserHandle>,
44    /// Whether to auto-commit changes to git after writes
45    auto_commit: bool,
46    /// Whether to record operations in the append-only oplog
47    oplog_enabled: bool,
48}
49
50/// Metadata stored separately from requirements (the "store" fields).
51/// This is everything in RequirementsStore except the requirements themselves.
52#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
53struct StoreMetadata {
54    #[serde(default)]
55    name: String,
56    #[serde(default)]
57    title: String,
58    #[serde(default)]
59    description: String,
60    #[serde(default)]
61    users: Vec<crate::models::User>,
62    #[serde(default)]
63    teams: Vec<crate::models::Team>,
64    #[serde(default)]
65    id_config: crate::models::IdConfiguration,
66    #[serde(default)]
67    features: Vec<crate::models::FeatureDefinition>,
68    #[serde(default = "default_one")]
69    next_feature_number: u32,
70    #[serde(default = "default_one")]
71    next_spec_number: u32,
72    #[serde(default)]
73    prefix_counters: std::collections::HashMap<String, u32>,
74    #[serde(default)]
75    relationship_definitions: Vec<crate::models::RelationshipDefinition>,
76    #[serde(default)]
77    reaction_definitions: Vec<crate::models::ReactionDefinition>,
78    #[serde(default)]
79    meta_counters: std::collections::HashMap<String, u32>,
80    #[serde(default)]
81    type_definitions: Vec<crate::models::CustomTypeDefinition>,
82    #[serde(default)]
83    allowed_prefixes: Vec<String>,
84    #[serde(default)]
85    restrict_prefixes: bool,
86    #[serde(default)]
87    ai_prompts: crate::models::AiPromptConfig,
88    #[serde(default)]
89    baselines: Vec<crate::models::Baseline>,
90}
91
92fn default_one() -> u32 {
93    1
94}
95
96impl Default for StoreMetadata {
97    fn default() -> Self {
98        Self {
99            name: String::new(),
100            title: String::new(),
101            description: String::new(),
102            users: Vec::new(),
103            teams: Vec::new(),
104            id_config: crate::models::IdConfiguration::default(),
105            features: Vec::new(),
106            next_feature_number: 1,
107            next_spec_number: 1,
108            prefix_counters: std::collections::HashMap::new(),
109            relationship_definitions: crate::models::RelationshipDefinition::defaults(),
110            reaction_definitions: crate::models::default_reaction_definitions(),
111            meta_counters: std::collections::HashMap::new(),
112            type_definitions: crate::models::default_type_definitions(),
113            allowed_prefixes: Vec::new(),
114            restrict_prefixes: false,
115            ai_prompts: crate::models::AiPromptConfig::default(),
116            baselines: Vec::new(),
117        }
118    }
119}
120
121impl GitBackend {
122    /// Create a new git backend rooted at the given directory.
123    /// Creates the directory structure if it doesn't exist.
124    pub fn new(root: &Path) -> Result<Self> {
125        let objects_root = root.join("objects");
126        let metadata_path = root.join("metadata.yaml");
127
128        std::fs::create_dir_all(&objects_root)
129            .with_context(|| format!("Failed to create objects dir: {}", objects_root.display()))?;
130
131        Ok(Self {
132            root: root.to_path_buf(),
133            objects_root,
134            metadata_path,
135            dispenser: None,
136            auto_commit: true,
137            oplog_enabled: true,
138        })
139    }
140
141    /// Set the dispenser for ID generation (distributed mode).
142    pub fn with_dispenser(mut self, dispenser: DispenserHandle) -> Self {
143        self.dispenser = Some(dispenser);
144        self
145    }
146
147    /// Disable auto-commit (useful for batch operations or testing).
148    pub fn with_auto_commit(mut self, enabled: bool) -> Self {
149        self.auto_commit = enabled;
150        self
151    }
152
153    /// Enable or disable the operation log.
154    pub fn with_oplog(mut self, enabled: bool) -> Self {
155        self.oplog_enabled = enabled;
156        self
157    }
158
159    /// Record an operation in the append-only oplog.
160    fn record_op(&self, target_id: uuid::Uuid, kind: crate::oplog::OpKind) {
161        if !self.oplog_enabled {
162            return;
163        }
164        let oplog_path = self.root.join("oplog.yaml");
165        let node_id = self.dispenser.as_ref()
166            .and_then(|d| d.state().ok())
167            .map(|s| match s.mode {
168                crate::dispenser::IdMode::Distributed { node_id } => node_id,
169                _ => 0,
170            })
171            .unwrap_or(0);
172
173        if let Ok(mut log) = crate::oplog::OpLog::load(&oplog_path) {
174            if log.node_id == 0 && node_id > 0 {
175                log.node_id = node_id;
176            }
177            log.append(target_id, "aida".into(), kind);
178            let _ = log.save(&oplog_path);
179        }
180    }
181
182    /// Stage all changes and commit to git if auto_commit is enabled.
183    /// The commit message describes what changed.
184    fn auto_commit(&self, message: &str) {
185        if !self.auto_commit || !crate::git_ops::is_git_repo(&self.root) {
186            return;
187        }
188        // Stage objects, metadata, and oplog
189        let _ = crate::git_ops::add_all(&self.root, "objects");
190        if self.metadata_path.exists() {
191            let _ = crate::git_ops::add(&self.root, &["metadata.yaml"]);
192        }
193        if self.root.join("oplog.yaml").exists() {
194            let _ = crate::git_ops::add(&self.root, &["oplog.yaml"]);
195        }
196        // Commit (silently ignore errors — git ops are best-effort)
197        let _ = crate::git_ops::commit(&self.root, message);
198    }
199
200    /// Load metadata from the metadata.yaml file.
201    fn load_metadata(&self) -> Result<StoreMetadata> {
202        if !self.metadata_path.exists() {
203            return Ok(StoreMetadata::default());
204        }
205        let content = std::fs::read_to_string(&self.metadata_path)
206            .with_context(|| format!("Failed to read {}", self.metadata_path.display()))?;
207        let meta: StoreMetadata = serde_yaml::from_str(&content)
208            .with_context(|| format!("Failed to parse {}", self.metadata_path.display()))?;
209        Ok(meta)
210    }
211
212    /// Save metadata to the metadata.yaml file.
213    fn save_metadata(&self, meta: &StoreMetadata) -> Result<()> {
214        let content = serde_yaml::to_string(meta)?;
215        std::fs::write(&self.metadata_path, content)
216            .with_context(|| format!("Failed to write {}", self.metadata_path.display()))?;
217        Ok(())
218    }
219
220    /// Convert metadata + requirements into a full RequirementsStore.
221    fn assemble_store(
222        &self,
223        meta: StoreMetadata,
224        requirements: Vec<Requirement>,
225    ) -> RequirementsStore {
226        RequirementsStore {
227            name: meta.name,
228            title: meta.title,
229            description: meta.description,
230            requirements,
231            users: meta.users,
232            teams: meta.teams,
233            id_config: meta.id_config,
234            features: meta.features,
235            next_feature_number: meta.next_feature_number,
236            next_spec_number: meta.next_spec_number,
237            prefix_counters: meta.prefix_counters,
238            relationship_definitions: meta.relationship_definitions,
239            reaction_definitions: meta.reaction_definitions,
240            meta_counters: meta.meta_counters,
241            type_definitions: meta.type_definitions,
242            allowed_prefixes: meta.allowed_prefixes,
243            restrict_prefixes: meta.restrict_prefixes,
244            ai_prompts: meta.ai_prompts,
245            baselines: meta.baselines,
246            store_version: 0,
247            migrated_to: None,
248            dispenser: self.dispenser.clone(),
249        }
250    }
251
252    /// Extract metadata from a RequirementsStore (everything except requirements).
253    fn extract_metadata(store: &RequirementsStore) -> StoreMetadata {
254        StoreMetadata {
255            name: store.name.clone(),
256            title: store.title.clone(),
257            description: store.description.clone(),
258            users: store.users.clone(),
259            teams: store.teams.clone(),
260            id_config: store.id_config.clone(),
261            features: store.features.clone(),
262            next_feature_number: store.next_feature_number,
263            next_spec_number: store.next_spec_number,
264            prefix_counters: store.prefix_counters.clone(),
265            relationship_definitions: store.relationship_definitions.clone(),
266            reaction_definitions: store.reaction_definitions.clone(),
267            meta_counters: store.meta_counters.clone(),
268            type_definitions: store.type_definitions.clone(),
269            allowed_prefixes: store.allowed_prefixes.clone(),
270            restrict_prefixes: store.restrict_prefixes,
271            ai_prompts: store.ai_prompts.clone(),
272            baselines: store.baselines.clone(),
273        }
274    }
275}
276
277impl DatabaseBackend for GitBackend {
278    fn backend_type(&self) -> BackendType {
279        BackendType::Git
280    }
281
282    fn path(&self) -> &Path {
283        &self.root
284    }
285
286    fn load(&self) -> Result<RequirementsStore> {
287        let meta = self.load_metadata()?;
288        let requirements = object_store::load_all_objects(&self.objects_root)?;
289        Ok(self.assemble_store(meta, requirements))
290    }
291
292    fn save(&self, store: &RequirementsStore) -> Result<()> {
293        // Save metadata
294        let meta = Self::extract_metadata(store);
295        self.save_metadata(&meta)?;
296
297        // Collect existing object files for deletion tracking
298        let existing = object_store::list_objects(&self.objects_root)?;
299        let existing_specs: std::collections::HashSet<String> =
300            existing.iter().map(|(s, _)| s.clone()).collect();
301
302        // Track which specs are in the current store
303        let mut current_specs = std::collections::HashSet::new();
304
305        // Write each requirement
306        for req in &store.requirements {
307            if let Some(ref spec_id) = req.spec_id {
308                current_specs.insert(spec_id.clone());
309                object_store::write_object(&self.objects_root, req)?;
310            }
311        }
312
313        // Delete object files that are no longer in the store
314        for spec_id in &existing_specs {
315            if !current_specs.contains(spec_id) {
316                let _ = object_store::delete_object(&self.objects_root, spec_id);
317            }
318        }
319
320        self.auto_commit("chore: update requirements store");
321        Ok(())
322    }
323
324    // Override individual CRUD for efficiency — don't reload everything each time
325
326    fn get_requirement_by_spec_id(&self, spec_id: &str) -> Result<Option<Requirement>> {
327        // Try direct lookup by spec_id (file name)
328        if let Ok(req) = object_store::read_object(&self.objects_root, spec_id) {
329            return Ok(Some(req));
330        }
331        // Fall back to scanning for agreed_id match
332        let files = object_store::list_objects(&self.objects_root)?;
333        for (_name, path) in &files {
334            if let Ok(req) = object_store::read_object_from_path(path) {
335                if req.agreed_id.as_deref() == Some(spec_id) {
336                    return Ok(Some(req));
337                }
338            }
339        }
340        Ok(None)
341    }
342
343    fn get_requirement(&self, id: &uuid::Uuid) -> Result<Option<Requirement>> {
344        object_store::find_by_uuid(&self.objects_root, id)
345    }
346
347    fn update_requirement(&self, requirement: &Requirement) -> Result<()> {
348        let spec_id = requirement.spec_id.as_deref()
349            .ok_or_else(|| anyhow::anyhow!("Cannot update requirement without spec_id in git backend"))?;
350
351        // Record ops for changed fields (compare with existing if possible)
352        if let Ok(old) = object_store::read_object(&self.objects_root, spec_id) {
353            if old.title != requirement.title {
354                self.record_op(requirement.id, crate::oplog::OpKind::SetTitle {
355                    title: requirement.title.clone(),
356                });
357            }
358            if old.effective_status() != requirement.effective_status() {
359                self.record_op(requirement.id, crate::oplog::OpKind::SetStatus {
360                    status: requirement.effective_status(),
361                });
362            }
363            if old.effective_priority() != requirement.effective_priority() {
364                self.record_op(requirement.id, crate::oplog::OpKind::SetPriority {
365                    priority: requirement.effective_priority(),
366                });
367            }
368            if old.owner != requirement.owner {
369                self.record_op(requirement.id, crate::oplog::OpKind::SetOwner {
370                    owner: requirement.owner.clone(),
371                });
372            }
373            if old.description != requirement.description {
374                self.record_op(requirement.id, crate::oplog::OpKind::SetDescription {
375                    description: requirement.description.clone(),
376                });
377            }
378        }
379
380        object_store::write_object(&self.objects_root, requirement)?;
381        self.auto_commit(&format!("update {}", spec_id));
382        Ok(())
383    }
384
385    fn delete_requirement(&self, id: &uuid::Uuid) -> Result<()> {
386        if let Some(req) = object_store::find_by_uuid(&self.objects_root, id)? {
387            if let Some(ref spec_id) = req.spec_id {
388                self.record_op(*id, crate::oplog::OpKind::Archive);
389                object_store::delete_object(&self.objects_root, spec_id)?;
390                self.auto_commit(&format!("delete {}", spec_id));
391                return Ok(());
392            }
393        }
394        anyhow::bail!("Requirement not found: {}", id)
395    }
396
397    fn add_requirement(&self, requirement: Requirement) -> Result<Requirement> {
398        let mut req = requirement;
399
400        if req.spec_id.is_none() {
401            // Load metadata to get counters, assign ID, save metadata back
402            let meta = self.load_metadata()?;
403            let mut temp_store = self.assemble_store(meta, Vec::new());
404            // Generate ID using the store's configured strategy
405            let req_clone = req.clone();
406            temp_store.add_requirement_with_id(req_clone, None, None);
407            // The pushed req has the assigned spec_id
408            if let Some(last) = temp_store.requirements.last() {
409                req.spec_id = last.spec_id.clone();
410            }
411            // Persist the updated counters back to metadata
412            let updated_meta = Self::extract_metadata(&temp_store);
413            self.save_metadata(&updated_meta)?;
414        }
415
416        // Record create operation
417        self.record_op(req.id, crate::oplog::OpKind::Create {
418            title: req.title.clone(),
419            description: req.description.clone(),
420            req_type: format!("{:?}", req.req_type),
421            status: req.effective_status(),
422            priority: req.effective_priority(),
423        });
424
425        object_store::write_object(&self.objects_root, &req)?;
426        let spec_id = req.spec_id.as_deref().unwrap_or("unknown");
427        self.auto_commit(&format!("add {} — {}", spec_id, req.title));
428        Ok(req)
429    }
430
431    fn exists(&self) -> bool {
432        self.root.exists()
433    }
434}
435
436#[cfg(test)]
437mod tests {
438    use super::*;
439
440    #[test]
441    fn test_git_backend_create_and_load_empty() {
442        let dir = tempfile::tempdir().unwrap();
443        let root = dir.path().join("aida-store");
444
445        let backend = GitBackend::new(&root).unwrap();
446        let store = backend.load().unwrap();
447        assert_eq!(store.requirements.len(), 0);
448        assert!(store.name.is_empty());
449    }
450
451    #[test]
452    fn test_git_backend_save_and_load() {
453        let dir = tempfile::tempdir().unwrap();
454        let root = dir.path().join("aida-store");
455        let backend = GitBackend::new(&root).unwrap();
456
457        // Create a store with some data
458        let mut store = RequirementsStore::new();
459        store.name = "Test Project".into();
460        store.title = "My Test".into();
461
462        let mut req1 = Requirement::new("First Req".into(), "Description 1".into());
463        req1.spec_id = Some("FR-001".into());
464
465        let mut req2 = Requirement::new("Second Req".into(), "Description 2".into());
466        req2.spec_id = Some("BUG-001".into());
467
468        store.requirements.push(req1);
469        store.requirements.push(req2);
470
471        backend.save(&store).unwrap();
472
473        // Load and verify
474        let loaded = backend.load().unwrap();
475        assert_eq!(loaded.name, "Test Project");
476        assert_eq!(loaded.requirements.len(), 2);
477
478        // Verify files exist in sharded layout
479        assert!(root.join("objects/FR/000/FR-001.yaml").exists());
480        assert!(root.join("objects/BUG/000/BUG-001.yaml").exists());
481        assert!(root.join("metadata.yaml").exists());
482    }
483
484    #[test]
485    fn test_git_backend_crud() {
486        let dir = tempfile::tempdir().unwrap();
487        let root = dir.path().join("aida-store");
488        let backend = GitBackend::new(&root).unwrap();
489
490        // Add
491        let mut req = Requirement::new("CRUD Test".into(), "testing".into());
492        req.spec_id = Some("FR-042".into());
493        let added = backend.add_requirement(req.clone()).unwrap();
494        assert_eq!(added.spec_id, Some("FR-042".into()));
495
496        // Read by spec_id
497        let found = backend.get_requirement_by_spec_id("FR-042").unwrap();
498        assert!(found.is_some());
499        assert_eq!(found.unwrap().title, "CRUD Test");
500
501        // Read by UUID
502        let found = backend.get_requirement(&added.id).unwrap();
503        assert!(found.is_some());
504
505        // Update
506        let mut updated = added.clone();
507        updated.title = "Updated Title".into();
508        backend.update_requirement(&updated).unwrap();
509
510        let reloaded = backend.get_requirement_by_spec_id("FR-042").unwrap().unwrap();
511        assert_eq!(reloaded.title, "Updated Title");
512
513        // Delete
514        backend.delete_requirement(&added.id).unwrap();
515        let gone = backend.get_requirement_by_spec_id("FR-042").unwrap();
516        assert!(gone.is_none());
517    }
518
519    #[test]
520    fn test_git_backend_auto_assign_id() {
521        let dir = tempfile::tempdir().unwrap();
522        let root = dir.path().join("aida-store");
523        let backend = GitBackend::new(&root).unwrap();
524
525        // Save initial metadata
526        backend.save(&RequirementsStore::new()).unwrap();
527
528        // Add requirement without spec_id — should auto-assign
529        let req = Requirement::new("Auto ID".into(), "should get an ID".into());
530        let added = backend.add_requirement(req).unwrap();
531        assert!(added.spec_id.is_some());
532
533        // Verify it's readable
534        let spec_id = added.spec_id.as_ref().unwrap();
535        let found = backend.get_requirement_by_spec_id(spec_id).unwrap();
536        assert!(found.is_some());
537    }
538
539    #[test]
540    fn test_git_backend_delete_removes_orphan_files() {
541        let dir = tempfile::tempdir().unwrap();
542        let root = dir.path().join("aida-store");
543        let backend = GitBackend::new(&root).unwrap();
544
545        // Save with 2 requirements
546        let mut store = RequirementsStore::new();
547        let mut req1 = Requirement::new("Keep".into(), "kept".into());
548        req1.spec_id = Some("FR-001".into());
549        let mut req2 = Requirement::new("Remove".into(), "removed".into());
550        req2.spec_id = Some("FR-002".into());
551        store.requirements.push(req1);
552        store.requirements.push(req2);
553        backend.save(&store).unwrap();
554
555        assert!(root.join("objects/FR/000/FR-001.yaml").exists());
556        assert!(root.join("objects/FR/000/FR-002.yaml").exists());
557
558        // Save again with only 1 requirement — FR-002 should be deleted
559        store.requirements.retain(|r| r.spec_id.as_deref() == Some("FR-001"));
560        backend.save(&store).unwrap();
561
562        assert!(root.join("objects/FR/000/FR-001.yaml").exists());
563        assert!(!root.join("objects/FR/000/FR-002.yaml").exists());
564    }
565
566    #[test]
567    fn test_git_backend_with_dispenser() {
568        use crate::dispenser::{IdMode, MemoryDispenser};
569        use std::sync::Arc;
570
571        let dir = tempfile::tempdir().unwrap();
572        let root = dir.path().join("aida-store");
573
574        let dispenser = Arc::new(MemoryDispenser::new(IdMode::Distributed { node_id: 7 }));
575        let handle = DispenserHandle(dispenser);
576        let backend = GitBackend::new(&root).unwrap().with_dispenser(handle);
577
578        // Save initial metadata
579        backend.save(&RequirementsStore::new()).unwrap();
580
581        // The dispenser should be injected into loaded stores
582        let store = backend.load().unwrap();
583        assert!(store.dispenser.is_some());
584    }
585}