1mod bundle;
6mod commit;
7mod convergence;
8mod init;
9mod planning;
10mod repair;
11mod solo;
12mod verification;
13
14use crate::agent::{ActuatorAgent, Agent, ArchitectAgent, SpeculatorAgent, VerifierAgent};
15use crate::context_retriever::ContextRetriever;
16use crate::lsp::LspClient;
17use crate::test_runner::{self, PythonTestRunner, TestResults};
18use crate::tools::{AgentTools, ToolCall};
19use crate::types::{AgentContext, EnergyComponents, ModelTier, NodeState, SRBNNode, TaskPlan};
20use anyhow::{Context, Result};
21use perspt_core::types::{
22 EscalationCategory, EscalationReport, NodeClass, ProvisionalBranch, ProvisionalBranchState,
23 RewriteAction, RewriteRecord, SheafValidationResult, SheafValidatorClass, WorkspaceState,
24};
25use petgraph::graph::{DiGraph, NodeIndex};
26use petgraph::visit::{EdgeRef, Topo, Walker};
27use std::collections::HashMap;
28use std::path::PathBuf;
29use std::sync::atomic::{AtomicBool, Ordering};
30use std::sync::Arc;
31use std::time::Instant;
32
/// Edge payload in the task graph: a typed dependency between two nodes.
#[derive(Debug, Clone)]
pub struct Dependency {
    /// Free-form dependency label; persisted as `edge_type` in the ledger
    /// and restored verbatim on resume (see `rehydrate_session`).
    pub kind: String,
}
39
/// Outcome of a user (or automatic) approval request.
#[derive(Debug, Clone)]
pub enum ApprovalResult {
    /// Proceed with the action as proposed.
    Approved,
    /// Proceed, but substitute the user-edited value carried here.
    ApprovedWithEdit(String),
    /// Do not proceed. Also returned for correction requests, aborts,
    /// and a closed action channel.
    Rejected,
}
50
/// Terminal result of executing a single graph node.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum NodeOutcome {
    /// The node converged and counts toward completed work.
    Completed,
    /// The node did not converge and was escalated for review.
    Escalated,
}
59
/// Orchestrates an SRBN session: builds the task graph, executes nodes
/// in topological order via the agent tiers, and records all outcomes
/// in the Merkle ledger.
pub struct SRBNOrchestrator {
    /// Task dependency graph: nodes are work items, edges are typed deps.
    pub graph: DiGraph<SRBNNode, Dependency>,
    /// node_id -> graph index lookup for `add_dependency` and friends.
    node_indices: HashMap<String, NodeIndex>,
    /// Shared execution context (working dir, session id, plugins, ...).
    pub context: AgentContext,
    /// When true, approval requests are granted without asking the user.
    pub auto_approve: bool,
    /// One LSP client per language/plugin key.
    lsp_clients: HashMap<String, LspClient>,
    /// Agent tiers in fixed order: Architect, Actuator, Verifier, Speculator.
    agents: Vec<Box<dyn Agent>>,
    /// File/shell tool layer; prompts for confirmation unless auto-approve.
    tools: AgentTools,
    /// Most recent file the actuator wrote, if any.
    last_written_file: Option<PathBuf>,
    /// Monotonic document version counter (LSP-style).
    file_version: i32,
    /// Shared LLM provider handle for all agents.
    provider: std::sync::Arc<perspt_core::llm_provider::GenAIProvider>,
    // Resolved (non-optional) model ids per tier, for reporting.
    architect_model: String,
    actuator_model: String,
    verifier_model: String,
    speculator_model: String,
    // Optional fallback model ids per tier.
    architect_fallback_model: Option<String>,
    actuator_fallback_model: Option<String>,
    verifier_fallback_model: Option<String>,
    speculator_fallback_model: Option<String>,
    /// Outbound event channel to the TUI, if connected.
    event_sender: Option<perspt_core::events::channel::EventSender>,
    /// Inbound user-action channel from the TUI, if connected.
    action_receiver: Option<perspt_core::events::channel::ActionReceiver>,
    /// Append-only session ledger (in-memory in tests).
    pub ledger: crate::ledger::MerkleLedger,
    /// Last tool-call failure message, if any.
    pub last_tool_failure: Option<String>,
    // Caches of the most recent retrieval/verification artifacts, used by
    // escalation reporting and review persistence.
    last_context_provenance: Option<perspt_core::types::ContextProvenance>,
    last_formatted_context: String,
    last_verification_result: Option<perspt_core::types::VerificationResult>,
    last_applied_bundle: Option<perspt_core::types::ArtifactBundle>,
    last_repair_footprint: Option<perspt_core::RepairFootprint>,
    /// Children blocked on an unsealed interface parent.
    blocked_dependencies: Vec<perspt_core::types::BlockedDependency>,
    /// Step/revision/cost limits and usage for this session.
    budget: perspt_core::types::BudgetEnvelope,
    /// Planning strategy; derived from workspace state unless preset.
    pub planning_policy: perspt_core::PlanningPolicy,
    // Convergence tuning knobs for the energy model.
    pub stability_epsilon: f32,
    pub energy_alpha: f32,
    pub energy_beta: f32,
    pub energy_gamma: f32,
    /// Cooperative abort latch, shared via `abort_flag()`.
    abort_requested: Arc<AtomicBool>,
}
133
/// Current Unix time in whole seconds.
///
/// Returns 0 instead of panicking when the system clock reports a time
/// before the Unix epoch (the previous `unwrap()` would abort the whole
/// orchestrator on a misconfigured clock).
fn epoch_seconds() -> i64 {
    use std::time::{SystemTime, UNIX_EPOCH};
    SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map(|d| d.as_secs() as i64)
        .unwrap_or(0)
}
142
143impl SRBNOrchestrator {
144 pub fn new(working_dir: PathBuf, auto_approve: bool) -> Self {
146 Self::new_with_models(
147 working_dir,
148 auto_approve,
149 None,
150 None,
151 None,
152 None,
153 None,
154 None,
155 None,
156 None,
157 )
158 }
159
160 #[allow(clippy::too_many_arguments)]
162 pub fn new_with_models(
163 working_dir: PathBuf,
164 auto_approve: bool,
165 architect_model: Option<String>,
166 actuator_model: Option<String>,
167 verifier_model: Option<String>,
168 speculator_model: Option<String>,
169 architect_fallback_model: Option<String>,
170 actuator_fallback_model: Option<String>,
171 verifier_fallback_model: Option<String>,
172 speculator_fallback_model: Option<String>,
173 ) -> Self {
174 let context = AgentContext {
175 working_dir: working_dir.clone(),
176 auto_approve,
177 ..Default::default()
178 };
179
180 let provider = std::sync::Arc::new(
183 perspt_core::llm_provider::GenAIProvider::new().unwrap_or_else(|e| {
184 log::warn!("Failed to create GenAIProvider: {}, using default", e);
185 perspt_core::llm_provider::GenAIProvider::new().expect("GenAI must initialize")
186 }),
187 );
188
189 let tools = AgentTools::new(working_dir.clone(), !auto_approve);
191
192 let stored_architect_model = architect_model
194 .clone()
195 .unwrap_or_else(|| ModelTier::Architect.default_model().to_string());
196 let stored_actuator_model = actuator_model
197 .clone()
198 .unwrap_or_else(|| ModelTier::Actuator.default_model().to_string());
199 let stored_verifier_model = verifier_model
200 .clone()
201 .unwrap_or_else(|| ModelTier::Verifier.default_model().to_string());
202 let stored_speculator_model = speculator_model
203 .clone()
204 .unwrap_or_else(|| ModelTier::Speculator.default_model().to_string());
205
206 Self {
207 graph: DiGraph::new(),
208 node_indices: HashMap::new(),
209 context,
210 auto_approve,
211 lsp_clients: HashMap::new(),
212 agents: vec![
213 Box::new(ArchitectAgent::new(provider.clone(), architect_model)),
214 Box::new(ActuatorAgent::new(provider.clone(), actuator_model)),
215 Box::new(VerifierAgent::new(provider.clone(), verifier_model)),
216 Box::new(SpeculatorAgent::new(provider.clone(), speculator_model)),
217 ],
218 tools,
219 last_written_file: None,
220 file_version: 0,
221 provider,
222 architect_model: stored_architect_model,
223 actuator_model: stored_actuator_model,
224 verifier_model: stored_verifier_model,
225 speculator_model: stored_speculator_model,
226 architect_fallback_model,
227 actuator_fallback_model,
228 verifier_fallback_model,
229 speculator_fallback_model,
230 event_sender: None,
231 action_receiver: None,
232 #[cfg(test)]
233 ledger: crate::ledger::MerkleLedger::in_memory().expect("Failed to create test ledger"),
234 #[cfg(not(test))]
235 ledger: crate::ledger::MerkleLedger::new().expect("Failed to create ledger"),
236 last_tool_failure: None,
237 last_context_provenance: None,
238 last_formatted_context: String::new(),
239 last_verification_result: None,
240 last_applied_bundle: None,
241 last_repair_footprint: None,
242 blocked_dependencies: Vec::new(),
243 budget: perspt_core::types::BudgetEnvelope::new("pending"),
244 planning_policy: perspt_core::PlanningPolicy::default(),
245 stability_epsilon: 0.1,
246 energy_alpha: 1.0,
247 energy_beta: 0.5,
248 energy_gamma: 2.0,
249 abort_requested: Arc::new(AtomicBool::new(false)),
250 }
251 }
252
253 #[cfg(test)]
255 pub fn new_for_testing(working_dir: PathBuf) -> Self {
256 let context = AgentContext {
257 working_dir: working_dir.clone(),
258 auto_approve: true,
259 ..Default::default()
260 };
261
262 let provider = std::sync::Arc::new(
263 perspt_core::llm_provider::GenAIProvider::new().unwrap_or_else(|e| {
264 log::warn!("Failed to create GenAIProvider: {}, using default", e);
265 perspt_core::llm_provider::GenAIProvider::new().expect("GenAI must initialize")
266 }),
267 );
268
269 let tools = AgentTools::new(working_dir.clone(), false);
270
271 Self {
272 graph: DiGraph::new(),
273 node_indices: HashMap::new(),
274 context,
275 auto_approve: true,
276 lsp_clients: HashMap::new(),
277 agents: vec![
278 Box::new(ArchitectAgent::new(provider.clone(), None)),
279 Box::new(ActuatorAgent::new(provider.clone(), None)),
280 Box::new(VerifierAgent::new(provider.clone(), None)),
281 Box::new(SpeculatorAgent::new(provider.clone(), None)),
282 ],
283 tools,
284 last_written_file: None,
285 file_version: 0,
286 provider,
287 architect_model: ModelTier::Architect.default_model().to_string(),
288 actuator_model: ModelTier::Actuator.default_model().to_string(),
289 verifier_model: ModelTier::Verifier.default_model().to_string(),
290 speculator_model: ModelTier::Speculator.default_model().to_string(),
291 architect_fallback_model: None,
292 actuator_fallback_model: None,
293 verifier_fallback_model: None,
294 speculator_fallback_model: None,
295 event_sender: None,
296 action_receiver: None,
297 ledger: crate::ledger::MerkleLedger::in_memory().expect("Failed to create test ledger"),
298 last_tool_failure: None,
299 last_context_provenance: None,
300 last_formatted_context: String::new(),
301 last_verification_result: None,
302 last_applied_bundle: None,
303 last_repair_footprint: None,
304 blocked_dependencies: Vec::new(),
305 budget: perspt_core::types::BudgetEnvelope::new("test"),
306 planning_policy: perspt_core::PlanningPolicy::default(),
307 stability_epsilon: 0.1,
308 energy_alpha: 1.0,
309 energy_beta: 0.5,
310 energy_gamma: 2.0,
311 abort_requested: Arc::new(AtomicBool::new(false)),
312 }
313 }
314
315 pub fn add_node(&mut self, node: SRBNNode) -> NodeIndex {
317 let node_id = node.node_id.clone();
318 let idx = self.graph.add_node(node);
319 self.node_indices.insert(node_id, idx);
320 idx
321 }
322
323 pub fn connect_tui(
325 &mut self,
326 event_sender: perspt_core::events::channel::EventSender,
327 action_receiver: perspt_core::events::channel::ActionReceiver,
328 ) {
329 self.tools.set_event_sender(event_sender.clone());
330 self.event_sender = Some(event_sender);
331 self.action_receiver = Some(action_receiver);
332 }
333
334 pub fn abort_flag(&self) -> Arc<AtomicBool> {
336 self.abort_requested.clone()
337 }
338
    /// True once any party has requested a session abort.
    ///
    /// Relaxed ordering suffices: this is a latch polled between nodes,
    /// not a synchronization point for other data.
    fn is_abort_requested(&self) -> bool {
        self.abort_requested.load(Ordering::Relaxed)
    }
343
344 fn finalize_session(&mut self, result: &Result<()>) {
346 let status = if self.is_abort_requested() {
347 "ABORTED"
348 } else if result.is_ok() {
349 "COMPLETED"
350 } else {
351 "FAILED"
352 };
353 if let Err(e) = self.ledger.end_session(status) {
354 log::error!("Failed to finalize session as {}: {}", status, e);
355 }
356 }
357
358 pub fn set_budget(
363 &mut self,
364 max_steps: Option<u32>,
365 max_revisions: Option<u32>,
366 max_cost_usd: Option<f64>,
367 ) {
368 self.budget.max_steps = max_steps;
369 self.budget.max_revisions = max_revisions;
370 self.budget.max_cost_usd = max_cost_usd;
371 }
372
    /// Rebuild in-memory orchestration state for a previously persisted
    /// session so it can be resumed with `run_resumed`.
    ///
    /// Restores the budget envelope, node graph, dependency edges, and
    /// seal-based blocking info from the ledger, and reports provenance
    /// drift for non-terminal nodes whose context files have vanished.
    ///
    /// # Errors
    /// Fails when the snapshot cannot be loaded or contains no nodes.
    pub fn rehydrate_session(
        &mut self,
        session_id: &str,
    ) -> Result<crate::ledger::SessionSnapshot> {
        self.context.session_id = session_id.to_string();
        // Seed a provisional session record so ledger writes performed
        // during rehydration attach to the resumed session.
        self.ledger.current_session = Some(crate::ledger::SessionRecordLegacy {
            session_id: session_id.to_string(),
            task: String::new(),
            started_at: epoch_seconds(),
            ended_at: None,
            status: "RESUMING".to_string(),
            total_nodes: 0,
            completed_nodes: 0,
        });

        let snapshot = self.ledger.load_session_snapshot()?;

        // Restore persisted budget accounting, if any; a read failure is
        // silently treated as "no envelope".
        if let Ok(Some(row)) = self.ledger.get_budget_envelope() {
            self.budget = perspt_core::types::BudgetEnvelope {
                session_id: row.session_id,
                max_steps: row.max_steps.map(|v| v as u32),
                steps_used: row.steps_used as u32,
                max_revisions: row.max_revisions.map(|v| v as u32),
                revisions_used: row.revisions_used as u32,
                max_cost_usd: row.max_cost_usd,
                cost_used_usd: row.cost_used_usd,
            };
            log::info!(
                "Restored budget envelope: steps {}/{:?}, revisions {}/{:?}, cost ${:.2}/{:?}",
                self.budget.steps_used,
                self.budget.max_steps,
                self.budget.revisions_used,
                self.budget.max_revisions,
                self.budget.cost_used_usd,
                self.budget.max_cost_usd,
            );
        }

        if snapshot.node_details.is_empty() {
            anyhow::bail!(
                "Session {} has no persisted nodes — cannot resume",
                session_id
            );
        }

        // Count edges referencing nodes missing from the snapshot; the
        // edge-restore loop below skips them, so only warn here.
        let node_ids: std::collections::HashSet<&str> = snapshot
            .node_details
            .iter()
            .map(|d| d.record.node_id.as_str())
            .collect();
        let orphaned_edges = snapshot
            .graph_edges
            .iter()
            .filter(|e| {
                !node_ids.contains(e.parent_node_id.as_str())
                    || !node_ids.contains(e.child_node_id.as_str())
            })
            .count();
        if orphaned_edges > 0 {
            log::warn!(
                "Session {} has {} orphaned edge(s) referencing unknown nodes — \
                 edges will be dropped during resume",
                session_id,
                orphaned_edges
            );
            self.emit_log(format!(
                "⚠️ Resume: dropping {} orphaned graph edge(s)",
                orphaned_edges
            ));
        }

        let mut node_map: HashMap<String, NodeIndex> = HashMap::new();

        // Recreate each node. The tier is a placeholder (Actuator): the
        // persisted record does not retain the original tier.
        for detail in &snapshot.node_details {
            let rec = &detail.record;

            let state = parse_node_state(&rec.state);
            let node_class = rec
                .node_class
                .as_deref()
                .map(parse_node_class)
                .unwrap_or_default();

            let mut node = SRBNNode::new(
                rec.node_id.clone(),
                rec.goal.clone().unwrap_or_default(),
                ModelTier::Actuator,
            );
            node.state = state;
            node.node_class = node_class;
            node.owner_plugin = rec.owner_plugin.clone().unwrap_or_default();
            node.parent_id = rec.parent_id.clone();
            // Children are stored as a JSON array string; unparsable data
            // degrades to an empty list.
            node.children = rec
                .children
                .as_deref()
                .and_then(|s| serde_json::from_str::<Vec<String>>(s).ok())
                .unwrap_or_default();
            node.monitor.attempt_count = rec.attempt_count as usize;

            // Only the most recent energy sample is carried forward.
            if let Some(last_energy) = detail.energy_history.last() {
                node.monitor.energy_history.push(last_energy.v_total);
            }

            // Restore the interface seal only when the stored hash has
            // the expected 32-byte length.
            if let Some(seal) = detail.interface_seals.last() {
                if seal.seal_hash.len() == 32 {
                    let mut hash = [0u8; 32];
                    hash.copy_from_slice(&seal.seal_hash);
                    node.interface_seal_hash = Some(hash);
                }
            }

            let idx = self.add_node(node);
            node_map.insert(rec.node_id.clone(), idx);
        }

        // Restore dependency edges; edges touching unknown nodes are
        // dropped (warned about above).
        for edge in &snapshot.graph_edges {
            if let (Some(&from_idx), Some(&to_idx)) = (
                node_map.get(&edge.parent_node_id),
                node_map.get(&edge.child_node_id),
            ) {
                self.graph.add_edge(
                    from_idx,
                    to_idx,
                    Dependency {
                        kind: edge.edge_type.clone(),
                    },
                );
            }
        }

        // Re-derive seal blocking: a child is blocked when any Interface
        // parent is unsealed and not yet terminal.
        for (child_id, &child_idx) in &node_map {
            let parents: Vec<NodeIndex> = self
                .graph
                .neighbors_directed(child_idx, petgraph::Direction::Incoming)
                .collect();

            for parent_idx in parents {
                let parent = &self.graph[parent_idx];
                if parent.node_class == NodeClass::Interface
                    && parent.interface_seal_hash.is_none()
                    && !parent.state.is_terminal()
                {
                    self.blocked_dependencies
                        .push(perspt_core::types::BlockedDependency {
                            child_node_id: child_id.clone(),
                            parent_node_id: parent.node_id.clone(),
                            required_seal_paths: Vec::new(),
                            blocked_at: epoch_seconds(),
                        });
                }
            }
        }

        let terminal = snapshot
            .node_details
            .iter()
            .filter(|d| {
                let s = parse_node_state(&d.record.state);
                s.is_terminal()
            })
            .count();
        let resumable = snapshot.node_details.len() - terminal;

        log::info!(
            "Rehydrated session {}: {} nodes ({} terminal, {} resumable), {} edges",
            session_id,
            snapshot.node_details.len(),
            terminal,
            resumable,
            snapshot.graph_edges.len()
        );

        if let Some(ref mut sess) = self.ledger.current_session {
            sess.total_nodes = snapshot.node_details.len();
            sess.completed_nodes = terminal;
            sess.status = "RUNNING".to_string();
        }

        // Warn (but do not fail) when files referenced by a resumable
        // node's persisted context no longer exist on disk.
        for detail in &snapshot.node_details {
            let state = parse_node_state(&detail.record.state);
            if state.is_terminal() {
                continue;
            }

            if let Some(ref prov) = detail.context_provenance {
                let retriever = ContextRetriever::new(self.context.working_dir.clone());
                let drift = retriever.validate_provenance_record(prov);
                if !drift.is_empty() {
                    log::warn!(
                        "Provenance drift for node '{}': {} file(s) missing: {}",
                        detail.record.node_id,
                        drift.len(),
                        drift.join(", ")
                    );
                    self.emit_log(format!(
                        "⚠️ Provenance drift: node '{}' has {} missing file(s)",
                        detail.record.node_id,
                        drift.len()
                    ));
                    self.emit_event(perspt_core::AgentEvent::ProvenanceDrift {
                        node_id: detail.record.node_id.clone(),
                        missing_files: drift,
                        reason: "Files referenced in persisted context no longer exist".to_string(),
                    });
                }
            }
        }

        Ok(snapshot)
    }
611
612 pub async fn run_resumed(&mut self) -> Result<()> {
619 let result = self.run_resumed_inner().await;
620 self.finalize_session(&result);
621 result
622 }
623
    /// Walk the rehydrated graph in topological order, executing only
    /// nodes that are neither terminal nor blocked on an interface seal.
    async fn run_resumed_inner(&mut self) -> Result<()> {
        // Snapshot the topological order up front; execution mutates node
        // weights but not the graph structure.
        let topo = Topo::new(&self.graph);
        let indices: Vec<_> = topo.iter(&self.graph).collect();
        let total_nodes = indices.len();
        let mut executed = 0;
        let mut escalated: usize = 0;

        // These counts feed only the resume summary log line below.
        let terminal_count = indices
            .iter()
            .filter(|i| self.graph[**i].state.is_terminal())
            .count();
        let blocked_count = indices
            .iter()
            .filter(|i| !self.graph[**i].state.is_terminal() && self.check_seal_prerequisites(**i))
            .count();
        let resumable_count = total_nodes - terminal_count - blocked_count;
        self.emit_log(format!(
            "📊 Differential resume: {} total, {} skipped (terminal), {} blocked (seal), {} to execute",
            total_nodes, terminal_count, blocked_count, resumable_count
        ));

        for (i, idx) in indices.iter().enumerate() {
            // Abort is honored between nodes, never mid-node.
            if self.is_abort_requested() {
                self.emit_log("⚠️ Session aborted — stopping resumed execution".to_string());
                break;
            }

            let node = &self.graph[*idx];

            // Terminal nodes were completed in a previous run.
            if node.state.is_terminal() {
                log::debug!("Skipping terminal node {} ({:?})", node.node_id, node.state);
                continue;
            }

            // `check_seal_prerequisites` returning true means the node is
            // still blocked by an unsealed interface parent.
            if self.check_seal_prerequisites(*idx) {
                log::warn!(
                    "Node {} blocked on seal prerequisite — skipping",
                    self.graph[*idx].node_id
                );
                continue;
            }

            // Fresh borrow for the event emission below.
            let node = &self.graph[*idx];
            self.emit_log(format!(
                "📝 [resume {}/{}] {}",
                i + 1,
                total_nodes,
                node.goal
            ));
            self.emit_event(perspt_core::AgentEvent::NodeSelected {
                node_id: node.node_id.clone(),
                goal: node.goal.clone(),
                node_class: node.node_class.to_string(),
            });
            self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
                node_id: node.node_id.clone(),
                status: perspt_core::NodeStatus::Running,
            });

            match self.execute_node(*idx).await {
                Ok(NodeOutcome::Completed) => {
                    if let Some(node) = self.graph.node_weight(*idx) {
                        self.emit_event(perspt_core::AgentEvent::NodeCompleted {
                            node_id: node.node_id.clone(),
                            goal: node.goal.clone(),
                        });
                    }
                    executed += 1;
                }
                Ok(NodeOutcome::Escalated) => {
                    // Escalation handling already happened in execute_node.
                    escalated += 1;
                    continue;
                }
                Err(e) => {
                    // Hard failure: mark the node escalated and keep going
                    // so independent branches can still make progress.
                    escalated += 1;
                    let node_id = self.graph[*idx].node_id.clone();
                    log::error!("Node {} failed on resume: {}", node_id, e);
                    self.emit_log(format!("❌ Node {} failed: {}", node_id, e));
                    self.graph[*idx].state = NodeState::Escalated;
                    self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
                        node_id,
                        status: perspt_core::NodeStatus::Escalated,
                    });
                    continue;
                }
            }
        }

        log::info!(
            "Resumed execution completed: {} of {} nodes executed",
            executed,
            total_nodes
        );
        self.emit_event(perspt_core::AgentEvent::Complete {
            success: escalated == 0,
            message: format!(
                "Resumed: {}/{} completed, {} escalated",
                executed, total_nodes, escalated
            ),
        });
        Ok(())
    }
731
732 fn emit_event(&self, event: perspt_core::AgentEvent) {
734 if let Some(ref sender) = self.event_sender {
735 let _ = sender.send(event);
736 }
737 }
738
739 fn emit_log(&self, msg: impl Into<String>) {
741 self.emit_event(perspt_core::AgentEvent::Log(msg.into()));
742 }
743
    /// Request user approval for an action without associating the
    /// decision with a specific node (no review record is keyed to one).
    async fn await_approval(
        &mut self,
        action_type: perspt_core::ActionType,
        description: String,
        diff: Option<String>,
    ) -> ApprovalResult {
        self.await_approval_for_node(action_type, description, diff, None)
            .await
    }
756
    /// Request user approval for an action, optionally tying the decision
    /// to `review_node_id` so it is persisted against that node.
    ///
    /// Short-circuits to `Approved` when auto-approve is on or no TUI
    /// action channel is connected. Otherwise blocks until an action
    /// matching this request's id arrives; stale actions for other
    /// requests are ignored. An `Abort` action also latches the
    /// session-wide abort flag. If the channel closes without a decision,
    /// the fail-safe answer is `Rejected`.
    async fn await_approval_for_node(
        &mut self,
        action_type: perspt_core::ActionType,
        description: String,
        diff: Option<String>,
        review_node_id: Option<&str>,
    ) -> ApprovalResult {
        if self.auto_approve {
            if let Some(nid) = review_node_id {
                self.persist_review_decision(nid, "auto_approved", None);
            }
            return ApprovalResult::Approved;
        }

        // Headless mode (no TUI connected): treat as auto-approved.
        if self.action_receiver.is_none() {
            if let Some(nid) = review_node_id {
                self.persist_review_decision(nid, "auto_approved", None);
            }
            return ApprovalResult::Approved;
        }

        // Unique id lets us discard responses to earlier requests.
        let request_id = uuid::Uuid::new_v4().to_string();

        self.emit_event(perspt_core::AgentEvent::ApprovalRequest {
            request_id: request_id.clone(),
            node_id: review_node_id.unwrap_or("current").to_string(),
            action_type,
            description,
            diff,
        });

        if let Some(ref mut receiver) = self.action_receiver {
            while let Some(action) = receiver.recv().await {
                match action {
                    perspt_core::AgentAction::Approve { request_id: rid } if rid == request_id => {
                        self.emit_log("✓ Approved by user");
                        if let Some(nid) = review_node_id {
                            self.persist_review_decision(nid, "approved", None);
                        }
                        return ApprovalResult::Approved;
                    }
                    perspt_core::AgentAction::ApproveWithEdit {
                        request_id: rid,
                        edited_value,
                    } if rid == request_id => {
                        self.emit_log(format!("✓ Approved with edit: {}", edited_value));
                        if let Some(nid) = review_node_id {
                            self.persist_review_decision(nid, "approved_with_edit", None);
                        }
                        return ApprovalResult::ApprovedWithEdit(edited_value);
                    }
                    perspt_core::AgentAction::Reject {
                        request_id: rid,
                        reason,
                    } if rid == request_id => {
                        let msg = reason.unwrap_or_else(|| "User rejected".to_string());
                        self.emit_log(format!("✗ Rejected: {}", msg));
                        if let Some(nid) = review_node_id {
                            self.persist_review_decision(nid, "rejected", Some(&msg));
                        }
                        return ApprovalResult::Rejected;
                    }
                    // A correction request is surfaced to callers as a
                    // rejection; the feedback is only persisted.
                    perspt_core::AgentAction::RequestCorrection {
                        request_id: rid,
                        feedback,
                    } if rid == request_id => {
                        self.emit_log(format!("🔄 Correction requested: {}", feedback));
                        if let Some(nid) = review_node_id {
                            self.persist_review_decision(
                                nid,
                                "correction_requested",
                                Some(&feedback),
                            );
                        }
                        return ApprovalResult::Rejected;
                    }
                    perspt_core::AgentAction::Abort => {
                        self.emit_log("⚠️ Session aborted by user");
                        self.abort_requested.store(true, Ordering::Relaxed);
                        if let Some(nid) = review_node_id {
                            self.persist_review_decision(nid, "aborted", None);
                        }
                        return ApprovalResult::Rejected;
                    }
                    _ => {
                        // Action for another request (or unrelated): keep
                        // waiting for ours.
                        continue;
                    }
                }
            }
        }

        // Channel closed without a decision: fail safe by rejecting.
        ApprovalResult::Rejected
    }
857
858 fn persist_review_decision(&self, node_id: &str, outcome: &str, note: Option<&str>) {
860 let degraded = self.last_verification_result.as_ref().map(|vr| vr.degraded);
861 if let Err(e) = self
862 .ledger
863 .record_review_outcome(node_id, outcome, note, None, degraded, None)
864 {
865 log::warn!("Failed to persist review decision for {}: {}", node_id, e);
866 }
867 }
868
869 pub fn add_dependency(&mut self, from_id: &str, to_id: &str, kind: &str) -> Result<()> {
871 let from_idx = self
872 .node_indices
873 .get(from_id)
874 .context(format!("Node not found: {}", from_id))?;
875 let to_idx = self
876 .node_indices
877 .get(to_id)
878 .context(format!("Node not found: {}", to_id))?;
879
880 self.graph.add_edge(
881 *from_idx,
882 *to_idx,
883 Dependency {
884 kind: kind.to_string(),
885 },
886 );
887 Ok(())
888 }
889
890 pub async fn run(&mut self, task: String) -> Result<()> {
892 log::info!("Starting SRBN execution for task: {}", task);
893 self.emit_log(format!("🚀 Starting task: {}", task));
894
895 let session_id = uuid::Uuid::new_v4().to_string();
897 self.context.session_id = session_id.clone();
898 self.ledger.start_session(
899 &session_id,
900 &task,
901 &self.context.working_dir.to_string_lossy(),
902 )?;
903
904 let result = self.run_orchestration(task).await;
906 self.finalize_session(&result);
907 result
908 }
909
    /// Full orchestration pipeline for a fresh task: execution-mode
    /// detection, workspace classification, project init, LSP startup,
    /// planning-policy selection, charter registration, graph planning,
    /// and topological execution under the budget envelope.
    async fn run_orchestration(&mut self, task: String) -> Result<()> {
        if self.context.log_llm {
            self.emit_log("📝 LLM request logging enabled".to_string());
        }

        // Explicit single-file tasks bypass the graph pipeline entirely.
        let execution_mode = self.detect_execution_mode(&task);
        self.context.execution_mode = execution_mode;
        self.emit_log(format!("🎯 Execution mode: {}", execution_mode));

        if execution_mode == perspt_core::types::ExecutionMode::Solo {
            log::info!("Using Solo Mode for explicit single-file task");
            self.emit_log("⚡ Solo Mode: Single-file execution".to_string());
            return self.run_solo_mode(task).await;
        }

        let workspace_state = self.classify_workspace(&task);
        self.context.workspace_state = workspace_state.clone();
        self.emit_log(format!("📋 Workspace: {}", workspace_state));

        // Existing projects surface their language plugins immediately.
        if let WorkspaceState::ExistingProject { ref plugins } = workspace_state {
            self.context.active_plugins = plugins.clone();
            self.emit_log(format!("🔌 Detected plugins: {}", plugins.join(", ")));
            self.emit_plugin_readiness();
        }

        self.step_init_project(&task).await?;

        // Greenfield/ambiguous workspaces may only reveal their language
        // after init has scaffolded files.
        if !matches!(workspace_state, WorkspaceState::ExistingProject { .. }) {
            self.redetect_plugins_after_init();
        }

        self.check_verifier_readiness_gate();

        // Start language servers for active plugins; LSP is best-effort
        // and failures never stop the run.
        {
            let plugin_refs: Vec<String> = self.context.active_plugins.clone();
            let refs: Vec<&str> = plugin_refs.iter().map(|s| s.as_str()).collect();
            if !refs.is_empty() {
                self.emit_log("🔍 Starting language servers...".to_string());
                if let Err(e) = self.start_lsp_for_plugins(&refs).await {
                    log::warn!("Failed to start LSP: {}", e);
                    self.emit_log("⚠️ Continuing without LSP".to_string());
                } else {
                    self.emit_log("✅ Language servers ready".to_string());
                }
            }
        }

        // Derive the planning policy from workspace state, but only when
        // the caller left the policy at its default value.
        if self.planning_policy == perspt_core::PlanningPolicy::default() {
            self.planning_policy = match &self.context.workspace_state {
                WorkspaceState::Greenfield { .. } => perspt_core::PlanningPolicy::GreenfieldBuild,
                WorkspaceState::ExistingProject { .. } => {
                    perspt_core::PlanningPolicy::FeatureIncrement
                }
                WorkspaceState::Ambiguous => perspt_core::PlanningPolicy::FeatureIncrement,
            };
        }

        // Register a default FeatureCharter (scope limits scaled by
        // policy) unless the ledger already has one.
        if self.ledger.get_feature_charter().ok().flatten().is_none() {
            let mut charter = perspt_core::FeatureCharter::new(&self.context.session_id, &task);
            match self.planning_policy {
                perspt_core::PlanningPolicy::LocalEdit => {
                    charter.max_modules = Some(1);
                    charter.max_files = Some(5);
                    charter.max_revisions = Some(3);
                }
                perspt_core::PlanningPolicy::FeatureIncrement => {
                    charter.max_modules = Some(10);
                    charter.max_files = Some(30);
                    charter.max_revisions = Some(5);
                }
                perspt_core::PlanningPolicy::LargeFeature
                | perspt_core::PlanningPolicy::GreenfieldBuild
                | perspt_core::PlanningPolicy::ArchitecturalRevision => {
                    charter.max_modules = Some(25);
                    charter.max_files = Some(80);
                    charter.max_revisions = Some(10);
                }
            }
            // NOTE(review): `ref` on the `Option<&String>` from `.first()`
            // yields a double reference; harmless since `to_string` derefs,
            // but clippy would flag the redundant `ref`.
            if let Some(ref lang) = self.context.active_plugins.first() {
                charter.language_constraint = Some(lang.to_string());
            }
            if let Err(e) = self.ledger.record_feature_charter(&charter) {
                log::warn!("Failed to persist default FeatureCharter: {}", e);
            } else {
                log::info!(
                    "Registered default FeatureCharter (max_modules={:?}, max_files={:?})",
                    charter.max_modules,
                    charter.max_files
                );
            }
        }

        // Plan the graph: architect-driven sheafification, or a single
        // deterministic node for policies that skip the architect.
        if self.planning_policy.needs_architect() {
            self.step_sheafify(task).await?;
        } else {
            self.emit_log("📐 LocalEdit policy — skipping architect, single-node plan".to_string());
            self.create_deterministic_fallback_graph(&task)?;
        }

        self.emit_log(format!("📐 Planning policy: {:?}", self.planning_policy));

        let node_count = self.graph.node_count();
        self.emit_event(perspt_core::AgentEvent::PlanReady {
            nodes: node_count,
            plugins: self.context.active_plugins.clone(),
            execution_mode: execution_mode.to_string(),
        });

        // Announce every planned node as Pending before execution starts.
        for node_id in self.node_indices.keys() {
            if let Some(idx) = self.node_indices.get(node_id) {
                if let Some(node) = self.graph.node_weight(*idx) {
                    self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
                        node_id: node.node_id.clone(),
                        status: perspt_core::NodeStatus::Pending,
                    });
                }
            }
        }

        let topo = Topo::new(&self.graph);
        let indices: Vec<_> = topo.iter(&self.graph).collect();
        let total_nodes = indices.len();
        let mut completed_count: usize = 0;
        let mut escalated_count: usize = 0;

        for (i, idx) in indices.iter().enumerate() {
            // Abort is honored between nodes, never mid-node.
            if self.is_abort_requested() {
                self.emit_log("⚠️ Session aborted — stopping execution".to_string());
                break;
            }

            // Budget exhaustion on any dimension stops the whole run.
            if self.budget.any_exhausted() {
                let node_id = self.graph[*idx].node_id.clone();
                self.emit_log(format!(
                    "⛔ Budget exhausted — skipping node '{}' and remaining nodes",
                    node_id
                ));
                self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
                    node_id,
                    status: perspt_core::NodeStatus::Escalated,
                });
                break;
            }

            // `check_seal_prerequisites` returning true means the node is
            // still blocked by an unsealed interface parent.
            if self.check_seal_prerequisites(*idx) {
                log::warn!(
                    "Node {} blocked on seal prerequisite — skipping in this iteration",
                    self.graph[*idx].node_id
                );
                continue;
            }

            if let Some(node) = self.graph.node_weight(*idx) {
                self.emit_log(format!("📝 [{}/{}] {}", i + 1, total_nodes, node.goal));
                self.emit_event(perspt_core::AgentEvent::NodeSelected {
                    node_id: node.node_id.clone(),
                    goal: node.goal.clone(),
                    node_class: node.node_class.to_string(),
                });
                self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
                    node_id: node.node_id.clone(),
                    status: perspt_core::NodeStatus::Running,
                });
            }

            match self.execute_node(*idx).await {
                Ok(NodeOutcome::Completed) => {
                    completed_count += 1;

                    self.budget.record_step();

                    self.emit_event(perspt_core::AgentEvent::BudgetUpdated {
                        steps_used: self.budget.steps_used,
                        max_steps: self.budget.max_steps,
                        cost_used_usd: self.budget.cost_used_usd,
                        max_cost_usd: self.budget.max_cost_usd,
                        revisions_used: self.budget.revisions_used,
                        max_revisions: self.budget.max_revisions,
                    });

                    // Budget persistence is best-effort.
                    if let Err(e) = self.ledger.upsert_budget_envelope(&self.budget) {
                        log::warn!("Failed to persist budget envelope: {}", e);
                    }

                    if let Some(node) = self.graph.node_weight(*idx) {
                        self.emit_event(perspt_core::AgentEvent::NodeCompleted {
                            node_id: node.node_id.clone(),
                            goal: node.goal.clone(),
                        });
                    }
                }
                Ok(NodeOutcome::Escalated) => {
                    // Escalations still consume a budget step.
                    escalated_count += 1;
                    self.budget.record_step();

                    if let Some(node) = self.graph.node_weight(*idx) {
                        self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
                            node_id: node.node_id.clone(),
                            status: perspt_core::NodeStatus::Escalated,
                        });
                    }
                    continue;
                }
                Err(e) => {
                    escalated_count += 1;
                    let node_id = self.graph[*idx].node_id.clone();
                    eprintln!("[SRBN-DIAG] Node {} failed: {:#}", node_id, e);
                    log::error!("Node {} failed: {}", node_id, e);
                    self.emit_log(format!("❌ Node {} failed: {}", node_id, e));

                    // Discard this node's provisional branch and those of
                    // its descendants before marking it escalated.
                    if let Some(bid) = self.graph[*idx].provisional_branch_id.clone() {
                        self.flush_provisional_branch(&bid, &node_id);
                    }
                    self.flush_descendant_branches(*idx);

                    self.graph[*idx].state = NodeState::Escalated;
                    self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
                        node_id: node_id.clone(),
                        status: perspt_core::NodeStatus::Escalated,
                    });
                    continue;
                }
            }
        }

        log::info!("SRBN execution completed");

        // Session sandboxes are scratch workspaces; cleanup is best-effort.
        if let Err(e) = crate::tools::cleanup_session_sandboxes(
            &self.context.working_dir,
            &self.context.session_id,
        ) {
            log::warn!("Failed to clean up session sandboxes: {}", e);
        }

        let outcome = if escalated_count == 0 {
            perspt_core::SessionOutcome::Success
        } else if completed_count > 0 {
            perspt_core::SessionOutcome::PartialSuccess
        } else {
            perspt_core::SessionOutcome::Failed
        };
        self.emit_event(perspt_core::AgentEvent::Complete {
            success: outcome == perspt_core::SessionOutcome::Success,
            message: format!(
                "{}/{} nodes completed, {} escalated",
                completed_count, total_nodes, escalated_count
            ),
        });
        Ok(())
    }
1205
    /// Run the full pipeline for a single SRBN node: speculation → verification
    /// → convergence check → sheaf validation → commit.
    ///
    /// On non-convergence the node is classified, an `EscalationReport` is
    /// persisted, its provisional branch (and all descendant branches) are
    /// flushed, and a repair action is attempted; the function then returns
    /// `Ok(NodeOutcome::Escalated)`. On success the provisional branch is
    /// merged and `Ok(NodeOutcome::Completed)` is returned.
    ///
    /// Errors from the individual pipeline steps (`step_speculate`,
    /// `step_verify`, `step_sheaf_validate`, `step_commit`) propagate via `?`.
    async fn execute_node(&mut self, idx: NodeIndex) -> Result<NodeOutcome> {
        let node = &self.graph[idx];
        log::info!("Executing node: {} ({})", node.node_id, node.goal);

        // Isolate this node's work in a provisional branch/sandbox.
        let branch_id = self.maybe_create_provisional_branch(idx);

        self.graph[idx].state = NodeState::Coding;
        self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
            node_id: self.graph[idx].node_id.clone(),
            status: perspt_core::NodeStatus::Coding,
        });

        // Generate the implementation (may apply an artifact bundle).
        self.step_speculate(idx).await?;

        // Verify and compute the resulting energy for the convergence check.
        let energy = self.step_verify(idx).await?;

        if !self.step_converge(idx, energy).await? {
            // Non-convergence: classify the failure and pick a repair action.
            let category = self.classify_non_convergence(idx);
            let action = self.choose_repair_action(idx, &category);

            let node = &self.graph[idx];
            let report = EscalationReport {
                node_id: node.node_id.clone(),
                session_id: self.context.session_id.clone(),
                category,
                action: action.clone(),
                energy_snapshot: EnergyComponents {
                    v_syn: node.monitor.current_energy(),
                    ..Default::default()
                },
                stage_outcomes: self
                    .last_verification_result
                    .as_ref()
                    .map(|vr| vr.stage_outcomes.clone())
                    .unwrap_or_default(),
                evidence: self.build_escalation_evidence(idx),
                affected_node_ids: self.affected_dependents(idx),
                timestamp: epoch_seconds(),
            };

            // Persistence failures are logged but never abort escalation.
            if let Err(e) = self.ledger.record_escalation_report(&report) {
                log::warn!("Failed to persist escalation report: {}", e);
            }

            // Keep the last applied bundle on record for post-mortem analysis.
            if let Some(bundle) = self.last_applied_bundle.take() {
                if let Err(e) = self
                    .ledger
                    .record_artifact_bundle(&self.graph[idx].node_id, &bundle)
                {
                    log::warn!(
                        "Failed to persist artifact bundle on escalation for {}: {}",
                        self.graph[idx].node_id,
                        e
                    );
                }
            }

            self.emit_event(perspt_core::AgentEvent::EscalationClassified {
                node_id: report.node_id.clone(),
                category: report.category.to_string(),
                action: report.action.to_string(),
            });

            // Discard this node's sandbox and any work derived from it.
            let node_id_for_flush = self.graph[idx].node_id.clone();
            if let Some(ref bid) = branch_id {
                self.flush_provisional_branch(bid, &node_id_for_flush);
            }
            self.flush_descendant_branches(idx);

            let applied = self.apply_repair_action(idx, &action).await;

            // If no automated repair applied, the node is handed to the user.
            if !applied {
                self.graph[idx].state = NodeState::Escalated;
                self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
                    node_id: self.graph[idx].node_id.clone(),
                    status: perspt_core::NodeStatus::Escalated,
                });
                log::warn!(
                    "Node {} escalated to user: {} → {}",
                    self.graph[idx].node_id,
                    category,
                    action
                );
            }

            return Ok(NodeOutcome::Escalated);
        }

        self.step_sheaf_validate(idx).await?;

        self.step_commit(idx).await?;

        // Converged and committed: promote the provisional branch.
        if let Some(ref bid) = branch_id {
            self.merge_provisional_branch(bid, idx);
        }

        Ok(NodeOutcome::Completed)
    }
1318
    /// Speculation step: assemble a restriction-mapped context package, build
    /// the actuator prompt (optionally enriched with speculator lookahead
    /// hints and the project tree), call the LLM, and apply whatever it
    /// proposes — either a single shell command or an artifact bundle.
    ///
    /// Escalates the node (returning `Err`) when owned context files cannot
    /// be read or when required structural dependencies only have prose
    /// summaries. All proposed commands and bundle writes go through the
    /// approval gate before execution.
    async fn step_speculate(&mut self, idx: NodeIndex) -> Result<()> {
        log::info!("Step 3: Speculation - Generating implementation");

        // --- Context assembly (PSP-5 restriction map) ---
        let retriever = ContextRetriever::new(self.effective_working_dir(idx))
            .with_max_file_bytes(8 * 1024)
            .with_max_context_bytes(100 * 1024);
        let node = &self.graph[idx];
        let mut restriction_map =
            retriever.build_restriction_map(node, &self.context.ownership_manifest);

        self.inject_sealed_interfaces(idx, &mut restriction_map);

        // `node` is re-borrowed after each `&mut self` call above.
        let node = &self.graph[idx];
        let context_package = retriever.assemble_context_package(node, &restriction_map);
        let formatted_context = retriever.format_context_package(&context_package);

        let node = &self.graph[idx];
        // Owned files that are neither in the package nor an output target of
        // this node — these were expected but could not be read.
        let missing_owned: Vec<String> = restriction_map
            .owned_files
            .iter()
            .filter(|f| {
                !context_package.included_files.contains_key(*f)
                    && !node
                        .output_targets
                        .iter()
                        .any(|ot| ot.to_string_lossy() == **f)
            })
            .cloned()
            .collect();

        // --- Degraded/blocked context handling ---
        if context_package.budget_exceeded || !missing_owned.is_empty() {
            let reason = if context_package.budget_exceeded && !missing_owned.is_empty() {
                format!(
                    "Budget exceeded and {} owned file(s) missing",
                    missing_owned.len()
                )
            } else if context_package.budget_exceeded {
                "Context budget exceeded; some files replaced with structural digests".to_string()
            } else {
                format!(
                    "{} owned file(s) could not be read: {}",
                    missing_owned.len(),
                    missing_owned.join(", ")
                )
            };

            log::warn!("Context degraded for node '{}': {}", node.node_id, reason);
            self.emit_log(format!("⚠️ Context degraded: {}", reason));
            self.emit_event(perspt_core::AgentEvent::ContextDegraded {
                node_id: node.node_id.clone(),
                budget_exceeded: context_package.budget_exceeded,
                missing_owned_files: missing_owned.clone(),
                included_file_count: context_package.included_files.len(),
                total_bytes: context_package.total_bytes,
                reason: reason.clone(),
            });

            // Missing owned files are a hard block (budget overrun alone is
            // only a degradation): escalate the node and abort this step.
            if !missing_owned.is_empty() {
                self.emit_event(perspt_core::AgentEvent::ContextBlocked {
                    node_id: node.node_id.clone(),
                    missing_owned_files: missing_owned,
                    reason: reason.clone(),
                });
                self.graph[idx].state = NodeState::Escalated;
                self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
                    node_id: self.graph[idx].node_id.clone(),
                    status: perspt_core::NodeStatus::Escalated,
                });
                let err_msg = format!(
                    "Context blocked for node '{}': {}. Node escalated.",
                    self.graph[idx].node_id, reason
                );
                eprintln!("[SRBN-DIAG] {}", err_msg);
                return Err(anyhow::anyhow!(err_msg));
            }
        }

        // --- Structural dependency gate ---
        // Implementation nodes must see machine-verifiable digests from their
        // dependencies, not just prose summaries.
        {
            let node = &self.graph[idx];
            let prose_only_deps = self.check_structural_dependencies(node, &restriction_map);
            if !prose_only_deps.is_empty() {
                for (dep_node_id, dep_reason) in &prose_only_deps {
                    self.emit_event(perspt_core::AgentEvent::StructuralDependencyMissing {
                        node_id: node.node_id.clone(),
                        dependency_node_id: dep_node_id.clone(),
                        reason: dep_reason.clone(),
                    });
                }
                let dep_names: Vec<&str> =
                    prose_only_deps.iter().map(|(id, _)| id.as_str()).collect();
                let block_reason = format!(
                    "Required structural dependencies lack machine-verifiable digests (only prose summaries): [{}]",
                    dep_names.join(", ")
                );
                eprintln!(
                    "[SRBN-DIAG] Structural dependency check failed for '{}': {}",
                    self.graph[idx].node_id, block_reason
                );
                self.emit_log(format!("🚫 {}", block_reason));
                self.graph[idx].state = NodeState::Escalated;
                self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
                    node_id: self.graph[idx].node_id.clone(),
                    status: perspt_core::NodeStatus::Escalated,
                });
                return Err(anyhow::anyhow!(
                    "Structural dependency check failed for node '{}': {}",
                    self.graph[idx].node_id,
                    block_reason
                ));
            }
        }

        // Record provenance/context for later diagnostics and repair prompts.
        self.last_context_provenance = Some(context_package.provenance());
        self.last_formatted_context = formatted_context.clone();

        // --- Optional speculator lookahead over queued child goals ---
        let speculator_hints = if self.planning_policy.needs_speculator() {
            let node_id = self.graph[idx].node_id.clone();
            let node_goal = self.graph[idx].goal.clone();
            let child_goals: Vec<String> = self
                .graph
                .edges(idx)
                .filter_map(|edge| {
                    let child = &self.graph[edge.target()];
                    if child.state == NodeState::TaskQueued {
                        Some(format!("- {}: {}", child.node_id, child.goal))
                    } else {
                        None
                    }
                })
                .collect();

            if !child_goals.is_empty() {
                let speculator_prompt = crate::prompts::render_speculator_lookahead(
                    &node_id,
                    &node_goal,
                    &child_goals.join("\n"),
                );

                log::debug!(
                    "Speculator lookahead for node {} using model {}",
                    node_id,
                    self.speculator_model
                );
                // Lookahead is best-effort: failures degrade to "no hints".
                self.call_llm_with_logging(
                    &self.speculator_model.clone(),
                    &speculator_prompt,
                    Some(&node_id),
                )
                .await
                .unwrap_or_else(|e| {
                    log::warn!(
                        "Speculator lookahead failed ({}), proceeding without hints",
                        e
                    );
                    String::new()
                })
            } else {
                String::new()
            }
        } else {
            String::new()
        };

        // --- Actuator prompt construction ---
        // NOTE(review): index 1 is presumed to be the actuator agent in
        // `self.agents` — confirm against the constructor ordering.
        let actuator = &self.agents[1];
        let node = &self.graph[idx];
        let node_id = node.node_id.clone();

        let base_prompt = actuator.build_prompt(node, &self.context);
        let mut prompt = if formatted_context.is_empty() {
            base_prompt
        } else {
            format!(
                "{}\n\n## Node Context (PSP-5 Restriction Map)\n\n{}",
                base_prompt, formatted_context
            )
        };

        if !speculator_hints.is_empty() {
            prompt = format!(
                "{}\n\n## Speculator Lookahead Hints\n\n{}",
                prompt, speculator_hints
            );
        }

        // Append the current sandbox/project tree so the model sees real paths.
        let wd = self.effective_working_dir(idx);
        if let Ok(tree) = crate::tools::list_sandbox_files(&wd) {
            if !tree.is_empty() {
                prompt = format!(
                    "{}\n\n## Current Project Tree\n\n```\n{}\n```",
                    prompt,
                    tree.join("\n")
                );
            }
        }

        let model = actuator.model().to_string();

        let response = self
            .call_llm_with_logging(&model, &prompt, Some(&node_id))
            .await?;

        let message = crate::types::AgentMessage::new(crate::types::ModelTier::Actuator, response);
        let content = &message.content;

        // --- Case 1: the model proposed a single shell command ---
        if let Some(command) = self.extract_command_from_response(content) {
            log::info!("Extracted command: {}", command);
            self.emit_log(format!("🔧 Command proposed: {}", command));

            let node_id = self.graph[idx].node_id.clone();
            let approval_result = self
                .await_approval_for_node(
                    perspt_core::ActionType::Command {
                        command: command.clone(),
                    },
                    format!("Execute shell command: {}", command),
                    None,
                    Some(&node_id),
                )
                .await;

            if !matches!(
                approval_result,
                ApprovalResult::Approved | ApprovalResult::ApprovedWithEdit(_)
            ) {
                self.emit_log("⏭️ Command skipped (not approved)");
                return Ok(());
            }

            let mut args = HashMap::new();
            args.insert("command".to_string(), command.clone());

            let call = ToolCall {
                name: "run_command".to_string(),
                arguments: args,
            };

            let result = self.tools.execute(&call).await;
            if result.success {
                log::info!("✓ Command succeeded: {}", command);
                self.emit_log(format!("✅ Command succeeded: {}", command));
                self.emit_log(result.output);
            } else {
                log::warn!("Command failed: {:?}", result.error);
                self.emit_log(format!("❌ Command failed: {:?}", result.error));
            }
        }
        // --- Case 2: the model emitted an artifact bundle ---
        else if let Some(bundle) = self.parse_artifact_bundle(content) {
            let affected_files: Vec<String> = bundle
                .affected_paths()
                .into_iter()
                .map(ToString::to_string)
                .collect();
            log::info!(
                "Parsed artifact bundle for node {}: {} artifacts, {} commands",
                node_id,
                bundle.artifacts.len(),
                bundle.commands.len()
            );
            self.emit_log(format!(
                "📝 Bundle proposed: {} artifact(s) across {} file(s)",
                bundle.artifacts.len(),
                affected_files.len()
            ));

            let approval_result = self
                .await_approval_for_node(
                    perspt_core::ActionType::BundleWrite {
                        node_id: node_id.clone(),
                        files: affected_files.clone(),
                    },
                    format!("Apply bundle touching: {}", affected_files.join(", ")),
                    serde_json::to_string_pretty(&bundle).ok(),
                    Some(&node_id),
                )
                .await;

            if !matches!(
                approval_result,
                ApprovalResult::Approved | ApprovalResult::ApprovedWithEdit(_)
            ) {
                self.emit_log("⏭️ Bundle application skipped (not approved)");
                return Ok(());
            }

            let node_class = self.graph[idx].node_class;
            match self
                .apply_bundle_transactionally(&bundle, &node_id, node_class)
                .await
            {
                Ok(()) => {
                    self.last_tool_failure = None;
                    self.last_applied_bundle = Some(bundle.clone());
                }
                // One retry when the bundle wrote outside its declared
                // output targets: re-prompt with the expected paths.
                Err(e) if e.to_string().contains("targeted undeclared paths") => {
                    log::warn!(
                        "Bundle for '{}' targeted wrong files, retrying with retarget prompt",
                        node_id
                    );
                    self.emit_log(format!(
                        "🔄 Bundle for '{}' targeted wrong files — retrying...",
                        node_id
                    ));

                    let expected: Vec<String> = self.graph[idx]
                        .output_targets
                        .iter()
                        .map(|p| p.to_string_lossy().to_string())
                        .collect();
                    let dropped: Vec<String> = bundle
                        .artifacts
                        .iter()
                        .map(|a| a.path().to_string())
                        .collect();
                    let retry_prompt = crate::prompts::render_bundle_retarget(
                        &expected.join(", "),
                        &dropped.join(", "),
                        &prompt,
                    );

                    let retry_response = self
                        .call_llm_with_logging(&model, &retry_prompt, Some(&node_id))
                        .await?;

                    if let Some(retry_bundle) = self.parse_artifact_bundle(&retry_response) {
                        self.apply_bundle_transactionally(&retry_bundle, &node_id, node_class)
                            .await?;
                        self.last_tool_failure = None;
                        self.last_applied_bundle = Some(retry_bundle);
                    } else {
                        return Err(anyhow::anyhow!(
                            "Retry for '{}' did not produce a valid bundle",
                            node_id
                        ));
                    }
                }
                Err(e) => return Err(e),
            }

            // Run the commands from whichever bundle was actually applied
            // (original or retry).
            let effective_commands = self
                .last_applied_bundle
                .as_ref()
                .map(|b| b.commands.clone())
                .unwrap_or_default();
            if !effective_commands.is_empty() {
                self.emit_log(format!(
                    "🔧 Executing {} bundle command(s)...",
                    effective_commands.len()
                ));
                let work_dir = self.effective_working_dir(idx);
                let is_python = self.graph[idx].owner_plugin == "python";
                for raw_command in &effective_commands {
                    // Python projects get pip/python invocations rewritten to uv.
                    let command = if is_python {
                        Self::normalize_command_to_uv(raw_command)
                    } else {
                        raw_command.clone()
                    };

                    let cmd_approval = self
                        .await_approval_for_node(
                            perspt_core::ActionType::Command {
                                command: command.clone(),
                            },
                            format!("Execute bundle command: {}", command),
                            None,
                            Some(&node_id),
                        )
                        .await;

                    if !matches!(
                        cmd_approval,
                        ApprovalResult::Approved | ApprovalResult::ApprovedWithEdit(_)
                    ) {
                        self.emit_log(format!(
                            "⏭️ Bundle command skipped (not approved): {}",
                            command
                        ));
                        continue;
                    }

                    let mut args = HashMap::new();
                    args.insert("command".to_string(), command.clone());
                    args.insert(
                        "working_dir".to_string(),
                        work_dir.to_string_lossy().to_string(),
                    );

                    let call = ToolCall {
                        name: "run_command".to_string(),
                        arguments: args,
                    };

                    let result = self.tools.execute(&call).await;
                    if result.success {
                        log::info!("✓ Bundle command succeeded: {}", command);
                        self.emit_log(format!("✅ {}", command));
                        if !result.output.is_empty() {
                            // Cap log spam at 500 chars of command output.
                            let truncated: String = result.output.chars().take(500).collect();
                            self.emit_log(truncated);
                        }
                    } else {
                        let err_msg = result.error.unwrap_or_else(|| result.output.clone());
                        log::warn!("Bundle command failed: {} — {}", command, err_msg);
                        self.emit_log(format!("❌ Command failed: {} — {}", command, err_msg));
                        // Remember the failure so verification can surface it.
                        self.last_tool_failure =
                            Some(format!("Bundle command '{}' failed: {}", command, err_msg));
                    }
                }

                // Keep the Python environment consistent after dependency edits.
                if is_python {
                    log::info!("Running uv sync --dev after bundle commands...");
                    let sync_result = tokio::process::Command::new("uv")
                        .args(["sync", "--dev"])
                        .current_dir(&work_dir)
                        .stdout(std::process::Stdio::piped())
                        .stderr(std::process::Stdio::piped())
                        .output()
                        .await;
                    match sync_result {
                        Ok(output) if output.status.success() => {
                            self.emit_log("🐍 uv sync --dev completed".to_string());
                        }
                        Ok(output) => {
                            let stderr = String::from_utf8_lossy(&output.stderr);
                            log::warn!("uv sync --dev failed: {}", stderr);
                        }
                        Err(e) => {
                            log::warn!("Failed to run uv sync --dev: {}", e);
                        }
                    }
                }
            }
        } else {
            // --- Case 3: nothing actionable in the response ---
            log::debug!(
                "No code block or command found in response, response length: {}",
                content.len()
            );
            self.emit_log("ℹ️ No file changes detected in response".to_string());
        }

        self.context.history.push(message);
        Ok(())
    }
1800
1801 fn extract_command_from_response(&self, content: &str) -> Option<String> {
1804 for line in content.lines() {
1805 let trimmed = line.trim();
1806 if trimmed.starts_with("[COMMAND]") {
1807 return Some(trimmed.trim_start_matches("[COMMAND]").trim().to_string());
1808 }
1809 if trimmed.starts_with("$ ") || trimmed.starts_with("➜ ") {
1811 return Some(
1812 trimmed
1813 .trim_start_matches("$ ")
1814 .trim_start_matches("➜ ")
1815 .trim()
1816 .to_string(),
1817 );
1818 }
1819 }
1820 None
1821 }
1822
1823 fn extract_code_from_response(&self, content: &str) -> Option<(String, String, bool)> {
1828 self.extract_all_code_blocks_from_response(content)
1830 .into_iter()
1831 .next()
1832 }
1833
1834 fn extract_all_code_blocks_from_response(&self, content: &str) -> Vec<(String, String, bool)> {
1840 let lines: Vec<&str> = content.lines().collect();
1841 let mut results: Vec<(String, String, bool)> = Vec::new();
1842 let mut file_path: Option<String> = None;
1843 let mut is_diff_marker = false;
1844 let mut in_code_block = false;
1845 let mut code_lines: Vec<&str> = Vec::new();
1846 let mut code_lang = String::new();
1847
1848 for line in &lines {
1849 if line.starts_with("File:") || line.starts_with("**File:") || line.starts_with("file:")
1851 {
1852 let path = line
1853 .trim_start_matches("File:")
1854 .trim_start_matches("**File:")
1855 .trim_start_matches("file:")
1856 .trim_start_matches("**")
1857 .trim_end_matches("**")
1858 .trim();
1859 if !path.is_empty() {
1860 file_path = Some(path.to_string());
1861 is_diff_marker = false;
1862 }
1863 }
1864
1865 if line.starts_with("Diff:") || line.starts_with("**Diff:") || line.starts_with("diff:")
1867 {
1868 let path = line
1869 .trim_start_matches("Diff:")
1870 .trim_start_matches("**Diff:")
1871 .trim_start_matches("diff:")
1872 .trim_start_matches("**")
1873 .trim_end_matches("**")
1874 .trim();
1875 if !path.is_empty() {
1876 file_path = Some(path.to_string());
1877 is_diff_marker = true;
1878 }
1879 }
1880
1881 if line.starts_with("```") && !in_code_block {
1883 in_code_block = true;
1884 code_lang = line.trim_start_matches('`').to_string();
1885 continue;
1886 }
1887
1888 if line.starts_with("```") && in_code_block {
1889 in_code_block = false;
1890 if !code_lines.is_empty() {
1891 let code = code_lines.join("\n");
1892 let filename = match file_path.take() {
1893 Some(p) => p,
1894 None => match code_lang.as_str() {
1895 "python" | "py" => "main.py".to_string(),
1896 "rust" | "rs" => "main.rs".to_string(),
1897 "javascript" | "js" => "index.js".to_string(),
1898 "typescript" | "ts" => "index.ts".to_string(),
1899 "toml" => "Cargo.toml".to_string(),
1900 "json" => {
1901 let trimmed = code.trim();
1906 if trimmed.starts_with('[')
1907 || trimmed.contains("\"artifacts\"")
1908 || trimmed.contains("\"action\"")
1909 {
1910 log::debug!("Skipping unnamed JSON block that looks like a manifest/bundle");
1911 code_lines.clear();
1912 code_lang.clear();
1913 is_diff_marker = false;
1914 continue;
1915 }
1916 "config.json".to_string()
1917 }
1918 "yaml" | "yml" => "config.yaml".to_string(),
1919 other => {
1920 log::warn!(
1921 "Skipping unnamed code block with unrecognized language tag '{}'",
1922 other
1923 );
1924 code_lines.clear();
1925 code_lang.clear();
1926 is_diff_marker = false;
1927 continue;
1928 }
1929 },
1930 };
1931 let is_diff = is_diff_marker || code_lang == "diff" || code.starts_with("---");
1932 results.push((filename, code, is_diff));
1933 }
1934 code_lines.clear();
1935 code_lang.clear();
1936 is_diff_marker = false;
1937 continue;
1938 }
1939
1940 if in_code_block {
1941 code_lines.push(line);
1942 }
1943 }
1944
1945 results
1946 }
1947
1948 pub fn session_id(&self) -> &str {
1954 &self.context.session_id
1955 }
1956
1957 pub fn node_count(&self) -> usize {
1959 self.graph.node_count()
1960 }
1961
1962 pub async fn start_lsp_for_plugins(&mut self, plugin_names: &[&str]) -> Result<()> {
1967 let registry = perspt_core::plugin::PluginRegistry::new();
1968
1969 for &name in plugin_names {
1970 if self.lsp_clients.contains_key(name) {
1971 log::debug!("LSP client already running for {}", name);
1972 continue;
1973 }
1974
1975 let plugin = match registry.get(name) {
1976 Some(p) => p,
1977 None => {
1978 log::warn!("No plugin found for '{}', skipping LSP startup", name);
1979 continue;
1980 }
1981 };
1982
1983 let profile = plugin.verifier_profile();
1984 let lsp_config = match profile.lsp.effective_config() {
1985 Some(cfg) => cfg.clone(),
1986 None => {
1987 log::warn!(
1988 "No available LSP for {} (primary and fallback unavailable)",
1989 name
1990 );
1991 continue;
1992 }
1993 };
1994
1995 log::info!(
1996 "Starting LSP for {}: {} {:?}",
1997 name,
1998 lsp_config.server_binary,
1999 lsp_config.args
2000 );
2001
2002 let mut client = LspClient::from_config(&lsp_config);
2003 match client
2004 .start_with_config(&lsp_config, &self.context.working_dir)
2005 .await
2006 {
2007 Ok(()) => {
2008 log::info!("{} LSP started successfully", name);
2009 self.lsp_clients.insert(name.to_string(), client);
2010 }
2011 Err(e) => {
2012 log::warn!(
2013 "Failed to start {} LSP: {} (continuing without it)",
2014 name,
2015 e
2016 );
2017 }
2018 }
2019 }
2020
2021 Ok(())
2022 }
2023
2024 fn lsp_key_for_file(&self, path: &str) -> Option<String> {
2029 let registry = perspt_core::plugin::PluginRegistry::new();
2030
2031 for plugin in registry.all() {
2033 if plugin.owns_file(path) {
2034 let name = plugin.name().to_string();
2035 if self.lsp_clients.contains_key(&name) {
2036 return Some(name);
2037 }
2038 }
2039 }
2040
2041 self.lsp_clients.keys().next().cloned()
2043 }
2044
2045 fn sandbox_dir_for_node(&self, idx: NodeIndex) -> Option<std::path::PathBuf> {
2056 let branch_id = self.graph[idx].provisional_branch_id.as_ref()?;
2057 let sandbox_path = self
2058 .context
2059 .working_dir
2060 .join(".perspt")
2061 .join("sandboxes")
2062 .join(&self.context.session_id)
2063 .join(branch_id);
2064 if sandbox_path.exists() {
2065 Some(sandbox_path)
2066 } else {
2067 None
2068 }
2069 }
2070
2071 fn effective_working_dir(&self, idx: NodeIndex) -> std::path::PathBuf {
2074 self.sandbox_dir_for_node(idx)
2075 .unwrap_or_else(|| self.context.working_dir.clone())
2076 }
2077
    /// Create a provisional branch (and backing sandbox) for the node at `idx`.
    ///
    /// Records the branch and one lineage row per incoming parent in the
    /// ledger, stamps the branch id onto the node, creates a sandbox seeded
    /// with plugin manifests, the node's own output targets, and the output
    /// targets of all transitive ancestors, then emits a `BranchCreated`
    /// event. All persistence/sandbox failures are logged and tolerated.
    ///
    /// NOTE(review): despite the `maybe_` name, every code path currently
    /// returns `Some(branch_id)` — confirm whether a conditional path was
    /// intended.
    fn maybe_create_provisional_branch(&mut self, idx: NodeIndex) -> Option<String> {
        let parents: Vec<NodeIndex> = self
            .graph
            .neighbors_directed(idx, petgraph::Direction::Incoming)
            .collect();

        let node = &self.graph[idx];
        let node_id = node.node_id.clone();
        let session_id = self.context.session_id.clone();

        // Root nodes get the synthetic parent "root"; otherwise the first
        // incoming neighbor is used as the nominal parent.
        let parent_node_id = if parents.is_empty() {
            "root".to_string()
        } else {
            self.graph[parents[0]].node_id.clone()
        };

        let branch_id = format!("branch_{}_{}", node_id, uuid::Uuid::new_v4());
        let branch = ProvisionalBranch::new(
            branch_id.clone(),
            session_id.clone(),
            node_id.clone(),
            parent_node_id.clone(),
        );

        if let Err(e) = self.ledger.record_provisional_branch(&branch) {
            log::warn!("Failed to record provisional branch: {}", e);
        }

        // One lineage record per parent; Interface parents impose a seal
        // dependency on this branch.
        for pidx in &parents {
            let parent_id = self.graph[*pidx].node_id.clone();
            let depends_on_seal = self.graph[*pidx].node_class == NodeClass::Interface;
            let lineage = perspt_core::types::BranchLineage {
                lineage_id: format!("lin_{}_{}", branch_id, parent_id),
                parent_branch_id: parent_id,
                child_branch_id: branch_id.clone(),
                depends_on_seal,
            };
            if let Err(e) = self.ledger.record_branch_lineage(&lineage) {
                log::warn!("Failed to record branch lineage: {}", e);
            }
        }

        self.graph[idx].provisional_branch_id = Some(branch_id.clone());

        match crate::tools::create_sandbox(&self.context.working_dir, &session_id, &branch_id) {
            Ok(sandbox_path) => {
                log::debug!("Sandbox created at {}", sandbox_path.display());

                // Seed plugin build manifests so tools work inside the sandbox.
                let plugin_refs: Vec<&str> = self
                    .context
                    .active_plugins
                    .iter()
                    .map(|s| s.as_str())
                    .collect();
                if let Err(e) = crate::tools::seed_sandbox_manifests(
                    &self.context.working_dir,
                    &sandbox_path,
                    &plugin_refs,
                ) {
                    log::warn!("Failed to seed sandbox manifests: {}", e);
                }

                // Seed the node's own output targets (copy failures are
                // expected for not-yet-created files, hence debug level).
                let node = &self.graph[idx];
                for target in &node.output_targets {
                    if let Some(rel) = target.to_str() {
                        if let Err(e) = crate::tools::copy_to_sandbox(
                            &self.context.working_dir,
                            &sandbox_path,
                            rel,
                        ) {
                            log::debug!("Could not seed sandbox with {}: {}", rel, e);
                        }
                    }
                }
                // Walk all transitive ancestors and seed their output targets
                // too, so the sandbox sees everything this node depends on.
                let mut ancestor_queue: Vec<NodeIndex> = parents.clone();
                let mut visited = std::collections::HashSet::new();
                while let Some(ancestor_idx) = ancestor_queue.pop() {
                    if !visited.insert(ancestor_idx) {
                        continue;
                    }
                    for target in &self.graph[ancestor_idx].output_targets {
                        if let Some(rel) = target.to_str() {
                            if let Err(e) = crate::tools::copy_to_sandbox(
                                &self.context.working_dir,
                                &sandbox_path,
                                rel,
                            ) {
                                log::debug!(
                                    "Could not seed sandbox with ancestor file {}: {}",
                                    rel,
                                    e
                                );
                            }
                        }
                    }
                    for grandparent in self
                        .graph
                        .neighbors_directed(ancestor_idx, petgraph::Direction::Incoming)
                    {
                        ancestor_queue.push(grandparent);
                    }
                }
            }
            Err(e) => {
                log::warn!("Failed to create sandbox for branch {}: {}", branch_id, e);
            }
        }

        self.emit_event(perspt_core::AgentEvent::BranchCreated {
            branch_id: branch_id.clone(),
            node_id,
            parent_node_id,
        });
        log::info!("Created provisional branch {} for node", branch_id);

        Some(branch_id)
    }
2216
2217 fn merge_provisional_branch(&mut self, branch_id: &str, idx: NodeIndex) {
2219 let node_id = self.graph[idx].node_id.clone();
2220 if let Err(e) = self
2221 .ledger
2222 .update_branch_state(branch_id, &ProvisionalBranchState::Merged.to_string())
2223 {
2224 log::warn!("Failed to merge branch {}: {}", branch_id, e);
2225 }
2226
2227 let sandbox_path = self
2229 .context
2230 .working_dir
2231 .join(".perspt")
2232 .join("sandboxes")
2233 .join(&self.context.session_id)
2234 .join(branch_id);
2235 if let Err(e) = crate::tools::cleanup_sandbox(&sandbox_path) {
2236 log::warn!(
2237 "Failed to cleanup sandbox for merged branch {}: {}",
2238 branch_id,
2239 e
2240 );
2241 }
2242
2243 self.emit_event(perspt_core::AgentEvent::BranchMerged {
2244 branch_id: branch_id.to_string(),
2245 node_id,
2246 });
2247 log::info!("Merged provisional branch {}", branch_id);
2248 }
2249
2250 fn flush_provisional_branch(&mut self, branch_id: &str, node_id: &str) {
2252 if let Err(e) = self
2253 .ledger
2254 .update_branch_state(branch_id, &ProvisionalBranchState::Flushed.to_string())
2255 {
2256 log::warn!("Failed to flush branch {}: {}", branch_id, e);
2257 }
2258
2259 let sandbox_path = self
2261 .context
2262 .working_dir
2263 .join(".perspt")
2264 .join("sandboxes")
2265 .join(&self.context.session_id)
2266 .join(branch_id);
2267 if let Err(e) = crate::tools::cleanup_sandbox(&sandbox_path) {
2268 log::warn!(
2269 "Failed to cleanup sandbox for flushed branch {}: {}",
2270 branch_id,
2271 e
2272 );
2273 }
2274
2275 log::info!(
2276 "Flushed provisional branch {} for node {}",
2277 branch_id,
2278 node_id
2279 );
2280 }
2281
2282 fn flush_descendant_branches(&mut self, idx: NodeIndex) {
2288 let parent_node_id = self.graph[idx].node_id.clone();
2289 let session_id = self.context.session_id.clone();
2290
2291 let descendant_indices = self.collect_descendants(idx);
2293
2294 let mut flushed_branch_ids = Vec::new();
2295 let mut requeue_node_ids = Vec::new();
2296
2297 for desc_idx in &descendant_indices {
2298 let desc_node = &self.graph[*desc_idx];
2299 if let Some(ref bid) = desc_node.provisional_branch_id {
2300 let bid_clone = bid.clone();
2302 let nid_clone = desc_node.node_id.clone();
2303 self.flush_provisional_branch(&bid_clone, &nid_clone);
2304 flushed_branch_ids.push(bid_clone);
2305 requeue_node_ids.push(nid_clone);
2306 }
2307 }
2308
2309 if flushed_branch_ids.is_empty() {
2310 return;
2311 }
2312
2313 let flush_record = perspt_core::types::BranchFlushRecord::new(
2315 &session_id,
2316 &parent_node_id,
2317 flushed_branch_ids.clone(),
2318 requeue_node_ids.clone(),
2319 format!(
2320 "Parent node {} failed verification/convergence",
2321 parent_node_id
2322 ),
2323 );
2324 if let Err(e) = self.ledger.record_branch_flush(&flush_record) {
2325 log::warn!("Failed to record branch flush: {}", e);
2326 }
2327
2328 self.emit_event(perspt_core::AgentEvent::BranchFlushed {
2329 parent_node_id: parent_node_id.clone(),
2330 flushed_branch_ids,
2331 reason: format!("Parent {} failed", parent_node_id),
2332 });
2333
2334 log::info!(
2335 "Flushed {} descendant branches for parent {}; {} nodes eligible for requeue",
2336 flush_record.flushed_branch_ids.len(),
2337 parent_node_id,
2338 requeue_node_ids.len(),
2339 );
2340 }
2341
2342 fn collect_descendants(&self, idx: NodeIndex) -> Vec<NodeIndex> {
2345 let mut descendants = Vec::new();
2346 let mut stack = vec![idx];
2347 let mut visited = std::collections::HashSet::new();
2348 visited.insert(idx);
2349
2350 while let Some(current) = stack.pop() {
2351 for child in self
2352 .graph
2353 .neighbors_directed(current, petgraph::Direction::Outgoing)
2354 {
2355 if visited.insert(child) {
2356 descendants.push(child);
2357 stack.push(child);
2358 }
2359 }
2360 }
2361 descendants
2362 }
2363
    /// Compute and persist interface seals for an Interface-class node.
    ///
    /// For each output target a structural digest is computed and an
    /// `InterfaceSealRecord` written to the ledger; unreadable targets are
    /// skipped. When at least one target sealed, the seal hash is stamped on
    /// the node and an `InterfaceSealed` event is emitted. No-op for
    /// non-Interface nodes.
    ///
    /// NOTE(review): `seal_hash` is overwritten on every successful target,
    /// so with multiple output targets only the LAST target's hash is kept —
    /// confirm whether a combined hash was intended.
    fn emit_interface_seals(&mut self, idx: NodeIndex) {
        let node = &self.graph[idx];
        if node.node_class != NodeClass::Interface {
            return;
        }

        let node_id = node.node_id.clone();
        let session_id = self.context.session_id.clone();
        let output_targets: Vec<_> = node.output_targets.clone();
        let mut sealed_paths = Vec::new();
        let mut seal_hash = [0u8; 32];

        let retriever = ContextRetriever::new(self.context.working_dir.clone());

        for target in &output_targets {
            let path_str = target.to_string_lossy().to_string();
            match retriever.compute_structural_digest(
                &path_str,
                perspt_core::types::ArtifactKind::InterfaceSeal,
                &node_id,
            ) {
                Ok(digest) => {
                    let seal = perspt_core::types::InterfaceSealRecord::from_digest(
                        &session_id,
                        &node_id,
                        &digest,
                    );
                    seal_hash = seal.seal_hash;
                    sealed_paths.push(path_str);

                    // Ledger write failures are logged but don't stop sealing.
                    if let Err(e) = self.ledger.record_interface_seal(&seal) {
                        log::warn!("Failed to record interface seal: {}", e);
                    }
                }
                Err(e) => {
                    // Expected for targets that don't exist yet; debug only.
                    log::debug!("Skipping seal for {}: {}", path_str, e);
                }
            }
        }

        if !sealed_paths.is_empty() {
            self.graph[idx].interface_seal_hash = Some(seal_hash);

            self.emit_event(perspt_core::AgentEvent::InterfaceSealed {
                node_id: node_id.clone(),
                sealed_paths: sealed_paths.clone(),
                // Hex-encode the 32-byte hash for the event payload.
                seal_hash: seal_hash
                    .iter()
                    .map(|b| format!("{:02x}", b))
                    .collect::<String>(),
            });
            log::info!(
                "Sealed {} interface artifact(s) for node {}",
                sealed_paths.len(),
                node_id
            );
        }
    }
2428
2429 fn unblock_dependents(&mut self, idx: NodeIndex) {
2431 let node_id = self.graph[idx].node_id.clone();
2432
2433 let (unblocked, remaining): (Vec<_>, Vec<_>) = self
2435 .blocked_dependencies
2436 .drain(..)
2437 .partition(|dep| dep.parent_node_id == node_id);
2438
2439 self.blocked_dependencies = remaining;
2440
2441 for dep in unblocked {
2442 self.emit_event(perspt_core::AgentEvent::DependentUnblocked {
2443 child_node_id: dep.child_node_id.clone(),
2444 parent_node_id: node_id.clone(),
2445 });
2446 log::info!(
2447 "Unblocked dependent {} (parent {} sealed)",
2448 dep.child_node_id,
2449 node_id
2450 );
2451 }
2452 }
2453
2454 fn check_seal_prerequisites(&mut self, idx: NodeIndex) -> bool {
2457 let parents: Vec<NodeIndex> = self
2458 .graph
2459 .neighbors_directed(idx, petgraph::Direction::Incoming)
2460 .collect();
2461
2462 for pidx in parents {
2463 let parent = &self.graph[pidx];
2464 if parent.node_class == NodeClass::Interface
2465 && parent.interface_seal_hash.is_none()
2466 && parent.state != NodeState::Completed
2467 {
2468 let child_node_id = self.graph[idx].node_id.clone();
2470 let parent_node_id = parent.node_id.clone();
2471 let sealed_paths: Vec<String> = parent
2472 .output_targets
2473 .iter()
2474 .map(|p| p.to_string_lossy().to_string())
2475 .collect();
2476
2477 let dep = perspt_core::types::BlockedDependency::new(
2478 &child_node_id,
2479 &parent_node_id,
2480 sealed_paths,
2481 );
2482 self.blocked_dependencies.push(dep);
2483
2484 log::info!(
2485 "Node {} blocked: waiting on interface seal from {}",
2486 child_node_id,
2487 parent_node_id
2488 );
2489 return true;
2490 }
2491 }
2492 false
2493 }
2494
2495 fn check_structural_dependencies(
2501 &self,
2502 node: &SRBNNode,
2503 restriction_map: &perspt_core::types::RestrictionMap,
2504 ) -> Vec<(String, String)> {
2505 use perspt_core::types::{ArtifactKind, NodeClass};
2506
2507 let mut prose_only = Vec::new();
2508
2509 if node.node_class != NodeClass::Implementation {
2511 return prose_only;
2512 }
2513
2514 let idx = match self.node_indices.get(&node.node_id) {
2516 Some(i) => *i,
2517 None => return prose_only,
2518 };
2519
2520 let parents: Vec<NodeIndex> = self
2521 .graph
2522 .neighbors_directed(idx, petgraph::Direction::Incoming)
2523 .collect();
2524
2525 for pidx in parents {
2526 let parent = &self.graph[pidx];
2527 if parent.node_class != NodeClass::Interface {
2528 continue;
2529 }
2530
2531 let has_structural = restriction_map.structural_digests.iter().any(|d| {
2533 d.source_node_id == parent.node_id
2534 && matches!(
2535 d.artifact_kind,
2536 ArtifactKind::Signature
2537 | ArtifactKind::Schema
2538 | ArtifactKind::InterfaceSeal
2539 )
2540 });
2541
2542 if !has_structural {
2543 prose_only.push((
2544 parent.node_id.clone(),
2545 format!(
2546 "Interface node '{}' has no Signature/Schema/InterfaceSeal digest in the restriction map",
2547 parent.node_id
2548 ),
2549 ));
2550 }
2551 }
2552
2553 prose_only
2554 }
2555
    /// Merges ledger-recorded interface seals from this node's parents into
    /// the restriction map as `InterfaceSeal` structural digests.
    ///
    /// Parents without a seal hash are skipped, as are parents whose seal
    /// rows cannot be read from the ledger (logged at debug level only).
    fn inject_sealed_interfaces(
        &self,
        idx: NodeIndex,
        restriction_map: &mut perspt_core::types::RestrictionMap,
    ) {
        let parents: Vec<NodeIndex> = self
            .graph
            .neighbors_directed(idx, petgraph::Direction::Incoming)
            .collect();

        for pidx in parents {
            let parent = &self.graph[pidx];
            // Only parents that have actually been sealed contribute digests.
            if parent.interface_seal_hash.is_none() {
                continue;
            }

            let parent_node_id = &parent.node_id;

            // A ledger read failure is treated as "no seals" rather than an
            // error — injection is best-effort.
            let seals = match self.ledger.get_interface_seals(parent_node_id) {
                Ok(rows) => rows,
                Err(e) => {
                    log::debug!("Could not query seals for {}: {}", parent_node_id, e);
                    continue;
                }
            };

            for seal in seals {
                // NOTE(review): this removes the sealed path from
                // `sealed_interfaces` but never re-adds it — presumably the
                // path is being promoted to a full structural digest below;
                // confirm against RestrictionMap's contract.
                restriction_map
                    .sealed_interfaces
                    .retain(|p| *p != seal.sealed_path);

                // Copy the stored hash into a fixed 32-byte array; shorter
                // stored values are tolerated and zero-padded at the tail.
                let mut hash = [0u8; 32];
                let len = seal.seal_hash.len().min(32);
                hash[..len].copy_from_slice(&seal.seal_hash[..len]);

                let digest = perspt_core::types::StructuralDigest {
                    digest_id: format!("seal_{}_{}", seal.node_id, seal.sealed_path),
                    source_node_id: seal.node_id.clone(),
                    source_path: seal.sealed_path.clone(),
                    artifact_kind: perspt_core::types::ArtifactKind::InterfaceSeal,
                    hash,
                    version: seal.version as u32,
                };
                restriction_map.structural_digests.push(digest);

                log::debug!(
                    "Injected sealed digest for {} from parent {}",
                    seal.sealed_path,
                    parent_node_id,
                );
            }
        }
    }
2619}
2620
/// Parses a persisted display string back into a `NodeState`; delegates
/// entirely to the type's own `from_display_str`.
fn parse_node_state(s: &str) -> NodeState {
    NodeState::from_display_str(s)
}
2625
2626fn parse_node_class(s: &str) -> NodeClass {
2628 match s {
2629 "Interface" => NodeClass::Interface,
2630 "Implementation" => NodeClass::Implementation,
2631 "Integration" => NodeClass::Integration,
2632 _ => NodeClass::default(),
2633 }
2634}
2635
2636#[cfg(test)]
2637mod tests {
2638 use super::verification::verification_stages_for_node;
2639 use super::*;
2640 use std::path::PathBuf;
2641
    // A freshly-built orchestrator starts with an empty graph.
    #[tokio::test]
    async fn test_orchestrator_creation() {
        let orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        assert_eq!(orch.node_count(), 0);
    }

    // Nodes and a dependency edge can be added and are counted.
    #[tokio::test]
    async fn test_add_nodes() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));

        let node1 = SRBNNode::new(
            "node1".to_string(),
            "Test task 1".to_string(),
            ModelTier::Architect,
        );
        let node2 = SRBNNode::new(
            "node2".to_string(),
            "Test task 2".to_string(),
            ModelTier::Actuator,
        );

        orch.add_node(node1);
        orch.add_node(node2);
        orch.add_dependency("node1", "node2", "depends_on").unwrap();

        assert_eq!(orch.node_count(), 2);
    }
    // lsp_key_for_file resolves a registered client from the file's
    // extension/plugin; unknown extensions still resolve to some client.
    #[tokio::test]
    async fn test_lsp_key_for_file_resolves_by_plugin() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        orch.lsp_clients.insert(
            "rust".to_string(),
            crate::lsp::LspClient::new("rust-analyzer"),
        );
        orch.lsp_clients
            .insert("python".to_string(), crate::lsp::LspClient::new("pylsp"));

        assert_eq!(
            orch.lsp_key_for_file("src/main.rs"),
            Some("rust".to_string())
        );
        assert_eq!(orch.lsp_key_for_file("app.py"), Some("python".to_string()));
        // Fallback: a .csv file maps to *some* key rather than None.
        let key = orch.lsp_key_for_file("data.csv");
        assert!(key.is_some()); }
2691
    // Splitting a node removes the parent and creates "__split_N" children.
    #[tokio::test]
    async fn test_split_node_creates_children() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let mut node = SRBNNode::new("parent".into(), "Do everything".into(), ModelTier::Actuator);
        node.output_targets = vec![PathBuf::from("a.rs"), PathBuf::from("b.rs")];
        orch.add_node(node);

        let idx = orch.node_indices["parent"];
        let applied = orch.split_node(idx, &["handle a.rs".into(), "handle b.rs".into()]);
        assert!(!applied.is_empty());
        assert!(!orch.node_indices.contains_key("parent"));
        assert!(orch.node_indices.contains_key("parent__split_0"));
        assert!(orch.node_indices.contains_key("parent__split_1"));
    }

    // Splitting with no child goals changes nothing.
    #[tokio::test]
    async fn test_split_node_empty_children_is_noop() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let node = SRBNNode::new("n".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(node);
        let idx = orch.node_indices["n"];
        let applied = orch.split_node(idx, &[]);
        assert!(applied.is_empty());
    }
2723
    // insert_interface_node splices an "__iface" node into the graph after
    // the source node, growing the node count by one.
    #[tokio::test]
    async fn test_insert_interface_node() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let n1 = SRBNNode::new("a".into(), "source".into(), ModelTier::Actuator);
        let n2 = SRBNNode::new("b".into(), "dest".into(), ModelTier::Actuator);
        orch.add_node(n1);
        orch.add_node(n2);
        orch.add_dependency("a", "b", "data_flow").unwrap();

        let idx_a = orch.node_indices["a"];
        let applied = orch.insert_interface_node(idx_a, "API boundary");
        assert!(applied.is_some());
        assert!(orch.node_indices.contains_key("a__iface"));
        assert_eq!(orch.node_count(), 3);
    }

    // replan_subgraph resets the listed dependents to TaskQueued and the
    // triggering node to Retry.
    #[tokio::test]
    async fn test_replan_subgraph_resets_nodes() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let mut n1 = SRBNNode::new("trigger".into(), "g1".into(), ModelTier::Actuator);
        n1.state = NodeState::Coding;
        let mut n2 = SRBNNode::new("dep".into(), "g2".into(), ModelTier::Actuator);
        n2.state = NodeState::Completed;
        orch.add_node(n1);
        orch.add_node(n2);

        let trigger_idx = orch.node_indices["trigger"];
        let applied = orch.replan_subgraph(trigger_idx, &["dep".into()]);
        assert!(applied);

        let dep_idx = orch.node_indices["dep"];
        assert_eq!(orch.graph[dep_idx].state, NodeState::TaskQueued);
        assert_eq!(orch.graph[trigger_idx].state, NodeState::Retry);
    }
2759
    // Every node's validator set includes dependency-graph consistency.
    #[tokio::test]
    async fn test_select_validators_always_includes_dependency_graph() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let node = SRBNNode::new("n".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(node);
        let idx = orch.node_indices["n"];

        let validators = orch.select_validators(idx);
        assert!(validators.contains(&SheafValidatorClass::DependencyGraphConsistency));
    }

    // Interface nodes additionally get export/import consistency validation.
    #[tokio::test]
    async fn test_select_validators_interface_node() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let mut node = SRBNNode::new("iface".into(), "g".into(), ModelTier::Actuator);
        node.node_class = perspt_core::types::NodeClass::Interface;
        orch.add_node(node);
        let idx = orch.node_indices["iface"];

        let validators = orch.select_validators(idx);
        assert!(validators.contains(&SheafValidatorClass::ExportImportConsistency));
    }

    // An acyclic graph passes the dependency-graph validator with zero
    // sheaf-energy contribution.
    #[tokio::test]
    async fn test_run_sheaf_validator_dependency_graph_no_cycles() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let n1 = SRBNNode::new("a".into(), "g".into(), ModelTier::Actuator);
        let n2 = SRBNNode::new("b".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(n1);
        orch.add_node(n2);
        orch.add_dependency("a", "b", "dep").unwrap();

        let idx = orch.node_indices["a"];
        let result = orch.run_sheaf_validator(idx, SheafValidatorClass::DependencyGraphConsistency);
        assert!(result.passed);
        assert_eq!(result.v_sheaf_contribution, 0.0);
    }
2797
    // With no extra signals, non-convergence is classified as an
    // implementation error.
    #[tokio::test]
    async fn test_classify_non_convergence_default() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let node = SRBNNode::new("n".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(node);
        let idx = orch.node_indices["n"];

        let category = orch.classify_non_convergence(idx);
        assert_eq!(category, EscalationCategory::ImplementationError);
    }

    // affected_dependents returns every direct child of the node.
    #[tokio::test]
    async fn test_affected_dependents() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let n1 = SRBNNode::new("root".into(), "g".into(), ModelTier::Actuator);
        let n2 = SRBNNode::new("child1".into(), "g".into(), ModelTier::Actuator);
        let n3 = SRBNNode::new("child2".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(n1);
        orch.add_node(n2);
        orch.add_node(n3);
        orch.add_dependency("root", "child1", "dep").unwrap();
        orch.add_dependency("root", "child2", "dep").unwrap();

        let idx = orch.node_indices["root"];
        let deps = orch.affected_dependents(idx);
        assert_eq!(deps.len(), 2);
        assert!(deps.contains(&"child1".to_string()));
        assert!(deps.contains(&"child2".to_string()));
    }
2828
    // A root node (no parents) still receives a provisional branch, and the
    // branch id is written back onto the node.
    #[tokio::test]
    async fn test_maybe_create_provisional_branch_root_node() {
        let temp_dir =
            std::env::temp_dir().join(format!("perspt_root_branch_{}", uuid::Uuid::new_v4()));
        std::fs::create_dir_all(&temp_dir).unwrap();

        let mut orch = SRBNOrchestrator::new_for_testing(temp_dir.clone());
        orch.context.session_id = "test_session".into();
        let node = SRBNNode::new("root".into(), "root goal".into(), ModelTier::Actuator);
        orch.add_node(node);

        let idx = orch.node_indices["root"];
        let branch = orch.maybe_create_provisional_branch(idx);
        assert!(branch.is_some());
        assert!(orch.graph[idx].provisional_branch_id.is_some());

        let _ = std::fs::remove_dir_all(&temp_dir);
    }

    // A child node with a parent dependency also receives a branch.
    #[tokio::test]
    async fn test_maybe_create_provisional_branch_child_node() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_phase6"));
        orch.context.session_id = "test_session".into();
        let parent = SRBNNode::new("parent".into(), "parent goal".into(), ModelTier::Actuator);
        let child = SRBNNode::new("child".into(), "child goal".into(), ModelTier::Actuator);
        orch.add_node(parent);
        orch.add_node(child);
        orch.add_dependency("parent", "child", "dep").unwrap();

        let idx = orch.node_indices["child"];
        let branch = orch.maybe_create_provisional_branch(idx);
        assert!(branch.is_some());
        assert!(orch.graph[idx].provisional_branch_id.is_some());
    }
2868
    // collect_descendants walks the whole subtree: both children and the
    // grandchild are counted (3 total from "a").
    #[tokio::test]
    async fn test_collect_descendants() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let n1 = SRBNNode::new("a".into(), "g".into(), ModelTier::Actuator);
        let n2 = SRBNNode::new("b".into(), "g".into(), ModelTier::Actuator);
        let n3 = SRBNNode::new("c".into(), "g".into(), ModelTier::Actuator);
        let n4 = SRBNNode::new("d".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(n1);
        orch.add_node(n2);
        orch.add_node(n3);
        orch.add_node(n4);
        orch.add_dependency("a", "b", "dep").unwrap();
        orch.add_dependency("b", "c", "dep").unwrap();
        orch.add_dependency("a", "d", "dep").unwrap();

        let idx_a = orch.node_indices["a"];
        let descendants = orch.collect_descendants(idx_a);
        assert_eq!(descendants.len(), 3); }
2888
    // A non-Interface parent never blocks its child on sealing.
    #[tokio::test]
    async fn test_check_seal_prerequisites_no_interface_parent() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let parent = SRBNNode::new("parent".into(), "g".into(), ModelTier::Actuator);
        let child = SRBNNode::new("child".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(parent);
        orch.add_node(child);
        orch.add_dependency("parent", "child", "dep").unwrap();

        let idx = orch.node_indices["child"];
        assert!(!orch.check_seal_prerequisites(idx));
        assert!(orch.blocked_dependencies.is_empty());
    }

    // An unsealed Interface parent blocks the child and records exactly one
    // blocked dependency pointing at that parent.
    #[tokio::test]
    async fn test_check_seal_prerequisites_unsealed_interface() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let mut parent = SRBNNode::new("iface".into(), "g".into(), ModelTier::Actuator);
        parent.node_class = perspt_core::types::NodeClass::Interface;
        let child = SRBNNode::new("impl".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(parent);
        orch.add_node(child);
        orch.add_dependency("iface", "impl", "dep").unwrap();

        let idx = orch.node_indices["impl"];
        assert!(orch.check_seal_prerequisites(idx));
        assert_eq!(orch.blocked_dependencies.len(), 1);
        assert_eq!(orch.blocked_dependencies[0].parent_node_id, "iface");
    }
2920
2921 #[tokio::test]
2922 async fn test_check_seal_prerequisites_sealed_interface() {
2923 let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
2924 let mut parent = SRBNNode::new("iface".into(), "g".into(), ModelTier::Actuator);
2925 parent.node_class = perspt_core::types::NodeClass::Interface;
2926 parent.interface_seal_hash = Some([1u8; 32]); let child = SRBNNode::new("impl".into(), "g".into(), ModelTier::Actuator);
2928 orch.add_node(parent);
2929 orch.add_node(child);
2930 orch.add_dependency("iface", "impl", "dep").unwrap();
2931
2932 let idx = orch.node_indices["impl"];
2933 assert!(!orch.check_seal_prerequisites(idx));
2935 assert!(orch.blocked_dependencies.is_empty());
2936 }
2937
    // unblock_dependents removes queue entries whose parent matches the node.
    #[tokio::test]
    async fn test_unblock_dependents() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let parent = SRBNNode::new("parent".into(), "g".into(), ModelTier::Actuator);
        let child = SRBNNode::new("child".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(parent);
        orch.add_node(child);

        orch.blocked_dependencies
            .push(perspt_core::types::BlockedDependency::new(
                "child",
                "parent",
                vec!["src/api.rs".into()],
            ));
        assert_eq!(orch.blocked_dependencies.len(), 1);

        let idx = orch.node_indices["parent"];
        orch.unblock_dependents(idx);
        assert!(orch.blocked_dependencies.is_empty());
    }

    // Smoke test: flushing descendant branches whose ids have no backing
    // sandbox must complete (no assertions — exercises the walk itself).
    #[tokio::test]
    async fn test_flush_descendant_branches() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_phase6_flush"));
        orch.context.session_id = "test_session".into();

        let parent = SRBNNode::new("parent".into(), "g".into(), ModelTier::Actuator);
        let mut child1 = SRBNNode::new("child1".into(), "g".into(), ModelTier::Actuator);
        child1.provisional_branch_id = Some("branch_c1".into());
        let mut child2 = SRBNNode::new("child2".into(), "g".into(), ModelTier::Actuator);
        child2.provisional_branch_id = Some("branch_c2".into());
        let grandchild = SRBNNode::new("grandchild".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(parent);
        orch.add_node(child1);
        orch.add_node(child2);
        orch.add_node(grandchild);
        orch.add_dependency("parent", "child1", "dep").unwrap();
        orch.add_dependency("parent", "child2", "dep").unwrap();
        orch.add_dependency("child1", "grandchild", "dep").unwrap();

        let idx = orch.node_indices["parent"];
        orch.flush_descendant_branches(idx);
    }
2984
2985 #[tokio::test]
2990 async fn test_effective_working_dir_no_branch() {
2991 let orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/test/workspace"));
2992 let mut orch = orch;
2994 let node = SRBNNode::new("n1".into(), "goal".into(), ModelTier::Actuator);
2995 orch.add_node(node);
2996 let idx = orch.node_indices["n1"];
2997 assert_eq!(
2999 orch.effective_working_dir(idx),
3000 PathBuf::from("/test/workspace")
3001 );
3002 }
3003
3004 #[tokio::test]
3005 async fn test_sandbox_dir_for_node_none_without_branch() {
3006 let orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/test/workspace"));
3007 let mut orch = orch;
3008 let node = SRBNNode::new("n1".into(), "goal".into(), ModelTier::Actuator);
3009 orch.add_node(node);
3010 let idx = orch.node_indices["n1"];
3011 assert!(orch.sandbox_dir_for_node(idx).is_none());
3012 }
3013
3014 #[tokio::test]
3015 async fn test_rewrite_churn_guardrail() {
3016 let orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_churn"));
3017 let mut orch = orch;
3018 let node = SRBNNode::new("node_a".into(), "goal".into(), ModelTier::Actuator);
3019 orch.add_node(node);
3020 let count = orch.count_lineage_rewrites("node_a");
3022 assert_eq!(count, 0);
3023 }
3024
    // run_resumed completes successfully when all nodes are already in
    // terminal states (Completed / Failed).
    #[tokio::test]
    async fn test_run_resumed_skips_terminal_nodes() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_resume"));

        let mut n1 = SRBNNode::new("done".into(), "completed".into(), ModelTier::Actuator);
        n1.state = NodeState::Completed;
        let mut n2 = SRBNNode::new("failed".into(), "failed".into(), ModelTier::Actuator);
        n2.state = NodeState::Failed;
        orch.add_node(n1);
        orch.add_node(n2);

        let result = orch.run_resumed().await;
        assert!(result.is_ok());
    }

    // Smoke test: persisting a review decision for an unknown node must not
    // panic (no assertions — success is simply returning).
    #[tokio::test]
    async fn test_persist_review_decision_no_panic() {
        let orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_review"));
        orch.persist_review_decision("node_x", "approved", None);
    }
3048
    // An Implementation child whose Interface parent has no structural
    // digest in the restriction map is reported as a prose-only gap.
    #[tokio::test]
    async fn test_check_structural_dependencies_blocks_prose_only() {
        use perspt_core::types::{NodeClass, RestrictionMap};

        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_struct_dep"));

        let mut parent = SRBNNode::new("iface_1".into(), "Define API".into(), ModelTier::Architect);
        parent.node_class = NodeClass::Interface;

        let mut child = SRBNNode::new("impl_1".into(), "Implement API".into(), ModelTier::Actuator);
        child.node_class = NodeClass::Implementation;

        let parent_idx = orch.add_node(parent);
        let child_idx = orch.add_node(child.clone());
        orch.graph
            .add_edge(parent_idx, child_idx, Dependency { kind: "dep".into() });

        // A fresh restriction map has no digests for the parent.
        let rmap = RestrictionMap::for_node("impl_1");
        let gaps = orch.check_structural_dependencies(&child, &rmap);

        assert_eq!(gaps.len(), 1);
        assert_eq!(gaps[0].0, "iface_1");
        assert!(gaps[0].1.contains("no Signature/Schema/InterfaceSeal"));
    }

    // A Signature digest sourced from the parent satisfies the structural
    // requirement, so no gap is reported.
    #[tokio::test]
    async fn test_check_structural_dependencies_passes_with_digest() {
        use perspt_core::types::{ArtifactKind, NodeClass, RestrictionMap, StructuralDigest};

        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_struct_ok"));

        let mut parent = SRBNNode::new("iface_2".into(), "Define API".into(), ModelTier::Architect);
        parent.node_class = NodeClass::Interface;

        let mut child = SRBNNode::new("impl_2".into(), "Implement API".into(), ModelTier::Actuator);
        child.node_class = NodeClass::Implementation;

        let parent_idx = orch.add_node(parent);
        let child_idx = orch.add_node(child.clone());
        orch.graph
            .add_edge(parent_idx, child_idx, Dependency { kind: "dep".into() });

        let mut rmap = RestrictionMap::for_node("impl_2");
        rmap.structural_digests.push(StructuralDigest::from_content(
            "iface_2",
            "api.rs",
            ArtifactKind::Signature,
            b"fn do_thing(x: i32) -> bool;",
        ));

        let gaps = orch.check_structural_dependencies(&child, &rmap);
        assert!(gaps.is_empty(), "Expected no gaps when digest present");
    }

    // Non-Implementation nodes (here Integration) skip the check entirely.
    #[tokio::test]
    async fn test_check_structural_dependencies_skips_non_implementation() {
        use perspt_core::types::{NodeClass, RestrictionMap};

        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_struct_skip"));

        let mut node = SRBNNode::new("integ_1".into(), "Wire modules".into(), ModelTier::Actuator);
        node.node_class = NodeClass::Integration;
        orch.add_node(node.clone());

        let rmap = RestrictionMap::for_node("integ_1");
        let gaps = orch.check_structural_dependencies(&node, &rmap);
        assert!(gaps.is_empty(), "Integration nodes should skip the check");
    }
3126
    // Default models differ across tiers so each agent role gets a distinct
    // model by default.
    #[tokio::test]
    async fn test_tier_default_models_are_differentiated() {
        let arch = ModelTier::Architect.default_model();
        let act = ModelTier::Actuator.default_model();
        let spec = ModelTier::Speculator.default_model();

        assert_ne!(arch, act, "Architect and Actuator defaults should differ");
        assert_ne!(spec, arch, "Speculator should differ from Architect");
    }

    // new_with_models stores the explicit model chosen for each tier.
    #[tokio::test]
    async fn test_orchestrator_stores_all_four_tier_models() {
        let orch = SRBNOrchestrator::new_with_models(
            PathBuf::from("/tmp/test_tiers"),
            false,
            Some("arch-model".into()),
            Some("act-model".into()),
            Some("ver-model".into()),
            Some("spec-model".into()),
            None,
            None,
            None,
            None,
        );
        assert_eq!(orch.architect_model, "arch-model");
        assert_eq!(orch.actuator_model, "act-model");
        assert_eq!(orch.verifier_model, "ver-model");
        assert_eq!(orch.speculator_model, "spec-model");
    }

    // Without explicit models, each tier falls back to its default.
    #[tokio::test]
    async fn test_orchestrator_default_tier_models() {
        let orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_tier_defaults"));
        assert_eq!(orch.architect_model, ModelTier::Architect.default_model());
        assert_eq!(orch.actuator_model, ModelTier::Actuator.default_model());
        assert_eq!(orch.verifier_model, ModelTier::Verifier.default_model());
        assert_eq!(orch.speculator_model, ModelTier::Speculator.default_model());
    }
3172
    // Two tasks claiming the same output file are rejected at planning time,
    // and the error names the duplicated path.
    #[tokio::test]
    async fn test_create_nodes_rejects_duplicate_output_files() {
        use perspt_core::types::PlannedTask;

        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_dup_outputs"));

        let plan = TaskPlan {
            tasks: vec![
                PlannedTask {
                    id: "task_1".into(),
                    goal: "Create math".into(),
                    output_files: vec!["src/math.py".into(), "tests/test_math.py".into()],
                    ..PlannedTask::new("task_1", "Create math")
                },
                PlannedTask {
                    id: "task_2".into(),
                    goal: "Create tests".into(),
                    // Duplicate claim on tests/test_math.py.
                    output_files: vec!["tests/test_math.py".into()],
                    ..PlannedTask::new("task_2", "Create tests")
                },
            ],
        };

        let result = orch.create_nodes_from_plan(&plan);
        assert!(result.is_err(), "Should reject duplicate output_files");
        let err = result.unwrap_err().to_string();
        assert!(
            err.contains("tests/test_math.py"),
            "Error should mention the duplicate file: {}",
            err
        );
    }

    // Disjoint output files across tasks are accepted and create one node
    // per task.
    #[tokio::test]
    async fn test_create_nodes_accepts_unique_output_files() {
        use perspt_core::types::PlannedTask;

        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_unique_outputs"));

        let plan = TaskPlan {
            tasks: vec![
                PlannedTask {
                    id: "task_1".into(),
                    goal: "Create math".into(),
                    output_files: vec!["src/math.py".into()],
                    ..PlannedTask::new("task_1", "Create math")
                },
                PlannedTask {
                    id: "test_1".into(),
                    goal: "Test math".into(),
                    output_files: vec!["tests/test_math.py".into()],
                    dependencies: vec!["task_1".into()],
                    ..PlannedTask::new("test_1", "Test math")
                },
            ],
        };

        let result = orch.create_nodes_from_plan(&plan);
        assert!(result.is_ok(), "Should accept unique output_files");
        assert_eq!(orch.graph.node_count(), 2);
    }
3234
    // Each output file gets a manifest entry, and the node's owner plugin is
    // chosen by majority vote over its files (all .py => "python").
    #[tokio::test]
    async fn test_ownership_manifest_built_with_majority_plugin_vote() {
        use perspt_core::types::PlannedTask;

        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_plugin_vote"));

        let plan = TaskPlan {
            tasks: vec![PlannedTask {
                id: "task_1".into(),
                goal: "Create Python module".into(),
                output_files: vec![
                    "src/main.py".into(),
                    "src/helper.py".into(),
                    "src/__init__.py".into(),
                ],
                ..PlannedTask::new("task_1", "Create Python module")
            }],
        };

        orch.create_nodes_from_plan(&plan).unwrap();

        assert_eq!(orch.context.ownership_manifest.len(), 3);
        let idx = orch.node_indices["task_1"];
        assert_eq!(orch.graph[idx].owner_plugin, "python");
    }
3262
    // Writes outside a node's declared output_targets are stripped before
    // the bundle is applied: only the declared file lands on disk.
    #[tokio::test]
    async fn test_apply_bundle_strips_paths_outside_node_output_targets() {
        use perspt_core::types::{ArtifactBundle, ArtifactOperation, PlannedTask};

        let temp_dir = std::env::temp_dir().join(format!(
            "perspt_bundle_target_guard_{}",
            uuid::Uuid::new_v4()
        ));
        std::fs::create_dir_all(temp_dir.join("src")).unwrap();

        let mut orch = SRBNOrchestrator::new_for_testing(temp_dir.clone());
        let plan = TaskPlan {
            tasks: vec![
                PlannedTask {
                    id: "validate_module".into(),
                    goal: "Create validation module".into(),
                    output_files: vec!["src/validate.rs".into()],
                    ..PlannedTask::new("validate_module", "Create validation module")
                },
                PlannedTask {
                    id: "lib_module".into(),
                    goal: "Export validation module".into(),
                    output_files: vec!["src/lib.rs".into()],
                    dependencies: vec!["validate_module".into()],
                    ..PlannedTask::new("lib_module", "Export validation module")
                },
            ],
        };

        orch.create_nodes_from_plan(&plan).unwrap();

        // The bundle claims both files, but only src/validate.rs belongs to
        // the node being applied below.
        let bundle = ArtifactBundle {
            artifacts: vec![
                ArtifactOperation::Write {
                    path: "src/validate.rs".into(),
                    content: "pub fn ok() {}".into(),
                },
                ArtifactOperation::Write {
                    path: "src/lib.rs".into(),
                    content: "pub mod validate;".into(),
                },
            ],
            commands: vec![],
        };

        orch.apply_bundle_transactionally(
            &bundle,
            "validate_module",
            perspt_core::types::NodeClass::Implementation,
        )
        .await
        .expect("Should apply valid artifacts after stripping undeclared paths");

        assert!(temp_dir.join("src/validate.rs").exists());
        assert!(!temp_dir.join("src/lib.rs").exists());
    }
3323
    // With a provisional branch, bundle writes land in the branch sandbox
    // and do not touch the real workspace until the branch is merged.
    #[tokio::test]
    async fn test_apply_bundle_writes_into_branch_sandbox() {
        use perspt_core::types::{ArtifactBundle, ArtifactOperation, PlannedTask};

        let temp_dir = std::env::temp_dir().join(format!(
            "perspt_branch_sandbox_write_{}",
            uuid::Uuid::new_v4()
        ));
        std::fs::create_dir_all(temp_dir.join("src")).unwrap();
        std::fs::write(temp_dir.join("src/lib.rs"), "pub fn old() {}\n").unwrap();

        let mut orch = SRBNOrchestrator::new_for_testing(temp_dir.clone());
        orch.context.session_id = uuid::Uuid::new_v4().to_string();

        let plan = TaskPlan {
            tasks: vec![
                PlannedTask {
                    id: "parent".into(),
                    goal: "Parent node".into(),
                    output_files: vec!["src/lib.rs".into()],
                    ..PlannedTask::new("parent", "Parent node")
                },
                PlannedTask {
                    id: "child".into(),
                    goal: "Child node".into(),
                    context_files: vec!["src/lib.rs".into()],
                    output_files: vec!["src/child.rs".into()],
                    dependencies: vec!["parent".into()],
                    ..PlannedTask::new("child", "Child node")
                },
            ],
        };

        orch.create_nodes_from_plan(&plan).unwrap();
        let child_idx = orch.node_indices["child"];
        let branch_id = orch.maybe_create_provisional_branch(child_idx).unwrap();
        let sandbox_dir = orch.sandbox_dir_for_node(child_idx).unwrap();

        let bundle = ArtifactBundle {
            artifacts: vec![ArtifactOperation::Write {
                path: "src/child.rs".into(),
                content: "pub fn child() {}\n".into(),
            }],
            commands: vec![],
        };

        orch.apply_bundle_transactionally(
            &bundle,
            "child",
            perspt_core::types::NodeClass::Implementation,
        )
        .await
        .unwrap();

        // The write exists only in the sandbox until the branch is merged.
        assert!(sandbox_dir.join("src/child.rs").exists());
        assert!(!temp_dir.join("src/child.rs").exists());

        orch.merge_provisional_branch(&branch_id, child_idx);
    }
3383
3384 #[test]
3385 fn test_verification_stages_for_node_classes() {
3386 use perspt_core::plugin::VerifierStage;
3387
3388 let interface_node =
3390 SRBNNode::new("iface".into(), "Define trait".into(), ModelTier::Actuator);
3391 let mut interface_node = interface_node;
3393 interface_node.node_class = perspt_core::types::NodeClass::Interface;
3394 let stages = verification_stages_for_node(&interface_node);
3395 assert_eq!(stages, vec![VerifierStage::SyntaxCheck]);
3396
3397 let mut implementation_node = SRBNNode::new(
3399 "impl".into(),
3400 "Implement feature".into(),
3401 ModelTier::Actuator,
3402 );
3403 implementation_node.node_class = perspt_core::types::NodeClass::Implementation;
3404 let stages = verification_stages_for_node(&implementation_node);
3405 assert_eq!(
3406 stages,
3407 vec![VerifierStage::SyntaxCheck, VerifierStage::Build]
3408 );
3409
3410 implementation_node
3412 .contract
3413 .weighted_tests
3414 .push(perspt_core::types::WeightedTest {
3415 test_name: "test_feature".into(),
3416 criticality: perspt_core::types::Criticality::High,
3417 });
3418 let stages = verification_stages_for_node(&implementation_node);
3419 assert_eq!(
3420 stages,
3421 vec![
3422 VerifierStage::SyntaxCheck,
3423 VerifierStage::Build,
3424 VerifierStage::Test
3425 ]
3426 );
3427
3428 let mut integration_node =
3430 SRBNNode::new("test".into(), "Verify feature".into(), ModelTier::Actuator);
3431 integration_node.node_class = perspt_core::types::NodeClass::Integration;
3432 integration_node
3433 .contract
3434 .weighted_tests
3435 .push(perspt_core::types::WeightedTest {
3436 test_name: "test_feature".into(),
3437 criticality: perspt_core::types::Criticality::High,
3438 });
3439 let stages = verification_stages_for_node(&integration_node);
3440 assert_eq!(
3441 stages,
3442 vec![
3443 VerifierStage::SyntaxCheck,
3444 VerifierStage::Build,
3445 VerifierStage::Test,
3446 VerifierStage::Lint,
3447 ]
3448 );
3449 }
3450
3451 #[tokio::test]
3456 async fn test_classify_workspace_empty_dir() {
3457 let temp = tempfile::tempdir().unwrap();
3458 let orch = SRBNOrchestrator::new_for_testing(temp.path().to_path_buf());
3459 let state = orch.classify_workspace("build a web app");
3460 assert!(matches!(state, WorkspaceState::Greenfield { .. }));
3462 }
3463
3464 #[tokio::test]
3465 async fn test_classify_workspace_empty_dir_no_lang() {
3466 let temp = tempfile::tempdir().unwrap();
3467 let orch = SRBNOrchestrator::new_for_testing(temp.path().to_path_buf());
3468 let state = orch.classify_workspace("do something");
3469 match state {
3471 WorkspaceState::Greenfield { inferred_lang } => assert!(inferred_lang.is_none()),
3472 _ => panic!("expected Greenfield, got {:?}", state),
3473 }
3474 }
3475
3476 #[tokio::test]
3477 async fn test_classify_workspace_existing_rust_project() {
3478 let temp = tempfile::tempdir().unwrap();
3479 std::fs::write(
3481 temp.path().join("Cargo.toml"),
3482 "[package]\nname = \"test\"\nversion = \"0.1.0\"",
3483 )
3484 .unwrap();
3485 let orch = SRBNOrchestrator::new_for_testing(temp.path().to_path_buf());
3486 let state = orch.classify_workspace("add a feature");
3487 match state {
3488 WorkspaceState::ExistingProject { plugins } => {
3489 assert!(plugins.contains(&"rust".to_string()));
3490 }
3491 _ => panic!("expected ExistingProject, got {:?}", state),
3492 }
3493 }
3494
3495 #[tokio::test]
3496 async fn test_classify_workspace_existing_python_project() {
3497 let temp = tempfile::tempdir().unwrap();
3498 std::fs::write(
3499 temp.path().join("pyproject.toml"),
3500 "[project]\nname = \"test\"",
3501 )
3502 .unwrap();
3503 let orch = SRBNOrchestrator::new_for_testing(temp.path().to_path_buf());
3504 let state = orch.classify_workspace("add a feature");
3505 match state {
3506 WorkspaceState::ExistingProject { plugins } => {
3507 assert!(plugins.contains(&"python".to_string()));
3508 }
3509 _ => panic!("expected ExistingProject, got {:?}", state),
3510 }
3511 }
3512
3513 #[tokio::test]
3514 async fn test_classify_workspace_existing_js_project() {
3515 let temp = tempfile::tempdir().unwrap();
3516 std::fs::write(temp.path().join("package.json"), "{}").unwrap();
3517 let orch = SRBNOrchestrator::new_for_testing(temp.path().to_path_buf());
3518 let state = orch.classify_workspace("add auth");
3519 match state {
3520 WorkspaceState::ExistingProject { plugins } => {
3521 assert!(plugins.contains(&"javascript".to_string()));
3522 }
3523 _ => panic!("expected ExistingProject, got {:?}", state),
3524 }
3525 }
3526
3527 #[tokio::test]
3528 async fn test_classify_workspace_ambiguous_with_misc_files() {
3529 let temp = tempfile::tempdir().unwrap();
3530 std::fs::write(temp.path().join("notes.txt"), "hello").unwrap();
3532 std::fs::write(temp.path().join("data.csv"), "a,b,c").unwrap();
3533 let orch = SRBNOrchestrator::new_for_testing(temp.path().to_path_buf());
3534 let state = orch.classify_workspace("do something");
3535 assert!(matches!(state, WorkspaceState::Ambiguous));
3536 }
3537
3538 #[tokio::test]
3539 async fn test_classify_workspace_greenfield_with_rust_task() {
3540 let temp = tempfile::tempdir().unwrap();
3541 let orch = SRBNOrchestrator::new_for_testing(temp.path().to_path_buf());
3542 let state = orch.classify_workspace("create a rust CLI tool");
3543 match state {
3544 WorkspaceState::Greenfield { inferred_lang } => {
3545 assert_eq!(inferred_lang, Some("rust".to_string()));
3546 }
3547 _ => panic!("expected Greenfield, got {:?}", state),
3548 }
3549 }
3550
3551 #[tokio::test]
3552 async fn test_classify_workspace_greenfield_with_python_task() {
3553 let temp = tempfile::tempdir().unwrap();
3554 let orch = SRBNOrchestrator::new_for_testing(temp.path().to_path_buf());
3555 let state = orch.classify_workspace("build a python flask API");
3556 match state {
3557 WorkspaceState::Greenfield { inferred_lang } => {
3558 assert_eq!(inferred_lang, Some("python".to_string()));
3559 }
3560 _ => panic!("expected Greenfield, got {:?}", state),
3561 }
3562 }
3563
3564 #[tokio::test]
3569 async fn test_check_prerequisites_returns_true_when_tools_available() {
3570 let orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
3571 let registry = perspt_core::plugin::PluginRegistry::new();
3572 if let Some(plugin) = registry.get("rust") {
3574 let result = orch.check_tool_prerequisites(plugin);
3575 let _ = result;
3578 }
3579 }
3580
3581 #[test]
3582 fn test_required_binaries_rust_includes_cargo() {
3583 let registry = perspt_core::plugin::PluginRegistry::new();
3584 let plugin = registry.get("rust").unwrap();
3585 let bins = plugin.required_binaries();
3586 assert!(bins.iter().any(|(name, _, _)| *name == "cargo"));
3587 assert!(bins.iter().any(|(name, _, _)| *name == "rustc"));
3588 }
3589
3590 #[test]
3591 fn test_required_binaries_python_includes_uv() {
3592 let registry = perspt_core::plugin::PluginRegistry::new();
3593 let plugin = registry.get("python").unwrap();
3594 let bins = plugin.required_binaries();
3595 assert!(bins.iter().any(|(name, _, _)| *name == "uv"));
3596 assert!(bins.iter().any(|(name, _, _)| *name == "python3"));
3597 }
3598
3599 #[test]
3600 fn test_required_binaries_js_includes_node() {
3601 let registry = perspt_core::plugin::PluginRegistry::new();
3602 let plugin = registry.get("javascript").unwrap();
3603 let bins = plugin.required_binaries();
3604 assert!(bins.iter().any(|(name, _, _)| *name == "node"));
3605 assert!(bins.iter().any(|(name, _, _)| *name == "npm"));
3606 }
3607
3608 #[tokio::test]
3613 async fn test_fallback_defaults_to_none_without_explicit_config() {
3614 let orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
3615 assert!(orch.architect_fallback_model.is_none());
3616 assert!(orch.actuator_fallback_model.is_none());
3617 assert!(orch.verifier_fallback_model.is_none());
3618 assert!(orch.speculator_fallback_model.is_none());
3619 }
3620
3621 #[tokio::test]
3622 async fn test_explicit_fallback_stored_correctly() {
3623 let orch = SRBNOrchestrator::new_with_models(
3624 PathBuf::from("/tmp/test_fallback"),
3625 false,
3626 None,
3627 None,
3628 None,
3629 None,
3630 Some("gpt-4o".into()),
3631 Some("gpt-4o-mini".into()),
3632 Some("gpt-4o".into()),
3633 Some("gpt-4o-mini".into()),
3634 );
3635 assert_eq!(orch.architect_fallback_model, Some("gpt-4o".to_string()));
3636 assert_eq!(
3637 orch.actuator_fallback_model,
3638 Some("gpt-4o-mini".to_string())
3639 );
3640 assert_eq!(orch.verifier_fallback_model, Some("gpt-4o".to_string()));
3641 assert_eq!(
3642 orch.speculator_fallback_model,
3643 Some("gpt-4o-mini".to_string())
3644 );
3645 }
3646
3647 #[tokio::test]
3648 async fn test_per_tier_models_independent() {
3649 let orch = SRBNOrchestrator::new_with_models(
3650 PathBuf::from("/tmp/test_tiers_independent"),
3651 false,
3652 Some("arch".into()),
3653 Some("act".into()),
3654 Some("ver".into()),
3655 Some("spec".into()),
3656 None,
3657 None,
3658 None,
3659 None,
3660 );
3661 assert_ne!(orch.architect_model, orch.actuator_model);
3663 assert_ne!(orch.verifier_model, orch.speculator_model);
3664 }
3665
3666 #[test]
3671 fn test_extract_missing_python_modules_basic() {
3672 let output = r#"
3673FAILED tests/test_core.py::TestPipeline::test_run - ModuleNotFoundError: No module named 'httpx'
3674E ModuleNotFoundError: No module named 'pydantic'
3675ImportError: No module named 'pyarrow'
3676"#;
3677 let mut missing = SRBNOrchestrator::extract_missing_python_modules(output);
3678 missing.sort();
3679 assert_eq!(missing, vec!["httpx", "pyarrow", "pydantic"]);
3680 }
3681
3682 #[test]
3683 fn test_extract_missing_python_modules_subpackage() {
3684 let output = "ModuleNotFoundError: No module named 'foo.bar.baz'";
3685 let missing = SRBNOrchestrator::extract_missing_python_modules(output);
3686 assert_eq!(missing, vec!["foo"]);
3687 }
3688
3689 #[test]
3690 fn test_extract_missing_python_modules_stdlib_filtered() {
3691 let output = r#"
3692ModuleNotFoundError: No module named 'numpy'
3693ModuleNotFoundError: No module named 'os'
3694ModuleNotFoundError: No module named 'json'
3695"#;
3696 let missing = SRBNOrchestrator::extract_missing_python_modules(output);
3697 assert_eq!(missing, vec!["numpy"]);
3698 }
3699
3700 #[test]
3701 fn test_extract_missing_python_modules_empty() {
3702 let output = "All tests passed!\n3 passed in 0.5s";
3703 let missing = SRBNOrchestrator::extract_missing_python_modules(output);
3704 assert!(missing.is_empty());
3705 }
3706
3707 #[test]
3708 fn test_python_import_to_package_mapping() {
3709 assert_eq!(SRBNOrchestrator::python_import_to_package("PIL"), "pillow");
3710 assert_eq!(SRBNOrchestrator::python_import_to_package("yaml"), "pyyaml");
3711 assert_eq!(
3712 SRBNOrchestrator::python_import_to_package("cv2"),
3713 "opencv-python"
3714 );
3715 assert_eq!(
3716 SRBNOrchestrator::python_import_to_package("sklearn"),
3717 "scikit-learn"
3718 );
3719 assert_eq!(
3720 SRBNOrchestrator::python_import_to_package("bs4"),
3721 "beautifulsoup4"
3722 );
3723 assert_eq!(SRBNOrchestrator::python_import_to_package("httpx"), "httpx");
3725 assert_eq!(
3726 SRBNOrchestrator::python_import_to_package("fastapi"),
3727 "fastapi"
3728 );
3729 }
3730
3731 #[test]
3732 fn test_normalize_command_to_uv_pip_install() {
3733 assert_eq!(
3734 SRBNOrchestrator::normalize_command_to_uv("pip install httpx"),
3735 "uv add httpx"
3736 );
3737 assert_eq!(
3738 SRBNOrchestrator::normalize_command_to_uv("pip3 install httpx pydantic"),
3739 "uv add httpx pydantic"
3740 );
3741 assert_eq!(
3742 SRBNOrchestrator::normalize_command_to_uv("python -m pip install requests"),
3743 "uv add requests"
3744 );
3745 assert_eq!(
3746 SRBNOrchestrator::normalize_command_to_uv("python3 -m pip install flask"),
3747 "uv add flask"
3748 );
3749 }
3750
3751 #[test]
3752 fn test_normalize_command_to_uv_requirements_file() {
3753 assert_eq!(
3754 SRBNOrchestrator::normalize_command_to_uv("pip install -r requirements.txt"),
3755 "uv pip install -r requirements.txt"
3756 );
3757 }
3758
3759 #[test]
3760 fn test_normalize_command_to_uv_passthrough() {
3761 assert_eq!(
3763 SRBNOrchestrator::normalize_command_to_uv("uv add httpx"),
3764 "uv add httpx"
3765 );
3766 assert_eq!(
3768 SRBNOrchestrator::normalize_command_to_uv("cargo add serde"),
3769 "cargo add serde"
3770 );
3771 assert_eq!(
3772 SRBNOrchestrator::normalize_command_to_uv("npm install lodash"),
3773 "npm install lodash"
3774 );
3775 }
3776
3777 #[test]
3778 fn test_extract_commands_from_correction_includes_uv() {
3779 let response = r#"Here's the fix:
3780Commands:
3781```
3782uv add httpx
3783uv add --dev pytest
3784cargo add serde
3785pip install numpy
3786```
3787File: main.py
3788```python
3789import httpx
3790```"#;
3791 let commands = SRBNOrchestrator::extract_commands_from_correction(response);
3792 assert!(
3793 commands.contains(&"uv add httpx".to_string()),
3794 "{:?}",
3795 commands
3796 );
3797 assert!(
3798 commands.contains(&"cargo add serde".to_string()),
3799 "{:?}",
3800 commands
3801 );
3802 assert!(
3803 commands.contains(&"pip install numpy".to_string()),
3804 "{:?}",
3805 commands
3806 );
3807 }
3808
3809 #[test]
3810 fn test_extract_all_code_blocks_multiple_files() {
3811 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
3812 let content = r#"Here are the files:
3813
3814File: src/etl_pipeline/core.py
3815```python
3816def run_pipeline():
3817 pass
3818```
3819
3820File: src/etl_pipeline/validator.py
3821```python
3822def validate(data):
3823 return True
3824```
3825
3826File: tests/test_core.py
3827```python
3828from etl_pipeline.core import run_pipeline
3829
3830def test_run():
3831 run_pipeline()
3832```
3833"#;
3834 let blocks = orch.extract_all_code_blocks_from_response(content);
3835 assert_eq!(blocks.len(), 3, "Expected 3 blocks, got {:?}", blocks);
3836 assert_eq!(blocks[0].0, "src/etl_pipeline/core.py");
3837 assert_eq!(blocks[1].0, "src/etl_pipeline/validator.py");
3838 assert_eq!(blocks[2].0, "tests/test_core.py");
3839 assert!(!blocks[0].2, "core.py should not be a diff");
3840 }
3841
3842 #[test]
3843 fn test_extract_all_code_blocks_single_file() {
3844 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
3845 let content = r#"File: main.py
3846```python
3847print("hello")
3848```"#;
3849 let blocks = orch.extract_all_code_blocks_from_response(content);
3850 assert_eq!(blocks.len(), 1);
3851 assert_eq!(blocks[0].0, "main.py");
3852 }
3853
3854 #[test]
3855 fn test_extract_all_code_blocks_mixed_file_and_diff() {
3856 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
3857 let content = r#"File: new_module.py
3858```python
3859def new_fn():
3860 pass
3861```
3862
3863Diff: existing.py
3864```diff
3865--- existing.py
3866+++ existing.py
3867@@ -1 +1,2 @@
3868+import new_module
3869 def old_fn():
3870```"#;
3871 let blocks = orch.extract_all_code_blocks_from_response(content);
3872 assert_eq!(blocks.len(), 2);
3873 assert_eq!(blocks[0].0, "new_module.py");
3874 assert!(!blocks[0].2, "new_module.py should be a write");
3875 assert_eq!(blocks[1].0, "existing.py");
3876 assert!(blocks[1].2, "existing.py should be a diff");
3877 }
3878
3879 #[test]
3880 fn test_parse_artifact_bundle_legacy_multi_file() {
3881 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
3882 let content = r#"File: core.py
3883```python
3884def core():
3885 pass
3886```
3887
3888File: utils.py
3889```python
3890def util():
3891 pass
3892```"#;
3893 let bundle = orch.parse_artifact_bundle(content);
3894 assert!(bundle.is_some(), "Should parse multi-file legacy response");
3895 let bundle = bundle.unwrap();
3896 assert_eq!(bundle.artifacts.len(), 2, "Should have 2 artifacts");
3897 assert_eq!(bundle.artifacts[0].path(), "core.py");
3898 assert_eq!(bundle.artifacts[1].path(), "utils.py");
3899 }
3900
3901 #[test]
3906 fn test_parse_artifact_bundle_structured_json() {
3907 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
3908 let content = r#"Here is the output:
3909```json
3910{
3911 "artifacts": [
3912 {"operation": "write", "path": "src/main.py", "content": "print('hello')"},
3913 {"operation": "diff", "path": "src/lib.py", "patch": "--- a\n+++ b\n@@ -1 +1 @@\n-old\n+new"}
3914 ],
3915 "commands": ["uv add requests"]
3916}
3917```"#;
3918 let bundle = orch.parse_artifact_bundle(content);
3919 assert!(bundle.is_some(), "Should parse structured JSON bundle");
3920 let bundle = bundle.unwrap();
3921 assert_eq!(bundle.artifacts.len(), 2);
3922 assert!(bundle.artifacts[0].is_write());
3923 assert_eq!(bundle.artifacts[0].path(), "src/main.py");
3924 assert!(bundle.artifacts[1].is_diff());
3925 assert_eq!(bundle.artifacts[1].path(), "src/lib.py");
3926 assert_eq!(bundle.commands, vec!["uv add requests"]);
3927 }
3928
3929 #[test]
3930 fn test_parse_artifact_bundle_json_with_empty_path_falls_through() {
3931 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
3932 let content = r#"```json
3936{
3937 "artifacts": [
3938 {"operation": "write", "path": "", "content": "bad"}
3939 ],
3940 "commands": []
3941}
3942```"#;
3943 let bundle = orch.parse_artifact_bundle(content);
3944 assert!(
3945 bundle.is_none(),
3946 "Invalid bundle with artifacts key should be skipped"
3947 );
3948 }
3949
3950 #[test]
3951 fn test_parse_artifact_bundle_json_absolute_path_falls_through() {
3952 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
3953 let content = r#"```json
3957{
3958 "artifacts": [
3959 {"operation": "write", "path": "/etc/passwd", "content": "bad"}
3960 ],
3961 "commands": []
3962}
3963```"#;
3964 let bundle = orch.parse_artifact_bundle(content);
3965 assert!(
3966 bundle.is_none(),
3967 "Invalid bundle with artifacts key should be skipped"
3968 );
3969 }
3970
3971 #[test]
3972 fn test_parse_artifact_bundle_returns_none_for_garbage() {
3973 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
3974 let content = "This is just a plain text response with no code blocks at all.";
3975 assert!(orch.parse_artifact_bundle(content).is_none());
3976 }
3977
3978 #[tokio::test]
3979 async fn test_effective_working_dir_with_sandbox() {
3980 let temp_dir = std::env::temp_dir().join(format!(
3983 "perspt_eff_workdir_sandbox_{}",
3984 uuid::Uuid::new_v4()
3985 ));
3986 std::fs::create_dir_all(&temp_dir).unwrap();
3987
3988 let mut orch = SRBNOrchestrator::new_for_testing(temp_dir.clone());
3989 orch.context.session_id = "test_session".into();
3990
3991 let parent = SRBNNode::new("root".into(), "root goal".into(), ModelTier::Actuator);
3992 let child = SRBNNode::new("child".into(), "child goal".into(), ModelTier::Actuator);
3993 orch.add_node(parent);
3994 orch.add_node(child);
3995 orch.add_dependency("root", "child", "dep").unwrap();
3996
3997 let child_idx = orch.node_indices["child"];
3998 let branch_id = orch.maybe_create_provisional_branch(child_idx).unwrap();
3999
4000 let sandbox_path = temp_dir
4001 .join(".perspt")
4002 .join("sandboxes")
4003 .join("test_session")
4004 .join(&branch_id);
4005 assert!(sandbox_path.exists(), "Sandbox should have been created");
4006
4007 let eff = orch.effective_working_dir(child_idx);
4009 assert_eq!(eff, sandbox_path);
4010
4011 let _ = std::fs::remove_dir_all(&temp_dir);
4013 }
4014
4015 #[tokio::test]
4016 async fn test_sandbox_dir_for_node_returns_path_when_exists() {
4017 let temp_dir = std::env::temp_dir().join(format!(
4018 "perspt_sandbox_dir_exists_{}",
4019 uuid::Uuid::new_v4()
4020 ));
4021 std::fs::create_dir_all(&temp_dir).unwrap();
4022
4023 let mut orch = SRBNOrchestrator::new_for_testing(temp_dir.clone());
4024 orch.context.session_id = "sess".into();
4025
4026 let parent = SRBNNode::new("p".into(), "g".into(), ModelTier::Actuator);
4027 let child = SRBNNode::new("c".into(), "g".into(), ModelTier::Actuator);
4028 orch.add_node(parent);
4029 orch.add_node(child);
4030 orch.add_dependency("p", "c", "dep").unwrap();
4031
4032 let child_idx = orch.node_indices["c"];
4033 let branch_id = orch.maybe_create_provisional_branch(child_idx).unwrap();
4034
4035 let sandbox = orch.sandbox_dir_for_node(child_idx);
4036 assert!(sandbox.is_some());
4037 let sandbox_path = sandbox.unwrap();
4038 assert!(sandbox_path.ends_with(&branch_id));
4039
4040 let _ = std::fs::remove_dir_all(&temp_dir);
4041 }
4042
4043 #[tokio::test]
4044 async fn test_root_node_bypasses_sandbox() {
4045 let temp_dir =
4048 std::env::temp_dir().join(format!("perspt_root_bypass_{}", uuid::Uuid::new_v4()));
4049 std::fs::create_dir_all(&temp_dir).unwrap();
4050
4051 let mut orch = SRBNOrchestrator::new_for_testing(temp_dir.clone());
4052
4053 let root = SRBNNode::new("root".into(), "root goal".into(), ModelTier::Actuator);
4054 orch.add_node(root);
4055
4056 let root_idx = orch.node_indices["root"];
4057 let branch = orch.maybe_create_provisional_branch(root_idx);
4059 assert!(
4060 branch.is_some(),
4061 "Root node should now get a provisional branch for sandbox isolation"
4062 );
4063
4064 let wd = orch.effective_working_dir(root_idx);
4066 assert_ne!(wd, temp_dir, "Root should use sandbox, not raw workspace");
4067 assert!(wd.to_string_lossy().contains("sandboxes"));
4068
4069 let _ = std::fs::remove_dir_all(&temp_dir);
4070 }
4071
4072 #[tokio::test]
4073 async fn test_step_commit_copies_sandbox_to_workspace() {
4074 use perspt_core::types::{ArtifactBundle, ArtifactOperation, PlannedTask};
4077
4078 let temp_dir =
4079 std::env::temp_dir().join(format!("perspt_commit_copy_{}", uuid::Uuid::new_v4()));
4080 std::fs::create_dir_all(temp_dir.join("src")).unwrap();
4081
4082 let mut orch = SRBNOrchestrator::new_for_testing(temp_dir.clone());
4083 orch.context.session_id = uuid::Uuid::new_v4().to_string();
4084
4085 let plan = TaskPlan {
4086 tasks: vec![
4087 PlannedTask {
4088 id: "parent".into(),
4089 goal: "Parent".into(),
4090 output_files: vec!["src/parent.rs".into()],
4091 ..PlannedTask::new("parent", "Parent")
4092 },
4093 PlannedTask {
4094 id: "child".into(),
4095 goal: "Child".into(),
4096 output_files: vec!["src/child.rs".into()],
4097 dependencies: vec!["parent".into()],
4098 ..PlannedTask::new("child", "Child")
4099 },
4100 ],
4101 };
4102 orch.create_nodes_from_plan(&plan).unwrap();
4103
4104 let child_idx = orch.node_indices["child"];
4105 let _branch_id = orch.maybe_create_provisional_branch(child_idx).unwrap();
4106
4107 let bundle = ArtifactBundle {
4109 artifacts: vec![ArtifactOperation::Write {
4110 path: "src/child.rs".into(),
4111 content: "pub fn child_fn() {}\n".into(),
4112 }],
4113 commands: vec![],
4114 };
4115 orch.apply_bundle_transactionally(
4116 &bundle,
4117 "child",
4118 perspt_core::types::NodeClass::Implementation,
4119 )
4120 .await
4121 .unwrap();
4122
4123 let sandbox = orch.sandbox_dir_for_node(child_idx).unwrap();
4125 assert!(sandbox.join("src/child.rs").exists());
4126 assert!(!temp_dir.join("src/child.rs").exists());
4127
4128 let child_idx = orch.node_indices["child"];
4130 let _ = orch.step_commit(child_idx).await;
4131
4132 assert!(
4134 temp_dir.join("src/child.rs").exists(),
4135 "step_commit should copy sandbox files to workspace"
4136 );
4137 let content = std::fs::read_to_string(temp_dir.join("src/child.rs")).unwrap();
4138 assert_eq!(content, "pub fn child_fn() {}\n");
4139
4140 let _ = std::fs::remove_dir_all(&temp_dir);
4141 }
4142
4143 #[test]
4144 fn test_parse_artifact_bundle_json_path_traversal_falls_through() {
4145 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
4146 let content = r#"```json
4150{
4151 "artifacts": [
4152 {"operation": "write", "path": "../../../etc/shadow", "content": "bad"}
4153 ],
4154 "commands": []
4155}
4156```"#;
4157 let bundle = orch.parse_artifact_bundle(content);
4158 assert!(
4159 bundle.is_none(),
4160 "Invalid bundle with artifacts key should be skipped"
4161 );
4162 }
4163
4164 #[test]
4167 fn test_dependency_expectations_threaded_to_nodes() {
4168 use perspt_core::types::{DependencyExpectation, PlannedTask, TaskPlan};
4169
4170 let mut plan = TaskPlan::new();
4171 let mut t1 = PlannedTask::new("t1", "Create server module");
4172 t1.output_files = vec!["src/server.py".to_string()];
4173 t1.dependency_expectations = DependencyExpectation {
4174 required_packages: vec!["flask".to_string(), "pydantic".to_string()],
4175 setup_commands: vec![],
4176 min_toolchain_version: Some("3.11".to_string()),
4177 };
4178 plan.tasks.push(t1);
4179
4180 let mut orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
4181 orch.create_nodes_from_plan(&plan).unwrap();
4182
4183 let idx = orch.node_indices["t1"];
4185 let node = &orch.graph[idx];
4186 assert_eq!(node.dependency_expectations.required_packages.len(), 2);
4187 assert_eq!(node.dependency_expectations.required_packages[0], "flask");
4188 assert_eq!(
4189 node.dependency_expectations
4190 .min_toolchain_version
4191 .as_deref(),
4192 Some("3.11")
4193 );
4194 }
4195
4196 #[test]
4197 fn test_verifier_readiness_gate_no_plugins() {
4198 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
4199 orch.check_verifier_readiness_gate();
4201 }
4202
4203 #[test]
4204 fn test_architect_prompt_includes_dependency_expectations() {
4205 let prompt = crate::prompts::render_architect(
4206 crate::prompts::ARCHITECT_EXISTING,
4207 "Build a web server",
4208 std::path::Path::new("/tmp"),
4209 "empty project",
4210 "",
4211 "",
4212 &[],
4213 );
4214 assert!(
4215 prompt.contains("dependency_expectations"),
4216 "Architect prompt must include dependency_expectations in the JSON schema"
4217 );
4218 assert!(
4219 prompt.contains("required_packages"),
4220 "Architect prompt must mention required_packages"
4221 );
4222 assert!(
4223 prompt.contains("min_toolchain_version"),
4224 "Architect prompt must mention min_toolchain_version"
4225 );
4226 }
4227
4228 #[test]
4231 fn test_budget_gate_stops_execution_when_exhausted() {
4232 let mut orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
4233 orch.set_budget(Some(0), None, None);
4235 assert!(
4236 orch.budget.any_exhausted(),
4237 "Budget with max_steps=0 should be immediately exhausted"
4238 );
4239 }
4240
4241 #[test]
4242 fn test_budget_step_recording() {
4243 let mut budget = perspt_core::types::BudgetEnvelope::new("test-session");
4244 budget.max_steps = Some(3);
4245 assert!(!budget.any_exhausted());
4246 budget.record_step();
4247 budget.record_step();
4248 assert!(!budget.any_exhausted());
4249 budget.record_step();
4250 assert!(budget.steps_exhausted());
4251 assert!(budget.any_exhausted());
4252 }
4253
4254 #[test]
4255 fn test_set_budget_configures_envelope() {
4256 let mut orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
4257 orch.set_budget(Some(10), Some(5), Some(2.50));
4258 assert_eq!(orch.budget.max_steps, Some(10));
4259 assert_eq!(orch.budget.max_revisions, Some(5));
4260 assert_eq!(orch.budget.max_cost_usd, Some(2.50));
4261 assert!(!orch.budget.any_exhausted());
4262 }
4263
4264 #[test]
4265 fn test_node_outcome_equality() {
4266 assert_eq!(NodeOutcome::Completed, NodeOutcome::Completed);
4267 assert_eq!(NodeOutcome::Escalated, NodeOutcome::Escalated);
4268 assert_ne!(NodeOutcome::Completed, NodeOutcome::Escalated);
4269 }
4270
4271 #[test]
4272 fn test_session_outcome_from_counts() {
4273 fn derive_outcome(completed: usize, escalated: usize) -> perspt_core::SessionOutcome {
4274 if escalated == 0 {
4275 perspt_core::SessionOutcome::Success
4276 } else if completed > 0 {
4277 perspt_core::SessionOutcome::PartialSuccess
4278 } else {
4279 perspt_core::SessionOutcome::Failed
4280 }
4281 }
4282
4283 assert_eq!(derive_outcome(3, 0), perspt_core::SessionOutcome::Success,);
4285 assert_eq!(
4287 derive_outcome(2, 1),
4288 perspt_core::SessionOutcome::PartialSuccess,
4289 );
4290 assert_eq!(derive_outcome(0, 3), perspt_core::SessionOutcome::Failed,);
4292 }
4293}