1use std::collections::{BTreeMap, BTreeSet};
4use std::fs;
5use std::path::Path;
6use std::process::Command;
7use std::sync::{Arc, Mutex};
8
9use async_trait::async_trait;
10use chrono::{DateTime, Duration, Utc};
11use oris_agent_contract::{
12 AgentRole, CoordinationMessage, CoordinationPlan, CoordinationPrimitive, CoordinationResult,
13 CoordinationTask, ExecutionFeedback, MutationProposal as AgentMutationProposal,
14};
15use oris_economics::{EconomicsSignal, EvuLedger, StakePolicy};
16use oris_evolution::{
17 compute_artifact_hash, next_id, rebuild_projection_from_events, stable_hash_json, AssetState,
18 BlastRadius, CandidateSource, Capsule, CapsuleId, EnvFingerprint, EvolutionError,
19 EvolutionEvent, EvolutionProjection, EvolutionStore, Gene, GeneCandidate, MutationId,
20 PreparedMutation, Selector, SelectorInput, StoreBackedSelector, StoredEvolutionEvent,
21 ValidationSnapshot,
22};
23use oris_evolution_network::{EvolutionEnvelope, NetworkAsset};
24use oris_governor::{DefaultGovernor, Governor, GovernorDecision, GovernorInput};
25use oris_kernel::{Kernel, KernelState, RunId};
26use oris_sandbox::{
27 compute_blast_radius, execute_allowed_command, Sandbox, SandboxPolicy, SandboxReceipt,
28};
29use oris_spec::CompiledMutationPlan;
30use serde::{Deserialize, Serialize};
31use thiserror::Error;
32
33pub use oris_evolution::{
34 default_store_root, ArtifactEncoding, AssetState as EvoAssetState,
35 BlastRadius as EvoBlastRadius, CandidateSource as EvoCandidateSource,
36 EnvFingerprint as EvoEnvFingerprint, EvolutionStore as EvoEvolutionStore, JsonlEvolutionStore,
37 MutationArtifact, MutationIntent, MutationTarget, Outcome, RiskLevel,
38 SelectorInput as EvoSelectorInput,
39};
40pub use oris_evolution_network::{
41 FetchQuery, FetchResponse, MessageType, PublishRequest, RevokeNotice,
42};
43pub use oris_governor::{CoolingWindow, GovernorConfig, RevocationReason};
44pub use oris_sandbox::{LocalProcessSandbox, SandboxPolicy as EvoSandboxPolicy};
45pub use oris_spec::{SpecCompileError, SpecCompiler, SpecDocument};
46
/// Ordered set of validation stages executed after a mutation is applied.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationPlan {
    /// Human-readable profile name (e.g. "oris-default"); recorded into
    /// validation snapshots.
    pub profile: String,
    /// Stages executed in order; `CommandValidator` stops at the first
    /// failing stage.
    pub stages: Vec<ValidationStage>,
}
52
53impl ValidationPlan {
54 pub fn oris_default() -> Self {
55 Self {
56 profile: "oris-default".into(),
57 stages: vec![
58 ValidationStage::Command {
59 program: "cargo".into(),
60 args: vec!["fmt".into(), "--all".into(), "--check".into()],
61 timeout_ms: 60_000,
62 },
63 ValidationStage::Command {
64 program: "cargo".into(),
65 args: vec!["check".into(), "--workspace".into()],
66 timeout_ms: 180_000,
67 },
68 ValidationStage::Command {
69 program: "cargo".into(),
70 args: vec![
71 "test".into(),
72 "-p".into(),
73 "oris-kernel".into(),
74 "-p".into(),
75 "oris-evolution".into(),
76 "-p".into(),
77 "oris-sandbox".into(),
78 "-p".into(),
79 "oris-evokernel".into(),
80 "--lib".into(),
81 ],
82 timeout_ms: 300_000,
83 },
84 ValidationStage::Command {
85 program: "cargo".into(),
86 args: vec![
87 "test".into(),
88 "-p".into(),
89 "oris-runtime".into(),
90 "--lib".into(),
91 ],
92 timeout_ms: 300_000,
93 },
94 ],
95 }
96 }
97}
98
/// A single validation step. Currently only external commands are supported.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum ValidationStage {
    /// Run `program` with `args` in the sandbox workdir, within a
    /// `timeout_ms` budget enforced by the sandbox command executor.
    Command {
        program: String,
        args: Vec<String>,
        timeout_ms: u64,
    },
}
107
/// Outcome of one [`ValidationStage`] execution.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationStageReport {
    /// Display label: the program followed by its arguments.
    pub stage: String,
    pub success: bool,
    /// `None` when the process could not be launched or was terminated
    /// without an exit code.
    pub exit_code: Option<i32>,
    pub duration_ms: u64,
    pub stdout: String,
    pub stderr: String,
}
117
/// Aggregate result of running a [`ValidationPlan`].
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationReport {
    /// False if any stage failed.
    pub success: bool,
    /// Wall-clock duration of the whole run.
    pub duration_ms: u64,
    pub stages: Vec<ValidationStageReport>,
    /// Concatenated stdout/stderr of all executed stages.
    pub logs: String,
}
125
/// Textual context from which deterministic signals are extracted
/// (see [`extract_deterministic_signals`]).
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionInput {
    pub patch_diff: String,
    pub intent: String,
    pub expected_effect: String,
    /// Signals the author explicitly declared; these are normalized as
    /// phrases in addition to being tokenized.
    pub declared_signals: Vec<String>,
    pub changed_files: Vec<String>,
    pub validation_success: bool,
    pub validation_logs: String,
    pub stage_outputs: Vec<String>,
}
137
/// Deterministic signal set plus a stable content hash over it.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionOutput {
    /// Deduplicated, lexicographically ordered signals (capped at 32).
    pub values: Vec<String>,
    /// Stable hash of `values`, used for equality/lookup.
    pub hash: String,
}
143
/// Template used to seed the evolution store with an initial gene/capsule.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SeedTemplate {
    pub id: String,
    pub intent: String,
    pub signals: Vec<String>,
    /// Diff applied when the seed is replayed.
    pub diff_payload: String,
    /// Name of the validation profile the seed was validated under.
    pub validation_profile: String,
}
152
/// Summary of a bootstrap/seeding pass over the evolution store.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct BootstrapReport {
    /// True when at least one asset was seeded.
    pub seeded: bool,
    pub genes_added: usize,
    pub capsules_added: usize,
}
159
160impl ValidationReport {
161 pub fn to_snapshot(&self, profile: &str) -> ValidationSnapshot {
162 ValidationSnapshot {
163 success: self.success,
164 profile: profile.to_string(),
165 duration_ms: self.duration_ms,
166 summary: if self.success {
167 "validation passed".into()
168 } else {
169 "validation failed".into()
170 },
171 }
172 }
173}
174
175pub fn extract_deterministic_signals(input: &SignalExtractionInput) -> SignalExtractionOutput {
176 let mut signals = BTreeSet::new();
177
178 for declared in &input.declared_signals {
179 if let Some(phrase) = normalize_signal_phrase(declared) {
180 signals.insert(phrase);
181 }
182 extend_signal_tokens(&mut signals, declared);
183 }
184
185 for text in [
186 input.patch_diff.as_str(),
187 input.intent.as_str(),
188 input.expected_effect.as_str(),
189 input.validation_logs.as_str(),
190 ] {
191 extend_signal_tokens(&mut signals, text);
192 }
193
194 for changed_file in &input.changed_files {
195 extend_signal_tokens(&mut signals, changed_file);
196 }
197
198 for stage_output in &input.stage_outputs {
199 extend_signal_tokens(&mut signals, stage_output);
200 }
201
202 signals.insert(if input.validation_success {
203 "validation passed".into()
204 } else {
205 "validation failed".into()
206 });
207
208 let values = signals.into_iter().take(32).collect::<Vec<_>>();
209 let hash =
210 stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
211 SignalExtractionOutput { values, hash }
212}
213
/// Errors surfaced by a [`Validator`] run.
#[derive(Debug, Error)]
pub enum ValidationError {
    /// The validation process itself could not be executed.
    #[error("validation execution failed: {0}")]
    Execution(String),
}
219
/// Runs a [`ValidationPlan`] against an applied mutation (identified by its
/// [`SandboxReceipt`]) and reports per-stage results.
#[async_trait]
pub trait Validator: Send + Sync {
    async fn run(
        &self,
        receipt: &SandboxReceipt,
        plan: &ValidationPlan,
    ) -> Result<ValidationReport, ValidationError>;
}
228
/// [`Validator`] that executes each stage as an allow-listed command under a
/// sandbox policy.
pub struct CommandValidator {
    policy: SandboxPolicy,
}
232
impl CommandValidator {
    /// Creates a validator that runs stage commands under `policy`.
    pub fn new(policy: SandboxPolicy) -> Self {
        Self { policy }
    }
}
238
#[async_trait]
impl Validator for CommandValidator {
    /// Executes each stage in order inside the receipt's workdir, collecting
    /// stdout/stderr into a combined log. Stops at the first failing stage.
    async fn run(
        &self,
        receipt: &SandboxReceipt,
        plan: &ValidationPlan,
    ) -> Result<ValidationReport, ValidationError> {
        let started = std::time::Instant::now();
        let mut stages = Vec::new();
        let mut success = true;
        let mut logs = String::new();

        for stage in &plan.stages {
            match stage {
                ValidationStage::Command {
                    program,
                    args,
                    timeout_ms,
                } => {
                    let result = execute_allowed_command(
                        &self.policy,
                        &receipt.workdir,
                        program,
                        args,
                        *timeout_ms,
                    )
                    .await;
                    // Launch/executor errors are folded into a failed stage
                    // report rather than aborting the whole run with Err.
                    let report = match result {
                        Ok(output) => ValidationStageReport {
                            stage: format!("{program} {}", args.join(" ")),
                            success: output.success,
                            exit_code: output.exit_code,
                            duration_ms: output.duration_ms,
                            stdout: output.stdout,
                            stderr: output.stderr,
                        },
                        Err(err) => ValidationStageReport {
                            stage: format!("{program} {}", args.join(" ")),
                            success: false,
                            exit_code: None,
                            duration_ms: 0,
                            stdout: String::new(),
                            stderr: err.to_string(),
                        },
                    };
                    if !report.success {
                        success = false;
                    }
                    if !report.stdout.is_empty() {
                        logs.push_str(&report.stdout);
                        logs.push('\n');
                    }
                    if !report.stderr.is_empty() {
                        logs.push_str(&report.stderr);
                        logs.push('\n');
                    }
                    stages.push(report);
                    // Fail fast: skip the remaining stages after a failure.
                    if !success {
                        break;
                    }
                }
            }
        }

        Ok(ValidationReport {
            success,
            duration_ms: started.elapsed().as_millis() as u64,
            stages,
            logs,
        })
    }
}
311
/// Outcome of a replay attempt.
#[derive(Clone, Debug)]
pub struct ReplayDecision {
    /// True when a stored capsule was successfully applied and validated.
    pub used_capsule: bool,
    /// The capsule that was tried (if any was selected).
    pub capsule_id: Option<CapsuleId>,
    /// True when the caller should invoke the planner instead.
    pub fallback_to_planner: bool,
    /// Human-readable explanation of the decision.
    pub reason: String,
}
319
/// Readiness classification of a pending coordination task.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum CoordinationTaskState {
    /// All prerequisites are satisfied; the task may run now.
    Ready,
    /// Some dependencies have not completed yet but may still do so.
    Waiting,
    /// A dependency failed or was skipped; the task cannot run.
    BlockedByFailure,
    /// Prerequisites can never be satisfied (e.g. unknown dependency id).
    PermanentlyBlocked,
}
327
/// Stateless, deterministic multi-agent task coordinator.
#[derive(Clone, Debug, Default)]
pub struct MultiAgentCoordinator;
330
331impl MultiAgentCoordinator {
    /// Creates a coordinator; the type carries no state.
    pub fn new() -> Self {
        Self
    }
335
    /// Drives `plan` to completion, honouring the coordination primitive,
    /// per-task `depends_on` edges, and the `max_retries` budget. Returns the
    /// completion/failure order plus the coordination message log. Failures
    /// are simulated from marker words in task descriptions (see
    /// `simulate_task_failure`).
    ///
    /// NOTE(review): `timeout_ms` is only echoed into the summary string; no
    /// wall-clock limit is enforced here — confirm that is intentional.
    pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
        let primitive = plan.primitive.clone();
        let root_goal = plan.root_goal.clone();
        let timeout_ms = plan.timeout_ms;
        let max_retries = plan.max_retries;
        // Index tasks by id; the first occurrence of a duplicate id wins.
        let mut tasks = BTreeMap::new();
        for task in plan.tasks {
            tasks.entry(task.id.clone()).or_insert(task);
        }

        let mut pending = tasks.keys().cloned().collect::<BTreeSet<_>>();
        let mut completed = BTreeSet::new();
        let mut failed = BTreeSet::new();
        // The *_order vectors preserve insertion order; the sets iterate in
        // id order and are used for membership checks only.
        let mut completed_order = Vec::new();
        let mut failed_order = Vec::new();
        let mut skipped = BTreeSet::new();
        let mut attempts = BTreeMap::new();
        let mut messages = Vec::new();

        loop {
            // Conditional mode proactively skips tasks whose dependency
            // chain already failed instead of leaving them blocked.
            if matches!(primitive, CoordinationPrimitive::Conditional) {
                self.apply_conditional_skips(
                    &tasks,
                    &mut pending,
                    &completed,
                    &failed,
                    &mut skipped,
                    &mut messages,
                );
            }

            let mut ready = self.ready_task_ids(&tasks, &pending, &completed, &failed, &skipped);
            if ready.is_empty() {
                break;
            }
            // Sequential mode executes at most one ready task per round.
            if matches!(primitive, CoordinationPrimitive::Sequential) {
                ready.truncate(1);
            }

            for task_id in ready {
                let Some(task) = tasks.get(&task_id) else {
                    continue;
                };
                if !pending.contains(&task_id) {
                    continue;
                }
                self.record_handoff_messages(task, &tasks, &completed, &failed, &mut messages);

                // Simulated execution: the description markers decide
                // whether this attempt fails.
                let prior_failures = attempts.get(&task_id).copied().unwrap_or(0);
                if Self::simulate_task_failure(task, prior_failures) {
                    let failure_count = prior_failures + 1;
                    attempts.insert(task_id.clone(), failure_count);
                    let will_retry = failure_count <= max_retries;
                    messages.push(CoordinationMessage {
                        from_role: task.role.clone(),
                        to_role: task.role.clone(),
                        task_id: task_id.clone(),
                        content: if will_retry {
                            format!("task {task_id} failed on attempt {failure_count} and will retry")
                        } else {
                            format!(
                                "task {task_id} failed on attempt {failure_count} and exhausted retries"
                            )
                        },
                    });
                    // A retryable failure stays in `pending` for next round.
                    if !will_retry {
                        pending.remove(&task_id);
                        if failed.insert(task_id.clone()) {
                            failed_order.push(task_id);
                        }
                    }
                    continue;
                }

                pending.remove(&task_id);
                if completed.insert(task_id.clone()) {
                    completed_order.push(task_id);
                }
            }
        }

        // Anything still pending can never run; classify it for the message
        // log and record it as failed.
        let blocked_ids = pending.into_iter().collect::<Vec<_>>();
        for task_id in blocked_ids {
            let Some(task) = tasks.get(&task_id) else {
                continue;
            };
            let state = self.classify_task(task, &tasks, &completed, &failed, &skipped);
            let content = match state {
                CoordinationTaskState::BlockedByFailure => {
                    format!("task {task_id} blocked by failed dependencies")
                }
                CoordinationTaskState::PermanentlyBlocked => {
                    format!("task {task_id} has invalid coordination prerequisites")
                }
                CoordinationTaskState::Waiting => {
                    format!("task {task_id} has unresolved dependencies")
                }
                CoordinationTaskState::Ready => {
                    format!("task {task_id} was left pending unexpectedly")
                }
            };
            messages.push(CoordinationMessage {
                from_role: task.role.clone(),
                to_role: task.role.clone(),
                task_id: task_id.clone(),
                content,
            });
            if failed.insert(task_id.clone()) {
                failed_order.push(task_id);
            }
        }

        CoordinationResult {
            completed_tasks: completed_order,
            failed_tasks: failed_order,
            messages,
            summary: format!(
                "goal '{}' completed {} tasks, failed {}, skipped {} using {:?} coordination (timeout={}ms, max_retries={})",
                root_goal,
                completed.len(),
                failed.len(),
                skipped.len(),
                primitive,
                timeout_ms,
                max_retries
            ),
        }
    }
464
465 fn ready_task_ids(
466 &self,
467 tasks: &BTreeMap<String, CoordinationTask>,
468 pending: &BTreeSet<String>,
469 completed: &BTreeSet<String>,
470 failed: &BTreeSet<String>,
471 skipped: &BTreeSet<String>,
472 ) -> Vec<String> {
473 pending
474 .iter()
475 .filter_map(|task_id| {
476 let task = tasks.get(task_id)?;
477 (self.classify_task(task, tasks, completed, failed, skipped)
478 == CoordinationTaskState::Ready)
479 .then(|| task_id.clone())
480 })
481 .collect()
482 }
483
    /// Conditional-mode pass: moves pending tasks whose dependency chain has
    /// already failed into `skipped`, logging a skip message for each. The
    /// ids are collected first so `pending` is not mutated while classifying.
    fn apply_conditional_skips(
        &self,
        tasks: &BTreeMap<String, CoordinationTask>,
        pending: &mut BTreeSet<String>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &mut BTreeSet<String>,
        messages: &mut Vec<CoordinationMessage>,
    ) {
        let skip_ids = pending
            .iter()
            .filter_map(|task_id| {
                let task = tasks.get(task_id)?;
                (self.classify_task(task, tasks, completed, failed, skipped)
                    == CoordinationTaskState::BlockedByFailure)
                    .then(|| task_id.clone())
            })
            .collect::<Vec<_>>();

        for task_id in skip_ids {
            let Some(task) = tasks.get(&task_id) else {
                continue;
            };
            pending.remove(&task_id);
            skipped.insert(task_id.clone());
            messages.push(CoordinationMessage {
                from_role: task.role.clone(),
                to_role: task.role.clone(),
                task_id: task_id.clone(),
                content: format!("task {task_id} skipped due to failed dependency chain"),
            });
        }
    }
517
    /// Classifies a task's readiness using role-specific dependency rules:
    /// - Planner/Coder: ready once every dependency completed; blocked if
    ///   any failed/skipped; permanently blocked on an unknown dependency.
    /// - Repair: only becomes ready when a Coder dependency has *failed*
    ///   (that failure is what it repairs); permanently blocked without a
    ///   Coder dependency or when every Coder dependency succeeded.
    /// - Optimizer: needs at least one implementation (Coder/Repair)
    ///   dependency to have completed; tolerates failed implementations.
    fn classify_task(
        &self,
        task: &CoordinationTask,
        tasks: &BTreeMap<String, CoordinationTask>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &BTreeSet<String>,
    ) -> CoordinationTaskState {
        match task.role {
            AgentRole::Planner | AgentRole::Coder => {
                let mut waiting = false;
                for dependency_id in &task.depends_on {
                    // Unknown dependency ids can never resolve.
                    if !tasks.contains_key(dependency_id) {
                        return CoordinationTaskState::PermanentlyBlocked;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if waiting {
                    CoordinationTaskState::Waiting
                } else {
                    CoordinationTaskState::Ready
                }
            }
            AgentRole::Repair => {
                let mut waiting = false;
                let mut has_coder_dependency = false;
                let mut has_failed_coder = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_coder = matches!(dependency.role, AgentRole::Coder);
                    if is_coder {
                        has_coder_dependency = true;
                    }
                    // Skipped dependencies block repair unconditionally.
                    if skipped.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if failed.contains(dependency_id) {
                        // A failed Coder is the repair trigger; any other
                        // failed dependency blocks the task.
                        if is_coder {
                            has_failed_coder = true;
                        } else {
                            return CoordinationTaskState::BlockedByFailure;
                        }
                        continue;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if !has_coder_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_failed_coder {
                    CoordinationTaskState::Ready
                } else {
                    // All Coder dependencies succeeded: nothing to repair.
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
            AgentRole::Optimizer => {
                let mut waiting = false;
                let mut has_impl_dependency = false;
                let mut has_completed_impl = false;
                let mut has_failed_impl = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_impl = matches!(dependency.role, AgentRole::Coder | AgentRole::Repair);
                    if is_impl {
                        has_impl_dependency = true;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        // Failed implementations are tolerated; a failed
                        // non-implementation dependency blocks the task.
                        if is_impl {
                            has_failed_impl = true;
                            continue;
                        }
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if completed.contains(dependency_id) {
                        if is_impl {
                            has_completed_impl = true;
                        }
                        continue;
                    }
                    waiting = true;
                }
                if !has_impl_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_completed_impl {
                    CoordinationTaskState::Ready
                } else if has_failed_impl {
                    CoordinationTaskState::BlockedByFailure
                } else {
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
        }
    }
625
626 fn record_handoff_messages(
627 &self,
628 task: &CoordinationTask,
629 tasks: &BTreeMap<String, CoordinationTask>,
630 completed: &BTreeSet<String>,
631 failed: &BTreeSet<String>,
632 messages: &mut Vec<CoordinationMessage>,
633 ) {
634 let mut dependency_ids = task.depends_on.clone();
635 dependency_ids.sort();
636 dependency_ids.dedup();
637
638 for dependency_id in dependency_ids {
639 let Some(dependency) = tasks.get(&dependency_id) else {
640 continue;
641 };
642 if completed.contains(&dependency_id) {
643 messages.push(CoordinationMessage {
644 from_role: dependency.role.clone(),
645 to_role: task.role.clone(),
646 task_id: task.id.clone(),
647 content: format!("handoff from {dependency_id} to {}", task.id),
648 });
649 } else if failed.contains(&dependency_id) {
650 messages.push(CoordinationMessage {
651 from_role: dependency.role.clone(),
652 to_role: task.role.clone(),
653 task_id: task.id.clone(),
654 content: format!("failed dependency {dependency_id} routed to {}", task.id),
655 });
656 }
657 }
658 }
659
660 fn simulate_task_failure(task: &CoordinationTask, prior_failures: u32) -> bool {
661 let normalized = task.description.to_ascii_lowercase();
662 normalized.contains("force-fail")
663 || (normalized.contains("fail-once") && prior_failures == 0)
664 }
665}
666
/// Infrastructure errors from the replay path (selection/apply/validate).
#[derive(Debug, Error)]
pub enum ReplayError {
    #[error("store error: {0}")]
    Store(String),
    #[error("sandbox error: {0}")]
    Sandbox(String),
    #[error("validation error: {0}")]
    Validation(String),
}
676
/// Attempts to satisfy a mutation request by replaying a previously captured
/// capsule instead of invoking the planner.
#[async_trait]
pub trait ReplayExecutor: Send + Sync {
    /// Tries to replay a matching capsule; the returned decision says
    /// whether a capsule was used and whether to fall back to the planner.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError>;

    /// Like [`Self::try_replay`], but attributes the reuse to `run_id`.
    /// The default implementation ignores the run id.
    async fn try_replay_for_run(
        &self,
        run_id: &RunId,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        let _ = run_id;
        self.try_replay(input, policy, validation).await
    }
}
697
/// [`ReplayExecutor`] backed by the evolution store: selects a candidate
/// gene, applies its capsule in the sandbox, re-validates, and records the
/// resulting promotion/reuse events.
pub struct StoreReplayExecutor {
    pub sandbox: Arc<dyn Sandbox>,
    pub validator: Arc<dyn Validator>,
    pub store: Arc<dyn EvolutionStore>,
    pub selector: Arc<dyn Selector>,
    pub governor: Arc<dyn Governor>,
    /// Optional EVU ledger; enables reputation re-ranking and reuse
    /// settlement when present together with `remote_publishers`.
    pub economics: Option<Arc<Mutex<EvuLedger>>>,
    /// Optional map from gene id to remote publisher id.
    pub remote_publishers: Option<Arc<Mutex<BTreeMap<String, String>>>>,
    pub stake_policy: StakePolicy,
}
708
#[async_trait]
impl ReplayExecutor for StoreReplayExecutor {
    /// Replay without attributing the reuse to a specific run.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        self.try_replay_inner(None, input, policy, validation).await
    }

    /// Replay and record `run_id` as the replaying run in the reuse event.
    async fn try_replay_for_run(
        &self,
        run_id: &RunId,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        self.try_replay_inner(Some(run_id), input, policy, validation)
            .await
    }
}
731
732impl StoreReplayExecutor {
    /// Core replay flow: pick a candidate gene (selector, then local
    /// exact-match, then quarantined remote exact-match fallbacks), gate
    /// fuzzy hits on score, apply the stored mutation in the sandbox,
    /// re-validate, and append the promotion/reuse events. Any per-candidate
    /// failure results in a planner-fallback decision; only store/validator
    /// infrastructure failures return `Err`.
    async fn try_replay_inner(
        &self,
        replay_run_id: Option<&RunId>,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        let mut selector_input = input.clone();
        // With economics + remote publishers configured, widen the candidate
        // pool so reputation re-ranking has something to reorder.
        if self.economics.is_some() && self.remote_publishers.is_some() {
            selector_input.limit = selector_input.limit.max(4);
        }
        let mut candidates = self.selector.select(&selector_input);
        self.rerank_with_reputation_bias(&mut candidates);
        let mut exact_match = false;
        // Cold-start fallback #1: exact-match lookup over stored assets.
        if candidates.is_empty() {
            let mut exact_candidates = exact_match_candidates(self.store.as_ref(), input);
            self.rerank_with_reputation_bias(&mut exact_candidates);
            if !exact_candidates.is_empty() {
                candidates = exact_candidates;
                exact_match = true;
            }
        }
        // Cold-start fallback #2: quarantined remote assets; they are
        // validated below before being promoted.
        if candidates.is_empty() {
            let mut remote_candidates =
                quarantined_remote_exact_match_candidates(self.store.as_ref(), input);
            self.rerank_with_reputation_bias(&mut remote_candidates);
            if !remote_candidates.is_empty() {
                candidates = remote_candidates;
                exact_match = true;
            }
        }
        candidates.truncate(input.limit.max(1));
        let Some(best) = candidates.into_iter().next() else {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "no matching gene".into(),
            });
        };
        let remote_publisher = self.publisher_for_gene(&best.gene.id);

        // The score gate applies only to fuzzy selector hits; exact matches
        // go straight to validation.
        if !exact_match && best.score < 0.82 {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: format!("best gene score {:.3} below replay threshold", best.score),
            });
        }

        let Some(capsule) = best.capsules.first().cloned() else {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "candidate gene has no capsule".into(),
            });
        };

        let Some(mutation) = find_declared_mutation(self.store.as_ref(), &capsule.mutation_id)
            .map_err(|err| ReplayError::Store(err.to_string()))?
        else {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "mutation payload missing from store".into(),
            });
        };

        // An apply failure settles economics against the remote publisher
        // (if any) and defers to the planner.
        let receipt = match self.sandbox.apply(&mutation, policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                self.record_reuse_settlement(remote_publisher.as_deref(), false);
                return Ok(ReplayDecision {
                    used_capsule: false,
                    capsule_id: Some(capsule.id.clone()),
                    fallback_to_planner: true,
                    reason: format!("replay patch apply failed: {err}"),
                });
            }
        };

        let report = self
            .validator
            .run(&receipt, validation)
            .await
            .map_err(|err| ReplayError::Validation(err.to_string()))?;
        if !report.success {
            // Record the failure (possibly revoking the gene) before falling
            // back to the planner.
            self.record_replay_validation_failure(&best, &capsule, validation, &report)?;
            self.record_reuse_settlement(remote_publisher.as_deref(), false);
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: Some(capsule.id.clone()),
                fallback_to_planner: true,
                reason: "replay validation failed".into(),
            });
        }

        // A quarantined (remote) capsule that just validated locally is
        // promoted, along with its gene when that is quarantined too.
        if matches!(capsule.state, AssetState::Quarantined) {
            self.store
                .append_event(EvolutionEvent::ValidationPassed {
                    mutation_id: capsule.mutation_id.clone(),
                    report: report.to_snapshot(&validation.profile),
                    gene_id: Some(best.gene.id.clone()),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            if matches!(best.gene.state, AssetState::Quarantined) {
                self.store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: best.gene.id.clone(),
                        state: AssetState::Promoted,
                        reason: "remote asset locally validated via replay".into(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
                self.store
                    .append_event(EvolutionEvent::GenePromoted {
                        gene_id: best.gene.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
            self.store
                .append_event(EvolutionEvent::CapsuleReleased {
                    capsule_id: capsule.id.clone(),
                    state: AssetState::Promoted,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
        }

        self.store
            .append_event(EvolutionEvent::CapsuleReused {
                capsule_id: capsule.id.clone(),
                gene_id: capsule.gene_id.clone(),
                run_id: capsule.run_id.clone(),
                replay_run_id: replay_run_id.cloned(),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        self.record_reuse_settlement(remote_publisher.as_deref(), true);

        Ok(ReplayDecision {
            used_capsule: true,
            capsule_id: Some(capsule.id),
            fallback_to_planner: false,
            reason: if exact_match {
                "replayed via cold-start lookup".into()
            } else {
                "replayed via selector".into()
            },
        })
    }
884
    /// Re-sorts candidates by reputation-adjusted score, descending, with
    /// gene id as a deterministic tie-breaker. No-op unless both the ledger
    /// and the remote-publisher map are configured and the ledger reports a
    /// non-empty bias table. Poisoned locks degrade to empty maps.
    fn rerank_with_reputation_bias(&self, candidates: &mut [GeneCandidate]) {
        let Some(ledger) = self.economics.as_ref() else {
            return;
        };
        let Some(remote_publishers) = self.remote_publishers.as_ref() else {
            return;
        };
        let reputation_bias = ledger
            .lock()
            .ok()
            .map(|locked| locked.selector_reputation_bias())
            .unwrap_or_default();
        if reputation_bias.is_empty() {
            return;
        }
        let publisher_map = remote_publishers
            .lock()
            .ok()
            .map(|locked| locked.clone())
            .unwrap_or_default();
        // `right` vs `left` is intentionally reversed: higher effective
        // score sorts first. NaN comparisons fall back to Equal, which the
        // gene-id tie-breaker then resolves deterministically.
        candidates.sort_by(|left, right| {
            effective_candidate_score(right, &publisher_map, &reputation_bias)
                .partial_cmp(&effective_candidate_score(
                    left,
                    &publisher_map,
                    &reputation_bias,
                ))
                .unwrap_or(std::cmp::Ordering::Equal)
                .then_with(|| left.gene.id.cmp(&right.gene.id))
        });
    }
916
917 fn publisher_for_gene(&self, gene_id: &str) -> Option<String> {
918 self.remote_publishers
919 .as_ref()?
920 .lock()
921 .ok()?
922 .get(gene_id)
923 .cloned()
924 }
925
926 fn record_reuse_settlement(&self, publisher_id: Option<&str>, success: bool) {
927 let Some(publisher_id) = publisher_id else {
928 return;
929 };
930 let Some(ledger) = self.economics.as_ref() else {
931 return;
932 };
933 if let Ok(mut locked) = ledger.lock() {
934 locked.settle_remote_reuse(publisher_id, success, &self.stake_policy);
935 }
936 }
937
    /// Persists a replay validation failure and asks the governor whether
    /// the gene should be revoked; on revocation, also quarantines every
    /// capsule attached to the candidate.
    fn record_replay_validation_failure(
        &self,
        best: &GeneCandidate,
        capsule: &Capsule,
        validation: &ValidationPlan,
        report: &ValidationReport,
    ) -> Result<(), ReplayError> {
        let projection = self
            .store
            .rebuild_projection()
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            Self::confidence_context(&projection, &best.gene.id);

        self.store
            .append_event(EvolutionEvent::ValidationFailed {
                mutation_id: capsule.mutation_id.clone(),
                report: report.to_snapshot(&validation.profile),
                gene_id: Some(best.gene.id.clone()),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;

        // The count below includes the event appended just above.
        let replay_failures = self.replay_failure_count(&best.gene.id)?;
        let governor_decision = self.governor.evaluate(GovernorInput {
            candidate_source: if self.publisher_for_gene(&best.gene.id).is_some() {
                CandidateSource::Remote
            } else {
                CandidateSource::Local
            },
            success_count: 0,
            blast_radius: BlastRadius {
                files_changed: capsule.outcome.changed_files.len(),
                lines_changed: capsule.outcome.lines_changed,
            },
            replay_failures,
            recent_mutation_ages_secs: Vec::new(),
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        if matches!(governor_decision.target_state, AssetState::Revoked) {
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: best.gene.id.clone(),
                    state: AssetState::Revoked,
                    reason: governor_decision.reason.clone(),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: best.gene.id.clone(),
                    reason: governor_decision.reason,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            for related in &best.capsules {
                self.store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: related.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
        }

        Ok(())
    }
1004
    /// Derives the (current, historical-peak, age-in-seconds) confidence
    /// tuple for `gene_id` from the projection.
    ///
    /// NOTE(review): current and peak are both the same peak value by
    /// construction here — confirm whether a distinct "current" confidence
    /// should be tracked.
    fn confidence_context(
        projection: &EvolutionProjection,
        gene_id: &str,
    ) -> (f32, f32, Option<u64>) {
        // Highest confidence across this gene's capsules; 0.0 if none.
        let peak_confidence = projection
            .capsules
            .iter()
            .filter(|capsule| capsule.gene_id == gene_id)
            .map(|capsule| capsule.confidence)
            .fold(0.0_f32, f32::max);
        let age_secs = projection
            .last_updated_at
            .get(gene_id)
            .and_then(|timestamp| Self::seconds_since_timestamp(timestamp, Utc::now()));
        (peak_confidence, peak_confidence, age_secs)
    }
1021
1022 fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
1023 let parsed = DateTime::parse_from_rfc3339(timestamp)
1024 .ok()?
1025 .with_timezone(&Utc);
1026 let elapsed = now.signed_duration_since(parsed);
1027 if elapsed < Duration::zero() {
1028 Some(0)
1029 } else {
1030 u64::try_from(elapsed.num_seconds()).ok()
1031 }
1032 }
1033
1034 fn replay_failure_count(&self, gene_id: &str) -> Result<u64, ReplayError> {
1035 Ok(self
1036 .store
1037 .scan(1)
1038 .map_err(|err| ReplayError::Store(err.to_string()))?
1039 .into_iter()
1040 .filter(|stored| {
1041 matches!(
1042 &stored.event,
1043 EvolutionEvent::ValidationFailed {
1044 gene_id: Some(current_gene_id),
1045 ..
1046 } if current_gene_id == gene_id
1047 )
1048 })
1049 .count() as u64)
1050 }
1051}
1052
/// Top-level errors for EvoKernel operations.
#[derive(Debug, Error)]
pub enum EvoKernelError {
    #[error("sandbox error: {0}")]
    Sandbox(String),
    #[error("validation error: {0}")]
    Validation(String),
    /// Validation ran to completion but reported failure; carries the full
    /// report for diagnostics.
    #[error("validation failed")]
    ValidationFailed(ValidationReport),
    #[error("store error: {0}")]
    Store(String),
}
1064
/// Result of capturing a validated mutation as evolution assets.
#[derive(Clone, Debug)]
pub struct CaptureOutcome {
    pub capsule: Capsule,
    pub gene: Gene,
    /// The governor's promotion/quarantine verdict for the new assets.
    pub governor_decision: GovernorDecision,
}
1071
/// Result of importing remote evolution assets into the local store.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ImportOutcome {
    /// Ids of the assets that were actually imported.
    pub imported_asset_ids: Vec<String>,
    /// False when the envelope was rejected outright.
    pub accepted: bool,
}
1077
/// Point-in-time metrics derived from the evolution event log.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct EvolutionMetricsSnapshot {
    pub replay_attempts_total: u64,
    pub replay_success_total: u64,
    /// Successes divided by attempts (semantics defined by the producer).
    pub replay_success_rate: f64,
    pub mutation_declared_total: u64,
    pub promoted_mutations_total: u64,
    pub promotion_ratio: f64,
    pub gene_revocations_total: u64,
    pub mutation_velocity_last_hour: u64,
    pub revoke_frequency_last_hour: u64,
    pub promoted_genes: u64,
    pub promoted_capsules: u64,
    /// Sequence number of the newest event reflected in this snapshot.
    pub last_event_seq: u64,
}
1093
/// Compact health view derived from [`EvolutionMetricsSnapshot`].
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct EvolutionHealthSnapshot {
    /// Health status string (values defined by the producing function).
    pub status: String,
    pub last_event_seq: u64,
    pub promoted_genes: u64,
    pub promoted_capsules: u64,
}
1101
/// Store-backed node facade for the evolution network protocol
/// (publish / fetch / revoke / metrics).
#[derive(Clone)]
pub struct EvolutionNetworkNode {
    pub store: Arc<dyn EvolutionStore>,
}
1106
impl EvolutionNetworkNode {
    /// Wraps an existing evolution store.
    pub fn new(store: Arc<dyn EvolutionStore>) -> Self {
        Self { store }
    }

    /// Convenience constructor using the default JSONL store location.
    pub fn with_default_store() -> Self {
        Self {
            store: Arc::new(JsonlEvolutionStore::new(default_store_root())),
        }
    }

    /// Imports the assets of an inbound publish request into the local
    /// store by wrapping them into a publish envelope.
    pub fn accept_publish_request(
        &self,
        request: &PublishRequest,
    ) -> Result<ImportOutcome, EvoKernelError> {
        import_remote_envelope_into_store(
            self.store.as_ref(),
            &EvolutionEnvelope::publish(request.sender_id.clone(), request.assets.clone()),
            None,
        )
    }

    /// Exports locally promoted assets as a publish envelope attributed to
    /// `sender_id`.
    pub fn publish_local_assets(
        &self,
        sender_id: impl Into<String>,
    ) -> Result<EvolutionEnvelope, EvoKernelError> {
        export_promoted_assets_from_store(self.store.as_ref(), sender_id)
    }

    /// Answers a fetch query from the local store.
    pub fn fetch_assets(
        &self,
        responder_id: impl Into<String>,
        query: &FetchQuery,
    ) -> Result<FetchResponse, EvoKernelError> {
        fetch_assets_from_store(self.store.as_ref(), responder_id, query)
    }

    /// Applies a revoke notice to the local store.
    pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
        revoke_assets_in_store(self.store.as_ref(), notice)
    }

    /// Computes the current metrics snapshot from the store.
    pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
        evolution_metrics_snapshot(self.store.as_ref())
    }

    /// Renders metrics (plus derived health) in Prometheus text format.
    pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
        self.metrics_snapshot().map(|snapshot| {
            let health = evolution_health_snapshot(&snapshot);
            render_evolution_metrics_prometheus(&snapshot, &health)
        })
    }

    /// Derives the health snapshot from the current metrics.
    pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
        self.metrics_snapshot()
            .map(|snapshot| evolution_health_snapshot(&snapshot))
    }
}
1164
/// Self-evolving kernel facade: wires the core [`Kernel`] to the sandbox,
/// validator, event store, selector, governor, and economics components that
/// drive mutation capture, replay, and remote asset exchange.
pub struct EvoKernel<S: KernelState> {
    /// Core execution kernel being evolved.
    pub kernel: Arc<Kernel<S>>,
    /// Applies mutations in isolation, yielding a receipt of changed files.
    pub sandbox: Arc<dyn Sandbox>,
    /// Runs the validation plan against a sandbox receipt.
    pub validator: Arc<dyn Validator>,
    /// Append-only evolution event log and projection source.
    pub store: Arc<dyn EvolutionStore>,
    /// Picks replay candidates; defaults to a store-backed selector.
    pub selector: Arc<dyn Selector>,
    /// Decides promotion / quarantine / revocation for captured mutations.
    pub governor: Arc<dyn Governor>,
    /// EVU ledger used for publish staking and reputation signals.
    pub economics: Arc<Mutex<EvuLedger>>,
    /// Known remote publishers (id -> value). NOTE(review): exact semantics
    /// live in `import_remote_envelope_into_store` — not shown in this chunk.
    pub remote_publishers: Arc<Mutex<BTreeMap<String, String>>>,
    /// Stake requirements applied when exporting promoted assets.
    pub stake_policy: StakePolicy,
    /// Policy handed to the sandbox for every mutation application.
    pub sandbox_policy: SandboxPolicy,
    /// Validation profile/stages run after each sandbox application.
    pub validation_plan: ValidationPlan,
}
1178
impl<S: KernelState> EvoKernel<S> {
    /// Ages in seconds (sorted ascending) of previously declared mutations,
    /// optionally excluding one mutation id (typically the mutation currently
    /// being captured). Feeds the governor's mutation-velocity input.
    fn recent_prior_mutation_ages_secs(
        &self,
        exclude_mutation_id: Option<&str>,
    ) -> Result<Vec<u64>, EvolutionError> {
        let now = Utc::now();
        let mut ages = self
            .store
            .scan(1)?
            .into_iter()
            .filter_map(|stored| match stored.event {
                EvolutionEvent::MutationDeclared { mutation }
                    if exclude_mutation_id != Some(mutation.intent.id.as_str()) =>
                {
                    Self::seconds_since_timestamp(&stored.timestamp, now)
                }
                _ => None,
            })
            .collect::<Vec<_>>();
        ages.sort_unstable();
        Ok(ages)
    }

    /// Seconds elapsed between an RFC 3339 `timestamp` and `now`.
    /// Returns `None` when the timestamp does not parse; timestamps in the
    /// future are clamped to 0 rather than treated as errors.
    fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
        let parsed = DateTime::parse_from_rfc3339(timestamp)
            .ok()?
            .with_timezone(&Utc);
        let elapsed = now.signed_duration_since(parsed);
        if elapsed < Duration::zero() {
            Some(0)
        } else {
            u64::try_from(elapsed.num_seconds()).ok()
        }
    }

    /// Builds an `EvoKernel` with default components: a store-backed selector,
    /// the default governor, a fresh economics ledger, default stake policy,
    /// and the oris-default sandbox policy and validation plan. Override any
    /// of these with the `with_*` builders below.
    pub fn new(
        kernel: Arc<Kernel<S>>,
        sandbox: Arc<dyn Sandbox>,
        validator: Arc<dyn Validator>,
        store: Arc<dyn EvolutionStore>,
    ) -> Self {
        let selector: Arc<dyn Selector> = Arc::new(StoreBackedSelector::new(store.clone()));
        Self {
            kernel,
            sandbox,
            validator,
            store,
            selector,
            governor: Arc::new(DefaultGovernor::default()),
            economics: Arc::new(Mutex::new(EvuLedger::default())),
            remote_publishers: Arc::new(Mutex::new(BTreeMap::new())),
            stake_policy: StakePolicy::default(),
            sandbox_policy: SandboxPolicy::oris_default(),
            validation_plan: ValidationPlan::oris_default(),
        }
    }

    /// Overrides the gene selector.
    pub fn with_selector(mut self, selector: Arc<dyn Selector>) -> Self {
        self.selector = selector;
        self
    }

    /// Overrides the sandbox policy used for mutation application.
    pub fn with_sandbox_policy(mut self, policy: SandboxPolicy) -> Self {
        self.sandbox_policy = policy;
        self
    }

    /// Overrides the governor.
    pub fn with_governor(mut self, governor: Arc<dyn Governor>) -> Self {
        self.governor = governor;
        self
    }

    /// Shares an existing economics ledger.
    pub fn with_economics(mut self, economics: Arc<Mutex<EvuLedger>>) -> Self {
        self.economics = economics;
        self
    }

    /// Overrides the publish stake policy.
    pub fn with_stake_policy(mut self, policy: StakePolicy) -> Self {
        self.stake_policy = policy;
        self
    }

    /// Overrides the validation plan run after sandbox application.
    pub fn with_validation_plan(mut self, plan: ValidationPlan) -> Self {
        self.validation_plan = plan;
        self
    }

    /// Runs the selector, orders candidates by score descending (ties broken
    /// by gene id for determinism), and truncates to `input.limit` — but
    /// always keeps at least one candidate even when the limit is 0.
    pub fn select_candidates(&self, input: &SelectorInput) -> Vec<GeneCandidate> {
        let mut candidates = self.selector.select(input);
        candidates.sort_by(|left, right| {
            right
                .score
                .partial_cmp(&left.score)
                .unwrap_or(std::cmp::Ordering::Equal)
                .then_with(|| left.gene.id.cmp(&right.gene.id))
        });
        candidates.truncate(input.limit.max(1));
        candidates
    }

    /// Seeds the store with the built-in templates when it contains no genes
    /// yet. Seeds are committed *quarantined*, not promoted: local validation
    /// must earn promotion later. No-op (default report) when genes exist.
    pub fn bootstrap_if_empty(&self, run_id: &RunId) -> Result<BootstrapReport, EvoKernelError> {
        let projection = self.store.rebuild_projection().map_err(store_err)?;
        if !projection.genes.is_empty() {
            return Ok(BootstrapReport::default());
        }

        let templates = built_in_seed_templates();
        for template in &templates {
            let mutation = build_seed_mutation(template);
            let extracted = extract_seed_signals(template);
            let gene = build_bootstrap_gene(template, &extracted)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
            let capsule = build_bootstrap_capsule(run_id, template, &mutation, &gene)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;

            // Event order mirrors the live capture flow: declare -> signals
            // -> gene -> promotion evaluation -> capsule -> quarantine.
            self.store
                .append_event(EvolutionEvent::MutationDeclared {
                    mutation: mutation.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::SignalsExtracted {
                    mutation_id: mutation.intent.id.clone(),
                    hash: extracted.hash.clone(),
                    signals: extracted.values.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: gene.id.clone(),
                    state: AssetState::Quarantined,
                    reason: "bootstrap seeds require local validation before replay".into(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleCommitted {
                    capsule: capsule.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id,
                })
                .map_err(store_err)?;
        }

        Ok(BootstrapReport {
            seeded: true,
            genes_added: templates.len(),
            capsules_added: templates.len(),
        })
    }

    /// Convenience wrapper around [`Self::capture_mutation_with_governor`]
    /// that discards the gene and governor decision.
    pub async fn capture_successful_mutation(
        &self,
        run_id: &RunId,
        mutation: PreparedMutation,
    ) -> Result<Capsule, EvoKernelError> {
        Ok(self
            .capture_mutation_with_governor(run_id, mutation)
            .await?
            .capsule)
    }

    /// Full capture pipeline for a prepared mutation:
    /// declare -> sandbox apply -> validate -> extract signals -> derive gene
    /// -> governor evaluation -> project gene + commit capsule.
    /// Each stage is appended to the event log before the next runs, so a
    /// failure leaves an auditable partial trail (rejection / validation
    /// failure events included).
    pub async fn capture_mutation_with_governor(
        &self,
        run_id: &RunId,
        mutation: PreparedMutation,
    ) -> Result<CaptureOutcome, EvoKernelError> {
        self.store
            .append_event(EvolutionEvent::MutationDeclared {
                mutation: mutation.clone(),
            })
            .map_err(store_err)?;

        // Sandbox failures are recorded as rejections before surfacing.
        let receipt = match self.sandbox.apply(&mutation, &self.sandbox_policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                self.store
                    .append_event(EvolutionEvent::MutationRejected {
                        mutation_id: mutation.intent.id.clone(),
                        reason: err.to_string(),
                    })
                    .map_err(store_err)?;
                return Err(EvoKernelError::Sandbox(err.to_string()));
            }
        };

        self.store
            .append_event(EvolutionEvent::MutationApplied {
                mutation_id: mutation.intent.id.clone(),
                patch_hash: receipt.patch_hash.clone(),
                changed_files: receipt
                    .changed_files
                    .iter()
                    .map(|path| path.to_string_lossy().to_string())
                    .collect(),
            })
            .map_err(store_err)?;

        let report = self
            .validator
            .run(&receipt, &self.validation_plan)
            .await
            .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
        if !report.success {
            self.store
                .append_event(EvolutionEvent::ValidationFailed {
                    mutation_id: mutation.intent.id.clone(),
                    report: report.to_snapshot(&self.validation_plan.profile),
                    gene_id: None,
                })
                .map_err(store_err)?;
            return Err(EvoKernelError::ValidationFailed(report));
        }

        self.store
            .append_event(EvolutionEvent::ValidationPassed {
                mutation_id: mutation.intent.id.clone(),
                report: report.to_snapshot(&self.validation_plan.profile),
                gene_id: None,
            })
            .map_err(store_err)?;

        // Deterministic signal extraction feeds both the gene's identity and
        // future selector matching.
        let extracted_signals = extract_deterministic_signals(&SignalExtractionInput {
            patch_diff: mutation.artifact.payload.clone(),
            intent: mutation.intent.intent.clone(),
            expected_effect: mutation.intent.expected_effect.clone(),
            declared_signals: mutation.intent.signals.clone(),
            changed_files: receipt
                .changed_files
                .iter()
                .map(|path| path.to_string_lossy().to_string())
                .collect(),
            validation_success: report.success,
            validation_logs: report.logs.clone(),
            stage_outputs: report
                .stages
                .iter()
                .flat_map(|stage| [stage.stdout.clone(), stage.stderr.clone()])
                .filter(|value| !value.is_empty())
                .collect(),
        });
        self.store
            .append_event(EvolutionEvent::SignalsExtracted {
                mutation_id: mutation.intent.id.clone(),
                hash: extracted_signals.hash.clone(),
                signals: extracted_signals.values.clone(),
            })
            .map_err(store_err)?;

        let projection = self.store.rebuild_projection().map_err(store_err)?;
        let blast_radius = compute_blast_radius(&mutation.artifact.payload);
        let recent_mutation_ages_secs = self
            .recent_prior_mutation_ages_secs(Some(mutation.intent.id.as_str()))
            .map_err(store_err)?;
        let mut gene = derive_gene(
            &mutation,
            &receipt,
            &self.validation_plan.profile,
            &extracted_signals.values,
        );
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            StoreReplayExecutor::confidence_context(&projection, &gene.id);
        // Success count = capsules already attached to this gene, plus this
        // capture itself.
        let success_count = projection
            .genes
            .iter()
            .find(|existing| existing.id == gene.id)
            .map(|existing| {
                projection
                    .capsules
                    .iter()
                    .filter(|capsule| capsule.gene_id == existing.id)
                    .count() as u64
            })
            .unwrap_or(0)
            + 1;
        let governor_decision = self.governor.evaluate(GovernorInput {
            candidate_source: CandidateSource::Local,
            success_count,
            blast_radius: blast_radius.clone(),
            replay_failures: 0,
            recent_mutation_ages_secs,
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        // The governor's target state overrides the default states assigned
        // by `derive_gene` / `build_capsule`.
        gene.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
            .map_err(store_err)?;
        self.store
            .append_event(EvolutionEvent::PromotionEvaluated {
                gene_id: gene.id.clone(),
                state: governor_decision.target_state.clone(),
                reason: governor_decision.reason.clone(),
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Promoted) {
            self.store
                .append_event(EvolutionEvent::GenePromoted {
                    gene_id: gene.id.clone(),
                })
                .map_err(store_err)?;
        }
        if matches!(governor_decision.target_state, AssetState::Revoked) {
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: gene.id.clone(),
                    reason: governor_decision.reason.clone(),
                })
                .map_err(store_err)?;
        }
        if let Some(spec_id) = &mutation.intent.spec_id {
            self.store
                .append_event(EvolutionEvent::SpecLinked {
                    mutation_id: mutation.intent.id.clone(),
                    spec_id: spec_id.clone(),
                })
                .map_err(store_err)?;
        }

        let mut capsule = build_capsule(
            run_id,
            &mutation,
            &receipt,
            &report,
            &self.validation_plan.profile,
            &gene,
            &blast_radius,
        )
        .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
        capsule.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::CapsuleCommitted {
                capsule: capsule.clone(),
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Quarantined) {
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id.clone(),
                })
                .map_err(store_err)?;
        }

        Ok(CaptureOutcome {
            capsule,
            gene,
            governor_decision,
        })
    }

    /// Captures a mutation proposed by an agent. The proposal's file list is
    /// used both as the path allow-list and as the declared signals; risk is
    /// fixed at `Low`.
    pub async fn capture_from_proposal(
        &self,
        run_id: &RunId,
        proposal: &AgentMutationProposal,
        diff_payload: String,
        base_revision: Option<String>,
    ) -> Result<CaptureOutcome, EvoKernelError> {
        let intent = MutationIntent {
            id: next_id("proposal"),
            intent: proposal.intent.clone(),
            target: MutationTarget::Paths {
                allow: proposal.files.clone(),
            },
            expected_effect: proposal.expected_effect.clone(),
            risk: RiskLevel::Low,
            signals: proposal.files.clone(),
            spec_id: None,
        };
        self.capture_mutation_with_governor(
            run_id,
            prepare_mutation(intent, diff_payload, base_revision),
        )
        .await
    }

    /// Converts a capture outcome into agent-facing feedback; any target state
    /// other than `Revoked` counts as accepted.
    pub fn feedback_for_agent(outcome: &CaptureOutcome) -> ExecutionFeedback {
        ExecutionFeedback {
            accepted: !matches!(outcome.governor_decision.target_state, AssetState::Revoked),
            asset_state: Some(format!("{:?}", outcome.governor_decision.target_state)),
            summary: outcome.governor_decision.reason.clone(),
        }
    }

    /// Delegates a coordination plan to a fresh multi-agent coordinator.
    pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
        MultiAgentCoordinator::new().coordinate(plan)
    }

    /// Exports promoted assets as a publish envelope. When the envelope is
    /// non-empty, a publish stake is reserved from the sender's EVU balance;
    /// export fails if the stake cannot be reserved.
    pub fn export_promoted_assets(
        &self,
        sender_id: impl Into<String>,
    ) -> Result<EvolutionEnvelope, EvoKernelError> {
        let sender_id = sender_id.into();
        let envelope = export_promoted_assets_from_store(self.store.as_ref(), sender_id.clone())?;
        if !envelope.assets.is_empty() {
            let mut ledger = self
                .economics
                .lock()
                .map_err(|_| EvoKernelError::Validation("economics ledger lock poisoned".into()))?;
            if ledger
                .reserve_publish_stake(&sender_id, &self.stake_policy)
                .is_none()
            {
                return Err(EvoKernelError::Validation(
                    "insufficient EVU for remote publish".into(),
                ));
            }
        }
        Ok(envelope)
    }

    /// Imports a remote envelope, consulting this kernel's remote-publisher
    /// registry (unlike the bare `EvolutionNetworkNode`, which passes `None`).
    pub fn import_remote_envelope(
        &self,
        envelope: &EvolutionEnvelope,
    ) -> Result<ImportOutcome, EvoKernelError> {
        import_remote_envelope_into_store(
            self.store.as_ref(),
            envelope,
            Some(self.remote_publishers.as_ref()),
        )
    }

    /// Answers a fetch query against the local store.
    pub fn fetch_assets(
        &self,
        responder_id: impl Into<String>,
        query: &FetchQuery,
    ) -> Result<FetchResponse, EvoKernelError> {
        fetch_assets_from_store(self.store.as_ref(), responder_id, query)
    }

    /// Applies a revocation notice to the local store.
    pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
        revoke_assets_in_store(self.store.as_ref(), notice)
    }

    /// Replay with a freshly generated run id; see
    /// [`Self::replay_or_fallback_for_run`].
    pub async fn replay_or_fallback(
        &self,
        input: SelectorInput,
    ) -> Result<ReplayDecision, EvoKernelError> {
        let replay_run_id = next_id("replay");
        self.replay_or_fallback_for_run(&replay_run_id, input).await
    }

    /// Attempts to replay a stored mutation matching `input` under the given
    /// run id, delegating to a `StoreReplayExecutor` assembled from this
    /// kernel's components. Executor errors surface as validation errors.
    pub async fn replay_or_fallback_for_run(
        &self,
        run_id: &RunId,
        input: SelectorInput,
    ) -> Result<ReplayDecision, EvoKernelError> {
        let executor = StoreReplayExecutor {
            sandbox: self.sandbox.clone(),
            validator: self.validator.clone(),
            store: self.store.clone(),
            selector: self.selector.clone(),
            governor: self.governor.clone(),
            economics: Some(self.economics.clone()),
            remote_publishers: Some(self.remote_publishers.clone()),
            stake_policy: self.stake_policy.clone(),
        };
        executor
            .try_replay_for_run(run_id, &input, &self.sandbox_policy, &self.validation_plan)
            .await
            .map_err(|err| EvoKernelError::Validation(err.to_string()))
    }

    /// Economics signal for a node, or `None` when the ledger lock is
    /// poisoned or no signal exists.
    pub fn economics_signal(&self, node_id: &str) -> Option<EconomicsSignal> {
        self.economics.lock().ok()?.governor_signal(node_id)
    }

    /// Per-publisher reputation bias for the selector; empty when the ledger
    /// lock is poisoned.
    pub fn selector_reputation_bias(&self) -> BTreeMap<String, f32> {
        self.economics
            .lock()
            .ok()
            .map(|locked| locked.selector_reputation_bias())
            .unwrap_or_default()
    }

    /// Current metrics counters derived from the store's event log.
    pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
        evolution_metrics_snapshot(self.store.as_ref())
    }

    /// Metrics plus derived health, rendered in Prometheus exposition format.
    pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
        self.metrics_snapshot().map(|snapshot| {
            let health = evolution_health_snapshot(&snapshot);
            render_evolution_metrics_prometheus(&snapshot, &health)
        })
    }

    /// Coarse health view derived from the current metrics snapshot.
    pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
        self.metrics_snapshot()
            .map(|snapshot| evolution_health_snapshot(&snapshot))
    }
}
1676
1677pub fn prepare_mutation(
1678 intent: MutationIntent,
1679 diff_payload: String,
1680 base_revision: Option<String>,
1681) -> PreparedMutation {
1682 PreparedMutation {
1683 intent,
1684 artifact: MutationArtifact {
1685 encoding: ArtifactEncoding::UnifiedDiff,
1686 content_hash: compute_artifact_hash(&diff_payload),
1687 payload: diff_payload,
1688 base_revision,
1689 },
1690 }
1691}
1692
1693pub fn prepare_mutation_from_spec(
1694 plan: CompiledMutationPlan,
1695 diff_payload: String,
1696 base_revision: Option<String>,
1697) -> PreparedMutation {
1698 prepare_mutation(plan.mutation_intent, diff_payload, base_revision)
1699}
1700
1701pub fn default_evolution_store() -> Arc<dyn EvolutionStore> {
1702 Arc::new(oris_evolution::JsonlEvolutionStore::new(
1703 default_store_root(),
1704 ))
1705}
1706
1707fn built_in_seed_templates() -> Vec<SeedTemplate> {
1708 vec![
1709 SeedTemplate {
1710 id: "bootstrap-readme".into(),
1711 intent: "Seed a baseline README recovery pattern".into(),
1712 signals: vec!["bootstrap readme".into(), "missing readme".into()],
1713 diff_payload: "\
1714diff --git a/README.md b/README.md
1715new file mode 100644
1716index 0000000..1111111
1717--- /dev/null
1718+++ b/README.md
1719@@ -0,0 +1,3 @@
1720+# Oris
1721+Bootstrap documentation seed
1722+"
1723 .into(),
1724 validation_profile: "bootstrap-seed".into(),
1725 },
1726 SeedTemplate {
1727 id: "bootstrap-test-fix".into(),
1728 intent: "Seed a deterministic test stabilization pattern".into(),
1729 signals: vec!["bootstrap test fix".into(), "failing tests".into()],
1730 diff_payload: "\
1731diff --git a/src/lib.rs b/src/lib.rs
1732index 1111111..2222222 100644
1733--- a/src/lib.rs
1734+++ b/src/lib.rs
1735@@ -1 +1,2 @@
1736 pub fn demo() -> usize { 1 }
1737+pub fn normalize_test_output() -> bool { true }
1738"
1739 .into(),
1740 validation_profile: "bootstrap-seed".into(),
1741 },
1742 SeedTemplate {
1743 id: "bootstrap-refactor".into(),
1744 intent: "Seed a low-risk refactor capsule".into(),
1745 signals: vec!["bootstrap refactor".into(), "small refactor".into()],
1746 diff_payload: "\
1747diff --git a/src/lib.rs b/src/lib.rs
1748index 2222222..3333333 100644
1749--- a/src/lib.rs
1750+++ b/src/lib.rs
1751@@ -1 +1,3 @@
1752 pub fn demo() -> usize { 1 }
1753+
1754+fn extract_strategy_key(input: &str) -> &str { input }
1755"
1756 .into(),
1757 validation_profile: "bootstrap-seed".into(),
1758 },
1759 SeedTemplate {
1760 id: "bootstrap-logging".into(),
1761 intent: "Seed a baseline structured logging mutation".into(),
1762 signals: vec!["bootstrap logging".into(), "structured logs".into()],
1763 diff_payload: "\
1764diff --git a/src/lib.rs b/src/lib.rs
1765index 3333333..4444444 100644
1766--- a/src/lib.rs
1767+++ b/src/lib.rs
1768@@ -1 +1,3 @@
1769 pub fn demo() -> usize { 1 }
1770+
1771+fn emit_bootstrap_log() { println!(\"bootstrap-log\"); }
1772"
1773 .into(),
1774 validation_profile: "bootstrap-seed".into(),
1775 },
1776 ]
1777}
1778
1779fn build_seed_mutation(template: &SeedTemplate) -> PreparedMutation {
1780 let changed_files = seed_changed_files(&template.diff_payload);
1781 let target = if changed_files.is_empty() {
1782 MutationTarget::WorkspaceRoot
1783 } else {
1784 MutationTarget::Paths {
1785 allow: changed_files,
1786 }
1787 };
1788 prepare_mutation(
1789 MutationIntent {
1790 id: stable_hash_json(&("bootstrap-mutation", &template.id))
1791 .unwrap_or_else(|_| format!("bootstrap-mutation-{}", template.id)),
1792 intent: template.intent.clone(),
1793 target,
1794 expected_effect: format!("seed {}", template.id),
1795 risk: RiskLevel::Low,
1796 signals: template.signals.clone(),
1797 spec_id: None,
1798 },
1799 template.diff_payload.clone(),
1800 None,
1801 )
1802}
1803
1804fn extract_seed_signals(template: &SeedTemplate) -> SignalExtractionOutput {
1805 let mut signals = BTreeSet::new();
1806 for declared in &template.signals {
1807 if let Some(phrase) = normalize_signal_phrase(declared) {
1808 signals.insert(phrase);
1809 }
1810 extend_signal_tokens(&mut signals, declared);
1811 }
1812 extend_signal_tokens(&mut signals, &template.intent);
1813 extend_signal_tokens(&mut signals, &template.diff_payload);
1814 for changed_file in seed_changed_files(&template.diff_payload) {
1815 extend_signal_tokens(&mut signals, &changed_file);
1816 }
1817 let values = signals.into_iter().take(32).collect::<Vec<_>>();
1818 let hash =
1819 stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
1820 SignalExtractionOutput { values, hash }
1821}
1822
/// Collects the target paths named by `+++ b/<path>` lines of a unified diff,
/// deduplicated and sorted; blank paths are dropped.
fn seed_changed_files(diff_payload: &str) -> Vec<String> {
    diff_payload
        .lines()
        .filter_map(|line| line.strip_prefix("+++ b/"))
        .map(str::trim)
        .filter(|path| !path.is_empty())
        .map(str::to_string)
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect()
}
1835
1836fn build_bootstrap_gene(
1837 template: &SeedTemplate,
1838 extracted: &SignalExtractionOutput,
1839) -> Result<Gene, EvolutionError> {
1840 let strategy = vec![template.id.clone(), "bootstrap".into()];
1841 let id = stable_hash_json(&(
1842 "bootstrap-gene",
1843 &template.id,
1844 &extracted.values,
1845 &template.validation_profile,
1846 ))?;
1847 Ok(Gene {
1848 id,
1849 signals: extracted.values.clone(),
1850 strategy,
1851 validation: vec![template.validation_profile.clone()],
1852 state: AssetState::Quarantined,
1853 })
1854}
1855
1856fn build_bootstrap_capsule(
1857 run_id: &RunId,
1858 template: &SeedTemplate,
1859 mutation: &PreparedMutation,
1860 gene: &Gene,
1861) -> Result<Capsule, EvolutionError> {
1862 let cwd = std::env::current_dir().unwrap_or_else(|_| Path::new(".").to_path_buf());
1863 let env = current_env_fingerprint(&cwd);
1864 let diff_hash = mutation.artifact.content_hash.clone();
1865 let changed_files = seed_changed_files(&template.diff_payload);
1866 let validator_hash = stable_hash_json(&(
1867 "bootstrap-validator",
1868 &template.id,
1869 &template.validation_profile,
1870 &diff_hash,
1871 ))?;
1872 let id = stable_hash_json(&(
1873 "bootstrap-capsule",
1874 &template.id,
1875 run_id,
1876 &gene.id,
1877 &diff_hash,
1878 &env,
1879 ))?;
1880 Ok(Capsule {
1881 id,
1882 gene_id: gene.id.clone(),
1883 mutation_id: mutation.intent.id.clone(),
1884 run_id: run_id.clone(),
1885 diff_hash,
1886 confidence: 0.0,
1887 env,
1888 outcome: Outcome {
1889 success: false,
1890 validation_profile: template.validation_profile.clone(),
1891 validation_duration_ms: 0,
1892 changed_files,
1893 validator_hash,
1894 lines_changed: compute_blast_radius(&template.diff_payload).lines_changed,
1895 replay_verified: false,
1896 },
1897 state: AssetState::Quarantined,
1898 })
1899}
1900
1901fn derive_gene(
1902 mutation: &PreparedMutation,
1903 receipt: &SandboxReceipt,
1904 validation_profile: &str,
1905 extracted_signals: &[String],
1906) -> Gene {
1907 let mut strategy = BTreeSet::new();
1908 for file in &receipt.changed_files {
1909 if let Some(component) = file.components().next() {
1910 strategy.insert(component.as_os_str().to_string_lossy().to_string());
1911 }
1912 }
1913 for token in mutation
1914 .artifact
1915 .payload
1916 .split(|ch: char| !ch.is_ascii_alphanumeric())
1917 {
1918 if token.len() == 5
1919 && token.starts_with('E')
1920 && token[1..].chars().all(|ch| ch.is_ascii_digit())
1921 {
1922 strategy.insert(token.to_string());
1923 }
1924 }
1925 for token in mutation.intent.intent.split_whitespace().take(8) {
1926 strategy.insert(token.to_ascii_lowercase());
1927 }
1928 let strategy = strategy.into_iter().collect::<Vec<_>>();
1929 let id = stable_hash_json(&(extracted_signals, &strategy, validation_profile))
1930 .unwrap_or_else(|_| next_id("gene"));
1931 Gene {
1932 id,
1933 signals: extracted_signals.to_vec(),
1934 strategy,
1935 validation: vec![validation_profile.to_string()],
1936 state: AssetState::Promoted,
1937 }
1938}
1939
1940fn build_capsule(
1941 run_id: &RunId,
1942 mutation: &PreparedMutation,
1943 receipt: &SandboxReceipt,
1944 report: &ValidationReport,
1945 validation_profile: &str,
1946 gene: &Gene,
1947 blast_radius: &BlastRadius,
1948) -> Result<Capsule, EvolutionError> {
1949 let env = current_env_fingerprint(&receipt.workdir);
1950 let validator_hash = stable_hash_json(report)?;
1951 let diff_hash = mutation.artifact.content_hash.clone();
1952 let id = stable_hash_json(&(run_id, &gene.id, &diff_hash, &mutation.intent.id))?;
1953 Ok(Capsule {
1954 id,
1955 gene_id: gene.id.clone(),
1956 mutation_id: mutation.intent.id.clone(),
1957 run_id: run_id.clone(),
1958 diff_hash,
1959 confidence: 0.7,
1960 env,
1961 outcome: oris_evolution::Outcome {
1962 success: true,
1963 validation_profile: validation_profile.to_string(),
1964 validation_duration_ms: report.duration_ms,
1965 changed_files: receipt
1966 .changed_files
1967 .iter()
1968 .map(|path| path.to_string_lossy().to_string())
1969 .collect(),
1970 validator_hash,
1971 lines_changed: blast_radius.lines_changed,
1972 replay_verified: false,
1973 },
1974 state: AssetState::Promoted,
1975 })
1976}
1977
1978fn current_env_fingerprint(workdir: &Path) -> EnvFingerprint {
1979 let rustc_version = Command::new("rustc")
1980 .arg("--version")
1981 .output()
1982 .ok()
1983 .filter(|output| output.status.success())
1984 .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
1985 .unwrap_or_else(|| "rustc unknown".into());
1986 let cargo_lock_hash = fs::read(workdir.join("Cargo.lock"))
1987 .ok()
1988 .map(|bytes| {
1989 let value = String::from_utf8_lossy(&bytes);
1990 compute_artifact_hash(&value)
1991 })
1992 .unwrap_or_else(|| "missing-cargo-lock".into());
1993 let target_triple = format!(
1994 "{}-unknown-{}",
1995 std::env::consts::ARCH,
1996 std::env::consts::OS
1997 );
1998 EnvFingerprint {
1999 rustc_version,
2000 cargo_lock_hash,
2001 target_triple,
2002 os: std::env::consts::OS.to_string(),
2003 }
2004}
2005
2006fn extend_signal_tokens(out: &mut BTreeSet<String>, input: &str) {
2007 for raw in input.split(|ch: char| !ch.is_ascii_alphanumeric()) {
2008 let trimmed = raw.trim();
2009 if trimmed.is_empty() {
2010 continue;
2011 }
2012 let normalized = if is_rust_error_code(trimmed) {
2013 let mut chars = trimmed.chars();
2014 let prefix = chars
2015 .next()
2016 .map(|ch| ch.to_ascii_uppercase())
2017 .unwrap_or('E');
2018 format!("{prefix}{}", chars.as_str())
2019 } else {
2020 trimmed.to_ascii_lowercase()
2021 };
2022 if normalized.len() < 3 {
2023 continue;
2024 }
2025 out.insert(normalized);
2026 }
2027}
2028
2029fn normalize_signal_phrase(input: &str) -> Option<String> {
2030 let normalized = input
2031 .split(|ch: char| !ch.is_ascii_alphanumeric())
2032 .filter_map(|raw| {
2033 let trimmed = raw.trim();
2034 if trimmed.is_empty() {
2035 return None;
2036 }
2037 let normalized = if is_rust_error_code(trimmed) {
2038 let mut chars = trimmed.chars();
2039 let prefix = chars
2040 .next()
2041 .map(|ch| ch.to_ascii_uppercase())
2042 .unwrap_or('E');
2043 format!("{prefix}{}", chars.as_str())
2044 } else {
2045 trimmed.to_ascii_lowercase()
2046 };
2047 if normalized.len() < 3 {
2048 None
2049 } else {
2050 Some(normalized)
2051 }
2052 })
2053 .collect::<Vec<_>>()
2054 .join(" ");
2055 if normalized.is_empty() {
2056 None
2057 } else {
2058 Some(normalized)
2059 }
2060}
2061
/// True for Rust compiler error codes: exactly five bytes, a leading 'e' or
/// 'E', and four ASCII digits (e.g. "E0308", "e0432").
fn is_rust_error_code(value: &str) -> bool {
    let bytes = value.as_bytes();
    bytes.len() == 5
        && matches!(bytes[0], b'e' | b'E')
        && bytes[1..].iter().all(u8::is_ascii_digit)
}
2067
2068fn find_declared_mutation(
2069 store: &dyn EvolutionStore,
2070 mutation_id: &MutationId,
2071) -> Result<Option<PreparedMutation>, EvolutionError> {
2072 for stored in store.scan(1)? {
2073 if let EvolutionEvent::MutationDeclared { mutation } = stored.event {
2074 if &mutation.intent.id == mutation_id {
2075 return Ok(Some(mutation));
2076 }
2077 }
2078 }
2079 Ok(None)
2080}
2081
/// Finds promoted genes whose (lowercased) signal set EXACTLY equals the
/// query's, pairing each with its promoted capsules ordered best-first.
/// Projection failures degrade to an empty result rather than erroring.
fn exact_match_candidates(store: &dyn EvolutionStore, input: &SelectorInput) -> Vec<GeneCandidate> {
    let Ok(projection) = store.rebuild_projection() else {
        return Vec::new();
    };
    let capsules = projection.capsules.clone();
    let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
    // A blank/whitespace spec id means "no spec filter".
    let requested_spec_id = input
        .spec_id
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty());
    let signal_set = input
        .signals
        .iter()
        .map(|signal| signal.to_ascii_lowercase())
        .collect::<BTreeSet<_>>();
    let mut candidates = projection
        .genes
        .into_iter()
        .filter_map(|gene| {
            // Only promoted genes qualify for exact-match replay.
            if gene.state != AssetState::Promoted {
                return None;
            }
            // When a spec id is requested, the gene must be linked to it
            // (case-insensitive comparison).
            if let Some(spec_id) = requested_spec_id {
                let matches_spec = spec_ids_by_gene
                    .get(&gene.id)
                    .map(|values| {
                        values
                            .iter()
                            .any(|value| value.eq_ignore_ascii_case(spec_id))
                    })
                    .unwrap_or(false);
                if !matches_spec {
                    return None;
                }
            }
            let gene_signals = gene
                .signals
                .iter()
                .map(|signal| signal.to_ascii_lowercase())
                .collect::<BTreeSet<_>>();
            // "Exact match" means full set equality, not mere overlap.
            if gene_signals == signal_set {
                let mut matched_capsules = capsules
                    .iter()
                    .filter(|capsule| {
                        capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
                    })
                    .cloned()
                    .collect::<Vec<_>>();
                // Best capsule first: environment similarity desc, then
                // confidence desc, then capsule id asc as a deterministic
                // tie-break.
                matched_capsules.sort_by(|left, right| {
                    replay_environment_match_factor(&input.env, &right.env)
                        .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
                        .unwrap_or(std::cmp::Ordering::Equal)
                        .then_with(|| {
                            right
                                .confidence
                                .partial_cmp(&left.confidence)
                                .unwrap_or(std::cmp::Ordering::Equal)
                        })
                        .then_with(|| left.id.cmp(&right.id))
                });
                if matched_capsules.is_empty() {
                    None
                } else {
                    // The candidate's score is the env-match factor of its
                    // best capsule.
                    let score = matched_capsules
                        .first()
                        .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
                        .unwrap_or(0.0);
                    Some(GeneCandidate {
                        gene,
                        score,
                        capsules: matched_capsules,
                    })
                }
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    // Final ordering: score desc, gene id asc for determinism.
    candidates.sort_by(|left, right| {
        right
            .score
            .partial_cmp(&left.score)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| left.gene.id.cmp(&right.gene.id))
    });
    candidates
}
2170
/// Selects quarantined remote gene candidates whose signals overlap the query.
///
/// Only assets that arrived via a `RemoteAssetImported` event with a `Remote`
/// source are eligible; their capsules must still be in the `Quarantined`
/// state. Matching is fuzzy: query and gene signals are normalized and
/// compared by substring overlap in either direction. The score is the larger
/// of the signal-overlap ratio and the best capsule's environment match, and
/// results are sorted by score with gene id as a deterministic tiebreaker.
fn quarantined_remote_exact_match_candidates(
    store: &dyn EvolutionStore,
    input: &SelectorInput,
) -> Vec<GeneCandidate> {
    // Collect the ids of every asset that was imported from a remote peer;
    // anything locally produced is out of scope for this selector.
    let remote_asset_ids = store
        .scan(1)
        .ok()
        .map(|events| {
            events
                .into_iter()
                .filter_map(|stored| match stored.event {
                    EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids,
                    } => Some(asset_ids),
                    _ => None,
                })
                .flatten()
                .collect::<BTreeSet<_>>()
        })
        .unwrap_or_default();
    if remote_asset_ids.is_empty() {
        return Vec::new();
    }

    // Best effort: a projection failure yields no candidates rather than an error.
    let Ok(projection) = store.rebuild_projection() else {
        return Vec::new();
    };
    let capsules = projection.capsules.clone();
    let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
    // An empty / all-whitespace spec id is treated as "no spec filter".
    let requested_spec_id = input
        .spec_id
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty());
    // Normalize and dedupe the query signals (BTreeSet) before matching.
    let normalized_signals = input
        .signals
        .iter()
        .filter_map(|signal| normalize_signal_phrase(signal))
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect::<Vec<_>>();
    if normalized_signals.is_empty() {
        return Vec::new();
    }
    let mut candidates = projection
        .genes
        .into_iter()
        .filter_map(|gene| {
            // Unlike the promoted-only selector, quarantined genes are allowed here.
            if !matches!(gene.state, AssetState::Promoted | AssetState::Quarantined) {
                return None;
            }
            // When a spec id was requested, the gene must be linked to it
            // (case-insensitive comparison).
            if let Some(spec_id) = requested_spec_id {
                let matches_spec = spec_ids_by_gene
                    .get(&gene.id)
                    .map(|values| {
                        values
                            .iter()
                            .any(|value| value.eq_ignore_ascii_case(spec_id))
                    })
                    .unwrap_or(false);
                if !matches_spec {
                    return None;
                }
            }
            let normalized_gene_signals = gene
                .signals
                .iter()
                .filter_map(|candidate| normalize_signal_phrase(candidate))
                .collect::<Vec<_>>();
            // Count query signals that fuzzily match any gene signal
            // (substring containment in either direction).
            let matched_query_count = normalized_signals
                .iter()
                .filter(|signal| {
                    normalized_gene_signals.iter().any(|candidate| {
                        candidate.contains(signal.as_str()) || signal.contains(candidate)
                    })
                })
                .count();
            if matched_query_count == 0 {
                return None;
            }

            // Restrict capsules to this gene's still-quarantined remote imports.
            let mut matched_capsules = capsules
                .iter()
                .filter(|capsule| {
                    capsule.gene_id == gene.id
                        && capsule.state == AssetState::Quarantined
                        && remote_asset_ids.contains(&capsule.id)
                })
                .cloned()
                .collect::<Vec<_>>();
            // Order: best environment match, then highest confidence, then id
            // (the final key makes the ordering fully deterministic).
            matched_capsules.sort_by(|left, right| {
                replay_environment_match_factor(&input.env, &right.env)
                    .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
                    .unwrap_or(std::cmp::Ordering::Equal)
                    .then_with(|| {
                        right
                            .confidence
                            .partial_cmp(&left.confidence)
                            .unwrap_or(std::cmp::Ordering::Equal)
                    })
                    .then_with(|| left.id.cmp(&right.id))
            });
            if matched_capsules.is_empty() {
                None
            } else {
                // Score: signal-overlap ratio vs. the best capsule's env match,
                // whichever is higher.
                let overlap = matched_query_count as f32 / normalized_signals.len() as f32;
                let env_score = matched_capsules
                    .first()
                    .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
                    .unwrap_or(0.0);
                Some(GeneCandidate {
                    gene,
                    score: overlap.max(env_score),
                    capsules: matched_capsules,
                })
            }
        })
        .collect::<Vec<_>>();
    // Highest score first; gene id breaks ties deterministically.
    candidates.sort_by(|left, right| {
        right
            .score
            .partial_cmp(&left.score)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| left.gene.id.cmp(&right.gene.id))
    });
    candidates
}
2299
2300fn replay_environment_match_factor(input: &EnvFingerprint, candidate: &EnvFingerprint) -> f32 {
2301 let fields = [
2302 input
2303 .rustc_version
2304 .eq_ignore_ascii_case(&candidate.rustc_version),
2305 input
2306 .cargo_lock_hash
2307 .eq_ignore_ascii_case(&candidate.cargo_lock_hash),
2308 input
2309 .target_triple
2310 .eq_ignore_ascii_case(&candidate.target_triple),
2311 input.os.eq_ignore_ascii_case(&candidate.os),
2312 ];
2313 let matched_fields = fields.into_iter().filter(|matched| *matched).count() as f32;
2314 0.5 + ((matched_fields / 4.0) * 0.5)
2315}
2316
2317fn effective_candidate_score(
2318 candidate: &GeneCandidate,
2319 publishers_by_gene: &BTreeMap<String, String>,
2320 reputation_bias: &BTreeMap<String, f32>,
2321) -> f32 {
2322 let bias = publishers_by_gene
2323 .get(&candidate.gene.id)
2324 .and_then(|publisher| reputation_bias.get(publisher))
2325 .copied()
2326 .unwrap_or(0.0)
2327 .clamp(0.0, 1.0);
2328 candidate.score * (1.0 + (bias * 0.1))
2329}
2330
2331fn export_promoted_assets_from_store(
2332 store: &dyn EvolutionStore,
2333 sender_id: impl Into<String>,
2334) -> Result<EvolutionEnvelope, EvoKernelError> {
2335 let (events, projection) = scan_projection(store)?;
2336 let genes = projection
2337 .genes
2338 .into_iter()
2339 .filter(|gene| gene.state == AssetState::Promoted)
2340 .collect::<Vec<_>>();
2341 let capsules = projection
2342 .capsules
2343 .into_iter()
2344 .filter(|capsule| capsule.state == AssetState::Promoted)
2345 .collect::<Vec<_>>();
2346 let assets = replay_export_assets(&events, genes, capsules);
2347 Ok(EvolutionEnvelope::publish(sender_id, assets))
2348}
2349
2350fn scan_projection(
2351 store: &dyn EvolutionStore,
2352) -> Result<(Vec<StoredEvolutionEvent>, EvolutionProjection), EvoKernelError> {
2353 let events = store.scan(1).map_err(store_err)?;
2354 let projection = rebuild_projection_from_events(&events);
2355 Ok((events, projection))
2356}
2357
2358fn replay_export_assets(
2359 events: &[StoredEvolutionEvent],
2360 genes: Vec<Gene>,
2361 capsules: Vec<Capsule>,
2362) -> Vec<NetworkAsset> {
2363 let mutation_ids = capsules
2364 .iter()
2365 .map(|capsule| capsule.mutation_id.clone())
2366 .collect::<BTreeSet<_>>();
2367 let mut assets = replay_export_events_for_mutations(events, &mutation_ids);
2368 for gene in genes {
2369 assets.push(NetworkAsset::Gene { gene });
2370 }
2371 for capsule in capsules {
2372 assets.push(NetworkAsset::Capsule { capsule });
2373 }
2374 assets
2375}
2376
2377fn replay_export_events_for_mutations(
2378 events: &[StoredEvolutionEvent],
2379 mutation_ids: &BTreeSet<String>,
2380) -> Vec<NetworkAsset> {
2381 if mutation_ids.is_empty() {
2382 return Vec::new();
2383 }
2384
2385 let mut assets = Vec::new();
2386 let mut seen_mutations = BTreeSet::new();
2387 let mut seen_spec_links = BTreeSet::new();
2388 for stored in events {
2389 match &stored.event {
2390 EvolutionEvent::MutationDeclared { mutation }
2391 if mutation_ids.contains(mutation.intent.id.as_str())
2392 && seen_mutations.insert(mutation.intent.id.clone()) =>
2393 {
2394 assets.push(NetworkAsset::EvolutionEvent {
2395 event: EvolutionEvent::MutationDeclared {
2396 mutation: mutation.clone(),
2397 },
2398 });
2399 }
2400 EvolutionEvent::SpecLinked {
2401 mutation_id,
2402 spec_id,
2403 } if mutation_ids.contains(mutation_id.as_str())
2404 && seen_spec_links.insert((mutation_id.clone(), spec_id.clone())) =>
2405 {
2406 assets.push(NetworkAsset::EvolutionEvent {
2407 event: EvolutionEvent::SpecLinked {
2408 mutation_id: mutation_id.clone(),
2409 spec_id: spec_id.clone(),
2410 },
2411 });
2412 }
2413 _ => {}
2414 }
2415 }
2416
2417 assets
2418}
2419
/// Imports a remote evolution envelope into the local store.
///
/// The envelope's content hash is verified first. Every gene and capsule it
/// carries is re-recorded locally in the `Quarantined` state — remote assets
/// must pass local validation before they can be promoted — and raw events
/// are replayed only if whitelisted by `should_import_remote_event`.
/// Returns the ids of all imported genes/capsules.
fn import_remote_envelope_into_store(
    store: &dyn EvolutionStore,
    envelope: &EvolutionEnvelope,
    remote_publishers: Option<&Mutex<BTreeMap<String, String>>>,
) -> Result<ImportOutcome, EvoKernelError> {
    // Reject envelopes whose declared hash does not match their payload.
    if !envelope.verify_content_hash() {
        return Err(EvoKernelError::Validation(
            "invalid evolution envelope hash".into(),
        ));
    }

    let mut imported_asset_ids = Vec::new();
    for asset in &envelope.assets {
        match asset {
            NetworkAsset::Gene { gene } => {
                imported_asset_ids.push(gene.id.clone());
                // Force quarantine regardless of the state the sender shipped.
                let mut quarantined_gene = gene.clone();
                quarantined_gene.state = AssetState::Quarantined;
                // Append order is intentional: provenance first, then the
                // projected gene, then the quarantine evaluation.
                store
                    .append_event(EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids: vec![gene.id.clone()],
                    })
                    .map_err(store_err)?;
                store
                    .append_event(EvolutionEvent::GeneProjected {
                        gene: quarantined_gene.clone(),
                    })
                    .map_err(store_err)?;
                record_remote_publisher_for_asset(remote_publishers, &envelope.sender_id, asset);
                store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: quarantined_gene.id,
                        state: AssetState::Quarantined,
                        reason: "remote asset requires local validation before promotion".into(),
                    })
                    .map_err(store_err)?;
            }
            NetworkAsset::Capsule { capsule } => {
                imported_asset_ids.push(capsule.id.clone());
                store
                    .append_event(EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids: vec![capsule.id.clone()],
                    })
                    .map_err(store_err)?;
                // Capsules are likewise committed in the quarantined state.
                let mut quarantined = capsule.clone();
                quarantined.state = AssetState::Quarantined;
                store
                    .append_event(EvolutionEvent::CapsuleCommitted {
                        capsule: quarantined.clone(),
                    })
                    .map_err(store_err)?;
                record_remote_publisher_for_asset(remote_publishers, &envelope.sender_id, asset);
                store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: quarantined.id,
                    })
                    .map_err(store_err)?;
            }
            NetworkAsset::EvolutionEvent { event } => {
                // Only replay the whitelisted event kinds (declarations, spec links).
                if should_import_remote_event(event) {
                    store.append_event(event.clone()).map_err(store_err)?;
                }
            }
        }
    }

    Ok(ImportOutcome {
        imported_asset_ids,
        accepted: true,
    })
}
2493
2494fn record_remote_publisher_for_asset(
2495 remote_publishers: Option<&Mutex<BTreeMap<String, String>>>,
2496 sender_id: &str,
2497 asset: &NetworkAsset,
2498) {
2499 let Some(remote_publishers) = remote_publishers else {
2500 return;
2501 };
2502 let sender_id = sender_id.trim();
2503 if sender_id.is_empty() {
2504 return;
2505 }
2506 let Ok(mut publishers) = remote_publishers.lock() else {
2507 return;
2508 };
2509 match asset {
2510 NetworkAsset::Gene { gene } => {
2511 publishers.insert(gene.id.clone(), sender_id.to_string());
2512 }
2513 NetworkAsset::Capsule { capsule } => {
2514 publishers.insert(capsule.gene_id.clone(), sender_id.to_string());
2515 }
2516 NetworkAsset::EvolutionEvent { .. } => {}
2517 }
2518}
2519
2520fn should_import_remote_event(event: &EvolutionEvent) -> bool {
2521 matches!(
2522 event,
2523 EvolutionEvent::MutationDeclared { .. } | EvolutionEvent::SpecLinked { .. }
2524 )
2525}
2526
2527fn fetch_assets_from_store(
2528 store: &dyn EvolutionStore,
2529 responder_id: impl Into<String>,
2530 query: &FetchQuery,
2531) -> Result<FetchResponse, EvoKernelError> {
2532 let (events, projection) = scan_projection(store)?;
2533 let normalized_signals: Vec<String> = query
2534 .signals
2535 .iter()
2536 .map(|signal| signal.trim().to_ascii_lowercase())
2537 .filter(|signal| !signal.is_empty())
2538 .collect();
2539 let matches_any_signal = |candidate: &str| {
2540 if normalized_signals.is_empty() {
2541 return true;
2542 }
2543 let candidate = candidate.to_ascii_lowercase();
2544 normalized_signals
2545 .iter()
2546 .any(|signal| candidate.contains(signal) || signal.contains(&candidate))
2547 };
2548
2549 let matched_genes: Vec<Gene> = projection
2550 .genes
2551 .into_iter()
2552 .filter(|gene| gene.state == AssetState::Promoted)
2553 .filter(|gene| gene.signals.iter().any(|signal| matches_any_signal(signal)))
2554 .collect();
2555 let matched_gene_ids: BTreeSet<String> =
2556 matched_genes.iter().map(|gene| gene.id.clone()).collect();
2557 let matched_capsules: Vec<Capsule> = projection
2558 .capsules
2559 .into_iter()
2560 .filter(|capsule| capsule.state == AssetState::Promoted)
2561 .filter(|capsule| matched_gene_ids.contains(&capsule.gene_id))
2562 .collect();
2563
2564 let assets = replay_export_assets(&events, matched_genes, matched_capsules);
2565
2566 Ok(FetchResponse {
2567 sender_id: responder_id.into(),
2568 assets,
2569 })
2570}
2571
2572fn revoke_assets_in_store(
2573 store: &dyn EvolutionStore,
2574 notice: &RevokeNotice,
2575) -> Result<RevokeNotice, EvoKernelError> {
2576 let projection = store.rebuild_projection().map_err(store_err)?;
2577 let requested: BTreeSet<String> = notice
2578 .asset_ids
2579 .iter()
2580 .map(|asset_id| asset_id.trim().to_string())
2581 .filter(|asset_id| !asset_id.is_empty())
2582 .collect();
2583 let mut revoked_gene_ids = BTreeSet::new();
2584 let mut quarantined_capsule_ids = BTreeSet::new();
2585
2586 for gene in &projection.genes {
2587 if requested.contains(&gene.id) {
2588 revoked_gene_ids.insert(gene.id.clone());
2589 }
2590 }
2591 for capsule in &projection.capsules {
2592 if requested.contains(&capsule.id) {
2593 quarantined_capsule_ids.insert(capsule.id.clone());
2594 revoked_gene_ids.insert(capsule.gene_id.clone());
2595 }
2596 }
2597 for capsule in &projection.capsules {
2598 if revoked_gene_ids.contains(&capsule.gene_id) {
2599 quarantined_capsule_ids.insert(capsule.id.clone());
2600 }
2601 }
2602
2603 for gene_id in &revoked_gene_ids {
2604 store
2605 .append_event(EvolutionEvent::GeneRevoked {
2606 gene_id: gene_id.clone(),
2607 reason: notice.reason.clone(),
2608 })
2609 .map_err(store_err)?;
2610 }
2611 for capsule_id in &quarantined_capsule_ids {
2612 store
2613 .append_event(EvolutionEvent::CapsuleQuarantined {
2614 capsule_id: capsule_id.clone(),
2615 })
2616 .map_err(store_err)?;
2617 }
2618
2619 let mut affected_ids: Vec<String> = revoked_gene_ids.into_iter().collect();
2620 affected_ids.extend(quarantined_capsule_ids);
2621 affected_ids.sort();
2622 affected_ids.dedup();
2623
2624 Ok(RevokeNotice {
2625 sender_id: notice.sender_id.clone(),
2626 asset_ids: affected_ids,
2627 reason: notice.reason.clone(),
2628 })
2629}
2630
2631fn evolution_metrics_snapshot(
2632 store: &dyn EvolutionStore,
2633) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
2634 let events = store.scan(1).map_err(store_err)?;
2635 let projection = store.rebuild_projection().map_err(store_err)?;
2636 let replay_success_total = events
2637 .iter()
2638 .filter(|stored| matches!(stored.event, EvolutionEvent::CapsuleReused { .. }))
2639 .count() as u64;
2640 let replay_failures_total = events
2641 .iter()
2642 .filter(|stored| is_replay_validation_failure(&stored.event))
2643 .count() as u64;
2644 let replay_attempts_total = replay_success_total + replay_failures_total;
2645 let mutation_declared_total = events
2646 .iter()
2647 .filter(|stored| matches!(stored.event, EvolutionEvent::MutationDeclared { .. }))
2648 .count() as u64;
2649 let promoted_mutations_total = events
2650 .iter()
2651 .filter(|stored| matches!(stored.event, EvolutionEvent::GenePromoted { .. }))
2652 .count() as u64;
2653 let gene_revocations_total = events
2654 .iter()
2655 .filter(|stored| matches!(stored.event, EvolutionEvent::GeneRevoked { .. }))
2656 .count() as u64;
2657 let cutoff = Utc::now() - Duration::hours(1);
2658 let mutation_velocity_last_hour = count_recent_events(&events, cutoff, |event| {
2659 matches!(event, EvolutionEvent::MutationDeclared { .. })
2660 });
2661 let revoke_frequency_last_hour = count_recent_events(&events, cutoff, |event| {
2662 matches!(event, EvolutionEvent::GeneRevoked { .. })
2663 });
2664 let promoted_genes = projection
2665 .genes
2666 .iter()
2667 .filter(|gene| gene.state == AssetState::Promoted)
2668 .count() as u64;
2669 let promoted_capsules = projection
2670 .capsules
2671 .iter()
2672 .filter(|capsule| capsule.state == AssetState::Promoted)
2673 .count() as u64;
2674
2675 Ok(EvolutionMetricsSnapshot {
2676 replay_attempts_total,
2677 replay_success_total,
2678 replay_success_rate: safe_ratio(replay_success_total, replay_attempts_total),
2679 mutation_declared_total,
2680 promoted_mutations_total,
2681 promotion_ratio: safe_ratio(promoted_mutations_total, mutation_declared_total),
2682 gene_revocations_total,
2683 mutation_velocity_last_hour,
2684 revoke_frequency_last_hour,
2685 promoted_genes,
2686 promoted_capsules,
2687 last_event_seq: events.last().map(|stored| stored.seq).unwrap_or(0),
2688 })
2689}
2690
2691fn evolution_health_snapshot(snapshot: &EvolutionMetricsSnapshot) -> EvolutionHealthSnapshot {
2692 EvolutionHealthSnapshot {
2693 status: "ok".into(),
2694 last_event_seq: snapshot.last_event_seq,
2695 promoted_genes: snapshot.promoted_genes,
2696 promoted_capsules: snapshot.promoted_capsules,
2697 }
2698}
2699
2700fn render_evolution_metrics_prometheus(
2701 snapshot: &EvolutionMetricsSnapshot,
2702 health: &EvolutionHealthSnapshot,
2703) -> String {
2704 let mut out = String::new();
2705 out.push_str(
2706 "# HELP oris_evolution_replay_attempts_total Total replay attempts that reached validation.\n",
2707 );
2708 out.push_str("# TYPE oris_evolution_replay_attempts_total counter\n");
2709 out.push_str(&format!(
2710 "oris_evolution_replay_attempts_total {}\n",
2711 snapshot.replay_attempts_total
2712 ));
2713 out.push_str("# HELP oris_evolution_replay_success_total Total replay attempts that reused a capsule successfully.\n");
2714 out.push_str("# TYPE oris_evolution_replay_success_total counter\n");
2715 out.push_str(&format!(
2716 "oris_evolution_replay_success_total {}\n",
2717 snapshot.replay_success_total
2718 ));
2719 out.push_str("# HELP oris_evolution_replay_success_rate Successful replay attempts divided by replay attempts that reached validation.\n");
2720 out.push_str("# TYPE oris_evolution_replay_success_rate gauge\n");
2721 out.push_str(&format!(
2722 "oris_evolution_replay_success_rate {:.6}\n",
2723 snapshot.replay_success_rate
2724 ));
2725 out.push_str(
2726 "# HELP oris_evolution_mutation_declared_total Total declared mutations recorded in the evolution log.\n",
2727 );
2728 out.push_str("# TYPE oris_evolution_mutation_declared_total counter\n");
2729 out.push_str(&format!(
2730 "oris_evolution_mutation_declared_total {}\n",
2731 snapshot.mutation_declared_total
2732 ));
2733 out.push_str("# HELP oris_evolution_promoted_mutations_total Total mutations promoted by the governor.\n");
2734 out.push_str("# TYPE oris_evolution_promoted_mutations_total counter\n");
2735 out.push_str(&format!(
2736 "oris_evolution_promoted_mutations_total {}\n",
2737 snapshot.promoted_mutations_total
2738 ));
2739 out.push_str(
2740 "# HELP oris_evolution_promotion_ratio Promoted mutations divided by declared mutations.\n",
2741 );
2742 out.push_str("# TYPE oris_evolution_promotion_ratio gauge\n");
2743 out.push_str(&format!(
2744 "oris_evolution_promotion_ratio {:.6}\n",
2745 snapshot.promotion_ratio
2746 ));
2747 out.push_str("# HELP oris_evolution_gene_revocations_total Total gene revocations recorded in the evolution log.\n");
2748 out.push_str("# TYPE oris_evolution_gene_revocations_total counter\n");
2749 out.push_str(&format!(
2750 "oris_evolution_gene_revocations_total {}\n",
2751 snapshot.gene_revocations_total
2752 ));
2753 out.push_str("# HELP oris_evolution_mutation_velocity_last_hour Declared mutations observed in the last hour.\n");
2754 out.push_str("# TYPE oris_evolution_mutation_velocity_last_hour gauge\n");
2755 out.push_str(&format!(
2756 "oris_evolution_mutation_velocity_last_hour {}\n",
2757 snapshot.mutation_velocity_last_hour
2758 ));
2759 out.push_str("# HELP oris_evolution_revoke_frequency_last_hour Gene revocations observed in the last hour.\n");
2760 out.push_str("# TYPE oris_evolution_revoke_frequency_last_hour gauge\n");
2761 out.push_str(&format!(
2762 "oris_evolution_revoke_frequency_last_hour {}\n",
2763 snapshot.revoke_frequency_last_hour
2764 ));
2765 out.push_str("# HELP oris_evolution_promoted_genes Current promoted genes in the evolution projection.\n");
2766 out.push_str("# TYPE oris_evolution_promoted_genes gauge\n");
2767 out.push_str(&format!(
2768 "oris_evolution_promoted_genes {}\n",
2769 snapshot.promoted_genes
2770 ));
2771 out.push_str("# HELP oris_evolution_promoted_capsules Current promoted capsules in the evolution projection.\n");
2772 out.push_str("# TYPE oris_evolution_promoted_capsules gauge\n");
2773 out.push_str(&format!(
2774 "oris_evolution_promoted_capsules {}\n",
2775 snapshot.promoted_capsules
2776 ));
2777 out.push_str("# HELP oris_evolution_store_last_event_seq Last visible append-only evolution event sequence.\n");
2778 out.push_str("# TYPE oris_evolution_store_last_event_seq gauge\n");
2779 out.push_str(&format!(
2780 "oris_evolution_store_last_event_seq {}\n",
2781 snapshot.last_event_seq
2782 ));
2783 out.push_str(
2784 "# HELP oris_evolution_health Evolution observability store health (1 = healthy).\n",
2785 );
2786 out.push_str("# TYPE oris_evolution_health gauge\n");
2787 out.push_str(&format!(
2788 "oris_evolution_health {}\n",
2789 u8::from(health.status == "ok")
2790 ));
2791 out
2792}
2793
2794fn count_recent_events(
2795 events: &[StoredEvolutionEvent],
2796 cutoff: DateTime<Utc>,
2797 predicate: impl Fn(&EvolutionEvent) -> bool,
2798) -> u64 {
2799 events
2800 .iter()
2801 .filter(|stored| {
2802 predicate(&stored.event)
2803 && parse_event_timestamp(&stored.timestamp)
2804 .map(|timestamp| timestamp >= cutoff)
2805 .unwrap_or(false)
2806 })
2807 .count() as u64
2808}
2809
2810fn parse_event_timestamp(raw: &str) -> Option<DateTime<Utc>> {
2811 DateTime::parse_from_rfc3339(raw)
2812 .ok()
2813 .map(|parsed| parsed.with_timezone(&Utc))
2814}
2815
2816fn is_replay_validation_failure(event: &EvolutionEvent) -> bool {
2817 matches!(
2818 event,
2819 EvolutionEvent::ValidationFailed {
2820 gene_id: Some(_),
2821 ..
2822 }
2823 )
2824}
2825
/// Division that treats a zero denominator as a 0.0 ratio instead of NaN/inf.
fn safe_ratio(numerator: u64, denominator: u64) -> f64 {
    match denominator {
        0 => 0.0,
        nonzero => numerator as f64 / nonzero as f64,
    }
}
2833
2834fn store_err(err: EvolutionError) -> EvoKernelError {
2835 EvoKernelError::Store(err.to_string())
2836}
2837
2838#[cfg(test)]
2839mod tests {
2840 use super::*;
2841 use oris_agent_contract::{
2842 AgentRole, CoordinationPlan, CoordinationPrimitive, CoordinationTask,
2843 };
2844 use oris_kernel::{
2845 AllowAllPolicy, InMemoryEventStore, KernelMode, KernelState, NoopActionExecutor,
2846 NoopStepFn, StateUpdatedOnlyReducer,
2847 };
2848 use serde::{Deserialize, Serialize};
2849
    /// Minimal zero-sized kernel state used only by the tests in this module.
    #[derive(Clone, Debug, Default, Serialize, Deserialize)]
    struct TestState;

    impl KernelState for TestState {
        // Schema version; tests never migrate state, so any constant works.
        fn version(&self) -> u32 {
            1
        }
    }
2858
2859 fn temp_workspace(name: &str) -> std::path::PathBuf {
2860 let root =
2861 std::env::temp_dir().join(format!("oris-evokernel-{name}-{}", std::process::id()));
2862 if root.exists() {
2863 fs::remove_dir_all(&root).unwrap();
2864 }
2865 fs::create_dir_all(root.join("src")).unwrap();
2866 fs::write(
2867 root.join("Cargo.toml"),
2868 "[package]\nname = \"sample\"\nversion = \"0.1.0\"\nedition = \"2021\"\n",
2869 )
2870 .unwrap();
2871 fs::write(root.join("Cargo.lock"), "# lock\n").unwrap();
2872 fs::write(root.join("src/lib.rs"), "pub fn demo() -> usize { 1 }\n").unwrap();
2873 root
2874 }
2875
    /// Builds an in-memory kernel with no-op reducer/executor/step stubs and
    /// an allow-all policy — just enough structure to host an `EvoKernel`.
    fn test_kernel() -> Arc<Kernel<TestState>> {
        Arc::new(Kernel::<TestState> {
            events: Box::new(InMemoryEventStore::new()),
            snaps: None,
            reducer: Box::new(StateUpdatedOnlyReducer),
            exec: Box::new(NoopActionExecutor),
            step: Box::new(NoopStepFn),
            policy: Box::new(AllowAllPolicy),
            effect_sink: None,
            mode: KernelMode::Normal,
        })
    }
2888
2889 fn lightweight_plan() -> ValidationPlan {
2890 ValidationPlan {
2891 profile: "test".into(),
2892 stages: vec![ValidationStage::Command {
2893 program: "git".into(),
2894 args: vec!["--version".into()],
2895 timeout_ms: 5_000,
2896 }],
2897 }
2898 }
2899
    /// A canned low-risk mutation that adds a one-line README via a unified
    /// diff, scoped to `README.md` only.
    fn sample_mutation() -> PreparedMutation {
        prepare_mutation(
            MutationIntent {
                id: "mutation-1".into(),
                intent: "add README".into(),
                target: MutationTarget::Paths {
                    allow: vec!["README.md".into()],
                },
                expected_effect: "repo still builds".into(),
                risk: RiskLevel::Low,
                signals: vec!["missing readme".into()],
                spec_id: None,
            },
            // Raw unified-diff text; the diff lines must stay at column zero.
            "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1 @@
+# sample
"
            .into(),
            Some("HEAD".into()),
        )
    }
2926
2927 fn base_sandbox_policy() -> SandboxPolicy {
2928 SandboxPolicy {
2929 allowed_programs: vec!["git".into()],
2930 max_duration_ms: 60_000,
2931 max_output_bytes: 1024 * 1024,
2932 denied_env_prefixes: Vec::new(),
2933 }
2934 }
2935
2936 fn command_validator() -> Arc<dyn Validator> {
2937 Arc::new(CommandValidator::new(base_sandbox_policy()))
2938 }
2939
2940 fn replay_input(signal: &str) -> SelectorInput {
2941 let rustc_version = std::process::Command::new("rustc")
2942 .arg("--version")
2943 .output()
2944 .ok()
2945 .filter(|output| output.status.success())
2946 .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
2947 .unwrap_or_else(|| "rustc unknown".into());
2948 SelectorInput {
2949 signals: vec![signal.into()],
2950 env: EnvFingerprint {
2951 rustc_version,
2952 cargo_lock_hash: compute_artifact_hash("# lock\n"),
2953 target_triple: format!(
2954 "{}-unknown-{}",
2955 std::env::consts::ARCH,
2956 std::env::consts::OS
2957 ),
2958 os: std::env::consts::OS.into(),
2959 },
2960 spec_id: None,
2961 limit: 1,
2962 }
2963 }
2964
    /// Assembles an `EvoKernel` wired to the given store, with a local
    /// process sandbox rooted in a fresh temp workspace, a governor that
    /// promotes after a single success, the lightweight validation plan,
    /// and the base sandbox policy.
    fn build_test_evo_with_store(
        name: &str,
        run_id: &str,
        validator: Arc<dyn Validator>,
        store: Arc<dyn EvolutionStore>,
    ) -> EvoKernel<TestState> {
        let workspace = temp_workspace(name);
        let sandbox: Arc<dyn Sandbox> = Arc::new(oris_sandbox::LocalProcessSandbox::new(
            run_id,
            &workspace,
            std::env::temp_dir(),
        ));
        EvoKernel::new(test_kernel(), sandbox, validator, store)
            .with_governor(Arc::new(DefaultGovernor::new(
                oris_governor::GovernorConfig {
                    // One validation success is enough to promote — keeps tests short.
                    promote_after_successes: 1,
                    ..Default::default()
                },
            )))
            .with_validation_plan(lightweight_plan())
            .with_sandbox_policy(base_sandbox_policy())
    }
2987
    /// Like `build_test_evo_with_store`, but also creates a fresh JSONL store
    /// under the system temp dir and returns it alongside the kernel so tests
    /// can inspect the event log directly.
    fn build_test_evo(
        name: &str,
        run_id: &str,
        validator: Arc<dyn Validator>,
    ) -> (EvoKernel<TestState>, Arc<dyn EvolutionStore>) {
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-{name}-store-{}",
            std::process::id()
        ));
        // Start from a clean store; leftovers would leak events between runs.
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let evo = build_test_evo_with_store(name, run_id, validator, store.clone());
        (evo, store)
    }
3005
    /// Builds a remote publish envelope whose capsule environment fingerprint
    /// matches the local machine (via `replay_input`), so replay scoring
    /// treats it as a full environment match.
    fn remote_publish_envelope(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        signal: &str,
        file_name: &str,
        line: &str,
    ) -> EvolutionEnvelope {
        remote_publish_envelope_with_env(
            sender_id,
            run_id,
            gene_id,
            capsule_id,
            mutation_id,
            signal,
            file_name,
            line,
            replay_input(signal).env,
        )
    }
3028
3029 fn remote_publish_envelope_with_env(
3030 sender_id: &str,
3031 run_id: &str,
3032 gene_id: &str,
3033 capsule_id: &str,
3034 mutation_id: &str,
3035 signal: &str,
3036 file_name: &str,
3037 line: &str,
3038 env: EnvFingerprint,
3039 ) -> EvolutionEnvelope {
3040 let mutation = prepare_mutation(
3041 MutationIntent {
3042 id: mutation_id.into(),
3043 intent: format!("add {file_name}"),
3044 target: MutationTarget::Paths {
3045 allow: vec![file_name.into()],
3046 },
3047 expected_effect: "replay should still validate".into(),
3048 risk: RiskLevel::Low,
3049 signals: vec![signal.into()],
3050 spec_id: None,
3051 },
3052 format!(
3053 "\
3054diff --git a/{file_name} b/{file_name}
3055new file mode 100644
3056index 0000000..1111111
3057--- /dev/null
3058+++ b/{file_name}
3059@@ -0,0 +1 @@
3060+{line}
3061"
3062 ),
3063 Some("HEAD".into()),
3064 );
3065 let gene = Gene {
3066 id: gene_id.into(),
3067 signals: vec![signal.into()],
3068 strategy: vec![file_name.into()],
3069 validation: vec!["test".into()],
3070 state: AssetState::Promoted,
3071 };
3072 let capsule = Capsule {
3073 id: capsule_id.into(),
3074 gene_id: gene_id.into(),
3075 mutation_id: mutation_id.into(),
3076 run_id: run_id.into(),
3077 diff_hash: mutation.artifact.content_hash.clone(),
3078 confidence: 0.9,
3079 env,
3080 outcome: Outcome {
3081 success: true,
3082 validation_profile: "test".into(),
3083 validation_duration_ms: 1,
3084 changed_files: vec![file_name.into()],
3085 validator_hash: "validator-hash".into(),
3086 lines_changed: 1,
3087 replay_verified: false,
3088 },
3089 state: AssetState::Promoted,
3090 };
3091 EvolutionEnvelope::publish(
3092 sender_id,
3093 vec![
3094 NetworkAsset::EvolutionEvent {
3095 event: EvolutionEvent::MutationDeclared { mutation },
3096 },
3097 NetworkAsset::Gene { gene: gene.clone() },
3098 NetworkAsset::Capsule {
3099 capsule: capsule.clone(),
3100 },
3101 NetworkAsset::EvolutionEvent {
3102 event: EvolutionEvent::CapsuleReleased {
3103 capsule_id: capsule.id.clone(),
3104 state: AssetState::Promoted,
3105 },
3106 },
3107 ],
3108 )
3109 }
3110
    /// Builds a remote publish envelope with distinct signal lists for the
    /// mutation and the gene, plus an explicit environment fingerprint.
    ///
    /// The envelope carries the declared mutation event, the (promoted) gene,
    /// the (promoted) capsule, and a `CapsuleReleased` event — the shape a
    /// remote peer would publish after promoting the capsule.
    fn remote_publish_envelope_with_signals(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        mutation_signals: Vec<String>,
        gene_signals: Vec<String>,
        file_name: &str,
        line: &str,
        env: EnvFingerprint,
    ) -> EvolutionEnvelope {
        let mutation = prepare_mutation(
            MutationIntent {
                id: mutation_id.into(),
                intent: format!("add {file_name}"),
                target: MutationTarget::Paths {
                    allow: vec![file_name.into()],
                },
                expected_effect: "replay should still validate".into(),
                risk: RiskLevel::Low,
                signals: mutation_signals,
                spec_id: None,
            },
            // Unified diff creating `file_name` with a single line of content;
            // the diff lines must stay at column zero.
            format!(
                "\
diff --git a/{file_name} b/{file_name}
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/{file_name}
@@ -0,0 +1 @@
+{line}
"
            ),
            Some("HEAD".into()),
        );
        let gene = Gene {
            id: gene_id.into(),
            signals: gene_signals,
            strategy: vec![file_name.into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        };
        let capsule = Capsule {
            id: capsule_id.into(),
            gene_id: gene_id.into(),
            mutation_id: mutation_id.into(),
            run_id: run_id.into(),
            // Tie the capsule to the exact diff contents declared above.
            diff_hash: mutation.artifact.content_hash.clone(),
            confidence: 0.9,
            env,
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec![file_name.into()],
                validator_hash: "validator-hash".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        };
        EvolutionEnvelope::publish(
            sender_id,
            vec![
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared { mutation },
                },
                NetworkAsset::Gene { gene: gene.clone() },
                NetworkAsset::Capsule {
                    capsule: capsule.clone(),
                },
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::CapsuleReleased {
                        capsule_id: capsule.id.clone(),
                        state: AssetState::Promoted,
                    },
                },
            ],
        )
    }
3193
3194 struct FixedValidator {
3195 success: bool,
3196 }
3197
3198 #[async_trait]
3199 impl Validator for FixedValidator {
3200 async fn run(
3201 &self,
3202 _receipt: &SandboxReceipt,
3203 plan: &ValidationPlan,
3204 ) -> Result<ValidationReport, ValidationError> {
3205 Ok(ValidationReport {
3206 success: self.success,
3207 duration_ms: 1,
3208 stages: Vec::new(),
3209 logs: if self.success {
3210 format!("{} ok", plan.profile)
3211 } else {
3212 format!("{} failed", plan.profile)
3213 },
3214 })
3215 }
3216 }
3217
    /// Evolution store that delegates to a real JSONL store but fails the
    /// N-th `append_event` call, for exercising mid-sequence append failures.
    struct FailOnAppendStore {
        inner: JsonlEvolutionStore,
        // 1-based index of the append call that should return an error.
        fail_on_call: usize,
        // Number of append calls observed so far.
        call_count: Mutex<usize>,
    }

    impl FailOnAppendStore {
        fn new(root_dir: std::path::PathBuf, fail_on_call: usize) -> Self {
            Self {
                inner: JsonlEvolutionStore::new(root_dir),
                fail_on_call,
                call_count: Mutex::new(0),
            }
        }
    }

    impl EvolutionStore for FailOnAppendStore {
        // Counts every call and injects an error on the configured one; all
        // other calls (including later ones) pass through to the inner store.
        fn append_event(&self, event: EvolutionEvent) -> Result<u64, EvolutionError> {
            let mut call_count = self
                .call_count
                .lock()
                .map_err(|_| EvolutionError::Io("test store lock poisoned".into()))?;
            *call_count += 1;
            if *call_count == self.fail_on_call {
                return Err(EvolutionError::Io("injected append failure".into()));
            }
            self.inner.append_event(event)
        }

        fn scan(&self, from_seq: u64) -> Result<Vec<StoredEvolutionEvent>, EvolutionError> {
            self.inner.scan(from_seq)
        }

        fn rebuild_projection(&self) -> Result<EvolutionProjection, EvolutionError> {
            self.inner.rebuild_projection()
        }
    }
3255
#[test]
fn coordination_planner_to_coder_handoff_is_deterministic() {
    // Two sequential tasks: a planner first, then a coder that depends on it.
    let plan = CoordinationPlan {
        root_goal: "ship feature".into(),
        primitive: CoordinationPrimitive::Sequential,
        tasks: vec![
            CoordinationTask {
                id: "planner".into(),
                role: AgentRole::Planner,
                description: "split the work".into(),
                depends_on: Vec::new(),
            },
            CoordinationTask {
                id: "coder".into(),
                role: AgentRole::Coder,
                description: "implement the patch".into(),
                depends_on: vec!["planner".into()],
            },
        ],
        timeout_ms: 5_000,
        max_retries: 0,
    };
    let outcome = MultiAgentCoordinator::new().coordinate(plan);

    assert_eq!(outcome.completed_tasks, vec!["planner", "coder"]);
    assert!(outcome.failed_tasks.is_empty());
    // The handoff message must travel planner -> coder for the coder task.
    let handoff_seen = outcome.messages.iter().any(|message| {
        message.from_role == AgentRole::Planner
            && message.to_role == AgentRole::Coder
            && message.task_id == "coder"
    });
    assert!(handoff_seen);
}
3287
#[test]
fn coordination_repair_runs_only_after_coder_failure() {
    // A coder forced to fail, followed by a repair task gated on it.
    let plan = CoordinationPlan {
        root_goal: "fix broken implementation".into(),
        primitive: CoordinationPrimitive::Sequential,
        tasks: vec![
            CoordinationTask {
                id: "coder".into(),
                role: AgentRole::Coder,
                description: "force-fail initial implementation".into(),
                depends_on: Vec::new(),
            },
            CoordinationTask {
                id: "repair".into(),
                role: AgentRole::Repair,
                description: "patch the failed implementation".into(),
                depends_on: vec!["coder".into()],
            },
        ],
        timeout_ms: 5_000,
        max_retries: 0,
    };
    let outcome = MultiAgentCoordinator::new().coordinate(plan);

    assert_eq!(outcome.completed_tasks, vec!["repair"]);
    assert_eq!(outcome.failed_tasks, vec!["coder"]);
    // The failure hand-off flows coder -> repair and targets the repair task.
    let handoff_seen = outcome.messages.iter().any(|message| {
        message.from_role == AgentRole::Coder
            && message.to_role == AgentRole::Repair
            && message.task_id == "repair"
    });
    assert!(handoff_seen);
}
3319
#[test]
fn coordination_optimizer_runs_after_successful_implementation_step() {
    // A working coder followed by an optimizer that depends on it.
    let plan = CoordinationPlan {
        root_goal: "ship optimized patch".into(),
        primitive: CoordinationPrimitive::Sequential,
        tasks: vec![
            CoordinationTask {
                id: "coder".into(),
                role: AgentRole::Coder,
                description: "implement a working patch".into(),
                depends_on: Vec::new(),
            },
            CoordinationTask {
                id: "optimizer".into(),
                role: AgentRole::Optimizer,
                description: "tighten the implementation".into(),
                depends_on: vec!["coder".into()],
            },
        ],
        timeout_ms: 5_000,
        max_retries: 0,
    };
    let outcome = MultiAgentCoordinator::new().coordinate(plan);

    // Both steps complete, in dependency order, with no failures.
    assert!(outcome.failed_tasks.is_empty());
    assert_eq!(outcome.completed_tasks, vec!["coder", "optimizer"]);
}
3346
#[test]
fn coordination_parallel_waves_preserve_sorted_merge_order() {
    // Two independent tasks plus one that waits on both. The first wave's
    // results should merge in sorted id order ("a-task" before "z-task").
    let plan = CoordinationPlan {
        root_goal: "parallelize safe tasks".into(),
        primitive: CoordinationPrimitive::Parallel,
        tasks: vec![
            CoordinationTask {
                id: "z-task".into(),
                role: AgentRole::Planner,
                description: "analyze z".into(),
                depends_on: Vec::new(),
            },
            CoordinationTask {
                id: "a-task".into(),
                role: AgentRole::Coder,
                description: "implement a".into(),
                depends_on: Vec::new(),
            },
            CoordinationTask {
                id: "mid-task".into(),
                role: AgentRole::Optimizer,
                description: "polish after both".into(),
                depends_on: vec!["z-task".into(), "a-task".into()],
            },
        ],
        timeout_ms: 5_000,
        max_retries: 0,
    };
    let outcome = MultiAgentCoordinator::new().coordinate(plan);

    assert!(outcome.failed_tasks.is_empty());
    assert_eq!(outcome.completed_tasks, vec!["a-task", "z-task", "mid-task"]);
}
3379
#[test]
fn coordination_retries_stop_at_max_retries() {
    // A single always-failing task with exactly one retry allowed.
    let plan = CoordinationPlan {
        root_goal: "retry then stop".into(),
        primitive: CoordinationPrimitive::Sequential,
        tasks: vec![CoordinationTask {
            id: "coder".into(),
            role: AgentRole::Coder,
            description: "force-fail this task".into(),
            depends_on: Vec::new(),
        }],
        timeout_ms: 5_000,
        max_retries: 1,
    };
    let outcome = MultiAgentCoordinator::new().coordinate(plan);

    assert!(outcome.completed_tasks.is_empty());
    assert_eq!(outcome.failed_tasks, vec!["coder"]);
    // One original attempt plus one retry => exactly two failure messages.
    let failure_messages = outcome
        .messages
        .iter()
        .filter(|message| message.task_id == "coder" && message.content.contains("failed"))
        .count();
    assert_eq!(failure_messages, 2);
}
3406
#[test]
fn coordination_conditional_mode_skips_downstream_tasks_on_failure() {
    // Conditional mode: a failing coder should cause its dependent
    // optimizer to be skipped, not executed or marked failed.
    let plan = CoordinationPlan {
        root_goal: "skip blocked follow-up work".into(),
        primitive: CoordinationPrimitive::Conditional,
        tasks: vec![
            CoordinationTask {
                id: "coder".into(),
                role: AgentRole::Coder,
                description: "force-fail the implementation".into(),
                depends_on: Vec::new(),
            },
            CoordinationTask {
                id: "optimizer".into(),
                role: AgentRole::Optimizer,
                description: "only optimize a successful implementation".into(),
                depends_on: vec!["coder".into()],
            },
        ],
        timeout_ms: 5_000,
        max_retries: 0,
    };
    let outcome = MultiAgentCoordinator::new().coordinate(plan);

    assert!(outcome.completed_tasks.is_empty());
    assert_eq!(outcome.failed_tasks, vec!["coder"]);
    // The optimizer is announced as skipped...
    let skip_notice = outcome.messages.iter().any(|message| {
        message.task_id == "optimizer"
            && message
                .content
                .contains("skipped due to failed dependency chain")
    });
    assert!(skip_notice);
    // ...and is not recorded as a failure itself.
    let optimizer_failed = outcome
        .failed_tasks
        .iter()
        .any(|task_id| task_id == "optimizer");
    assert!(!optimizer_failed);
}
3443
3444 #[tokio::test]
3445 async fn command_validator_aggregates_stage_reports() {
3446 let workspace = temp_workspace("validator");
3447 let receipt = SandboxReceipt {
3448 mutation_id: "m".into(),
3449 workdir: workspace,
3450 applied: true,
3451 changed_files: Vec::new(),
3452 patch_hash: "hash".into(),
3453 stdout_log: std::env::temp_dir().join("stdout.log"),
3454 stderr_log: std::env::temp_dir().join("stderr.log"),
3455 };
3456 let validator = CommandValidator::new(SandboxPolicy {
3457 allowed_programs: vec!["git".into()],
3458 max_duration_ms: 1_000,
3459 max_output_bytes: 1024,
3460 denied_env_prefixes: Vec::new(),
3461 });
3462 let report = validator
3463 .run(
3464 &receipt,
3465 &ValidationPlan {
3466 profile: "test".into(),
3467 stages: vec![ValidationStage::Command {
3468 program: "git".into(),
3469 args: vec!["--version".into()],
3470 timeout_ms: 1_000,
3471 }],
3472 },
3473 )
3474 .await
3475 .unwrap();
3476 assert_eq!(report.stages.len(), 1);
3477 }
3478
3479 #[tokio::test]
3480 async fn capture_successful_mutation_appends_capsule() {
3481 let (evo, store) = build_test_evo("capture", "run-1", command_validator());
3482 let capsule = evo
3483 .capture_successful_mutation(&"run-1".into(), sample_mutation())
3484 .await
3485 .unwrap();
3486 let events = store.scan(1).unwrap();
3487 assert!(events
3488 .iter()
3489 .any(|stored| matches!(stored.event, EvolutionEvent::CapsuleCommitted { .. })));
3490 assert!(!capsule.id.is_empty());
3491 }
3492
3493 #[tokio::test]
3494 async fn replay_hit_records_capsule_reused() {
3495 let (evo, store) = build_test_evo("replay", "run-2", command_validator());
3496 let capsule = evo
3497 .capture_successful_mutation(&"run-2".into(), sample_mutation())
3498 .await
3499 .unwrap();
3500 let replay_run_id = "run-replay".to_string();
3501 let decision = evo
3502 .replay_or_fallback_for_run(&replay_run_id, replay_input("missing readme"))
3503 .await
3504 .unwrap();
3505 assert!(decision.used_capsule);
3506 assert_eq!(decision.capsule_id, Some(capsule.id));
3507 assert!(store.scan(1).unwrap().iter().any(|stored| matches!(
3508 &stored.event,
3509 EvolutionEvent::CapsuleReused {
3510 run_id,
3511 replay_run_id: Some(current_replay_run_id),
3512 ..
3513 } if run_id == "run-2" && current_replay_run_id == &replay_run_id
3514 )));
3515 }
3516
#[tokio::test]
async fn legacy_replay_executor_api_preserves_original_capsule_run_id() {
    // Capture a capsule via the high-level API so the store holds an asset
    // attributed to `capture_run_id`.
    let capture_run_id = "run-legacy-capture".to_string();
    let (evo, store) = build_test_evo("replay-legacy", &capture_run_id, command_validator());
    let capsule = evo
        .capture_successful_mutation(&capture_run_id, sample_mutation())
        .await
        .unwrap();
    // Drive the replay through the lower-level StoreReplayExecutor, wired
    // from the same components the kernel itself holds.
    let executor = StoreReplayExecutor {
        sandbox: evo.sandbox.clone(),
        validator: evo.validator.clone(),
        store: evo.store.clone(),
        selector: evo.selector.clone(),
        governor: evo.governor.clone(),
        economics: Some(evo.economics.clone()),
        remote_publishers: Some(evo.remote_publishers.clone()),
        stake_policy: evo.stake_policy.clone(),
    };

    let decision = executor
        .try_replay(
            &replay_input("missing readme"),
            &evo.sandbox_policy,
            &evo.validation_plan,
        )
        .await
        .unwrap();

    assert!(decision.used_capsule);
    assert_eq!(decision.capsule_id, Some(capsule.id));
    // The legacy path supplies no replay run id, so the reuse event must
    // keep the original capture run id and record `replay_run_id: None`.
    assert!(store.scan(1).unwrap().iter().any(|stored| matches!(
        &stored.event,
        EvolutionEvent::CapsuleReused {
            run_id,
            replay_run_id: None,
            ..
        } if run_id == &capture_run_id
    )));
}
3556
#[tokio::test]
async fn metrics_snapshot_tracks_replay_promotion_and_revocation_signals() {
    let (evo, _) = build_test_evo("metrics", "run-metrics", command_validator());
    // One captured mutation...
    let capsule = evo
        .capture_successful_mutation(&"run-metrics".into(), sample_mutation())
        .await
        .unwrap();
    // ...one successful replay...
    let decision = evo
        .replay_or_fallback(replay_input("missing readme"))
        .await
        .unwrap();
    assert!(decision.used_capsule);

    // ...and one manual revocation of the captured capsule.
    evo.revoke_assets(&RevokeNotice {
        sender_id: "node-metrics".into(),
        asset_ids: vec![capsule.id.clone()],
        reason: "manual test revoke".into(),
    })
    .unwrap();

    // Every counter reflects exactly one of each action; after the revoke
    // no genes or capsules are counted as promoted.
    let snapshot = evo.metrics_snapshot().unwrap();
    assert_eq!(snapshot.replay_attempts_total, 1);
    assert_eq!(snapshot.replay_success_total, 1);
    assert_eq!(snapshot.replay_success_rate, 1.0);
    assert_eq!(snapshot.mutation_declared_total, 1);
    assert_eq!(snapshot.promoted_mutations_total, 1);
    assert_eq!(snapshot.promotion_ratio, 1.0);
    assert_eq!(snapshot.gene_revocations_total, 1);
    assert_eq!(snapshot.mutation_velocity_last_hour, 1);
    assert_eq!(snapshot.revoke_frequency_last_hour, 1);
    assert_eq!(snapshot.promoted_genes, 0);
    assert_eq!(snapshot.promoted_capsules, 0);

    // The Prometheus rendering exposes the same figures by metric name.
    let rendered = evo.render_metrics_prometheus().unwrap();
    assert!(rendered.contains("oris_evolution_replay_success_rate 1.000000"));
    assert!(rendered.contains("oris_evolution_promotion_ratio 1.000000"));
    assert!(rendered.contains("oris_evolution_revoke_frequency_last_hour 1"));
    assert!(rendered.contains("oris_evolution_mutation_velocity_last_hour 1"));
    assert!(rendered.contains("oris_evolution_health 1"));
}
3597
3598 #[tokio::test]
3599 async fn remote_replay_prefers_closest_environment_match() {
3600 let (evo, _) = build_test_evo("remote-env", "run-remote-env", command_validator());
3601 let input = replay_input("env-signal");
3602
3603 let envelope_a = remote_publish_envelope_with_env(
3604 "node-a",
3605 "run-remote-a",
3606 "gene-a",
3607 "capsule-a",
3608 "mutation-a",
3609 "env-signal",
3610 "A.md",
3611 "# from a",
3612 input.env.clone(),
3613 );
3614 let envelope_b = remote_publish_envelope_with_env(
3615 "node-b",
3616 "run-remote-b",
3617 "gene-b",
3618 "capsule-b",
3619 "mutation-b",
3620 "env-signal",
3621 "B.md",
3622 "# from b",
3623 EnvFingerprint {
3624 rustc_version: "old-rustc".into(),
3625 cargo_lock_hash: "other-lock".into(),
3626 target_triple: "aarch64-apple-darwin".into(),
3627 os: "linux".into(),
3628 },
3629 );
3630
3631 evo.import_remote_envelope(&envelope_a).unwrap();
3632 evo.import_remote_envelope(&envelope_b).unwrap();
3633
3634 let decision = evo.replay_or_fallback(input).await.unwrap();
3635
3636 assert!(decision.used_capsule);
3637 assert_eq!(decision.capsule_id, Some("capsule-a".into()));
3638 assert!(!decision.fallback_to_planner);
3639 }
3640
#[test]
fn remote_cold_start_scoring_caps_distinct_query_coverage() {
    let (evo, _) = build_test_evo("remote-score", "run-remote-score", command_validator());
    let input = replay_input("missing readme");

    // One envelope whose signal matches the query verbatim...
    let exact = remote_publish_envelope_with_signals(
        "node-exact",
        "run-remote-exact",
        "gene-exact",
        "capsule-exact",
        "mutation-exact",
        vec!["missing readme".into()],
        vec!["missing readme".into()],
        "EXACT.md",
        "# exact",
        input.env.clone(),
    );
    // ...and one whose signals cover the query only token by token.
    let overlapping = remote_publish_envelope_with_signals(
        "node-overlap",
        "run-remote-overlap",
        "gene-overlap",
        "capsule-overlap",
        "mutation-overlap",
        vec!["missing readme".into()],
        vec!["missing".into(), "readme".into()],
        "OVERLAP.md",
        "# overlap",
        input.env.clone(),
    );

    evo.import_remote_envelope(&exact).unwrap();
    evo.import_remote_envelope(&overlapping).unwrap();

    let candidates = quarantined_remote_exact_match_candidates(evo.store.as_ref(), &input);
    let score_of = |gene_id: &str| {
        candidates
            .iter()
            .find(|candidate| candidate.gene.id == gene_id)
            .unwrap()
            .score
    };

    // Coverage is capped: both candidates saturate at 1.0 and no candidate
    // can ever exceed the cap.
    assert_eq!(score_of("gene-exact"), 1.0);
    assert_eq!(score_of("gene-overlap"), 1.0);
    assert!(candidates.iter().all(|candidate| candidate.score <= 1.0));
}
3688
#[tokio::test]
async fn remote_capsule_stays_quarantined_until_first_successful_replay() {
    let (evo, store) = build_test_evo(
        "remote-quarantine",
        "run-remote-quarantine",
        command_validator(),
    );
    let envelope = remote_publish_envelope(
        "node-remote",
        "run-remote-quarantine",
        "gene-remote",
        "capsule-remote",
        "mutation-remote",
        "remote-signal",
        "REMOTE.md",
        "# from remote",
    );

    evo.import_remote_envelope(&envelope).unwrap();

    // Immediately after import both assets must sit in quarantine and be
    // excluded from the promoted-asset export.
    let before_replay = store.rebuild_projection().unwrap();
    let imported_gene = before_replay
        .genes
        .iter()
        .find(|gene| gene.id == "gene-remote")
        .unwrap();
    let imported_capsule = before_replay
        .capsules
        .iter()
        .find(|capsule| capsule.id == "capsule-remote")
        .unwrap();
    assert_eq!(imported_gene.state, AssetState::Quarantined);
    assert_eq!(imported_capsule.state, AssetState::Quarantined);
    let exported_before_replay =
        export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
    assert!(exported_before_replay.assets.is_empty());

    // First successful replay of the quarantined capsule.
    let decision = evo
        .replay_or_fallback(replay_input("remote-signal"))
        .await
        .unwrap();

    assert!(decision.used_capsule);
    assert_eq!(decision.capsule_id, Some("capsule-remote".into()));

    // The replay promotes both assets and makes them exportable — three
    // assets: the mutation-declared event, the gene, and the capsule.
    let after_replay = store.rebuild_projection().unwrap();
    let promoted_gene = after_replay
        .genes
        .iter()
        .find(|gene| gene.id == "gene-remote")
        .unwrap();
    let released_capsule = after_replay
        .capsules
        .iter()
        .find(|capsule| capsule.id == "capsule-remote")
        .unwrap();
    assert_eq!(promoted_gene.state, AssetState::Promoted);
    assert_eq!(released_capsule.state, AssetState::Promoted);
    let exported_after_replay =
        export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
    assert_eq!(exported_after_replay.assets.len(), 3);
    assert!(exported_after_replay.assets.iter().any(|asset| matches!(
        asset,
        NetworkAsset::EvolutionEvent {
            event: EvolutionEvent::MutationDeclared { .. }
        }
    )));
}
3757
3758 #[tokio::test]
3759 async fn publish_local_assets_include_mutation_payload_for_remote_replay() {
3760 let (source, source_store) = build_test_evo(
3761 "remote-publish-export",
3762 "run-remote-publish-export",
3763 command_validator(),
3764 );
3765 source
3766 .capture_successful_mutation(&"run-remote-publish-export".into(), sample_mutation())
3767 .await
3768 .unwrap();
3769 let envelope = EvolutionNetworkNode::new(source_store.clone())
3770 .publish_local_assets("node-source")
3771 .unwrap();
3772 assert!(envelope.assets.iter().any(|asset| matches!(
3773 asset,
3774 NetworkAsset::EvolutionEvent {
3775 event: EvolutionEvent::MutationDeclared { mutation }
3776 } if mutation.intent.id == "mutation-1"
3777 )));
3778
3779 let (remote, _) = build_test_evo(
3780 "remote-publish-import",
3781 "run-remote-publish-import",
3782 command_validator(),
3783 );
3784 remote.import_remote_envelope(&envelope).unwrap();
3785
3786 let decision = remote
3787 .replay_or_fallback(replay_input("missing readme"))
3788 .await
3789 .unwrap();
3790
3791 assert!(decision.used_capsule);
3792 assert!(!decision.fallback_to_planner);
3793 }
3794
3795 #[tokio::test]
3796 async fn fetch_assets_include_mutation_payload_for_remote_replay() {
3797 let (evo, store) = build_test_evo(
3798 "remote-fetch-export",
3799 "run-remote-fetch",
3800 command_validator(),
3801 );
3802 evo.capture_successful_mutation(&"run-remote-fetch".into(), sample_mutation())
3803 .await
3804 .unwrap();
3805
3806 let response = EvolutionNetworkNode::new(store.clone())
3807 .fetch_assets(
3808 "node-source",
3809 &FetchQuery {
3810 sender_id: "node-client".into(),
3811 signals: vec!["missing readme".into()],
3812 },
3813 )
3814 .unwrap();
3815
3816 assert!(response.assets.iter().any(|asset| matches!(
3817 asset,
3818 NetworkAsset::EvolutionEvent {
3819 event: EvolutionEvent::MutationDeclared { mutation }
3820 } if mutation.intent.id == "mutation-1"
3821 )));
3822 assert!(response
3823 .assets
3824 .iter()
3825 .any(|asset| matches!(asset, NetworkAsset::Gene { .. })));
3826 assert!(response
3827 .assets
3828 .iter()
3829 .any(|asset| matches!(asset, NetworkAsset::Capsule { .. })));
3830 }
3831
#[test]
fn partial_remote_import_keeps_publisher_for_already_imported_assets() {
    let store_root = std::env::temp_dir().join(format!(
        "oris-evokernel-remote-partial-store-{}",
        std::process::id()
    ));
    if store_root.exists() {
        fs::remove_dir_all(&store_root).unwrap();
    }
    // Fail the fourth append so the envelope is only partially persisted:
    // the gene lands in the store but the capsule does not.
    let store: Arc<dyn EvolutionStore> = Arc::new(FailOnAppendStore::new(store_root, 4));
    let evo = build_test_evo_with_store(
        "remote-partial",
        "run-remote-partial",
        command_validator(),
        store.clone(),
    );
    let envelope = remote_publish_envelope(
        "node-partial",
        "run-remote-partial",
        "gene-partial",
        "capsule-partial",
        "mutation-partial",
        "partial-signal",
        "PARTIAL.md",
        "# partial",
    );

    let result = evo.import_remote_envelope(&envelope);
    assert!(matches!(result, Err(EvoKernelError::Store(_))));

    let projection = store.rebuild_projection().unwrap();
    let gene_imported = projection
        .genes
        .iter()
        .any(|gene| gene.id == "gene-partial");
    assert!(gene_imported);
    assert!(projection.capsules.is_empty());
    // Publisher attribution for the gene that did make it in is retained
    // even though the import as a whole failed.
    let publishers = evo.remote_publishers.lock().unwrap();
    assert_eq!(
        publishers.get("gene-partial").map(String::as_str),
        Some("node-partial")
    );
}
3874
3875 #[tokio::test]
3876 async fn insufficient_evu_blocks_publish_but_not_local_replay() {
3877 let (evo, _) = build_test_evo("stake-gate", "run-stake", command_validator());
3878 let capsule = evo
3879 .capture_successful_mutation(&"run-stake".into(), sample_mutation())
3880 .await
3881 .unwrap();
3882 let publish = evo.export_promoted_assets("node-local");
3883 assert!(matches!(publish, Err(EvoKernelError::Validation(_))));
3884
3885 let decision = evo
3886 .replay_or_fallback(replay_input("missing readme"))
3887 .await
3888 .unwrap();
3889 assert!(decision.used_capsule);
3890 assert_eq!(decision.capsule_id, Some(capsule.id));
3891 }
3892
// NOTE(review): the name claims the gene is revoked, but the assertions
// below verify the opposite — the gene and capsule remain Promoted and no
// GeneRevoked event is recorded. Confirm the intended policy and consider
// renaming the test to match what it actually verifies.
#[tokio::test]
async fn second_replay_validation_failure_revokes_gene_immediately() {
    let (capturer, store) = build_test_evo("revoke-replay", "run-capture", command_validator());
    let capsule = capturer
        .capture_successful_mutation(&"run-capture".into(), sample_mutation())
        .await
        .unwrap();

    // A second kernel sharing the same store, but whose validator always
    // fails, attempts two replays of the captured capsule.
    let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
    let failing_replay = build_test_evo_with_store(
        "revoke-replay",
        "run-replay-fail",
        failing_validator,
        store.clone(),
    );

    let first = failing_replay
        .replay_or_fallback(replay_input("missing readme"))
        .await
        .unwrap();
    let second = failing_replay
        .replay_or_fallback(replay_input("missing readme"))
        .await
        .unwrap();

    // Both attempts fall back to the planner.
    assert!(!first.used_capsule);
    assert!(first.fallback_to_planner);
    assert!(!second.used_capsule);
    assert!(second.fallback_to_planner);

    // The assets themselves remain promoted despite the failed replays.
    let projection = store.rebuild_projection().unwrap();
    let gene = projection
        .genes
        .iter()
        .find(|gene| gene.id == capsule.gene_id)
        .unwrap();
    assert_eq!(gene.state, AssetState::Promoted);
    let committed_capsule = projection
        .capsules
        .iter()
        .find(|current| current.id == capsule.id)
        .unwrap();
    assert_eq!(committed_capsule.state, AssetState::Promoted);

    // Exactly one ValidationFailed event is logged for the gene —
    // presumably the second attempt is rejected before validation once the
    // gene's success rate drops; confirm against the replay selector.
    let events = store.scan(1).unwrap();
    assert_eq!(
        events
            .iter()
            .filter(|stored| {
                matches!(
                    &stored.event,
                    EvolutionEvent::ValidationFailed {
                        gene_id: Some(gene_id),
                        ..
                    } if gene_id == &capsule.gene_id
                )
            })
            .count(),
        1
    );
    // No revocation event is ever emitted for the gene.
    assert!(!events.iter().any(|stored| {
        matches!(
            &stored.event,
            EvolutionEvent::GeneRevoked { gene_id, .. } if gene_id == &capsule.gene_id
        )
    }));

    // A fresh kernel with a passing validator still refuses to replay the
    // capsule: its recorded success rate is now below the replay threshold.
    let recovered = build_test_evo_with_store(
        "revoke-replay",
        "run-replay-check",
        command_validator(),
        store.clone(),
    );
    let after_revoke = recovered
        .replay_or_fallback(replay_input("missing readme"))
        .await
        .unwrap();
    assert!(!after_revoke.used_capsule);
    assert!(after_revoke.fallback_to_planner);
    assert!(after_revoke.reason.contains("below replay threshold"));
}
3974
#[tokio::test]
async fn remote_reuse_success_rewards_publisher_and_biases_selection() {
    // Seed the ledger with two publishers: node-a with weak reputation and
    // no reuse impact, node-b with strong reputation and prior reuse impact.
    let ledger = Arc::new(Mutex::new(EvuLedger {
        accounts: vec![],
        reputations: vec![
            oris_economics::ReputationRecord {
                node_id: "node-a".into(),
                publish_success_rate: 0.4,
                validator_accuracy: 0.4,
                reuse_impact: 0,
            },
            oris_economics::ReputationRecord {
                node_id: "node-b".into(),
                publish_success_rate: 0.95,
                validator_accuracy: 0.95,
                reuse_impact: 8,
            },
        ],
    }));
    let (evo, _) = build_test_evo("remote-success", "run-remote", command_validator());
    let evo = evo.with_economics(ledger.clone());

    // Both nodes publish a candidate for the same signal.
    let envelope_a = remote_publish_envelope(
        "node-a",
        "run-remote-a",
        "gene-a",
        "capsule-a",
        "mutation-a",
        "shared-signal",
        "A.md",
        "# from a",
    );
    let envelope_b = remote_publish_envelope(
        "node-b",
        "run-remote-b",
        "gene-b",
        "capsule-b",
        "mutation-b",
        "shared-signal",
        "B.md",
        "# from b",
    );

    evo.import_remote_envelope(&envelope_a).unwrap();
    evo.import_remote_envelope(&envelope_b).unwrap();

    let decision = evo
        .replay_or_fallback(replay_input("shared-signal"))
        .await
        .unwrap();

    // Selection picks the higher-reputation publisher's capsule...
    assert!(decision.used_capsule);
    assert_eq!(decision.capsule_id, Some("capsule-b".into()));
    // ...and the successful reuse credits node-b with the configured reuse
    // reward while keeping its selector bias above node-a's.
    let locked = ledger.lock().unwrap();
    let rewarded = locked
        .accounts
        .iter()
        .find(|item| item.node_id == "node-b")
        .unwrap();
    assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
    assert!(
        locked.selector_reputation_bias()["node-b"]
            > locked.selector_reputation_bias()["node-a"]
    );
}
4040
4041 #[tokio::test]
4042 async fn remote_reuse_failure_penalizes_remote_reputation() {
4043 let ledger = Arc::new(Mutex::new(EvuLedger::default()));
4044 let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
4045 let (evo, _) = build_test_evo("remote-failure", "run-failure", failing_validator);
4046 let evo = evo.with_economics(ledger.clone());
4047
4048 let envelope = remote_publish_envelope(
4049 "node-remote",
4050 "run-remote-failed",
4051 "gene-remote",
4052 "capsule-remote",
4053 "mutation-remote",
4054 "failure-signal",
4055 "FAILED.md",
4056 "# from remote",
4057 );
4058 evo.import_remote_envelope(&envelope).unwrap();
4059
4060 let decision = evo
4061 .replay_or_fallback(replay_input("failure-signal"))
4062 .await
4063 .unwrap();
4064
4065 assert!(!decision.used_capsule);
4066 assert!(decision.fallback_to_planner);
4067
4068 let signal = evo.economics_signal("node-remote").unwrap();
4069 assert_eq!(signal.available_evu, 0);
4070 assert!(signal.publish_success_rate < 0.5);
4071 assert!(signal.validator_accuracy < 0.5);
4072 }
4073}