1use std::collections::{BTreeMap, BTreeSet};
4use std::fs;
5use std::path::Path;
6use std::process::Command;
7use std::sync::{Arc, Mutex};
8
9use async_trait::async_trait;
10use chrono::{DateTime, Duration, Utc};
11use oris_agent_contract::{
12 AgentRole, BoundedTaskClass, CoordinationMessage, CoordinationPlan, CoordinationPrimitive,
13 CoordinationResult, CoordinationTask, ExecutionFeedback,
14 MutationProposal as AgentMutationProposal, ReplayFeedback, ReplayPlannerDirective,
15 SupervisedDevloopOutcome, SupervisedDevloopRequest, SupervisedDevloopStatus,
16};
17use oris_economics::{EconomicsSignal, EvuLedger, StakePolicy};
18use oris_evolution::{
19 compute_artifact_hash, decayed_replay_confidence, next_id, stable_hash_json, AssetState,
20 BlastRadius, CandidateSource, Capsule, CapsuleId, EnvFingerprint, EvolutionError,
21 EvolutionEvent, EvolutionProjection, EvolutionStore, Gene, GeneCandidate, MutationId,
22 PreparedMutation, Selector, SelectorInput, StoreBackedSelector, StoredEvolutionEvent,
23 ValidationSnapshot, MIN_REPLAY_CONFIDENCE,
24};
25use oris_evolution_network::{EvolutionEnvelope, NetworkAsset};
26use oris_governor::{DefaultGovernor, Governor, GovernorDecision, GovernorInput};
27use oris_kernel::{Kernel, KernelState, RunId};
28use oris_sandbox::{
29 compute_blast_radius, execute_allowed_command, Sandbox, SandboxPolicy, SandboxReceipt,
30};
31use oris_spec::CompiledMutationPlan;
32use serde::{Deserialize, Serialize};
33use thiserror::Error;
34
35pub use oris_evolution::{
36 default_store_root, ArtifactEncoding, AssetState as EvoAssetState,
37 BlastRadius as EvoBlastRadius, CandidateSource as EvoCandidateSource,
38 EnvFingerprint as EvoEnvFingerprint, EvolutionStore as EvoEvolutionStore, JsonlEvolutionStore,
39 MutationArtifact, MutationIntent, MutationTarget, Outcome, RiskLevel,
40 SelectorInput as EvoSelectorInput,
41};
42pub use oris_evolution_network::{
43 FetchQuery, FetchResponse, MessageType, PublishRequest, RevokeNotice,
44};
45pub use oris_governor::{CoolingWindow, GovernorConfig, RevocationReason};
46pub use oris_sandbox::{LocalProcessSandbox, SandboxPolicy as EvoSandboxPolicy};
47pub use oris_spec::{SpecCompileError, SpecCompiler, SpecDocument};
48
/// A named, ordered list of validation stages; the profile name identifies
/// which pipeline produced a given `ValidationSnapshot`.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationPlan {
    /// Human-readable identifier for this validation profile.
    pub profile: String,
    /// Stages executed in order; a failing stage stops the run.
    pub stages: Vec<ValidationStage>,
}
54
55impl ValidationPlan {
56 pub fn oris_default() -> Self {
57 Self {
58 profile: "oris-default".into(),
59 stages: vec![
60 ValidationStage::Command {
61 program: "cargo".into(),
62 args: vec!["fmt".into(), "--all".into(), "--check".into()],
63 timeout_ms: 60_000,
64 },
65 ValidationStage::Command {
66 program: "cargo".into(),
67 args: vec!["check".into(), "--workspace".into()],
68 timeout_ms: 180_000,
69 },
70 ValidationStage::Command {
71 program: "cargo".into(),
72 args: vec![
73 "test".into(),
74 "-p".into(),
75 "oris-kernel".into(),
76 "-p".into(),
77 "oris-evolution".into(),
78 "-p".into(),
79 "oris-sandbox".into(),
80 "-p".into(),
81 "oris-evokernel".into(),
82 "--lib".into(),
83 ],
84 timeout_ms: 300_000,
85 },
86 ValidationStage::Command {
87 program: "cargo".into(),
88 args: vec![
89 "test".into(),
90 "-p".into(),
91 "oris-runtime".into(),
92 "--lib".into(),
93 ],
94 timeout_ms: 300_000,
95 },
96 ],
97 }
98 }
99}
100
/// A single validation step. Currently only external command execution is
/// supported.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum ValidationStage {
    /// Run `program` with `args`; `timeout_ms` is the time budget handed to
    /// the sandbox command executor.
    Command {
        program: String,
        args: Vec<String>,
        timeout_ms: u64,
    },
}
109
/// Result of one validation stage.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationStageReport {
    /// Display label for the stage (program plus arguments).
    pub stage: String,
    /// Whether the stage passed.
    pub success: bool,
    /// Process exit code; `None` when the command could not be executed.
    pub exit_code: Option<i32>,
    /// Wall-clock duration of the stage in milliseconds.
    pub duration_ms: u64,
    /// Captured standard output.
    pub stdout: String,
    /// Captured standard error (also carries execution-error text).
    pub stderr: String,
}
119
/// Aggregate result of running a `ValidationPlan`.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationReport {
    /// True only when every executed stage passed.
    pub success: bool,
    /// Total wall-clock duration in milliseconds.
    pub duration_ms: u64,
    /// Per-stage reports, in execution order.
    pub stages: Vec<ValidationStageReport>,
    /// Concatenated stdout/stderr of all executed stages.
    pub logs: String,
}
127
/// Inputs to `extract_deterministic_signals`: everything known about a
/// mutation and its validation run that can contribute signal tokens.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionInput {
    /// Unified diff of the mutation.
    pub patch_diff: String,
    /// Stated intent of the mutation.
    pub intent: String,
    /// Expected effect description.
    pub expected_effect: String,
    /// Signals declared up front by the author/planner.
    pub declared_signals: Vec<String>,
    /// Paths touched by the mutation.
    pub changed_files: Vec<String>,
    /// Whether validation passed.
    pub validation_success: bool,
    /// Combined validation log output.
    pub validation_logs: String,
    /// Raw per-stage outputs.
    pub stage_outputs: Vec<String>,
}
139
/// Output of `extract_deterministic_signals`.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionOutput {
    /// Deduplicated, lexicographically ordered signal values (capped at 32).
    pub values: Vec<String>,
    /// Stable hash over `values`, suitable for matching/lookup.
    pub hash: String,
}
145
/// A pre-built mutation template used to seed the evolution store.
/// NOTE(review): the consumer of these templates is not visible in this
/// chunk — confirm against the bootstrap code path.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SeedTemplate {
    /// Stable identifier for the seed.
    pub id: String,
    /// Intent description associated with the seeded mutation.
    pub intent: String,
    /// Declared signals for the seed.
    pub signals: Vec<String>,
    /// Diff payload applied when the seed is materialized.
    pub diff_payload: String,
    /// Name of the validation profile to run against the seed.
    pub validation_profile: String,
}
154
/// Summary of a store-bootstrap (seeding) pass.
/// NOTE(review): the producer is not visible in this chunk.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct BootstrapReport {
    /// Whether any seeding actually took place.
    pub seeded: bool,
    /// Number of genes added by the pass.
    pub genes_added: usize,
    /// Number of capsules added by the pass.
    pub capsules_added: usize,
}
161
// Retention cap for reported experiences. NOTE(review): the consumer of this
// constant is not visible in this chunk — confirm against its usage site.
const REPORTED_EXPERIENCE_RETENTION_LIMIT: usize = 3;
163
164impl ValidationReport {
165 pub fn to_snapshot(&self, profile: &str) -> ValidationSnapshot {
166 ValidationSnapshot {
167 success: self.success,
168 profile: profile.to_string(),
169 duration_ms: self.duration_ms,
170 summary: if self.success {
171 "validation passed".into()
172 } else {
173 "validation failed".into()
174 },
175 }
176 }
177}
178
179pub fn extract_deterministic_signals(input: &SignalExtractionInput) -> SignalExtractionOutput {
180 let mut signals = BTreeSet::new();
181
182 for declared in &input.declared_signals {
183 if let Some(phrase) = normalize_signal_phrase(declared) {
184 signals.insert(phrase);
185 }
186 extend_signal_tokens(&mut signals, declared);
187 }
188
189 for text in [
190 input.patch_diff.as_str(),
191 input.intent.as_str(),
192 input.expected_effect.as_str(),
193 input.validation_logs.as_str(),
194 ] {
195 extend_signal_tokens(&mut signals, text);
196 }
197
198 for changed_file in &input.changed_files {
199 extend_signal_tokens(&mut signals, changed_file);
200 }
201
202 for stage_output in &input.stage_outputs {
203 extend_signal_tokens(&mut signals, stage_output);
204 }
205
206 signals.insert(if input.validation_success {
207 "validation passed".into()
208 } else {
209 "validation failed".into()
210 });
211
212 let values = signals.into_iter().take(32).collect::<Vec<_>>();
213 let hash =
214 stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
215 SignalExtractionOutput { values, hash }
216}
217
/// Errors surfaced by a [`Validator`].
#[derive(Debug, Error)]
pub enum ValidationError {
    /// The validation run itself could not be executed.
    #[error("validation execution failed: {0}")]
    Execution(String),
}
223
/// Runs a validation plan against an applied mutation (identified by its
/// sandbox receipt) and reports the outcome.
#[async_trait]
pub trait Validator: Send + Sync {
    /// Executes `plan` in the sandbox described by `receipt`.
    async fn run(
        &self,
        receipt: &SandboxReceipt,
        plan: &ValidationPlan,
    ) -> Result<ValidationReport, ValidationError>;
}
232
/// [`Validator`] implementation that shells out to allow-listed commands
/// inside the sandbox working directory.
pub struct CommandValidator {
    // Sandbox policy governing which commands may be executed.
    policy: SandboxPolicy,
}
236
impl CommandValidator {
    /// Creates a validator bound to the given sandbox policy.
    pub fn new(policy: SandboxPolicy) -> Self {
        Self { policy }
    }
}
242
243#[async_trait]
244impl Validator for CommandValidator {
245 async fn run(
246 &self,
247 receipt: &SandboxReceipt,
248 plan: &ValidationPlan,
249 ) -> Result<ValidationReport, ValidationError> {
250 let started = std::time::Instant::now();
251 let mut stages = Vec::new();
252 let mut success = true;
253 let mut logs = String::new();
254
255 for stage in &plan.stages {
256 match stage {
257 ValidationStage::Command {
258 program,
259 args,
260 timeout_ms,
261 } => {
262 let result = execute_allowed_command(
263 &self.policy,
264 &receipt.workdir,
265 program,
266 args,
267 *timeout_ms,
268 )
269 .await;
270 let report = match result {
271 Ok(output) => ValidationStageReport {
272 stage: format!("{program} {}", args.join(" ")),
273 success: output.success,
274 exit_code: output.exit_code,
275 duration_ms: output.duration_ms,
276 stdout: output.stdout,
277 stderr: output.stderr,
278 },
279 Err(err) => ValidationStageReport {
280 stage: format!("{program} {}", args.join(" ")),
281 success: false,
282 exit_code: None,
283 duration_ms: 0,
284 stdout: String::new(),
285 stderr: err.to_string(),
286 },
287 };
288 if !report.success {
289 success = false;
290 }
291 if !report.stdout.is_empty() {
292 logs.push_str(&report.stdout);
293 logs.push('\n');
294 }
295 if !report.stderr.is_empty() {
296 logs.push_str(&report.stderr);
297 logs.push('\n');
298 }
299 stages.push(report);
300 if !success {
301 break;
302 }
303 }
304 }
305 }
306
307 Ok(ValidationReport {
308 success,
309 duration_ms: started.elapsed().as_millis() as u64,
310 stages,
311 logs,
312 })
313 }
314}
315
/// Outcome of a replay attempt.
#[derive(Clone, Debug)]
pub struct ReplayDecision {
    /// True when a stored capsule was successfully replayed.
    pub used_capsule: bool,
    /// The capsule that was considered, when one was selected (set on some
    /// failure paths too).
    pub capsule_id: Option<CapsuleId>,
    /// True when the caller should fall back to planner-driven execution.
    pub fallback_to_planner: bool,
    /// Human-readable explanation of the decision.
    pub reason: String,
}
323
/// Aggregated replay metrics for one bounded task class.
/// NOTE(review): the producer/consumer of these metrics is not visible in
/// this chunk.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct ReplayTaskClassMetrics {
    /// Identifier of the task class.
    pub task_class_id: String,
    /// Display label for the task class.
    pub task_label: String,
    /// Total successful replays recorded for the class.
    pub replay_success_total: u64,
    /// Total reasoning steps avoided by replaying instead of planning.
    pub reasoning_steps_avoided_total: u64,
}
331
/// Schedulability of a single coordination task, as computed by
/// `MultiAgentCoordinator::classify_task`.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum CoordinationTaskState {
    /// All prerequisites are satisfied; the task can run now.
    Ready,
    /// At least one dependency has not completed yet.
    Waiting,
    /// A dependency failed or was skipped, blocking this task.
    BlockedByFailure,
    /// Prerequisites can never be satisfied (unknown dependency id, or a
    /// role-specific precondition that cannot hold).
    PermanentlyBlocked,
}
339
/// Stateless scheduler that executes a `CoordinationPlan` of role-tagged,
/// dependency-ordered tasks.
#[derive(Clone, Debug, Default)]
pub struct MultiAgentCoordinator;
342
impl MultiAgentCoordinator {
    /// Creates a coordinator; the type is stateless, so this is free.
    pub fn new() -> Self {
        Self
    }

    /// Drives `plan` to completion and reports which tasks completed, which
    /// failed, and every message exchanged along the way.
    ///
    /// Scheduling loop per round:
    /// * `Conditional` plans first move tasks whose dependency chain already
    ///   failed from pending to skipped.
    /// * All ready tasks are executed (simulated); `Sequential` plans run at
    ///   most one ready task per round.
    /// * A failing task is retried until `max_retries` is exhausted.
    /// * When no task is ready the loop ends; anything still pending is
    ///   classified, explained in a message, and marked failed.
    ///
    /// NOTE(review): `timeout_ms` is only echoed in the summary string; no
    /// timeout is enforced in this method.
    pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
        let primitive = plan.primitive.clone();
        let root_goal = plan.root_goal.clone();
        let timeout_ms = plan.timeout_ms;
        let max_retries = plan.max_retries;
        let mut tasks = BTreeMap::new();
        for task in plan.tasks {
            // First occurrence of a duplicated task id wins.
            tasks.entry(task.id.clone()).or_insert(task);
        }

        let mut pending = tasks.keys().cloned().collect::<BTreeSet<_>>();
        let mut completed = BTreeSet::new();
        let mut failed = BTreeSet::new();
        // *_order vectors preserve completion/failure order; the sets give
        // O(log n) membership tests.
        let mut completed_order = Vec::new();
        let mut failed_order = Vec::new();
        let mut skipped = BTreeSet::new();
        // Per-task failure counts, for retry accounting.
        let mut attempts = BTreeMap::new();
        let mut messages = Vec::new();

        loop {
            if matches!(primitive, CoordinationPrimitive::Conditional) {
                self.apply_conditional_skips(
                    &tasks,
                    &mut pending,
                    &completed,
                    &failed,
                    &mut skipped,
                    &mut messages,
                );
            }

            let mut ready = self.ready_task_ids(&tasks, &pending, &completed, &failed, &skipped);
            if ready.is_empty() {
                break;
            }
            if matches!(primitive, CoordinationPrimitive::Sequential) {
                // Sequential coordination executes one task per round.
                ready.truncate(1);
            }

            for task_id in ready {
                let Some(task) = tasks.get(&task_id) else {
                    continue;
                };
                if !pending.contains(&task_id) {
                    continue;
                }
                self.record_handoff_messages(task, &tasks, &completed, &failed, &mut messages);

                let prior_failures = attempts.get(&task_id).copied().unwrap_or(0);
                if Self::simulate_task_failure(task, prior_failures) {
                    let failure_count = prior_failures + 1;
                    attempts.insert(task_id.clone(), failure_count);
                    let will_retry = failure_count <= max_retries;
                    messages.push(CoordinationMessage {
                        from_role: task.role.clone(),
                        to_role: task.role.clone(),
                        task_id: task_id.clone(),
                        content: if will_retry {
                            format!("task {task_id} failed on attempt {failure_count} and will retry")
                        } else {
                            format!(
                                "task {task_id} failed on attempt {failure_count} and exhausted retries"
                            )
                        },
                    });
                    if !will_retry {
                        // Retries exhausted: move from pending to failed.
                        pending.remove(&task_id);
                        if failed.insert(task_id.clone()) {
                            failed_order.push(task_id);
                        }
                    }
                    // Retrying tasks stay pending and are re-picked later.
                    continue;
                }

                pending.remove(&task_id);
                if completed.insert(task_id.clone()) {
                    completed_order.push(task_id);
                }
            }
        }

        // Everything still pending is unschedulable: explain why and mark it
        // failed so the caller gets a complete accounting.
        let blocked_ids = pending.into_iter().collect::<Vec<_>>();
        for task_id in blocked_ids {
            let Some(task) = tasks.get(&task_id) else {
                continue;
            };
            let state = self.classify_task(task, &tasks, &completed, &failed, &skipped);
            let content = match state {
                CoordinationTaskState::BlockedByFailure => {
                    format!("task {task_id} blocked by failed dependencies")
                }
                CoordinationTaskState::PermanentlyBlocked => {
                    format!("task {task_id} has invalid coordination prerequisites")
                }
                CoordinationTaskState::Waiting => {
                    format!("task {task_id} has unresolved dependencies")
                }
                CoordinationTaskState::Ready => {
                    // Should be unreachable: ready tasks are drained above.
                    format!("task {task_id} was left pending unexpectedly")
                }
            };
            messages.push(CoordinationMessage {
                from_role: task.role.clone(),
                to_role: task.role.clone(),
                task_id: task_id.clone(),
                content,
            });
            if failed.insert(task_id.clone()) {
                failed_order.push(task_id);
            }
        }

        CoordinationResult {
            completed_tasks: completed_order,
            failed_tasks: failed_order,
            messages,
            summary: format!(
                "goal '{}' completed {} tasks, failed {}, skipped {} using {:?} coordination (timeout={}ms, max_retries={})",
                root_goal,
                completed.len(),
                failed.len(),
                skipped.len(),
                primitive,
                timeout_ms,
                max_retries
            ),
        }
    }

    /// Returns the pending task ids that are currently `Ready`, in the
    /// pending set's (sorted) iteration order.
    fn ready_task_ids(
        &self,
        tasks: &BTreeMap<String, CoordinationTask>,
        pending: &BTreeSet<String>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &BTreeSet<String>,
    ) -> Vec<String> {
        pending
            .iter()
            .filter_map(|task_id| {
                let task = tasks.get(task_id)?;
                (self.classify_task(task, tasks, completed, failed, skipped)
                    == CoordinationTaskState::Ready)
                    .then(|| task_id.clone())
            })
            .collect()
    }

    /// For `Conditional` plans: moves tasks whose dependency chain has failed
    /// from pending to skipped, recording one message per skip.
    fn apply_conditional_skips(
        &self,
        tasks: &BTreeMap<String, CoordinationTask>,
        pending: &mut BTreeSet<String>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &mut BTreeSet<String>,
        messages: &mut Vec<CoordinationMessage>,
    ) {
        // Collect first, then mutate, to avoid aliasing `pending`/`skipped`
        // while iterating.
        let skip_ids = pending
            .iter()
            .filter_map(|task_id| {
                let task = tasks.get(task_id)?;
                (self.classify_task(task, tasks, completed, failed, skipped)
                    == CoordinationTaskState::BlockedByFailure)
                    .then(|| task_id.clone())
            })
            .collect::<Vec<_>>();

        for task_id in skip_ids {
            let Some(task) = tasks.get(&task_id) else {
                continue;
            };
            pending.remove(&task_id);
            skipped.insert(task_id.clone());
            messages.push(CoordinationMessage {
                from_role: task.role.clone(),
                to_role: task.role.clone(),
                task_id: task_id.clone(),
                content: format!("task {task_id} skipped due to failed dependency chain"),
            });
        }
    }

    /// Classifies a pending task's schedulability from its role and the
    /// states of its dependencies:
    /// * Planner/Coder: ready once every dependency completed; blocked if any
    ///   dependency failed or was skipped; permanently blocked on an unknown
    ///   dependency id.
    /// * Repair: must depend on at least one Coder task and becomes ready
    ///   only after a Coder dependency has failed (otherwise there is nothing
    ///   to repair).
    /// * Optimizer: must depend on at least one implementation task
    ///   (Coder/Repair) and is ready only when one of them completed; if all
    ///   implementation dependencies failed it is blocked by failure.
    fn classify_task(
        &self,
        task: &CoordinationTask,
        tasks: &BTreeMap<String, CoordinationTask>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &BTreeSet<String>,
    ) -> CoordinationTaskState {
        match task.role {
            AgentRole::Planner | AgentRole::Coder => {
                let mut waiting = false;
                for dependency_id in &task.depends_on {
                    if !tasks.contains_key(dependency_id) {
                        return CoordinationTaskState::PermanentlyBlocked;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if waiting {
                    CoordinationTaskState::Waiting
                } else {
                    CoordinationTaskState::Ready
                }
            }
            AgentRole::Repair => {
                let mut waiting = false;
                let mut has_coder_dependency = false;
                let mut has_failed_coder = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_coder = matches!(dependency.role, AgentRole::Coder);
                    if is_coder {
                        has_coder_dependency = true;
                    }
                    if skipped.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if failed.contains(dependency_id) {
                        // A failed Coder is the trigger for repair work; any
                        // other failed dependency blocks the task.
                        if is_coder {
                            has_failed_coder = true;
                        } else {
                            return CoordinationTaskState::BlockedByFailure;
                        }
                        continue;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if !has_coder_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_failed_coder {
                    CoordinationTaskState::Ready
                } else {
                    // All Coder dependencies succeeded: repair has no purpose.
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
            AgentRole::Optimizer => {
                let mut waiting = false;
                let mut has_impl_dependency = false;
                let mut has_completed_impl = false;
                let mut has_failed_impl = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_impl = matches!(dependency.role, AgentRole::Coder | AgentRole::Repair);
                    if is_impl {
                        has_impl_dependency = true;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        // Implementation failures are tolerated as long as
                        // some other implementation dependency completed.
                        if is_impl {
                            has_failed_impl = true;
                            continue;
                        }
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if completed.contains(dependency_id) {
                        if is_impl {
                            has_completed_impl = true;
                        }
                        continue;
                    }
                    waiting = true;
                }
                if !has_impl_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_completed_impl {
                    CoordinationTaskState::Ready
                } else if has_failed_impl {
                    CoordinationTaskState::BlockedByFailure
                } else {
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
        }
    }

    /// Emits one handoff message per distinct dependency of `task`, noting
    /// whether the dependency completed or failed. Dependencies are sorted
    /// and deduplicated so the message order is deterministic.
    fn record_handoff_messages(
        &self,
        task: &CoordinationTask,
        tasks: &BTreeMap<String, CoordinationTask>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        messages: &mut Vec<CoordinationMessage>,
    ) {
        let mut dependency_ids = task.depends_on.clone();
        dependency_ids.sort();
        dependency_ids.dedup();

        for dependency_id in dependency_ids {
            let Some(dependency) = tasks.get(&dependency_id) else {
                continue;
            };
            if completed.contains(&dependency_id) {
                messages.push(CoordinationMessage {
                    from_role: dependency.role.clone(),
                    to_role: task.role.clone(),
                    task_id: task.id.clone(),
                    content: format!("handoff from {dependency_id} to {}", task.id),
                });
            } else if failed.contains(&dependency_id) {
                messages.push(CoordinationMessage {
                    from_role: dependency.role.clone(),
                    to_role: task.role.clone(),
                    task_id: task.id.clone(),
                    content: format!("failed dependency {dependency_id} routed to {}", task.id),
                });
            }
        }
    }

    /// Deterministic failure simulation keyed off the task description:
    /// "force-fail" always fails; "fail-once" fails only on the first
    /// attempt (case-insensitive match).
    fn simulate_task_failure(task: &CoordinationTask, prior_failures: u32) -> bool {
        let normalized = task.description.to_ascii_lowercase();
        normalized.contains("force-fail")
            || (normalized.contains("fail-once") && prior_failures == 0)
    }
}
678
/// Errors surfaced by a [`ReplayExecutor`].
#[derive(Debug, Error)]
pub enum ReplayError {
    /// The evolution store could not be read or written.
    #[error("store error: {0}")]
    Store(String),
    /// The sandbox failed outside of a normal patch-apply failure.
    #[error("sandbox error: {0}")]
    Sandbox(String),
    /// Validation could not be executed (not a mere failing validation).
    #[error("validation error: {0}")]
    Validation(String),
}
688
/// Attempts to satisfy a task by replaying a previously validated capsule
/// instead of invoking the planner.
#[async_trait]
pub trait ReplayExecutor: Send + Sync {
    /// Tries to replay a matching capsule; the returned decision says whether
    /// a capsule was used or the caller should fall back to the planner.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError>;

    /// Run-scoped variant; the default implementation ignores `run_id` and
    /// delegates to `try_replay`.
    async fn try_replay_for_run(
        &self,
        run_id: &RunId,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        let _ = run_id;
        self.try_replay(input, policy, validation).await
    }
}
709
/// [`ReplayExecutor`] backed by the evolution store: selects a validated
/// capsule, applies its mutation in a sandbox, and revalidates the result.
pub struct StoreReplayExecutor {
    /// Sandbox used to apply mutation patches.
    pub sandbox: Arc<dyn Sandbox>,
    /// Validator run against the applied mutation.
    pub validator: Arc<dyn Validator>,
    /// Event-sourced store of genes, capsules, and mutations.
    pub store: Arc<dyn EvolutionStore>,
    /// Primary gene-candidate selector.
    pub selector: Arc<dyn Selector>,
    /// Policy engine deciding promotion/revocation after failures.
    pub governor: Arc<dyn Governor>,
    /// Optional EVU ledger used for reputation bias and reuse settlement.
    pub economics: Option<Arc<Mutex<EvuLedger>>>,
    /// Optional in-memory cache of capsule id -> remote publisher id.
    pub remote_publishers: Option<Arc<Mutex<BTreeMap<String, String>>>>,
    /// Stake policy applied when settling remote reuse.
    pub stake_policy: StakePolicy,
}
720
/// Internal result of candidate collection: the ranked candidates and
/// whether they came from an exact-match lookup (which bypasses the
/// selector score threshold).
struct ReplayCandidates {
    // Ranked gene candidates, best first.
    candidates: Vec<GeneCandidate>,
    // True when the candidates were found by exact matching rather than the
    // selector's similarity ranking.
    exact_match: bool,
}
725
#[async_trait]
impl ReplayExecutor for StoreReplayExecutor {
    /// Replay without attributing the attempt to a specific run.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        self.try_replay_inner(None, input, policy, validation).await
    }

    /// Replay attributed to `run_id`, recorded in the `CapsuleReused` event.
    async fn try_replay_for_run(
        &self,
        run_id: &RunId,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        self.try_replay_inner(Some(run_id), input, policy, validation)
            .await
    }
}
748
impl StoreReplayExecutor {
    /// Gathers replay candidates in priority order:
    /// 1. selector-ranked candidates,
    /// 2. exact-match lookup against the local store (cold start),
    /// 3. quarantined remote exact matches.
    /// Each pool is re-ranked with publisher-reputation bias, and the final
    /// list is truncated to `input.limit` (at least 1).
    fn collect_replay_candidates(&self, input: &SelectorInput) -> ReplayCandidates {
        // Quarantine stale-confidence genes before selecting from the store.
        self.apply_confidence_revalidation();
        let mut selector_input = input.clone();
        if self.economics.is_some() && self.remote_publishers.is_some() {
            // Widen the pool so reputation re-ranking has room to reorder.
            selector_input.limit = selector_input.limit.max(4);
        }
        let mut candidates = self.selector.select(&selector_input);
        self.rerank_with_reputation_bias(&mut candidates);
        let mut exact_match = false;
        if candidates.is_empty() {
            let mut exact_candidates = exact_match_candidates(self.store.as_ref(), input);
            self.rerank_with_reputation_bias(&mut exact_candidates);
            if !exact_candidates.is_empty() {
                candidates = exact_candidates;
                exact_match = true;
            }
        }
        if candidates.is_empty() {
            let mut remote_candidates =
                quarantined_remote_exact_match_candidates(self.store.as_ref(), input);
            self.rerank_with_reputation_bias(&mut remote_candidates);
            if !remote_candidates.is_empty() {
                candidates = remote_candidates;
                exact_match = true;
            }
        }
        candidates.truncate(input.limit.max(1));
        ReplayCandidates {
            candidates,
            exact_match,
        }
    }

    /// Quarantines promoted genes (and their promoted capsules) whose
    /// decayed replay confidence fell below the minimum, forcing
    /// revalidation before they can be replayed again. Best-effort: store
    /// append failures stop processing of the affected gene/capsules only.
    fn apply_confidence_revalidation(&self) {
        let Ok(projection) = projection_snapshot(self.store.as_ref()) else {
            return;
        };
        for target in stale_replay_revalidation_targets(&projection, Utc::now()) {
            let reason = format!(
                "confidence decayed to {:.3}; revalidation required before replay",
                target.decayed_confidence
            );
            if self
                .store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: target.gene_id.clone(),
                    state: AssetState::Quarantined,
                    reason: reason.clone(),
                })
                .is_err()
            {
                // Skip the capsules when the gene-level event could not be
                // recorded, to avoid half-applied quarantine.
                continue;
            }
            for capsule_id in target.capsule_ids {
                if self
                    .store
                    .append_event(EvolutionEvent::CapsuleQuarantined { capsule_id })
                    .is_err()
                {
                    break;
                }
            }
        }
    }

    /// Core replay flow: pick the best candidate, apply its capsule's
    /// mutation in the sandbox, validate, and record the resulting store and
    /// ledger events. Returns a `ReplayDecision` describing whether replay
    /// succeeded or the caller must fall back to the planner; only store and
    /// validation-infrastructure errors surface as `Err`.
    async fn try_replay_inner(
        &self,
        replay_run_id: Option<&RunId>,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        let ReplayCandidates {
            candidates,
            exact_match,
        } = self.collect_replay_candidates(input);
        let Some(best) = candidates.into_iter().next() else {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "no matching gene".into(),
            });
        };
        // Fuzzy (selector) matches must clear a similarity threshold; exact
        // matches bypass it.
        if !exact_match && best.score < 0.82 {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: format!("best gene score {:.3} below replay threshold", best.score),
            });
        }

        let Some(capsule) = best.capsules.first().cloned() else {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "candidate gene has no capsule".into(),
            });
        };
        // Remote capsules carry a publisher who is paid/penalized on reuse.
        let remote_publisher = self.publisher_for_capsule(&capsule.id);

        let Some(mutation) = find_declared_mutation(self.store.as_ref(), &capsule.mutation_id)
            .map_err(|err| ReplayError::Store(err.to_string()))?
        else {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "mutation payload missing from store".into(),
            });
        };

        let receipt = match self.sandbox.apply(&mutation, policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                // Patch did not apply: settle the failed reuse and fall back.
                self.record_reuse_settlement(remote_publisher.as_deref(), false);
                return Ok(ReplayDecision {
                    used_capsule: false,
                    capsule_id: Some(capsule.id.clone()),
                    fallback_to_planner: true,
                    reason: format!("replay patch apply failed: {err}"),
                });
            }
        };

        let report = self
            .validator
            .run(&receipt, validation)
            .await
            .map_err(|err| ReplayError::Validation(err.to_string()))?;
        if !report.success {
            // Record the failure (possibly revoking the gene) and settle.
            self.record_replay_validation_failure(&best, &capsule, validation, &report)?;
            self.record_reuse_settlement(remote_publisher.as_deref(), false);
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: Some(capsule.id.clone()),
                fallback_to_planner: true,
                reason: "replay validation failed".into(),
            });
        }

        // A quarantined (e.g. remote) capsule that just validated locally is
        // promoted along with its gene.
        if matches!(capsule.state, AssetState::Quarantined) {
            self.store
                .append_event(EvolutionEvent::ValidationPassed {
                    mutation_id: capsule.mutation_id.clone(),
                    report: report.to_snapshot(&validation.profile),
                    gene_id: Some(best.gene.id.clone()),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            if matches!(best.gene.state, AssetState::Quarantined) {
                self.store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: best.gene.id.clone(),
                        state: AssetState::Promoted,
                        reason: "remote asset locally validated via replay".into(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
                self.store
                    .append_event(EvolutionEvent::GenePromoted {
                        gene_id: best.gene.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
            self.store
                .append_event(EvolutionEvent::CapsuleReleased {
                    capsule_id: capsule.id.clone(),
                    state: AssetState::Promoted,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
        }

        self.store
            .append_event(EvolutionEvent::CapsuleReused {
                capsule_id: capsule.id.clone(),
                gene_id: capsule.gene_id.clone(),
                run_id: capsule.run_id.clone(),
                replay_run_id: replay_run_id.cloned(),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        self.record_reuse_settlement(remote_publisher.as_deref(), true);

        Ok(ReplayDecision {
            used_capsule: true,
            capsule_id: Some(capsule.id),
            fallback_to_planner: false,
            reason: if exact_match {
                "replayed via cold-start lookup".into()
            } else {
                "replayed via selector".into()
            },
        })
    }

    /// Re-sorts candidates by reputation-adjusted score (descending), tie
    /// broken by gene id. No-op when there is no economics ledger, no
    /// reputation data, or no known publisher for any candidate.
    fn rerank_with_reputation_bias(&self, candidates: &mut [GeneCandidate]) {
        let Some(ledger) = self.economics.as_ref() else {
            return;
        };
        let reputation_bias = ledger
            .lock()
            .ok()
            .map(|locked| locked.selector_reputation_bias())
            .unwrap_or_default();
        if reputation_bias.is_empty() {
            return;
        }
        // Only the lead capsule of each candidate determines its publisher.
        let required_assets = candidates
            .iter()
            .filter_map(|candidate| {
                candidate
                    .capsules
                    .first()
                    .map(|capsule| capsule.id.as_str())
            })
            .collect::<Vec<_>>();
        let publisher_map = self.remote_publishers_snapshot(&required_assets);
        if publisher_map.is_empty() {
            return;
        }
        candidates.sort_by(|left, right| {
            // right-vs-left comparison yields descending score order.
            effective_candidate_score(right, &publisher_map, &reputation_bias)
                .partial_cmp(&effective_candidate_score(
                    left,
                    &publisher_map,
                    &reputation_bias,
                ))
                .unwrap_or(std::cmp::Ordering::Equal)
                .then_with(|| left.gene.id.cmp(&right.gene.id))
        });
    }

    /// Looks up the remote publisher (sender id) of a capsule, if it was
    /// imported over the network; `None` for local capsules.
    fn publisher_for_capsule(&self, capsule_id: &str) -> Option<String> {
        self.remote_publishers_snapshot(&[capsule_id])
            .get(capsule_id)
            .cloned()
    }

    /// Returns the capsule-id -> publisher map, preferring the in-memory
    /// cache and falling back to publisher records persisted in the store
    /// when the cache is missing any of `required_assets`. The merged result
    /// is written back into the cache; existing cache entries win over
    /// persisted ones.
    fn remote_publishers_snapshot(&self, required_assets: &[&str]) -> BTreeMap<String, String> {
        let cached = self
            .remote_publishers
            .as_ref()
            .and_then(|remote_publishers| {
                remote_publishers.lock().ok().map(|locked| locked.clone())
            })
            .unwrap_or_default();
        if !cached.is_empty()
            && required_assets
                .iter()
                .all(|asset_id| cached.contains_key(*asset_id))
        {
            return cached;
        }

        let persisted = remote_publishers_by_asset_from_store(self.store.as_ref());
        if persisted.is_empty() {
            return cached;
        }

        let mut merged = cached;
        for (asset_id, sender_id) in persisted {
            merged.entry(asset_id).or_insert(sender_id);
        }

        // Backfill the cache; lock failure just skips the write-back.
        if let Some(remote_publishers) = self.remote_publishers.as_ref() {
            if let Ok(mut locked) = remote_publishers.lock() {
                for (asset_id, sender_id) in &merged {
                    locked.entry(asset_id.clone()).or_insert(sender_id.clone());
                }
            }
        }

        merged
    }

    /// Settles stake/reward with the remote publisher for a reuse attempt.
    /// No-op for local capsules or when no ledger is attached; a poisoned
    /// ledger lock silently skips settlement.
    fn record_reuse_settlement(&self, publisher_id: Option<&str>, success: bool) {
        let Some(publisher_id) = publisher_id else {
            return;
        };
        let Some(ledger) = self.economics.as_ref() else {
            return;
        };
        if let Ok(mut locked) = ledger.lock() {
            locked.settle_remote_reuse(publisher_id, success, &self.stake_policy);
        }
    }

    /// Records a failed replay validation: appends a `ValidationFailed`
    /// event, asks the governor whether the gene should be revoked given the
    /// accumulated replay failures and blast radius, and on revocation
    /// quarantines every capsule of the gene.
    fn record_replay_validation_failure(
        &self,
        best: &GeneCandidate,
        capsule: &Capsule,
        validation: &ValidationPlan,
        report: &ValidationReport,
    ) -> Result<(), ReplayError> {
        let projection = projection_snapshot(self.store.as_ref())
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            Self::confidence_context(&projection, &best.gene.id);

        self.store
            .append_event(EvolutionEvent::ValidationFailed {
                mutation_id: capsule.mutation_id.clone(),
                report: report.to_snapshot(&validation.profile),
                gene_id: Some(best.gene.id.clone()),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;

        let replay_failures = self.replay_failure_count(&best.gene.id)?;
        let governor_decision = self.governor.evaluate(GovernorInput {
            candidate_source: if self.publisher_for_capsule(&capsule.id).is_some() {
                CandidateSource::Remote
            } else {
                CandidateSource::Local
            },
            success_count: 0,
            blast_radius: BlastRadius {
                files_changed: capsule.outcome.changed_files.len(),
                lines_changed: capsule.outcome.lines_changed,
            },
            replay_failures,
            recent_mutation_ages_secs: Vec::new(),
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        if matches!(governor_decision.target_state, AssetState::Revoked) {
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: best.gene.id.clone(),
                    state: AssetState::Revoked,
                    reason: governor_decision.reason.clone(),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: best.gene.id.clone(),
                    reason: governor_decision.reason,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            for related in &best.capsules {
                self.store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: related.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
        }

        Ok(())
    }

    /// Returns (current confidence, historical peak confidence, seconds
    /// since last update) for a gene, derived from the peak confidence
    /// across its capsules.
    /// NOTE(review): the undecayed peak is reported for both current and
    /// historical confidence — confirm this is intended rather than a
    /// decayed value for the current slot.
    fn confidence_context(
        projection: &EvolutionProjection,
        gene_id: &str,
    ) -> (f32, f32, Option<u64>) {
        let peak_confidence = projection
            .capsules
            .iter()
            .filter(|capsule| capsule.gene_id == gene_id)
            .map(|capsule| capsule.confidence)
            .fold(0.0_f32, f32::max);
        let age_secs = projection
            .last_updated_at
            .get(gene_id)
            .and_then(|timestamp| Self::seconds_since_timestamp(timestamp, Utc::now()));
        (peak_confidence, peak_confidence, age_secs)
    }

    /// Seconds elapsed since an RFC 3339 timestamp, clamped to 0 for
    /// timestamps in the future; `None` if parsing or the u64 conversion
    /// fails.
    fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
        let parsed = DateTime::parse_from_rfc3339(timestamp)
            .ok()?
            .with_timezone(&Utc);
        let elapsed = now.signed_duration_since(parsed);
        if elapsed < Duration::zero() {
            Some(0)
        } else {
            u64::try_from(elapsed.num_seconds()).ok()
        }
    }

    /// Counts `ValidationFailed` events recorded against `gene_id`.
    /// NOTE(review): the semantics of `scan(1)`'s argument are not visible in
    /// this chunk — if it is a result limit rather than a start offset or
    /// generation, this would undercount failures; confirm against the
    /// `EvolutionStore` contract.
    fn replay_failure_count(&self, gene_id: &str) -> Result<u64, ReplayError> {
        Ok(self
            .store
            .scan(1)
            .map_err(|err| ReplayError::Store(err.to_string()))?
            .into_iter()
            .filter(|stored| {
                matches!(
                    &stored.event,
                    EvolutionEvent::ValidationFailed {
                        gene_id: Some(current_gene_id),
                        ..
                    } if current_gene_id == gene_id
                )
            })
            .count() as u64)
    }
}
1149
/// A promoted gene whose replay confidence has decayed below the replay
/// threshold, together with the promoted capsules that need revalidation.
#[derive(Clone, Debug, PartialEq)]
struct ConfidenceRevalidationTarget {
    // Gene whose confidence fell below MIN_REPLAY_CONFIDENCE.
    gene_id: String,
    // Promoted capsules of that gene, candidates for revalidation.
    capsule_ids: Vec<String>,
    // Best decayed confidence across those capsules (below the threshold).
    decayed_confidence: f32,
}
1156
1157fn stale_replay_revalidation_targets(
1158 projection: &EvolutionProjection,
1159 now: DateTime<Utc>,
1160) -> Vec<ConfidenceRevalidationTarget> {
1161 projection
1162 .genes
1163 .iter()
1164 .filter(|gene| gene.state == AssetState::Promoted)
1165 .filter_map(|gene| {
1166 let promoted_capsules = projection
1167 .capsules
1168 .iter()
1169 .filter(|capsule| {
1170 capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
1171 })
1172 .collect::<Vec<_>>();
1173 if promoted_capsules.is_empty() {
1174 return None;
1175 }
1176 let age_secs = projection
1177 .last_updated_at
1178 .get(&gene.id)
1179 .and_then(|timestamp| seconds_since_timestamp_for_confidence(timestamp, now));
1180 let decayed_confidence = promoted_capsules
1181 .iter()
1182 .map(|capsule| decayed_replay_confidence(capsule.confidence, age_secs))
1183 .fold(0.0_f32, f32::max);
1184 if decayed_confidence >= MIN_REPLAY_CONFIDENCE {
1185 return None;
1186 }
1187 Some(ConfidenceRevalidationTarget {
1188 gene_id: gene.id.clone(),
1189 capsule_ids: promoted_capsules
1190 .into_iter()
1191 .map(|capsule| capsule.id.clone())
1192 .collect(),
1193 decayed_confidence,
1194 })
1195 })
1196 .collect()
1197}
1198
1199fn seconds_since_timestamp_for_confidence(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
1200 let parsed = DateTime::parse_from_rfc3339(timestamp)
1201 .ok()?
1202 .with_timezone(&Utc);
1203 let elapsed = now.signed_duration_since(parsed);
1204 if elapsed < Duration::zero() {
1205 Some(0)
1206 } else {
1207 u64::try_from(elapsed.num_seconds()).ok()
1208 }
1209}
1210
/// Errors surfaced by the evolution kernel and network-node entry points.
#[derive(Debug, Error)]
pub enum EvoKernelError {
    /// Applying a mutation inside the sandbox failed.
    #[error("sandbox error: {0}")]
    Sandbox(String),
    /// Running the validator (or a related invariant) failed before a report
    /// could be produced.
    #[error("validation error: {0}")]
    Validation(String),
    /// Validation ran to completion but the mutation did not pass; carries the
    /// full report for diagnostics.
    #[error("validation failed")]
    ValidationFailed(ValidationReport),
    /// The evolution event store rejected an append or scan.
    #[error("store error: {0}")]
    Store(String),
}
1222
/// Result of capturing a successful mutation: the committed capsule, the gene
/// it was projected into, and the governor's promotion decision.
#[derive(Clone, Debug)]
pub struct CaptureOutcome {
    pub capsule: Capsule,
    pub gene: Gene,
    pub governor_decision: GovernorDecision,
}
1229
/// Result of importing a remote evolution envelope into the local store.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ImportOutcome {
    // Ids of the assets that were actually imported.
    pub imported_asset_ids: Vec<String>,
    // Whether the envelope was accepted at all.
    pub accepted: bool,
}
1235
/// Aggregated counters summarizing replay and mutation activity. Produced by
/// `evolution_metrics_snapshot` from the event store; rates are presumably
/// derived from the corresponding totals (computed upstream — confirm there).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct EvolutionMetricsSnapshot {
    pub replay_attempts_total: u64,
    pub replay_success_total: u64,
    pub replay_success_rate: f64,
    pub confidence_revalidations_total: u64,
    // Planner/reasoning invocations skipped because a capsule was reused.
    pub replay_reasoning_avoided_total: u64,
    pub replay_task_classes: Vec<ReplayTaskClassMetrics>,
    pub mutation_declared_total: u64,
    pub promoted_mutations_total: u64,
    pub promotion_ratio: f64,
    pub gene_revocations_total: u64,
    // Rolling one-hour activity windows.
    pub mutation_velocity_last_hour: u64,
    pub revoke_frequency_last_hour: u64,
    pub promoted_genes: u64,
    pub promoted_capsules: u64,
    // Sequence number of the newest event folded into this snapshot.
    pub last_event_seq: u64,
}
1254
/// Coarse health view derived from an `EvolutionMetricsSnapshot`
/// (see `evolution_health_snapshot`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct EvolutionHealthSnapshot {
    // Human-readable health status string.
    pub status: String,
    pub last_event_seq: u64,
    pub promoted_genes: u64,
    pub promoted_capsules: u64,
}
1262
/// A lightweight network-facing facade over an evolution store: publish,
/// fetch, revoke, and metrics endpoints without a full `EvoKernel`.
#[derive(Clone)]
pub struct EvolutionNetworkNode {
    pub store: Arc<dyn EvolutionStore>,
}
1267
1268impl EvolutionNetworkNode {
1269 pub fn new(store: Arc<dyn EvolutionStore>) -> Self {
1270 Self { store }
1271 }
1272
1273 pub fn with_default_store() -> Self {
1274 Self {
1275 store: Arc::new(JsonlEvolutionStore::new(default_store_root())),
1276 }
1277 }
1278
1279 pub fn accept_publish_request(
1280 &self,
1281 request: &PublishRequest,
1282 ) -> Result<ImportOutcome, EvoKernelError> {
1283 import_remote_envelope_into_store(
1284 self.store.as_ref(),
1285 &EvolutionEnvelope::publish(request.sender_id.clone(), request.assets.clone()),
1286 None,
1287 )
1288 }
1289
1290 pub fn ensure_builtin_experience_assets(
1291 &self,
1292 sender_id: impl Into<String>,
1293 ) -> Result<ImportOutcome, EvoKernelError> {
1294 ensure_builtin_experience_assets_in_store(self.store.as_ref(), sender_id.into())
1295 }
1296
1297 pub fn record_reported_experience(
1298 &self,
1299 sender_id: impl Into<String>,
1300 gene_id: impl Into<String>,
1301 signals: Vec<String>,
1302 strategy: Vec<String>,
1303 validation: Vec<String>,
1304 ) -> Result<ImportOutcome, EvoKernelError> {
1305 record_reported_experience_in_store(
1306 self.store.as_ref(),
1307 sender_id.into(),
1308 gene_id.into(),
1309 signals,
1310 strategy,
1311 validation,
1312 )
1313 }
1314
1315 pub fn publish_local_assets(
1316 &self,
1317 sender_id: impl Into<String>,
1318 ) -> Result<EvolutionEnvelope, EvoKernelError> {
1319 export_promoted_assets_from_store(self.store.as_ref(), sender_id)
1320 }
1321
1322 pub fn fetch_assets(
1323 &self,
1324 responder_id: impl Into<String>,
1325 query: &FetchQuery,
1326 ) -> Result<FetchResponse, EvoKernelError> {
1327 fetch_assets_from_store(self.store.as_ref(), responder_id, query)
1328 }
1329
1330 pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
1331 revoke_assets_in_store(self.store.as_ref(), notice)
1332 }
1333
1334 pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
1335 evolution_metrics_snapshot(self.store.as_ref())
1336 }
1337
1338 pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
1339 self.metrics_snapshot().map(|snapshot| {
1340 let health = evolution_health_snapshot(&snapshot);
1341 render_evolution_metrics_prometheus(&snapshot, &health)
1342 })
1343 }
1344
1345 pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
1346 self.metrics_snapshot()
1347 .map(|snapshot| evolution_health_snapshot(&snapshot))
1348 }
1349}
1350
/// Evolution-aware kernel: pairs the base kernel with the sandbox, validator,
/// event store, selector, governor, and economics machinery used to capture,
/// govern, replay, and exchange mutations.
pub struct EvoKernel<S: KernelState> {
    /// Underlying execution kernel.
    pub kernel: Arc<Kernel<S>>,
    /// Sandbox used to apply candidate mutations in isolation.
    pub sandbox: Arc<dyn Sandbox>,
    /// Validator that runs the validation plan against sandbox receipts.
    pub validator: Arc<dyn Validator>,
    /// Append-only evolution event store.
    pub store: Arc<dyn EvolutionStore>,
    /// Candidate selector (store-backed by default; see `new`).
    pub selector: Arc<dyn Selector>,
    /// Governor deciding promotion / quarantine / revocation.
    pub governor: Arc<dyn Governor>,
    /// Shared EVU ledger for staking and reputation.
    pub economics: Arc<Mutex<EvuLedger>>,
    /// Shared map of asset id -> remote publisher — presumably keyed by
    /// capsule id (see `publisher_for_capsule` usage); confirm with the
    /// replay executor.
    pub remote_publishers: Arc<Mutex<BTreeMap<String, String>>>,
    /// Stake requirements enforced before publishing to the network.
    pub stake_policy: StakePolicy,
    /// Policy constraining sandbox mutation application.
    pub sandbox_policy: SandboxPolicy,
    /// Validation stages/profile to run after applying a mutation.
    pub validation_plan: ValidationPlan,
}
1364
impl<S: KernelState> EvoKernel<S> {
    /// Ages in seconds (ascending) of previously declared mutations, skipping
    /// `exclude_mutation_id`. Feeds the governor's mutation-velocity input.
    fn recent_prior_mutation_ages_secs(
        &self,
        exclude_mutation_id: Option<&str>,
    ) -> Result<Vec<u64>, EvolutionError> {
        let now = Utc::now();
        let mut ages = self
            .store
            .scan(1)
            .map_err(|err| err)?
            .into_iter()
            .filter_map(|stored| match stored.event {
                EvolutionEvent::MutationDeclared { mutation }
                    if exclude_mutation_id != Some(mutation.intent.id.as_str()) =>
                {
                    Self::seconds_since_timestamp(&stored.timestamp, now)
                }
                _ => None,
            })
            .collect::<Vec<_>>();
        ages.sort_unstable();
        Ok(ages)
    }

    /// Whole seconds since an RFC 3339 `timestamp`; `None` when unparseable,
    /// `Some(0)` when the timestamp lies in the future.
    fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
        let parsed = DateTime::parse_from_rfc3339(timestamp)
            .ok()?
            .with_timezone(&Utc);
        let elapsed = now.signed_duration_since(parsed);
        if elapsed < Duration::zero() {
            Some(0)
        } else {
            u64::try_from(elapsed.num_seconds()).ok()
        }
    }

    /// Construct with default governor, economics ledger, publisher map,
    /// stake/sandbox policies, validation plan, and a store-backed selector.
    pub fn new(
        kernel: Arc<Kernel<S>>,
        sandbox: Arc<dyn Sandbox>,
        validator: Arc<dyn Validator>,
        store: Arc<dyn EvolutionStore>,
    ) -> Self {
        let selector: Arc<dyn Selector> = Arc::new(StoreBackedSelector::new(store.clone()));
        Self {
            kernel,
            sandbox,
            validator,
            store,
            selector,
            governor: Arc::new(DefaultGovernor::default()),
            economics: Arc::new(Mutex::new(EvuLedger::default())),
            remote_publishers: Arc::new(Mutex::new(BTreeMap::new())),
            stake_policy: StakePolicy::default(),
            sandbox_policy: SandboxPolicy::oris_default(),
            validation_plan: ValidationPlan::oris_default(),
        }
    }

    /// Builder: replace the candidate selector.
    pub fn with_selector(mut self, selector: Arc<dyn Selector>) -> Self {
        self.selector = selector;
        self
    }

    /// Builder: replace the sandbox policy.
    pub fn with_sandbox_policy(mut self, policy: SandboxPolicy) -> Self {
        self.sandbox_policy = policy;
        self
    }

    /// Builder: replace the governor.
    pub fn with_governor(mut self, governor: Arc<dyn Governor>) -> Self {
        self.governor = governor;
        self
    }

    /// Builder: share an external economics ledger.
    pub fn with_economics(mut self, economics: Arc<Mutex<EvuLedger>>) -> Self {
        self.economics = economics;
        self
    }

    /// Builder: replace the stake policy.
    pub fn with_stake_policy(mut self, policy: StakePolicy) -> Self {
        self.stake_policy = policy;
        self
    }

    /// Builder: replace the validation plan.
    pub fn with_validation_plan(mut self, plan: ValidationPlan) -> Self {
        self.validation_plan = plan;
        self
    }

    /// Collect replay candidates for `input` via a transient replay executor
    /// assembled from this kernel's components.
    pub fn select_candidates(&self, input: &SelectorInput) -> Vec<GeneCandidate> {
        let executor = StoreReplayExecutor {
            sandbox: self.sandbox.clone(),
            validator: self.validator.clone(),
            store: self.store.clone(),
            selector: self.selector.clone(),
            governor: self.governor.clone(),
            economics: Some(self.economics.clone()),
            remote_publishers: Some(self.remote_publishers.clone()),
            stake_policy: self.stake_policy.clone(),
        };
        executor.collect_replay_candidates(input).candidates
    }

    /// Seed the store with built-in quarantined genes/capsules when it holds no
    /// genes yet; no-op (default report) otherwise.
    pub fn bootstrap_if_empty(&self, run_id: &RunId) -> Result<BootstrapReport, EvoKernelError> {
        let projection = projection_snapshot(self.store.as_ref())?;
        if !projection.genes.is_empty() {
            return Ok(BootstrapReport::default());
        }

        let templates = built_in_seed_templates();
        for template in &templates {
            let mutation = build_seed_mutation(template);
            let extracted = extract_seed_signals(template);
            let gene = build_bootstrap_gene(template, &extracted)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
            let capsule = build_bootstrap_capsule(run_id, template, &mutation, &gene)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;

            // Event order mirrors the live capture path: declare -> signals ->
            // gene -> promotion verdict -> capsule -> quarantine.
            self.store
                .append_event(EvolutionEvent::MutationDeclared {
                    mutation: mutation.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::SignalsExtracted {
                    mutation_id: mutation.intent.id.clone(),
                    hash: extracted.hash.clone(),
                    signals: extracted.values.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: gene.id.clone(),
                    state: AssetState::Quarantined,
                    reason: "bootstrap seeds require local validation before replay".into(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleCommitted {
                    capsule: capsule.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id,
                })
                .map_err(store_err)?;
        }

        Ok(BootstrapReport {
            seeded: true,
            genes_added: templates.len(),
            capsules_added: templates.len(),
        })
    }

    /// Capture a mutation and return only the committed capsule.
    pub async fn capture_successful_mutation(
        &self,
        run_id: &RunId,
        mutation: PreparedMutation,
    ) -> Result<Capsule, EvoKernelError> {
        Ok(self
            .capture_mutation_with_governor(run_id, mutation)
            .await?
            .capsule)
    }

    /// Full capture pipeline: declare -> sandbox-apply -> validate -> extract
    /// signals -> derive gene -> governor verdict -> commit capsule. Every step
    /// is journaled to the store; failures append a rejection/failure event
    /// before returning an error.
    pub async fn capture_mutation_with_governor(
        &self,
        run_id: &RunId,
        mutation: PreparedMutation,
    ) -> Result<CaptureOutcome, EvoKernelError> {
        self.store
            .append_event(EvolutionEvent::MutationDeclared {
                mutation: mutation.clone(),
            })
            .map_err(store_err)?;

        let receipt = match self.sandbox.apply(&mutation, &self.sandbox_policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                // Journal the rejection before surfacing the sandbox error.
                self.store
                    .append_event(EvolutionEvent::MutationRejected {
                        mutation_id: mutation.intent.id.clone(),
                        reason: err.to_string(),
                    })
                    .map_err(store_err)?;
                return Err(EvoKernelError::Sandbox(err.to_string()));
            }
        };

        self.store
            .append_event(EvolutionEvent::MutationApplied {
                mutation_id: mutation.intent.id.clone(),
                patch_hash: receipt.patch_hash.clone(),
                changed_files: receipt
                    .changed_files
                    .iter()
                    .map(|path| path.to_string_lossy().to_string())
                    .collect(),
            })
            .map_err(store_err)?;

        let report = self
            .validator
            .run(&receipt, &self.validation_plan)
            .await
            .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
        if !report.success {
            self.store
                .append_event(EvolutionEvent::ValidationFailed {
                    mutation_id: mutation.intent.id.clone(),
                    report: report.to_snapshot(&self.validation_plan.profile),
                    gene_id: None,
                })
                .map_err(store_err)?;
            return Err(EvoKernelError::ValidationFailed(report));
        }

        self.store
            .append_event(EvolutionEvent::ValidationPassed {
                mutation_id: mutation.intent.id.clone(),
                report: report.to_snapshot(&self.validation_plan.profile),
                gene_id: None,
            })
            .map_err(store_err)?;

        // Deterministic signal extraction from the diff, intent, and outputs.
        let extracted_signals = extract_deterministic_signals(&SignalExtractionInput {
            patch_diff: mutation.artifact.payload.clone(),
            intent: mutation.intent.intent.clone(),
            expected_effect: mutation.intent.expected_effect.clone(),
            declared_signals: mutation.intent.signals.clone(),
            changed_files: receipt
                .changed_files
                .iter()
                .map(|path| path.to_string_lossy().to_string())
                .collect(),
            validation_success: report.success,
            validation_logs: report.logs.clone(),
            stage_outputs: report
                .stages
                .iter()
                .flat_map(|stage| [stage.stdout.clone(), stage.stderr.clone()])
                .filter(|value| !value.is_empty())
                .collect(),
        });
        self.store
            .append_event(EvolutionEvent::SignalsExtracted {
                mutation_id: mutation.intent.id.clone(),
                hash: extracted_signals.hash.clone(),
                signals: extracted_signals.values.clone(),
            })
            .map_err(store_err)?;

        let projection = projection_snapshot(self.store.as_ref())?;
        let blast_radius = compute_blast_radius(&mutation.artifact.payload);
        let recent_mutation_ages_secs = self
            .recent_prior_mutation_ages_secs(Some(mutation.intent.id.as_str()))
            .map_err(store_err)?;
        let mut gene = derive_gene(
            &mutation,
            &receipt,
            &self.validation_plan.profile,
            &extracted_signals.values,
        );
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            StoreReplayExecutor::confidence_context(&projection, &gene.id);
        // Prior successes for this gene (capsule count) plus this new success.
        let success_count = projection
            .genes
            .iter()
            .find(|existing| existing.id == gene.id)
            .map(|existing| {
                projection
                    .capsules
                    .iter()
                    .filter(|capsule| capsule.gene_id == existing.id)
                    .count() as u64
            })
            .unwrap_or(0)
            + 1;
        let governor_decision = self.governor.evaluate(GovernorInput {
            candidate_source: CandidateSource::Local,
            success_count,
            blast_radius: blast_radius.clone(),
            replay_failures: 0,
            recent_mutation_ages_secs,
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        // The governor's verdict dictates both gene and capsule state.
        gene.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
            .map_err(store_err)?;
        self.store
            .append_event(EvolutionEvent::PromotionEvaluated {
                gene_id: gene.id.clone(),
                state: governor_decision.target_state.clone(),
                reason: governor_decision.reason.clone(),
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Promoted) {
            self.store
                .append_event(EvolutionEvent::GenePromoted {
                    gene_id: gene.id.clone(),
                })
                .map_err(store_err)?;
        }
        if matches!(governor_decision.target_state, AssetState::Revoked) {
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: gene.id.clone(),
                    reason: governor_decision.reason.clone(),
                })
                .map_err(store_err)?;
        }
        if let Some(spec_id) = &mutation.intent.spec_id {
            self.store
                .append_event(EvolutionEvent::SpecLinked {
                    mutation_id: mutation.intent.id.clone(),
                    spec_id: spec_id.clone(),
                })
                .map_err(store_err)?;
        }

        let mut capsule = build_capsule(
            run_id,
            &mutation,
            &receipt,
            &report,
            &self.validation_plan.profile,
            &gene,
            &blast_radius,
        )
        .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
        capsule.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::CapsuleCommitted {
                capsule: capsule.clone(),
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Quarantined) {
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id.clone(),
                })
                .map_err(store_err)?;
        }

        Ok(CaptureOutcome {
            capsule,
            gene,
            governor_decision,
        })
    }

    /// Adapt an agent mutation proposal into a low-risk `PreparedMutation`
    /// scoped to the proposal's files, then run the full capture pipeline.
    pub async fn capture_from_proposal(
        &self,
        run_id: &RunId,
        proposal: &AgentMutationProposal,
        diff_payload: String,
        base_revision: Option<String>,
    ) -> Result<CaptureOutcome, EvoKernelError> {
        let intent = MutationIntent {
            id: next_id("proposal"),
            intent: proposal.intent.clone(),
            target: MutationTarget::Paths {
                allow: proposal.files.clone(),
            },
            expected_effect: proposal.expected_effect.clone(),
            risk: RiskLevel::Low,
            signals: proposal.files.clone(),
            spec_id: None,
        };
        self.capture_mutation_with_governor(
            run_id,
            prepare_mutation(intent, diff_payload, base_revision),
        )
        .await
    }

    /// Translate a capture outcome into agent-facing execution feedback;
    /// anything short of revocation counts as accepted.
    pub fn feedback_for_agent(outcome: &CaptureOutcome) -> ExecutionFeedback {
        ExecutionFeedback {
            accepted: !matches!(outcome.governor_decision.target_state, AssetState::Revoked),
            asset_state: Some(format!("{:?}", outcome.governor_decision.target_state)),
            summary: outcome.governor_decision.reason.clone(),
        }
    }

    /// Translate a replay decision into agent-facing feedback: skip the
    /// planner when a capsule was reused, otherwise direct a planner fallback
    /// and carry the fallback reason.
    pub fn replay_feedback_for_agent(
        signals: &[String],
        decision: &ReplayDecision,
    ) -> ReplayFeedback {
        let (task_class_id, task_label) = replay_task_descriptor(signals);
        let planner_directive = if decision.used_capsule {
            ReplayPlannerDirective::SkipPlanner
        } else {
            ReplayPlannerDirective::PlanFallback
        };
        let reasoning_steps_avoided = u64::from(decision.used_capsule);
        let fallback_reason = if decision.fallback_to_planner {
            Some(decision.reason.clone())
        } else {
            None
        };
        let summary = if decision.used_capsule {
            format!("reused prior capsule for task class '{task_label}'; skip planner")
        } else {
            format!(
                "planner fallback required for task class '{task_label}': {}",
                decision.reason
            )
        };

        ReplayFeedback {
            used_capsule: decision.used_capsule,
            capsule_id: decision.capsule_id.clone(),
            planner_directive,
            reasoning_steps_avoided,
            fallback_reason,
            task_class_id,
            task_label,
            summary,
        }
    }

    /// Human-in-the-loop devloop: reject tasks outside the bounded scope,
    /// pause tasks without explicit approval, and only then run capture.
    pub async fn run_supervised_devloop(
        &self,
        run_id: &RunId,
        request: &SupervisedDevloopRequest,
        diff_payload: String,
        base_revision: Option<String>,
    ) -> Result<SupervisedDevloopOutcome, EvoKernelError> {
        let task_class = classify_supervised_devloop_request(request);
        let Some(task_class) = task_class else {
            return Ok(SupervisedDevloopOutcome {
                task_id: request.task.id.clone(),
                task_class: None,
                status: SupervisedDevloopStatus::RejectedByPolicy,
                execution_feedback: None,
                summary: format!(
                    "supervised devloop rejected task '{}' because it is an unsupported task outside the bounded scope",
                    request.task.id
                ),
            });
        };

        if !request.approval.approved {
            return Ok(SupervisedDevloopOutcome {
                task_id: request.task.id.clone(),
                task_class: Some(task_class),
                status: SupervisedDevloopStatus::AwaitingApproval,
                execution_feedback: None,
                summary: format!(
                    "supervised devloop paused task '{}' until explicit human approval is granted",
                    request.task.id
                ),
            });
        }

        let capture = self
            .capture_from_proposal(run_id, &request.proposal, diff_payload, base_revision)
            .await?;
        let approver = request
            .approval
            .approver
            .as_deref()
            .unwrap_or("unknown approver");

        Ok(SupervisedDevloopOutcome {
            task_id: request.task.id.clone(),
            task_class: Some(task_class),
            status: SupervisedDevloopStatus::Executed,
            execution_feedback: Some(Self::feedback_for_agent(&capture)),
            summary: format!(
                "supervised devloop executed task '{}' with explicit approval from {approver}",
                request.task.id
            ),
        })
    }

    /// Run a coordination plan through a fresh multi-agent coordinator.
    pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
        MultiAgentCoordinator::new().coordinate(plan)
    }

    /// Export promoted assets; when anything is exported, a publish stake is
    /// reserved from the EVU ledger and insufficient stake aborts the export.
    pub fn export_promoted_assets(
        &self,
        sender_id: impl Into<String>,
    ) -> Result<EvolutionEnvelope, EvoKernelError> {
        let sender_id = sender_id.into();
        let envelope = export_promoted_assets_from_store(self.store.as_ref(), sender_id.clone())?;
        if !envelope.assets.is_empty() {
            let mut ledger = self
                .economics
                .lock()
                .map_err(|_| EvoKernelError::Validation("economics ledger lock poisoned".into()))?;
            if ledger
                .reserve_publish_stake(&sender_id, &self.stake_policy)
                .is_none()
            {
                return Err(EvoKernelError::Validation(
                    "insufficient EVU for remote publish".into(),
                ));
            }
        }
        Ok(envelope)
    }

    /// Import a remote envelope, tracking publishers in the shared map.
    pub fn import_remote_envelope(
        &self,
        envelope: &EvolutionEnvelope,
    ) -> Result<ImportOutcome, EvoKernelError> {
        import_remote_envelope_into_store(
            self.store.as_ref(),
            envelope,
            Some(self.remote_publishers.as_ref()),
        )
    }

    /// Answer a fetch query against the local store.
    pub fn fetch_assets(
        &self,
        responder_id: impl Into<String>,
        query: &FetchQuery,
    ) -> Result<FetchResponse, EvoKernelError> {
        fetch_assets_from_store(self.store.as_ref(), responder_id, query)
    }

    /// Apply a revocation notice to the local store.
    pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
        revoke_assets_in_store(self.store.as_ref(), notice)
    }

    /// Replay under a freshly generated replay run id.
    pub async fn replay_or_fallback(
        &self,
        input: SelectorInput,
    ) -> Result<ReplayDecision, EvoKernelError> {
        let replay_run_id = next_id("replay");
        self.replay_or_fallback_for_run(&replay_run_id, input).await
    }

    /// Attempt a capsule replay for `run_id`; the returned decision says
    /// whether a capsule was reused or a planner fallback is required.
    pub async fn replay_or_fallback_for_run(
        &self,
        run_id: &RunId,
        input: SelectorInput,
    ) -> Result<ReplayDecision, EvoKernelError> {
        let executor = StoreReplayExecutor {
            sandbox: self.sandbox.clone(),
            validator: self.validator.clone(),
            store: self.store.clone(),
            selector: self.selector.clone(),
            governor: self.governor.clone(),
            economics: Some(self.economics.clone()),
            remote_publishers: Some(self.remote_publishers.clone()),
            stake_policy: self.stake_policy.clone(),
        };
        executor
            .try_replay_for_run(run_id, &input, &self.sandbox_policy, &self.validation_plan)
            .await
            .map_err(|err| EvoKernelError::Validation(err.to_string()))
    }

    /// Economics signal for the governor; `None` when the ledger lock is
    /// poisoned or no signal exists for the node.
    pub fn economics_signal(&self, node_id: &str) -> Option<EconomicsSignal> {
        self.economics.lock().ok()?.governor_signal(node_id)
    }

    /// Per-publisher reputation bias for the selector; empty when the ledger
    /// lock is poisoned.
    pub fn selector_reputation_bias(&self) -> BTreeMap<String, f32> {
        self.economics
            .lock()
            .ok()
            .map(|locked| locked.selector_reputation_bias())
            .unwrap_or_default()
    }

    /// Compute the current metrics snapshot from the store.
    pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
        evolution_metrics_snapshot(self.store.as_ref())
    }

    /// Render metrics (plus derived health) in Prometheus exposition format.
    pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
        self.metrics_snapshot().map(|snapshot| {
            let health = evolution_health_snapshot(&snapshot);
            render_evolution_metrics_prometheus(&snapshot, &health)
        })
    }

    /// Derive the coarse health snapshot from current metrics.
    pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
        self.metrics_snapshot()
            .map(|snapshot| evolution_health_snapshot(&snapshot))
    }
}
1953
1954pub fn prepare_mutation(
1955 intent: MutationIntent,
1956 diff_payload: String,
1957 base_revision: Option<String>,
1958) -> PreparedMutation {
1959 PreparedMutation {
1960 intent,
1961 artifact: MutationArtifact {
1962 encoding: ArtifactEncoding::UnifiedDiff,
1963 content_hash: compute_artifact_hash(&diff_payload),
1964 payload: diff_payload,
1965 base_revision,
1966 },
1967 }
1968}
1969
1970pub fn prepare_mutation_from_spec(
1971 plan: CompiledMutationPlan,
1972 diff_payload: String,
1973 base_revision: Option<String>,
1974) -> PreparedMutation {
1975 prepare_mutation(plan.mutation_intent, diff_payload, base_revision)
1976}
1977
1978pub fn default_evolution_store() -> Arc<dyn EvolutionStore> {
1979 Arc::new(oris_evolution::JsonlEvolutionStore::new(
1980 default_store_root(),
1981 ))
1982}
1983
/// The built-in seed templates used by `bootstrap_if_empty` to prime an empty
/// store. Each carries a fixed unified diff and the "bootstrap-seed"
/// validation profile; the embedded diffs are intentionally tiny and inert.
fn built_in_seed_templates() -> Vec<SeedTemplate> {
    vec![
        // Recovering a missing README.
        SeedTemplate {
            id: "bootstrap-readme".into(),
            intent: "Seed a baseline README recovery pattern".into(),
            signals: vec!["bootstrap readme".into(), "missing readme".into()],
            diff_payload: "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1,3 @@
+# Oris
+Bootstrap documentation seed
+"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Stabilizing flaky/failing tests.
        SeedTemplate {
            id: "bootstrap-test-fix".into(),
            intent: "Seed a deterministic test stabilization pattern".into(),
            signals: vec!["bootstrap test fix".into(), "failing tests".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 1111111..2222222 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,2 @@
 pub fn demo() -> usize { 1 }
+pub fn normalize_test_output() -> bool { true }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // A minimal, low-risk refactor.
        SeedTemplate {
            id: "bootstrap-refactor".into(),
            intent: "Seed a low-risk refactor capsule".into(),
            signals: vec!["bootstrap refactor".into(), "small refactor".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 2222222..3333333 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,3 @@
 pub fn demo() -> usize { 1 }
+
+fn extract_strategy_key(input: &str) -> &str { input }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Adding structured logging.
        SeedTemplate {
            id: "bootstrap-logging".into(),
            intent: "Seed a baseline structured logging mutation".into(),
            signals: vec!["bootstrap logging".into(), "structured logs".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 3333333..4444444 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,3 @@
 pub fn demo() -> usize { 1 }
+
+fn emit_bootstrap_log() { println!(\"bootstrap-log\"); }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
    ]
}
2055
2056fn build_seed_mutation(template: &SeedTemplate) -> PreparedMutation {
2057 let changed_files = seed_changed_files(&template.diff_payload);
2058 let target = if changed_files.is_empty() {
2059 MutationTarget::WorkspaceRoot
2060 } else {
2061 MutationTarget::Paths {
2062 allow: changed_files,
2063 }
2064 };
2065 prepare_mutation(
2066 MutationIntent {
2067 id: stable_hash_json(&("bootstrap-mutation", &template.id))
2068 .unwrap_or_else(|_| format!("bootstrap-mutation-{}", template.id)),
2069 intent: template.intent.clone(),
2070 target,
2071 expected_effect: format!("seed {}", template.id),
2072 risk: RiskLevel::Low,
2073 signals: template.signals.clone(),
2074 spec_id: None,
2075 },
2076 template.diff_payload.clone(),
2077 None,
2078 )
2079}
2080
2081fn extract_seed_signals(template: &SeedTemplate) -> SignalExtractionOutput {
2082 let mut signals = BTreeSet::new();
2083 for declared in &template.signals {
2084 if let Some(phrase) = normalize_signal_phrase(declared) {
2085 signals.insert(phrase);
2086 }
2087 extend_signal_tokens(&mut signals, declared);
2088 }
2089 extend_signal_tokens(&mut signals, &template.intent);
2090 extend_signal_tokens(&mut signals, &template.diff_payload);
2091 for changed_file in seed_changed_files(&template.diff_payload) {
2092 extend_signal_tokens(&mut signals, &changed_file);
2093 }
2094 let values = signals.into_iter().take(32).collect::<Vec<_>>();
2095 let hash =
2096 stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
2097 SignalExtractionOutput { values, hash }
2098}
2099
/// Extract the sorted, de-duplicated set of destination paths (`+++ b/...`
/// lines) named by a unified diff; blank paths are dropped.
fn seed_changed_files(diff_payload: &str) -> Vec<String> {
    diff_payload
        .lines()
        .filter_map(|line| line.strip_prefix("+++ b/"))
        .map(str::trim)
        .filter(|path| !path.is_empty())
        .map(str::to_string)
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect()
}
2112
2113fn build_bootstrap_gene(
2114 template: &SeedTemplate,
2115 extracted: &SignalExtractionOutput,
2116) -> Result<Gene, EvolutionError> {
2117 let strategy = vec![template.id.clone(), "bootstrap".into()];
2118 let id = stable_hash_json(&(
2119 "bootstrap-gene",
2120 &template.id,
2121 &extracted.values,
2122 &template.validation_profile,
2123 ))?;
2124 Ok(Gene {
2125 id,
2126 signals: extracted.values.clone(),
2127 strategy,
2128 validation: vec![template.validation_profile.clone()],
2129 state: AssetState::Quarantined,
2130 })
2131}
2132
/// Build a quarantined, zero-confidence capsule for a bootstrap seed. The
/// capsule id and validator hash are content-addressed; the exact tuples fed
/// to `stable_hash_json` must not change, or existing stores would stop
/// recognizing previously seeded assets.
fn build_bootstrap_capsule(
    run_id: &RunId,
    template: &SeedTemplate,
    mutation: &PreparedMutation,
    gene: &Gene,
) -> Result<Capsule, EvolutionError> {
    // Fall back to "." when the current directory is unreadable.
    let cwd = std::env::current_dir().unwrap_or_else(|_| Path::new(".").to_path_buf());
    let env = current_env_fingerprint(&cwd);
    let diff_hash = mutation.artifact.content_hash.clone();
    let changed_files = seed_changed_files(&template.diff_payload);
    let validator_hash = stable_hash_json(&(
        "bootstrap-validator",
        &template.id,
        &template.validation_profile,
        &diff_hash,
    ))?;
    let id = stable_hash_json(&(
        "bootstrap-capsule",
        &template.id,
        run_id,
        &gene.id,
        &diff_hash,
        &env,
    ))?;
    Ok(Capsule {
        id,
        gene_id: gene.id.clone(),
        mutation_id: mutation.intent.id.clone(),
        run_id: run_id.clone(),
        diff_hash,
        // Seeds carry no earned confidence and no successful outcome yet;
        // local validation must establish both before replay.
        confidence: 0.0,
        env,
        outcome: Outcome {
            success: false,
            validation_profile: template.validation_profile.clone(),
            validation_duration_ms: 0,
            changed_files,
            validator_hash,
            lines_changed: compute_blast_radius(&template.diff_payload).lines_changed,
            replay_verified: false,
        },
        state: AssetState::Quarantined,
    })
}
2177
2178fn derive_gene(
2179 mutation: &PreparedMutation,
2180 receipt: &SandboxReceipt,
2181 validation_profile: &str,
2182 extracted_signals: &[String],
2183) -> Gene {
2184 let mut strategy = BTreeSet::new();
2185 for file in &receipt.changed_files {
2186 if let Some(component) = file.components().next() {
2187 strategy.insert(component.as_os_str().to_string_lossy().to_string());
2188 }
2189 }
2190 for token in mutation
2191 .artifact
2192 .payload
2193 .split(|ch: char| !ch.is_ascii_alphanumeric())
2194 {
2195 if token.len() == 5
2196 && token.starts_with('E')
2197 && token[1..].chars().all(|ch| ch.is_ascii_digit())
2198 {
2199 strategy.insert(token.to_string());
2200 }
2201 }
2202 for token in mutation.intent.intent.split_whitespace().take(8) {
2203 strategy.insert(token.to_ascii_lowercase());
2204 }
2205 let strategy = strategy.into_iter().collect::<Vec<_>>();
2206 let id = stable_hash_json(&(extracted_signals, &strategy, validation_profile))
2207 .unwrap_or_else(|_| next_id("gene"));
2208 Gene {
2209 id,
2210 signals: extracted_signals.to_vec(),
2211 strategy,
2212 validation: vec![validation_profile.to_string()],
2213 state: AssetState::Promoted,
2214 }
2215}
2216
2217fn build_capsule(
2218 run_id: &RunId,
2219 mutation: &PreparedMutation,
2220 receipt: &SandboxReceipt,
2221 report: &ValidationReport,
2222 validation_profile: &str,
2223 gene: &Gene,
2224 blast_radius: &BlastRadius,
2225) -> Result<Capsule, EvolutionError> {
2226 let env = current_env_fingerprint(&receipt.workdir);
2227 let validator_hash = stable_hash_json(report)?;
2228 let diff_hash = mutation.artifact.content_hash.clone();
2229 let id = stable_hash_json(&(run_id, &gene.id, &diff_hash, &mutation.intent.id))?;
2230 Ok(Capsule {
2231 id,
2232 gene_id: gene.id.clone(),
2233 mutation_id: mutation.intent.id.clone(),
2234 run_id: run_id.clone(),
2235 diff_hash,
2236 confidence: 0.7,
2237 env,
2238 outcome: oris_evolution::Outcome {
2239 success: true,
2240 validation_profile: validation_profile.to_string(),
2241 validation_duration_ms: report.duration_ms,
2242 changed_files: receipt
2243 .changed_files
2244 .iter()
2245 .map(|path| path.to_string_lossy().to_string())
2246 .collect(),
2247 validator_hash,
2248 lines_changed: blast_radius.lines_changed,
2249 replay_verified: false,
2250 },
2251 state: AssetState::Promoted,
2252 })
2253}
2254
2255fn current_env_fingerprint(workdir: &Path) -> EnvFingerprint {
2256 let rustc_version = Command::new("rustc")
2257 .arg("--version")
2258 .output()
2259 .ok()
2260 .filter(|output| output.status.success())
2261 .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
2262 .unwrap_or_else(|| "rustc unknown".into());
2263 let cargo_lock_hash = fs::read(workdir.join("Cargo.lock"))
2264 .ok()
2265 .map(|bytes| {
2266 let value = String::from_utf8_lossy(&bytes);
2267 compute_artifact_hash(&value)
2268 })
2269 .unwrap_or_else(|| "missing-cargo-lock".into());
2270 let target_triple = format!(
2271 "{}-unknown-{}",
2272 std::env::consts::ARCH,
2273 std::env::consts::OS
2274 );
2275 EnvFingerprint {
2276 rustc_version,
2277 cargo_lock_hash,
2278 target_triple,
2279 os: std::env::consts::OS.to_string(),
2280 }
2281}
2282
/// Tokenizes `input` on non-alphanumeric boundaries and inserts the
/// normalized tokens into `out`.
///
/// Rust error codes (`e`/`E` + 4 digits) are uppercased to `E####`; all
/// other tokens are lowercased. Tokens shorter than 3 chars are dropped.
fn extend_signal_tokens(out: &mut BTreeSet<String>, input: &str) {
    for raw in input.split(|ch: char| !ch.is_ascii_alphanumeric()) {
        let token = raw.trim();
        if token.is_empty() {
            continue;
        }
        // Rust error-code shape: exactly 5 bytes, leading e/E, four digits.
        let is_error_code = token.len() == 5
            && matches!(token.as_bytes().first(), Some(b'e') | Some(b'E'))
            && token[1..].chars().all(|ch| ch.is_ascii_digit());
        let normalized = if is_error_code {
            let mut rest = token.chars();
            let head = rest.next().map(|ch| ch.to_ascii_uppercase()).unwrap_or('E');
            format!("{head}{}", rest.as_str())
        } else {
            token.to_ascii_lowercase()
        };
        if normalized.len() >= 3 {
            out.insert(normalized);
        }
    }
}
2305
/// Normalizes a free-form signal into a canonical space-joined phrase.
///
/// Tokens are split on non-alphanumerics; Rust error codes become `E####`,
/// everything else is lowercased, and tokens shorter than 3 chars are
/// dropped. Returns `None` when nothing survives normalization.
fn normalize_signal_phrase(input: &str) -> Option<String> {
    let mut parts = Vec::new();
    for raw in input.split(|ch: char| !ch.is_ascii_alphanumeric()) {
        let token = raw.trim();
        if token.is_empty() {
            continue;
        }
        // Same error-code shape as `is_rust_error_code`.
        let is_error_code = token.len() == 5
            && matches!(token.as_bytes().first(), Some(b'e') | Some(b'E'))
            && token[1..].chars().all(|ch| ch.is_ascii_digit());
        let normalized = if is_error_code {
            let mut rest = token.chars();
            let head = rest.next().map(|ch| ch.to_ascii_uppercase()).unwrap_or('E');
            format!("{head}{}", rest.as_str())
        } else {
            token.to_ascii_lowercase()
        };
        if normalized.len() >= 3 {
            parts.push(normalized);
        }
    }
    if parts.is_empty() {
        None
    } else {
        Some(parts.join(" "))
    }
}
2338
2339fn replay_task_descriptor(signals: &[String]) -> (String, String) {
2340 let normalized = signals
2341 .iter()
2342 .filter_map(|signal| normalize_signal_phrase(signal))
2343 .collect::<BTreeSet<_>>()
2344 .into_iter()
2345 .collect::<Vec<_>>();
2346 if normalized.is_empty() {
2347 return ("unknown".into(), "unknown".into());
2348 }
2349 let task_label = normalized
2350 .iter()
2351 .find(|value| {
2352 value.as_str() != "validation passed" && value.as_str() != "validation failed"
2353 })
2354 .cloned()
2355 .unwrap_or_else(|| normalized[0].clone());
2356 let task_class_id = stable_hash_json(&normalized)
2357 .unwrap_or_else(|_| compute_artifact_hash(&normalized.join("\n")));
2358 (task_class_id, task_label)
2359}
2360
/// Reports whether `value` looks like a Rust compiler error code:
/// exactly five bytes, a leading `e` or `E`, then four ASCII digits.
fn is_rust_error_code(value: &str) -> bool {
    if value.len() != 5 {
        return false;
    }
    let bytes = value.as_bytes();
    (bytes[0] == b'e' || bytes[0] == b'E') && bytes[1..].iter().all(u8::is_ascii_digit)
}
2366
2367fn classify_supervised_devloop_request(
2368 request: &SupervisedDevloopRequest,
2369) -> Option<BoundedTaskClass> {
2370 let path = request.proposal.files.first()?.trim();
2371 if request.proposal.files.len() != 1 || path.is_empty() {
2372 return None;
2373 }
2374 let normalized = path.replace('\\', "/");
2375 if normalized.starts_with("docs/") && normalized.ends_with(".md") {
2376 Some(BoundedTaskClass::DocsSingleFile)
2377 } else {
2378 None
2379 }
2380}
2381
2382fn find_declared_mutation(
2383 store: &dyn EvolutionStore,
2384 mutation_id: &MutationId,
2385) -> Result<Option<PreparedMutation>, EvolutionError> {
2386 for stored in store.scan(1)? {
2387 if let EvolutionEvent::MutationDeclared { mutation } = stored.event {
2388 if &mutation.intent.id == mutation_id {
2389 return Ok(Some(mutation));
2390 }
2391 }
2392 }
2393 Ok(None)
2394}
2395
/// Returns promoted genes whose signal set is an exact (case-insensitive)
/// match for the selector input, each paired with its promoted capsules
/// ranked best-first.
///
/// Capsules within a gene are ordered by environment-match factor, then
/// confidence, then id; genes are ordered by their best capsule's
/// environment score, then id. Returns an empty list when the projection
/// cannot be loaded.
fn exact_match_candidates(store: &dyn EvolutionStore, input: &SelectorInput) -> Vec<GeneCandidate> {
    let Ok(projection) = projection_snapshot(store) else {
        return Vec::new();
    };
    let capsules = projection.capsules.clone();
    let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
    // Blank / whitespace-only spec ids mean "no spec filter".
    let requested_spec_id = input
        .spec_id
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty());
    // Lowercased query signals for the case-insensitive set equality below.
    let signal_set = input
        .signals
        .iter()
        .map(|signal| signal.to_ascii_lowercase())
        .collect::<BTreeSet<_>>();
    let mut candidates = projection
        .genes
        .into_iter()
        .filter_map(|gene| {
            // Only promoted genes are eligible.
            if gene.state != AssetState::Promoted {
                return None;
            }
            // When a spec filter is present, the gene must be linked to it.
            if let Some(spec_id) = requested_spec_id {
                let matches_spec = spec_ids_by_gene
                    .get(&gene.id)
                    .map(|values| {
                        values
                            .iter()
                            .any(|value| value.eq_ignore_ascii_case(spec_id))
                    })
                    .unwrap_or(false);
                if !matches_spec {
                    return None;
                }
            }
            let gene_signals = gene
                .signals
                .iter()
                .map(|signal| signal.to_ascii_lowercase())
                .collect::<BTreeSet<_>>();
            if gene_signals == signal_set {
                let mut matched_capsules = capsules
                    .iter()
                    .filter(|capsule| {
                        capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
                    })
                    .cloned()
                    .collect::<Vec<_>>();
                // Best capsule first: env match desc, confidence desc, id asc.
                matched_capsules.sort_by(|left, right| {
                    replay_environment_match_factor(&input.env, &right.env)
                        .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
                        .unwrap_or(std::cmp::Ordering::Equal)
                        .then_with(|| {
                            right
                                .confidence
                                .partial_cmp(&left.confidence)
                                .unwrap_or(std::cmp::Ordering::Equal)
                        })
                        .then_with(|| left.id.cmp(&right.id))
                });
                if matched_capsules.is_empty() {
                    None
                } else {
                    // The gene's score is its best capsule's environment fit.
                    let score = matched_capsules
                        .first()
                        .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
                        .unwrap_or(0.0);
                    Some(GeneCandidate {
                        gene,
                        score,
                        capsules: matched_capsules,
                    })
                }
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    // Stable overall ordering: score desc, then gene id asc.
    candidates.sort_by(|left, right| {
        right
            .score
            .partial_cmp(&left.score)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| left.gene.id.cmp(&right.gene.id))
    });
    candidates
}
2484
/// Returns genes with at least one overlapping normalized signal whose
/// capsules were imported from a remote peer and are still quarantined.
///
/// Unlike [`exact_match_candidates`], signal matching is a fuzzy
/// substring-overlap test, and quarantined genes are eligible too. The
/// candidate score is the max of signal-overlap ratio and the best
/// capsule's environment-match factor.
fn quarantined_remote_exact_match_candidates(
    store: &dyn EvolutionStore,
    input: &SelectorInput,
) -> Vec<GeneCandidate> {
    // Collect the ids of every asset that arrived via a remote import event;
    // only capsules in this set are considered below.
    let remote_asset_ids = store
        .scan(1)
        .ok()
        .map(|events| {
            events
                .into_iter()
                .filter_map(|stored| match stored.event {
                    EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids,
                        ..
                    } => Some(asset_ids),
                    _ => None,
                })
                .flatten()
                .collect::<BTreeSet<_>>()
        })
        .unwrap_or_default();
    if remote_asset_ids.is_empty() {
        return Vec::new();
    }

    let Ok(projection) = projection_snapshot(store) else {
        return Vec::new();
    };
    let capsules = projection.capsules.clone();
    let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
    // Blank / whitespace-only spec ids mean "no spec filter".
    let requested_spec_id = input
        .spec_id
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty());
    let normalized_signals = input
        .signals
        .iter()
        .filter_map(|signal| normalize_signal_phrase(signal))
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect::<Vec<_>>();
    if normalized_signals.is_empty() {
        return Vec::new();
    }
    let mut candidates = projection
        .genes
        .into_iter()
        .filter_map(|gene| {
            if !matches!(gene.state, AssetState::Promoted | AssetState::Quarantined) {
                return None;
            }
            // When a spec filter is present, the gene must be linked to it.
            if let Some(spec_id) = requested_spec_id {
                let matches_spec = spec_ids_by_gene
                    .get(&gene.id)
                    .map(|values| {
                        values
                            .iter()
                            .any(|value| value.eq_ignore_ascii_case(spec_id))
                    })
                    .unwrap_or(false);
                if !matches_spec {
                    return None;
                }
            }
            let normalized_gene_signals = gene
                .signals
                .iter()
                .filter_map(|candidate| normalize_signal_phrase(candidate))
                .collect::<Vec<_>>();
            // Fuzzy overlap: a query signal matches when either side contains
            // the other as a substring.
            let matched_query_count = normalized_signals
                .iter()
                .filter(|signal| {
                    normalized_gene_signals.iter().any(|candidate| {
                        candidate.contains(signal.as_str()) || signal.contains(candidate)
                    })
                })
                .count();
            if matched_query_count == 0 {
                return None;
            }

            // Only remote-imported, still-quarantined capsules qualify.
            let mut matched_capsules = capsules
                .iter()
                .filter(|capsule| {
                    capsule.gene_id == gene.id
                        && capsule.state == AssetState::Quarantined
                        && remote_asset_ids.contains(&capsule.id)
                })
                .cloned()
                .collect::<Vec<_>>();
            // Best capsule first: env match desc, confidence desc, id asc.
            matched_capsules.sort_by(|left, right| {
                replay_environment_match_factor(&input.env, &right.env)
                    .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
                    .unwrap_or(std::cmp::Ordering::Equal)
                    .then_with(|| {
                        right
                            .confidence
                            .partial_cmp(&left.confidence)
                            .unwrap_or(std::cmp::Ordering::Equal)
                    })
                    .then_with(|| left.id.cmp(&right.id))
            });
            if matched_capsules.is_empty() {
                None
            } else {
                // Score by the stronger of signal overlap vs env fit.
                let overlap = matched_query_count as f32 / normalized_signals.len() as f32;
                let env_score = matched_capsules
                    .first()
                    .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
                    .unwrap_or(0.0);
                Some(GeneCandidate {
                    gene,
                    score: overlap.max(env_score),
                    capsules: matched_capsules,
                })
            }
        })
        .collect::<Vec<_>>();
    // Stable overall ordering: score desc, then gene id asc.
    candidates.sort_by(|left, right| {
        right
            .score
            .partial_cmp(&left.score)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| left.gene.id.cmp(&right.gene.id))
    });
    candidates
}
2614
2615fn replay_environment_match_factor(input: &EnvFingerprint, candidate: &EnvFingerprint) -> f32 {
2616 let fields = [
2617 input
2618 .rustc_version
2619 .eq_ignore_ascii_case(&candidate.rustc_version),
2620 input
2621 .cargo_lock_hash
2622 .eq_ignore_ascii_case(&candidate.cargo_lock_hash),
2623 input
2624 .target_triple
2625 .eq_ignore_ascii_case(&candidate.target_triple),
2626 input.os.eq_ignore_ascii_case(&candidate.os),
2627 ];
2628 let matched_fields = fields.into_iter().filter(|matched| *matched).count() as f32;
2629 0.5 + ((matched_fields / 4.0) * 0.5)
2630}
2631
2632fn effective_candidate_score(
2633 candidate: &GeneCandidate,
2634 publishers_by_asset: &BTreeMap<String, String>,
2635 reputation_bias: &BTreeMap<String, f32>,
2636) -> f32 {
2637 let bias = candidate
2638 .capsules
2639 .first()
2640 .and_then(|capsule| publishers_by_asset.get(&capsule.id))
2641 .and_then(|publisher| reputation_bias.get(publisher))
2642 .copied()
2643 .unwrap_or(0.0)
2644 .clamp(0.0, 1.0);
2645 candidate.score * (1.0 + (bias * 0.1))
2646}
2647
2648fn export_promoted_assets_from_store(
2649 store: &dyn EvolutionStore,
2650 sender_id: impl Into<String>,
2651) -> Result<EvolutionEnvelope, EvoKernelError> {
2652 let (events, projection) = scan_projection(store)?;
2653 let genes = projection
2654 .genes
2655 .into_iter()
2656 .filter(|gene| gene.state == AssetState::Promoted)
2657 .collect::<Vec<_>>();
2658 let capsules = projection
2659 .capsules
2660 .into_iter()
2661 .filter(|capsule| capsule.state == AssetState::Promoted)
2662 .collect::<Vec<_>>();
2663 let assets = replay_export_assets(&events, genes, capsules);
2664 Ok(EvolutionEnvelope::publish(sender_id, assets))
2665}
2666
2667fn scan_projection(
2668 store: &dyn EvolutionStore,
2669) -> Result<(Vec<StoredEvolutionEvent>, EvolutionProjection), EvoKernelError> {
2670 store.scan_projection().map_err(store_err)
2671}
2672
2673fn projection_snapshot(store: &dyn EvolutionStore) -> Result<EvolutionProjection, EvoKernelError> {
2674 scan_projection(store).map(|(_, projection)| projection)
2675}
2676
2677fn replay_export_assets(
2678 events: &[StoredEvolutionEvent],
2679 genes: Vec<Gene>,
2680 capsules: Vec<Capsule>,
2681) -> Vec<NetworkAsset> {
2682 let mutation_ids = capsules
2683 .iter()
2684 .map(|capsule| capsule.mutation_id.clone())
2685 .collect::<BTreeSet<_>>();
2686 let mut assets = replay_export_events_for_mutations(events, &mutation_ids);
2687 for gene in genes {
2688 assets.push(NetworkAsset::Gene { gene });
2689 }
2690 for capsule in capsules {
2691 assets.push(NetworkAsset::Capsule { capsule });
2692 }
2693 assets
2694}
2695
/// Re-exports the `MutationDeclared` and `SpecLinked` events belonging to
/// the given mutation ids, preserving log order and deduplicating repeats.
fn replay_export_events_for_mutations(
    events: &[StoredEvolutionEvent],
    mutation_ids: &BTreeSet<String>,
) -> Vec<NetworkAsset> {
    if mutation_ids.is_empty() {
        return Vec::new();
    }

    let mut assets = Vec::new();
    // Dedup guards: `insert` inside the match guards returns false on
    // repeats, so only the first occurrence of each mutation / spec link
    // in the log is exported.
    let mut seen_mutations = BTreeSet::new();
    let mut seen_spec_links = BTreeSet::new();
    for stored in events {
        match &stored.event {
            EvolutionEvent::MutationDeclared { mutation }
                if mutation_ids.contains(mutation.intent.id.as_str())
                    && seen_mutations.insert(mutation.intent.id.clone()) =>
            {
                assets.push(NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared {
                        mutation: mutation.clone(),
                    },
                });
            }
            EvolutionEvent::SpecLinked {
                mutation_id,
                spec_id,
            } if mutation_ids.contains(mutation_id.as_str())
                && seen_spec_links.insert((mutation_id.clone(), spec_id.clone())) =>
            {
                assets.push(NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::SpecLinked {
                        mutation_id: mutation_id.clone(),
                        spec_id: spec_id.clone(),
                    },
                });
            }
            _ => {}
        }
    }

    assets
}
2738
/// Imports a remote evolution envelope into the local store.
///
/// Verifies the envelope's content hash, then appends events for each
/// not-yet-known asset. Remote genes and capsules are always imported in
/// the `Quarantined` state — promotion requires local validation. Remote
/// evolution events are appended only if they are new mutation/spec-link
/// events (or otherwise allow-listed by `should_import_remote_event`).
///
/// The exact append order per asset (import marker → projection/commit →
/// quarantine marker) is the replay contract; do not reorder.
fn import_remote_envelope_into_store(
    store: &dyn EvolutionStore,
    envelope: &EvolutionEnvelope,
    remote_publishers: Option<&Mutex<BTreeMap<String, String>>>,
) -> Result<ImportOutcome, EvoKernelError> {
    if !envelope.verify_content_hash() {
        return Err(EvoKernelError::Validation(
            "invalid evolution envelope hash".into(),
        ));
    }

    let sender_id = normalized_sender_id(&envelope.sender_id);
    let (events, projection) = scan_projection(store)?;
    // Seed dedup sets with everything already known locally.
    let mut known_gene_ids = projection
        .genes
        .into_iter()
        .map(|gene| gene.id)
        .collect::<BTreeSet<_>>();
    let mut known_capsule_ids = projection
        .capsules
        .into_iter()
        .map(|capsule| capsule.id)
        .collect::<BTreeSet<_>>();
    let mut known_mutation_ids = BTreeSet::new();
    let mut known_spec_links = BTreeSet::new();
    for stored in &events {
        match &stored.event {
            EvolutionEvent::MutationDeclared { mutation } => {
                known_mutation_ids.insert(mutation.intent.id.clone());
            }
            EvolutionEvent::SpecLinked {
                mutation_id,
                spec_id,
            } => {
                known_spec_links.insert((mutation_id.clone(), spec_id.clone()));
            }
            _ => {}
        }
    }
    let mut imported_asset_ids = Vec::new();
    for asset in &envelope.assets {
        match asset {
            NetworkAsset::Gene { gene } => {
                // `insert` returning false means we already know this gene.
                if !known_gene_ids.insert(gene.id.clone()) {
                    continue;
                }
                imported_asset_ids.push(gene.id.clone());
                // Force quarantine regardless of the sender's claimed state.
                let mut quarantined_gene = gene.clone();
                quarantined_gene.state = AssetState::Quarantined;
                store
                    .append_event(EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids: vec![gene.id.clone()],
                        sender_id: sender_id.clone(),
                    })
                    .map_err(store_err)?;
                store
                    .append_event(EvolutionEvent::GeneProjected {
                        gene: quarantined_gene.clone(),
                    })
                    .map_err(store_err)?;
                record_remote_publisher_for_asset(remote_publishers, &envelope.sender_id, asset);
                store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: quarantined_gene.id,
                        state: AssetState::Quarantined,
                        reason: "remote asset requires local validation before promotion".into(),
                    })
                    .map_err(store_err)?;
            }
            NetworkAsset::Capsule { capsule } => {
                if !known_capsule_ids.insert(capsule.id.clone()) {
                    continue;
                }
                imported_asset_ids.push(capsule.id.clone());
                store
                    .append_event(EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids: vec![capsule.id.clone()],
                        sender_id: sender_id.clone(),
                    })
                    .map_err(store_err)?;
                // Force quarantine regardless of the sender's claimed state.
                let mut quarantined = capsule.clone();
                quarantined.state = AssetState::Quarantined;
                store
                    .append_event(EvolutionEvent::CapsuleCommitted {
                        capsule: quarantined.clone(),
                    })
                    .map_err(store_err)?;
                record_remote_publisher_for_asset(remote_publishers, &envelope.sender_id, asset);
                store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: quarantined.id,
                    })
                    .map_err(store_err)?;
            }
            NetworkAsset::EvolutionEvent { event } => {
                // Append mutation/spec-link events only once each; other
                // event kinds go through the allow-list.
                let should_append = match event {
                    EvolutionEvent::MutationDeclared { mutation } => {
                        known_mutation_ids.insert(mutation.intent.id.clone())
                    }
                    EvolutionEvent::SpecLinked {
                        mutation_id,
                        spec_id,
                    } => known_spec_links.insert((mutation_id.clone(), spec_id.clone())),
                    _ if should_import_remote_event(event) => true,
                    _ => false,
                };
                if should_append {
                    store.append_event(event.clone()).map_err(store_err)?;
                }
            }
        }
    }

    Ok(ImportOutcome {
        imported_asset_ids,
        accepted: true,
    })
}
2859
2860fn built_in_experience_genes() -> Vec<Gene> {
2861 vec![
2862 Gene {
2863 id: "builtin-experience-docs-rewrite-v1".into(),
2864 signals: vec!["docs.rewrite".into(), "docs".into(), "rewrite".into()],
2865 strategy: vec![
2866 "asset_origin=builtin".into(),
2867 "task_class=docs.rewrite".into(),
2868 "task_label=Docs rewrite".into(),
2869 "template_id=builtin-docs-rewrite-v1".into(),
2870 "summary=baseline docs rewrite experience".into(),
2871 ],
2872 validation: vec!["builtin-template".into(), "origin=builtin".into()],
2873 state: AssetState::Promoted,
2874 },
2875 Gene {
2876 id: "builtin-experience-ci-fix-v1".into(),
2877 signals: vec![
2878 "ci.fix".into(),
2879 "ci".into(),
2880 "test".into(),
2881 "failure".into(),
2882 ],
2883 strategy: vec![
2884 "asset_origin=builtin".into(),
2885 "task_class=ci.fix".into(),
2886 "task_label=CI fix".into(),
2887 "template_id=builtin-ci-fix-v1".into(),
2888 "summary=baseline ci stabilization experience".into(),
2889 ],
2890 validation: vec!["builtin-template".into(), "origin=builtin".into()],
2891 state: AssetState::Promoted,
2892 },
2893 ]
2894}
2895
/// Ensures every built-in experience gene exists in the store, appending
/// the import/projection/promotion event sequence for any that are missing.
///
/// Idempotent: genes already present in the projection are skipped. The
/// four-event append order per gene is the replay contract; do not reorder.
fn ensure_builtin_experience_assets_in_store(
    store: &dyn EvolutionStore,
    sender_id: String,
) -> Result<ImportOutcome, EvoKernelError> {
    let projection = projection_snapshot(store)?;
    let known_gene_ids = projection
        .genes
        .into_iter()
        .map(|gene| gene.id)
        .collect::<BTreeSet<_>>();
    let sender_id = normalized_sender_id(&sender_id);
    let mut imported_asset_ids = Vec::new();

    for gene in built_in_experience_genes() {
        if known_gene_ids.contains(&gene.id) {
            continue;
        }

        // Built-ins are marked as a Local source, unlike remote imports.
        store
            .append_event(EvolutionEvent::RemoteAssetImported {
                source: CandidateSource::Local,
                asset_ids: vec![gene.id.clone()],
                sender_id: sender_id.clone(),
            })
            .map_err(store_err)?;
        store
            .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
            .map_err(store_err)?;
        store
            .append_event(EvolutionEvent::PromotionEvaluated {
                gene_id: gene.id.clone(),
                state: AssetState::Promoted,
                reason: "built-in experience asset promoted for cold-start compatibility".into(),
            })
            .map_err(store_err)?;
        store
            .append_event(EvolutionEvent::GenePromoted {
                gene_id: gene.id.clone(),
            })
            .map_err(store_err)?;
        imported_asset_ids.push(gene.id.clone());
    }

    Ok(ImportOutcome {
        imported_asset_ids,
        accepted: true,
    })
}
2944
/// Looks up a `key=value` metadata entry in a gene's strategy list.
///
/// Keys are matched case-insensitively after trimming; entries without `=`
/// or with an empty (post-trim) value are skipped. Returns the first
/// non-empty trimmed value.
fn strategy_metadata_value(strategy: &[String], key: &str) -> Option<String> {
    for entry in strategy {
        if let Some((entry_key, entry_value)) = entry.split_once('=') {
            if !entry_key.trim().eq_ignore_ascii_case(key) {
                continue;
            }
            let value = entry_value.trim();
            if !value.is_empty() {
                return Some(value.to_string());
            }
        }
    }
    None
}
2960
2961fn ensure_strategy_metadata(strategy: &mut Vec<String>, key: &str, value: &str) {
2962 let normalized = value.trim();
2963 if normalized.is_empty() || strategy_metadata_value(strategy, key).is_some() {
2964 return;
2965 }
2966 strategy.push(format!("{key}={normalized}"));
2967}
2968
/// Revokes the oldest reported-experience genes for a task class once the
/// count of promoted ones exceeds `keep_latest`, and quarantines their
/// capsules.
///
/// Only genes tagged `asset_origin=reported_experience` with a matching
/// (case-insensitive) `task_class` are considered. No-op when the task
/// class is blank or `keep_latest` is 0.
fn enforce_reported_experience_retention(
    store: &dyn EvolutionStore,
    task_class: &str,
    keep_latest: usize,
) -> Result<(), EvoKernelError> {
    let task_class = task_class.trim();
    if task_class.is_empty() || keep_latest == 0 {
        return Ok(());
    }

    let (_, projection) = scan_projection(store)?;
    // Collect (gene id, last-updated timestamp) for every promoted
    // reported-experience gene in this task class.
    let mut candidates = projection
        .genes
        .iter()
        .filter(|gene| gene.state == AssetState::Promoted)
        .filter_map(|gene| {
            let origin = strategy_metadata_value(&gene.strategy, "asset_origin")?;
            if !origin.eq_ignore_ascii_case("reported_experience") {
                return None;
            }
            let gene_task_class = strategy_metadata_value(&gene.strategy, "task_class")?;
            if !gene_task_class.eq_ignore_ascii_case(task_class) {
                return None;
            }
            let updated_at = projection
                .last_updated_at
                .get(&gene.id)
                .cloned()
                .unwrap_or_default();
            Some((gene.id.clone(), updated_at))
        })
        .collect::<Vec<_>>();
    if candidates.len() <= keep_latest {
        return Ok(());
    }

    // Newest first (ties broken by descending id); everything past the
    // retention window is revoked.
    candidates.sort_by(|left, right| right.1.cmp(&left.1).then_with(|| right.0.cmp(&left.0)));
    let stale_gene_ids = candidates
        .into_iter()
        .skip(keep_latest)
        .map(|(gene_id, _)| gene_id)
        .collect::<BTreeSet<_>>();
    if stale_gene_ids.is_empty() {
        return Ok(());
    }

    let reason =
        format!("reported experience retention limit exceeded for task_class={task_class}");
    for gene_id in &stale_gene_ids {
        store
            .append_event(EvolutionEvent::GeneRevoked {
                gene_id: gene_id.clone(),
                reason: reason.clone(),
            })
            .map_err(store_err)?;
    }

    // Capsules belonging to revoked genes are quarantined, not deleted.
    let stale_capsule_ids = projection
        .capsules
        .iter()
        .filter(|capsule| stale_gene_ids.contains(&capsule.gene_id))
        .map(|capsule| capsule.id.clone())
        .collect::<BTreeSet<_>>();
    for capsule_id in stale_capsule_ids {
        store
            .append_event(EvolutionEvent::CapsuleQuarantined { capsule_id })
            .map_err(store_err)?;
    }
    Ok(())
}
3039
3040fn record_reported_experience_in_store(
3041 store: &dyn EvolutionStore,
3042 sender_id: String,
3043 gene_id: String,
3044 signals: Vec<String>,
3045 strategy: Vec<String>,
3046 validation: Vec<String>,
3047) -> Result<ImportOutcome, EvoKernelError> {
3048 let gene_id = gene_id.trim();
3049 if gene_id.is_empty() {
3050 return Err(EvoKernelError::Validation(
3051 "reported experience gene_id must not be empty".into(),
3052 ));
3053 }
3054
3055 let mut unique_signals = BTreeSet::new();
3056 let mut normalized_signals = Vec::new();
3057 for signal in signals {
3058 let normalized = signal.trim().to_ascii_lowercase();
3059 if normalized.is_empty() {
3060 continue;
3061 }
3062 if unique_signals.insert(normalized.clone()) {
3063 normalized_signals.push(normalized);
3064 }
3065 }
3066 if normalized_signals.is_empty() {
3067 return Err(EvoKernelError::Validation(
3068 "reported experience signals must not be empty".into(),
3069 ));
3070 }
3071
3072 let mut unique_strategy = BTreeSet::new();
3073 let mut normalized_strategy = Vec::new();
3074 for entry in strategy {
3075 let normalized = entry.trim().to_string();
3076 if normalized.is_empty() {
3077 continue;
3078 }
3079 if unique_strategy.insert(normalized.clone()) {
3080 normalized_strategy.push(normalized);
3081 }
3082 }
3083 if normalized_strategy.is_empty() {
3084 normalized_strategy.push("reported local replay experience".into());
3085 }
3086 let task_class_id = strategy_metadata_value(&normalized_strategy, "task_class")
3087 .or_else(|| normalized_signals.first().cloned())
3088 .unwrap_or_else(|| "reported-experience".into());
3089 let task_label = strategy_metadata_value(&normalized_strategy, "task_label")
3090 .or_else(|| normalized_signals.first().cloned())
3091 .unwrap_or_else(|| task_class_id.clone());
3092 ensure_strategy_metadata(
3093 &mut normalized_strategy,
3094 "asset_origin",
3095 "reported_experience",
3096 );
3097 ensure_strategy_metadata(&mut normalized_strategy, "task_class", &task_class_id);
3098 ensure_strategy_metadata(&mut normalized_strategy, "task_label", &task_label);
3099
3100 let mut unique_validation = BTreeSet::new();
3101 let mut normalized_validation = Vec::new();
3102 for entry in validation {
3103 let normalized = entry.trim().to_string();
3104 if normalized.is_empty() {
3105 continue;
3106 }
3107 if unique_validation.insert(normalized.clone()) {
3108 normalized_validation.push(normalized);
3109 }
3110 }
3111 if normalized_validation.is_empty() {
3112 normalized_validation.push("a2a.tasks.report".into());
3113 }
3114
3115 let gene = Gene {
3116 id: gene_id.to_string(),
3117 signals: normalized_signals,
3118 strategy: normalized_strategy,
3119 validation: normalized_validation,
3120 state: AssetState::Promoted,
3121 };
3122 let sender_id = normalized_sender_id(&sender_id);
3123
3124 store
3125 .append_event(EvolutionEvent::RemoteAssetImported {
3126 source: CandidateSource::Local,
3127 asset_ids: vec![gene.id.clone()],
3128 sender_id,
3129 })
3130 .map_err(store_err)?;
3131 store
3132 .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
3133 .map_err(store_err)?;
3134 store
3135 .append_event(EvolutionEvent::PromotionEvaluated {
3136 gene_id: gene.id.clone(),
3137 state: AssetState::Promoted,
3138 reason: "trusted local report promoted reusable experience".into(),
3139 })
3140 .map_err(store_err)?;
3141 store
3142 .append_event(EvolutionEvent::GenePromoted {
3143 gene_id: gene.id.clone(),
3144 })
3145 .map_err(store_err)?;
3146 enforce_reported_experience_retention(
3147 store,
3148 &task_class_id,
3149 REPORTED_EXPERIENCE_RETENTION_LIMIT,
3150 )?;
3151
3152 Ok(ImportOutcome {
3153 imported_asset_ids: vec![gene.id],
3154 accepted: true,
3155 })
3156}
3157
/// Trims a sender id, mapping blank/whitespace-only ids to `None`.
fn normalized_sender_id(sender_id: &str) -> Option<String> {
    let trimmed = sender_id.trim();
    (!trimmed.is_empty()).then(|| trimmed.to_string())
}
3166
3167fn record_remote_publisher_for_asset(
3168 remote_publishers: Option<&Mutex<BTreeMap<String, String>>>,
3169 sender_id: &str,
3170 asset: &NetworkAsset,
3171) {
3172 let Some(remote_publishers) = remote_publishers else {
3173 return;
3174 };
3175 let sender_id = sender_id.trim();
3176 if sender_id.is_empty() {
3177 return;
3178 }
3179 let Ok(mut publishers) = remote_publishers.lock() else {
3180 return;
3181 };
3182 match asset {
3183 NetworkAsset::Gene { gene } => {
3184 publishers.insert(gene.id.clone(), sender_id.to_string());
3185 }
3186 NetworkAsset::Capsule { capsule } => {
3187 publishers.insert(capsule.id.clone(), sender_id.to_string());
3188 }
3189 NetworkAsset::EvolutionEvent { .. } => {}
3190 }
3191}
3192
3193fn remote_publishers_by_asset_from_store(store: &dyn EvolutionStore) -> BTreeMap<String, String> {
3194 let Ok(events) = store.scan(1) else {
3195 return BTreeMap::new();
3196 };
3197 remote_publishers_by_asset_from_events(&events)
3198}
3199
/// Folds the event log into a map from asset id to the remote sender that
/// published it.
///
/// An asset only appears in the result once BOTH a `RemoteAssetImported`
/// event (with a non-blank sender) and the asset's materialization event
/// (`GeneProjected` / `CapsuleCommitted`) have been seen — the two can
/// arrive in either order, hence the symmetric bookkeeping below.
fn remote_publishers_by_asset_from_events(
    events: &[StoredEvolutionEvent],
) -> BTreeMap<String, String> {
    // Sender recorded at import time, keyed by asset id (may precede or
    // follow the materialization event).
    let mut imported_asset_publishers = BTreeMap::<String, String>::new();
    let mut known_gene_ids = BTreeSet::<String>::new();
    let mut known_capsule_ids = BTreeSet::<String>::new();
    let mut publishers_by_asset = BTreeMap::<String, String>::new();

    for stored in events {
        match &stored.event {
            EvolutionEvent::RemoteAssetImported {
                source: CandidateSource::Remote,
                asset_ids,
                sender_id,
            } => {
                // Imports without a usable sender id contribute nothing.
                let Some(sender_id) = sender_id.as_deref().and_then(normalized_sender_id) else {
                    continue;
                };
                for asset_id in asset_ids {
                    imported_asset_publishers.insert(asset_id.clone(), sender_id.clone());
                    // Materialization already seen → resolve immediately.
                    if known_gene_ids.contains(asset_id) || known_capsule_ids.contains(asset_id) {
                        publishers_by_asset.insert(asset_id.clone(), sender_id.clone());
                    }
                }
            }
            EvolutionEvent::GeneProjected { gene } => {
                known_gene_ids.insert(gene.id.clone());
                // Import already seen → resolve immediately.
                if let Some(sender_id) = imported_asset_publishers.get(&gene.id) {
                    publishers_by_asset.insert(gene.id.clone(), sender_id.clone());
                }
            }
            EvolutionEvent::CapsuleCommitted { capsule } => {
                known_capsule_ids.insert(capsule.id.clone());
                // Import already seen → resolve immediately.
                if let Some(sender_id) = imported_asset_publishers.get(&capsule.id) {
                    publishers_by_asset.insert(capsule.id.clone(), sender_id.clone());
                }
            }
            _ => {}
        }
    }

    publishers_by_asset
}
3243
3244fn should_import_remote_event(event: &EvolutionEvent) -> bool {
3245 matches!(
3246 event,
3247 EvolutionEvent::MutationDeclared { .. } | EvolutionEvent::SpecLinked { .. }
3248 )
3249}
3250
3251fn fetch_assets_from_store(
3252 store: &dyn EvolutionStore,
3253 responder_id: impl Into<String>,
3254 query: &FetchQuery,
3255) -> Result<FetchResponse, EvoKernelError> {
3256 let (events, projection) = scan_projection(store)?;
3257 let normalized_signals: Vec<String> = query
3258 .signals
3259 .iter()
3260 .map(|signal| signal.trim().to_ascii_lowercase())
3261 .filter(|signal| !signal.is_empty())
3262 .collect();
3263 let matches_any_signal = |candidate: &str| {
3264 if normalized_signals.is_empty() {
3265 return true;
3266 }
3267 let candidate = candidate.to_ascii_lowercase();
3268 normalized_signals
3269 .iter()
3270 .any(|signal| candidate.contains(signal) || signal.contains(&candidate))
3271 };
3272
3273 let matched_genes: Vec<Gene> = projection
3274 .genes
3275 .into_iter()
3276 .filter(|gene| gene.state == AssetState::Promoted)
3277 .filter(|gene| gene.signals.iter().any(|signal| matches_any_signal(signal)))
3278 .collect();
3279 let matched_gene_ids: BTreeSet<String> =
3280 matched_genes.iter().map(|gene| gene.id.clone()).collect();
3281 let matched_capsules: Vec<Capsule> = projection
3282 .capsules
3283 .into_iter()
3284 .filter(|capsule| capsule.state == AssetState::Promoted)
3285 .filter(|capsule| matched_gene_ids.contains(&capsule.gene_id))
3286 .collect();
3287
3288 let assets = replay_export_assets(&events, matched_genes, matched_capsules);
3289
3290 Ok(FetchResponse {
3291 sender_id: responder_id.into(),
3292 assets,
3293 })
3294}
3295
3296fn revoke_assets_in_store(
3297 store: &dyn EvolutionStore,
3298 notice: &RevokeNotice,
3299) -> Result<RevokeNotice, EvoKernelError> {
3300 let projection = projection_snapshot(store)?;
3301 let requested: BTreeSet<String> = notice
3302 .asset_ids
3303 .iter()
3304 .map(|asset_id| asset_id.trim().to_string())
3305 .filter(|asset_id| !asset_id.is_empty())
3306 .collect();
3307 let mut revoked_gene_ids = BTreeSet::new();
3308 let mut quarantined_capsule_ids = BTreeSet::new();
3309
3310 for gene in &projection.genes {
3311 if requested.contains(&gene.id) {
3312 revoked_gene_ids.insert(gene.id.clone());
3313 }
3314 }
3315 for capsule in &projection.capsules {
3316 if requested.contains(&capsule.id) {
3317 quarantined_capsule_ids.insert(capsule.id.clone());
3318 revoked_gene_ids.insert(capsule.gene_id.clone());
3319 }
3320 }
3321 for capsule in &projection.capsules {
3322 if revoked_gene_ids.contains(&capsule.gene_id) {
3323 quarantined_capsule_ids.insert(capsule.id.clone());
3324 }
3325 }
3326
3327 for gene_id in &revoked_gene_ids {
3328 store
3329 .append_event(EvolutionEvent::GeneRevoked {
3330 gene_id: gene_id.clone(),
3331 reason: notice.reason.clone(),
3332 })
3333 .map_err(store_err)?;
3334 }
3335 for capsule_id in &quarantined_capsule_ids {
3336 store
3337 .append_event(EvolutionEvent::CapsuleQuarantined {
3338 capsule_id: capsule_id.clone(),
3339 })
3340 .map_err(store_err)?;
3341 }
3342
3343 let mut affected_ids: Vec<String> = revoked_gene_ids.into_iter().collect();
3344 affected_ids.extend(quarantined_capsule_ids);
3345 affected_ids.sort();
3346 affected_ids.dedup();
3347
3348 Ok(RevokeNotice {
3349 sender_id: notice.sender_id.clone(),
3350 asset_ids: affected_ids,
3351 reason: notice.reason.clone(),
3352 })
3353}
3354
/// Aggregates the full evolution event log plus the current projection into a
/// single metrics snapshot for the observability endpoints (JSON + Prometheus).
fn evolution_metrics_snapshot(
    store: &dyn EvolutionStore,
) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
    let (events, projection) = scan_projection(store)?;
    // gene id -> (task_class_id, task_label) derived from each gene's signals;
    // used to bucket replay reuse counts by deterministic task class.
    let gene_task_classes = projection
        .genes
        .iter()
        .map(|gene| (gene.id.clone(), replay_task_descriptor(&gene.signals)))
        .collect::<BTreeMap<_, _>>();
    let replay_success_total = events
        .iter()
        .filter(|stored| matches!(stored.event, EvolutionEvent::CapsuleReused { .. }))
        .count() as u64;
    // (task_class_id, task_label) -> successful reuse count. Reuse events whose
    // gene is no longer in the projection are not bucketed.
    let mut replay_task_class_totals = BTreeMap::<(String, String), u64>::new();
    for stored in &events {
        if let EvolutionEvent::CapsuleReused { gene_id, .. } = &stored.event {
            if let Some((task_class_id, task_label)) = gene_task_classes.get(gene_id) {
                *replay_task_class_totals
                    .entry((task_class_id.clone(), task_label.clone()))
                    .or_insert(0) += 1;
            }
        }
    }
    let replay_task_classes = replay_task_class_totals
        .into_iter()
        .map(
            |((task_class_id, task_label), replay_success_total)| ReplayTaskClassMetrics {
                task_class_id,
                task_label,
                replay_success_total,
                // Avoided reasoning steps are counted 1:1 with successful reuses.
                reasoning_steps_avoided_total: replay_success_total,
            },
        )
        .collect::<Vec<_>>();
    let replay_reasoning_avoided_total = replay_task_classes
        .iter()
        .map(|entry| entry.reasoning_steps_avoided_total)
        .sum();
    // Failures are validation failures attributed to a specific gene.
    let replay_failures_total = events
        .iter()
        .filter(|stored| is_replay_validation_failure(&stored.event))
        .count() as u64;
    let replay_attempts_total = replay_success_total + replay_failures_total;
    let confidence_revalidations_total = events
        .iter()
        .filter(|stored| is_confidence_revalidation_event(&stored.event))
        .count() as u64;
    let mutation_declared_total = events
        .iter()
        .filter(|stored| matches!(stored.event, EvolutionEvent::MutationDeclared { .. }))
        .count() as u64;
    let promoted_mutations_total = events
        .iter()
        .filter(|stored| matches!(stored.event, EvolutionEvent::GenePromoted { .. }))
        .count() as u64;
    let gene_revocations_total = events
        .iter()
        .filter(|stored| matches!(stored.event, EvolutionEvent::GeneRevoked { .. }))
        .count() as u64;
    // Rolling one-hour window for velocity / revoke-frequency gauges.
    let cutoff = Utc::now() - Duration::hours(1);
    let mutation_velocity_last_hour = count_recent_events(&events, cutoff, |event| {
        matches!(event, EvolutionEvent::MutationDeclared { .. })
    });
    let revoke_frequency_last_hour = count_recent_events(&events, cutoff, |event| {
        matches!(event, EvolutionEvent::GeneRevoked { .. })
    });
    let promoted_genes = projection
        .genes
        .iter()
        .filter(|gene| gene.state == AssetState::Promoted)
        .count() as u64;
    let promoted_capsules = projection
        .capsules
        .iter()
        .filter(|capsule| capsule.state == AssetState::Promoted)
        .count() as u64;

    Ok(EvolutionMetricsSnapshot {
        replay_attempts_total,
        replay_success_total,
        replay_success_rate: safe_ratio(replay_success_total, replay_attempts_total),
        confidence_revalidations_total,
        replay_reasoning_avoided_total,
        replay_task_classes,
        mutation_declared_total,
        promoted_mutations_total,
        promotion_ratio: safe_ratio(promoted_mutations_total, mutation_declared_total),
        gene_revocations_total,
        mutation_velocity_last_hour,
        revoke_frequency_last_hour,
        promoted_genes,
        promoted_capsules,
        // 0 signals an empty log.
        last_event_seq: events.last().map(|stored| stored.seq).unwrap_or(0),
    })
}
3450
3451fn evolution_health_snapshot(snapshot: &EvolutionMetricsSnapshot) -> EvolutionHealthSnapshot {
3452 EvolutionHealthSnapshot {
3453 status: "ok".into(),
3454 last_event_seq: snapshot.last_event_seq,
3455 promoted_genes: snapshot.promoted_genes,
3456 promoted_capsules: snapshot.promoted_capsules,
3457 }
3458}
3459
3460fn render_evolution_metrics_prometheus(
3461 snapshot: &EvolutionMetricsSnapshot,
3462 health: &EvolutionHealthSnapshot,
3463) -> String {
3464 let mut out = String::new();
3465 out.push_str(
3466 "# HELP oris_evolution_replay_attempts_total Total replay attempts that reached validation.\n",
3467 );
3468 out.push_str("# TYPE oris_evolution_replay_attempts_total counter\n");
3469 out.push_str(&format!(
3470 "oris_evolution_replay_attempts_total {}\n",
3471 snapshot.replay_attempts_total
3472 ));
3473 out.push_str("# HELP oris_evolution_replay_success_total Total replay attempts that reused a capsule successfully.\n");
3474 out.push_str("# TYPE oris_evolution_replay_success_total counter\n");
3475 out.push_str(&format!(
3476 "oris_evolution_replay_success_total {}\n",
3477 snapshot.replay_success_total
3478 ));
3479 out.push_str("# HELP oris_evolution_replay_reasoning_avoided_total Total planner steps avoided by successful replay.\n");
3480 out.push_str("# TYPE oris_evolution_replay_reasoning_avoided_total counter\n");
3481 out.push_str(&format!(
3482 "oris_evolution_replay_reasoning_avoided_total {}\n",
3483 snapshot.replay_reasoning_avoided_total
3484 ));
3485 out.push_str("# HELP oris_evolution_replay_utilization_by_task_class_total Successful replay reuse counts grouped by deterministic task class.\n");
3486 out.push_str("# TYPE oris_evolution_replay_utilization_by_task_class_total counter\n");
3487 for task_class in &snapshot.replay_task_classes {
3488 out.push_str(&format!(
3489 "oris_evolution_replay_utilization_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
3490 prometheus_label_value(&task_class.task_class_id),
3491 prometheus_label_value(&task_class.task_label),
3492 task_class.replay_success_total
3493 ));
3494 }
3495 out.push_str("# HELP oris_evolution_replay_reasoning_avoided_by_task_class_total Planner steps avoided by successful replay grouped by deterministic task class.\n");
3496 out.push_str("# TYPE oris_evolution_replay_reasoning_avoided_by_task_class_total counter\n");
3497 for task_class in &snapshot.replay_task_classes {
3498 out.push_str(&format!(
3499 "oris_evolution_replay_reasoning_avoided_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
3500 prometheus_label_value(&task_class.task_class_id),
3501 prometheus_label_value(&task_class.task_label),
3502 task_class.reasoning_steps_avoided_total
3503 ));
3504 }
3505 out.push_str("# HELP oris_evolution_replay_success_rate Successful replay attempts divided by replay attempts that reached validation.\n");
3506 out.push_str("# TYPE oris_evolution_replay_success_rate gauge\n");
3507 out.push_str(&format!(
3508 "oris_evolution_replay_success_rate {:.6}\n",
3509 snapshot.replay_success_rate
3510 ));
3511 out.push_str("# HELP oris_evolution_confidence_revalidations_total Total confidence-driven demotions that require revalidation before replay.\n");
3512 out.push_str("# TYPE oris_evolution_confidence_revalidations_total counter\n");
3513 out.push_str(&format!(
3514 "oris_evolution_confidence_revalidations_total {}\n",
3515 snapshot.confidence_revalidations_total
3516 ));
3517 out.push_str(
3518 "# HELP oris_evolution_mutation_declared_total Total declared mutations recorded in the evolution log.\n",
3519 );
3520 out.push_str("# TYPE oris_evolution_mutation_declared_total counter\n");
3521 out.push_str(&format!(
3522 "oris_evolution_mutation_declared_total {}\n",
3523 snapshot.mutation_declared_total
3524 ));
3525 out.push_str("# HELP oris_evolution_promoted_mutations_total Total mutations promoted by the governor.\n");
3526 out.push_str("# TYPE oris_evolution_promoted_mutations_total counter\n");
3527 out.push_str(&format!(
3528 "oris_evolution_promoted_mutations_total {}\n",
3529 snapshot.promoted_mutations_total
3530 ));
3531 out.push_str(
3532 "# HELP oris_evolution_promotion_ratio Promoted mutations divided by declared mutations.\n",
3533 );
3534 out.push_str("# TYPE oris_evolution_promotion_ratio gauge\n");
3535 out.push_str(&format!(
3536 "oris_evolution_promotion_ratio {:.6}\n",
3537 snapshot.promotion_ratio
3538 ));
3539 out.push_str("# HELP oris_evolution_gene_revocations_total Total gene revocations recorded in the evolution log.\n");
3540 out.push_str("# TYPE oris_evolution_gene_revocations_total counter\n");
3541 out.push_str(&format!(
3542 "oris_evolution_gene_revocations_total {}\n",
3543 snapshot.gene_revocations_total
3544 ));
3545 out.push_str("# HELP oris_evolution_mutation_velocity_last_hour Declared mutations observed in the last hour.\n");
3546 out.push_str("# TYPE oris_evolution_mutation_velocity_last_hour gauge\n");
3547 out.push_str(&format!(
3548 "oris_evolution_mutation_velocity_last_hour {}\n",
3549 snapshot.mutation_velocity_last_hour
3550 ));
3551 out.push_str("# HELP oris_evolution_revoke_frequency_last_hour Gene revocations observed in the last hour.\n");
3552 out.push_str("# TYPE oris_evolution_revoke_frequency_last_hour gauge\n");
3553 out.push_str(&format!(
3554 "oris_evolution_revoke_frequency_last_hour {}\n",
3555 snapshot.revoke_frequency_last_hour
3556 ));
3557 out.push_str("# HELP oris_evolution_promoted_genes Current promoted genes in the evolution projection.\n");
3558 out.push_str("# TYPE oris_evolution_promoted_genes gauge\n");
3559 out.push_str(&format!(
3560 "oris_evolution_promoted_genes {}\n",
3561 snapshot.promoted_genes
3562 ));
3563 out.push_str("# HELP oris_evolution_promoted_capsules Current promoted capsules in the evolution projection.\n");
3564 out.push_str("# TYPE oris_evolution_promoted_capsules gauge\n");
3565 out.push_str(&format!(
3566 "oris_evolution_promoted_capsules {}\n",
3567 snapshot.promoted_capsules
3568 ));
3569 out.push_str("# HELP oris_evolution_store_last_event_seq Last visible append-only evolution event sequence.\n");
3570 out.push_str("# TYPE oris_evolution_store_last_event_seq gauge\n");
3571 out.push_str(&format!(
3572 "oris_evolution_store_last_event_seq {}\n",
3573 snapshot.last_event_seq
3574 ));
3575 out.push_str(
3576 "# HELP oris_evolution_health Evolution observability store health (1 = healthy).\n",
3577 );
3578 out.push_str("# TYPE oris_evolution_health gauge\n");
3579 out.push_str(&format!(
3580 "oris_evolution_health {}\n",
3581 u8::from(health.status == "ok")
3582 ));
3583 out
3584}
3585
3586fn count_recent_events(
3587 events: &[StoredEvolutionEvent],
3588 cutoff: DateTime<Utc>,
3589 predicate: impl Fn(&EvolutionEvent) -> bool,
3590) -> u64 {
3591 events
3592 .iter()
3593 .filter(|stored| {
3594 predicate(&stored.event)
3595 && parse_event_timestamp(&stored.timestamp)
3596 .map(|timestamp| timestamp >= cutoff)
3597 .unwrap_or(false)
3598 })
3599 .count() as u64
3600}
3601
/// Escapes a string for use inside a Prometheus label value: backslash,
/// newline, and double quote become `\\`, `\n`, and `\"` respectively.
/// A single pass over the characters gives the same result as chained
/// `replace` calls (backslash handled first in the original ordering).
fn prometheus_label_value(input: &str) -> String {
    let mut escaped = String::with_capacity(input.len());
    for ch in input.chars() {
        match ch {
            '\\' => escaped.push_str("\\\\"),
            '\n' => escaped.push_str("\\n"),
            '"' => escaped.push_str("\\\""),
            other => escaped.push(other),
        }
    }
    escaped
}
3608
3609fn parse_event_timestamp(raw: &str) -> Option<DateTime<Utc>> {
3610 DateTime::parse_from_rfc3339(raw)
3611 .ok()
3612 .map(|parsed| parsed.with_timezone(&Utc))
3613}
3614
3615fn is_replay_validation_failure(event: &EvolutionEvent) -> bool {
3616 matches!(
3617 event,
3618 EvolutionEvent::ValidationFailed {
3619 gene_id: Some(_),
3620 ..
3621 }
3622 )
3623}
3624
3625fn is_confidence_revalidation_event(event: &EvolutionEvent) -> bool {
3626 matches!(
3627 event,
3628 EvolutionEvent::PromotionEvaluated { state, reason, .. }
3629 if *state == AssetState::Quarantined
3630 && reason.contains("confidence decayed")
3631 )
3632}
3633
/// Division that treats a zero denominator as a zero ratio instead of NaN,
/// so empty logs render as 0.0 in the metrics endpoints.
fn safe_ratio(numerator: u64, denominator: u64) -> f64 {
    match denominator {
        0 => 0.0,
        _ => numerator as f64 / denominator as f64,
    }
}
3641
/// Converts a low-level store error into the kernel error type, keeping only
/// the rendered message (the original error is not preserved as a source).
fn store_err(err: EvolutionError) -> EvoKernelError {
    EvoKernelError::Store(err.to_string())
}
3645
3646#[cfg(test)]
3647mod tests {
3648 use super::*;
3649 use oris_agent_contract::{
3650 AgentRole, CoordinationPlan, CoordinationPrimitive, CoordinationTask,
3651 };
3652 use oris_kernel::{
3653 AllowAllPolicy, InMemoryEventStore, KernelMode, KernelState, NoopActionExecutor,
3654 NoopStepFn, StateUpdatedOnlyReducer,
3655 };
3656 use serde::{Deserialize, Serialize};
3657
    /// Minimal kernel state for tests; carries no data.
    #[derive(Clone, Debug, Default, Serialize, Deserialize)]
    struct TestState;
3660
    impl KernelState for TestState {
        /// State schema version; tests pin it at 1.
        fn version(&self) -> u32 {
            1
        }
    }
3666
3667 fn temp_workspace(name: &str) -> std::path::PathBuf {
3668 let root =
3669 std::env::temp_dir().join(format!("oris-evokernel-{name}-{}", std::process::id()));
3670 if root.exists() {
3671 fs::remove_dir_all(&root).unwrap();
3672 }
3673 fs::create_dir_all(root.join("src")).unwrap();
3674 fs::write(
3675 root.join("Cargo.toml"),
3676 "[package]\nname = \"sample\"\nversion = \"0.1.0\"\nedition = \"2021\"\n",
3677 )
3678 .unwrap();
3679 fs::write(root.join("Cargo.lock"), "# lock\n").unwrap();
3680 fs::write(root.join("src/lib.rs"), "pub fn demo() -> usize { 1 }\n").unwrap();
3681 root
3682 }
3683
    /// Builds a minimal in-memory kernel: no-op executor/step, allow-all
    /// policy, no snapshots or effect sink. Shared via `Arc` with the evo layer.
    fn test_kernel() -> Arc<Kernel<TestState>> {
        Arc::new(Kernel::<TestState> {
            events: Box::new(InMemoryEventStore::new()),
            snaps: None,
            reducer: Box::new(StateUpdatedOnlyReducer),
            exec: Box::new(NoopActionExecutor),
            step: Box::new(NoopStepFn),
            policy: Box::new(AllowAllPolicy),
            effect_sink: None,
            mode: KernelMode::Normal,
        })
    }
3696
    /// Validation plan that only runs `git --version`: fast, but still
    /// exercises the command-stage plumbing end to end.
    fn lightweight_plan() -> ValidationPlan {
        ValidationPlan {
            profile: "test".into(),
            stages: vec![ValidationStage::Command {
                program: "git".into(),
                args: vec!["--version".into()],
                timeout_ms: 5_000,
            }],
        }
    }
3707
    /// Prepared low-risk mutation fixture: adds a one-line `README.md` via a
    /// unified diff applied against `HEAD`.
    fn sample_mutation() -> PreparedMutation {
        prepare_mutation(
            MutationIntent {
                id: "mutation-1".into(),
                intent: "add README".into(),
                target: MutationTarget::Paths {
                    allow: vec!["README.md".into()],
                },
                expected_effect: "repo still builds".into(),
                risk: RiskLevel::Low,
                signals: vec!["missing readme".into()],
                spec_id: None,
            },
            "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1 @@
+# sample
"
            .into(),
            Some("HEAD".into()),
        )
    }
3734
    /// Sandbox policy that only allows `git`, with generous time/output limits
    /// for test runs and no denied environment prefixes.
    fn base_sandbox_policy() -> SandboxPolicy {
        SandboxPolicy {
            allowed_programs: vec!["git".into()],
            max_duration_ms: 60_000,
            max_output_bytes: 1024 * 1024,
            denied_env_prefixes: Vec::new(),
        }
    }
3743
    /// Real command validator configured with the git-only sandbox policy.
    fn command_validator() -> Arc<dyn Validator> {
        Arc::new(CommandValidator::new(base_sandbox_policy()))
    }
3747
3748 fn replay_input(signal: &str) -> SelectorInput {
3749 let rustc_version = std::process::Command::new("rustc")
3750 .arg("--version")
3751 .output()
3752 .ok()
3753 .filter(|output| output.status.success())
3754 .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
3755 .unwrap_or_else(|| "rustc unknown".into());
3756 SelectorInput {
3757 signals: vec![signal.into()],
3758 env: EnvFingerprint {
3759 rustc_version,
3760 cargo_lock_hash: compute_artifact_hash("# lock\n"),
3761 target_triple: format!(
3762 "{}-unknown-{}",
3763 std::env::consts::ARCH,
3764 std::env::consts::OS
3765 ),
3766 os: std::env::consts::OS.into(),
3767 },
3768 spec_id: None,
3769 limit: 1,
3770 }
3771 }
3772
    /// Assembles an `EvoKernel` over the supplied store: local process sandbox
    /// rooted in a fresh temp workspace, default governor, lightweight
    /// validation plan, and the git-only sandbox policy.
    fn build_test_evo_with_store(
        name: &str,
        run_id: &str,
        validator: Arc<dyn Validator>,
        store: Arc<dyn EvolutionStore>,
    ) -> EvoKernel<TestState> {
        let workspace = temp_workspace(name);
        let sandbox: Arc<dyn Sandbox> = Arc::new(oris_sandbox::LocalProcessSandbox::new(
            run_id,
            &workspace,
            std::env::temp_dir(),
        ));
        EvoKernel::new(test_kernel(), sandbox, validator, store)
            .with_governor(Arc::new(DefaultGovernor::new(
                oris_governor::GovernorConfig {
                    // Promote after one validated success so a single round
                    // trip is enough for promotion-path tests.
                    promote_after_successes: 1,
                    ..Default::default()
                },
            )))
            .with_validation_plan(lightweight_plan())
            .with_sandbox_policy(base_sandbox_policy())
    }
3795
    /// Like `build_test_evo_with_store` but creates a fresh JSONL store under
    /// the temp dir (removing any leftovers) and returns it alongside the
    /// kernel so tests can inspect appended events.
    fn build_test_evo(
        name: &str,
        run_id: &str,
        validator: Arc<dyn Validator>,
    ) -> (EvoKernel<TestState>, Arc<dyn EvolutionStore>) {
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-{name}-store-{}",
            std::process::id()
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let evo = build_test_evo_with_store(name, run_id, validator, store.clone());
        (evo, store)
    }
3813
    /// Convenience wrapper around `remote_publish_envelope_with_env` that
    /// derives the environment fingerprint from `replay_input(signal)`, so the
    /// published capsule matches the local replay environment.
    fn remote_publish_envelope(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        signal: &str,
        file_name: &str,
        line: &str,
    ) -> EvolutionEnvelope {
        remote_publish_envelope_with_env(
            sender_id,
            run_id,
            gene_id,
            capsule_id,
            mutation_id,
            signal,
            file_name,
            line,
            replay_input(signal).env,
        )
    }
3836
3837 fn remote_publish_envelope_with_env(
3838 sender_id: &str,
3839 run_id: &str,
3840 gene_id: &str,
3841 capsule_id: &str,
3842 mutation_id: &str,
3843 signal: &str,
3844 file_name: &str,
3845 line: &str,
3846 env: EnvFingerprint,
3847 ) -> EvolutionEnvelope {
3848 let mutation = prepare_mutation(
3849 MutationIntent {
3850 id: mutation_id.into(),
3851 intent: format!("add {file_name}"),
3852 target: MutationTarget::Paths {
3853 allow: vec![file_name.into()],
3854 },
3855 expected_effect: "replay should still validate".into(),
3856 risk: RiskLevel::Low,
3857 signals: vec![signal.into()],
3858 spec_id: None,
3859 },
3860 format!(
3861 "\
3862diff --git a/{file_name} b/{file_name}
3863new file mode 100644
3864index 0000000..1111111
3865--- /dev/null
3866+++ b/{file_name}
3867@@ -0,0 +1 @@
3868+{line}
3869"
3870 ),
3871 Some("HEAD".into()),
3872 );
3873 let gene = Gene {
3874 id: gene_id.into(),
3875 signals: vec![signal.into()],
3876 strategy: vec![file_name.into()],
3877 validation: vec!["test".into()],
3878 state: AssetState::Promoted,
3879 };
3880 let capsule = Capsule {
3881 id: capsule_id.into(),
3882 gene_id: gene_id.into(),
3883 mutation_id: mutation_id.into(),
3884 run_id: run_id.into(),
3885 diff_hash: mutation.artifact.content_hash.clone(),
3886 confidence: 0.9,
3887 env,
3888 outcome: Outcome {
3889 success: true,
3890 validation_profile: "test".into(),
3891 validation_duration_ms: 1,
3892 changed_files: vec![file_name.into()],
3893 validator_hash: "validator-hash".into(),
3894 lines_changed: 1,
3895 replay_verified: false,
3896 },
3897 state: AssetState::Promoted,
3898 };
3899 EvolutionEnvelope::publish(
3900 sender_id,
3901 vec![
3902 NetworkAsset::EvolutionEvent {
3903 event: EvolutionEvent::MutationDeclared { mutation },
3904 },
3905 NetworkAsset::Gene { gene: gene.clone() },
3906 NetworkAsset::Capsule {
3907 capsule: capsule.clone(),
3908 },
3909 NetworkAsset::EvolutionEvent {
3910 event: EvolutionEvent::CapsuleReleased {
3911 capsule_id: capsule.id.clone(),
3912 state: AssetState::Promoted,
3913 },
3914 },
3915 ],
3916 )
3917 }
3918
    /// Builds a full remote publish envelope with independently controllable
    /// mutation and gene signals: a declared mutation (one-line file-add
    /// diff), a promoted gene, a promoted capsule tied to the mutation's diff
    /// hash, and a `CapsuleReleased` event — in that order.
    fn remote_publish_envelope_with_signals(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        mutation_signals: Vec<String>,
        gene_signals: Vec<String>,
        file_name: &str,
        line: &str,
        env: EnvFingerprint,
    ) -> EvolutionEnvelope {
        let mutation = prepare_mutation(
            MutationIntent {
                id: mutation_id.into(),
                intent: format!("add {file_name}"),
                target: MutationTarget::Paths {
                    allow: vec![file_name.into()],
                },
                expected_effect: "replay should still validate".into(),
                risk: RiskLevel::Low,
                signals: mutation_signals,
                spec_id: None,
            },
            format!(
                "\
diff --git a/{file_name} b/{file_name}
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/{file_name}
@@ -0,0 +1 @@
+{line}
"
            ),
            Some("HEAD".into()),
        );
        let gene = Gene {
            id: gene_id.into(),
            signals: gene_signals,
            strategy: vec![file_name.into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        };
        // Capsule diff hash matches the prepared mutation's artifact so the
        // importing side can correlate them.
        let capsule = Capsule {
            id: capsule_id.into(),
            gene_id: gene_id.into(),
            mutation_id: mutation_id.into(),
            run_id: run_id.into(),
            diff_hash: mutation.artifact.content_hash.clone(),
            confidence: 0.9,
            env,
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec![file_name.into()],
                validator_hash: "validator-hash".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        };
        EvolutionEnvelope::publish(
            sender_id,
            vec![
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared { mutation },
                },
                NetworkAsset::Gene { gene: gene.clone() },
                NetworkAsset::Capsule {
                    capsule: capsule.clone(),
                },
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::CapsuleReleased {
                        capsule_id: capsule.id.clone(),
                        state: AssetState::Promoted,
                    },
                },
            ],
        )
    }
4001
    /// Test validator that ignores the sandbox receipt and always reports the
    /// configured outcome.
    struct FixedValidator {
        /// When true every validation run passes; when false every run fails.
        success: bool,
    }
4005
4006 #[async_trait]
4007 impl Validator for FixedValidator {
4008 async fn run(
4009 &self,
4010 _receipt: &SandboxReceipt,
4011 plan: &ValidationPlan,
4012 ) -> Result<ValidationReport, ValidationError> {
4013 Ok(ValidationReport {
4014 success: self.success,
4015 duration_ms: 1,
4016 stages: Vec::new(),
4017 logs: if self.success {
4018 format!("{} ok", plan.profile)
4019 } else {
4020 format!("{} failed", plan.profile)
4021 },
4022 })
4023 }
4024 }
4025
    /// Store wrapper that delegates to a real JSONL store but injects an error
    /// on the N-th `append_event` call, for exercising append error paths.
    struct FailOnAppendStore {
        inner: JsonlEvolutionStore,
        /// 1-based index of the append call that should fail.
        fail_on_call: usize,
        /// Appends seen so far; `Mutex` because `append_event` takes `&self`.
        call_count: Mutex<usize>,
    }
4031
    impl FailOnAppendStore {
        /// Wraps a fresh JSONL store at `root_dir`; the `fail_on_call`-th
        /// append (1-based) will return an injected I/O error.
        fn new(root_dir: std::path::PathBuf, fail_on_call: usize) -> Self {
            Self {
                inner: JsonlEvolutionStore::new(root_dir),
                fail_on_call,
                call_count: Mutex::new(0),
            }
        }
    }
4041
    impl EvolutionStore for FailOnAppendStore {
        /// Counts every append and fails exactly the `fail_on_call`-th one
        /// with an injected error; all other appends pass through.
        fn append_event(&self, event: EvolutionEvent) -> Result<u64, EvolutionError> {
            let mut call_count = self
                .call_count
                .lock()
                // Surface a poisoned lock as a store error rather than panicking.
                .map_err(|_| EvolutionError::Io("test store lock poisoned".into()))?;
            *call_count += 1;
            if *call_count == self.fail_on_call {
                return Err(EvolutionError::Io("injected append failure".into()));
            }
            self.inner.append_event(event)
        }

        /// Reads pass through to the wrapped JSONL store unchanged.
        fn scan(&self, from_seq: u64) -> Result<Vec<StoredEvolutionEvent>, EvolutionError> {
            self.inner.scan(from_seq)
        }

        /// Projection rebuilds pass through to the wrapped store unchanged.
        fn rebuild_projection(&self) -> Result<EvolutionProjection, EvolutionError> {
            self.inner.rebuild_projection()
        }
    }
4063
    /// Sequential planner -> coder plan: both tasks complete in dependency
    /// order and a Planner-to-Coder handoff message targets the coder task.
    #[test]
    fn coordination_planner_to_coder_handoff_is_deterministic() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "ship feature".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "planner".into(),
                    role: AgentRole::Planner,
                    description: "split the work".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "implement the patch".into(),
                    depends_on: vec!["planner".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["planner", "coder"]);
        assert!(result.failed_tasks.is_empty());
        assert!(result.messages.iter().any(|message| {
            message.from_role == AgentRole::Planner
                && message.to_role == AgentRole::Coder
                && message.task_id == "coder"
        }));
    }
4095
    /// The coder task fails while the dependent repair task still runs and
    /// completes, with a Coder-to-Repair message recorded.
    /// NOTE(review): the failure appears to be triggered by the "force-fail"
    /// description text — confirm against `MultiAgentCoordinator`.
    #[test]
    fn coordination_repair_runs_only_after_coder_failure() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "fix broken implementation".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "force-fail initial implementation".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "repair".into(),
                    role: AgentRole::Repair,
                    description: "patch the failed implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["repair"]);
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert!(result.messages.iter().any(|message| {
            message.from_role == AgentRole::Coder
                && message.to_role == AgentRole::Repair
                && message.task_id == "repair"
        }));
    }
4127
    /// A successful coder step is followed by the dependent optimizer step;
    /// both complete and nothing fails.
    #[test]
    fn coordination_optimizer_runs_after_successful_implementation_step() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "ship optimized patch".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "implement a working patch".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "optimizer".into(),
                    role: AgentRole::Optimizer,
                    description: "tighten the implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["coder", "optimizer"]);
        assert!(result.failed_tasks.is_empty());
    }
4154
    /// Parallel primitive: independent tasks in the same wave complete in
    /// sorted-id order ("a-task" before "z-task") and the dependent task runs
    /// in a later wave.
    #[test]
    fn coordination_parallel_waves_preserve_sorted_merge_order() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "parallelize safe tasks".into(),
            primitive: CoordinationPrimitive::Parallel,
            tasks: vec![
                CoordinationTask {
                    id: "z-task".into(),
                    role: AgentRole::Planner,
                    description: "analyze z".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "a-task".into(),
                    role: AgentRole::Coder,
                    description: "implement a".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "mid-task".into(),
                    role: AgentRole::Optimizer,
                    description: "polish after both".into(),
                    depends_on: vec!["z-task".into(), "a-task".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["a-task", "z-task", "mid-task"]);
        assert!(result.failed_tasks.is_empty());
    }
4187
    /// A persistently failing task with `max_retries: 1` produces exactly two
    /// failure messages (initial attempt plus one retry) and then gives up.
    #[test]
    fn coordination_retries_stop_at_max_retries() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "retry then stop".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![CoordinationTask {
                id: "coder".into(),
                role: AgentRole::Coder,
                description: "force-fail this task".into(),
                depends_on: Vec::new(),
            }],
            timeout_ms: 5_000,
            max_retries: 1,
        });

        assert!(result.completed_tasks.is_empty());
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert_eq!(
            result
                .messages
                .iter()
                .filter(|message| message.task_id == "coder" && message.content.contains("failed"))
                .count(),
            2
        );
    }
4214
    /// Conditional primitive: when a dependency fails, downstream tasks are
    /// skipped (with an explanatory message) rather than marked failed.
    #[test]
    fn coordination_conditional_mode_skips_downstream_tasks_on_failure() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "skip blocked follow-up work".into(),
            primitive: CoordinationPrimitive::Conditional,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "force-fail the implementation".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "optimizer".into(),
                    role: AgentRole::Optimizer,
                    description: "only optimize a successful implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert!(result.completed_tasks.is_empty());
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert!(result.messages.iter().any(|message| {
            message.task_id == "optimizer"
                && message
                    .content
                    .contains("skipped due to failed dependency chain")
        }));
        // The skipped task must not be counted as failed.
        assert!(!result
            .failed_tasks
            .iter()
            .any(|task_id| task_id == "optimizer"));
    }
4251
    /// Running a one-stage plan (`git --version`) against a synthetic receipt
    /// yields a report containing exactly one stage result.
    #[tokio::test]
    async fn command_validator_aggregates_stage_reports() {
        let workspace = temp_workspace("validator");
        let receipt = SandboxReceipt {
            mutation_id: "m".into(),
            workdir: workspace,
            applied: true,
            changed_files: Vec::new(),
            patch_hash: "hash".into(),
            stdout_log: std::env::temp_dir().join("stdout.log"),
            stderr_log: std::env::temp_dir().join("stderr.log"),
        };
        let validator = CommandValidator::new(SandboxPolicy {
            allowed_programs: vec!["git".into()],
            max_duration_ms: 1_000,
            max_output_bytes: 1024,
            denied_env_prefixes: Vec::new(),
        });
        let report = validator
            .run(
                &receipt,
                &ValidationPlan {
                    profile: "test".into(),
                    stages: vec![ValidationStage::Command {
                        program: "git".into(),
                        args: vec!["--version".into()],
                        timeout_ms: 1_000,
                    }],
                },
            )
            .await
            .unwrap();
        assert_eq!(report.stages.len(), 1);
    }
4286
4287 #[tokio::test]
4288 async fn capture_successful_mutation_appends_capsule() {
4289 let (evo, store) = build_test_evo("capture", "run-1", command_validator());
4290 let capsule = evo
4291 .capture_successful_mutation(&"run-1".into(), sample_mutation())
4292 .await
4293 .unwrap();
4294 let events = store.scan(1).unwrap();
4295 assert!(events
4296 .iter()
4297 .any(|stored| matches!(stored.event, EvolutionEvent::CapsuleCommitted { .. })));
4298 assert!(!capsule.id.is_empty());
4299 }
4300
4301 #[tokio::test]
4302 async fn replay_hit_records_capsule_reused() {
4303 let (evo, store) = build_test_evo("replay", "run-2", command_validator());
4304 let capsule = evo
4305 .capture_successful_mutation(&"run-2".into(), sample_mutation())
4306 .await
4307 .unwrap();
4308 let replay_run_id = "run-replay".to_string();
4309 let decision = evo
4310 .replay_or_fallback_for_run(&replay_run_id, replay_input("missing readme"))
4311 .await
4312 .unwrap();
4313 assert!(decision.used_capsule);
4314 assert_eq!(decision.capsule_id, Some(capsule.id));
4315 assert!(store.scan(1).unwrap().iter().any(|stored| matches!(
4316 &stored.event,
4317 EvolutionEvent::CapsuleReused {
4318 run_id,
4319 replay_run_id: Some(current_replay_run_id),
4320 ..
4321 } if run_id == "run-2" && current_replay_run_id == &replay_run_id
4322 )));
4323 }
4324
4325 #[tokio::test]
4326 async fn legacy_replay_executor_api_preserves_original_capsule_run_id() {
4327 let capture_run_id = "run-legacy-capture".to_string();
4328 let (evo, store) = build_test_evo("replay-legacy", &capture_run_id, command_validator());
4329 let capsule = evo
4330 .capture_successful_mutation(&capture_run_id, sample_mutation())
4331 .await
4332 .unwrap();
4333 let executor = StoreReplayExecutor {
4334 sandbox: evo.sandbox.clone(),
4335 validator: evo.validator.clone(),
4336 store: evo.store.clone(),
4337 selector: evo.selector.clone(),
4338 governor: evo.governor.clone(),
4339 economics: Some(evo.economics.clone()),
4340 remote_publishers: Some(evo.remote_publishers.clone()),
4341 stake_policy: evo.stake_policy.clone(),
4342 };
4343
4344 let decision = executor
4345 .try_replay(
4346 &replay_input("missing readme"),
4347 &evo.sandbox_policy,
4348 &evo.validation_plan,
4349 )
4350 .await
4351 .unwrap();
4352
4353 assert!(decision.used_capsule);
4354 assert_eq!(decision.capsule_id, Some(capsule.id));
4355 assert!(store.scan(1).unwrap().iter().any(|stored| matches!(
4356 &stored.event,
4357 EvolutionEvent::CapsuleReused {
4358 run_id,
4359 replay_run_id: None,
4360 ..
4361 } if run_id == &capture_run_id
4362 )));
4363 }
4364
4365 #[tokio::test]
4366 async fn metrics_snapshot_tracks_replay_promotion_and_revocation_signals() {
4367 let (evo, _) = build_test_evo("metrics", "run-metrics", command_validator());
4368 let capsule = evo
4369 .capture_successful_mutation(&"run-metrics".into(), sample_mutation())
4370 .await
4371 .unwrap();
4372 let decision = evo
4373 .replay_or_fallback(replay_input("missing readme"))
4374 .await
4375 .unwrap();
4376 assert!(decision.used_capsule);
4377
4378 evo.revoke_assets(&RevokeNotice {
4379 sender_id: "node-metrics".into(),
4380 asset_ids: vec![capsule.id.clone()],
4381 reason: "manual test revoke".into(),
4382 })
4383 .unwrap();
4384
4385 let snapshot = evo.metrics_snapshot().unwrap();
4386 assert_eq!(snapshot.replay_attempts_total, 1);
4387 assert_eq!(snapshot.replay_success_total, 1);
4388 assert_eq!(snapshot.replay_success_rate, 1.0);
4389 assert_eq!(snapshot.confidence_revalidations_total, 0);
4390 assert_eq!(snapshot.replay_reasoning_avoided_total, 1);
4391 assert_eq!(snapshot.replay_task_classes.len(), 1);
4392 assert_eq!(snapshot.replay_task_classes[0].replay_success_total, 1);
4393 assert_eq!(
4394 snapshot.replay_task_classes[0].reasoning_steps_avoided_total,
4395 1
4396 );
4397 assert_eq!(snapshot.confidence_revalidations_total, 0);
4398 assert_eq!(snapshot.mutation_declared_total, 1);
4399 assert_eq!(snapshot.promoted_mutations_total, 1);
4400 assert_eq!(snapshot.promotion_ratio, 1.0);
4401 assert_eq!(snapshot.gene_revocations_total, 1);
4402 assert_eq!(snapshot.mutation_velocity_last_hour, 1);
4403 assert_eq!(snapshot.revoke_frequency_last_hour, 1);
4404 assert_eq!(snapshot.promoted_genes, 0);
4405 assert_eq!(snapshot.promoted_capsules, 0);
4406
4407 let rendered = evo.render_metrics_prometheus().unwrap();
4408 assert!(rendered.contains("oris_evolution_replay_reasoning_avoided_total 1"));
4409 assert!(rendered.contains("oris_evolution_replay_utilization_by_task_class_total"));
4410 assert!(rendered.contains("oris_evolution_replay_reasoning_avoided_by_task_class_total"));
4411 assert!(rendered.contains("oris_evolution_replay_success_rate 1.000000"));
4412 assert!(rendered.contains("oris_evolution_confidence_revalidations_total 0"));
4413 assert!(rendered.contains("oris_evolution_promotion_ratio 1.000000"));
4414 assert!(rendered.contains("oris_evolution_revoke_frequency_last_hour 1"));
4415 assert!(rendered.contains("oris_evolution_mutation_velocity_last_hour 1"));
4416 assert!(rendered.contains("oris_evolution_health 1"));
4417 }
4418
4419 #[test]
4420 fn stale_replay_targets_require_confidence_revalidation() {
4421 let now = Utc::now();
4422 let projection = EvolutionProjection {
4423 genes: vec![Gene {
4424 id: "gene-stale".into(),
4425 signals: vec!["missing readme".into()],
4426 strategy: vec!["README.md".into()],
4427 validation: vec!["test".into()],
4428 state: AssetState::Promoted,
4429 }],
4430 capsules: vec![Capsule {
4431 id: "capsule-stale".into(),
4432 gene_id: "gene-stale".into(),
4433 mutation_id: "mutation-stale".into(),
4434 run_id: "run-stale".into(),
4435 diff_hash: "hash".into(),
4436 confidence: 0.8,
4437 env: replay_input("missing readme").env,
4438 outcome: Outcome {
4439 success: true,
4440 validation_profile: "test".into(),
4441 validation_duration_ms: 1,
4442 changed_files: vec!["README.md".into()],
4443 validator_hash: "validator".into(),
4444 lines_changed: 1,
4445 replay_verified: false,
4446 },
4447 state: AssetState::Promoted,
4448 }],
4449 reuse_counts: BTreeMap::from([("gene-stale".into(), 1)]),
4450 attempt_counts: BTreeMap::from([("gene-stale".into(), 1)]),
4451 last_updated_at: BTreeMap::from([(
4452 "gene-stale".into(),
4453 (now - Duration::hours(48)).to_rfc3339(),
4454 )]),
4455 spec_ids_by_gene: BTreeMap::new(),
4456 };
4457
4458 let targets = stale_replay_revalidation_targets(&projection, now);
4459
4460 assert_eq!(targets.len(), 1);
4461 assert_eq!(targets[0].gene_id, "gene-stale");
4462 assert_eq!(targets[0].capsule_ids, vec!["capsule-stale".to_string()]);
4463 assert!(targets[0].decayed_confidence < MIN_REPLAY_CONFIDENCE);
4464 }
4465
4466 #[tokio::test]
4467 async fn remote_replay_prefers_closest_environment_match() {
4468 let (evo, _) = build_test_evo("remote-env", "run-remote-env", command_validator());
4469 let input = replay_input("env-signal");
4470
4471 let envelope_a = remote_publish_envelope_with_env(
4472 "node-a",
4473 "run-remote-a",
4474 "gene-a",
4475 "capsule-a",
4476 "mutation-a",
4477 "env-signal",
4478 "A.md",
4479 "# from a",
4480 input.env.clone(),
4481 );
4482 let envelope_b = remote_publish_envelope_with_env(
4483 "node-b",
4484 "run-remote-b",
4485 "gene-b",
4486 "capsule-b",
4487 "mutation-b",
4488 "env-signal",
4489 "B.md",
4490 "# from b",
4491 EnvFingerprint {
4492 rustc_version: "old-rustc".into(),
4493 cargo_lock_hash: "other-lock".into(),
4494 target_triple: "aarch64-apple-darwin".into(),
4495 os: "linux".into(),
4496 },
4497 );
4498
4499 evo.import_remote_envelope(&envelope_a).unwrap();
4500 evo.import_remote_envelope(&envelope_b).unwrap();
4501
4502 let decision = evo.replay_or_fallback(input).await.unwrap();
4503
4504 assert!(decision.used_capsule);
4505 assert_eq!(decision.capsule_id, Some("capsule-a".into()));
4506 assert!(!decision.fallback_to_planner);
4507 }
4508
4509 #[test]
4510 fn remote_cold_start_scoring_caps_distinct_query_coverage() {
4511 let (evo, _) = build_test_evo("remote-score", "run-remote-score", command_validator());
4512 let input = replay_input("missing readme");
4513
4514 let exact = remote_publish_envelope_with_signals(
4515 "node-exact",
4516 "run-remote-exact",
4517 "gene-exact",
4518 "capsule-exact",
4519 "mutation-exact",
4520 vec!["missing readme".into()],
4521 vec!["missing readme".into()],
4522 "EXACT.md",
4523 "# exact",
4524 input.env.clone(),
4525 );
4526 let overlapping = remote_publish_envelope_with_signals(
4527 "node-overlap",
4528 "run-remote-overlap",
4529 "gene-overlap",
4530 "capsule-overlap",
4531 "mutation-overlap",
4532 vec!["missing readme".into()],
4533 vec!["missing".into(), "readme".into()],
4534 "OVERLAP.md",
4535 "# overlap",
4536 input.env.clone(),
4537 );
4538
4539 evo.import_remote_envelope(&exact).unwrap();
4540 evo.import_remote_envelope(&overlapping).unwrap();
4541
4542 let candidates = quarantined_remote_exact_match_candidates(evo.store.as_ref(), &input);
4543 let exact_candidate = candidates
4544 .iter()
4545 .find(|candidate| candidate.gene.id == "gene-exact")
4546 .unwrap();
4547 let overlap_candidate = candidates
4548 .iter()
4549 .find(|candidate| candidate.gene.id == "gene-overlap")
4550 .unwrap();
4551
4552 assert_eq!(exact_candidate.score, 1.0);
4553 assert_eq!(overlap_candidate.score, 1.0);
4554 assert!(candidates.iter().all(|candidate| candidate.score <= 1.0));
4555 }
4556
4557 #[test]
4558 fn exact_match_candidates_respect_spec_linked_events() {
4559 let (evo, _) = build_test_evo(
4560 "spec-linked-filter",
4561 "run-spec-linked-filter",
4562 command_validator(),
4563 );
4564 let mut input = replay_input("missing readme");
4565 input.spec_id = Some("spec-readme".into());
4566
4567 let mut mutation = sample_mutation();
4568 mutation.intent.id = "mutation-spec-linked".into();
4569 mutation.intent.spec_id = None;
4570 let gene = Gene {
4571 id: "gene-spec-linked".into(),
4572 signals: vec!["missing readme".into()],
4573 strategy: vec!["README.md".into()],
4574 validation: vec!["test".into()],
4575 state: AssetState::Promoted,
4576 };
4577 let capsule = Capsule {
4578 id: "capsule-spec-linked".into(),
4579 gene_id: gene.id.clone(),
4580 mutation_id: mutation.intent.id.clone(),
4581 run_id: "run-spec-linked".into(),
4582 diff_hash: mutation.artifact.content_hash.clone(),
4583 confidence: 0.9,
4584 env: input.env.clone(),
4585 outcome: Outcome {
4586 success: true,
4587 validation_profile: "test".into(),
4588 validation_duration_ms: 1,
4589 changed_files: vec!["README.md".into()],
4590 validator_hash: "validator-hash".into(),
4591 lines_changed: 1,
4592 replay_verified: false,
4593 },
4594 state: AssetState::Promoted,
4595 };
4596
4597 evo.store
4598 .append_event(EvolutionEvent::MutationDeclared { mutation })
4599 .unwrap();
4600 evo.store
4601 .append_event(EvolutionEvent::GeneProjected { gene })
4602 .unwrap();
4603 evo.store
4604 .append_event(EvolutionEvent::CapsuleCommitted { capsule })
4605 .unwrap();
4606 evo.store
4607 .append_event(EvolutionEvent::SpecLinked {
4608 mutation_id: "mutation-spec-linked".into(),
4609 spec_id: "spec-readme".into(),
4610 })
4611 .unwrap();
4612
4613 let candidates = exact_match_candidates(evo.store.as_ref(), &input);
4614 assert_eq!(candidates.len(), 1);
4615 assert_eq!(candidates[0].gene.id, "gene-spec-linked");
4616 }
4617
    #[tokio::test]
    async fn remote_capsule_stays_quarantined_until_first_successful_replay() {
        // Imported remote assets must start quarantined (and be excluded from
        // export) until a local replay validates them, at which point both
        // gene and capsule are promoted and become exportable.
        let (evo, store) = build_test_evo(
            "remote-quarantine",
            "run-remote-quarantine",
            command_validator(),
        );
        let envelope = remote_publish_envelope(
            "node-remote",
            "run-remote-quarantine",
            "gene-remote",
            "capsule-remote",
            "mutation-remote",
            "remote-signal",
            "REMOTE.md",
            "# from remote",
        );

        evo.import_remote_envelope(&envelope).unwrap();

        // Phase 1: freshly imported assets are quarantined and not exported.
        let before_replay = store.rebuild_projection().unwrap();
        let imported_gene = before_replay
            .genes
            .iter()
            .find(|gene| gene.id == "gene-remote")
            .unwrap();
        let imported_capsule = before_replay
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-remote")
            .unwrap();
        assert_eq!(imported_gene.state, AssetState::Quarantined);
        assert_eq!(imported_capsule.state, AssetState::Quarantined);
        let exported_before_replay =
            export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
        assert!(exported_before_replay.assets.is_empty());

        // Phase 2: a successful local replay of the remote capsule...
        let decision = evo
            .replay_or_fallback(replay_input("remote-signal"))
            .await
            .unwrap();

        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-remote".into()));

        // ...promotes both assets and makes them exportable; the export
        // carries three assets, including the backing MutationDeclared event.
        let after_replay = store.rebuild_projection().unwrap();
        let promoted_gene = after_replay
            .genes
            .iter()
            .find(|gene| gene.id == "gene-remote")
            .unwrap();
        let released_capsule = after_replay
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-remote")
            .unwrap();
        assert_eq!(promoted_gene.state, AssetState::Promoted);
        assert_eq!(released_capsule.state, AssetState::Promoted);
        let exported_after_replay =
            export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
        assert_eq!(exported_after_replay.assets.len(), 3);
        assert!(exported_after_replay.assets.iter().any(|asset| matches!(
            asset,
            NetworkAsset::EvolutionEvent {
                event: EvolutionEvent::MutationDeclared { .. }
            }
        )));
    }
4686
4687 #[tokio::test]
4688 async fn publish_local_assets_include_mutation_payload_for_remote_replay() {
4689 let (source, source_store) = build_test_evo(
4690 "remote-publish-export",
4691 "run-remote-publish-export",
4692 command_validator(),
4693 );
4694 source
4695 .capture_successful_mutation(&"run-remote-publish-export".into(), sample_mutation())
4696 .await
4697 .unwrap();
4698 let envelope = EvolutionNetworkNode::new(source_store.clone())
4699 .publish_local_assets("node-source")
4700 .unwrap();
4701 assert!(envelope.assets.iter().any(|asset| matches!(
4702 asset,
4703 NetworkAsset::EvolutionEvent {
4704 event: EvolutionEvent::MutationDeclared { mutation }
4705 } if mutation.intent.id == "mutation-1"
4706 )));
4707
4708 let (remote, _) = build_test_evo(
4709 "remote-publish-import",
4710 "run-remote-publish-import",
4711 command_validator(),
4712 );
4713 remote.import_remote_envelope(&envelope).unwrap();
4714
4715 let decision = remote
4716 .replay_or_fallback(replay_input("missing readme"))
4717 .await
4718 .unwrap();
4719
4720 assert!(decision.used_capsule);
4721 assert!(!decision.fallback_to_planner);
4722 }
4723
4724 #[tokio::test]
4725 async fn fetch_assets_include_mutation_payload_for_remote_replay() {
4726 let (evo, store) = build_test_evo(
4727 "remote-fetch-export",
4728 "run-remote-fetch",
4729 command_validator(),
4730 );
4731 evo.capture_successful_mutation(&"run-remote-fetch".into(), sample_mutation())
4732 .await
4733 .unwrap();
4734
4735 let response = EvolutionNetworkNode::new(store.clone())
4736 .fetch_assets(
4737 "node-source",
4738 &FetchQuery {
4739 sender_id: "node-client".into(),
4740 signals: vec!["missing readme".into()],
4741 },
4742 )
4743 .unwrap();
4744
4745 assert!(response.assets.iter().any(|asset| matches!(
4746 asset,
4747 NetworkAsset::EvolutionEvent {
4748 event: EvolutionEvent::MutationDeclared { mutation }
4749 } if mutation.intent.id == "mutation-1"
4750 )));
4751 assert!(response
4752 .assets
4753 .iter()
4754 .any(|asset| matches!(asset, NetworkAsset::Gene { .. })));
4755 assert!(response
4756 .assets
4757 .iter()
4758 .any(|asset| matches!(asset, NetworkAsset::Capsule { .. })));
4759 }
4760
4761 #[test]
4762 fn partial_remote_import_keeps_publisher_for_already_imported_assets() {
4763 let store_root = std::env::temp_dir().join(format!(
4764 "oris-evokernel-remote-partial-store-{}",
4765 std::process::id()
4766 ));
4767 if store_root.exists() {
4768 fs::remove_dir_all(&store_root).unwrap();
4769 }
4770 let store: Arc<dyn EvolutionStore> = Arc::new(FailOnAppendStore::new(store_root, 4));
4771 let evo = build_test_evo_with_store(
4772 "remote-partial",
4773 "run-remote-partial",
4774 command_validator(),
4775 store.clone(),
4776 );
4777 let envelope = remote_publish_envelope(
4778 "node-partial",
4779 "run-remote-partial",
4780 "gene-partial",
4781 "capsule-partial",
4782 "mutation-partial",
4783 "partial-signal",
4784 "PARTIAL.md",
4785 "# partial",
4786 );
4787
4788 let result = evo.import_remote_envelope(&envelope);
4789
4790 assert!(matches!(result, Err(EvoKernelError::Store(_))));
4791 let projection = store.rebuild_projection().unwrap();
4792 assert!(projection
4793 .genes
4794 .iter()
4795 .any(|gene| gene.id == "gene-partial"));
4796 assert!(projection.capsules.is_empty());
4797 let publishers = evo.remote_publishers.lock().unwrap();
4798 assert_eq!(
4799 publishers.get("gene-partial").map(String::as_str),
4800 Some("node-partial")
4801 );
4802 }
4803
    #[test]
    fn retry_remote_import_after_partial_failure_only_imports_missing_assets() {
        // A first import aborts mid-way (the store rejects one append); a
        // retry with the same envelope must import only the asset that is
        // still missing, never duplicating events for assets that landed.
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-remote-partial-retry-store-{}",
            next_id("t")
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        // Store configured to fail on the 4th append — presumably partway
        // through the import, after the gene but before the capsule.
        let store: Arc<dyn EvolutionStore> = Arc::new(FailOnAppendStore::new(store_root, 4));
        let evo = build_test_evo_with_store(
            "remote-partial-retry",
            "run-remote-partial-retry",
            command_validator(),
            store.clone(),
        );
        let envelope = remote_publish_envelope(
            "node-partial",
            "run-remote-partial-retry",
            "gene-partial-retry",
            "capsule-partial-retry",
            "mutation-partial-retry",
            "partial-retry-signal",
            "PARTIAL_RETRY.md",
            "# partial retry",
        );

        // The first attempt surfaces the store failure.
        let first = evo.import_remote_envelope(&envelope);
        assert!(matches!(first, Err(EvoKernelError::Store(_))));

        // The retry succeeds and reports only the capsule as newly imported.
        let retry = evo.import_remote_envelope(&envelope).unwrap();

        assert_eq!(retry.imported_asset_ids, vec!["capsule-partial-retry"]);
        let projection = store.rebuild_projection().unwrap();
        let gene = projection
            .genes
            .iter()
            .find(|gene| gene.id == "gene-partial-retry")
            .unwrap();
        assert_eq!(gene.state, AssetState::Quarantined);
        let capsule = projection
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-partial-retry")
            .unwrap();
        assert_eq!(capsule.state, AssetState::Quarantined);
        // The retry must not double-count the import attempt.
        assert_eq!(projection.attempt_counts["gene-partial-retry"], 1);

        // Each event kind appears exactly once in the log despite two imports.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::MutationDeclared { mutation }
                        if mutation.intent.id == "mutation-partial-retry"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::GeneProjected { gene } if gene.id == "gene-partial-retry"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::CapsuleCommitted { capsule }
                        if capsule.id == "capsule-partial-retry"
                    )
                })
                .count(),
            1
        );
    }
4892
    #[tokio::test]
    async fn duplicate_remote_import_does_not_requarantine_locally_validated_assets() {
        // Importing the same envelope twice must be idempotent: once a remote
        // asset has been validated locally (and promoted), a duplicate import
        // may not demote it back to quarantine, bump attempt counts, or
        // append duplicate events.
        let (evo, store) = build_test_evo(
            "remote-idempotent",
            "run-remote-idempotent",
            command_validator(),
        );
        let envelope = remote_publish_envelope(
            "node-idempotent",
            "run-remote-idempotent",
            "gene-idempotent",
            "capsule-idempotent",
            "mutation-idempotent",
            "idempotent-signal",
            "IDEMPOTENT.md",
            "# idempotent",
        );

        // First import brings in both assets.
        let first = evo.import_remote_envelope(&envelope).unwrap();
        assert_eq!(
            first.imported_asset_ids,
            vec!["gene-idempotent", "capsule-idempotent"]
        );

        // A successful replay validates the imported capsule locally.
        let decision = evo
            .replay_or_fallback(replay_input("idempotent-signal"))
            .await
            .unwrap();
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-idempotent".into()));

        // Snapshot the promoted state and attempt count before re-importing.
        let projection_before = store.rebuild_projection().unwrap();
        let attempts_before = projection_before.attempt_counts["gene-idempotent"];
        let gene_before = projection_before
            .genes
            .iter()
            .find(|gene| gene.id == "gene-idempotent")
            .unwrap();
        assert_eq!(gene_before.state, AssetState::Promoted);
        let capsule_before = projection_before
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-idempotent")
            .unwrap();
        assert_eq!(capsule_before.state, AssetState::Promoted);

        // The duplicate import is a no-op.
        let second = evo.import_remote_envelope(&envelope).unwrap();
        assert!(second.imported_asset_ids.is_empty());

        // States and attempt counts are unchanged by the duplicate import.
        let projection_after = store.rebuild_projection().unwrap();
        assert_eq!(
            projection_after.attempt_counts["gene-idempotent"],
            attempts_before
        );
        let gene_after = projection_after
            .genes
            .iter()
            .find(|gene| gene.id == "gene-idempotent")
            .unwrap();
        assert_eq!(gene_after.state, AssetState::Promoted);
        let capsule_after = projection_after
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-idempotent")
            .unwrap();
        assert_eq!(capsule_after.state, AssetState::Promoted);

        // The event log holds exactly one of each event despite two imports.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::MutationDeclared { mutation }
                        if mutation.intent.id == "mutation-idempotent"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::GeneProjected { gene } if gene.id == "gene-idempotent"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::CapsuleCommitted { capsule }
                        if capsule.id == "capsule-idempotent"
                    )
                })
                .count(),
            1
        );
    }
5000
5001 #[tokio::test]
5002 async fn insufficient_evu_blocks_publish_but_not_local_replay() {
5003 let (evo, _) = build_test_evo("stake-gate", "run-stake", command_validator());
5004 let capsule = evo
5005 .capture_successful_mutation(&"run-stake".into(), sample_mutation())
5006 .await
5007 .unwrap();
5008 let publish = evo.export_promoted_assets("node-local");
5009 assert!(matches!(publish, Err(EvoKernelError::Validation(_))));
5010
5011 let decision = evo
5012 .replay_or_fallback(replay_input("missing readme"))
5013 .await
5014 .unwrap();
5015 assert!(decision.used_capsule);
5016 assert_eq!(decision.capsule_id, Some(capsule.id));
5017 }
5018
    #[tokio::test]
    async fn second_replay_validation_failure_revokes_gene_immediately() {
        // NOTE(review): despite the name, the assertions below expect the
        // gene to stay Promoted, no GeneRevoked event, and exactly one
        // ValidationFailed event after two failed replays — confirm whether
        // the test name or the expectations are stale.
        let (capturer, store) = build_test_evo("revoke-replay", "run-capture", command_validator());
        let capsule = capturer
            .capture_successful_mutation(&"run-capture".into(), sample_mutation())
            .await
            .unwrap();

        // Re-open the same store behind a validator that always fails.
        let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
        let failing_replay = build_test_evo_with_store(
            "revoke-replay",
            "run-replay-fail",
            failing_validator,
            store.clone(),
        );

        let first = failing_replay
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();
        let second = failing_replay
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();

        // Both failed replays fall back to the planner.
        assert!(!first.used_capsule);
        assert!(first.fallback_to_planner);
        assert!(!second.used_capsule);
        assert!(second.fallback_to_planner);

        // The gene and capsule remain promoted in the projection.
        let projection = store.rebuild_projection().unwrap();
        let gene = projection
            .genes
            .iter()
            .find(|gene| gene.id == capsule.gene_id)
            .unwrap();
        assert_eq!(gene.state, AssetState::Promoted);
        let committed_capsule = projection
            .capsules
            .iter()
            .find(|current| current.id == capsule.id)
            .unwrap();
        assert_eq!(committed_capsule.state, AssetState::Promoted);

        // Exactly one ValidationFailed event is recorded, and no GeneRevoked.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::ValidationFailed {
                            gene_id: Some(gene_id),
                            ..
                        } if gene_id == &capsule.gene_id
                    )
                })
                .count(),
            1
        );
        assert!(!events.iter().any(|stored| {
            matches!(
                &stored.event,
                EvolutionEvent::GeneRevoked { gene_id, .. } if gene_id == &capsule.gene_id
            )
        }));

        // A later replay with a passing validator still refuses the capsule:
        // the decision reason reports it is below the replay threshold.
        let recovered = build_test_evo_with_store(
            "revoke-replay",
            "run-replay-check",
            command_validator(),
            store.clone(),
        );
        let after_revoke = recovered
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();
        assert!(!after_revoke.used_capsule);
        assert!(after_revoke.fallback_to_planner);
        assert!(after_revoke.reason.contains("below replay threshold"));
    }
5100
    #[tokio::test]
    async fn remote_reuse_success_rewards_publisher_and_biases_selection() {
        // Seed the ledger with a low-reputation node-a and a high-reputation
        // node-b: a successful reuse of node-b's capsule must credit node-b's
        // account with the reuse reward and keep its selector bias above
        // node-a's.
        let ledger = Arc::new(Mutex::new(EvuLedger {
            accounts: vec![],
            reputations: vec![
                oris_economics::ReputationRecord {
                    node_id: "node-a".into(),
                    publish_success_rate: 0.4,
                    validator_accuracy: 0.4,
                    reuse_impact: 0,
                },
                oris_economics::ReputationRecord {
                    node_id: "node-b".into(),
                    publish_success_rate: 0.95,
                    validator_accuracy: 0.95,
                    reuse_impact: 8,
                },
            ],
        }));
        let (evo, _) = build_test_evo("remote-success", "run-remote", command_validator());
        let evo = evo.with_economics(ledger.clone());

        // Both nodes publish a capsule answering the same signal.
        let envelope_a = remote_publish_envelope(
            "node-a",
            "run-remote-a",
            "gene-a",
            "capsule-a",
            "mutation-a",
            "shared-signal",
            "A.md",
            "# from a",
        );
        let envelope_b = remote_publish_envelope(
            "node-b",
            "run-remote-b",
            "gene-b",
            "capsule-b",
            "mutation-b",
            "shared-signal",
            "B.md",
            "# from b",
        );

        evo.import_remote_envelope(&envelope_a).unwrap();
        evo.import_remote_envelope(&envelope_b).unwrap();

        let decision = evo
            .replay_or_fallback(replay_input("shared-signal"))
            .await
            .unwrap();

        // The higher-reputation publisher's capsule wins selection...
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-b".into()));
        // ...and the reuse reward lands in that publisher's account.
        let locked = ledger.lock().unwrap();
        let rewarded = locked
            .accounts
            .iter()
            .find(|item| item.node_id == "node-b")
            .unwrap();
        assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
        assert!(
            locked.selector_reputation_bias()["node-b"]
                > locked.selector_reputation_bias()["node-a"]
        );
    }
5166
5167 #[tokio::test]
5168 async fn remote_reuse_settlement_tracks_selected_capsule_publisher_for_shared_gene() {
5169 let ledger = Arc::new(Mutex::new(EvuLedger::default()));
5170 let (evo, _) = build_test_evo(
5171 "remote-shared-publisher",
5172 "run-remote-shared-publisher",
5173 command_validator(),
5174 );
5175 let evo = evo.with_economics(ledger.clone());
5176 let input = replay_input("shared-signal");
5177 let preferred = remote_publish_envelope_with_env(
5178 "node-a",
5179 "run-remote-a",
5180 "gene-shared",
5181 "capsule-preferred",
5182 "mutation-preferred",
5183 "shared-signal",
5184 "A.md",
5185 "# from a",
5186 input.env.clone(),
5187 );
5188 let fallback = remote_publish_envelope_with_env(
5189 "node-b",
5190 "run-remote-b",
5191 "gene-shared",
5192 "capsule-fallback",
5193 "mutation-fallback",
5194 "shared-signal",
5195 "B.md",
5196 "# from b",
5197 EnvFingerprint {
5198 rustc_version: "old-rustc".into(),
5199 cargo_lock_hash: "other-lock".into(),
5200 target_triple: "aarch64-apple-darwin".into(),
5201 os: "linux".into(),
5202 },
5203 );
5204
5205 evo.import_remote_envelope(&preferred).unwrap();
5206 evo.import_remote_envelope(&fallback).unwrap();
5207
5208 let decision = evo.replay_or_fallback(input).await.unwrap();
5209
5210 assert!(decision.used_capsule);
5211 assert_eq!(decision.capsule_id, Some("capsule-preferred".into()));
5212 let locked = ledger.lock().unwrap();
5213 let rewarded = locked
5214 .accounts
5215 .iter()
5216 .find(|item| item.node_id == "node-a")
5217 .unwrap();
5218 assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
5219 assert!(locked.accounts.iter().all(|item| item.node_id != "node-b"));
5220 }
5221
5222 #[test]
5223 fn select_candidates_surfaces_ranked_remote_cold_start_candidates() {
5224 let ledger = Arc::new(Mutex::new(EvuLedger {
5225 accounts: vec![],
5226 reputations: vec![
5227 oris_economics::ReputationRecord {
5228 node_id: "node-a".into(),
5229 publish_success_rate: 0.4,
5230 validator_accuracy: 0.4,
5231 reuse_impact: 0,
5232 },
5233 oris_economics::ReputationRecord {
5234 node_id: "node-b".into(),
5235 publish_success_rate: 0.95,
5236 validator_accuracy: 0.95,
5237 reuse_impact: 8,
5238 },
5239 ],
5240 }));
5241 let (evo, _) = build_test_evo("remote-select", "run-remote-select", command_validator());
5242 let evo = evo.with_economics(ledger);
5243
5244 let envelope_a = remote_publish_envelope(
5245 "node-a",
5246 "run-remote-a",
5247 "gene-a",
5248 "capsule-a",
5249 "mutation-a",
5250 "shared-signal",
5251 "A.md",
5252 "# from a",
5253 );
5254 let envelope_b = remote_publish_envelope(
5255 "node-b",
5256 "run-remote-b",
5257 "gene-b",
5258 "capsule-b",
5259 "mutation-b",
5260 "shared-signal",
5261 "B.md",
5262 "# from b",
5263 );
5264
5265 evo.import_remote_envelope(&envelope_a).unwrap();
5266 evo.import_remote_envelope(&envelope_b).unwrap();
5267
5268 let candidates = evo.select_candidates(&replay_input("shared-signal"));
5269
5270 assert_eq!(candidates.len(), 1);
5271 assert_eq!(candidates[0].gene.id, "gene-b");
5272 assert_eq!(candidates[0].capsules[0].id, "capsule-b");
5273 }
5274
    #[tokio::test]
    async fn remote_reuse_publisher_bias_survives_restart() {
        // Import envelopes on one instance, then rebuild a fresh instance over
        // the same on-disk store: the recovered node must still select
        // node-b's capsule and reward node-b, showing publisher attribution
        // can be rebuilt from the store rather than living only in memory.
        let ledger = Arc::new(Mutex::new(EvuLedger {
            accounts: vec![],
            reputations: vec![
                oris_economics::ReputationRecord {
                    node_id: "node-a".into(),
                    publish_success_rate: 0.4,
                    validator_accuracy: 0.4,
                    reuse_impact: 0,
                },
                oris_economics::ReputationRecord {
                    node_id: "node-b".into(),
                    publish_success_rate: 0.95,
                    validator_accuracy: 0.95,
                    reuse_impact: 8,
                },
            ],
        }));
        // Durable JSONL store shared by both instances.
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-remote-restart-store-{}",
            next_id("t")
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let evo = build_test_evo_with_store(
            "remote-success-restart-source",
            "run-remote-restart-source",
            command_validator(),
            store.clone(),
        )
        .with_economics(ledger.clone());

        let envelope_a = remote_publish_envelope(
            "node-a",
            "run-remote-a",
            "gene-a",
            "capsule-a",
            "mutation-a",
            "shared-signal",
            "A.md",
            "# from a",
        );
        let envelope_b = remote_publish_envelope(
            "node-b",
            "run-remote-b",
            "gene-b",
            "capsule-b",
            "mutation-b",
            "shared-signal",
            "B.md",
            "# from b",
        );

        evo.import_remote_envelope(&envelope_a).unwrap();
        evo.import_remote_envelope(&envelope_b).unwrap();

        // Simulated restart: a brand-new instance over the same store/ledger.
        let recovered = build_test_evo_with_store(
            "remote-success-restart-recovered",
            "run-remote-restart-recovered",
            command_validator(),
            store.clone(),
        )
        .with_economics(ledger.clone());

        let decision = recovered
            .replay_or_fallback(replay_input("shared-signal"))
            .await
            .unwrap();

        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-b".into()));
        // The reuse reward settles to node-b even after the restart.
        let locked = ledger.lock().unwrap();
        let rewarded = locked
            .accounts
            .iter()
            .find(|item| item.node_id == "node-b")
            .unwrap();
        assert_eq!(rewarded.balance, recovered.stake_policy.reuse_reward);
    }
5358
5359 #[tokio::test]
5360 async fn remote_reuse_failure_penalizes_remote_reputation() {
5361 let ledger = Arc::new(Mutex::new(EvuLedger::default()));
5362 let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
5363 let (evo, _) = build_test_evo("remote-failure", "run-failure", failing_validator);
5364 let evo = evo.with_economics(ledger.clone());
5365
5366 let envelope = remote_publish_envelope(
5367 "node-remote",
5368 "run-remote-failed",
5369 "gene-remote",
5370 "capsule-remote",
5371 "mutation-remote",
5372 "failure-signal",
5373 "FAILED.md",
5374 "# from remote",
5375 );
5376 evo.import_remote_envelope(&envelope).unwrap();
5377
5378 let decision = evo
5379 .replay_or_fallback(replay_input("failure-signal"))
5380 .await
5381 .unwrap();
5382
5383 assert!(!decision.used_capsule);
5384 assert!(decision.fallback_to_planner);
5385
5386 let signal = evo.economics_signal("node-remote").unwrap();
5387 assert_eq!(signal.available_evu, 0);
5388 assert!(signal.publish_success_rate < 0.5);
5389 assert!(signal.validator_accuracy < 0.5);
5390 }
5391
5392 #[test]
5393 fn ensure_builtin_experience_assets_is_idempotent_and_fetchable() {
5394 let store_root = std::env::temp_dir().join(format!(
5395 "oris-evokernel-builtin-experience-store-{}",
5396 next_id("t")
5397 ));
5398 if store_root.exists() {
5399 fs::remove_dir_all(&store_root).unwrap();
5400 }
5401 let store: Arc<dyn EvolutionStore> =
5402 Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
5403 let node = EvolutionNetworkNode::new(store.clone());
5404
5405 let first = node
5406 .ensure_builtin_experience_assets("runtime-bootstrap")
5407 .unwrap();
5408 assert_eq!(first.imported_asset_ids.len(), 2);
5409
5410 let second = node
5411 .ensure_builtin_experience_assets("runtime-bootstrap")
5412 .unwrap();
5413 assert!(second.imported_asset_ids.is_empty());
5414
5415 let fetch = node
5416 .fetch_assets(
5417 "execution-api",
5418 &FetchQuery {
5419 sender_id: "compat-agent".into(),
5420 signals: vec!["docs.rewrite".into()],
5421 },
5422 )
5423 .unwrap();
5424
5425 let mut has_builtin_docs = false;
5426 for asset in fetch.assets {
5427 if let NetworkAsset::Gene { gene } = asset {
5428 if strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
5429 == Some("builtin")
5430 && strategy_metadata_value(&gene.strategy, "task_class").as_deref()
5431 == Some("docs.rewrite")
5432 && gene.state == AssetState::Promoted
5433 {
5434 has_builtin_docs = true;
5435 break;
5436 }
5437 }
5438 }
5439 assert!(has_builtin_docs);
5440 }
5441
5442 #[test]
5443 fn reported_experience_retention_keeps_latest_three_and_preserves_builtin_assets() {
5444 let store_root = std::env::temp_dir().join(format!(
5445 "oris-evokernel-reported-retention-store-{}",
5446 next_id("t")
5447 ));
5448 if store_root.exists() {
5449 fs::remove_dir_all(&store_root).unwrap();
5450 }
5451 let store: Arc<dyn EvolutionStore> =
5452 Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
5453 let node = EvolutionNetworkNode::new(store.clone());
5454
5455 node.ensure_builtin_experience_assets("runtime-bootstrap")
5456 .unwrap();
5457
5458 for idx in 0..4 {
5459 node.record_reported_experience(
5460 "reporter-a",
5461 format!("reported-docs-rewrite-v{}", idx + 1),
5462 vec!["docs.rewrite".into(), format!("task-{}", idx + 1)],
5463 vec![
5464 "task_class=docs.rewrite".into(),
5465 format!("task_label=Docs rewrite v{}", idx + 1),
5466 format!("summary=reported replay {}", idx + 1),
5467 ],
5468 vec!["a2a.tasks.report".into()],
5469 )
5470 .unwrap();
5471 }
5472
5473 let (_, projection) = store.scan_projection().unwrap();
5474 let reported_promoted = projection
5475 .genes
5476 .iter()
5477 .filter(|gene| {
5478 gene.state == AssetState::Promoted
5479 && strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
5480 == Some("reported_experience")
5481 && strategy_metadata_value(&gene.strategy, "task_class").as_deref()
5482 == Some("docs.rewrite")
5483 })
5484 .count();
5485 let reported_revoked = projection
5486 .genes
5487 .iter()
5488 .filter(|gene| {
5489 gene.state == AssetState::Revoked
5490 && strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
5491 == Some("reported_experience")
5492 && strategy_metadata_value(&gene.strategy, "task_class").as_deref()
5493 == Some("docs.rewrite")
5494 })
5495 .count();
5496 let builtin_promoted = projection
5497 .genes
5498 .iter()
5499 .filter(|gene| {
5500 gene.state == AssetState::Promoted
5501 && strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
5502 == Some("builtin")
5503 })
5504 .count();
5505
5506 assert_eq!(reported_promoted, 3);
5507 assert_eq!(reported_revoked, 1);
5508 assert_eq!(builtin_promoted, 2);
5509
5510 let fetch = node
5511 .fetch_assets(
5512 "execution-api",
5513 &FetchQuery {
5514 sender_id: "consumer-b".into(),
5515 signals: vec!["docs.rewrite".into()],
5516 },
5517 )
5518 .unwrap();
5519 let docs_genes = fetch
5520 .assets
5521 .into_iter()
5522 .filter_map(|asset| match asset {
5523 NetworkAsset::Gene { gene } => Some(gene),
5524 _ => None,
5525 })
5526 .filter(|gene| {
5527 strategy_metadata_value(&gene.strategy, "task_class").as_deref()
5528 == Some("docs.rewrite")
5529 })
5530 .collect::<Vec<_>>();
5531 assert_eq!(docs_genes.len(), 4);
5532 }
5533}