1use std::collections::{BTreeMap, BTreeSet};
4use std::fs;
5use std::path::{Path, PathBuf};
6use std::process::Command;
7use std::sync::{Arc, Mutex};
8
9use async_trait::async_trait;
10use chrono::{DateTime, Duration, Utc};
11use oris_agent_contract::{
12 infer_mutation_needed_failure_reason_code, infer_replay_fallback_reason_code,
13 normalize_mutation_needed_failure_contract, normalize_replay_fallback_contract, AgentRole,
14 BoundedTaskClass, CoordinationMessage, CoordinationPlan, CoordinationPrimitive,
15 CoordinationResult, CoordinationTask, ExecutionFeedback, MutationNeededFailureContract,
16 MutationNeededFailureReasonCode, MutationProposal as AgentMutationProposal, ReplayFeedback,
17 ReplayPlannerDirective, SupervisedDevloopOutcome, SupervisedDevloopRequest,
18 SupervisedDevloopStatus,
19};
20use oris_economics::{EconomicsSignal, EvuLedger, StakePolicy};
21use oris_evolution::{
22 compute_artifact_hash, decayed_replay_confidence, next_id, stable_hash_json, AssetState,
23 BlastRadius, CandidateSource, Capsule, CapsuleId, EnvFingerprint, EvolutionError,
24 EvolutionEvent, EvolutionProjection, EvolutionStore, Gene, GeneCandidate, MutationId,
25 PreparedMutation, ReplayRoiEvidence, ReplayRoiReasonCode, Selector, SelectorInput,
26 StoreBackedSelector, StoredEvolutionEvent, TransitionEvidence, TransitionReasonCode,
27 ValidationSnapshot, MIN_REPLAY_CONFIDENCE,
28};
29use oris_evolution_network::{EvolutionEnvelope, NetworkAsset, SyncAudit};
30use oris_governor::{DefaultGovernor, Governor, GovernorDecision, GovernorInput};
31use oris_kernel::{Kernel, KernelState, RunId};
32use oris_sandbox::{
33 compute_blast_radius, execute_allowed_command, Sandbox, SandboxPolicy, SandboxReceipt,
34};
35use oris_spec::CompiledMutationPlan;
36use serde::{Deserialize, Serialize};
37use serde_json::Value;
38use thiserror::Error;
39
40pub use oris_evolution::{
41 default_store_root, ArtifactEncoding, AssetState as EvoAssetState,
42 BlastRadius as EvoBlastRadius, CandidateSource as EvoCandidateSource,
43 EnvFingerprint as EvoEnvFingerprint, EvolutionStore as EvoEvolutionStore, JsonlEvolutionStore,
44 MutationArtifact, MutationIntent, MutationTarget, Outcome, RiskLevel,
45 SelectorInput as EvoSelectorInput, TransitionReasonCode as EvoTransitionReasonCode,
46};
47pub use oris_evolution_network::{
48 FetchQuery, FetchResponse, MessageType, PublishRequest, RevokeNotice,
49};
50pub use oris_governor::{CoolingWindow, GovernorConfig, RevocationReason};
51pub use oris_sandbox::{LocalProcessSandbox, SandboxPolicy as EvoSandboxPolicy};
52pub use oris_spec::{SpecCompileError, SpecCompiler, SpecDocument};
53
/// An ordered validation pipeline identified by a profile name.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationPlan {
    /// Profile label recorded in snapshots (e.g. "oris-default").
    pub profile: String,
    /// Stages executed in order; execution is fail-fast (see `CommandValidator`).
    pub stages: Vec<ValidationStage>,
}
59
60impl ValidationPlan {
61 pub fn oris_default() -> Self {
62 Self {
63 profile: "oris-default".into(),
64 stages: vec![
65 ValidationStage::Command {
66 program: "cargo".into(),
67 args: vec!["fmt".into(), "--all".into(), "--check".into()],
68 timeout_ms: 60_000,
69 },
70 ValidationStage::Command {
71 program: "cargo".into(),
72 args: vec!["check".into(), "--workspace".into()],
73 timeout_ms: 180_000,
74 },
75 ValidationStage::Command {
76 program: "cargo".into(),
77 args: vec![
78 "test".into(),
79 "-p".into(),
80 "oris-kernel".into(),
81 "-p".into(),
82 "oris-evolution".into(),
83 "-p".into(),
84 "oris-sandbox".into(),
85 "-p".into(),
86 "oris-evokernel".into(),
87 "--lib".into(),
88 ],
89 timeout_ms: 300_000,
90 },
91 ValidationStage::Command {
92 program: "cargo".into(),
93 args: vec![
94 "test".into(),
95 "-p".into(),
96 "oris-runtime".into(),
97 "--lib".into(),
98 ],
99 timeout_ms: 300_000,
100 },
101 ],
102 }
103 }
104}
105
/// A single step of a validation plan. Currently only external command
/// execution is supported.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum ValidationStage {
    Command {
        /// Program name resolved by the sandbox command allow-list.
        program: String,
        /// Arguments passed verbatim to the program.
        args: Vec<String>,
        /// Per-stage execution timeout in milliseconds.
        timeout_ms: u64,
    },
}
114
/// Outcome of one executed validation stage.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationStageReport {
    /// Human-readable label: program followed by its joined arguments.
    pub stage: String,
    /// Whether the stage succeeded.
    pub success: bool,
    /// Process exit code; `None` when the command could not be executed.
    pub exit_code: Option<i32>,
    /// Stage wall-clock duration in milliseconds (0 on execution error).
    pub duration_ms: u64,
    pub stdout: String,
    pub stderr: String,
}
124
/// Aggregate result of running a `ValidationPlan`.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationReport {
    /// True only when every executed stage succeeded.
    pub success: bool,
    /// Total wall-clock duration across stages, in milliseconds.
    pub duration_ms: u64,
    /// Per-stage reports, in execution order (may be truncated on fail-fast).
    pub stages: Vec<ValidationStageReport>,
    /// Concatenated stdout/stderr of all executed stages.
    pub logs: String,
}
132
/// Raw material fed to `extract_deterministic_signals`.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionInput {
    /// Patch diff text; tokenized for signals.
    pub patch_diff: String,
    /// Mutation intent description; tokenized for signals.
    pub intent: String,
    /// Expected-effect description; tokenized for signals.
    pub expected_effect: String,
    /// Caller-declared signal phrases, normalized then tokenized.
    pub declared_signals: Vec<String>,
    /// Paths touched by the change; tokenized for signals.
    pub changed_files: Vec<String>,
    /// Adds the fixed "validation passed"/"validation failed" signal.
    pub validation_success: bool,
    /// Combined validation logs; tokenized for signals.
    pub validation_logs: String,
    /// Individual stage outputs; tokenized for signals.
    pub stage_outputs: Vec<String>,
}
144
/// Deterministic signal set plus a stable hash over it.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionOutput {
    /// Up to 32 signal phrases in sorted (BTreeSet) order.
    pub values: Vec<String>,
    /// Stable JSON hash of `values` (falls back to an artifact hash of the
    /// newline-joined values if JSON hashing fails).
    pub hash: String,
}
150
/// Template describing a pre-seeded mutation; presumably consumed during
/// store bootstrap elsewhere in this crate — confirm at use sites.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SeedTemplate {
    pub id: String,
    pub intent: String,
    pub signals: Vec<String>,
    /// Diff payload applied when the seed is materialized.
    pub diff_payload: String,
    /// Name of the validation profile to run against the seed.
    pub validation_profile: String,
}
159
/// Summary of a store bootstrap/seed pass — counts only; see the bootstrap
/// logic elsewhere in this crate for semantics.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct BootstrapReport {
    /// Whether any seeding was performed.
    pub seeded: bool,
    pub genes_added: usize,
    pub capsules_added: usize,
}
166
// Tunables for the replay/promotion pipeline. Several are consumed outside
// this chunk; comments on those are name-based — confirm at the use sites.

/// Cap on retained reported experiences.
const REPORTED_EXPERIENCE_RETENTION_LIMIT: usize = 3;
/// Minimum replay attempts before a shadow asset may be promoted.
const SHADOW_PROMOTION_MIN_REPLAY_ATTEMPTS: u64 = 2;
/// Minimum replay success rate required for shadow promotion.
const SHADOW_PROMOTION_MIN_SUCCESS_RATE: f32 = 0.70;
/// Minimum environment-match factor required for shadow promotion.
const SHADOW_PROMOTION_MIN_ENV_MATCH: f32 = 0.75;
/// Decayed-confidence floor for promotion; mirrors the store-wide minimum.
const SHADOW_PROMOTION_MIN_DECAYED_CONFIDENCE: f32 = MIN_REPLAY_CONFIDENCE;
/// Baseline reasoning-token credit for a replay; also the default
/// `min_reasoning_avoided_tokens` release-gate threshold.
const REPLAY_REASONING_TOKEN_FLOOR: u64 = 192;
/// Per-signal weight added on top of the token floor.
const REPLAY_REASONING_TOKEN_SIGNAL_WEIGHT: u64 = 24;
/// Score penalty applied to candidates found only via exact-match lookup
/// (cold start); see `build_select_evidence`.
const COLD_START_LOOKUP_PENALTY: f32 = 0.05;
/// Upper bound on diff size for mutation-needed handling (bytes).
const MUTATION_NEEDED_MAX_DIFF_BYTES: usize = 128 * 1024;
/// Upper bound on changed lines for mutation-needed handling.
const MUTATION_NEEDED_MAX_CHANGED_LINES: usize = 600;
/// Sandbox execution budget for mutation-needed handling (ms).
const MUTATION_NEEDED_MAX_SANDBOX_DURATION_MS: u64 = 120_000;
/// Validation budget for mutation-needed handling (ms).
const MUTATION_NEEDED_MAX_VALIDATION_BUDGET_MS: u64 = 900_000;
/// Max documentation files touched by a supervised devloop.
const SUPERVISED_DEVLOOP_MAX_DOC_FILES: usize = 3;
/// Dimensions over which replay release-gate metrics are aggregated.
pub const REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS: [&str; 2] =
    ["task_class", "source_sender_id"];
182
/// Per-check result of scoring a repair plan; produced by
/// `evaluate_repair_quality_gate`.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct RepairQualityGateReport {
    /// Plan names a root cause.
    pub root_cause: bool,
    /// Plan names fix/remediation steps.
    pub fix: bool,
    /// Plan names verification steps.
    pub verification: bool,
    /// Plan names a rollback path.
    pub rollback: bool,
    /// Plan references the triggering incident (unknown-command context).
    pub incident_anchor: bool,
    /// How many of root-cause/fix/verification/rollback were found (0..=4).
    pub structure_score: usize,
    /// Plan contains a recognizable tool invocation (cargo/git/...).
    pub has_actionable_command: bool,
}
193
194impl RepairQualityGateReport {
195 pub fn passes(&self) -> bool {
196 self.incident_anchor
197 && self.structure_score >= 3
198 && (self.has_actionable_command || self.verification)
199 }
200
201 pub fn failed_checks(&self) -> Vec<String> {
202 let mut failed = Vec::new();
203 if !self.incident_anchor {
204 failed.push("包含unknown command故障上下文".to_string());
205 }
206 if self.structure_score < 3 {
207 failed.push("结构化修复信息至少满足3项(根因/修复/验证/回滚)".to_string());
208 }
209 if !(self.has_actionable_command || self.verification) {
210 failed.push("包含可执行验证命令或验证计划".to_string());
211 }
212 failed
213 }
214}
215
216pub fn evaluate_repair_quality_gate(plan: &str) -> RepairQualityGateReport {
217 fn contains_any(haystack: &str, needles: &[&str]) -> bool {
218 needles.iter().any(|needle| haystack.contains(needle))
219 }
220
221 let lower = plan.to_ascii_lowercase();
222 let root_cause = contains_any(
223 plan,
224 &["根因", "原因分析", "问题定位", "原因定位", "根本原因"],
225 ) || contains_any(
226 &lower,
227 &[
228 "root cause",
229 "cause analysis",
230 "problem diagnosis",
231 "diagnosis",
232 ],
233 );
234 let fix = contains_any(
235 plan,
236 &["修复步骤", "修复方案", "处理步骤", "修复建议", "整改方案"],
237 ) || contains_any(
238 &lower,
239 &[
240 "fix",
241 "remediation",
242 "mitigation",
243 "resolution",
244 "repair steps",
245 ],
246 );
247 let verification = contains_any(
248 plan,
249 &["验证命令", "验证步骤", "回归测试", "验证方式", "验收步骤"],
250 ) || contains_any(
251 &lower,
252 &[
253 "verification",
254 "validate",
255 "regression test",
256 "smoke test",
257 "test command",
258 ],
259 );
260 let rollback = contains_any(plan, &["回滚方案", "回滚步骤", "恢复方案", "撤销方案"])
261 || contains_any(&lower, &["rollback", "revert", "fallback plan", "undo"]);
262 let incident_anchor = contains_any(
263 &lower,
264 &[
265 "unknown command",
266 "process",
267 "proccess",
268 "command not found",
269 ],
270 ) || contains_any(plan, &["命令不存在", "命令未找到", "未知命令"]);
271 let structure_score = [root_cause, fix, verification, rollback]
272 .into_iter()
273 .filter(|ok| *ok)
274 .count();
275 let has_actionable_command = contains_any(
276 &lower,
277 &[
278 "cargo ", "git ", "python ", "pip ", "npm ", "pnpm ", "yarn ", "bash ", "make ",
279 ],
280 );
281
282 RepairQualityGateReport {
283 root_cause,
284 fix,
285 verification,
286 rollback,
287 incident_anchor,
288 structure_score,
289 has_actionable_command,
290 }
291}
292
293impl ValidationReport {
294 pub fn to_snapshot(&self, profile: &str) -> ValidationSnapshot {
295 ValidationSnapshot {
296 success: self.success,
297 profile: profile.to_string(),
298 duration_ms: self.duration_ms,
299 summary: if self.success {
300 "validation passed".into()
301 } else {
302 "validation failed".into()
303 },
304 }
305 }
306}
307
308pub fn extract_deterministic_signals(input: &SignalExtractionInput) -> SignalExtractionOutput {
309 let mut signals = BTreeSet::new();
310
311 for declared in &input.declared_signals {
312 if let Some(phrase) = normalize_signal_phrase(declared) {
313 signals.insert(phrase);
314 }
315 extend_signal_tokens(&mut signals, declared);
316 }
317
318 for text in [
319 input.patch_diff.as_str(),
320 input.intent.as_str(),
321 input.expected_effect.as_str(),
322 input.validation_logs.as_str(),
323 ] {
324 extend_signal_tokens(&mut signals, text);
325 }
326
327 for changed_file in &input.changed_files {
328 extend_signal_tokens(&mut signals, changed_file);
329 }
330
331 for stage_output in &input.stage_outputs {
332 extend_signal_tokens(&mut signals, stage_output);
333 }
334
335 signals.insert(if input.validation_success {
336 "validation passed".into()
337 } else {
338 "validation failed".into()
339 });
340
341 let values = signals.into_iter().take(32).collect::<Vec<_>>();
342 let hash =
343 stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
344 SignalExtractionOutput { values, hash }
345}
346
/// Error surfaced by a `Validator` implementation.
#[derive(Debug, Error)]
pub enum ValidationError {
    /// The validation pipeline itself failed to execute.
    #[error("validation execution failed: {0}")]
    Execution(String),
}
352
/// Runs a `ValidationPlan` against a sandboxed working copy.
#[async_trait]
pub trait Validator: Send + Sync {
    /// Executes `plan` in the workdir described by `receipt` and returns the
    /// aggregate report.
    async fn run(
        &self,
        receipt: &SandboxReceipt,
        plan: &ValidationPlan,
    ) -> Result<ValidationReport, ValidationError>;
}
361
/// `Validator` that shells out through the sandbox command allow-list.
pub struct CommandValidator {
    // Sandbox policy gating which programs may be executed.
    policy: SandboxPolicy,
}
365
366impl CommandValidator {
367 pub fn new(policy: SandboxPolicy) -> Self {
368 Self { policy }
369 }
370}
371
372#[async_trait]
373impl Validator for CommandValidator {
374 async fn run(
375 &self,
376 receipt: &SandboxReceipt,
377 plan: &ValidationPlan,
378 ) -> Result<ValidationReport, ValidationError> {
379 let started = std::time::Instant::now();
380 let mut stages = Vec::new();
381 let mut success = true;
382 let mut logs = String::new();
383
384 for stage in &plan.stages {
385 match stage {
386 ValidationStage::Command {
387 program,
388 args,
389 timeout_ms,
390 } => {
391 let result = execute_allowed_command(
392 &self.policy,
393 &receipt.workdir,
394 program,
395 args,
396 *timeout_ms,
397 )
398 .await;
399 let report = match result {
400 Ok(output) => ValidationStageReport {
401 stage: format!("{program} {}", args.join(" ")),
402 success: output.success,
403 exit_code: output.exit_code,
404 duration_ms: output.duration_ms,
405 stdout: output.stdout,
406 stderr: output.stderr,
407 },
408 Err(err) => ValidationStageReport {
409 stage: format!("{program} {}", args.join(" ")),
410 success: false,
411 exit_code: None,
412 duration_ms: 0,
413 stdout: String::new(),
414 stderr: err.to_string(),
415 },
416 };
417 if !report.success {
418 success = false;
419 }
420 if !report.stdout.is_empty() {
421 logs.push_str(&report.stdout);
422 logs.push('\n');
423 }
424 if !report.stderr.is_empty() {
425 logs.push_str(&report.stderr);
426 logs.push('\n');
427 }
428 stages.push(report);
429 if !success {
430 break;
431 }
432 }
433 }
434 }
435
436 Ok(ValidationReport {
437 success,
438 duration_ms: started.elapsed().as_millis() as u64,
439 stages,
440 logs,
441 })
442 }
443}
444
/// Evidence from the task-class detection phase of a replay decision.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct ReplayDetectEvidence {
    pub task_class_id: String,
    pub task_label: String,
    /// Signals that matched the detected task class.
    pub matched_signals: Vec<String>,
    /// Reasons the task did not match other classes — presumably; confirm
    /// against the producer of this evidence.
    pub mismatch_reasons: Vec<String>,
}
452
/// One ranked replay candidate as recorded in selection evidence.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayCandidateEvidence {
    /// 1-based rank within the candidate list.
    pub rank: usize,
    pub gene_id: String,
    /// Id of the candidate's top capsule, if it has any.
    pub capsule_id: Option<String>,
    /// Raw selector score for the candidate.
    pub match_quality: f32,
    /// Confidence of the top capsule, if present.
    pub confidence: Option<f32>,
    /// Environment similarity between the request and the top capsule.
    pub environment_match_factor: Option<f32>,
    /// Penalty applied when the candidate came from exact-match lookup.
    pub cold_start_penalty: f32,
    /// `match_quality * (1 - cold_start_penalty)`.
    pub final_score: f32,
}
464
/// Evidence from the candidate-selection phase of a replay decision.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplaySelectEvidence {
    /// True when candidates came from exact-match lookup (cold start path).
    pub exact_match_lookup: bool,
    /// Gene id of the top-ranked candidate, if any.
    pub selected_gene_id: Option<String>,
    /// Capsule id of the top-ranked candidate, if any.
    pub selected_capsule_id: Option<String>,
    /// Full ranked candidate list.
    pub candidates: Vec<ReplayCandidateEvidence>,
}
472
/// Final outcome of a replay attempt, with the evidence that produced it.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayDecision {
    /// Whether a stored capsule was actually replayed.
    pub used_capsule: bool,
    pub capsule_id: Option<CapsuleId>,
    /// True when the task must fall back to planner-driven execution.
    pub fallback_to_planner: bool,
    /// Human-readable justification for the decision.
    pub reason: String,
    pub detect_evidence: ReplayDetectEvidence,
    pub select_evidence: ReplaySelectEvidence,
    pub economics_evidence: ReplayRoiEvidence,
}
483
/// Replay ROI counters aggregated per task class.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayTaskClassMetrics {
    pub task_class_id: String,
    pub task_label: String,
    pub replay_success_total: u64,
    pub replay_failure_total: u64,
    pub reasoning_steps_avoided_total: u64,
    pub reasoning_avoided_tokens_total: u64,
    pub replay_fallback_cost_total: u64,
    /// ROI ratio for this class; formula lives at the producer — confirm.
    pub replay_roi: f64,
}
495
/// Replay ROI counters aggregated per originating sender.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplaySourceRoiMetrics {
    pub source_sender_id: String,
    pub replay_success_total: u64,
    pub replay_failure_total: u64,
    pub reasoning_avoided_tokens_total: u64,
    pub replay_fallback_cost_total: u64,
    /// ROI ratio for this source; formula lives at the producer — confirm.
    pub replay_roi: f64,
}
505
/// Windowed replay ROI rollup with per-class and per-source breakdowns.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiWindowSummary {
    /// Timestamp the summary was generated (string-encoded).
    pub generated_at: String,
    /// Length of the aggregation window in seconds.
    pub window_seconds: u64,
    pub replay_attempts_total: u64,
    pub replay_success_total: u64,
    pub replay_failure_total: u64,
    pub reasoning_avoided_tokens_total: u64,
    pub replay_fallback_cost_total: u64,
    pub replay_roi: f64,
    pub replay_task_classes: Vec<ReplayTaskClassMetrics>,
    pub replay_sources: Vec<ReplaySourceRoiMetrics>,
}
519
/// Threshold set the replay ROI release gate is evaluated against.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiReleaseGateThresholds {
    pub min_replay_attempts: u64,
    pub min_replay_hit_rate: f64,
    pub max_false_replay_rate: f64,
    pub min_reasoning_avoided_tokens: u64,
    pub min_replay_roi: f64,
    /// When true, the gate also requires the replay safety signal to pass.
    pub require_replay_safety: bool,
}
529
impl Default for ReplayRoiReleaseGateThresholds {
    /// Conservative defaults: ≥3 attempts, ≥60% hit rate, ≤25% false-replay
    /// rate, the module-wide token floor, ≥5% ROI, and safety required.
    fn default() -> Self {
        Self {
            min_replay_attempts: 3,
            min_replay_hit_rate: 0.60,
            max_false_replay_rate: 0.25,
            min_reasoning_avoided_tokens: REPLAY_REASONING_TOKEN_FLOOR,
            min_replay_roi: 0.05,
            require_replay_safety: true,
        }
    }
}
542
/// Action the gate takes on a policy trigger. Only blocking is defined,
/// making every configured policy fail-closed.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum ReplayRoiReleaseGateAction {
    BlockRelease,
}
548
/// Per-failure-mode actions for the release gate.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct ReplayRoiReleaseGateFailClosedPolicy {
    pub on_threshold_violation: ReplayRoiReleaseGateAction,
    pub on_missing_metrics: ReplayRoiReleaseGateAction,
    pub on_invalid_metrics: ReplayRoiReleaseGateAction,
}
555
impl Default for ReplayRoiReleaseGateFailClosedPolicy {
    /// Every failure mode blocks the release by default (fail-closed).
    fn default() -> Self {
        Self {
            on_threshold_violation: ReplayRoiReleaseGateAction::BlockRelease,
            on_missing_metrics: ReplayRoiReleaseGateAction::BlockRelease,
            on_invalid_metrics: ReplayRoiReleaseGateAction::BlockRelease,
        }
    }
}
565
/// Boolean safety attestations accompanying the release-gate metrics.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct ReplayRoiReleaseGateSafetySignal {
    pub fail_closed_default: bool,
    pub rollback_ready: bool,
    pub audit_trail_complete: bool,
    pub has_replay_activity: bool,
}
573
/// Full input contract fed to the replay ROI release gate: windowed metrics,
/// derived rates, safety attestations, thresholds, and fail-closed policy.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiReleaseGateInputContract {
    pub generated_at: String,
    pub window_seconds: u64,
    /// Expected to mirror `REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS`.
    pub aggregation_dimensions: Vec<String>,
    pub replay_attempts_total: u64,
    pub replay_success_total: u64,
    pub replay_failure_total: u64,
    pub replay_hit_rate: f64,
    pub false_replay_rate: f64,
    pub reasoning_avoided_tokens: u64,
    pub replay_fallback_cost_total: u64,
    pub replay_roi: f64,
    /// Overall safety verdict; detailed bits in `replay_safety_signal`.
    pub replay_safety: bool,
    pub replay_safety_signal: ReplayRoiReleaseGateSafetySignal,
    pub thresholds: ReplayRoiReleaseGateThresholds,
    pub fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy,
}
592
/// Release-gate verdict.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum ReplayRoiReleaseGateStatus {
    Pass,
    /// Blocked by a fail-closed policy trigger.
    FailClosed,
    /// Gate could not be evaluated conclusively.
    Indeterminate,
}
600
/// Release-gate result: verdict plus the failed checks and evidence trail.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiReleaseGateOutputContract {
    pub status: ReplayRoiReleaseGateStatus,
    pub failed_checks: Vec<String>,
    /// References to the evidence backing the verdict.
    pub evidence_refs: Vec<String>,
    pub summary: String,
}
608
/// Paired input/output record of one release-gate evaluation.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiReleaseGateContract {
    pub input: ReplayRoiReleaseGateInputContract,
    pub output: ReplayRoiReleaseGateOutputContract,
}
614
/// Scheduling state of a coordination task, as classified by
/// `MultiAgentCoordinator::classify_task`.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum CoordinationTaskState {
    /// All role-specific prerequisites are satisfied; task may run now.
    Ready,
    /// Has unresolved (not yet completed) dependencies; may become ready.
    Waiting,
    /// A dependency failed or was skipped; may be skipped in conditional mode.
    BlockedByFailure,
    /// Prerequisites are structurally invalid (unknown dependency, or role
    /// requirements that can never be met); will never run.
    PermanentlyBlocked,
}
622
/// Stateless, deterministic scheduler for multi-agent coordination plans.
#[derive(Clone, Debug, Default)]
pub struct MultiAgentCoordinator;
625
impl MultiAgentCoordinator {
    /// Creates a coordinator; the type carries no state.
    pub fn new() -> Self {
        Self
    }

    /// Executes `plan` to quiescence and returns completion/failure order,
    /// the message log, and a summary line.
    ///
    /// Scheduling loop: optionally skip failure-blocked tasks (conditional
    /// mode), compute the ready set, run it (truncated to one task in
    /// sequential mode), and repeat until nothing is ready. Task "execution"
    /// is simulated via `simulate_task_failure`; failures are retried up to
    /// `max_retries` extra attempts. Tasks still pending at the end are
    /// reported as failed with a diagnostic message.
    pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
        let primitive = plan.primitive.clone();
        let root_goal = plan.root_goal.clone();
        let timeout_ms = plan.timeout_ms;
        let max_retries = plan.max_retries;
        let mut tasks = BTreeMap::new();
        // Deduplicate by id, keeping the first occurrence of each task.
        for task in plan.tasks {
            tasks.entry(task.id.clone()).or_insert(task);
        }

        let mut pending = tasks.keys().cloned().collect::<BTreeSet<_>>();
        let mut completed = BTreeSet::new();
        let mut failed = BTreeSet::new();
        // *_order vectors preserve event order; the sets give O(log n) lookup.
        let mut completed_order = Vec::new();
        let mut failed_order = Vec::new();
        let mut skipped = BTreeSet::new();
        // Failure count per task id, for retry accounting.
        let mut attempts = BTreeMap::new();
        let mut messages = Vec::new();

        loop {
            // Conditional mode proactively skips tasks whose dependency chain
            // already failed, instead of leaving them pending.
            if matches!(primitive, CoordinationPrimitive::Conditional) {
                self.apply_conditional_skips(
                    &tasks,
                    &mut pending,
                    &completed,
                    &failed,
                    &mut skipped,
                    &mut messages,
                );
            }

            let mut ready = self.ready_task_ids(&tasks, &pending, &completed, &failed, &skipped);
            if ready.is_empty() {
                break;
            }
            // Sequential mode runs exactly one ready task per pass.
            if matches!(primitive, CoordinationPrimitive::Sequential) {
                ready.truncate(1);
            }

            for task_id in ready {
                let Some(task) = tasks.get(&task_id) else {
                    continue;
                };
                if !pending.contains(&task_id) {
                    continue;
                }
                self.record_handoff_messages(task, &tasks, &completed, &failed, &mut messages);

                let prior_failures = attempts.get(&task_id).copied().unwrap_or(0);
                if Self::simulate_task_failure(task, prior_failures) {
                    let failure_count = prior_failures + 1;
                    attempts.insert(task_id.clone(), failure_count);
                    // Up to max_retries *re*-attempts: total attempts allowed
                    // is max_retries + 1.
                    let will_retry = failure_count <= max_retries;
                    messages.push(CoordinationMessage {
                        from_role: task.role.clone(),
                        to_role: task.role.clone(),
                        task_id: task_id.clone(),
                        content: if will_retry {
                            format!("task {task_id} failed on attempt {failure_count} and will retry")
                        } else {
                            format!(
                                "task {task_id} failed on attempt {failure_count} and exhausted retries"
                            )
                        },
                    });
                    if !will_retry {
                        pending.remove(&task_id);
                        if failed.insert(task_id.clone()) {
                            failed_order.push(task_id);
                        }
                    }
                    // If retrying, the task stays pending and is re-selected
                    // on a later pass.
                    continue;
                }

                pending.remove(&task_id);
                if completed.insert(task_id.clone()) {
                    completed_order.push(task_id);
                }
            }
        }

        // Anything still pending could never run; classify why and mark it
        // failed with a diagnostic message.
        let blocked_ids = pending.into_iter().collect::<Vec<_>>();
        for task_id in blocked_ids {
            let Some(task) = tasks.get(&task_id) else {
                continue;
            };
            let state = self.classify_task(task, &tasks, &completed, &failed, &skipped);
            let content = match state {
                CoordinationTaskState::BlockedByFailure => {
                    format!("task {task_id} blocked by failed dependencies")
                }
                CoordinationTaskState::PermanentlyBlocked => {
                    format!("task {task_id} has invalid coordination prerequisites")
                }
                CoordinationTaskState::Waiting => {
                    format!("task {task_id} has unresolved dependencies")
                }
                // Should not happen: ready tasks are drained by the loop.
                CoordinationTaskState::Ready => {
                    format!("task {task_id} was left pending unexpectedly")
                }
            };
            messages.push(CoordinationMessage {
                from_role: task.role.clone(),
                to_role: task.role.clone(),
                task_id: task_id.clone(),
                content,
            });
            if failed.insert(task_id.clone()) {
                failed_order.push(task_id);
            }
        }

        CoordinationResult {
            completed_tasks: completed_order,
            failed_tasks: failed_order,
            messages,
            summary: format!(
                "goal '{}' completed {} tasks, failed {}, skipped {} using {:?} coordination (timeout={}ms, max_retries={})",
                root_goal,
                completed.len(),
                failed.len(),
                skipped.len(),
                primitive,
                timeout_ms,
                max_retries
            ),
        }
    }

    /// Ids of pending tasks currently classified as `Ready`, in BTreeSet
    /// (lexicographic) order — this makes scheduling deterministic.
    fn ready_task_ids(
        &self,
        tasks: &BTreeMap<String, CoordinationTask>,
        pending: &BTreeSet<String>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &BTreeSet<String>,
    ) -> Vec<String> {
        pending
            .iter()
            .filter_map(|task_id| {
                let task = tasks.get(task_id)?;
                (self.classify_task(task, tasks, completed, failed, skipped)
                    == CoordinationTaskState::Ready)
                    .then(|| task_id.clone())
            })
            .collect()
    }

    /// Conditional-mode pass: moves every pending task that is
    /// `BlockedByFailure` into `skipped`, recording a message for each.
    fn apply_conditional_skips(
        &self,
        tasks: &BTreeMap<String, CoordinationTask>,
        pending: &mut BTreeSet<String>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &mut BTreeSet<String>,
        messages: &mut Vec<CoordinationMessage>,
    ) {
        // Collect first: classification borrows `skipped` immutably.
        let skip_ids = pending
            .iter()
            .filter_map(|task_id| {
                let task = tasks.get(task_id)?;
                (self.classify_task(task, tasks, completed, failed, skipped)
                    == CoordinationTaskState::BlockedByFailure)
                    .then(|| task_id.clone())
            })
            .collect::<Vec<_>>();

        for task_id in skip_ids {
            let Some(task) = tasks.get(&task_id) else {
                continue;
            };
            pending.remove(&task_id);
            skipped.insert(task_id.clone());
            messages.push(CoordinationMessage {
                from_role: task.role.clone(),
                to_role: task.role.clone(),
                task_id: task_id.clone(),
                content: format!("task {task_id} skipped due to failed dependency chain"),
            });
        }
    }

    /// Role-specific readiness rules:
    /// - Planner/Coder: ready when all dependencies completed; blocked by any
    ///   failed/skipped dependency; permanently blocked by an unknown one.
    /// - Repair: must depend on at least one Coder, and only runs when a
    ///   Coder dependency has failed (it repairs failed coding work);
    ///   non-Coder dependency failures block it.
    /// - Optimizer: must depend on at least one implementation task
    ///   (Coder/Repair) and needs one of them completed; if all impl
    ///   dependencies failed it is blocked, not permanently broken.
    fn classify_task(
        &self,
        task: &CoordinationTask,
        tasks: &BTreeMap<String, CoordinationTask>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &BTreeSet<String>,
    ) -> CoordinationTaskState {
        match task.role {
            AgentRole::Planner | AgentRole::Coder => {
                let mut waiting = false;
                for dependency_id in &task.depends_on {
                    if !tasks.contains_key(dependency_id) {
                        return CoordinationTaskState::PermanentlyBlocked;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if waiting {
                    CoordinationTaskState::Waiting
                } else {
                    CoordinationTaskState::Ready
                }
            }
            AgentRole::Repair => {
                let mut waiting = false;
                let mut has_coder_dependency = false;
                let mut has_failed_coder = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_coder = matches!(dependency.role, AgentRole::Coder);
                    if is_coder {
                        has_coder_dependency = true;
                    }
                    if skipped.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if failed.contains(dependency_id) {
                        if is_coder {
                            // A failed Coder is the trigger for repair work.
                            has_failed_coder = true;
                        } else {
                            return CoordinationTaskState::BlockedByFailure;
                        }
                        continue;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if !has_coder_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_failed_coder {
                    CoordinationTaskState::Ready
                } else {
                    // All Coder dependencies succeeded: nothing to repair.
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
            AgentRole::Optimizer => {
                let mut waiting = false;
                let mut has_impl_dependency = false;
                let mut has_completed_impl = false;
                let mut has_failed_impl = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_impl = matches!(dependency.role, AgentRole::Coder | AgentRole::Repair);
                    if is_impl {
                        has_impl_dependency = true;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        if is_impl {
                            // Tolerated: optimizer needs only one completed
                            // implementation dependency.
                            has_failed_impl = true;
                            continue;
                        }
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if completed.contains(dependency_id) {
                        if is_impl {
                            has_completed_impl = true;
                        }
                        continue;
                    }
                    waiting = true;
                }
                if !has_impl_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_completed_impl {
                    CoordinationTaskState::Ready
                } else if has_failed_impl {
                    CoordinationTaskState::BlockedByFailure
                } else {
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
        }
    }

    /// Logs a handoff (or failed-dependency routing) message from each
    /// resolved dependency to `task`, deduplicating dependency ids first.
    fn record_handoff_messages(
        &self,
        task: &CoordinationTask,
        tasks: &BTreeMap<String, CoordinationTask>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        messages: &mut Vec<CoordinationMessage>,
    ) {
        let mut dependency_ids = task.depends_on.clone();
        dependency_ids.sort();
        dependency_ids.dedup();

        for dependency_id in dependency_ids {
            let Some(dependency) = tasks.get(&dependency_id) else {
                continue;
            };
            if completed.contains(&dependency_id) {
                messages.push(CoordinationMessage {
                    from_role: dependency.role.clone(),
                    to_role: task.role.clone(),
                    task_id: task.id.clone(),
                    content: format!("handoff from {dependency_id} to {}", task.id),
                });
            } else if failed.contains(&dependency_id) {
                messages.push(CoordinationMessage {
                    from_role: dependency.role.clone(),
                    to_role: task.role.clone(),
                    task_id: task.id.clone(),
                    content: format!("failed dependency {dependency_id} routed to {}", task.id),
                });
            }
        }
    }

    /// Deterministic failure oracle driven by marker words in the task
    /// description: "force-fail" always fails; "fail-once" fails only the
    /// first attempt.
    fn simulate_task_failure(task: &CoordinationTask, prior_failures: u32) -> bool {
        let normalized = task.description.to_ascii_lowercase();
        normalized.contains("force-fail")
            || (normalized.contains("fail-once") && prior_failures == 0)
    }
}
961
/// Errors surfaced by a `ReplayExecutor`, bucketed by originating layer.
#[derive(Debug, Error)]
pub enum ReplayError {
    #[error("store error: {0}")]
    Store(String),
    #[error("sandbox error: {0}")]
    Sandbox(String),
    #[error("validation error: {0}")]
    Validation(String),
}
971
/// Attempts to satisfy a task by replaying stored evolution capsules instead
/// of planner-driven execution.
#[async_trait]
pub trait ReplayExecutor: Send + Sync {
    /// Tries a replay for `input` under the given sandbox policy and
    /// validation plan, returning the decision with full evidence.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError>;

    /// Run-scoped variant; the default implementation ignores `run_id` and
    /// forwards to [`ReplayExecutor::try_replay`].
    async fn try_replay_for_run(
        &self,
        run_id: &RunId,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        let _ = run_id;
        self.try_replay(input, policy, validation).await
    }
}
992
/// `ReplayExecutor` backed by the evolution store, with optional economics
/// (EVU ledger) and remote-publisher reputation inputs.
pub struct StoreReplayExecutor {
    pub sandbox: Arc<dyn Sandbox>,
    pub validator: Arc<dyn Validator>,
    pub store: Arc<dyn EvolutionStore>,
    pub selector: Arc<dyn Selector>,
    pub governor: Arc<dyn Governor>,
    /// Optional EVU ledger; when present (with publishers) the candidate
    /// pool is widened during selection.
    pub economics: Option<Arc<Mutex<EvuLedger>>>,
    /// Optional sender-id -> publisher map used for reputation re-ranking.
    pub remote_publishers: Option<Arc<Mutex<BTreeMap<String, String>>>>,
    pub stake_policy: StakePolicy,
}
1003
/// Internal result of candidate collection: the ranked candidates plus
/// whether they came from exact-match (cold start) lookup.
struct ReplayCandidates {
    candidates: Vec<GeneCandidate>,
    exact_match: bool,
}
1008
#[async_trait]
impl ReplayExecutor for StoreReplayExecutor {
    /// Delegates to `try_replay_inner` without a run id.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        self.try_replay_inner(None, input, policy, validation).await
    }

    /// Delegates to `try_replay_inner`, tagging the attempt with `run_id`.
    async fn try_replay_for_run(
        &self,
        run_id: &RunId,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        self.try_replay_inner(Some(run_id), input, policy, validation)
            .await
    }
}
1031
1032impl StoreReplayExecutor {
    /// Gathers replay candidates for `input` in three tiers:
    /// 1. the configured selector (with a widened limit when both economics
    ///    and remote publishers are enabled),
    /// 2. exact-match lookup against the local store,
    /// 3. exact-match lookup against quarantined remote assets.
    /// Each tier is re-ranked with the reputation bias, and the final list is
    /// truncated to the requested limit (at least 1).
    fn collect_replay_candidates(&self, input: &SelectorInput) -> ReplayCandidates {
        // Quarantine confidence-decayed assets first so they cannot win below.
        self.apply_confidence_revalidation();
        let mut selector_input = input.clone();
        if self.economics.is_some() && self.remote_publishers.is_some() {
            // Widen the pool so reputation re-ranking has alternatives.
            selector_input.limit = selector_input.limit.max(4);
        }
        let mut candidates = self.selector.select(&selector_input);
        self.rerank_with_reputation_bias(&mut candidates);
        let mut exact_match = false;
        if candidates.is_empty() {
            // Tier 2: local exact-match fallback.
            let mut exact_candidates = exact_match_candidates(self.store.as_ref(), input);
            self.rerank_with_reputation_bias(&mut exact_candidates);
            if !exact_candidates.is_empty() {
                candidates = exact_candidates;
                exact_match = true;
            }
        }
        if candidates.is_empty() {
            // Tier 3: quarantined remote exact-match fallback.
            let mut remote_candidates =
                quarantined_remote_exact_match_candidates(self.store.as_ref(), input);
            self.rerank_with_reputation_bias(&mut remote_candidates);
            if !remote_candidates.is_empty() {
                candidates = remote_candidates;
                exact_match = true;
            }
        }
        candidates.truncate(input.limit.max(1));
        ReplayCandidates {
            candidates,
            exact_match,
        }
    }
1065
1066 fn build_select_evidence(
1067 &self,
1068 input: &SelectorInput,
1069 candidates: &[GeneCandidate],
1070 exact_match: bool,
1071 ) -> ReplaySelectEvidence {
1072 let cold_start_penalty = if exact_match {
1073 COLD_START_LOOKUP_PENALTY
1074 } else {
1075 0.0
1076 };
1077 let candidate_rows = candidates
1078 .iter()
1079 .enumerate()
1080 .map(|(idx, candidate)| {
1081 let top_capsule = candidate.capsules.first();
1082 let environment_match_factor = top_capsule
1083 .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env));
1084 let final_score = candidate.score * (1.0 - cold_start_penalty);
1085 ReplayCandidateEvidence {
1086 rank: idx + 1,
1087 gene_id: candidate.gene.id.clone(),
1088 capsule_id: top_capsule.map(|capsule| capsule.id.clone()),
1089 match_quality: candidate.score,
1090 confidence: top_capsule.map(|capsule| capsule.confidence),
1091 environment_match_factor,
1092 cold_start_penalty,
1093 final_score,
1094 }
1095 })
1096 .collect::<Vec<_>>();
1097
1098 ReplaySelectEvidence {
1099 exact_match_lookup: exact_match,
1100 selected_gene_id: candidate_rows
1101 .first()
1102 .map(|candidate| candidate.gene_id.clone()),
1103 selected_capsule_id: candidate_rows
1104 .first()
1105 .and_then(|candidate| candidate.capsule_id.clone()),
1106 candidates: candidate_rows,
1107 }
1108 }
1109
1110 fn apply_confidence_revalidation(&self) {
1111 let Ok(projection) = projection_snapshot(self.store.as_ref()) else {
1112 return;
1113 };
1114 for target in stale_replay_revalidation_targets(&projection, Utc::now()) {
1115 let reason = format!(
1116 "confidence decayed to {:.3}; revalidation required before replay",
1117 target.decayed_confidence
1118 );
1119 let confidence_decay_ratio = if target.peak_confidence > 0.0 {
1120 (target.decayed_confidence / target.peak_confidence).clamp(0.0, 1.0)
1121 } else {
1122 0.0
1123 };
1124 if self
1125 .store
1126 .append_event(EvolutionEvent::PromotionEvaluated {
1127 gene_id: target.gene_id.clone(),
1128 state: AssetState::Quarantined,
1129 reason: reason.clone(),
1130 reason_code: TransitionReasonCode::RevalidationConfidenceDecay,
1131 evidence: Some(TransitionEvidence {
1132 replay_attempts: None,
1133 replay_successes: None,
1134 replay_success_rate: None,
1135 environment_match_factor: None,
1136 decayed_confidence: Some(target.decayed_confidence),
1137 confidence_decay_ratio: Some(confidence_decay_ratio),
1138 summary: Some(format!(
1139 "phase=confidence_revalidation; decayed_confidence={:.3}; confidence_decay_ratio={:.3}",
1140 target.decayed_confidence, confidence_decay_ratio
1141 )),
1142 }),
1143 })
1144 .is_err()
1145 {
1146 continue;
1147 }
1148 for capsule_id in target.capsule_ids {
1149 if self
1150 .store
1151 .append_event(EvolutionEvent::CapsuleQuarantined { capsule_id })
1152 .is_err()
1153 {
1154 break;
1155 }
1156 }
1157 }
1158 }
1159
1160 fn build_replay_economics_evidence(
1161 &self,
1162 input: &SelectorInput,
1163 candidate: Option<&GeneCandidate>,
1164 source_sender_id: Option<&str>,
1165 success: bool,
1166 reason_code: ReplayRoiReasonCode,
1167 reason: &str,
1168 ) -> ReplayRoiEvidence {
1169 let (task_class_id, task_label) =
1170 replay_descriptor_from_candidate_or_input(candidate, input);
1171 let signal_source = candidate
1172 .map(|best| best.gene.signals.as_slice())
1173 .unwrap_or(input.signals.as_slice());
1174 let baseline_tokens = estimated_reasoning_tokens(signal_source);
1175 let reasoning_avoided_tokens = if success { baseline_tokens } else { 0 };
1176 let replay_fallback_cost = if success { 0 } else { baseline_tokens };
1177 let asset_origin =
1178 candidate.and_then(|best| strategy_metadata_value(&best.gene.strategy, "asset_origin"));
1179 let mut context_dimensions = vec![
1180 format!(
1181 "outcome={}",
1182 if success {
1183 "replay_hit"
1184 } else {
1185 "planner_fallback"
1186 }
1187 ),
1188 format!("reason={reason}"),
1189 format!("task_class_id={task_class_id}"),
1190 format!("task_label={task_label}"),
1191 ];
1192 if let Some(asset_origin) = asset_origin.as_deref() {
1193 context_dimensions.push(format!("asset_origin={asset_origin}"));
1194 }
1195 if let Some(source_sender_id) = source_sender_id {
1196 context_dimensions.push(format!("source_sender_id={source_sender_id}"));
1197 }
1198 ReplayRoiEvidence {
1199 success,
1200 reason_code,
1201 task_class_id,
1202 task_label,
1203 reasoning_avoided_tokens,
1204 replay_fallback_cost,
1205 replay_roi: compute_replay_roi(reasoning_avoided_tokens, replay_fallback_cost),
1206 asset_origin,
1207 source_sender_id: source_sender_id.map(ToOwned::to_owned),
1208 context_dimensions,
1209 }
1210 }
1211
1212 fn record_replay_economics(
1213 &self,
1214 replay_run_id: Option<&RunId>,
1215 candidate: Option<&GeneCandidate>,
1216 capsule_id: Option<&str>,
1217 evidence: ReplayRoiEvidence,
1218 ) -> Result<(), ReplayError> {
1219 self.store
1220 .append_event(EvolutionEvent::ReplayEconomicsRecorded {
1221 gene_id: candidate.map(|best| best.gene.id.clone()),
1222 capsule_id: capsule_id.map(ToOwned::to_owned),
1223 replay_run_id: replay_run_id.cloned(),
1224 evidence,
1225 })
1226 .map_err(|err| ReplayError::Store(err.to_string()))?;
1227 Ok(())
1228 }
1229
    /// Core replay pipeline: try to satisfy the current request by
    /// re-applying a stored capsule instead of falling back to the planner.
    ///
    /// Stages: candidate selection → score gate → capsule + mutation payload
    /// lookup → sandboxed patch apply → validation → (remote assets only)
    /// shadow-validation progression → reuse and economics bookkeeping.
    ///
    /// Every replay miss returns `Ok` with `fallback_to_planner: true` and
    /// records a matching `ReplayEconomicsRecorded` miss event; `Err` is
    /// reserved for store/validation infrastructure failures.
    async fn try_replay_inner(
        &self,
        replay_run_id: Option<&RunId>,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        let ReplayCandidates {
            candidates,
            exact_match,
        } = self.collect_replay_candidates(input);
        let mut detect_evidence = replay_detect_evidence_from_input(input);
        let select_evidence = self.build_select_evidence(input, &candidates, exact_match);
        // Miss: selector and both exact-match fallbacks produced nothing.
        let Some(best) = candidates.into_iter().next() else {
            detect_evidence
                .mismatch_reasons
                .push("no_candidate_after_select".to_string());
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                None,
                None,
                false,
                ReplayRoiReasonCode::ReplayMissNoMatchingGene,
                "no matching gene",
            );
            self.record_replay_economics(replay_run_id, None, None, economics_evidence.clone())?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "no matching gene".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        };
        // Enrich detection evidence with the winning candidate's descriptor
        // and the overlap between input and gene signals.
        let (detected_task_class_id, detected_task_label) =
            replay_descriptor_from_candidate_or_input(Some(&best), input);
        detect_evidence.task_class_id = detected_task_class_id;
        detect_evidence.task_label = detected_task_label;
        detect_evidence.matched_signals =
            matched_replay_signals(&input.signals, &best.gene.signals);
        // Miss: fuzzy selector match scored below the replay threshold.
        // Exact-match (cold-start) lookups bypass this score gate.
        if !exact_match && best.score < 0.82 {
            detect_evidence
                .mismatch_reasons
                .push("score_below_threshold".to_string());
            let reason = format!("best gene score {:.3} below replay threshold", best.score);
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                None,
                false,
                ReplayRoiReasonCode::ReplayMissScoreBelowThreshold,
                &reason,
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                None,
                economics_evidence.clone(),
            )?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason,
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        }

        // Miss: the winning gene carries no capsule to replay.
        let Some(capsule) = best.capsules.first().cloned() else {
            detect_evidence
                .mismatch_reasons
                .push("candidate_has_no_capsule".to_string());
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                None,
                false,
                ReplayRoiReasonCode::ReplayMissCandidateHasNoCapsule,
                "candidate gene has no capsule",
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                None,
                economics_evidence.clone(),
            )?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "candidate gene has no capsule".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        };
        // Remote attribution (if any) drives stake settlement and the
        // shadow-progression path below.
        let remote_publisher = self.publisher_for_capsule(&capsule.id);

        // Miss: the capsule references a mutation whose payload is gone.
        let Some(mutation) = find_declared_mutation(self.store.as_ref(), &capsule.mutation_id)
            .map_err(|err| ReplayError::Store(err.to_string()))?
        else {
            detect_evidence
                .mismatch_reasons
                .push("mutation_payload_missing".to_string());
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                remote_publisher.as_deref(),
                false,
                ReplayRoiReasonCode::ReplayMissMutationPayloadMissing,
                "mutation payload missing from store",
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                Some(&capsule.id),
                economics_evidence.clone(),
            )?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "mutation payload missing from store".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        };

        // Miss: the stored patch no longer applies in the sandbox. A failed
        // apply also settles negatively against a remote publisher's stake.
        let receipt = match self.sandbox.apply(&mutation, policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                self.record_reuse_settlement(remote_publisher.as_deref(), false);
                let reason = format!("replay patch apply failed: {err}");
                let economics_evidence = self.build_replay_economics_evidence(
                    input,
                    Some(&best),
                    remote_publisher.as_deref(),
                    false,
                    ReplayRoiReasonCode::ReplayMissPatchApplyFailed,
                    &reason,
                );
                self.record_replay_economics(
                    replay_run_id,
                    Some(&best),
                    Some(&capsule.id),
                    economics_evidence.clone(),
                )?;
                detect_evidence
                    .mismatch_reasons
                    .push("patch_apply_failed".to_string());
                return Ok(ReplayDecision {
                    used_capsule: false,
                    capsule_id: Some(capsule.id.clone()),
                    fallback_to_planner: true,
                    reason,
                    detect_evidence,
                    select_evidence,
                    economics_evidence,
                });
            }
        };

        // Validator infrastructure errors propagate as `Err`; a clean
        // "failed" report is a miss and additionally feeds the governor via
        // `record_replay_validation_failure` (which may revoke the gene).
        let report = self
            .validator
            .run(&receipt, validation)
            .await
            .map_err(|err| ReplayError::Validation(err.to_string()))?;
        if !report.success {
            self.record_replay_validation_failure(&best, &capsule, validation, &report)?;
            self.record_reuse_settlement(remote_publisher.as_deref(), false);
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                remote_publisher.as_deref(),
                false,
                ReplayRoiReasonCode::ReplayMissValidationFailed,
                "replay validation failed",
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                Some(&capsule.id),
                economics_evidence.clone(),
            )?;
            detect_evidence
                .mismatch_reasons
                .push("validation_failed".to_string());
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: Some(capsule.id.clone()),
                fallback_to_planner: true,
                reason: "replay validation failed".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        }

        // Remote-published assets do not jump straight to Promoted: the
        // first successful local replay moves Quarantined → ShadowValidated,
        // and subsequent replays promote only once the shadow gate passes.
        let requires_shadow_progression = remote_publisher.is_some()
            && matches!(
                capsule.state,
                AssetState::Quarantined | AssetState::ShadowValidated
            );
        if requires_shadow_progression {
            self.store
                .append_event(EvolutionEvent::ValidationPassed {
                    mutation_id: capsule.mutation_id.clone(),
                    report: report.to_snapshot(&validation.profile),
                    gene_id: Some(best.gene.id.clone()),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            let evidence = self.shadow_transition_evidence(&best.gene.id, &capsule, &input.env)?;
            // Choose the target state for this progression step:
            // quarantined genes enter shadow, shadow genes either promote
            // (gate passed) or keep collecting evidence.
            let (target_state, reason_code, reason, promote_now, phase) =
                if matches!(best.gene.state, AssetState::Quarantined) {
                    (
                        AssetState::ShadowValidated,
                        TransitionReasonCode::PromotionShadowValidationPassed,
                        "remote asset passed first local replay and entered shadow validation"
                            .into(),
                        false,
                        "quarantine_to_shadow",
                    )
                } else if shadow_promotion_gate_passed(&evidence) {
                    (
                        AssetState::Promoted,
                        TransitionReasonCode::PromotionRemoteReplayValidated,
                        "shadow validation thresholds satisfied; remote asset promoted".into(),
                        true,
                        "shadow_to_promoted",
                    )
                } else {
                    (
                        AssetState::ShadowValidated,
                        TransitionReasonCode::ShadowCollectingReplayEvidence,
                        "shadow validation collecting additional replay evidence".into(),
                        false,
                        "shadow_hold",
                    )
                };
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: best.gene.id.clone(),
                    state: target_state.clone(),
                    reason,
                    reason_code,
                    evidence: Some(evidence.to_transition_evidence(shadow_evidence_summary(
                        &evidence,
                        promote_now,
                        phase,
                    ))),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            if promote_now {
                self.store
                    .append_event(EvolutionEvent::GenePromoted {
                        gene_id: best.gene.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
            self.store
                .append_event(EvolutionEvent::CapsuleReleased {
                    capsule_id: capsule.id.clone(),
                    state: target_state,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
        }

        // Replay hit: record the reuse, settle positively with the remote
        // publisher (if any), and log the ROI evidence.
        self.store
            .append_event(EvolutionEvent::CapsuleReused {
                capsule_id: capsule.id.clone(),
                gene_id: capsule.gene_id.clone(),
                run_id: capsule.run_id.clone(),
                replay_run_id: replay_run_id.cloned(),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        self.record_reuse_settlement(remote_publisher.as_deref(), true);
        let reason = if exact_match {
            "replayed via cold-start lookup".to_string()
        } else {
            "replayed via selector".to_string()
        };
        let economics_evidence = self.build_replay_economics_evidence(
            input,
            Some(&best),
            remote_publisher.as_deref(),
            true,
            ReplayRoiReasonCode::ReplayHit,
            &reason,
        );
        self.record_replay_economics(
            replay_run_id,
            Some(&best),
            Some(&capsule.id),
            economics_evidence.clone(),
        )?;

        Ok(ReplayDecision {
            used_capsule: true,
            capsule_id: Some(capsule.id),
            fallback_to_planner: false,
            reason,
            detect_evidence,
            select_evidence,
            economics_evidence,
        })
    }
1541
1542 fn rerank_with_reputation_bias(&self, candidates: &mut [GeneCandidate]) {
1543 let Some(ledger) = self.economics.as_ref() else {
1544 return;
1545 };
1546 let reputation_bias = ledger
1547 .lock()
1548 .ok()
1549 .map(|locked| locked.selector_reputation_bias())
1550 .unwrap_or_default();
1551 if reputation_bias.is_empty() {
1552 return;
1553 }
1554 let required_assets = candidates
1555 .iter()
1556 .filter_map(|candidate| {
1557 candidate
1558 .capsules
1559 .first()
1560 .map(|capsule| capsule.id.as_str())
1561 })
1562 .collect::<Vec<_>>();
1563 let publisher_map = self.remote_publishers_snapshot(&required_assets);
1564 if publisher_map.is_empty() {
1565 return;
1566 }
1567 candidates.sort_by(|left, right| {
1568 effective_candidate_score(right, &publisher_map, &reputation_bias)
1569 .partial_cmp(&effective_candidate_score(
1570 left,
1571 &publisher_map,
1572 &reputation_bias,
1573 ))
1574 .unwrap_or(std::cmp::Ordering::Equal)
1575 .then_with(|| left.gene.id.cmp(&right.gene.id))
1576 });
1577 }
1578
1579 fn publisher_for_capsule(&self, capsule_id: &str) -> Option<String> {
1580 self.remote_publishers_snapshot(&[capsule_id])
1581 .get(capsule_id)
1582 .cloned()
1583 }
1584
1585 fn remote_publishers_snapshot(&self, required_assets: &[&str]) -> BTreeMap<String, String> {
1586 let cached = self
1587 .remote_publishers
1588 .as_ref()
1589 .and_then(|remote_publishers| {
1590 remote_publishers.lock().ok().map(|locked| locked.clone())
1591 })
1592 .unwrap_or_default();
1593 if !cached.is_empty()
1594 && required_assets
1595 .iter()
1596 .all(|asset_id| cached.contains_key(*asset_id))
1597 {
1598 return cached;
1599 }
1600
1601 let persisted = remote_publishers_by_asset_from_store(self.store.as_ref());
1602 if persisted.is_empty() {
1603 return cached;
1604 }
1605
1606 let mut merged = cached;
1607 for (asset_id, sender_id) in persisted {
1608 merged.entry(asset_id).or_insert(sender_id);
1609 }
1610
1611 if let Some(remote_publishers) = self.remote_publishers.as_ref() {
1612 if let Ok(mut locked) = remote_publishers.lock() {
1613 for (asset_id, sender_id) in &merged {
1614 locked.entry(asset_id.clone()).or_insert(sender_id.clone());
1615 }
1616 }
1617 }
1618
1619 merged
1620 }
1621
1622 fn record_reuse_settlement(&self, publisher_id: Option<&str>, success: bool) {
1623 let Some(publisher_id) = publisher_id else {
1624 return;
1625 };
1626 let Some(ledger) = self.economics.as_ref() else {
1627 return;
1628 };
1629 if let Ok(mut locked) = ledger.lock() {
1630 locked.settle_remote_reuse(publisher_id, success, &self.stake_policy);
1631 }
1632 }
1633
    /// Persist a failed replay validation and let the governor decide
    /// whether the gene should be revoked outright.
    ///
    /// Appends a `ValidationFailed` event, then evaluates the governor with
    /// the accumulated failure count, the capsule's blast radius, and the
    /// gene's confidence context. A `Revoked` verdict additionally records
    /// the transition with evidence, revokes the gene, and quarantines every
    /// capsule attached to the candidate.
    fn record_replay_validation_failure(
        &self,
        best: &GeneCandidate,
        capsule: &Capsule,
        validation: &ValidationPlan,
        report: &ValidationReport,
    ) -> Result<(), ReplayError> {
        let projection = projection_snapshot(self.store.as_ref())
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            Self::confidence_context(&projection, &best.gene.id);

        self.store
            .append_event(EvolutionEvent::ValidationFailed {
                mutation_id: capsule.mutation_id.clone(),
                report: report.to_snapshot(&validation.profile),
                gene_id: Some(best.gene.id.clone()),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;

        // Counted after the append above, so this failure is included.
        let replay_failures = self.replay_failure_count(&best.gene.id)?;
        let governor_decision = self.governor.evaluate(GovernorInput {
            // Remote-attributed capsules are judged as remote candidates.
            candidate_source: if self.publisher_for_capsule(&capsule.id).is_some() {
                CandidateSource::Remote
            } else {
                CandidateSource::Local
            },
            success_count: 0,
            blast_radius: BlastRadius {
                files_changed: capsule.outcome.changed_files.len(),
                lines_changed: capsule.outcome.lines_changed,
            },
            replay_failures,
            recent_mutation_ages_secs: Vec::new(),
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        if matches!(governor_decision.target_state, AssetState::Revoked) {
            // Record the revocation transition with full evidence first …
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: best.gene.id.clone(),
                    state: AssetState::Revoked,
                    reason: governor_decision.reason.clone(),
                    reason_code: governor_decision.reason_code.clone(),
                    evidence: Some(TransitionEvidence {
                        replay_attempts: Some(replay_failures),
                        replay_successes: None,
                        replay_success_rate: None,
                        environment_match_factor: None,
                        decayed_confidence: Some(current_confidence),
                        confidence_decay_ratio: if historical_peak_confidence > 0.0 {
                            Some((current_confidence / historical_peak_confidence).clamp(0.0, 1.0))
                        } else {
                            None
                        },
                        summary: Some(format!(
                            "phase=replay_failure_revocation; replay_failures={replay_failures}; current_confidence={:.3}; historical_peak_confidence={:.3}",
                            current_confidence, historical_peak_confidence
                        )),
                    }),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            // … then revoke the gene and quarantine all of its capsules.
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: best.gene.id.clone(),
                    reason: governor_decision.reason,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            for related in &best.capsules {
                self.store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: related.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
        }

        Ok(())
    }
1715
1716 fn confidence_context(
1717 projection: &EvolutionProjection,
1718 gene_id: &str,
1719 ) -> (f32, f32, Option<u64>) {
1720 let peak_confidence = projection
1721 .capsules
1722 .iter()
1723 .filter(|capsule| capsule.gene_id == gene_id)
1724 .map(|capsule| capsule.confidence)
1725 .fold(0.0_f32, f32::max);
1726 let age_secs = projection
1727 .last_updated_at
1728 .get(gene_id)
1729 .and_then(|timestamp| Self::seconds_since_timestamp(timestamp, Utc::now()));
1730 (peak_confidence, peak_confidence, age_secs)
1731 }
1732
1733 fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
1734 let parsed = DateTime::parse_from_rfc3339(timestamp)
1735 .ok()?
1736 .with_timezone(&Utc);
1737 let elapsed = now.signed_duration_since(parsed);
1738 if elapsed < Duration::zero() {
1739 Some(0)
1740 } else {
1741 u64::try_from(elapsed.num_seconds()).ok()
1742 }
1743 }
1744
1745 fn replay_failure_count(&self, gene_id: &str) -> Result<u64, ReplayError> {
1746 Ok(self
1747 .store
1748 .scan(1)
1749 .map_err(|err| ReplayError::Store(err.to_string()))?
1750 .into_iter()
1751 .filter(|stored| {
1752 matches!(
1753 &stored.event,
1754 EvolutionEvent::ValidationFailed {
1755 gene_id: Some(current_gene_id),
1756 ..
1757 } if current_gene_id == gene_id
1758 )
1759 })
1760 .count() as u64)
1761 }
1762
1763 fn shadow_transition_evidence(
1764 &self,
1765 gene_id: &str,
1766 capsule: &Capsule,
1767 input_env: &EnvFingerprint,
1768 ) -> Result<ShadowTransitionEvidence, ReplayError> {
1769 let events = self
1770 .store
1771 .scan(1)
1772 .map_err(|err| ReplayError::Store(err.to_string()))?;
1773 let (replay_attempts, replay_successes) = events.iter().fold(
1774 (0_u64, 0_u64),
1775 |(attempts, successes), stored| match &stored.event {
1776 EvolutionEvent::ValidationPassed {
1777 gene_id: Some(current_gene_id),
1778 ..
1779 } if current_gene_id == gene_id => (attempts + 1, successes + 1),
1780 EvolutionEvent::ValidationFailed {
1781 gene_id: Some(current_gene_id),
1782 ..
1783 } if current_gene_id == gene_id => (attempts + 1, successes),
1784 _ => (attempts, successes),
1785 },
1786 );
1787 let replay_success_rate = safe_ratio(replay_successes, replay_attempts) as f32;
1788 let environment_match_factor = replay_environment_match_factor(input_env, &capsule.env);
1789 let projection = projection_snapshot(self.store.as_ref())
1790 .map_err(|err| ReplayError::Store(err.to_string()))?;
1791 let age_secs = projection
1792 .last_updated_at
1793 .get(gene_id)
1794 .and_then(|timestamp| Self::seconds_since_timestamp(timestamp, Utc::now()));
1795 let decayed_confidence = decayed_replay_confidence(capsule.confidence, age_secs);
1796 let confidence_decay_ratio = if capsule.confidence > 0.0 {
1797 (decayed_confidence / capsule.confidence).clamp(0.0, 1.0)
1798 } else {
1799 0.0
1800 };
1801
1802 Ok(ShadowTransitionEvidence {
1803 replay_attempts,
1804 replay_successes,
1805 replay_success_rate,
1806 environment_match_factor,
1807 decayed_confidence,
1808 confidence_decay_ratio,
1809 })
1810 }
1811}
1812
/// Replay track record and confidence context gathered for a gene when
/// deciding shadow-validation transitions.
#[derive(Clone, Debug)]
struct ShadowTransitionEvidence {
    // Total local validation runs (passes + failures) attributed to the gene.
    replay_attempts: u64,
    // Subset of those attempts that passed validation.
    replay_successes: u64,
    // replay_successes / replay_attempts (0 when there are no attempts).
    replay_success_rate: f32,
    // Similarity between the requesting environment and the capsule's.
    environment_match_factor: f32,
    // Capsule confidence after age-based decay.
    decayed_confidence: f32,
    // decayed_confidence / stored confidence, clamped to [0, 1].
    confidence_decay_ratio: f32,
}
1822
1823impl ShadowTransitionEvidence {
1824 fn to_transition_evidence(&self, summary: String) -> TransitionEvidence {
1825 TransitionEvidence {
1826 replay_attempts: Some(self.replay_attempts),
1827 replay_successes: Some(self.replay_successes),
1828 replay_success_rate: Some(self.replay_success_rate),
1829 environment_match_factor: Some(self.environment_match_factor),
1830 decayed_confidence: Some(self.decayed_confidence),
1831 confidence_decay_ratio: Some(self.confidence_decay_ratio),
1832 summary: Some(summary),
1833 }
1834 }
1835}
1836
1837fn shadow_promotion_gate_passed(evidence: &ShadowTransitionEvidence) -> bool {
1838 evidence.replay_attempts >= SHADOW_PROMOTION_MIN_REPLAY_ATTEMPTS
1839 && evidence.replay_success_rate >= SHADOW_PROMOTION_MIN_SUCCESS_RATE
1840 && evidence.environment_match_factor >= SHADOW_PROMOTION_MIN_ENV_MATCH
1841 && evidence.decayed_confidence >= SHADOW_PROMOTION_MIN_DECAYED_CONFIDENCE
1842}
1843
1844fn shadow_evidence_summary(
1845 evidence: &ShadowTransitionEvidence,
1846 promoted: bool,
1847 phase: &str,
1848) -> String {
1849 format!(
1850 "phase={phase}; replay_attempts={}; replay_successes={}; replay_success_rate={:.3}; environment_match_factor={:.3}; decayed_confidence={:.3}; confidence_decay_ratio={:.3}; promote={promoted}",
1851 evidence.replay_attempts,
1852 evidence.replay_successes,
1853 evidence.replay_success_rate,
1854 evidence.environment_match_factor,
1855 evidence.decayed_confidence,
1856 evidence.confidence_decay_ratio,
1857 )
1858}
1859
/// A promoted gene whose decayed replay confidence fell below the floor and
/// must be quarantined pending revalidation.
#[derive(Clone, Debug, PartialEq)]
struct ConfidenceRevalidationTarget {
    // Gene to transition back to quarantine.
    gene_id: String,
    // Promoted capsules to quarantine alongside the gene.
    capsule_ids: Vec<String>,
    // Highest stored (undecayed) confidence across those capsules.
    peak_confidence: f32,
    // Highest age-decayed confidence across those capsules (below the floor).
    decayed_confidence: f32,
}
1867
1868fn stale_replay_revalidation_targets(
1869 projection: &EvolutionProjection,
1870 now: DateTime<Utc>,
1871) -> Vec<ConfidenceRevalidationTarget> {
1872 projection
1873 .genes
1874 .iter()
1875 .filter(|gene| gene.state == AssetState::Promoted)
1876 .filter_map(|gene| {
1877 let promoted_capsules = projection
1878 .capsules
1879 .iter()
1880 .filter(|capsule| {
1881 capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
1882 })
1883 .collect::<Vec<_>>();
1884 if promoted_capsules.is_empty() {
1885 return None;
1886 }
1887 let age_secs = projection
1888 .last_updated_at
1889 .get(&gene.id)
1890 .and_then(|timestamp| seconds_since_timestamp_for_confidence(timestamp, now));
1891 let decayed_confidence = promoted_capsules
1892 .iter()
1893 .map(|capsule| decayed_replay_confidence(capsule.confidence, age_secs))
1894 .fold(0.0_f32, f32::max);
1895 if decayed_confidence >= MIN_REPLAY_CONFIDENCE {
1896 return None;
1897 }
1898 let peak_confidence = promoted_capsules
1899 .iter()
1900 .map(|capsule| capsule.confidence)
1901 .fold(0.0_f32, f32::max);
1902 Some(ConfidenceRevalidationTarget {
1903 gene_id: gene.id.clone(),
1904 capsule_ids: promoted_capsules
1905 .into_iter()
1906 .map(|capsule| capsule.id.clone())
1907 .collect(),
1908 peak_confidence,
1909 decayed_confidence,
1910 })
1911 })
1912 .collect()
1913}
1914
1915fn seconds_since_timestamp_for_confidence(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
1916 let parsed = DateTime::parse_from_rfc3339(timestamp)
1917 .ok()?
1918 .with_timezone(&Utc);
1919 let elapsed = now.signed_duration_since(parsed);
1920 if elapsed < Duration::zero() {
1921 Some(0)
1922 } else {
1923 u64::try_from(elapsed.num_seconds()).ok()
1924 }
1925}
1926
/// Top-level error surface for evolution-kernel operations.
#[derive(Debug, Error)]
pub enum EvoKernelError {
    /// Sandbox failure (patch apply / command execution).
    #[error("sandbox error: {0}")]
    Sandbox(String),
    /// Validation infrastructure failed to run at all.
    #[error("validation error: {0}")]
    Validation(String),
    /// Validation ran to completion but the checks did not pass;
    /// carries the full report for inspection.
    #[error("validation failed")]
    ValidationFailed(ValidationReport),
    /// Event-store read or append failure.
    #[error("store error: {0}")]
    Store(String),
}
1938
/// Result of capturing a successful mutation as a reusable evolution asset.
#[derive(Clone, Debug)]
pub struct CaptureOutcome {
    /// Concrete replayable artifact produced by the capture.
    pub capsule: Capsule,
    /// Abstract strategy the capsule instantiates.
    pub gene: Gene,
    /// Governor verdict reached for the captured asset.
    pub governor_decision: GovernorDecision,
}
1945
/// Result of importing a remote asset envelope into the local store.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ImportOutcome {
    /// Ids of the assets that were actually written locally.
    pub imported_asset_ids: Vec<String>,
    /// Whether the envelope as a whole was accepted.
    pub accepted: bool,
    /// Pagination cursor for the next batch, when more assets remain.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub next_cursor: Option<String>,
    /// Opaque token allowing an interrupted sync to resume.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resume_token: Option<String>,
    /// Audit trail recorded for this sync operation.
    #[serde(default)]
    pub sync_audit: SyncAudit,
}
1957
/// Point-in-time metrics derived from the evolution event log.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct EvolutionMetricsSnapshot {
    /// Total replay attempts observed.
    pub replay_attempts_total: u64,
    /// Replay attempts that succeeded.
    pub replay_success_total: u64,
    /// Fraction of replay attempts that succeeded.
    pub replay_success_rate: f64,
    /// Confidence-decay revalidations recorded.
    pub confidence_revalidations_total: u64,
    /// Replays that avoided a planner reasoning pass.
    pub replay_reasoning_avoided_total: u64,
    /// Sum of reasoning tokens credited as avoided by replay hits.
    pub reasoning_avoided_tokens_total: u64,
    /// Sum of token costs charged to planner fallbacks.
    pub replay_fallback_cost_total: u64,
    /// Aggregate replay return on investment.
    pub replay_roi: f64,
    /// Per-task-class replay breakdown.
    pub replay_task_classes: Vec<ReplayTaskClassMetrics>,
    /// Per-source (publisher) replay ROI breakdown.
    pub replay_sources: Vec<ReplaySourceRoiMetrics>,
    /// Mutations declared over the log's lifetime.
    pub mutation_declared_total: u64,
    /// Mutations that reached promoted state.
    pub promoted_mutations_total: u64,
    /// Fraction of declared mutations that were promoted.
    pub promotion_ratio: f64,
    /// Gene revocations recorded.
    pub gene_revocations_total: u64,
    /// Mutations declared in the trailing hour.
    pub mutation_velocity_last_hour: u64,
    /// Revocations in the trailing hour.
    pub revoke_frequency_last_hour: u64,
    /// Genes currently in the promoted state.
    pub promoted_genes: u64,
    /// Capsules currently in the promoted state.
    pub promoted_capsules: u64,
    /// Sequence number of the last event folded into this snapshot.
    pub last_event_seq: u64,
}
1980
/// Minimal health view derived from an `EvolutionMetricsSnapshot`.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct EvolutionHealthSnapshot {
    /// Overall health status label.
    pub status: String,
    /// Sequence number of the last event observed.
    pub last_event_seq: u64,
    /// Genes currently in the promoted state.
    pub promoted_genes: u64,
    /// Capsules currently in the promoted state.
    pub promoted_capsules: u64,
}
1988
/// Facade exposing evolution-network operations (publish, fetch, revoke,
/// metrics) over a shared evolution store.
#[derive(Clone)]
pub struct EvolutionNetworkNode {
    /// Backing event store; shared so clones observe the same log.
    pub store: Arc<dyn EvolutionStore>,
}
1993
1994impl EvolutionNetworkNode {
1995 pub fn new(store: Arc<dyn EvolutionStore>) -> Self {
1996 Self { store }
1997 }
1998
1999 pub fn with_default_store() -> Self {
2000 Self {
2001 store: Arc::new(JsonlEvolutionStore::new(default_store_root())),
2002 }
2003 }
2004
2005 pub fn accept_publish_request(
2006 &self,
2007 request: &PublishRequest,
2008 ) -> Result<ImportOutcome, EvoKernelError> {
2009 let requested_cursor = resolve_requested_cursor(
2010 &request.sender_id,
2011 request.since_cursor.as_deref(),
2012 request.resume_token.as_deref(),
2013 )?;
2014 import_remote_envelope_into_store(
2015 self.store.as_ref(),
2016 &EvolutionEnvelope::publish(request.sender_id.clone(), request.assets.clone()),
2017 None,
2018 requested_cursor,
2019 )
2020 }
2021
2022 pub fn ensure_builtin_experience_assets(
2023 &self,
2024 sender_id: impl Into<String>,
2025 ) -> Result<ImportOutcome, EvoKernelError> {
2026 ensure_builtin_experience_assets_in_store(self.store.as_ref(), sender_id.into())
2027 }
2028
2029 pub fn record_reported_experience(
2030 &self,
2031 sender_id: impl Into<String>,
2032 gene_id: impl Into<String>,
2033 signals: Vec<String>,
2034 strategy: Vec<String>,
2035 validation: Vec<String>,
2036 ) -> Result<ImportOutcome, EvoKernelError> {
2037 record_reported_experience_in_store(
2038 self.store.as_ref(),
2039 sender_id.into(),
2040 gene_id.into(),
2041 signals,
2042 strategy,
2043 validation,
2044 )
2045 }
2046
2047 pub fn publish_local_assets(
2048 &self,
2049 sender_id: impl Into<String>,
2050 ) -> Result<EvolutionEnvelope, EvoKernelError> {
2051 export_promoted_assets_from_store(self.store.as_ref(), sender_id)
2052 }
2053
2054 pub fn fetch_assets(
2055 &self,
2056 responder_id: impl Into<String>,
2057 query: &FetchQuery,
2058 ) -> Result<FetchResponse, EvoKernelError> {
2059 fetch_assets_from_store(self.store.as_ref(), responder_id, query)
2060 }
2061
2062 pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
2063 revoke_assets_in_store(self.store.as_ref(), notice)
2064 }
2065
2066 pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
2067 evolution_metrics_snapshot(self.store.as_ref())
2068 }
2069
2070 pub fn replay_roi_release_gate_summary(
2071 &self,
2072 window_seconds: u64,
2073 ) -> Result<ReplayRoiWindowSummary, EvoKernelError> {
2074 replay_roi_release_gate_summary(self.store.as_ref(), window_seconds)
2075 }
2076
2077 pub fn render_replay_roi_release_gate_summary_json(
2078 &self,
2079 window_seconds: u64,
2080 ) -> Result<String, EvoKernelError> {
2081 let summary = self.replay_roi_release_gate_summary(window_seconds)?;
2082 serde_json::to_string_pretty(&summary)
2083 .map_err(|err| EvoKernelError::Validation(err.to_string()))
2084 }
2085
2086 pub fn replay_roi_release_gate_contract(
2087 &self,
2088 window_seconds: u64,
2089 thresholds: ReplayRoiReleaseGateThresholds,
2090 ) -> Result<ReplayRoiReleaseGateContract, EvoKernelError> {
2091 let summary = self.replay_roi_release_gate_summary(window_seconds)?;
2092 Ok(replay_roi_release_gate_contract(&summary, thresholds))
2093 }
2094
2095 pub fn render_replay_roi_release_gate_contract_json(
2096 &self,
2097 window_seconds: u64,
2098 thresholds: ReplayRoiReleaseGateThresholds,
2099 ) -> Result<String, EvoKernelError> {
2100 let contract = self.replay_roi_release_gate_contract(window_seconds, thresholds)?;
2101 serde_json::to_string_pretty(&contract)
2102 .map_err(|err| EvoKernelError::Validation(err.to_string()))
2103 }
2104
2105 pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
2106 self.metrics_snapshot().map(|snapshot| {
2107 let health = evolution_health_snapshot(&snapshot);
2108 render_evolution_metrics_prometheus(&snapshot, &health)
2109 })
2110 }
2111
2112 pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
2113 self.metrics_snapshot()
2114 .map(|snapshot| evolution_health_snapshot(&snapshot))
2115 }
2116}
2117
/// Orchestrates the evolution loop around a kernel run: sandboxed mutation
/// application, validation, event storage, candidate selection, governance,
/// and economics settlement.
pub struct EvoKernel<S: KernelState> {
    /// Underlying task kernel.
    pub kernel: Arc<Kernel<S>>,
    /// Applies mutations and executes commands in isolation.
    pub sandbox: Arc<dyn Sandbox>,
    /// Runs the validation plan against sandbox receipts.
    pub validator: Arc<dyn Validator>,
    /// Append-only evolution event store.
    pub store: Arc<dyn EvolutionStore>,
    /// Ranks replay candidates for incoming selector input.
    pub selector: Arc<dyn Selector>,
    /// Decides promotion/revocation from replay evidence.
    pub governor: Arc<dyn Governor>,
    /// EVU ledger for stake and reputation accounting.
    pub economics: Arc<Mutex<EvuLedger>>,
    /// Capsule id → remote publisher (sender id) attribution cache.
    pub remote_publishers: Arc<Mutex<BTreeMap<String, String>>>,
    /// Stake rules applied when settling remote reuse.
    pub stake_policy: StakePolicy,
    /// Default sandbox policy for mutation application.
    pub sandbox_policy: SandboxPolicy,
    /// Default validation plan.
    pub validation_plan: ValidationPlan,
}
2131
2132impl<S: KernelState> EvoKernel<S> {
2133 fn recent_prior_mutation_ages_secs(
2134 &self,
2135 exclude_mutation_id: Option<&str>,
2136 ) -> Result<Vec<u64>, EvolutionError> {
2137 let now = Utc::now();
2138 let mut ages = self
2139 .store
2140 .scan(1)?
2141 .into_iter()
2142 .filter_map(|stored| match stored.event {
2143 EvolutionEvent::MutationDeclared { mutation }
2144 if exclude_mutation_id != Some(mutation.intent.id.as_str()) =>
2145 {
2146 Self::seconds_since_timestamp(&stored.timestamp, now)
2147 }
2148 _ => None,
2149 })
2150 .collect::<Vec<_>>();
2151 ages.sort_unstable();
2152 Ok(ages)
2153 }
2154
2155 fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
2156 let parsed = DateTime::parse_from_rfc3339(timestamp)
2157 .ok()?
2158 .with_timezone(&Utc);
2159 let elapsed = now.signed_duration_since(parsed);
2160 if elapsed < Duration::zero() {
2161 Some(0)
2162 } else {
2163 u64::try_from(elapsed.num_seconds()).ok()
2164 }
2165 }
2166
2167 pub fn new(
2168 kernel: Arc<Kernel<S>>,
2169 sandbox: Arc<dyn Sandbox>,
2170 validator: Arc<dyn Validator>,
2171 store: Arc<dyn EvolutionStore>,
2172 ) -> Self {
2173 let selector: Arc<dyn Selector> = Arc::new(StoreBackedSelector::new(store.clone()));
2174 Self {
2175 kernel,
2176 sandbox,
2177 validator,
2178 store,
2179 selector,
2180 governor: Arc::new(DefaultGovernor::default()),
2181 economics: Arc::new(Mutex::new(EvuLedger::default())),
2182 remote_publishers: Arc::new(Mutex::new(BTreeMap::new())),
2183 stake_policy: StakePolicy::default(),
2184 sandbox_policy: SandboxPolicy::oris_default(),
2185 validation_plan: ValidationPlan::oris_default(),
2186 }
2187 }
2188
2189 pub fn with_selector(mut self, selector: Arc<dyn Selector>) -> Self {
2190 self.selector = selector;
2191 self
2192 }
2193
2194 pub fn with_sandbox_policy(mut self, policy: SandboxPolicy) -> Self {
2195 self.sandbox_policy = policy;
2196 self
2197 }
2198
2199 pub fn with_governor(mut self, governor: Arc<dyn Governor>) -> Self {
2200 self.governor = governor;
2201 self
2202 }
2203
2204 pub fn with_economics(mut self, economics: Arc<Mutex<EvuLedger>>) -> Self {
2205 self.economics = economics;
2206 self
2207 }
2208
2209 pub fn with_stake_policy(mut self, policy: StakePolicy) -> Self {
2210 self.stake_policy = policy;
2211 self
2212 }
2213
2214 pub fn with_validation_plan(mut self, plan: ValidationPlan) -> Self {
2215 self.validation_plan = plan;
2216 self
2217 }
2218
2219 pub fn select_candidates(&self, input: &SelectorInput) -> Vec<GeneCandidate> {
2220 let executor = StoreReplayExecutor {
2221 sandbox: self.sandbox.clone(),
2222 validator: self.validator.clone(),
2223 store: self.store.clone(),
2224 selector: self.selector.clone(),
2225 governor: self.governor.clone(),
2226 economics: Some(self.economics.clone()),
2227 remote_publishers: Some(self.remote_publishers.clone()),
2228 stake_policy: self.stake_policy.clone(),
2229 };
2230 executor.collect_replay_candidates(input).candidates
2231 }
2232
    /// Seed the store with the built-in templates when no genes exist yet.
    ///
    /// For each template this appends the full event trail — declared
    /// mutation, extracted signals, projected gene, a quarantined promotion
    /// verdict, and a committed + quarantined capsule — so seeds look like
    /// ordinary captured mutations to downstream projections. Returns a
    /// default (un-seeded) report when the projection already has genes.
    pub fn bootstrap_if_empty(&self, run_id: &RunId) -> Result<BootstrapReport, EvoKernelError> {
        let projection = projection_snapshot(self.store.as_ref())?;
        if !projection.genes.is_empty() {
            // Already bootstrapped (or organically populated): do nothing.
            return Ok(BootstrapReport::default());
        }

        let templates = built_in_seed_templates();
        for template in &templates {
            // Derive the deterministic artifacts for this seed template.
            let mutation = build_seed_mutation(template);
            let extracted = extract_seed_signals(template);
            let gene = build_bootstrap_gene(template, &extracted)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
            let capsule = build_bootstrap_capsule(run_id, template, &mutation, &gene)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;

            self.store
                .append_event(EvolutionEvent::MutationDeclared {
                    mutation: mutation.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::SignalsExtracted {
                    mutation_id: mutation.intent.id.clone(),
                    hash: extracted.hash.clone(),
                    signals: extracted.values.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
                .map_err(store_err)?;
            // Seeds are never auto-promoted: they start quarantined until
            // they pass local validation.
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: gene.id.clone(),
                    state: AssetState::Quarantined,
                    reason: "bootstrap seeds require local validation before replay".into(),
                    reason_code: TransitionReasonCode::DowngradeBootstrapRequiresLocalValidation,
                    evidence: None,
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleCommitted {
                    capsule: capsule.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id,
                })
                .map_err(store_err)?;
        }

        Ok(BootstrapReport {
            seeded: true,
            genes_added: templates.len(),
            capsules_added: templates.len(),
        })
    }
2290
2291 pub async fn capture_successful_mutation(
2292 &self,
2293 run_id: &RunId,
2294 mutation: PreparedMutation,
2295 ) -> Result<Capsule, EvoKernelError> {
2296 Ok(self
2297 .capture_mutation_with_governor(run_id, mutation)
2298 .await?
2299 .capsule)
2300 }
2301
    /// Capture one prepared mutation end to end: declare it, apply it in the
    /// sandbox, validate the receipt, extract signals, project a gene, ask
    /// the governor for a promotion verdict, and commit a capsule.
    ///
    /// Every stage appends its audit event to the store; sandbox and
    /// validation failures additionally append a `MutationRejected` event
    /// with a normalized failure contract before returning the error.
    pub async fn capture_mutation_with_governor(
        &self,
        run_id: &RunId,
        mutation: PreparedMutation,
    ) -> Result<CaptureOutcome, EvoKernelError> {
        // Stage 1: record the declaration before any side effects happen.
        self.store
            .append_event(EvolutionEvent::MutationDeclared {
                mutation: mutation.clone(),
            })
            .map_err(store_err)?;

        // Stage 2: apply the patch in the sandbox under the configured policy.
        let receipt = match self.sandbox.apply(&mutation, &self.sandbox_policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                // Audit the rejection with a normalized contract, then fail.
                let message = err.to_string();
                let contract = mutation_needed_contract_for_error_message(&message);
                self.store
                    .append_event(EvolutionEvent::MutationRejected {
                        mutation_id: mutation.intent.id.clone(),
                        reason: contract.failure_reason,
                        reason_code: Some(
                            mutation_needed_reason_code_key(contract.reason_code).to_string(),
                        ),
                        recovery_hint: Some(contract.recovery_hint),
                        fail_closed: contract.fail_closed,
                    })
                    .map_err(store_err)?;
                return Err(EvoKernelError::Sandbox(message));
            }
        };

        self.store
            .append_event(EvolutionEvent::MutationApplied {
                mutation_id: mutation.intent.id.clone(),
                patch_hash: receipt.patch_hash.clone(),
                changed_files: receipt
                    .changed_files
                    .iter()
                    .map(|path| path.to_string_lossy().to_string())
                    .collect(),
            })
            .map_err(store_err)?;

        // Stage 3: run the validation plan against the sandbox receipt.
        let report = match self.validator.run(&receipt, &self.validation_plan).await {
            Ok(report) => report,
            Err(err) => {
                // The validator itself failed to run (distinct from a failed report).
                let message = format!("mutation-needed validation execution error: {err}");
                let contract = mutation_needed_contract_for_error_message(&message);
                self.store
                    .append_event(EvolutionEvent::MutationRejected {
                        mutation_id: mutation.intent.id.clone(),
                        reason: contract.failure_reason,
                        reason_code: Some(
                            mutation_needed_reason_code_key(contract.reason_code).to_string(),
                        ),
                        recovery_hint: Some(contract.recovery_hint),
                        fail_closed: contract.fail_closed,
                    })
                    .map_err(store_err)?;
                return Err(EvoKernelError::Validation(message));
            }
        };
        if !report.success {
            // Validation ran but failed: record both the failed report and the
            // normalized rejection before surfacing the failure.
            self.store
                .append_event(EvolutionEvent::ValidationFailed {
                    mutation_id: mutation.intent.id.clone(),
                    report: report.to_snapshot(&self.validation_plan.profile),
                    gene_id: None,
                })
                .map_err(store_err)?;
            let contract = mutation_needed_contract_for_validation_failure(
                &self.validation_plan.profile,
                &report,
            );
            self.store
                .append_event(EvolutionEvent::MutationRejected {
                    mutation_id: mutation.intent.id.clone(),
                    reason: contract.failure_reason,
                    reason_code: Some(
                        mutation_needed_reason_code_key(contract.reason_code).to_string(),
                    ),
                    recovery_hint: Some(contract.recovery_hint),
                    fail_closed: contract.fail_closed,
                })
                .map_err(store_err)?;
            return Err(EvoKernelError::ValidationFailed(report));
        }

        self.store
            .append_event(EvolutionEvent::ValidationPassed {
                mutation_id: mutation.intent.id.clone(),
                report: report.to_snapshot(&self.validation_plan.profile),
                gene_id: None,
            })
            .map_err(store_err)?;

        // Stage 4: extract deterministic signals from the patch, intent,
        // validation logs, and per-stage outputs.
        let extracted_signals = extract_deterministic_signals(&SignalExtractionInput {
            patch_diff: mutation.artifact.payload.clone(),
            intent: mutation.intent.intent.clone(),
            expected_effect: mutation.intent.expected_effect.clone(),
            declared_signals: mutation.intent.signals.clone(),
            changed_files: receipt
                .changed_files
                .iter()
                .map(|path| path.to_string_lossy().to_string())
                .collect(),
            validation_success: report.success,
            validation_logs: report.logs.clone(),
            stage_outputs: report
                .stages
                .iter()
                .flat_map(|stage| [stage.stdout.clone(), stage.stderr.clone()])
                .filter(|value| !value.is_empty())
                .collect(),
        });
        self.store
            .append_event(EvolutionEvent::SignalsExtracted {
                mutation_id: mutation.intent.id.clone(),
                hash: extracted_signals.hash.clone(),
                signals: extracted_signals.values.clone(),
            })
            .map_err(store_err)?;

        // Stage 5: gather the governor's inputs (projection state, blast
        // radius, prior mutation cadence, confidence history).
        let projection = projection_snapshot(self.store.as_ref())?;
        let blast_radius = compute_blast_radius(&mutation.artifact.payload);
        let recent_mutation_ages_secs = self
            .recent_prior_mutation_ages_secs(Some(mutation.intent.id.as_str()))
            .map_err(store_err)?;
        let mut gene = derive_gene(
            &mutation,
            &receipt,
            &self.validation_plan.profile,
            &extracted_signals.values,
        );
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            StoreReplayExecutor::confidence_context(&projection, &gene.id);
        // Success count = prior capsules for this gene (if it exists) + this capture.
        let success_count = projection
            .genes
            .iter()
            .find(|existing| existing.id == gene.id)
            .map(|existing| {
                projection
                    .capsules
                    .iter()
                    .filter(|capsule| capsule.gene_id == existing.id)
                    .count() as u64
            })
            .unwrap_or(0)
            + 1;
        let governor_decision = self.governor.evaluate(GovernorInput {
            candidate_source: CandidateSource::Local,
            success_count,
            blast_radius: blast_radius.clone(),
            replay_failures: 0,
            recent_mutation_ages_secs,
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        // Stage 6: project the gene in the governor's target state and record
        // the promotion verdict (plus promoted/revoked side events).
        gene.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
            .map_err(store_err)?;
        self.store
            .append_event(EvolutionEvent::PromotionEvaluated {
                gene_id: gene.id.clone(),
                state: governor_decision.target_state.clone(),
                reason: governor_decision.reason.clone(),
                reason_code: governor_decision.reason_code.clone(),
                evidence: None,
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Promoted) {
            self.store
                .append_event(EvolutionEvent::GenePromoted {
                    gene_id: gene.id.clone(),
                })
                .map_err(store_err)?;
        }
        if matches!(governor_decision.target_state, AssetState::Revoked) {
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: gene.id.clone(),
                    reason: governor_decision.reason.clone(),
                })
                .map_err(store_err)?;
        }
        if let Some(spec_id) = &mutation.intent.spec_id {
            self.store
                .append_event(EvolutionEvent::SpecLinked {
                    mutation_id: mutation.intent.id.clone(),
                    spec_id: spec_id.clone(),
                })
                .map_err(store_err)?;
        }

        // Stage 7: commit the capsule, mirroring the governor's target state.
        let mut capsule = build_capsule(
            run_id,
            &mutation,
            &receipt,
            &report,
            &self.validation_plan.profile,
            &gene,
            &blast_radius,
        )
        .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
        capsule.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::CapsuleCommitted {
                capsule: capsule.clone(),
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Quarantined) {
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id.clone(),
                })
                .map_err(store_err)?;
        }

        Ok(CaptureOutcome {
            capsule,
            gene,
            governor_decision,
        })
    }
2529
2530 pub async fn capture_from_proposal(
2531 &self,
2532 run_id: &RunId,
2533 proposal: &AgentMutationProposal,
2534 diff_payload: String,
2535 base_revision: Option<String>,
2536 ) -> Result<CaptureOutcome, EvoKernelError> {
2537 let intent = MutationIntent {
2538 id: next_id("proposal"),
2539 intent: proposal.intent.clone(),
2540 target: MutationTarget::Paths {
2541 allow: proposal.files.clone(),
2542 },
2543 expected_effect: proposal.expected_effect.clone(),
2544 risk: RiskLevel::Low,
2545 signals: proposal.files.clone(),
2546 spec_id: None,
2547 };
2548 self.capture_mutation_with_governor(
2549 run_id,
2550 prepare_mutation(intent, diff_payload, base_revision),
2551 )
2552 .await
2553 }
2554
2555 pub fn feedback_for_agent(outcome: &CaptureOutcome) -> ExecutionFeedback {
2556 ExecutionFeedback {
2557 accepted: !matches!(outcome.governor_decision.target_state, AssetState::Revoked),
2558 asset_state: Some(format!("{:?}", outcome.governor_decision.target_state)),
2559 summary: outcome.governor_decision.reason.clone(),
2560 }
2561 }
2562
2563 pub fn replay_feedback_for_agent(
2564 signals: &[String],
2565 decision: &ReplayDecision,
2566 ) -> ReplayFeedback {
2567 let (task_class_id, task_label) = replay_task_descriptor(signals);
2568 let planner_directive = if decision.used_capsule {
2569 ReplayPlannerDirective::SkipPlanner
2570 } else {
2571 ReplayPlannerDirective::PlanFallback
2572 };
2573 let reasoning_steps_avoided = u64::from(decision.used_capsule);
2574 let reason_code_hint = decision
2575 .detect_evidence
2576 .mismatch_reasons
2577 .first()
2578 .and_then(|reason| infer_replay_fallback_reason_code(reason));
2579 let fallback_contract = normalize_replay_fallback_contract(
2580 &planner_directive,
2581 decision
2582 .fallback_to_planner
2583 .then_some(decision.reason.as_str()),
2584 reason_code_hint,
2585 None,
2586 None,
2587 None,
2588 );
2589 let summary = if decision.used_capsule {
2590 format!("reused prior capsule for task class '{task_label}'; skip planner")
2591 } else {
2592 format!(
2593 "planner fallback required for task class '{task_label}': {}",
2594 decision.reason
2595 )
2596 };
2597
2598 ReplayFeedback {
2599 used_capsule: decision.used_capsule,
2600 capsule_id: decision.capsule_id.clone(),
2601 planner_directive,
2602 reasoning_steps_avoided,
2603 fallback_reason: fallback_contract
2604 .as_ref()
2605 .map(|contract| contract.fallback_reason.clone()),
2606 reason_code: fallback_contract
2607 .as_ref()
2608 .map(|contract| contract.reason_code),
2609 repair_hint: fallback_contract
2610 .as_ref()
2611 .map(|contract| contract.repair_hint.clone()),
2612 next_action: fallback_contract
2613 .as_ref()
2614 .map(|contract| contract.next_action),
2615 confidence: fallback_contract
2616 .as_ref()
2617 .map(|contract| contract.confidence),
2618 task_class_id,
2619 task_label,
2620 summary,
2621 }
2622 }
2623
2624 fn mutation_needed_failure_outcome(
2625 &self,
2626 request: &SupervisedDevloopRequest,
2627 task_class: Option<BoundedTaskClass>,
2628 status: SupervisedDevloopStatus,
2629 contract: MutationNeededFailureContract,
2630 mutation_id_for_audit: Option<String>,
2631 ) -> Result<SupervisedDevloopOutcome, EvoKernelError> {
2632 if let Some(mutation_id) = mutation_id_for_audit {
2633 self.store
2634 .append_event(EvolutionEvent::MutationRejected {
2635 mutation_id,
2636 reason: contract.failure_reason.clone(),
2637 reason_code: Some(
2638 mutation_needed_reason_code_key(contract.reason_code).to_string(),
2639 ),
2640 recovery_hint: Some(contract.recovery_hint.clone()),
2641 fail_closed: contract.fail_closed,
2642 })
2643 .map_err(store_err)?;
2644 }
2645 let status_label = match status {
2646 SupervisedDevloopStatus::AwaitingApproval => "awaiting_approval",
2647 SupervisedDevloopStatus::RejectedByPolicy => "rejected_by_policy",
2648 SupervisedDevloopStatus::FailedClosed => "failed_closed",
2649 SupervisedDevloopStatus::Executed => "executed",
2650 };
2651 let reason_code_key = mutation_needed_reason_code_key(contract.reason_code);
2652 Ok(SupervisedDevloopOutcome {
2653 task_id: request.task.id.clone(),
2654 task_class,
2655 status,
2656 execution_feedback: None,
2657 failure_contract: Some(contract.clone()),
2658 summary: format!(
2659 "supervised devloop {status_label} task '{}' [{reason_code_key}]: {}",
2660 request.task.id, contract.failure_reason
2661 ),
2662 })
2663 }
2664
    /// Execute one bounded, human-approved devloop task end to end.
    ///
    /// Guard order: task classification → explicit approval → diff byte
    /// budget → changed-line budget → sandbox duration budget → validation
    /// timeout budget → full capture pipeline. Policy rejections are
    /// returned as structured failure outcomes (with an audit event) rather
    /// than `Err`; only unexpected store/kernel errors propagate as `Err`.
    pub async fn run_supervised_devloop(
        &self,
        run_id: &RunId,
        request: &SupervisedDevloopRequest,
        diff_payload: String,
        base_revision: Option<String>,
    ) -> Result<SupervisedDevloopOutcome, EvoKernelError> {
        let audit_mutation_id = mutation_needed_audit_mutation_id(request);
        // Guard 1: the task must map onto a bounded task class.
        let task_class = classify_supervised_devloop_request(request);
        let Some(task_class) = task_class else {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "supervised devloop rejected task '{}' because it is an unsupported task outside the bounded scope",
                    request.task.id
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                None,
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        };

        // Guard 2: pause (not fail) until a human explicitly approves.
        if !request.approval.approved {
            return Ok(SupervisedDevloopOutcome {
                task_id: request.task.id.clone(),
                task_class: Some(task_class),
                status: SupervisedDevloopStatus::AwaitingApproval,
                execution_feedback: None,
                failure_contract: None,
                summary: format!(
                    "supervised devloop paused task '{}' until explicit human approval is granted",
                    request.task.id
                ),
            });
        }

        // Guard 3: the raw diff must fit the bounded byte budget.
        if diff_payload.len() > MUTATION_NEEDED_MAX_DIFF_BYTES {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed diff payload exceeds bounded byte budget (size={}, max={})",
                    diff_payload.len(),
                    MUTATION_NEEDED_MAX_DIFF_BYTES
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        }

        // Guard 4: the patch's blast radius must fit the changed-line budget.
        let blast_radius = compute_blast_radius(&diff_payload);
        if blast_radius.lines_changed > MUTATION_NEEDED_MAX_CHANGED_LINES {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed patch exceeds bounded changed-line budget (lines_changed={}, max={})",
                    blast_radius.lines_changed,
                    MUTATION_NEEDED_MAX_CHANGED_LINES
                )),
                Some(MutationNeededFailureReasonCode::UnsafePatch),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::FailedClosed,
                contract,
                Some(audit_mutation_id),
            );
        }

        // Guard 5: the configured sandbox duration must stay bounded.
        if self.sandbox_policy.max_duration_ms > MUTATION_NEEDED_MAX_SANDBOX_DURATION_MS {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed sandbox duration budget exceeds bounded policy (configured={}ms, max={}ms)",
                    self.sandbox_policy.max_duration_ms,
                    MUTATION_NEEDED_MAX_SANDBOX_DURATION_MS
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        }

        // Guard 6: the validation plan's timeout budget must stay bounded.
        let validation_budget_ms = validation_plan_timeout_budget_ms(&self.validation_plan);
        if validation_budget_ms > MUTATION_NEEDED_MAX_VALIDATION_BUDGET_MS {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed validation timeout budget exceeds bounded policy (configured={}ms, max={}ms)",
                    validation_budget_ms,
                    MUTATION_NEEDED_MAX_VALIDATION_BUDGET_MS
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        }

        // All policy guards passed: run the full capture pipeline. Capture
        // failures pass `None` for the audit id because the capture path has
        // already appended its own `MutationRejected` event.
        let capture = match self
            .capture_from_proposal(run_id, &request.proposal, diff_payload, base_revision)
            .await
        {
            Ok(capture) => capture,
            Err(EvoKernelError::Sandbox(message)) => {
                let contract = mutation_needed_contract_for_error_message(&message);
                let status = mutation_needed_status_from_reason_code(contract.reason_code);
                return self.mutation_needed_failure_outcome(
                    request,
                    Some(task_class),
                    status,
                    contract,
                    None,
                );
            }
            Err(EvoKernelError::ValidationFailed(report)) => {
                let contract = mutation_needed_contract_for_validation_failure(
                    &self.validation_plan.profile,
                    &report,
                );
                let status = mutation_needed_status_from_reason_code(contract.reason_code);
                return self.mutation_needed_failure_outcome(
                    request,
                    Some(task_class),
                    status,
                    contract,
                    None,
                );
            }
            Err(EvoKernelError::Validation(message)) => {
                let contract = mutation_needed_contract_for_error_message(&message);
                let status = mutation_needed_status_from_reason_code(contract.reason_code);
                return self.mutation_needed_failure_outcome(
                    request,
                    Some(task_class),
                    status,
                    contract,
                    None,
                );
            }
            Err(err) => return Err(err),
        };
        let approver = request
            .approval
            .approver
            .as_deref()
            .unwrap_or("unknown approver");

        Ok(SupervisedDevloopOutcome {
            task_id: request.task.id.clone(),
            task_class: Some(task_class),
            status: SupervisedDevloopStatus::Executed,
            execution_feedback: Some(Self::feedback_for_agent(&capture)),
            failure_contract: None,
            summary: format!(
                "supervised devloop executed task '{}' with explicit approval from {approver}",
                request.task.id
            ),
        })
    }
2840 pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
2841 MultiAgentCoordinator::new().coordinate(plan)
2842 }
2843
2844 pub fn export_promoted_assets(
2845 &self,
2846 sender_id: impl Into<String>,
2847 ) -> Result<EvolutionEnvelope, EvoKernelError> {
2848 let sender_id = sender_id.into();
2849 let envelope = export_promoted_assets_from_store(self.store.as_ref(), sender_id.clone())?;
2850 if !envelope.assets.is_empty() {
2851 let mut ledger = self
2852 .economics
2853 .lock()
2854 .map_err(|_| EvoKernelError::Validation("economics ledger lock poisoned".into()))?;
2855 if ledger
2856 .reserve_publish_stake(&sender_id, &self.stake_policy)
2857 .is_none()
2858 {
2859 return Err(EvoKernelError::Validation(
2860 "insufficient EVU for remote publish".into(),
2861 ));
2862 }
2863 }
2864 Ok(envelope)
2865 }
2866
2867 pub fn import_remote_envelope(
2868 &self,
2869 envelope: &EvolutionEnvelope,
2870 ) -> Result<ImportOutcome, EvoKernelError> {
2871 import_remote_envelope_into_store(
2872 self.store.as_ref(),
2873 envelope,
2874 Some(self.remote_publishers.as_ref()),
2875 None,
2876 )
2877 }
2878
2879 pub fn fetch_assets(
2880 &self,
2881 responder_id: impl Into<String>,
2882 query: &FetchQuery,
2883 ) -> Result<FetchResponse, EvoKernelError> {
2884 fetch_assets_from_store(self.store.as_ref(), responder_id, query)
2885 }
2886
2887 pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
2888 revoke_assets_in_store(self.store.as_ref(), notice)
2889 }
2890
2891 pub async fn replay_or_fallback(
2892 &self,
2893 input: SelectorInput,
2894 ) -> Result<ReplayDecision, EvoKernelError> {
2895 let replay_run_id = next_id("replay");
2896 self.replay_or_fallback_for_run(&replay_run_id, input).await
2897 }
2898
2899 pub async fn replay_or_fallback_for_run(
2900 &self,
2901 run_id: &RunId,
2902 input: SelectorInput,
2903 ) -> Result<ReplayDecision, EvoKernelError> {
2904 let executor = StoreReplayExecutor {
2905 sandbox: self.sandbox.clone(),
2906 validator: self.validator.clone(),
2907 store: self.store.clone(),
2908 selector: self.selector.clone(),
2909 governor: self.governor.clone(),
2910 economics: Some(self.economics.clone()),
2911 remote_publishers: Some(self.remote_publishers.clone()),
2912 stake_policy: self.stake_policy.clone(),
2913 };
2914 executor
2915 .try_replay_for_run(run_id, &input, &self.sandbox_policy, &self.validation_plan)
2916 .await
2917 .map_err(|err| EvoKernelError::Validation(err.to_string()))
2918 }
2919
2920 pub fn economics_signal(&self, node_id: &str) -> Option<EconomicsSignal> {
2921 self.economics.lock().ok()?.governor_signal(node_id)
2922 }
2923
2924 pub fn selector_reputation_bias(&self) -> BTreeMap<String, f32> {
2925 self.economics
2926 .lock()
2927 .ok()
2928 .map(|locked| locked.selector_reputation_bias())
2929 .unwrap_or_default()
2930 }
2931
2932 pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
2933 evolution_metrics_snapshot(self.store.as_ref())
2934 }
2935
2936 pub fn replay_roi_release_gate_summary(
2937 &self,
2938 window_seconds: u64,
2939 ) -> Result<ReplayRoiWindowSummary, EvoKernelError> {
2940 replay_roi_release_gate_summary(self.store.as_ref(), window_seconds)
2941 }
2942
2943 pub fn render_replay_roi_release_gate_summary_json(
2944 &self,
2945 window_seconds: u64,
2946 ) -> Result<String, EvoKernelError> {
2947 let summary = self.replay_roi_release_gate_summary(window_seconds)?;
2948 serde_json::to_string_pretty(&summary)
2949 .map_err(|err| EvoKernelError::Validation(err.to_string()))
2950 }
2951
2952 pub fn replay_roi_release_gate_contract(
2953 &self,
2954 window_seconds: u64,
2955 thresholds: ReplayRoiReleaseGateThresholds,
2956 ) -> Result<ReplayRoiReleaseGateContract, EvoKernelError> {
2957 let summary = self.replay_roi_release_gate_summary(window_seconds)?;
2958 Ok(replay_roi_release_gate_contract(&summary, thresholds))
2959 }
2960
2961 pub fn render_replay_roi_release_gate_contract_json(
2962 &self,
2963 window_seconds: u64,
2964 thresholds: ReplayRoiReleaseGateThresholds,
2965 ) -> Result<String, EvoKernelError> {
2966 let contract = self.replay_roi_release_gate_contract(window_seconds, thresholds)?;
2967 serde_json::to_string_pretty(&contract)
2968 .map_err(|err| EvoKernelError::Validation(err.to_string()))
2969 }
2970
2971 pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
2972 self.metrics_snapshot().map(|snapshot| {
2973 let health = evolution_health_snapshot(&snapshot);
2974 render_evolution_metrics_prometheus(&snapshot, &health)
2975 })
2976 }
2977
2978 pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
2979 self.metrics_snapshot()
2980 .map(|snapshot| evolution_health_snapshot(&snapshot))
2981 }
2982}
2983
2984pub fn prepare_mutation(
2985 intent: MutationIntent,
2986 diff_payload: String,
2987 base_revision: Option<String>,
2988) -> PreparedMutation {
2989 PreparedMutation {
2990 intent,
2991 artifact: MutationArtifact {
2992 encoding: ArtifactEncoding::UnifiedDiff,
2993 content_hash: compute_artifact_hash(&diff_payload),
2994 payload: diff_payload,
2995 base_revision,
2996 },
2997 }
2998}
2999
3000pub fn prepare_mutation_from_spec(
3001 plan: CompiledMutationPlan,
3002 diff_payload: String,
3003 base_revision: Option<String>,
3004) -> PreparedMutation {
3005 prepare_mutation(plan.mutation_intent, diff_payload, base_revision)
3006}
3007
3008pub fn default_evolution_store() -> Arc<dyn EvolutionStore> {
3009 Arc::new(oris_evolution::JsonlEvolutionStore::new(
3010 default_store_root(),
3011 ))
3012}
3013
/// Built-in seed templates used by `bootstrap_if_empty` to populate an empty
/// store. Each carries a deterministic unified-diff payload; the diff bodies
/// are embedded verbatim via line-continuation string literals, so their
/// column-zero lines are part of the payload, not stray formatting.
fn built_in_seed_templates() -> Vec<SeedTemplate> {
    vec![
        // Seed 1: recover a missing README.
        SeedTemplate {
            id: "bootstrap-readme".into(),
            intent: "Seed a baseline README recovery pattern".into(),
            signals: vec!["bootstrap readme".into(), "missing readme".into()],
            diff_payload: "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1,3 @@
+# Oris
+Bootstrap documentation seed
+"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Seed 2: stabilize failing tests.
        SeedTemplate {
            id: "bootstrap-test-fix".into(),
            intent: "Seed a deterministic test stabilization pattern".into(),
            signals: vec!["bootstrap test fix".into(), "failing tests".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 1111111..2222222 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,2 @@
 pub fn demo() -> usize { 1 }
+pub fn normalize_test_output() -> bool { true }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Seed 3: low-risk refactor.
        SeedTemplate {
            id: "bootstrap-refactor".into(),
            intent: "Seed a low-risk refactor capsule".into(),
            signals: vec!["bootstrap refactor".into(), "small refactor".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 2222222..3333333 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,3 @@
 pub fn demo() -> usize { 1 }
+
+fn extract_strategy_key(input: &str) -> &str { input }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Seed 4: structured logging baseline.
        SeedTemplate {
            id: "bootstrap-logging".into(),
            intent: "Seed a baseline structured logging mutation".into(),
            signals: vec!["bootstrap logging".into(), "structured logs".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 3333333..4444444 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,3 @@
 pub fn demo() -> usize { 1 }
+
+fn emit_bootstrap_log() { println!(\"bootstrap-log\"); }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
    ]
}
3085
3086fn build_seed_mutation(template: &SeedTemplate) -> PreparedMutation {
3087 let changed_files = seed_changed_files(&template.diff_payload);
3088 let target = if changed_files.is_empty() {
3089 MutationTarget::WorkspaceRoot
3090 } else {
3091 MutationTarget::Paths {
3092 allow: changed_files,
3093 }
3094 };
3095 prepare_mutation(
3096 MutationIntent {
3097 id: stable_hash_json(&("bootstrap-mutation", &template.id))
3098 .unwrap_or_else(|_| format!("bootstrap-mutation-{}", template.id)),
3099 intent: template.intent.clone(),
3100 target,
3101 expected_effect: format!("seed {}", template.id),
3102 risk: RiskLevel::Low,
3103 signals: template.signals.clone(),
3104 spec_id: None,
3105 },
3106 template.diff_payload.clone(),
3107 None,
3108 )
3109}
3110
3111fn extract_seed_signals(template: &SeedTemplate) -> SignalExtractionOutput {
3112 let mut signals = BTreeSet::new();
3113 for declared in &template.signals {
3114 if let Some(phrase) = normalize_signal_phrase(declared) {
3115 signals.insert(phrase);
3116 }
3117 extend_signal_tokens(&mut signals, declared);
3118 }
3119 extend_signal_tokens(&mut signals, &template.intent);
3120 extend_signal_tokens(&mut signals, &template.diff_payload);
3121 for changed_file in seed_changed_files(&template.diff_payload) {
3122 extend_signal_tokens(&mut signals, &changed_file);
3123 }
3124 let values = signals.into_iter().take(32).collect::<Vec<_>>();
3125 let hash =
3126 stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
3127 SignalExtractionOutput { values, hash }
3128}
3129
/// Lists the files touched by a unified diff, taken from `+++ b/<path>`
/// headers. The result is deduplicated and sorted.
fn seed_changed_files(diff_payload: &str) -> Vec<String> {
    diff_payload
        .lines()
        .filter_map(|line| line.strip_prefix("+++ b/"))
        .map(str::trim)
        .filter(|path| !path.is_empty())
        .map(str::to_string)
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect()
}
3142
3143fn build_bootstrap_gene(
3144 template: &SeedTemplate,
3145 extracted: &SignalExtractionOutput,
3146) -> Result<Gene, EvolutionError> {
3147 let strategy = vec![template.id.clone(), "bootstrap".into()];
3148 let id = stable_hash_json(&(
3149 "bootstrap-gene",
3150 &template.id,
3151 &extracted.values,
3152 &template.validation_profile,
3153 ))?;
3154 Ok(Gene {
3155 id,
3156 signals: extracted.values.clone(),
3157 strategy,
3158 validation: vec![template.validation_profile.clone()],
3159 state: AssetState::Quarantined,
3160 })
3161}
3162
3163fn build_bootstrap_capsule(
3164 run_id: &RunId,
3165 template: &SeedTemplate,
3166 mutation: &PreparedMutation,
3167 gene: &Gene,
3168) -> Result<Capsule, EvolutionError> {
3169 let cwd = std::env::current_dir().unwrap_or_else(|_| Path::new(".").to_path_buf());
3170 let env = current_env_fingerprint(&cwd);
3171 let diff_hash = mutation.artifact.content_hash.clone();
3172 let changed_files = seed_changed_files(&template.diff_payload);
3173 let validator_hash = stable_hash_json(&(
3174 "bootstrap-validator",
3175 &template.id,
3176 &template.validation_profile,
3177 &diff_hash,
3178 ))?;
3179 let id = stable_hash_json(&(
3180 "bootstrap-capsule",
3181 &template.id,
3182 run_id,
3183 &gene.id,
3184 &diff_hash,
3185 &env,
3186 ))?;
3187 Ok(Capsule {
3188 id,
3189 gene_id: gene.id.clone(),
3190 mutation_id: mutation.intent.id.clone(),
3191 run_id: run_id.clone(),
3192 diff_hash,
3193 confidence: 0.0,
3194 env,
3195 outcome: Outcome {
3196 success: false,
3197 validation_profile: template.validation_profile.clone(),
3198 validation_duration_ms: 0,
3199 changed_files,
3200 validator_hash,
3201 lines_changed: compute_blast_radius(&template.diff_payload).lines_changed,
3202 replay_verified: false,
3203 },
3204 state: AssetState::Quarantined,
3205 })
3206}
3207
3208fn derive_gene(
3209 mutation: &PreparedMutation,
3210 receipt: &SandboxReceipt,
3211 validation_profile: &str,
3212 extracted_signals: &[String],
3213) -> Gene {
3214 let mut strategy = BTreeSet::new();
3215 for file in &receipt.changed_files {
3216 if let Some(component) = file.components().next() {
3217 strategy.insert(component.as_os_str().to_string_lossy().to_string());
3218 }
3219 }
3220 for token in mutation
3221 .artifact
3222 .payload
3223 .split(|ch: char| !ch.is_ascii_alphanumeric())
3224 {
3225 if token.len() == 5
3226 && token.starts_with('E')
3227 && token[1..].chars().all(|ch| ch.is_ascii_digit())
3228 {
3229 strategy.insert(token.to_string());
3230 }
3231 }
3232 for token in mutation.intent.intent.split_whitespace().take(8) {
3233 strategy.insert(token.to_ascii_lowercase());
3234 }
3235 let strategy = strategy.into_iter().collect::<Vec<_>>();
3236 let id = stable_hash_json(&(extracted_signals, &strategy, validation_profile))
3237 .unwrap_or_else(|_| next_id("gene"));
3238 Gene {
3239 id,
3240 signals: extracted_signals.to_vec(),
3241 strategy,
3242 validation: vec![validation_profile.to_string()],
3243 state: AssetState::Promoted,
3244 }
3245}
3246
3247fn build_capsule(
3248 run_id: &RunId,
3249 mutation: &PreparedMutation,
3250 receipt: &SandboxReceipt,
3251 report: &ValidationReport,
3252 validation_profile: &str,
3253 gene: &Gene,
3254 blast_radius: &BlastRadius,
3255) -> Result<Capsule, EvolutionError> {
3256 let env = current_env_fingerprint(&receipt.workdir);
3257 let validator_hash = stable_hash_json(report)?;
3258 let diff_hash = mutation.artifact.content_hash.clone();
3259 let id = stable_hash_json(&(run_id, &gene.id, &diff_hash, &mutation.intent.id))?;
3260 Ok(Capsule {
3261 id,
3262 gene_id: gene.id.clone(),
3263 mutation_id: mutation.intent.id.clone(),
3264 run_id: run_id.clone(),
3265 diff_hash,
3266 confidence: 0.7,
3267 env,
3268 outcome: oris_evolution::Outcome {
3269 success: true,
3270 validation_profile: validation_profile.to_string(),
3271 validation_duration_ms: report.duration_ms,
3272 changed_files: receipt
3273 .changed_files
3274 .iter()
3275 .map(|path| path.to_string_lossy().to_string())
3276 .collect(),
3277 validator_hash,
3278 lines_changed: blast_radius.lines_changed,
3279 replay_verified: false,
3280 },
3281 state: AssetState::Promoted,
3282 })
3283}
3284
3285fn current_env_fingerprint(workdir: &Path) -> EnvFingerprint {
3286 let rustc_version = Command::new("rustc")
3287 .arg("--version")
3288 .output()
3289 .ok()
3290 .filter(|output| output.status.success())
3291 .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
3292 .unwrap_or_else(|| "rustc unknown".into());
3293 let cargo_lock_hash = fs::read(workdir.join("Cargo.lock"))
3294 .ok()
3295 .map(|bytes| {
3296 let value = String::from_utf8_lossy(&bytes);
3297 compute_artifact_hash(&value)
3298 })
3299 .unwrap_or_else(|| "missing-cargo-lock".into());
3300 let target_triple = format!(
3301 "{}-unknown-{}",
3302 std::env::consts::ARCH,
3303 std::env::consts::OS
3304 );
3305 EnvFingerprint {
3306 rustc_version,
3307 cargo_lock_hash,
3308 target_triple,
3309 os: std::env::consts::OS.to_string(),
3310 }
3311}
3312
/// Tokenizes `input` on non-alphanumeric boundaries and inserts normalized
/// tokens into `out`.
///
/// Rust diagnostic codes (`E####`/`e####`) keep an uppercase `E` prefix;
/// every other token is lowercased. Tokens shorter than three characters
/// after normalization are dropped.
fn extend_signal_tokens(out: &mut BTreeSet<String>, input: &str) {
    for token in input.split(|ch: char| !ch.is_ascii_alphanumeric()) {
        let token = token.trim();
        if token.is_empty() {
            continue;
        }
        // Inline diagnostic-code check: 5 bytes, `e`/`E` then four digits.
        let bytes = token.as_bytes();
        let is_error_code = bytes.len() == 5
            && matches!(bytes[0], b'e' | b'E')
            && bytes[1..].iter().all(u8::is_ascii_digit);
        let normalized = if is_error_code {
            format!("E{}", &token[1..])
        } else {
            token.to_ascii_lowercase()
        };
        if normalized.len() >= 3 {
            out.insert(normalized);
        }
    }
}
3335
/// Normalizes a free-form signal into a canonical space-joined phrase.
///
/// Tokens follow the same rules as `extend_signal_tokens`: diagnostic codes
/// keep an uppercase `E` prefix, everything else is lowercased, and tokens
/// shorter than three characters are dropped. Returns `None` when nothing
/// survives normalization.
fn normalize_signal_phrase(input: &str) -> Option<String> {
    let mut parts = Vec::new();
    for token in input.split(|ch: char| !ch.is_ascii_alphanumeric()) {
        let token = token.trim();
        if token.is_empty() {
            continue;
        }
        // Inline diagnostic-code check: 5 bytes, `e`/`E` then four digits.
        let bytes = token.as_bytes();
        let is_error_code = bytes.len() == 5
            && matches!(bytes[0], b'e' | b'E')
            && bytes[1..].iter().all(u8::is_ascii_digit);
        let normalized = if is_error_code {
            format!("E{}", &token[1..])
        } else {
            token.to_ascii_lowercase()
        };
        if normalized.len() >= 3 {
            parts.push(normalized);
        }
    }
    if parts.is_empty() {
        None
    } else {
        Some(parts.join(" "))
    }
}
3368
3369fn replay_task_descriptor(signals: &[String]) -> (String, String) {
3370 let normalized = signals
3371 .iter()
3372 .filter_map(|signal| normalize_signal_phrase(signal))
3373 .collect::<BTreeSet<_>>()
3374 .into_iter()
3375 .collect::<Vec<_>>();
3376 if normalized.is_empty() {
3377 return ("unknown".into(), "unknown".into());
3378 }
3379 let task_label = normalized
3380 .iter()
3381 .find(|value| {
3382 value.as_str() != "validation passed" && value.as_str() != "validation failed"
3383 })
3384 .cloned()
3385 .unwrap_or_else(|| normalized[0].clone());
3386 let task_class_id = stable_hash_json(&normalized)
3387 .unwrap_or_else(|_| compute_artifact_hash(&normalized.join("\n")));
3388 (task_class_id, task_label)
3389}
3390
3391fn normalized_signal_values(signals: &[String]) -> Vec<String> {
3392 signals
3393 .iter()
3394 .filter_map(|signal| normalize_signal_phrase(signal))
3395 .collect::<BTreeSet<_>>()
3396 .into_iter()
3397 .collect::<Vec<_>>()
3398}
3399
3400fn matched_replay_signals(input_signals: &[String], candidate_signals: &[String]) -> Vec<String> {
3401 let normalized_input = normalized_signal_values(input_signals);
3402 if normalized_input.is_empty() {
3403 return Vec::new();
3404 }
3405 let normalized_candidate = normalized_signal_values(candidate_signals);
3406 if normalized_candidate.is_empty() {
3407 return normalized_input;
3408 }
3409 let matched = normalized_input
3410 .iter()
3411 .filter(|signal| {
3412 normalized_candidate
3413 .iter()
3414 .any(|candidate| candidate.contains(signal.as_str()) || signal.contains(candidate))
3415 })
3416 .cloned()
3417 .collect::<Vec<_>>();
3418 if matched.is_empty() {
3419 normalized_input
3420 } else {
3421 matched
3422 }
3423}
3424
3425fn replay_detect_evidence_from_input(input: &SelectorInput) -> ReplayDetectEvidence {
3426 let (task_class_id, task_label) = replay_task_descriptor(&input.signals);
3427 ReplayDetectEvidence {
3428 task_class_id,
3429 task_label,
3430 matched_signals: normalized_signal_values(&input.signals),
3431 mismatch_reasons: Vec::new(),
3432 }
3433}
3434
3435fn replay_descriptor_from_candidate_or_input(
3436 candidate: Option<&GeneCandidate>,
3437 input: &SelectorInput,
3438) -> (String, String) {
3439 if let Some(candidate) = candidate {
3440 let task_class_id = strategy_metadata_value(&candidate.gene.strategy, "task_class");
3441 let task_label = strategy_metadata_value(&candidate.gene.strategy, "task_label");
3442 if let Some(task_class_id) = task_class_id {
3443 return (
3444 task_class_id.clone(),
3445 task_label.unwrap_or_else(|| task_class_id.clone()),
3446 );
3447 }
3448 return replay_task_descriptor(&candidate.gene.signals);
3449 }
3450 replay_task_descriptor(&input.signals)
3451}
3452
3453fn estimated_reasoning_tokens(signals: &[String]) -> u64 {
3454 let normalized = signals
3455 .iter()
3456 .filter_map(|signal| normalize_signal_phrase(signal))
3457 .collect::<BTreeSet<_>>();
3458 let signal_count = normalized.len() as u64;
3459 REPLAY_REASONING_TOKEN_FLOOR + REPLAY_REASONING_TOKEN_SIGNAL_WEIGHT * signal_count.max(1)
3460}
3461
/// Computes a replay ROI score in `[-1.0, 1.0]`: positive when reasoning
/// savings dominate fallback cost, negative when fallback cost dominates,
/// and `0.0` when both inputs are zero.
fn compute_replay_roi(reasoning_avoided_tokens: u64, replay_fallback_cost: u64) -> f64 {
    // Saturate instead of `+`: the raw sum would panic in debug builds
    // (and wrap in release) for extreme token counts.
    let total = reasoning_avoided_tokens.saturating_add(replay_fallback_cost);
    if total == 0 {
        return 0.0;
    }
    (reasoning_avoided_tokens as f64 - replay_fallback_cost as f64) / total as f64
}
3469
/// Reports whether `value` looks like a Rust diagnostic code: exactly five
/// bytes, an `e`/`E` prefix, and four ASCII digits.
fn is_rust_error_code(value: &str) -> bool {
    let bytes = value.as_bytes();
    bytes.len() == 5
        && (bytes[0] == b'e' || bytes[0] == b'E')
        && bytes[1..].iter().all(u8::is_ascii_digit)
}
3475
3476fn validation_plan_timeout_budget_ms(plan: &ValidationPlan) -> u64 {
3477 plan.stages.iter().fold(0_u64, |acc, stage| match stage {
3478 ValidationStage::Command { timeout_ms, .. } => acc.saturating_add(*timeout_ms),
3479 })
3480}
3481
3482fn mutation_needed_reason_code_key(reason_code: MutationNeededFailureReasonCode) -> &'static str {
3483 match reason_code {
3484 MutationNeededFailureReasonCode::PolicyDenied => "policy_denied",
3485 MutationNeededFailureReasonCode::ValidationFailed => "validation_failed",
3486 MutationNeededFailureReasonCode::UnsafePatch => "unsafe_patch",
3487 MutationNeededFailureReasonCode::Timeout => "timeout",
3488 MutationNeededFailureReasonCode::MutationPayloadMissing => "mutation_payload_missing",
3489 MutationNeededFailureReasonCode::UnknownFailClosed => "unknown_fail_closed",
3490 }
3491}
3492
3493fn mutation_needed_status_from_reason_code(
3494 reason_code: MutationNeededFailureReasonCode,
3495) -> SupervisedDevloopStatus {
3496 if matches!(reason_code, MutationNeededFailureReasonCode::PolicyDenied) {
3497 SupervisedDevloopStatus::RejectedByPolicy
3498 } else {
3499 SupervisedDevloopStatus::FailedClosed
3500 }
3501}
3502
3503fn mutation_needed_contract_for_validation_failure(
3504 profile: &str,
3505 report: &ValidationReport,
3506) -> MutationNeededFailureContract {
3507 let lower_logs = report.logs.to_ascii_lowercase();
3508 if lower_logs.contains("timed out") {
3509 normalize_mutation_needed_failure_contract(
3510 Some(&format!(
3511 "mutation-needed validation command timed out under profile '{profile}'"
3512 )),
3513 Some(MutationNeededFailureReasonCode::Timeout),
3514 )
3515 } else {
3516 normalize_mutation_needed_failure_contract(
3517 Some(&format!(
3518 "mutation-needed validation failed under profile '{profile}'"
3519 )),
3520 Some(MutationNeededFailureReasonCode::ValidationFailed),
3521 )
3522 }
3523}
3524
3525fn mutation_needed_contract_for_error_message(message: &str) -> MutationNeededFailureContract {
3526 let reason_code = infer_mutation_needed_failure_reason_code(message);
3527 normalize_mutation_needed_failure_contract(Some(message), reason_code)
3528}
3529
3530fn mutation_needed_audit_mutation_id(request: &SupervisedDevloopRequest) -> String {
3531 stable_hash_json(&(
3532 "mutation-needed-audit",
3533 &request.task.id,
3534 &request.proposal.intent,
3535 &request.proposal.files,
3536 ))
3537 .map(|hash| format!("mutation-needed-{hash}"))
3538 .unwrap_or_else(|_| format!("mutation-needed-{}", request.task.id))
3539}
3540
3541fn classify_supervised_devloop_request(
3542 request: &SupervisedDevloopRequest,
3543) -> Option<BoundedTaskClass> {
3544 let file_count = normalized_supervised_devloop_docs_files(&request.proposal.files)?.len();
3545 match file_count {
3546 1 => Some(BoundedTaskClass::DocsSingleFile),
3547 2..=SUPERVISED_DEVLOOP_MAX_DOC_FILES => Some(BoundedTaskClass::DocsMultiFile),
3548 _ => None,
3549 }
3550}
3551
3552fn normalized_supervised_devloop_docs_files(files: &[String]) -> Option<Vec<String>> {
3553 if files.is_empty() || files.len() > SUPERVISED_DEVLOOP_MAX_DOC_FILES {
3554 return None;
3555 }
3556
3557 let mut normalized_files = Vec::with_capacity(files.len());
3558 let mut seen = BTreeSet::new();
3559
3560 for path in files {
3561 let normalized = path.trim().replace('\\', "/");
3562 if normalized.is_empty()
3563 || !normalized.starts_with("docs/")
3564 || !normalized.ends_with(".md")
3565 || !seen.insert(normalized.clone())
3566 {
3567 return None;
3568 }
3569 normalized_files.push(normalized);
3570 }
3571
3572 Some(normalized_files)
3573}
3574
3575fn find_declared_mutation(
3576 store: &dyn EvolutionStore,
3577 mutation_id: &MutationId,
3578) -> Result<Option<PreparedMutation>, EvolutionError> {
3579 for stored in store.scan(1)? {
3580 if let EvolutionEvent::MutationDeclared { mutation } = stored.event {
3581 if &mutation.intent.id == mutation_id {
3582 return Ok(Some(mutation));
3583 }
3584 }
3585 }
3586 Ok(None)
3587}
3588
/// Finds promoted genes whose lowercased signal set equals the input's
/// signal set exactly, each paired with its promoted capsules.
///
/// When the input carries a spec id, genes must also be linked to that spec
/// (case-insensitive). Capsules inside each candidate are ordered best-first
/// by environment match, then confidence, then id; candidates are ordered by
/// the best capsule's environment score, then gene id. A projection read
/// failure yields an empty list (fail-soft).
fn exact_match_candidates(store: &dyn EvolutionStore, input: &SelectorInput) -> Vec<GeneCandidate> {
    let Ok(projection) = projection_snapshot(store) else {
        return Vec::new();
    };
    // Cloned up front: `projection.genes` is moved into the iterator below
    // while capsules/spec ids are still needed inside the closure.
    let capsules = projection.capsules.clone();
    let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
    // Blank/whitespace spec ids are treated as "no spec filter".
    let requested_spec_id = input
        .spec_id
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty());
    let signal_set = input
        .signals
        .iter()
        .map(|signal| signal.to_ascii_lowercase())
        .collect::<BTreeSet<_>>();
    let mut candidates = projection
        .genes
        .into_iter()
        .filter_map(|gene| {
            if gene.state != AssetState::Promoted {
                return None;
            }
            if let Some(spec_id) = requested_spec_id {
                let matches_spec = spec_ids_by_gene
                    .get(&gene.id)
                    .map(|values| {
                        values
                            .iter()
                            .any(|value| value.eq_ignore_ascii_case(spec_id))
                    })
                    .unwrap_or(false);
                if !matches_spec {
                    return None;
                }
            }
            let gene_signals = gene
                .signals
                .iter()
                .map(|signal| signal.to_ascii_lowercase())
                .collect::<BTreeSet<_>>();
            // Exact (set-equality) signal match only; partial overlap is
            // handled by other selectors.
            if gene_signals == signal_set {
                let mut matched_capsules = capsules
                    .iter()
                    .filter(|capsule| {
                        capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
                    })
                    .cloned()
                    .collect::<Vec<_>>();
                // Best capsule first: env match desc, confidence desc,
                // id asc as the deterministic tiebreaker.
                matched_capsules.sort_by(|left, right| {
                    replay_environment_match_factor(&input.env, &right.env)
                        .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
                        .unwrap_or(std::cmp::Ordering::Equal)
                        .then_with(|| {
                            right
                                .confidence
                                .partial_cmp(&left.confidence)
                                .unwrap_or(std::cmp::Ordering::Equal)
                        })
                        .then_with(|| left.id.cmp(&right.id))
                });
                if matched_capsules.is_empty() {
                    None
                } else {
                    // Candidate score = environment score of its best capsule.
                    let score = matched_capsules
                        .first()
                        .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
                        .unwrap_or(0.0);
                    Some(GeneCandidate {
                        gene,
                        score,
                        capsules: matched_capsules,
                    })
                }
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    candidates.sort_by(|left, right| {
        right
            .score
            .partial_cmp(&left.score)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| left.gene.id.cmp(&right.gene.id))
    });
    candidates
}
3677
/// Finds remotely imported, not-yet-promoted candidates whose signals
/// overlap the input's normalized signals.
///
/// Unlike [`exact_match_candidates`], this accepts partial (substring)
/// signal overlap and considers quarantined/shadow-validated capsules, but
/// only those whose ids appear in a `RemoteAssetImported` event. Scoring is
/// the larger of the signal-overlap ratio and the best capsule's environment
/// match. Store read failures yield an empty list (fail-soft).
fn quarantined_remote_exact_match_candidates(
    store: &dyn EvolutionStore,
    input: &SelectorInput,
) -> Vec<GeneCandidate> {
    // Collect every asset id that arrived via remote import; only those
    // capsules are eligible below.
    let remote_asset_ids = store
        .scan(1)
        .ok()
        .map(|events| {
            events
                .into_iter()
                .filter_map(|stored| match stored.event {
                    EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids,
                        ..
                    } => Some(asset_ids),
                    _ => None,
                })
                .flatten()
                .collect::<BTreeSet<_>>()
        })
        .unwrap_or_default();
    if remote_asset_ids.is_empty() {
        return Vec::new();
    }

    let Ok(projection) = projection_snapshot(store) else {
        return Vec::new();
    };
    // Cloned because `projection.genes` is moved into the iterator while
    // these are still read inside the closure.
    let capsules = projection.capsules.clone();
    let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
    // Blank/whitespace spec ids are treated as "no spec filter".
    let requested_spec_id = input
        .spec_id
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty());
    let normalized_signals = input
        .signals
        .iter()
        .filter_map(|signal| normalize_signal_phrase(signal))
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect::<Vec<_>>();
    if normalized_signals.is_empty() {
        return Vec::new();
    }
    let mut candidates = projection
        .genes
        .into_iter()
        .filter_map(|gene| {
            if !matches!(
                gene.state,
                AssetState::Promoted | AssetState::Quarantined | AssetState::ShadowValidated
            ) {
                return None;
            }
            if let Some(spec_id) = requested_spec_id {
                let matches_spec = spec_ids_by_gene
                    .get(&gene.id)
                    .map(|values| {
                        values
                            .iter()
                            .any(|value| value.eq_ignore_ascii_case(spec_id))
                    })
                    .unwrap_or(false);
                if !matches_spec {
                    return None;
                }
            }
            let normalized_gene_signals = gene
                .signals
                .iter()
                .filter_map(|candidate| normalize_signal_phrase(candidate))
                .collect::<Vec<_>>();
            // Bidirectional substring overlap between query and gene signals.
            let matched_query_count = normalized_signals
                .iter()
                .filter(|signal| {
                    normalized_gene_signals.iter().any(|candidate| {
                        candidate.contains(signal.as_str()) || signal.contains(candidate)
                    })
                })
                .count();
            if matched_query_count == 0 {
                return None;
            }

            // Only quarantined/shadow-validated capsules that came from a
            // remote import are eligible.
            let mut matched_capsules = capsules
                .iter()
                .filter(|capsule| {
                    capsule.gene_id == gene.id
                        && matches!(
                            capsule.state,
                            AssetState::Quarantined | AssetState::ShadowValidated
                        )
                        && remote_asset_ids.contains(&capsule.id)
                })
                .cloned()
                .collect::<Vec<_>>();
            // Best capsule first: env match desc, confidence desc, id asc.
            matched_capsules.sort_by(|left, right| {
                replay_environment_match_factor(&input.env, &right.env)
                    .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
                    .unwrap_or(std::cmp::Ordering::Equal)
                    .then_with(|| {
                        right
                            .confidence
                            .partial_cmp(&left.confidence)
                            .unwrap_or(std::cmp::Ordering::Equal)
                    })
                    .then_with(|| left.id.cmp(&right.id))
            });
            if matched_capsules.is_empty() {
                None
            } else {
                // Score: signal-overlap ratio vs best environment match,
                // whichever is larger.
                let overlap = matched_query_count as f32 / normalized_signals.len() as f32;
                let env_score = matched_capsules
                    .first()
                    .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
                    .unwrap_or(0.0);
                Some(GeneCandidate {
                    gene,
                    score: overlap.max(env_score),
                    capsules: matched_capsules,
                })
            }
        })
        .collect::<Vec<_>>();
    candidates.sort_by(|left, right| {
        right
            .score
            .partial_cmp(&left.score)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| left.gene.id.cmp(&right.gene.id))
    });
    candidates
}
3813
3814fn replay_environment_match_factor(input: &EnvFingerprint, candidate: &EnvFingerprint) -> f32 {
3815 let fields = [
3816 input
3817 .rustc_version
3818 .eq_ignore_ascii_case(&candidate.rustc_version),
3819 input
3820 .cargo_lock_hash
3821 .eq_ignore_ascii_case(&candidate.cargo_lock_hash),
3822 input
3823 .target_triple
3824 .eq_ignore_ascii_case(&candidate.target_triple),
3825 input.os.eq_ignore_ascii_case(&candidate.os),
3826 ];
3827 let matched_fields = fields.into_iter().filter(|matched| *matched).count() as f32;
3828 0.5 + ((matched_fields / 4.0) * 0.5)
3829}
3830
3831fn effective_candidate_score(
3832 candidate: &GeneCandidate,
3833 publishers_by_asset: &BTreeMap<String, String>,
3834 reputation_bias: &BTreeMap<String, f32>,
3835) -> f32 {
3836 let bias = candidate
3837 .capsules
3838 .first()
3839 .and_then(|capsule| publishers_by_asset.get(&capsule.id))
3840 .and_then(|publisher| reputation_bias.get(publisher))
3841 .copied()
3842 .unwrap_or(0.0)
3843 .clamp(0.0, 1.0);
3844 candidate.score * (1.0 + (bias * 0.1))
3845}
3846
3847fn export_promoted_assets_from_store(
3848 store: &dyn EvolutionStore,
3849 sender_id: impl Into<String>,
3850) -> Result<EvolutionEnvelope, EvoKernelError> {
3851 let (events, projection) = scan_projection(store)?;
3852 let genes = projection
3853 .genes
3854 .into_iter()
3855 .filter(|gene| gene.state == AssetState::Promoted)
3856 .collect::<Vec<_>>();
3857 let capsules = projection
3858 .capsules
3859 .into_iter()
3860 .filter(|capsule| capsule.state == AssetState::Promoted)
3861 .collect::<Vec<_>>();
3862 let assets = replay_export_assets(&events, genes, capsules);
3863 Ok(EvolutionEnvelope::publish(sender_id, assets))
3864}
3865
3866fn scan_projection(
3867 store: &dyn EvolutionStore,
3868) -> Result<(Vec<StoredEvolutionEvent>, EvolutionProjection), EvoKernelError> {
3869 store.scan_projection().map_err(store_err)
3870}
3871
3872fn projection_snapshot(store: &dyn EvolutionStore) -> Result<EvolutionProjection, EvoKernelError> {
3873 scan_projection(store).map(|(_, projection)| projection)
3874}
3875
3876fn replay_export_assets(
3877 events: &[StoredEvolutionEvent],
3878 genes: Vec<Gene>,
3879 capsules: Vec<Capsule>,
3880) -> Vec<NetworkAsset> {
3881 let mutation_ids = capsules
3882 .iter()
3883 .map(|capsule| capsule.mutation_id.clone())
3884 .collect::<BTreeSet<_>>();
3885 let mut assets = replay_export_events_for_mutations(events, &mutation_ids);
3886 for gene in genes {
3887 assets.push(NetworkAsset::Gene { gene });
3888 }
3889 for capsule in capsules {
3890 assets.push(NetworkAsset::Capsule { capsule });
3891 }
3892 assets
3893}
3894
/// Collects the `MutationDeclared` and `SpecLinked` events needed to replay
/// the given mutations on a peer, in log order and deduplicated.
///
/// Dedup relies on the `insert` calls inside the match guards: an arm only
/// fires the first time a mutation id (or mutation/spec pair) is seen.
fn replay_export_events_for_mutations(
    events: &[StoredEvolutionEvent],
    mutation_ids: &BTreeSet<String>,
) -> Vec<NetworkAsset> {
    if mutation_ids.is_empty() {
        return Vec::new();
    }

    let mut assets = Vec::new();
    let mut seen_mutations = BTreeSet::new();
    let mut seen_spec_links = BTreeSet::new();
    for stored in events {
        match &stored.event {
            // First declaration of a referenced mutation.
            EvolutionEvent::MutationDeclared { mutation }
                if mutation_ids.contains(mutation.intent.id.as_str())
                    && seen_mutations.insert(mutation.intent.id.clone()) =>
            {
                assets.push(NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared {
                        mutation: mutation.clone(),
                    },
                });
            }
            // First spec link for a (mutation, spec) pair.
            EvolutionEvent::SpecLinked {
                mutation_id,
                spec_id,
            } if mutation_ids.contains(mutation_id.as_str())
                && seen_spec_links.insert((mutation_id.clone(), spec_id.clone())) =>
            {
                assets.push(NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::SpecLinked {
                        mutation_id: mutation_id.clone(),
                        spec_id: spec_id.clone(),
                    },
                });
            }
            _ => {}
        }
    }

    assets
}
3937
/// Prefix for sequence-based sync cursors, e.g. `seq:42`.
const SYNC_CURSOR_PREFIX: &str = "seq:";
/// Prefix for sync resume tokens: `gep-rt1|<sender>|<cursor>`.
const SYNC_RESUME_TOKEN_PREFIX: &str = "gep-rt1|";
3940
/// Asset ids touched by events appended after a sync cursor; produced by
/// `delta_window`.
#[derive(Clone, Debug)]
struct DeltaWindow {
    // Genes projected, promoted, revoked, evaluated, imported, or
    // referenced by committed capsules.
    changed_gene_ids: BTreeSet<String>,
    // Capsules committed, released, quarantined, or imported remotely.
    changed_capsule_ids: BTreeSet<String>,
    // Mutations declared, spec-linked, or referenced by committed capsules.
    changed_mutation_ids: BTreeSet<String>,
}
3947
/// Trims an optional sync parameter; absent or whitespace-only values
/// normalize to `None`.
fn normalize_sync_value(value: Option<&str>) -> Option<String> {
    let trimmed = value?.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_owned())
    }
}
3954
3955fn parse_sync_cursor_seq(cursor: &str) -> Option<u64> {
3956 let trimmed = cursor.trim();
3957 if trimmed.is_empty() {
3958 return None;
3959 }
3960 let raw = trimmed.strip_prefix(SYNC_CURSOR_PREFIX).unwrap_or(trimmed);
3961 raw.parse::<u64>().ok()
3962}
3963
3964fn format_sync_cursor(seq: u64) -> String {
3965 format!("{SYNC_CURSOR_PREFIX}{seq}")
3966}
3967
3968fn encode_resume_token(sender_id: &str, cursor: &str) -> String {
3969 format!("{SYNC_RESUME_TOKEN_PREFIX}{sender_id}|{cursor}")
3970}
3971
3972fn decode_resume_token(sender_id: &str, token: &str) -> Result<String, EvoKernelError> {
3973 let token = token.trim();
3974 let Some(encoded) = token.strip_prefix(SYNC_RESUME_TOKEN_PREFIX) else {
3975 return Ok(token.to_string());
3976 };
3977 let (token_sender, cursor) = encoded.split_once('|').ok_or_else(|| {
3978 EvoKernelError::Validation(
3979 "invalid resume_token format; expected gep-rt1|<sender>|<seq>".into(),
3980 )
3981 })?;
3982 if token_sender != sender_id.trim() {
3983 return Err(EvoKernelError::Validation(
3984 "resume_token sender mismatch".into(),
3985 ));
3986 }
3987 Ok(cursor.to_string())
3988}
3989
3990fn resolve_requested_cursor(
3991 sender_id: &str,
3992 since_cursor: Option<&str>,
3993 resume_token: Option<&str>,
3994) -> Result<Option<String>, EvoKernelError> {
3995 let cursor = if let Some(token) = normalize_sync_value(resume_token) {
3996 Some(decode_resume_token(sender_id, &token)?)
3997 } else {
3998 normalize_sync_value(since_cursor)
3999 };
4000
4001 let Some(cursor) = cursor else {
4002 return Ok(None);
4003 };
4004 let seq = parse_sync_cursor_seq(&cursor).ok_or_else(|| {
4005 EvoKernelError::Validation("invalid since_cursor/resume_token cursor format".into())
4006 })?;
4007 Ok(Some(format_sync_cursor(seq)))
4008}
4009
4010fn latest_store_cursor(store: &dyn EvolutionStore) -> Result<Option<String>, EvoKernelError> {
4011 let events = store.scan(1).map_err(store_err)?;
4012 Ok(events.last().map(|stored| format_sync_cursor(stored.seq)))
4013}
4014
/// Computes which gene/capsule/mutation ids were touched by events strictly
/// after `since_seq`, for incremental sync.
///
/// Remote imports conservatively mark the id as both a gene and a capsule,
/// since the event does not say which kind the asset is.
fn delta_window(events: &[StoredEvolutionEvent], since_seq: u64) -> DeltaWindow {
    let mut changed_gene_ids = BTreeSet::new();
    let mut changed_capsule_ids = BTreeSet::new();
    let mut changed_mutation_ids = BTreeSet::new();

    for stored in events {
        // Only events appended after the cursor count.
        if stored.seq <= since_seq {
            continue;
        }
        match &stored.event {
            EvolutionEvent::MutationDeclared { mutation } => {
                changed_mutation_ids.insert(mutation.intent.id.clone());
            }
            EvolutionEvent::SpecLinked { mutation_id, .. } => {
                changed_mutation_ids.insert(mutation_id.clone());
            }
            EvolutionEvent::GeneProjected { gene } => {
                changed_gene_ids.insert(gene.id.clone());
            }
            EvolutionEvent::GenePromoted { gene_id }
            | EvolutionEvent::GeneRevoked { gene_id, .. }
            | EvolutionEvent::PromotionEvaluated { gene_id, .. } => {
                changed_gene_ids.insert(gene_id.clone());
            }
            // A committed capsule implicates its gene and mutation too.
            EvolutionEvent::CapsuleCommitted { capsule } => {
                changed_capsule_ids.insert(capsule.id.clone());
                changed_gene_ids.insert(capsule.gene_id.clone());
                changed_mutation_ids.insert(capsule.mutation_id.clone());
            }
            EvolutionEvent::CapsuleReleased { capsule_id, .. }
            | EvolutionEvent::CapsuleQuarantined { capsule_id } => {
                changed_capsule_ids.insert(capsule_id.clone());
            }
            EvolutionEvent::RemoteAssetImported { asset_ids, .. } => {
                for asset_id in asset_ids {
                    changed_gene_ids.insert(asset_id.clone());
                    changed_capsule_ids.insert(asset_id.clone());
                }
            }
            _ => {}
        }
    }

    DeltaWindow {
        changed_gene_ids,
        changed_capsule_ids,
        changed_mutation_ids,
    }
}
4064
4065fn import_remote_envelope_into_store(
4066 store: &dyn EvolutionStore,
4067 envelope: &EvolutionEnvelope,
4068 remote_publishers: Option<&Mutex<BTreeMap<String, String>>>,
4069 requested_cursor: Option<String>,
4070) -> Result<ImportOutcome, EvoKernelError> {
4071 if !envelope.verify_content_hash() {
4072 record_manifest_validation(store, envelope, false, "invalid evolution envelope hash")?;
4073 return Err(EvoKernelError::Validation(
4074 "invalid evolution envelope hash".into(),
4075 ));
4076 }
4077 if let Err(reason) = envelope.verify_manifest() {
4078 record_manifest_validation(
4079 store,
4080 envelope,
4081 false,
4082 format!("manifest validation failed: {reason}"),
4083 )?;
4084 return Err(EvoKernelError::Validation(format!(
4085 "invalid evolution envelope manifest: {reason}"
4086 )));
4087 }
4088 record_manifest_validation(store, envelope, true, "manifest validated")?;
4089
4090 let sender_id = normalized_sender_id(&envelope.sender_id);
4091 let (events, projection) = scan_projection(store)?;
4092 let mut known_gene_ids = projection
4093 .genes
4094 .into_iter()
4095 .map(|gene| gene.id)
4096 .collect::<BTreeSet<_>>();
4097 let mut known_capsule_ids = projection
4098 .capsules
4099 .into_iter()
4100 .map(|capsule| capsule.id)
4101 .collect::<BTreeSet<_>>();
4102 let mut known_mutation_ids = BTreeSet::new();
4103 let mut known_spec_links = BTreeSet::new();
4104 for stored in &events {
4105 match &stored.event {
4106 EvolutionEvent::MutationDeclared { mutation } => {
4107 known_mutation_ids.insert(mutation.intent.id.clone());
4108 }
4109 EvolutionEvent::SpecLinked {
4110 mutation_id,
4111 spec_id,
4112 } => {
4113 known_spec_links.insert((mutation_id.clone(), spec_id.clone()));
4114 }
4115 _ => {}
4116 }
4117 }
4118 let mut imported_asset_ids = Vec::new();
4119 let mut applied_count = 0usize;
4120 let mut skipped_count = 0usize;
4121 for asset in &envelope.assets {
4122 match asset {
4123 NetworkAsset::Gene { gene } => {
4124 if !known_gene_ids.insert(gene.id.clone()) {
4125 skipped_count += 1;
4126 continue;
4127 }
4128 imported_asset_ids.push(gene.id.clone());
4129 applied_count += 1;
4130 let mut quarantined_gene = gene.clone();
4131 quarantined_gene.state = AssetState::Quarantined;
4132 store
4133 .append_event(EvolutionEvent::RemoteAssetImported {
4134 source: CandidateSource::Remote,
4135 asset_ids: vec![gene.id.clone()],
4136 sender_id: sender_id.clone(),
4137 })
4138 .map_err(store_err)?;
4139 store
4140 .append_event(EvolutionEvent::GeneProjected {
4141 gene: quarantined_gene.clone(),
4142 })
4143 .map_err(store_err)?;
4144 record_remote_publisher_for_asset(remote_publishers, &envelope.sender_id, asset);
4145 store
4146 .append_event(EvolutionEvent::PromotionEvaluated {
4147 gene_id: quarantined_gene.id,
4148 state: AssetState::Quarantined,
4149 reason: "remote asset requires local validation before promotion".into(),
4150 reason_code: TransitionReasonCode::DowngradeRemoteRequiresLocalValidation,
4151 evidence: Some(TransitionEvidence {
4152 replay_attempts: None,
4153 replay_successes: None,
4154 replay_success_rate: None,
4155 environment_match_factor: None,
4156 decayed_confidence: None,
4157 confidence_decay_ratio: None,
4158 summary: Some("phase=remote_import; source=remote; action=quarantine_before_shadow_validation".into()),
4159 }),
4160 })
4161 .map_err(store_err)?;
4162 }
4163 NetworkAsset::Capsule { capsule } => {
4164 if !known_capsule_ids.insert(capsule.id.clone()) {
4165 skipped_count += 1;
4166 continue;
4167 }
4168 imported_asset_ids.push(capsule.id.clone());
4169 applied_count += 1;
4170 store
4171 .append_event(EvolutionEvent::RemoteAssetImported {
4172 source: CandidateSource::Remote,
4173 asset_ids: vec![capsule.id.clone()],
4174 sender_id: sender_id.clone(),
4175 })
4176 .map_err(store_err)?;
4177 let mut quarantined = capsule.clone();
4178 quarantined.state = AssetState::Quarantined;
4179 store
4180 .append_event(EvolutionEvent::CapsuleCommitted {
4181 capsule: quarantined.clone(),
4182 })
4183 .map_err(store_err)?;
4184 record_remote_publisher_for_asset(remote_publishers, &envelope.sender_id, asset);
4185 store
4186 .append_event(EvolutionEvent::CapsuleQuarantined {
4187 capsule_id: quarantined.id,
4188 })
4189 .map_err(store_err)?;
4190 }
4191 NetworkAsset::EvolutionEvent { event } => {
4192 let should_append = match event {
4193 EvolutionEvent::MutationDeclared { mutation } => {
4194 known_mutation_ids.insert(mutation.intent.id.clone())
4195 }
4196 EvolutionEvent::SpecLinked {
4197 mutation_id,
4198 spec_id,
4199 } => known_spec_links.insert((mutation_id.clone(), spec_id.clone())),
4200 _ if should_import_remote_event(event) => true,
4201 _ => false,
4202 };
4203 if should_append {
4204 store.append_event(event.clone()).map_err(store_err)?;
4205 applied_count += 1;
4206 } else {
4207 skipped_count += 1;
4208 }
4209 }
4210 }
4211 }
4212 let next_cursor = latest_store_cursor(store)?;
4213 let resume_token = next_cursor.as_ref().and_then(|cursor| {
4214 normalized_sender_id(&envelope.sender_id).map(|sender| encode_resume_token(&sender, cursor))
4215 });
4216
4217 Ok(ImportOutcome {
4218 imported_asset_ids,
4219 accepted: true,
4220 next_cursor: next_cursor.clone(),
4221 resume_token,
4222 sync_audit: SyncAudit {
4223 batch_id: next_id("sync-import"),
4224 requested_cursor,
4225 scanned_count: envelope.assets.len(),
4226 applied_count,
4227 skipped_count,
4228 failed_count: 0,
4229 failure_reasons: Vec::new(),
4230 },
4231 })
4232}
4233
/// Crate-relative directory that holds the bundled EvoMap snapshot files.
const EVOMAP_SNAPSHOT_ROOT: &str = "assets/gep/evomap_snapshot";
/// File name of the gene snapshot inside `EVOMAP_SNAPSHOT_ROOT`.
const EVOMAP_SNAPSHOT_GENES_FILE: &str = "genes.json";
/// File name of the capsule snapshot inside `EVOMAP_SNAPSHOT_ROOT`.
const EVOMAP_SNAPSHOT_CAPSULES_FILE: &str = "capsules.json";
/// Synthetic run id recorded on capsules seeded from the built-in snapshot.
const EVOMAP_BUILTIN_RUN_ID: &str = "builtin-evomap-seed";
4238
/// Top-level shape of the bundled EvoMap `genes.json` snapshot.
#[derive(Debug, Deserialize)]
struct EvoMapGeneDocument {
    // Missing or empty `genes` array deserializes to an empty Vec.
    #[serde(default)]
    genes: Vec<EvoMapGeneAsset>,
}
4244
/// One gene entry from the EvoMap genes snapshot.
///
/// Every field except `id` is optional/defaulted so partially populated
/// snapshots still deserialize; normalization happens later in
/// `load_evomap_builtin_assets`.
#[derive(Debug, Deserialize)]
struct EvoMapGeneAsset {
    // Gene id; validated to be non-empty after trimming during load.
    id: String,
    #[serde(default)]
    category: Option<String>,
    // Signals may be strings or arbitrary JSON values; stringified on load.
    #[serde(default)]
    signals_match: Vec<Value>,
    #[serde(default)]
    strategy: Vec<String>,
    #[serde(default)]
    validation: Vec<String>,
    #[serde(default)]
    constraints: Option<EvoMapConstraintAsset>,
    #[serde(default)]
    model_name: Option<String>,
    #[serde(default)]
    schema_version: Option<String>,
    // Either a bare state string or an object with a "state" key; see
    // `compatibility_state_from_value`.
    #[serde(default)]
    compatibility: Option<Value>,
}
4265
/// Optional constraint metadata attached to a snapshot gene; surfaced as
/// `evomap_constraints_*` strategy entries during load.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapConstraintAsset {
    #[serde(default)]
    max_files: Option<usize>,
    #[serde(default)]
    forbidden_paths: Vec<String>,
}
4273
/// Top-level shape of the bundled EvoMap `capsules.json` snapshot.
#[derive(Debug, Deserialize)]
struct EvoMapCapsuleDocument {
    // Missing or empty `capsules` array deserializes to an empty Vec.
    #[serde(default)]
    capsules: Vec<EvoMapCapsuleAsset>,
}
4279
/// One capsule entry from the EvoMap capsules snapshot.
///
/// `id` and `gene` are required; `gene` must match a gene loaded from the
/// companion genes snapshot. Everything else is optional and normalized
/// (or synthesized) during `load_evomap_builtin_assets`.
#[derive(Debug, Deserialize)]
struct EvoMapCapsuleAsset {
    id: String,
    // Id of the gene this capsule belongs to.
    gene: String,
    #[serde(default)]
    trigger: Vec<String>,
    #[serde(default)]
    summary: String,
    // Unified diff payload, possibly wrapped in a markdown code fence;
    // a synthetic diff is generated when absent or blank.
    #[serde(default)]
    diff: Option<String>,
    #[serde(default)]
    confidence: Option<f32>,
    #[serde(default)]
    outcome: Option<EvoMapOutcomeAsset>,
    #[serde(default)]
    blast_radius: Option<EvoMapBlastRadiusAsset>,
    #[serde(default)]
    content: Option<EvoMapCapsuleContentAsset>,
    // Free-form environment descriptor; mapped via `map_evomap_env_fingerprint`.
    #[serde(default)]
    env_fingerprint: Option<Value>,
    #[serde(default)]
    model_name: Option<String>,
    #[serde(default)]
    schema_version: Option<String>,
    #[serde(default)]
    compatibility: Option<Value>,
}
4307
/// Snapshot-reported outcome of a capsule; `score` doubles as a confidence
/// fallback and `status` == "success" (case-insensitive) marks success.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapOutcomeAsset {
    #[serde(default)]
    status: Option<String>,
    #[serde(default)]
    score: Option<f32>,
}
4315
/// Snapshot-reported change size; only the changed-line count is used.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapBlastRadiusAsset {
    #[serde(default)]
    lines: usize,
}
4321
/// Explicit changed-file list for a capsule; when absent the list is
/// recovered from the diff (or a placeholder path is used).
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapCapsuleContentAsset {
    #[serde(default)]
    changed_files: Vec<String>,
}
4327
/// A capsule ready for seeding, paired with the prepared mutation that
/// backs its diff so the store can declare both together.
#[derive(Debug)]
struct BuiltinCapsuleSeed {
    capsule: Capsule,
    mutation: PreparedMutation,
}
4333
/// Collected built-in assets (hard-coded genes + optional EvoMap snapshot)
/// to be imported into the evolution store on cold start.
#[derive(Debug)]
struct BuiltinAssetBundle {
    genes: Vec<Gene>,
    capsules: Vec<BuiltinCapsuleSeed>,
}
4339
4340fn built_in_experience_genes() -> Vec<Gene> {
4341 vec![
4342 Gene {
4343 id: "builtin-experience-docs-rewrite-v1".into(),
4344 signals: vec!["docs.rewrite".into(), "docs".into(), "rewrite".into()],
4345 strategy: vec![
4346 "asset_origin=builtin".into(),
4347 "task_class=docs.rewrite".into(),
4348 "task_label=Docs rewrite".into(),
4349 "template_id=builtin-docs-rewrite-v1".into(),
4350 "summary=baseline docs rewrite experience".into(),
4351 ],
4352 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4353 state: AssetState::Promoted,
4354 },
4355 Gene {
4356 id: "builtin-experience-ci-fix-v1".into(),
4357 signals: vec![
4358 "ci.fix".into(),
4359 "ci".into(),
4360 "test".into(),
4361 "failure".into(),
4362 ],
4363 strategy: vec![
4364 "asset_origin=builtin".into(),
4365 "task_class=ci.fix".into(),
4366 "task_label=CI fix".into(),
4367 "template_id=builtin-ci-fix-v1".into(),
4368 "summary=baseline ci stabilization experience".into(),
4369 ],
4370 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4371 state: AssetState::Promoted,
4372 },
4373 Gene {
4374 id: "builtin-experience-task-decomposition-v1".into(),
4375 signals: vec![
4376 "task.decomposition".into(),
4377 "task".into(),
4378 "decomposition".into(),
4379 "planning".into(),
4380 ],
4381 strategy: vec![
4382 "asset_origin=builtin".into(),
4383 "task_class=task.decomposition".into(),
4384 "task_label=Task decomposition".into(),
4385 "template_id=builtin-task-decomposition-v1".into(),
4386 "summary=baseline task decomposition and routing experience".into(),
4387 ],
4388 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4389 state: AssetState::Promoted,
4390 },
4391 Gene {
4392 id: "builtin-experience-project-workflow-v1".into(),
4393 signals: vec![
4394 "project.workflow".into(),
4395 "project".into(),
4396 "workflow".into(),
4397 "milestone".into(),
4398 ],
4399 strategy: vec![
4400 "asset_origin=builtin".into(),
4401 "task_class=project.workflow".into(),
4402 "task_label=Project workflow".into(),
4403 "template_id=builtin-project-workflow-v1".into(),
4404 "summary=baseline project proposal and merge workflow experience".into(),
4405 ],
4406 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4407 state: AssetState::Promoted,
4408 },
4409 Gene {
4410 id: "builtin-experience-service-bid-v1".into(),
4411 signals: vec![
4412 "service.bid".into(),
4413 "service".into(),
4414 "bid".into(),
4415 "economics".into(),
4416 ],
4417 strategy: vec![
4418 "asset_origin=builtin".into(),
4419 "task_class=service.bid".into(),
4420 "task_label=Service bid".into(),
4421 "template_id=builtin-service-bid-v1".into(),
4422 "summary=baseline service bidding and settlement experience".into(),
4423 ],
4424 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4425 state: AssetState::Promoted,
4426 },
4427 ]
4428}
4429
4430fn evomap_snapshot_path(file_name: &str) -> PathBuf {
4431 PathBuf::from(env!("CARGO_MANIFEST_DIR"))
4432 .join(EVOMAP_SNAPSHOT_ROOT)
4433 .join(file_name)
4434}
4435
4436fn read_evomap_snapshot(file_name: &str) -> Result<Option<String>, EvoKernelError> {
4437 let path = evomap_snapshot_path(file_name);
4438 if !path.exists() {
4439 return Ok(None);
4440 }
4441 fs::read_to_string(&path).map(Some).map_err(|err| {
4442 EvoKernelError::Validation(format!(
4443 "failed to read EvoMap snapshot {}: {err}",
4444 path.display()
4445 ))
4446 })
4447}
4448
4449fn compatibility_state_from_value(value: Option<&Value>) -> Option<String> {
4450 let value = value?;
4451 if let Some(state) = value.as_str() {
4452 let normalized = state.trim().to_ascii_lowercase();
4453 if normalized.is_empty() {
4454 return None;
4455 }
4456 return Some(normalized);
4457 }
4458 value
4459 .get("state")
4460 .and_then(Value::as_str)
4461 .map(str::trim)
4462 .filter(|state| !state.is_empty())
4463 .map(|state| state.to_ascii_lowercase())
4464}
4465
4466fn map_evomap_state(value: Option<&Value>) -> AssetState {
4467 match compatibility_state_from_value(value).as_deref() {
4468 Some("promoted") => AssetState::Promoted,
4469 Some("candidate") => AssetState::Candidate,
4470 Some("quarantined") => AssetState::Quarantined,
4471 Some("shadow_validated") => AssetState::ShadowValidated,
4472 Some("revoked") => AssetState::Revoked,
4473 Some("rejected") => AssetState::Archived,
4474 Some("archived") => AssetState::Archived,
4475 _ => AssetState::Candidate,
4476 }
4477}
4478
4479fn value_as_signal_string(value: &Value) -> Option<String> {
4480 match value {
4481 Value::String(raw) => {
4482 let normalized = raw.trim();
4483 if normalized.is_empty() {
4484 None
4485 } else {
4486 Some(normalized.to_string())
4487 }
4488 }
4489 Value::Object(_) => {
4490 let serialized = serde_json::to_string(value).ok()?;
4491 let normalized = serialized.trim();
4492 if normalized.is_empty() {
4493 None
4494 } else {
4495 Some(normalized.to_string())
4496 }
4497 }
4498 Value::Null => None,
4499 other => {
4500 let rendered = other.to_string();
4501 let normalized = rendered.trim();
4502 if normalized.is_empty() {
4503 None
4504 } else {
4505 Some(normalized.to_string())
4506 }
4507 }
4508 }
4509}
4510
/// Extracts the changed-file paths from a unified diff.
///
/// Recognizes `+++ b/<path>` target headers (ignoring `/dev/null`) and
/// `diff --git a/<path> b/<path>` lines; results are deduplicated and
/// returned in sorted order.
fn parse_diff_changed_files(payload: &str) -> Vec<String> {
    let mut paths = BTreeSet::new();
    for raw in payload.lines() {
        let trimmed = raw.trim();
        if let Some(rest) = trimmed.strip_prefix("+++ b/") {
            let candidate = rest.trim();
            if !candidate.is_empty() && candidate != "/dev/null" {
                paths.insert(candidate.to_string());
            }
        } else if let Some(rest) = trimmed.strip_prefix("diff --git a/") {
            if let Some((_, after)) = rest.split_once(" b/") {
                let candidate = after.trim();
                if !candidate.is_empty() {
                    paths.insert(candidate.to_string());
                }
            }
        }
    }
    paths.into_iter().collect()
}
4533
/// Removes a surrounding markdown code fence (```…```) from a diff payload.
///
/// Input that does not start with a fence is merely trimmed; otherwise the
/// opening fence line (including any language tag) and a trailing ``` line
/// are dropped and the remainder trimmed.
fn strip_diff_code_fence(payload: &str) -> String {
    let trimmed = payload.trim();
    if !trimmed.starts_with("```") {
        return trimmed.to_string();
    }
    // Drop the opening fence line; the language tag rides along with it.
    let mut body: Vec<&str> = trimmed.lines().skip(1).collect();
    if matches!(body.last(), Some(last) if last.trim() == "```") {
        body.pop();
    }
    body.join("\n").trim().to_string()
}
4553
4554fn synthetic_diff_for_capsule(capsule: &EvoMapCapsuleAsset) -> String {
4555 let file_path = format!("docs/evomap_builtin_capsules/{}.md", capsule.id);
4556 let mut content = Vec::new();
4557 content.push(format!("# EvoMap Builtin Capsule {}", capsule.id));
4558 if capsule.summary.trim().is_empty() {
4559 content.push("summary: missing".to_string());
4560 } else {
4561 content.push(format!("summary: {}", capsule.summary.trim()));
4562 }
4563 if !capsule.trigger.is_empty() {
4564 content.push(format!("trigger: {}", capsule.trigger.join(", ")));
4565 }
4566 content.push(format!("gene: {}", capsule.gene));
4567 let added = content
4568 .into_iter()
4569 .map(|line| format!("+{}", line.replace('\r', "")))
4570 .collect::<Vec<_>>()
4571 .join("\n");
4572 format!(
4573 "diff --git a/{file_path} b/{file_path}\nnew file mode 100644\nindex 0000000..1111111\n--- /dev/null\n+++ b/{file_path}\n@@ -0,0 +1,{line_count} @@\n{added}\n",
4574 line_count = added.lines().count()
4575 )
4576}
4577
4578fn normalized_diff_payload(capsule: &EvoMapCapsuleAsset) -> String {
4579 if let Some(raw) = capsule.diff.as_deref() {
4580 let normalized = strip_diff_code_fence(raw);
4581 if !normalized.trim().is_empty() {
4582 return normalized;
4583 }
4584 }
4585 synthetic_diff_for_capsule(capsule)
4586}
4587
4588fn env_field(value: Option<&Value>, keys: &[&str]) -> Option<String> {
4589 let object = value?.as_object()?;
4590 keys.iter().find_map(|key| {
4591 object
4592 .get(*key)
4593 .and_then(Value::as_str)
4594 .map(str::trim)
4595 .filter(|value| !value.is_empty())
4596 .map(|value| value.to_string())
4597 })
4598}
4599
4600fn map_evomap_env_fingerprint(value: Option<&Value>) -> EnvFingerprint {
4601 let os =
4602 env_field(value, &["os", "platform", "os_release"]).unwrap_or_else(|| "unknown".into());
4603 let target_triple = env_field(value, &["target_triple"]).unwrap_or_else(|| {
4604 let arch = env_field(value, &["arch"]).unwrap_or_else(|| "unknown".into());
4605 format!("{arch}-unknown-{os}")
4606 });
4607 EnvFingerprint {
4608 rustc_version: env_field(value, &["runtime", "rustc_version", "node_version"])
4609 .unwrap_or_else(|| "unknown".into()),
4610 cargo_lock_hash: env_field(value, &["cargo_lock_hash"]).unwrap_or_else(|| "unknown".into()),
4611 target_triple,
4612 os,
4613 }
4614}
4615
4616fn load_evomap_builtin_assets() -> Result<Option<BuiltinAssetBundle>, EvoKernelError> {
4617 let genes_raw = read_evomap_snapshot(EVOMAP_SNAPSHOT_GENES_FILE)?;
4618 let capsules_raw = read_evomap_snapshot(EVOMAP_SNAPSHOT_CAPSULES_FILE)?;
4619 let (Some(genes_raw), Some(capsules_raw)) = (genes_raw, capsules_raw) else {
4620 return Ok(None);
4621 };
4622
4623 let genes_doc: EvoMapGeneDocument = serde_json::from_str(&genes_raw).map_err(|err| {
4624 EvoKernelError::Validation(format!("failed to parse EvoMap genes snapshot: {err}"))
4625 })?;
4626 let capsules_doc: EvoMapCapsuleDocument =
4627 serde_json::from_str(&capsules_raw).map_err(|err| {
4628 EvoKernelError::Validation(format!("failed to parse EvoMap capsules snapshot: {err}"))
4629 })?;
4630
4631 let mut genes = Vec::new();
4632 let mut known_gene_ids = BTreeSet::new();
4633 for source in genes_doc.genes {
4634 let EvoMapGeneAsset {
4635 id,
4636 category,
4637 signals_match,
4638 strategy,
4639 validation,
4640 constraints,
4641 model_name,
4642 schema_version,
4643 compatibility,
4644 } = source;
4645 let gene_id = id.trim();
4646 if gene_id.is_empty() {
4647 return Err(EvoKernelError::Validation(
4648 "EvoMap snapshot gene id must not be empty".into(),
4649 ));
4650 }
4651 if !known_gene_ids.insert(gene_id.to_string()) {
4652 continue;
4653 }
4654
4655 let mut seen_signals = BTreeSet::new();
4656 let mut signals = Vec::new();
4657 for signal in signals_match {
4658 let Some(normalized) = value_as_signal_string(&signal) else {
4659 continue;
4660 };
4661 if seen_signals.insert(normalized.clone()) {
4662 signals.push(normalized);
4663 }
4664 }
4665 if signals.is_empty() {
4666 signals.push(format!("gene:{}", gene_id.to_ascii_lowercase()));
4667 }
4668
4669 let mut strategy = strategy
4670 .into_iter()
4671 .map(|item| item.trim().to_string())
4672 .filter(|item| !item.is_empty())
4673 .collect::<Vec<_>>();
4674 if strategy.is_empty() {
4675 strategy.push("evomap strategy missing in snapshot".into());
4676 }
4677 let constraint = constraints.unwrap_or_default();
4678 let compat_state = compatibility_state_from_value(compatibility.as_ref())
4679 .unwrap_or_else(|| "candidate".to_string());
4680 ensure_strategy_metadata(&mut strategy, "asset_origin", "builtin_evomap");
4681 ensure_strategy_metadata(
4682 &mut strategy,
4683 "evomap_category",
4684 category.as_deref().unwrap_or("unknown"),
4685 );
4686 ensure_strategy_metadata(
4687 &mut strategy,
4688 "evomap_constraints_max_files",
4689 &constraint.max_files.unwrap_or_default().to_string(),
4690 );
4691 ensure_strategy_metadata(
4692 &mut strategy,
4693 "evomap_constraints_forbidden_paths",
4694 &constraint.forbidden_paths.join("|"),
4695 );
4696 ensure_strategy_metadata(
4697 &mut strategy,
4698 "evomap_model_name",
4699 model_name.as_deref().unwrap_or("unknown"),
4700 );
4701 ensure_strategy_metadata(
4702 &mut strategy,
4703 "evomap_schema_version",
4704 schema_version.as_deref().unwrap_or("1.5.0"),
4705 );
4706 ensure_strategy_metadata(&mut strategy, "evomap_compatibility_state", &compat_state);
4707
4708 let mut validation = validation
4709 .into_iter()
4710 .map(|item| item.trim().to_string())
4711 .filter(|item| !item.is_empty())
4712 .collect::<Vec<_>>();
4713 if validation.is_empty() {
4714 validation.push("evomap-builtin-seed".into());
4715 }
4716
4717 genes.push(Gene {
4718 id: gene_id.to_string(),
4719 signals,
4720 strategy,
4721 validation,
4722 state: map_evomap_state(compatibility.as_ref()),
4723 });
4724 }
4725
4726 let mut capsules = Vec::new();
4727 let known_gene_ids = genes
4728 .iter()
4729 .map(|gene| gene.id.clone())
4730 .collect::<BTreeSet<_>>();
4731 for source in capsules_doc.capsules {
4732 let EvoMapCapsuleAsset {
4733 id,
4734 gene,
4735 trigger,
4736 summary,
4737 diff,
4738 confidence,
4739 outcome,
4740 blast_radius,
4741 content,
4742 env_fingerprint,
4743 model_name: _model_name,
4744 schema_version: _schema_version,
4745 compatibility,
4746 } = source;
4747 let source_for_diff = EvoMapCapsuleAsset {
4748 id: id.clone(),
4749 gene: gene.clone(),
4750 trigger: trigger.clone(),
4751 summary: summary.clone(),
4752 diff,
4753 confidence,
4754 outcome: outcome.clone(),
4755 blast_radius: blast_radius.clone(),
4756 content: content.clone(),
4757 env_fingerprint: env_fingerprint.clone(),
4758 model_name: None,
4759 schema_version: None,
4760 compatibility: compatibility.clone(),
4761 };
4762 if !known_gene_ids.contains(gene.as_str()) {
4763 return Err(EvoKernelError::Validation(format!(
4764 "EvoMap capsule {} references unknown gene {}",
4765 id, gene
4766 )));
4767 }
4768 let normalized_diff = normalized_diff_payload(&source_for_diff);
4769 if normalized_diff.trim().is_empty() {
4770 return Err(EvoKernelError::Validation(format!(
4771 "EvoMap capsule {} has empty normalized diff payload",
4772 id
4773 )));
4774 }
4775 let mut changed_files = content
4776 .as_ref()
4777 .map(|content| {
4778 content
4779 .changed_files
4780 .iter()
4781 .map(|item| item.trim().to_string())
4782 .filter(|item| !item.is_empty())
4783 .collect::<Vec<_>>()
4784 })
4785 .unwrap_or_default();
4786 if changed_files.is_empty() {
4787 changed_files = parse_diff_changed_files(&normalized_diff);
4788 }
4789 if changed_files.is_empty() {
4790 changed_files.push(format!("docs/evomap_builtin_capsules/{}.md", id));
4791 }
4792
4793 let confidence = confidence
4794 .or_else(|| outcome.as_ref().and_then(|outcome| outcome.score))
4795 .unwrap_or(0.6)
4796 .clamp(0.0, 1.0);
4797 let status_success = outcome
4798 .as_ref()
4799 .and_then(|outcome| outcome.status.as_deref())
4800 .map(|status| status.eq_ignore_ascii_case("success"))
4801 .unwrap_or(true);
4802 let blast_radius = blast_radius.unwrap_or_default();
4803 let mutation_id = format!("builtin-evomap-mutation-{}", id);
4804 let intent = MutationIntent {
4805 id: mutation_id.clone(),
4806 intent: if summary.trim().is_empty() {
4807 format!("apply EvoMap capsule {}", id)
4808 } else {
4809 summary.trim().to_string()
4810 },
4811 target: MutationTarget::Paths {
4812 allow: changed_files.clone(),
4813 },
4814 expected_effect: format!("seed replay candidate from EvoMap capsule {}", id),
4815 risk: RiskLevel::Low,
4816 signals: if trigger.is_empty() {
4817 vec![format!("capsule:{}", id.to_ascii_lowercase())]
4818 } else {
4819 trigger
4820 .iter()
4821 .map(|signal| signal.trim().to_ascii_lowercase())
4822 .filter(|signal| !signal.is_empty())
4823 .collect::<Vec<_>>()
4824 },
4825 spec_id: None,
4826 };
4827 let mutation = PreparedMutation {
4828 intent,
4829 artifact: oris_evolution::MutationArtifact {
4830 encoding: ArtifactEncoding::UnifiedDiff,
4831 payload: normalized_diff.clone(),
4832 base_revision: None,
4833 content_hash: compute_artifact_hash(&normalized_diff),
4834 },
4835 };
4836 let capsule = Capsule {
4837 id: id.clone(),
4838 gene_id: gene.clone(),
4839 mutation_id,
4840 run_id: EVOMAP_BUILTIN_RUN_ID.to_string(),
4841 diff_hash: compute_artifact_hash(&normalized_diff),
4842 confidence,
4843 env: map_evomap_env_fingerprint(env_fingerprint.as_ref()),
4844 outcome: Outcome {
4845 success: status_success,
4846 validation_profile: "evomap-builtin-seed".into(),
4847 validation_duration_ms: 0,
4848 changed_files,
4849 validator_hash: "builtin-evomap".into(),
4850 lines_changed: blast_radius.lines,
4851 replay_verified: false,
4852 },
4853 state: map_evomap_state(compatibility.as_ref()),
4854 };
4855 capsules.push(BuiltinCapsuleSeed { capsule, mutation });
4856 }
4857
4858 Ok(Some(BuiltinAssetBundle { genes, capsules }))
4859}
4860
/// Seeds the evolution store with built-in experience assets: the hard-coded
/// genes plus, when bundled, the EvoMap snapshot.
///
/// Assets whose ids the store already knows are skipped, so repeated calls
/// are idempotent. The returned `ImportOutcome` audit reports scanned,
/// applied (`imported_asset_ids.len()`), and skipped (`scanned - applied`)
/// counts with zero failures.
fn ensure_builtin_experience_assets_in_store(
    store: &dyn EvolutionStore,
    sender_id: String,
) -> Result<ImportOutcome, EvoKernelError> {
    // Snapshot the current projection so only new assets append events.
    let (events, projection) = scan_projection(store)?;
    let mut known_gene_ids = projection
        .genes
        .into_iter()
        .map(|gene| gene.id)
        .collect::<BTreeSet<_>>();
    let mut known_capsule_ids = projection
        .capsules
        .into_iter()
        .map(|capsule| capsule.id)
        .collect::<BTreeSet<_>>();
    let mut known_mutation_ids = BTreeSet::new();
    for stored in &events {
        if let EvolutionEvent::MutationDeclared { mutation } = &stored.event {
            known_mutation_ids.insert(mutation.intent.id.clone());
        }
    }
    let normalized_sender = normalized_sender_id(&sender_id);
    let mut imported_asset_ids = Vec::new();
    // Start with the hard-coded genes and extend with the optional
    // EvoMap snapshot bundle when its files ship with the crate.
    let mut bundle = BuiltinAssetBundle {
        genes: built_in_experience_genes(),
        capsules: Vec::new(),
    };
    if let Some(snapshot_bundle) = load_evomap_builtin_assets()? {
        bundle.genes.extend(snapshot_bundle.genes);
        bundle.capsules.extend(snapshot_bundle.capsules);
    }
    let scanned_count = bundle.genes.len() + bundle.capsules.len();

    for gene in bundle.genes {
        // Skip genes the store already knows (idempotent re-seeding).
        if !known_gene_ids.insert(gene.id.clone()) {
            continue;
        }

        store
            .append_event(EvolutionEvent::RemoteAssetImported {
                source: CandidateSource::Local,
                asset_ids: vec![gene.id.clone()],
                sender_id: normalized_sender.clone(),
            })
            .map_err(store_err)?;
        store
            .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
            .map_err(store_err)?;
        // Map the seed's state onto the store's promotion lifecycle.
        match gene.state {
            AssetState::Revoked | AssetState::Archived => {}
            AssetState::Quarantined | AssetState::ShadowValidated => {
                store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: gene.id.clone(),
                        state: AssetState::Quarantined,
                        reason:
                            "built-in EvoMap asset requires additional validation before promotion"
                                .into(),
                        reason_code: TransitionReasonCode::DowngradeBuiltinRequiresValidation,
                        evidence: None,
                    })
                    .map_err(store_err)?;
            }
            AssetState::Promoted | AssetState::Candidate => {
                store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: gene.id.clone(),
                        state: AssetState::Promoted,
                        reason: "built-in experience asset promoted for cold-start compatibility"
                            .into(),
                        reason_code: TransitionReasonCode::PromotionBuiltinColdStartCompatibility,
                        evidence: None,
                    })
                    .map_err(store_err)?;
                store
                    .append_event(EvolutionEvent::GenePromoted {
                        gene_id: gene.id.clone(),
                    })
                    .map_err(store_err)?;
            }
        }
        imported_asset_ids.push(gene.id.clone());
    }

    for seed in bundle.capsules {
        // Every capsule must reference a gene seeded (or already known) above.
        if !known_gene_ids.contains(seed.capsule.gene_id.as_str()) {
            return Err(EvoKernelError::Validation(format!(
                "built-in capsule {} references unknown gene {}",
                seed.capsule.id, seed.capsule.gene_id
            )));
        }
        // Declare the backing mutation once, even if the capsule itself
        // turns out to be a duplicate below.
        if known_mutation_ids.insert(seed.mutation.intent.id.clone()) {
            store
                .append_event(EvolutionEvent::MutationDeclared {
                    mutation: seed.mutation.clone(),
                })
                .map_err(store_err)?;
        }
        if !known_capsule_ids.insert(seed.capsule.id.clone()) {
            continue;
        }
        store
            .append_event(EvolutionEvent::RemoteAssetImported {
                source: CandidateSource::Local,
                asset_ids: vec![seed.capsule.id.clone()],
                sender_id: normalized_sender.clone(),
            })
            .map_err(store_err)?;
        store
            .append_event(EvolutionEvent::CapsuleCommitted {
                capsule: seed.capsule.clone(),
            })
            .map_err(store_err)?;
        // Mirror the gene lifecycle mapping for capsule release states.
        match seed.capsule.state {
            AssetState::Revoked | AssetState::Archived => {}
            AssetState::Quarantined | AssetState::ShadowValidated => {
                store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: seed.capsule.id.clone(),
                    })
                    .map_err(store_err)?;
            }
            AssetState::Promoted | AssetState::Candidate => {
                store
                    .append_event(EvolutionEvent::CapsuleReleased {
                        capsule_id: seed.capsule.id.clone(),
                        state: AssetState::Promoted,
                    })
                    .map_err(store_err)?;
            }
        }
        imported_asset_ids.push(seed.capsule.id.clone());
    }

    let next_cursor = latest_store_cursor(store)?;
    let resume_token = next_cursor.as_ref().and_then(|cursor| {
        normalized_sender
            .as_deref()
            .map(|sender| encode_resume_token(sender, cursor))
    });
    let applied_count = imported_asset_ids.len();
    let skipped_count = scanned_count.saturating_sub(applied_count);

    Ok(ImportOutcome {
        imported_asset_ids,
        accepted: true,
        next_cursor: next_cursor.clone(),
        resume_token,
        sync_audit: SyncAudit {
            batch_id: next_id("sync-import"),
            requested_cursor: None,
            scanned_count,
            applied_count,
            skipped_count,
            failed_count: 0,
            failure_reasons: Vec::new(),
        },
    })
}
5022
/// Finds the first non-blank value for `key` among `key=value` strategy
/// entries; key comparison is case-insensitive and ignores surrounding
/// whitespace. Entries without `=` or with blank values are skipped.
fn strategy_metadata_value(strategy: &[String], key: &str) -> Option<String> {
    strategy.iter().find_map(|entry| {
        let (entry_key, entry_value) = entry.split_once('=')?;
        if !entry_key.trim().eq_ignore_ascii_case(key) {
            return None;
        }
        let trimmed = entry_value.trim();
        if trimmed.is_empty() {
            // Blank value: keep searching later entries.
            None
        } else {
            Some(trimmed.to_string())
        }
    })
}
5038
5039fn ensure_strategy_metadata(strategy: &mut Vec<String>, key: &str, value: &str) {
5040 let normalized = value.trim();
5041 if normalized.is_empty() || strategy_metadata_value(strategy, key).is_some() {
5042 return;
5043 }
5044 strategy.push(format!("{key}={normalized}"));
5045}
5046
/// Caps the number of promoted `reported_experience` genes kept per task
/// class: everything beyond the `keep_latest` newest is revoked, and the
/// capsules attached to revoked genes are quarantined.
///
/// A blank task class or `keep_latest == 0` disables enforcement.
fn enforce_reported_experience_retention(
    store: &dyn EvolutionStore,
    task_class: &str,
    keep_latest: usize,
) -> Result<(), EvoKernelError> {
    let task_class = task_class.trim();
    if task_class.is_empty() || keep_latest == 0 {
        return Ok(());
    }

    let (_, projection) = scan_projection(store)?;
    // Collect promoted reported-experience genes for this task class,
    // paired with their last-updated timestamps for ordering.
    let mut candidates = projection
        .genes
        .iter()
        .filter(|gene| gene.state == AssetState::Promoted)
        .filter_map(|gene| {
            let origin = strategy_metadata_value(&gene.strategy, "asset_origin")?;
            if !origin.eq_ignore_ascii_case("reported_experience") {
                return None;
            }
            let gene_task_class = strategy_metadata_value(&gene.strategy, "task_class")?;
            if !gene_task_class.eq_ignore_ascii_case(task_class) {
                return None;
            }
            let updated_at = projection
                .last_updated_at
                .get(&gene.id)
                .cloned()
                .unwrap_or_default();
            Some((gene.id.clone(), updated_at))
        })
        .collect::<Vec<_>>();
    if candidates.len() <= keep_latest {
        return Ok(());
    }

    // Newest first, ties broken by descending id; everything past the
    // retention window is stale.
    candidates.sort_by(|left, right| right.1.cmp(&left.1).then_with(|| right.0.cmp(&left.0)));
    let stale_gene_ids = candidates
        .into_iter()
        .skip(keep_latest)
        .map(|(gene_id, _)| gene_id)
        .collect::<BTreeSet<_>>();
    if stale_gene_ids.is_empty() {
        return Ok(());
    }

    let reason =
        format!("reported experience retention limit exceeded for task_class={task_class}");
    for gene_id in &stale_gene_ids {
        store
            .append_event(EvolutionEvent::GeneRevoked {
                gene_id: gene_id.clone(),
                reason: reason.clone(),
            })
            .map_err(store_err)?;
    }

    // Quarantine capsules belonging to revoked genes so they stop being
    // replay candidates.
    let stale_capsule_ids = projection
        .capsules
        .iter()
        .filter(|capsule| stale_gene_ids.contains(&capsule.gene_id))
        .map(|capsule| capsule.id.clone())
        .collect::<BTreeSet<_>>();
    for capsule_id in stale_capsule_ids {
        store
            .append_event(EvolutionEvent::CapsuleQuarantined { capsule_id })
            .map_err(store_err)?;
    }
    Ok(())
}
5117
/// Records a locally-reported replay experience as a promoted gene.
///
/// Signals are lowercased and deduplicated (order-preserving) and must be
/// non-empty. Strategy and validation entries are trimmed and deduplicated
/// with defaults injected (`asset_origin`, `task_class`, `task_label`,
/// `a2a.tasks.report`). After promotion, the per-task-class retention limit
/// is enforced.
///
/// # Errors
/// Returns `EvoKernelError::Validation` when the gene id or the normalized
/// signal list is empty; store append failures surface via `store_err`.
fn record_reported_experience_in_store(
    store: &dyn EvolutionStore,
    sender_id: String,
    gene_id: String,
    signals: Vec<String>,
    strategy: Vec<String>,
    validation: Vec<String>,
) -> Result<ImportOutcome, EvoKernelError> {
    let gene_id = gene_id.trim();
    if gene_id.is_empty() {
        return Err(EvoKernelError::Validation(
            "reported experience gene_id must not be empty".into(),
        ));
    }

    // Lowercase + dedupe signals while preserving first-seen order.
    let mut unique_signals = BTreeSet::new();
    let mut normalized_signals = Vec::new();
    for signal in signals {
        let normalized = signal.trim().to_ascii_lowercase();
        if normalized.is_empty() {
            continue;
        }
        if unique_signals.insert(normalized.clone()) {
            normalized_signals.push(normalized);
        }
    }
    if normalized_signals.is_empty() {
        return Err(EvoKernelError::Validation(
            "reported experience signals must not be empty".into(),
        ));
    }

    // Trim + dedupe strategy entries, preserving order.
    let mut unique_strategy = BTreeSet::new();
    let mut normalized_strategy = Vec::new();
    for entry in strategy {
        let normalized = entry.trim().to_string();
        if normalized.is_empty() {
            continue;
        }
        if unique_strategy.insert(normalized.clone()) {
            normalized_strategy.push(normalized);
        }
    }
    if normalized_strategy.is_empty() {
        normalized_strategy.push("reported local replay experience".into());
    }
    // Derive task class/label from metadata, falling back to the first signal.
    let task_class_id = strategy_metadata_value(&normalized_strategy, "task_class")
        .or_else(|| normalized_signals.first().cloned())
        .unwrap_or_else(|| "reported-experience".into());
    let task_label = strategy_metadata_value(&normalized_strategy, "task_label")
        .or_else(|| normalized_signals.first().cloned())
        .unwrap_or_else(|| task_class_id.clone());
    ensure_strategy_metadata(
        &mut normalized_strategy,
        "asset_origin",
        "reported_experience",
    );
    ensure_strategy_metadata(&mut normalized_strategy, "task_class", &task_class_id);
    ensure_strategy_metadata(&mut normalized_strategy, "task_label", &task_label);

    // Trim + dedupe validation entries, preserving order.
    let mut unique_validation = BTreeSet::new();
    let mut normalized_validation = Vec::new();
    for entry in validation {
        let normalized = entry.trim().to_string();
        if normalized.is_empty() {
            continue;
        }
        if unique_validation.insert(normalized.clone()) {
            normalized_validation.push(normalized);
        }
    }
    if normalized_validation.is_empty() {
        normalized_validation.push("a2a.tasks.report".into());
    }

    let gene = Gene {
        id: gene_id.to_string(),
        signals: normalized_signals,
        strategy: normalized_strategy,
        validation: normalized_validation,
        state: AssetState::Promoted,
    };
    let normalized_sender = normalized_sender_id(&sender_id);

    // Import -> project -> evaluate -> promote, mirroring the remote-import
    // event sequence so downstream projections treat both paths uniformly.
    store
        .append_event(EvolutionEvent::RemoteAssetImported {
            source: CandidateSource::Local,
            asset_ids: vec![gene.id.clone()],
            sender_id: normalized_sender.clone(),
        })
        .map_err(store_err)?;
    store
        .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
        .map_err(store_err)?;
    store
        .append_event(EvolutionEvent::PromotionEvaluated {
            gene_id: gene.id.clone(),
            state: AssetState::Promoted,
            reason: "trusted local report promoted reusable experience".into(),
            reason_code: TransitionReasonCode::PromotionTrustedLocalReport,
            evidence: None,
        })
        .map_err(store_err)?;
    store
        .append_event(EvolutionEvent::GenePromoted {
            gene_id: gene.id.clone(),
        })
        .map_err(store_err)?;
    // Keep only the newest N reported-experience genes for this task class.
    enforce_reported_experience_retention(
        store,
        &task_class_id,
        REPORTED_EXPERIENCE_RETENTION_LIMIT,
    )?;

    let imported_asset_ids = vec![gene.id];
    let next_cursor = latest_store_cursor(store)?;
    let resume_token = next_cursor.as_ref().and_then(|cursor| {
        normalized_sender
            .as_deref()
            .map(|sender| encode_resume_token(sender, cursor))
    });
    Ok(ImportOutcome {
        imported_asset_ids,
        accepted: true,
        next_cursor,
        resume_token,
        sync_audit: SyncAudit {
            batch_id: next_id("sync-import"),
            requested_cursor: None,
            scanned_count: 1,
            applied_count: 1,
            skipped_count: 0,
            failed_count: 0,
            failure_reasons: Vec::new(),
        },
    })
}
5255
/// Trims a sender id and returns it as an owned string, or `None` when the
/// trimmed value is empty.
fn normalized_sender_id(sender_id: &str) -> Option<String> {
    let trimmed = sender_id.trim();
    (!trimmed.is_empty()).then(|| trimmed.to_string())
}
5264
5265fn record_manifest_validation(
5266 store: &dyn EvolutionStore,
5267 envelope: &EvolutionEnvelope,
5268 accepted: bool,
5269 reason: impl Into<String>,
5270) -> Result<(), EvoKernelError> {
5271 let manifest = envelope.manifest.as_ref();
5272 let sender_id = manifest
5273 .and_then(|value| normalized_sender_id(&value.sender_id))
5274 .or_else(|| normalized_sender_id(&envelope.sender_id));
5275 let publisher = manifest.and_then(|value| normalized_sender_id(&value.publisher));
5276 let asset_ids = manifest
5277 .map(|value| value.asset_ids.clone())
5278 .unwrap_or_else(|| EvolutionEnvelope::manifest_asset_ids(&envelope.assets));
5279
5280 store
5281 .append_event(EvolutionEvent::ManifestValidated {
5282 accepted,
5283 reason: reason.into(),
5284 sender_id,
5285 publisher,
5286 asset_ids,
5287 })
5288 .map_err(store_err)?;
5289 Ok(())
5290}
5291
5292fn record_remote_publisher_for_asset(
5293 remote_publishers: Option<&Mutex<BTreeMap<String, String>>>,
5294 sender_id: &str,
5295 asset: &NetworkAsset,
5296) {
5297 let Some(remote_publishers) = remote_publishers else {
5298 return;
5299 };
5300 let sender_id = sender_id.trim();
5301 if sender_id.is_empty() {
5302 return;
5303 }
5304 let Ok(mut publishers) = remote_publishers.lock() else {
5305 return;
5306 };
5307 match asset {
5308 NetworkAsset::Gene { gene } => {
5309 publishers.insert(gene.id.clone(), sender_id.to_string());
5310 }
5311 NetworkAsset::Capsule { capsule } => {
5312 publishers.insert(capsule.id.clone(), sender_id.to_string());
5313 }
5314 NetworkAsset::EvolutionEvent { .. } => {}
5315 }
5316}
5317
5318fn remote_publishers_by_asset_from_store(store: &dyn EvolutionStore) -> BTreeMap<String, String> {
5319 let Ok(events) = store.scan(1) else {
5320 return BTreeMap::new();
5321 };
5322 remote_publishers_by_asset_from_events(&events)
5323}
5324
/// Rebuilds the asset-id -> remote-sender map by replaying stored events.
///
/// A publisher is recorded for an asset only once both facts have been seen:
/// a `RemoteAssetImported` event (source `Remote`) naming the asset, and the
/// event that materializes the asset locally (`GeneProjected` /
/// `CapsuleCommitted`). The bookkeeping below tolerates either arrival order.
fn remote_publishers_by_asset_from_events(
    events: &[StoredEvolutionEvent],
) -> BTreeMap<String, String> {
    // Sender seen per imported asset id, pending local materialization.
    let mut imported_asset_publishers = BTreeMap::<String, String>::new();
    // Asset ids already materialized, in case the import event arrives later.
    let mut known_gene_ids = BTreeSet::<String>::new();
    let mut known_capsule_ids = BTreeSet::<String>::new();
    let mut publishers_by_asset = BTreeMap::<String, String>::new();

    for stored in events {
        match &stored.event {
            EvolutionEvent::RemoteAssetImported {
                source: CandidateSource::Remote,
                asset_ids,
                sender_id,
            } => {
                // Imports with a blank or absent sender cannot be attributed.
                let Some(sender_id) = sender_id.as_deref().and_then(normalized_sender_id) else {
                    continue;
                };
                for asset_id in asset_ids {
                    imported_asset_publishers.insert(asset_id.clone(), sender_id.clone());
                    // Asset already materialized: attribution completes here.
                    if known_gene_ids.contains(asset_id) || known_capsule_ids.contains(asset_id) {
                        publishers_by_asset.insert(asset_id.clone(), sender_id.clone());
                    }
                }
            }
            EvolutionEvent::GeneProjected { gene } => {
                known_gene_ids.insert(gene.id.clone());
                // Import seen earlier: attribution completes here.
                if let Some(sender_id) = imported_asset_publishers.get(&gene.id) {
                    publishers_by_asset.insert(gene.id.clone(), sender_id.clone());
                }
            }
            EvolutionEvent::CapsuleCommitted { capsule } => {
                known_capsule_ids.insert(capsule.id.clone());
                if let Some(sender_id) = imported_asset_publishers.get(&capsule.id) {
                    publishers_by_asset.insert(capsule.id.clone(), sender_id.clone());
                }
            }
            _ => {}
        }
    }

    publishers_by_asset
}
5368
5369fn should_import_remote_event(event: &EvolutionEvent) -> bool {
5370 matches!(
5371 event,
5372 EvolutionEvent::MutationDeclared { .. } | EvolutionEvent::SpecLinked { .. }
5373 )
5374}
5375
5376fn fetch_assets_from_store(
5377 store: &dyn EvolutionStore,
5378 responder_id: impl Into<String>,
5379 query: &FetchQuery,
5380) -> Result<FetchResponse, EvoKernelError> {
5381 let (events, projection) = scan_projection(store)?;
5382 let requested_cursor = resolve_requested_cursor(
5383 &query.sender_id,
5384 query.since_cursor.as_deref(),
5385 query.resume_token.as_deref(),
5386 )?;
5387 let since_seq = requested_cursor
5388 .as_deref()
5389 .and_then(parse_sync_cursor_seq)
5390 .unwrap_or(0);
5391 let normalized_signals: Vec<String> = query
5392 .signals
5393 .iter()
5394 .map(|signal| signal.trim().to_ascii_lowercase())
5395 .filter(|signal| !signal.is_empty())
5396 .collect();
5397 let matches_any_signal = |candidate: &str| {
5398 if normalized_signals.is_empty() {
5399 return true;
5400 }
5401 let candidate = candidate.to_ascii_lowercase();
5402 normalized_signals
5403 .iter()
5404 .any(|signal| candidate.contains(signal) || signal.contains(&candidate))
5405 };
5406
5407 let matched_genes: Vec<Gene> = projection
5408 .genes
5409 .into_iter()
5410 .filter(|gene| gene.state == AssetState::Promoted)
5411 .filter(|gene| gene.signals.iter().any(|signal| matches_any_signal(signal)))
5412 .collect();
5413 let matched_gene_ids: BTreeSet<String> =
5414 matched_genes.iter().map(|gene| gene.id.clone()).collect();
5415 let matched_capsules: Vec<Capsule> = projection
5416 .capsules
5417 .into_iter()
5418 .filter(|capsule| capsule.state == AssetState::Promoted)
5419 .filter(|capsule| matched_gene_ids.contains(&capsule.gene_id))
5420 .collect();
5421 let all_assets = replay_export_assets(&events, matched_genes.clone(), matched_capsules.clone());
5422 let (selected_genes, selected_capsules) = if requested_cursor.is_some() {
5423 let delta = delta_window(&events, since_seq);
5424 let selected_capsules = matched_capsules
5425 .into_iter()
5426 .filter(|capsule| {
5427 delta.changed_capsule_ids.contains(&capsule.id)
5428 || delta.changed_mutation_ids.contains(&capsule.mutation_id)
5429 })
5430 .collect::<Vec<_>>();
5431 let selected_gene_ids = selected_capsules
5432 .iter()
5433 .map(|capsule| capsule.gene_id.clone())
5434 .collect::<BTreeSet<_>>();
5435 let selected_genes = matched_genes
5436 .into_iter()
5437 .filter(|gene| {
5438 delta.changed_gene_ids.contains(&gene.id) || selected_gene_ids.contains(&gene.id)
5439 })
5440 .collect::<Vec<_>>();
5441 (selected_genes, selected_capsules)
5442 } else {
5443 (matched_genes, matched_capsules)
5444 };
5445 let assets = replay_export_assets(&events, selected_genes, selected_capsules);
5446 let next_cursor = events.last().map(|stored| format_sync_cursor(stored.seq));
5447 let resume_token = next_cursor
5448 .as_ref()
5449 .map(|cursor| encode_resume_token(&query.sender_id, cursor));
5450 let applied_count = assets.len();
5451 let skipped_count = all_assets.len().saturating_sub(applied_count);
5452
5453 Ok(FetchResponse {
5454 sender_id: responder_id.into(),
5455 assets,
5456 next_cursor: next_cursor.clone(),
5457 resume_token,
5458 sync_audit: SyncAudit {
5459 batch_id: next_id("sync-fetch"),
5460 requested_cursor,
5461 scanned_count: all_assets.len(),
5462 applied_count,
5463 skipped_count,
5464 failed_count: 0,
5465 failure_reasons: Vec::new(),
5466 },
5467 })
5468}
5469
5470fn revoke_assets_in_store(
5471 store: &dyn EvolutionStore,
5472 notice: &RevokeNotice,
5473) -> Result<RevokeNotice, EvoKernelError> {
5474 let projection = projection_snapshot(store)?;
5475 let requested: BTreeSet<String> = notice
5476 .asset_ids
5477 .iter()
5478 .map(|asset_id| asset_id.trim().to_string())
5479 .filter(|asset_id| !asset_id.is_empty())
5480 .collect();
5481 let mut revoked_gene_ids = BTreeSet::new();
5482 let mut quarantined_capsule_ids = BTreeSet::new();
5483
5484 for gene in &projection.genes {
5485 if requested.contains(&gene.id) {
5486 revoked_gene_ids.insert(gene.id.clone());
5487 }
5488 }
5489 for capsule in &projection.capsules {
5490 if requested.contains(&capsule.id) {
5491 quarantined_capsule_ids.insert(capsule.id.clone());
5492 revoked_gene_ids.insert(capsule.gene_id.clone());
5493 }
5494 }
5495 for capsule in &projection.capsules {
5496 if revoked_gene_ids.contains(&capsule.gene_id) {
5497 quarantined_capsule_ids.insert(capsule.id.clone());
5498 }
5499 }
5500
5501 for gene_id in &revoked_gene_ids {
5502 store
5503 .append_event(EvolutionEvent::GeneRevoked {
5504 gene_id: gene_id.clone(),
5505 reason: notice.reason.clone(),
5506 })
5507 .map_err(store_err)?;
5508 }
5509 for capsule_id in &quarantined_capsule_ids {
5510 store
5511 .append_event(EvolutionEvent::CapsuleQuarantined {
5512 capsule_id: capsule_id.clone(),
5513 })
5514 .map_err(store_err)?;
5515 }
5516
5517 let mut affected_ids: Vec<String> = revoked_gene_ids.into_iter().collect();
5518 affected_ids.extend(quarantined_capsule_ids);
5519 affected_ids.sort();
5520 affected_ids.dedup();
5521
5522 Ok(RevokeNotice {
5523 sender_id: notice.sender_id.clone(),
5524 asset_ids: affected_ids,
5525 reason: notice.reason.clone(),
5526 })
5527}
5528
5529fn evolution_metrics_snapshot(
5530 store: &dyn EvolutionStore,
5531) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
5532 let (events, projection) = scan_projection(store)?;
5533 let replay = collect_replay_roi_aggregate(&events, &projection, None);
5534 let replay_reasoning_avoided_total = replay.replay_success_total;
5535 let confidence_revalidations_total = events
5536 .iter()
5537 .filter(|stored| is_confidence_revalidation_event(&stored.event))
5538 .count() as u64;
5539 let mutation_declared_total = events
5540 .iter()
5541 .filter(|stored| matches!(stored.event, EvolutionEvent::MutationDeclared { .. }))
5542 .count() as u64;
5543 let promoted_mutations_total = events
5544 .iter()
5545 .filter(|stored| matches!(stored.event, EvolutionEvent::GenePromoted { .. }))
5546 .count() as u64;
5547 let gene_revocations_total = events
5548 .iter()
5549 .filter(|stored| matches!(stored.event, EvolutionEvent::GeneRevoked { .. }))
5550 .count() as u64;
5551 let cutoff = Utc::now() - Duration::hours(1);
5552 let mutation_velocity_last_hour = count_recent_events(&events, cutoff, |event| {
5553 matches!(event, EvolutionEvent::MutationDeclared { .. })
5554 });
5555 let revoke_frequency_last_hour = count_recent_events(&events, cutoff, |event| {
5556 matches!(event, EvolutionEvent::GeneRevoked { .. })
5557 });
5558 let promoted_genes = projection
5559 .genes
5560 .iter()
5561 .filter(|gene| gene.state == AssetState::Promoted)
5562 .count() as u64;
5563 let promoted_capsules = projection
5564 .capsules
5565 .iter()
5566 .filter(|capsule| capsule.state == AssetState::Promoted)
5567 .count() as u64;
5568
5569 Ok(EvolutionMetricsSnapshot {
5570 replay_attempts_total: replay.replay_attempts_total,
5571 replay_success_total: replay.replay_success_total,
5572 replay_success_rate: safe_ratio(replay.replay_success_total, replay.replay_attempts_total),
5573 confidence_revalidations_total,
5574 replay_reasoning_avoided_total,
5575 reasoning_avoided_tokens_total: replay.reasoning_avoided_tokens_total,
5576 replay_fallback_cost_total: replay.replay_fallback_cost_total,
5577 replay_roi: compute_replay_roi(
5578 replay.reasoning_avoided_tokens_total,
5579 replay.replay_fallback_cost_total,
5580 ),
5581 replay_task_classes: replay.replay_task_classes,
5582 replay_sources: replay.replay_sources,
5583 mutation_declared_total,
5584 promoted_mutations_total,
5585 promotion_ratio: safe_ratio(promoted_mutations_total, mutation_declared_total),
5586 gene_revocations_total,
5587 mutation_velocity_last_hour,
5588 revoke_frequency_last_hour,
5589 promoted_genes,
5590 promoted_capsules,
5591 last_event_seq: events.last().map(|stored| stored.seq).unwrap_or(0),
5592 })
5593}
5594
/// Replay-economics totals accumulated by `collect_replay_roi_aggregate`.
struct ReplayRoiAggregate {
    // Successes plus failures.
    replay_attempts_total: u64,
    replay_success_total: u64,
    replay_failure_total: u64,
    // Estimated reasoning tokens saved by successful replays.
    reasoning_avoided_tokens_total: u64,
    // Estimated token cost incurred by replay fallbacks.
    replay_fallback_cost_total: u64,
    // Per task-class and per remote-source breakdowns of the totals above.
    replay_task_classes: Vec<ReplayTaskClassMetrics>,
    replay_sources: Vec<ReplaySourceRoiMetrics>,
}
5604
/// Aggregates replay economics from stored events, optionally restricted to
/// events at or after `cutoff`.
///
/// The preferred source of truth is `ReplayEconomicsRecorded` evidence. When
/// no such evidence exists in scope, this falls back to counting
/// `CapsuleReused` events as successes (each priced at
/// `REPLAY_REASONING_TOKEN_FLOOR` avoided tokens) and replay validation
/// failures as fallbacks at the same floor cost.
fn collect_replay_roi_aggregate(
    events: &[StoredEvolutionEvent],
    projection: &EvolutionProjection,
    cutoff: Option<DateTime<Utc>>,
) -> ReplayRoiAggregate {
    let replay_evidences = events
        .iter()
        .filter(|stored| replay_event_in_scope(stored, cutoff))
        .filter_map(|stored| match &stored.event {
            EvolutionEvent::ReplayEconomicsRecorded { evidence, .. } => Some(evidence.clone()),
            _ => None,
        })
        .collect::<Vec<_>>();

    // Tuples are (success, failure, avoided_tokens, fallback_cost), keyed by
    // (task_class_id, task_label) and by source sender id respectively.
    let mut task_totals = BTreeMap::<(String, String), (u64, u64, u64, u64)>::new();
    let mut source_totals = BTreeMap::<String, (u64, u64, u64, u64)>::new();

    let (
        replay_success_total,
        replay_failure_total,
        reasoning_avoided_tokens_total,
        replay_fallback_cost_total,
    ) = if replay_evidences.is_empty() {
        // Fallback path: no recorded evidence, so derive totals from reuse
        // and validation-failure events. Task classes come from gene signals.
        let gene_task_classes = projection
            .genes
            .iter()
            .map(|gene| (gene.id.clone(), replay_task_descriptor(&gene.signals)))
            .collect::<BTreeMap<_, _>>();
        let mut replay_success_total = 0_u64;
        let mut replay_failure_total = 0_u64;

        for stored in events
            .iter()
            .filter(|stored| replay_event_in_scope(stored, cutoff))
        {
            match &stored.event {
                EvolutionEvent::CapsuleReused { gene_id, .. } => {
                    replay_success_total += 1;
                    // Per-task attribution only when the gene is known.
                    if let Some((task_class_id, task_label)) = gene_task_classes.get(gene_id) {
                        let entry = task_totals
                            .entry((task_class_id.clone(), task_label.clone()))
                            .or_insert((0, 0, 0, 0));
                        entry.0 += 1;
                        entry.2 += REPLAY_REASONING_TOKEN_FLOOR;
                    }
                }
                event if is_replay_validation_failure(event) => {
                    replay_failure_total += 1;
                }
                _ => {}
            }
        }

        // Global totals priced at the token floor per attempt.
        (
            replay_success_total,
            replay_failure_total,
            replay_success_total * REPLAY_REASONING_TOKEN_FLOOR,
            replay_failure_total * REPLAY_REASONING_TOKEN_FLOOR,
        )
    } else {
        // Evidence path: sum the recorded economics directly.
        let mut replay_success_total = 0_u64;
        let mut replay_failure_total = 0_u64;
        let mut reasoning_avoided_tokens_total = 0_u64;
        let mut replay_fallback_cost_total = 0_u64;

        for evidence in &replay_evidences {
            if evidence.success {
                replay_success_total += 1;
            } else {
                replay_failure_total += 1;
            }
            reasoning_avoided_tokens_total += evidence.reasoning_avoided_tokens;
            replay_fallback_cost_total += evidence.replay_fallback_cost;

            let entry = task_totals
                .entry((evidence.task_class_id.clone(), evidence.task_label.clone()))
                .or_insert((0, 0, 0, 0));
            if evidence.success {
                entry.0 += 1;
            } else {
                entry.1 += 1;
            }
            entry.2 += evidence.reasoning_avoided_tokens;
            entry.3 += evidence.replay_fallback_cost;

            // Per-source breakdown only when the evidence names a sender.
            if let Some(source_sender_id) = evidence.source_sender_id.as_deref() {
                let source_entry = source_totals
                    .entry(source_sender_id.to_string())
                    .or_insert((0, 0, 0, 0));
                if evidence.success {
                    source_entry.0 += 1;
                } else {
                    source_entry.1 += 1;
                }
                source_entry.2 += evidence.reasoning_avoided_tokens;
                source_entry.3 += evidence.replay_fallback_cost;
            }
        }

        (
            replay_success_total,
            replay_failure_total,
            reasoning_avoided_tokens_total,
            replay_fallback_cost_total,
        )
    };

    // Flatten the keyed tuples into the public metrics structs.
    let replay_task_classes = task_totals
        .into_iter()
        .map(
            |(
                (task_class_id, task_label),
                (
                    replay_success_total,
                    replay_failure_total,
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            )| ReplayTaskClassMetrics {
                task_class_id,
                task_label,
                replay_success_total,
                replay_failure_total,
                reasoning_steps_avoided_total: replay_success_total,
                reasoning_avoided_tokens_total,
                replay_fallback_cost_total,
                replay_roi: compute_replay_roi(
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            },
        )
        .collect::<Vec<_>>();
    let replay_sources = source_totals
        .into_iter()
        .map(
            |(
                source_sender_id,
                (
                    replay_success_total,
                    replay_failure_total,
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            )| ReplaySourceRoiMetrics {
                source_sender_id,
                replay_success_total,
                replay_failure_total,
                reasoning_avoided_tokens_total,
                replay_fallback_cost_total,
                replay_roi: compute_replay_roi(
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            },
        )
        .collect::<Vec<_>>();

    ReplayRoiAggregate {
        replay_attempts_total: replay_success_total + replay_failure_total,
        replay_success_total,
        replay_failure_total,
        reasoning_avoided_tokens_total,
        replay_fallback_cost_total,
        replay_task_classes,
        replay_sources,
    }
}
5773
5774fn replay_event_in_scope(stored: &StoredEvolutionEvent, cutoff: Option<DateTime<Utc>>) -> bool {
5775 match cutoff {
5776 Some(cutoff) => parse_event_timestamp(&stored.timestamp)
5777 .map(|timestamp| timestamp >= cutoff)
5778 .unwrap_or(false),
5779 None => true,
5780 }
5781}
5782
5783fn replay_roi_release_gate_summary(
5784 store: &dyn EvolutionStore,
5785 window_seconds: u64,
5786) -> Result<ReplayRoiWindowSummary, EvoKernelError> {
5787 let (events, projection) = scan_projection(store)?;
5788 let now = Utc::now();
5789 let cutoff = if window_seconds == 0 {
5790 None
5791 } else {
5792 let seconds = i64::try_from(window_seconds).unwrap_or(i64::MAX);
5793 Some(now - Duration::seconds(seconds))
5794 };
5795 let replay = collect_replay_roi_aggregate(&events, &projection, cutoff);
5796
5797 Ok(ReplayRoiWindowSummary {
5798 generated_at: now.to_rfc3339(),
5799 window_seconds,
5800 replay_attempts_total: replay.replay_attempts_total,
5801 replay_success_total: replay.replay_success_total,
5802 replay_failure_total: replay.replay_failure_total,
5803 reasoning_avoided_tokens_total: replay.reasoning_avoided_tokens_total,
5804 replay_fallback_cost_total: replay.replay_fallback_cost_total,
5805 replay_roi: compute_replay_roi(
5806 replay.reasoning_avoided_tokens_total,
5807 replay.replay_fallback_cost_total,
5808 ),
5809 replay_task_classes: replay.replay_task_classes,
5810 replay_sources: replay.replay_sources,
5811 })
5812}
5813
5814fn replay_roi_release_gate_contract(
5815 summary: &ReplayRoiWindowSummary,
5816 thresholds: ReplayRoiReleaseGateThresholds,
5817) -> ReplayRoiReleaseGateContract {
5818 let input = replay_roi_release_gate_input_contract(summary, thresholds);
5819 let output = evaluate_replay_roi_release_gate_contract_input(&input);
5820 ReplayRoiReleaseGateContract { input, output }
5821}
5822
5823fn replay_roi_release_gate_input_contract(
5824 summary: &ReplayRoiWindowSummary,
5825 thresholds: ReplayRoiReleaseGateThresholds,
5826) -> ReplayRoiReleaseGateInputContract {
5827 let replay_safety_signal = replay_roi_release_gate_safety_signal(summary);
5828 let replay_safety = replay_safety_signal.fail_closed_default
5829 && replay_safety_signal.rollback_ready
5830 && replay_safety_signal.audit_trail_complete
5831 && replay_safety_signal.has_replay_activity;
5832 ReplayRoiReleaseGateInputContract {
5833 generated_at: summary.generated_at.clone(),
5834 window_seconds: summary.window_seconds,
5835 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
5836 .iter()
5837 .map(|dimension| (*dimension).to_string())
5838 .collect(),
5839 replay_attempts_total: summary.replay_attempts_total,
5840 replay_success_total: summary.replay_success_total,
5841 replay_failure_total: summary.replay_failure_total,
5842 replay_hit_rate: safe_ratio(summary.replay_success_total, summary.replay_attempts_total),
5843 false_replay_rate: safe_ratio(summary.replay_failure_total, summary.replay_attempts_total),
5844 reasoning_avoided_tokens: summary.reasoning_avoided_tokens_total,
5845 replay_fallback_cost_total: summary.replay_fallback_cost_total,
5846 replay_roi: summary.replay_roi,
5847 replay_safety,
5848 replay_safety_signal,
5849 thresholds,
5850 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
5851 }
5852}
5853
5854fn replay_roi_release_gate_safety_signal(
5855 summary: &ReplayRoiWindowSummary,
5856) -> ReplayRoiReleaseGateSafetySignal {
5857 ReplayRoiReleaseGateSafetySignal {
5858 fail_closed_default: true,
5859 rollback_ready: summary.replay_failure_total == 0 || summary.replay_fallback_cost_total > 0,
5860 audit_trail_complete: summary.replay_attempts_total
5861 == summary.replay_success_total + summary.replay_failure_total,
5862 has_replay_activity: summary.replay_attempts_total > 0,
5863 }
5864}
5865
/// Evaluates a release-gate input contract and produces the gate decision.
///
/// Two classes of checks are applied:
/// - *Validity* checks (inconsistent accounting, out-of-range rates,
///   non-finite values, malformed thresholds) mark the result indeterminate —
///   which is still fail-closed, just distinguished in the status/summary.
/// - *Threshold* checks compare the metrics against the configured minima and
///   maxima; failures here produce a plain fail-closed status.
///
/// Every check contributes deduplicated entries to `failed_checks` and
/// `evidence_refs`; both lists are sorted before the output is built.
pub fn evaluate_replay_roi_release_gate_contract_input(
    input: &ReplayRoiReleaseGateInputContract,
) -> ReplayRoiReleaseGateOutputContract {
    let mut failed_checks = Vec::new();
    let mut evidence_refs = Vec::new();
    // Set when any validity check fails; threshold failures leave it false.
    let mut indeterminate = false;

    // Baseline evidence present in every evaluation.
    replay_release_gate_push_unique(&mut evidence_refs, "replay_roi_release_gate_summary");
    replay_release_gate_push_unique(
        &mut evidence_refs,
        format!("window_seconds:{}", input.window_seconds),
    );
    if input.generated_at.trim().is_empty() {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "missing_generated_at",
            &["field:generated_at"],
        );
        indeterminate = true;
    } else {
        replay_release_gate_push_unique(
            &mut evidence_refs,
            format!("generated_at:{}", input.generated_at),
        );
    }

    // Validity: attempts must equal successes + failures.
    let expected_attempts_total = input.replay_success_total + input.replay_failure_total;
    if input.replay_attempts_total != expected_attempts_total {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_attempt_accounting",
            &[
                "metric:replay_attempts_total",
                "metric:replay_success_total",
                "metric:replay_failure_total",
            ],
        );
        indeterminate = true;
    }

    // Validity: with zero attempts there is nothing to judge.
    if input.replay_attempts_total == 0 {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "missing_replay_attempts",
            &["metric:replay_attempts_total"],
        );
        indeterminate = true;
    }

    // Validity: rates must be finite and within [0, 1].
    if !replay_release_gate_rate_valid(input.replay_hit_rate) {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_replay_hit_rate",
            &["metric:replay_hit_rate"],
        );
        indeterminate = true;
    }
    if !replay_release_gate_rate_valid(input.false_replay_rate) {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_false_replay_rate",
            &["metric:false_replay_rate"],
        );
        indeterminate = true;
    }

    if !input.replay_roi.is_finite() {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_replay_roi",
            &["metric:replay_roi"],
        );
        indeterminate = true;
    }

    // Validity: reported rates must match the raw counts (within epsilon).
    let expected_hit_rate = safe_ratio(input.replay_success_total, input.replay_attempts_total);
    let expected_false_rate = safe_ratio(input.replay_failure_total, input.replay_attempts_total);
    if input.replay_attempts_total > 0
        && !replay_release_gate_float_eq(input.replay_hit_rate, expected_hit_rate)
    {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_replay_hit_rate_consistency",
            &["metric:replay_hit_rate", "metric:replay_success_total"],
        );
        indeterminate = true;
    }
    if input.replay_attempts_total > 0
        && !replay_release_gate_float_eq(input.false_replay_rate, expected_false_rate)
    {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_false_replay_rate_consistency",
            &["metric:false_replay_rate", "metric:replay_failure_total"],
        );
        indeterminate = true;
    }

    // Validity: the thresholds themselves must be sane.
    if !(0.0..=1.0).contains(&input.thresholds.min_replay_hit_rate) {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_threshold_min_replay_hit_rate",
            &["threshold:min_replay_hit_rate"],
        );
        indeterminate = true;
    }
    if !(0.0..=1.0).contains(&input.thresholds.max_false_replay_rate) {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_threshold_max_false_replay_rate",
            &["threshold:max_false_replay_rate"],
        );
        indeterminate = true;
    }
    if !input.thresholds.min_replay_roi.is_finite() {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_threshold_min_replay_roi",
            &["threshold:min_replay_roi"],
        );
        indeterminate = true;
    }

    // Threshold checks (these do NOT mark the result indeterminate).
    if input.replay_attempts_total < input.thresholds.min_replay_attempts {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "min_replay_attempts_below_threshold",
            &[
                "threshold:min_replay_attempts",
                "metric:replay_attempts_total",
            ],
        );
    }
    if input.replay_attempts_total > 0
        && input.replay_hit_rate < input.thresholds.min_replay_hit_rate
    {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "replay_hit_rate_below_threshold",
            &["threshold:min_replay_hit_rate", "metric:replay_hit_rate"],
        );
    }
    if input.replay_attempts_total > 0
        && input.false_replay_rate > input.thresholds.max_false_replay_rate
    {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "false_replay_rate_above_threshold",
            &[
                "threshold:max_false_replay_rate",
                "metric:false_replay_rate",
            ],
        );
    }
    if input.reasoning_avoided_tokens < input.thresholds.min_reasoning_avoided_tokens {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "reasoning_avoided_tokens_below_threshold",
            &[
                "threshold:min_reasoning_avoided_tokens",
                "metric:reasoning_avoided_tokens",
            ],
        );
    }
    if input.replay_roi < input.thresholds.min_replay_roi {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "replay_roi_below_threshold",
            &["threshold:min_replay_roi", "metric:replay_roi"],
        );
    }
    if input.thresholds.require_replay_safety && !input.replay_safety {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "replay_safety_required",
            &["metric:replay_safety", "threshold:require_replay_safety"],
        );
    }

    failed_checks.sort();
    evidence_refs.sort();

    // No failures -> pass; validity failure -> indeterminate; otherwise
    // threshold-only failures -> fail-closed.
    let status = if failed_checks.is_empty() {
        ReplayRoiReleaseGateStatus::Pass
    } else if indeterminate {
        ReplayRoiReleaseGateStatus::Indeterminate
    } else {
        ReplayRoiReleaseGateStatus::FailClosed
    };
    let joined_checks = if failed_checks.is_empty() {
        "none".to_string()
    } else {
        failed_checks.join(",")
    };
    let summary = match status {
        ReplayRoiReleaseGateStatus::Pass => format!(
            "release gate pass: attempts={} hit_rate={:.3} false_replay_rate={:.3} reasoning_avoided_tokens={} replay_roi={:.3} replay_safety={}",
            input.replay_attempts_total,
            input.replay_hit_rate,
            input.false_replay_rate,
            input.reasoning_avoided_tokens,
            input.replay_roi,
            input.replay_safety
        ),
        ReplayRoiReleaseGateStatus::FailClosed => format!(
            "release gate fail_closed: failed_checks=[{}] attempts={} hit_rate={:.3} false_replay_rate={:.3} reasoning_avoided_tokens={} replay_roi={:.3} replay_safety={}",
            joined_checks,
            input.replay_attempts_total,
            input.replay_hit_rate,
            input.false_replay_rate,
            input.reasoning_avoided_tokens,
            input.replay_roi,
            input.replay_safety
        ),
        ReplayRoiReleaseGateStatus::Indeterminate => format!(
            "release gate indeterminate (fail-closed): failed_checks=[{}] attempts={} hit_rate={:.3} false_replay_rate={:.3} reasoning_avoided_tokens={} replay_roi={:.3} replay_safety={}",
            joined_checks,
            input.replay_attempts_total,
            input.replay_hit_rate,
            input.false_replay_rate,
            input.reasoning_avoided_tokens,
            input.replay_roi,
            input.replay_safety
        ),
    };

    ReplayRoiReleaseGateOutputContract {
        status,
        failed_checks,
        evidence_refs,
        summary,
    }
}
6116
/// Records a failed gate check and its supporting evidence refs, keeping both
/// lists free of duplicates (first occurrence wins, order preserved).
fn replay_release_gate_record_failed_check(
    failed_checks: &mut Vec<String>,
    evidence_refs: &mut Vec<String>,
    check: &str,
    refs: &[&str],
) {
    if !failed_checks.iter().any(|existing| existing == check) {
        failed_checks.push(check.to_string());
    }
    for reference in refs {
        if !evidence_refs.iter().any(|existing| existing == reference) {
            evidence_refs.push((*reference).to_string());
        }
    }
}
6128
/// Appends `entry` to `values` unless an equal entry is already present.
fn replay_release_gate_push_unique(values: &mut Vec<String>, entry: impl Into<String>) {
    let entry = entry.into();
    if !values.contains(&entry) {
        values.push(entry);
    }
}
6135
/// A rate is valid when it is a finite number within the closed range [0, 1].
fn replay_release_gate_rate_valid(value: f64) -> bool {
    value.is_finite() && value >= 0.0 && value <= 1.0
}
6139
/// Approximate float equality with an absolute tolerance of 1e-9, used for
/// cross-checking reported rates against recomputed ones.
fn replay_release_gate_float_eq(left: f64, right: f64) -> bool {
    const TOLERANCE: f64 = 1e-9;
    (left - right).abs() <= TOLERANCE
}
6143
6144fn evolution_health_snapshot(snapshot: &EvolutionMetricsSnapshot) -> EvolutionHealthSnapshot {
6145 EvolutionHealthSnapshot {
6146 status: "ok".into(),
6147 last_event_seq: snapshot.last_event_seq,
6148 promoted_genes: snapshot.promoted_genes,
6149 promoted_capsules: snapshot.promoted_capsules,
6150 }
6151}
6152
6153fn render_evolution_metrics_prometheus(
6154 snapshot: &EvolutionMetricsSnapshot,
6155 health: &EvolutionHealthSnapshot,
6156) -> String {
6157 let mut out = String::new();
6158 out.push_str(
6159 "# HELP oris_evolution_replay_attempts_total Total replay attempts that reached validation.\n",
6160 );
6161 out.push_str("# TYPE oris_evolution_replay_attempts_total counter\n");
6162 out.push_str(&format!(
6163 "oris_evolution_replay_attempts_total {}\n",
6164 snapshot.replay_attempts_total
6165 ));
6166 out.push_str("# HELP oris_evolution_replay_success_total Total replay attempts that reused a capsule successfully.\n");
6167 out.push_str("# TYPE oris_evolution_replay_success_total counter\n");
6168 out.push_str(&format!(
6169 "oris_evolution_replay_success_total {}\n",
6170 snapshot.replay_success_total
6171 ));
6172 out.push_str("# HELP oris_evolution_replay_reasoning_avoided_total Total planner steps avoided by successful replay.\n");
6173 out.push_str("# TYPE oris_evolution_replay_reasoning_avoided_total counter\n");
6174 out.push_str(&format!(
6175 "oris_evolution_replay_reasoning_avoided_total {}\n",
6176 snapshot.replay_reasoning_avoided_total
6177 ));
6178 out.push_str("# HELP oris_evolution_reasoning_avoided_tokens_total Estimated reasoning tokens avoided by replay hits.\n");
6179 out.push_str("# TYPE oris_evolution_reasoning_avoided_tokens_total counter\n");
6180 out.push_str(&format!(
6181 "oris_evolution_reasoning_avoided_tokens_total {}\n",
6182 snapshot.reasoning_avoided_tokens_total
6183 ));
6184 out.push_str("# HELP oris_evolution_replay_fallback_cost_total Estimated reasoning token cost spent on replay fallbacks.\n");
6185 out.push_str("# TYPE oris_evolution_replay_fallback_cost_total counter\n");
6186 out.push_str(&format!(
6187 "oris_evolution_replay_fallback_cost_total {}\n",
6188 snapshot.replay_fallback_cost_total
6189 ));
6190 out.push_str("# HELP oris_evolution_replay_roi Net replay ROI in token space ((avoided - fallback_cost) / total).\n");
6191 out.push_str("# TYPE oris_evolution_replay_roi gauge\n");
6192 out.push_str(&format!(
6193 "oris_evolution_replay_roi {:.6}\n",
6194 snapshot.replay_roi
6195 ));
6196 out.push_str("# HELP oris_evolution_replay_utilization_by_task_class_total Successful replay reuse counts grouped by deterministic task class.\n");
6197 out.push_str("# TYPE oris_evolution_replay_utilization_by_task_class_total counter\n");
6198 for task_class in &snapshot.replay_task_classes {
6199 out.push_str(&format!(
6200 "oris_evolution_replay_utilization_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
6201 prometheus_label_value(&task_class.task_class_id),
6202 prometheus_label_value(&task_class.task_label),
6203 task_class.replay_success_total
6204 ));
6205 }
6206 out.push_str("# HELP oris_evolution_replay_reasoning_avoided_by_task_class_total Planner steps avoided by successful replay grouped by deterministic task class.\n");
6207 out.push_str("# TYPE oris_evolution_replay_reasoning_avoided_by_task_class_total counter\n");
6208 for task_class in &snapshot.replay_task_classes {
6209 out.push_str(&format!(
6210 "oris_evolution_replay_reasoning_avoided_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
6211 prometheus_label_value(&task_class.task_class_id),
6212 prometheus_label_value(&task_class.task_label),
6213 task_class.reasoning_steps_avoided_total
6214 ));
6215 }
6216 out.push_str("# HELP oris_evolution_reasoning_avoided_tokens_by_task_class_total Estimated reasoning tokens avoided by replay hits grouped by deterministic task class.\n");
6217 out.push_str("# TYPE oris_evolution_reasoning_avoided_tokens_by_task_class_total counter\n");
6218 for task_class in &snapshot.replay_task_classes {
6219 out.push_str(&format!(
6220 "oris_evolution_reasoning_avoided_tokens_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
6221 prometheus_label_value(&task_class.task_class_id),
6222 prometheus_label_value(&task_class.task_label),
6223 task_class.reasoning_avoided_tokens_total
6224 ));
6225 }
6226 out.push_str("# HELP oris_evolution_replay_fallback_cost_by_task_class_total Estimated fallback token cost grouped by deterministic task class.\n");
6227 out.push_str("# TYPE oris_evolution_replay_fallback_cost_by_task_class_total counter\n");
6228 for task_class in &snapshot.replay_task_classes {
6229 out.push_str(&format!(
6230 "oris_evolution_replay_fallback_cost_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
6231 prometheus_label_value(&task_class.task_class_id),
6232 prometheus_label_value(&task_class.task_label),
6233 task_class.replay_fallback_cost_total
6234 ));
6235 }
6236 out.push_str("# HELP oris_evolution_replay_roi_by_task_class Replay ROI in token space grouped by deterministic task class.\n");
6237 out.push_str("# TYPE oris_evolution_replay_roi_by_task_class gauge\n");
6238 for task_class in &snapshot.replay_task_classes {
6239 out.push_str(&format!(
6240 "oris_evolution_replay_roi_by_task_class{{task_class_id=\"{}\",task_label=\"{}\"}} {:.6}\n",
6241 prometheus_label_value(&task_class.task_class_id),
6242 prometheus_label_value(&task_class.task_label),
6243 task_class.replay_roi
6244 ));
6245 }
6246 out.push_str("# HELP oris_evolution_replay_roi_by_source Replay ROI in token space grouped by remote sender id for cross-node reconciliation.\n");
6247 out.push_str("# TYPE oris_evolution_replay_roi_by_source gauge\n");
6248 for source in &snapshot.replay_sources {
6249 out.push_str(&format!(
6250 "oris_evolution_replay_roi_by_source{{source_sender_id=\"{}\"}} {:.6}\n",
6251 prometheus_label_value(&source.source_sender_id),
6252 source.replay_roi
6253 ));
6254 }
6255 out.push_str("# HELP oris_evolution_reasoning_avoided_tokens_by_source_total Estimated reasoning tokens avoided grouped by remote sender id.\n");
6256 out.push_str("# TYPE oris_evolution_reasoning_avoided_tokens_by_source_total counter\n");
6257 for source in &snapshot.replay_sources {
6258 out.push_str(&format!(
6259 "oris_evolution_reasoning_avoided_tokens_by_source_total{{source_sender_id=\"{}\"}} {}\n",
6260 prometheus_label_value(&source.source_sender_id),
6261 source.reasoning_avoided_tokens_total
6262 ));
6263 }
6264 out.push_str("# HELP oris_evolution_replay_fallback_cost_by_source_total Estimated replay fallback token cost grouped by remote sender id.\n");
6265 out.push_str("# TYPE oris_evolution_replay_fallback_cost_by_source_total counter\n");
6266 for source in &snapshot.replay_sources {
6267 out.push_str(&format!(
6268 "oris_evolution_replay_fallback_cost_by_source_total{{source_sender_id=\"{}\"}} {}\n",
6269 prometheus_label_value(&source.source_sender_id),
6270 source.replay_fallback_cost_total
6271 ));
6272 }
6273 out.push_str("# HELP oris_evolution_replay_success_rate Successful replay attempts divided by replay attempts that reached validation.\n");
6274 out.push_str("# TYPE oris_evolution_replay_success_rate gauge\n");
6275 out.push_str(&format!(
6276 "oris_evolution_replay_success_rate {:.6}\n",
6277 snapshot.replay_success_rate
6278 ));
6279 out.push_str("# HELP oris_evolution_confidence_revalidations_total Total confidence-driven demotions that require revalidation before replay.\n");
6280 out.push_str("# TYPE oris_evolution_confidence_revalidations_total counter\n");
6281 out.push_str(&format!(
6282 "oris_evolution_confidence_revalidations_total {}\n",
6283 snapshot.confidence_revalidations_total
6284 ));
6285 out.push_str(
6286 "# HELP oris_evolution_mutation_declared_total Total declared mutations recorded in the evolution log.\n",
6287 );
6288 out.push_str("# TYPE oris_evolution_mutation_declared_total counter\n");
6289 out.push_str(&format!(
6290 "oris_evolution_mutation_declared_total {}\n",
6291 snapshot.mutation_declared_total
6292 ));
6293 out.push_str("# HELP oris_evolution_promoted_mutations_total Total mutations promoted by the governor.\n");
6294 out.push_str("# TYPE oris_evolution_promoted_mutations_total counter\n");
6295 out.push_str(&format!(
6296 "oris_evolution_promoted_mutations_total {}\n",
6297 snapshot.promoted_mutations_total
6298 ));
6299 out.push_str(
6300 "# HELP oris_evolution_promotion_ratio Promoted mutations divided by declared mutations.\n",
6301 );
6302 out.push_str("# TYPE oris_evolution_promotion_ratio gauge\n");
6303 out.push_str(&format!(
6304 "oris_evolution_promotion_ratio {:.6}\n",
6305 snapshot.promotion_ratio
6306 ));
6307 out.push_str("# HELP oris_evolution_gene_revocations_total Total gene revocations recorded in the evolution log.\n");
6308 out.push_str("# TYPE oris_evolution_gene_revocations_total counter\n");
6309 out.push_str(&format!(
6310 "oris_evolution_gene_revocations_total {}\n",
6311 snapshot.gene_revocations_total
6312 ));
6313 out.push_str("# HELP oris_evolution_mutation_velocity_last_hour Declared mutations observed in the last hour.\n");
6314 out.push_str("# TYPE oris_evolution_mutation_velocity_last_hour gauge\n");
6315 out.push_str(&format!(
6316 "oris_evolution_mutation_velocity_last_hour {}\n",
6317 snapshot.mutation_velocity_last_hour
6318 ));
6319 out.push_str("# HELP oris_evolution_revoke_frequency_last_hour Gene revocations observed in the last hour.\n");
6320 out.push_str("# TYPE oris_evolution_revoke_frequency_last_hour gauge\n");
6321 out.push_str(&format!(
6322 "oris_evolution_revoke_frequency_last_hour {}\n",
6323 snapshot.revoke_frequency_last_hour
6324 ));
6325 out.push_str("# HELP oris_evolution_promoted_genes Current promoted genes in the evolution projection.\n");
6326 out.push_str("# TYPE oris_evolution_promoted_genes gauge\n");
6327 out.push_str(&format!(
6328 "oris_evolution_promoted_genes {}\n",
6329 snapshot.promoted_genes
6330 ));
6331 out.push_str("# HELP oris_evolution_promoted_capsules Current promoted capsules in the evolution projection.\n");
6332 out.push_str("# TYPE oris_evolution_promoted_capsules gauge\n");
6333 out.push_str(&format!(
6334 "oris_evolution_promoted_capsules {}\n",
6335 snapshot.promoted_capsules
6336 ));
6337 out.push_str("# HELP oris_evolution_store_last_event_seq Last visible append-only evolution event sequence.\n");
6338 out.push_str("# TYPE oris_evolution_store_last_event_seq gauge\n");
6339 out.push_str(&format!(
6340 "oris_evolution_store_last_event_seq {}\n",
6341 snapshot.last_event_seq
6342 ));
6343 out.push_str(
6344 "# HELP oris_evolution_health Evolution observability store health (1 = healthy).\n",
6345 );
6346 out.push_str("# TYPE oris_evolution_health gauge\n");
6347 out.push_str(&format!(
6348 "oris_evolution_health {}\n",
6349 u8::from(health.status == "ok")
6350 ));
6351 out
6352}
6353
6354fn count_recent_events(
6355 events: &[StoredEvolutionEvent],
6356 cutoff: DateTime<Utc>,
6357 predicate: impl Fn(&EvolutionEvent) -> bool,
6358) -> u64 {
6359 events
6360 .iter()
6361 .filter(|stored| {
6362 predicate(&stored.event)
6363 && parse_event_timestamp(&stored.timestamp)
6364 .map(|timestamp| timestamp >= cutoff)
6365 .unwrap_or(false)
6366 })
6367 .count() as u64
6368}
6369
/// Escapes a raw string for use inside a Prometheus label value: backslash,
/// newline, and double-quote are backslash-escaped per the text exposition
/// format. Single-pass equivalent of the chained `replace` calls.
fn prometheus_label_value(input: &str) -> String {
    let mut escaped = String::with_capacity(input.len());
    for ch in input.chars() {
        match ch {
            '\\' => escaped.push_str("\\\\"),
            '\n' => escaped.push_str("\\n"),
            '"' => escaped.push_str("\\\""),
            other => escaped.push(other),
        }
    }
    escaped
}
6376
6377fn parse_event_timestamp(raw: &str) -> Option<DateTime<Utc>> {
6378 DateTime::parse_from_rfc3339(raw)
6379 .ok()
6380 .map(|parsed| parsed.with_timezone(&Utc))
6381}
6382
6383fn is_replay_validation_failure(event: &EvolutionEvent) -> bool {
6384 matches!(
6385 event,
6386 EvolutionEvent::ValidationFailed {
6387 gene_id: Some(_),
6388 ..
6389 }
6390 )
6391}
6392
6393fn is_confidence_revalidation_event(event: &EvolutionEvent) -> bool {
6394 matches!(
6395 event,
6396 EvolutionEvent::PromotionEvaluated {
6397 state,
6398 reason,
6399 reason_code,
6400 ..
6401 }
6402 if *state == AssetState::Quarantined
6403 && (reason_code == &TransitionReasonCode::RevalidationConfidenceDecay
6404 || (reason_code == &TransitionReasonCode::Unspecified
6405 && reason.contains("confidence decayed")))
6406 )
6407}
6408
/// Divides `numerator` by `denominator` as floats, mapping division by zero
/// to `0.0` instead of infinity/NaN.
fn safe_ratio(numerator: u64, denominator: u64) -> f64 {
    match denominator {
        0 => 0.0,
        _ => numerator as f64 / denominator as f64,
    }
}
6416
6417fn store_err(err: EvolutionError) -> EvoKernelError {
6418 EvoKernelError::Store(err.to_string())
6419}
6420
6421#[cfg(test)]
6422mod tests {
6423 use super::*;
6424 use oris_agent_contract::{
6425 AgentRole, CoordinationPlan, CoordinationPrimitive, CoordinationTask,
6426 };
6427 use oris_kernel::{
6428 AllowAllPolicy, InMemoryEventStore, KernelMode, KernelState, NoopActionExecutor,
6429 NoopStepFn, StateUpdatedOnlyReducer,
6430 };
6431 use serde::{Deserialize, Serialize};
6432
    /// Minimal kernel state used only to satisfy the `Kernel` type parameter
    /// in these tests; it carries no data.
    #[derive(Clone, Debug, Default, Serialize, Deserialize)]
    struct TestState;
6435
    impl KernelState for TestState {
        /// Fixed schema version; these tests never migrate state.
        fn version(&self) -> u32 {
            1
        }
    }
6441
6442 #[test]
6443 fn repair_quality_gate_accepts_semantic_variants() {
6444 let plan = r#"
6445根本原因:脚本中拼写错误导致 unknown command 'process'。
6446修复建议:将 `proccess` 更正为 `process`,并统一命令入口。
6447验证方式:执行 `cargo check -p oris-runtime` 与回归测试。
6448恢复方案:若新入口异常,立即回滚到旧命令映射。
6449"#;
6450 let report = evaluate_repair_quality_gate(plan);
6451 assert!(report.passes());
6452 assert!(report.failed_checks().is_empty());
6453 }
6454
6455 #[test]
6456 fn repair_quality_gate_rejects_missing_incident_anchor() {
6457 let plan = r#"
6458原因分析:逻辑分支覆盖不足。
6459修复方案:补充分支与日志。
6460验证命令:cargo check -p oris-runtime
6461回滚方案:git revert HEAD
6462"#;
6463 let report = evaluate_repair_quality_gate(plan);
6464 assert!(!report.passes());
6465 assert!(report
6466 .failed_checks()
6467 .iter()
6468 .any(|check| check.contains("unknown command")));
6469 }
6470
6471 fn temp_workspace(name: &str) -> std::path::PathBuf {
6472 let root =
6473 std::env::temp_dir().join(format!("oris-evokernel-{name}-{}", std::process::id()));
6474 if root.exists() {
6475 fs::remove_dir_all(&root).unwrap();
6476 }
6477 fs::create_dir_all(root.join("src")).unwrap();
6478 fs::write(
6479 root.join("Cargo.toml"),
6480 "[package]\nname = \"sample\"\nversion = \"0.1.0\"\nedition = \"2021\"\n",
6481 )
6482 .unwrap();
6483 fs::write(root.join("Cargo.lock"), "# lock\n").unwrap();
6484 fs::write(root.join("src/lib.rs"), "pub fn demo() -> usize { 1 }\n").unwrap();
6485 root
6486 }
6487
    /// Builds a minimal kernel for tests: in-memory event store, no
    /// snapshots, state-updated-only reducer, no-op executor and step,
    /// allow-all policy, no effect sink, `KernelMode::Normal`.
    fn test_kernel() -> Arc<Kernel<TestState>> {
        Arc::new(Kernel::<TestState> {
            events: Box::new(InMemoryEventStore::new()),
            snaps: None,
            reducer: Box::new(StateUpdatedOnlyReducer),
            exec: Box::new(NoopActionExecutor),
            step: Box::new(NoopStepFn),
            policy: Box::new(AllowAllPolicy),
            effect_sink: None,
            mode: KernelMode::Normal,
        })
    }
6500
    /// Validation plan with a single fast command stage (`git --version`) so
    /// tests can exercise the validation path without building anything.
    fn lightweight_plan() -> ValidationPlan {
        ValidationPlan {
            profile: "test".into(),
            stages: vec![ValidationStage::Command {
                program: "git".into(),
                args: vec!["--version".into()],
                timeout_ms: 5_000,
            }],
        }
    }
6511
    /// Prepares a low-risk sample mutation that adds a one-line README via a
    /// unified diff, restricted to `README.md`, with base revision `HEAD`.
    fn sample_mutation() -> PreparedMutation {
        prepare_mutation(
            MutationIntent {
                id: "mutation-1".into(),
                intent: "add README".into(),
                target: MutationTarget::Paths {
                    allow: vec!["README.md".into()],
                },
                expected_effect: "repo still builds".into(),
                risk: RiskLevel::Low,
                signals: vec!["missing readme".into()],
                spec_id: None,
            },
            "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1 @@
+# sample
"
            .into(),
            Some("HEAD".into()),
        )
    }
6538
    /// Sandbox policy that only permits `git`, with generous duration/output
    /// limits and no denied environment prefixes.
    fn base_sandbox_policy() -> SandboxPolicy {
        SandboxPolicy {
            allowed_programs: vec!["git".into()],
            max_duration_ms: 60_000,
            max_output_bytes: 1024 * 1024,
            denied_env_prefixes: Vec::new(),
        }
    }
6547
    /// Shared `CommandValidator` configured with the base sandbox policy.
    fn command_validator() -> Arc<dyn Validator> {
        Arc::new(CommandValidator::new(base_sandbox_policy()))
    }
6551
6552 fn replay_input(signal: &str) -> SelectorInput {
6553 let rustc_version = std::process::Command::new("rustc")
6554 .arg("--version")
6555 .output()
6556 .ok()
6557 .filter(|output| output.status.success())
6558 .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
6559 .unwrap_or_else(|| "rustc unknown".into());
6560 SelectorInput {
6561 signals: vec![signal.into()],
6562 env: EnvFingerprint {
6563 rustc_version,
6564 cargo_lock_hash: compute_artifact_hash("# lock\n"),
6565 target_triple: format!(
6566 "{}-unknown-{}",
6567 std::env::consts::ARCH,
6568 std::env::consts::OS
6569 ),
6570 os: std::env::consts::OS.into(),
6571 },
6572 spec_id: None,
6573 limit: 1,
6574 }
6575 }
6576
    /// Assembles an `EvoKernel` over a fresh temp workspace and a local
    /// process sandbox, using the provided validator and store, configured
    /// with a governor that promotes after a single success, the lightweight
    /// validation plan, and the base sandbox policy.
    fn build_test_evo_with_store(
        name: &str,
        run_id: &str,
        validator: Arc<dyn Validator>,
        store: Arc<dyn EvolutionStore>,
    ) -> EvoKernel<TestState> {
        let workspace = temp_workspace(name);
        let sandbox: Arc<dyn Sandbox> = Arc::new(oris_sandbox::LocalProcessSandbox::new(
            run_id,
            &workspace,
            std::env::temp_dir(),
        ));
        EvoKernel::new(test_kernel(), sandbox, validator, store)
            .with_governor(Arc::new(DefaultGovernor::new(
                oris_governor::GovernorConfig {
                    promote_after_successes: 1,
                    ..Default::default()
                },
            )))
            .with_validation_plan(lightweight_plan())
            .with_sandbox_policy(base_sandbox_policy())
    }
6599
    /// Builds an `EvoKernel` backed by a fresh JSONL evolution store under
    /// the temp dir (wiping any store left over from a previous run of the
    /// same test name) and returns the store handle alongside so tests can
    /// scan the raw event log.
    fn build_test_evo(
        name: &str,
        run_id: &str,
        validator: Arc<dyn Validator>,
    ) -> (EvoKernel<TestState>, Arc<dyn EvolutionStore>) {
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-{name}-store-{}",
            std::process::id()
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let evo = build_test_evo_with_store(name, run_id, validator, store.clone());
        (evo, store)
    }
6617
    /// Builds a publish envelope whose capsule environment fingerprint
    /// matches the local toolchain (via `replay_input`), so replay on this
    /// host can match it.
    fn remote_publish_envelope(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        signal: &str,
        file_name: &str,
        line: &str,
    ) -> EvolutionEnvelope {
        remote_publish_envelope_with_env(
            sender_id,
            run_id,
            gene_id,
            capsule_id,
            mutation_id,
            signal,
            file_name,
            line,
            replay_input(signal).env,
        )
    }
6640
6641 fn remote_publish_envelope_with_env(
6642 sender_id: &str,
6643 run_id: &str,
6644 gene_id: &str,
6645 capsule_id: &str,
6646 mutation_id: &str,
6647 signal: &str,
6648 file_name: &str,
6649 line: &str,
6650 env: EnvFingerprint,
6651 ) -> EvolutionEnvelope {
6652 let mutation = prepare_mutation(
6653 MutationIntent {
6654 id: mutation_id.into(),
6655 intent: format!("add {file_name}"),
6656 target: MutationTarget::Paths {
6657 allow: vec![file_name.into()],
6658 },
6659 expected_effect: "replay should still validate".into(),
6660 risk: RiskLevel::Low,
6661 signals: vec![signal.into()],
6662 spec_id: None,
6663 },
6664 format!(
6665 "\
6666diff --git a/{file_name} b/{file_name}
6667new file mode 100644
6668index 0000000..1111111
6669--- /dev/null
6670+++ b/{file_name}
6671@@ -0,0 +1 @@
6672+{line}
6673"
6674 ),
6675 Some("HEAD".into()),
6676 );
6677 let gene = Gene {
6678 id: gene_id.into(),
6679 signals: vec![signal.into()],
6680 strategy: vec![file_name.into()],
6681 validation: vec!["test".into()],
6682 state: AssetState::Promoted,
6683 };
6684 let capsule = Capsule {
6685 id: capsule_id.into(),
6686 gene_id: gene_id.into(),
6687 mutation_id: mutation_id.into(),
6688 run_id: run_id.into(),
6689 diff_hash: mutation.artifact.content_hash.clone(),
6690 confidence: 0.9,
6691 env,
6692 outcome: Outcome {
6693 success: true,
6694 validation_profile: "test".into(),
6695 validation_duration_ms: 1,
6696 changed_files: vec![file_name.into()],
6697 validator_hash: "validator-hash".into(),
6698 lines_changed: 1,
6699 replay_verified: false,
6700 },
6701 state: AssetState::Promoted,
6702 };
6703 EvolutionEnvelope::publish(
6704 sender_id,
6705 vec![
6706 NetworkAsset::EvolutionEvent {
6707 event: EvolutionEvent::MutationDeclared { mutation },
6708 },
6709 NetworkAsset::Gene { gene: gene.clone() },
6710 NetworkAsset::Capsule {
6711 capsule: capsule.clone(),
6712 },
6713 NetworkAsset::EvolutionEvent {
6714 event: EvolutionEvent::CapsuleReleased {
6715 capsule_id: capsule.id.clone(),
6716 state: AssetState::Promoted,
6717 },
6718 },
6719 ],
6720 )
6721 }
6722
    /// Canonical remote-publish fixture: builds a declared mutation, a
    /// promoted gene, and a promoted capsule (confidence 0.9) over a one-line
    /// file diff, then wraps them — plus a `CapsuleReleased` event — in a
    /// publish envelope from `sender_id`. Mutation and gene signal lists are
    /// supplied independently so tests can create mismatches.
    fn remote_publish_envelope_with_signals(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        mutation_signals: Vec<String>,
        gene_signals: Vec<String>,
        file_name: &str,
        line: &str,
        env: EnvFingerprint,
    ) -> EvolutionEnvelope {
        let mutation = prepare_mutation(
            MutationIntent {
                id: mutation_id.into(),
                intent: format!("add {file_name}"),
                target: MutationTarget::Paths {
                    allow: vec![file_name.into()],
                },
                expected_effect: "replay should still validate".into(),
                risk: RiskLevel::Low,
                signals: mutation_signals,
                spec_id: None,
            },
            format!(
                "\
diff --git a/{file_name} b/{file_name}
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/{file_name}
@@ -0,0 +1 @@
+{line}
"
            ),
            Some("HEAD".into()),
        );
        let gene = Gene {
            id: gene_id.into(),
            signals: gene_signals,
            strategy: vec![file_name.into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        };
        // The capsule's diff hash is tied to the prepared mutation artifact
        // so replay can verify the payload.
        let capsule = Capsule {
            id: capsule_id.into(),
            gene_id: gene_id.into(),
            mutation_id: mutation_id.into(),
            run_id: run_id.into(),
            diff_hash: mutation.artifact.content_hash.clone(),
            confidence: 0.9,
            env,
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec![file_name.into()],
                validator_hash: "validator-hash".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        };
        EvolutionEnvelope::publish(
            sender_id,
            vec![
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared { mutation },
                },
                NetworkAsset::Gene { gene: gene.clone() },
                NetworkAsset::Capsule {
                    capsule: capsule.clone(),
                },
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::CapsuleReleased {
                        capsule_id: capsule.id.clone(),
                        state: AssetState::Promoted,
                    },
                },
            ],
        )
    }
6805
    /// Test validator that unconditionally reports the configured `success`
    /// flag, regardless of the sandbox receipt contents.
    struct FixedValidator {
        success: bool,
    }
6809
6810 #[async_trait]
6811 impl Validator for FixedValidator {
6812 async fn run(
6813 &self,
6814 _receipt: &SandboxReceipt,
6815 plan: &ValidationPlan,
6816 ) -> Result<ValidationReport, ValidationError> {
6817 Ok(ValidationReport {
6818 success: self.success,
6819 duration_ms: 1,
6820 stages: Vec::new(),
6821 logs: if self.success {
6822 format!("{} ok", plan.profile)
6823 } else {
6824 format!("{} failed", plan.profile)
6825 },
6826 })
6827 }
6828 }
6829
    /// Evolution store wrapper that delegates to a real JSONL store but fails
    /// the Nth `append_event` call (1-based) with an injected I/O error.
    struct FailOnAppendStore {
        inner: JsonlEvolutionStore,
        // 1-based index of the append call that should fail.
        fail_on_call: usize,
        // Number of append calls observed so far.
        call_count: Mutex<usize>,
    }
6835
    impl FailOnAppendStore {
        /// Creates a store rooted at `root_dir` that will fail the
        /// `fail_on_call`-th append (1-based).
        fn new(root_dir: std::path::PathBuf, fail_on_call: usize) -> Self {
            Self {
                inner: JsonlEvolutionStore::new(root_dir),
                fail_on_call,
                call_count: Mutex::new(0),
            }
        }
    }
6845
    impl EvolutionStore for FailOnAppendStore {
        /// Counts append calls and injects an I/O failure on the configured
        /// call number; all other calls forward to the inner JSONL store.
        fn append_event(&self, event: EvolutionEvent) -> Result<u64, EvolutionError> {
            let mut call_count = self
                .call_count
                .lock()
                // A poisoned lock is surfaced as a store I/O error rather than
                // panicking the test thread.
                .map_err(|_| EvolutionError::Io("test store lock poisoned".into()))?;
            *call_count += 1;
            if *call_count == self.fail_on_call {
                return Err(EvolutionError::Io("injected append failure".into()));
            }
            self.inner.append_event(event)
        }

        // Reads pass straight through to the inner store.
        fn scan(&self, from_seq: u64) -> Result<Vec<StoredEvolutionEvent>, EvolutionError> {
            self.inner.scan(from_seq)
        }

        fn rebuild_projection(&self) -> Result<EvolutionProjection, EvolutionError> {
            self.inner.rebuild_projection()
        }
    }
6867
6868 #[test]
6869 fn coordination_planner_to_coder_handoff_is_deterministic() {
6870 let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
6871 root_goal: "ship feature".into(),
6872 primitive: CoordinationPrimitive::Sequential,
6873 tasks: vec![
6874 CoordinationTask {
6875 id: "planner".into(),
6876 role: AgentRole::Planner,
6877 description: "split the work".into(),
6878 depends_on: Vec::new(),
6879 },
6880 CoordinationTask {
6881 id: "coder".into(),
6882 role: AgentRole::Coder,
6883 description: "implement the patch".into(),
6884 depends_on: vec!["planner".into()],
6885 },
6886 ],
6887 timeout_ms: 5_000,
6888 max_retries: 0,
6889 });
6890
6891 assert_eq!(result.completed_tasks, vec!["planner", "coder"]);
6892 assert!(result.failed_tasks.is_empty());
6893 assert!(result.messages.iter().any(|message| {
6894 message.from_role == AgentRole::Planner
6895 && message.to_role == AgentRole::Coder
6896 && message.task_id == "coder"
6897 }));
6898 }
6899
6900 #[test]
6901 fn coordination_repair_runs_only_after_coder_failure() {
6902 let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
6903 root_goal: "fix broken implementation".into(),
6904 primitive: CoordinationPrimitive::Sequential,
6905 tasks: vec![
6906 CoordinationTask {
6907 id: "coder".into(),
6908 role: AgentRole::Coder,
6909 description: "force-fail initial implementation".into(),
6910 depends_on: Vec::new(),
6911 },
6912 CoordinationTask {
6913 id: "repair".into(),
6914 role: AgentRole::Repair,
6915 description: "patch the failed implementation".into(),
6916 depends_on: vec!["coder".into()],
6917 },
6918 ],
6919 timeout_ms: 5_000,
6920 max_retries: 0,
6921 });
6922
6923 assert_eq!(result.completed_tasks, vec!["repair"]);
6924 assert_eq!(result.failed_tasks, vec!["coder"]);
6925 assert!(result.messages.iter().any(|message| {
6926 message.from_role == AgentRole::Coder
6927 && message.to_role == AgentRole::Repair
6928 && message.task_id == "repair"
6929 }));
6930 }
6931
6932 #[test]
6933 fn coordination_optimizer_runs_after_successful_implementation_step() {
6934 let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
6935 root_goal: "ship optimized patch".into(),
6936 primitive: CoordinationPrimitive::Sequential,
6937 tasks: vec![
6938 CoordinationTask {
6939 id: "coder".into(),
6940 role: AgentRole::Coder,
6941 description: "implement a working patch".into(),
6942 depends_on: Vec::new(),
6943 },
6944 CoordinationTask {
6945 id: "optimizer".into(),
6946 role: AgentRole::Optimizer,
6947 description: "tighten the implementation".into(),
6948 depends_on: vec!["coder".into()],
6949 },
6950 ],
6951 timeout_ms: 5_000,
6952 max_retries: 0,
6953 });
6954
6955 assert_eq!(result.completed_tasks, vec!["coder", "optimizer"]);
6956 assert!(result.failed_tasks.is_empty());
6957 }
6958
6959 #[test]
6960 fn coordination_parallel_waves_preserve_sorted_merge_order() {
6961 let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
6962 root_goal: "parallelize safe tasks".into(),
6963 primitive: CoordinationPrimitive::Parallel,
6964 tasks: vec![
6965 CoordinationTask {
6966 id: "z-task".into(),
6967 role: AgentRole::Planner,
6968 description: "analyze z".into(),
6969 depends_on: Vec::new(),
6970 },
6971 CoordinationTask {
6972 id: "a-task".into(),
6973 role: AgentRole::Coder,
6974 description: "implement a".into(),
6975 depends_on: Vec::new(),
6976 },
6977 CoordinationTask {
6978 id: "mid-task".into(),
6979 role: AgentRole::Optimizer,
6980 description: "polish after both".into(),
6981 depends_on: vec!["z-task".into(), "a-task".into()],
6982 },
6983 ],
6984 timeout_ms: 5_000,
6985 max_retries: 0,
6986 });
6987
6988 assert_eq!(result.completed_tasks, vec!["a-task", "z-task", "mid-task"]);
6989 assert!(result.failed_tasks.is_empty());
6990 }
6991
6992 #[test]
6993 fn coordination_retries_stop_at_max_retries() {
6994 let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
6995 root_goal: "retry then stop".into(),
6996 primitive: CoordinationPrimitive::Sequential,
6997 tasks: vec![CoordinationTask {
6998 id: "coder".into(),
6999 role: AgentRole::Coder,
7000 description: "force-fail this task".into(),
7001 depends_on: Vec::new(),
7002 }],
7003 timeout_ms: 5_000,
7004 max_retries: 1,
7005 });
7006
7007 assert!(result.completed_tasks.is_empty());
7008 assert_eq!(result.failed_tasks, vec!["coder"]);
7009 assert_eq!(
7010 result
7011 .messages
7012 .iter()
7013 .filter(|message| message.task_id == "coder" && message.content.contains("failed"))
7014 .count(),
7015 2
7016 );
7017 }
7018
7019 #[test]
7020 fn coordination_conditional_mode_skips_downstream_tasks_on_failure() {
7021 let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
7022 root_goal: "skip blocked follow-up work".into(),
7023 primitive: CoordinationPrimitive::Conditional,
7024 tasks: vec![
7025 CoordinationTask {
7026 id: "coder".into(),
7027 role: AgentRole::Coder,
7028 description: "force-fail the implementation".into(),
7029 depends_on: Vec::new(),
7030 },
7031 CoordinationTask {
7032 id: "optimizer".into(),
7033 role: AgentRole::Optimizer,
7034 description: "only optimize a successful implementation".into(),
7035 depends_on: vec!["coder".into()],
7036 },
7037 ],
7038 timeout_ms: 5_000,
7039 max_retries: 0,
7040 });
7041
7042 assert!(result.completed_tasks.is_empty());
7043 assert_eq!(result.failed_tasks, vec!["coder"]);
7044 assert!(result.messages.iter().any(|message| {
7045 message.task_id == "optimizer"
7046 && message
7047 .content
7048 .contains("skipped due to failed dependency chain")
7049 }));
7050 assert!(!result
7051 .failed_tasks
7052 .iter()
7053 .any(|task_id| task_id == "optimizer"));
7054 }
7055
    /// Running a single-command validation plan should yield exactly one
    /// stage report in the aggregated result.
    #[tokio::test]
    async fn command_validator_aggregates_stage_reports() {
        let workspace = temp_workspace("validator");
        // Hand-built receipt: the validator only needs a workdir to run in.
        let receipt = SandboxReceipt {
            mutation_id: "m".into(),
            workdir: workspace,
            applied: true,
            changed_files: Vec::new(),
            patch_hash: "hash".into(),
            stdout_log: std::env::temp_dir().join("stdout.log"),
            stderr_log: std::env::temp_dir().join("stderr.log"),
        };
        let validator = CommandValidator::new(SandboxPolicy {
            allowed_programs: vec!["git".into()],
            max_duration_ms: 1_000,
            max_output_bytes: 1024,
            denied_env_prefixes: Vec::new(),
        });
        let report = validator
            .run(
                &receipt,
                &ValidationPlan {
                    profile: "test".into(),
                    stages: vec![ValidationStage::Command {
                        program: "git".into(),
                        args: vec!["--version".into()],
                        timeout_ms: 1_000,
                    }],
                },
            )
            .await
            .unwrap();
        assert_eq!(report.stages.len(), 1);
    }
7090
    /// Capturing a successful mutation should append a `CapsuleCommitted`
    /// event to the store and return a capsule with a non-empty id.
    #[tokio::test]
    async fn capture_successful_mutation_appends_capsule() {
        let (evo, store) = build_test_evo("capture", "run-1", command_validator());
        let capsule = evo
            .capture_successful_mutation(&"run-1".into(), sample_mutation())
            .await
            .unwrap();
        let events = store.scan(1).unwrap();
        assert!(events
            .iter()
            .any(|stored| matches!(stored.event, EvolutionEvent::CapsuleCommitted { .. })));
        assert!(!capsule.id.is_empty());
    }
7104
    /// A capsule captured under one run should be reusable from a different
    /// replay run: the decision reports a hit with detect/select evidence,
    /// and the store logs a `CapsuleReused` event carrying both run ids.
    #[tokio::test]
    async fn replay_hit_records_capsule_reused() {
        let (evo, store) = build_test_evo("replay", "run-2", command_validator());
        let capsule = evo
            .capture_successful_mutation(&"run-2".into(), sample_mutation())
            .await
            .unwrap();
        let replay_run_id = "run-replay".to_string();
        let decision = evo
            .replay_or_fallback_for_run(&replay_run_id, replay_input("missing readme"))
            .await
            .unwrap();
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some(capsule.id));
        // Detection evidence must identify the task class and matched
        // signals with no mismatches.
        assert!(!decision.detect_evidence.task_class_id.is_empty());
        assert!(!decision.detect_evidence.matched_signals.is_empty());
        assert!(decision.detect_evidence.mismatch_reasons.is_empty());
        // Selection evidence must have considered candidates (not an exact
        // lookup) and agree with the chosen capsule.
        assert!(!decision.select_evidence.candidates.is_empty());
        assert!(!decision.select_evidence.exact_match_lookup);
        assert_eq!(
            decision.select_evidence.selected_capsule_id.as_deref(),
            decision.capsule_id.as_deref()
        );
        // The reuse event records the capturing run id plus the replay run id.
        assert!(store.scan(1).unwrap().iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::CapsuleReused {
                run_id,
                replay_run_id: Some(current_replay_run_id),
                ..
            } if run_id == "run-2" && current_replay_run_id == &replay_run_id
        )));
    }
7137
    /// Using the lower-level `StoreReplayExecutor` API directly (no replay
    /// run id supplied) must keep the original capture run id on the logged
    /// `CapsuleReused` event, with `replay_run_id` left as `None`.
    #[tokio::test]
    async fn legacy_replay_executor_api_preserves_original_capsule_run_id() {
        let capture_run_id = "run-legacy-capture".to_string();
        let (evo, store) = build_test_evo("replay-legacy", &capture_run_id, command_validator());
        let capsule = evo
            .capture_successful_mutation(&capture_run_id, sample_mutation())
            .await
            .unwrap();
        // Reassemble the executor from the kernel's parts, mirroring the
        // legacy call path.
        let executor = StoreReplayExecutor {
            sandbox: evo.sandbox.clone(),
            validator: evo.validator.clone(),
            store: evo.store.clone(),
            selector: evo.selector.clone(),
            governor: evo.governor.clone(),
            economics: Some(evo.economics.clone()),
            remote_publishers: Some(evo.remote_publishers.clone()),
            stake_policy: evo.stake_policy.clone(),
        };

        let decision = executor
            .try_replay(
                &replay_input("missing readme"),
                &evo.sandbox_policy,
                &evo.validation_plan,
            )
            .await
            .unwrap();

        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some(capsule.id));
        assert!(store.scan(1).unwrap().iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::CapsuleReused {
                run_id,
                replay_run_id: None,
                ..
            } if run_id == &capture_run_id
        )));
    }
7177
7178 #[tokio::test]
7179 async fn metrics_snapshot_tracks_replay_promotion_and_revocation_signals() {
7180 let (evo, _) = build_test_evo("metrics", "run-metrics", command_validator());
7181 let capsule = evo
7182 .capture_successful_mutation(&"run-metrics".into(), sample_mutation())
7183 .await
7184 .unwrap();
7185 let decision = evo
7186 .replay_or_fallback(replay_input("missing readme"))
7187 .await
7188 .unwrap();
7189 assert!(decision.used_capsule);
7190
7191 evo.revoke_assets(&RevokeNotice {
7192 sender_id: "node-metrics".into(),
7193 asset_ids: vec![capsule.id.clone()],
7194 reason: "manual test revoke".into(),
7195 })
7196 .unwrap();
7197
7198 let snapshot = evo.metrics_snapshot().unwrap();
7199 assert_eq!(snapshot.replay_attempts_total, 1);
7200 assert_eq!(snapshot.replay_success_total, 1);
7201 assert_eq!(snapshot.replay_success_rate, 1.0);
7202 assert_eq!(snapshot.confidence_revalidations_total, 0);
7203 assert_eq!(snapshot.replay_reasoning_avoided_total, 1);
7204 assert_eq!(
7205 snapshot.reasoning_avoided_tokens_total,
7206 decision.economics_evidence.reasoning_avoided_tokens
7207 );
7208 assert_eq!(snapshot.replay_fallback_cost_total, 0);
7209 assert_eq!(snapshot.replay_roi, 1.0);
7210 assert_eq!(snapshot.replay_task_classes.len(), 1);
7211 assert_eq!(snapshot.replay_task_classes[0].replay_success_total, 1);
7212 assert_eq!(snapshot.replay_task_classes[0].replay_failure_total, 0);
7213 assert_eq!(
7214 snapshot.replay_task_classes[0].reasoning_steps_avoided_total,
7215 1
7216 );
7217 assert_eq!(
7218 snapshot.replay_task_classes[0].replay_fallback_cost_total,
7219 0
7220 );
7221 assert_eq!(snapshot.replay_task_classes[0].replay_roi, 1.0);
7222 assert!(snapshot.replay_sources.is_empty());
7223 assert_eq!(snapshot.confidence_revalidations_total, 0);
7224 assert_eq!(snapshot.mutation_declared_total, 1);
7225 assert_eq!(snapshot.promoted_mutations_total, 1);
7226 assert_eq!(snapshot.promotion_ratio, 1.0);
7227 assert_eq!(snapshot.gene_revocations_total, 1);
7228 assert_eq!(snapshot.mutation_velocity_last_hour, 1);
7229 assert_eq!(snapshot.revoke_frequency_last_hour, 1);
7230 assert_eq!(snapshot.promoted_genes, 0);
7231 assert_eq!(snapshot.promoted_capsules, 0);
7232
7233 let rendered = evo.render_metrics_prometheus().unwrap();
7234 assert!(rendered.contains("oris_evolution_replay_reasoning_avoided_total 1"));
7235 assert!(rendered.contains("oris_evolution_reasoning_avoided_tokens_total"));
7236 assert!(rendered.contains("oris_evolution_replay_fallback_cost_total"));
7237 assert!(rendered.contains("oris_evolution_replay_roi 1.000000"));
7238 assert!(rendered.contains("oris_evolution_replay_utilization_by_task_class_total"));
7239 assert!(rendered.contains("oris_evolution_replay_reasoning_avoided_by_task_class_total"));
7240 assert!(rendered.contains("oris_evolution_replay_success_rate 1.000000"));
7241 assert!(rendered.contains("oris_evolution_confidence_revalidations_total 0"));
7242 assert!(rendered.contains("oris_evolution_promotion_ratio 1.000000"));
7243 assert!(rendered.contains("oris_evolution_revoke_frequency_last_hour 1"));
7244 assert!(rendered.contains("oris_evolution_mutation_velocity_last_hour 1"));
7245 assert!(rendered.contains("oris_evolution_health 1"));
7246 }
7247
    /// Legacy event history (a `CapsuleReused` and a gene-linked
    /// `ValidationFailed` appended directly to the store) must yield the same
    /// replay totals in the release-gate summary as in the metrics snapshot,
    /// including the per-task-class rollup.
    #[tokio::test]
    async fn replay_roi_release_gate_summary_matches_metrics_snapshot_for_legacy_replay_history() {
        let (evo, _) = build_test_evo("roi-legacy", "run-roi-legacy", command_validator());
        let capsule = evo
            .capture_successful_mutation(&"run-roi-legacy".into(), sample_mutation())
            .await
            .unwrap();

        // Simulate legacy history: one replay success appended by hand ...
        evo.store
            .append_event(EvolutionEvent::CapsuleReused {
                capsule_id: capsule.id.clone(),
                gene_id: capsule.gene_id.clone(),
                run_id: capsule.run_id.clone(),
                replay_run_id: Some("run-roi-legacy-replay".into()),
            })
            .unwrap();
        // ... and one replay failure tied to the same gene.
        evo.store
            .append_event(EvolutionEvent::ValidationFailed {
                mutation_id: "legacy-replay-failure".into(),
                report: ValidationSnapshot {
                    success: false,
                    profile: "test".into(),
                    duration_ms: 1,
                    summary: "legacy replay validation failed".into(),
                },
                gene_id: Some(capsule.gene_id.clone()),
            })
            .unwrap();

        let metrics = evo.metrics_snapshot().unwrap();
        // Window of 0 seconds imposes no time cutoff on the summary.
        let summary = evo.replay_roi_release_gate_summary(0).unwrap();
        let task_class = &metrics.replay_task_classes[0];

        assert_eq!(metrics.replay_attempts_total, 2);
        assert_eq!(metrics.replay_success_total, 1);
        // Summary totals must mirror the snapshot, with failures derived as
        // attempts minus successes.
        assert_eq!(summary.replay_attempts_total, metrics.replay_attempts_total);
        assert_eq!(summary.replay_success_total, metrics.replay_success_total);
        assert_eq!(
            summary.replay_failure_total,
            metrics.replay_attempts_total - metrics.replay_success_total
        );
        assert_eq!(
            summary.reasoning_avoided_tokens_total,
            metrics.reasoning_avoided_tokens_total
        );
        assert_eq!(
            summary.replay_fallback_cost_total,
            metrics.replay_fallback_cost_total
        );
        assert_eq!(summary.replay_roi, metrics.replay_roi);
        // Per-task-class rollups must agree field by field.
        assert_eq!(summary.replay_task_classes.len(), 1);
        assert_eq!(
            summary.replay_task_classes[0].task_class_id,
            task_class.task_class_id
        );
        assert_eq!(
            summary.replay_task_classes[0].replay_success_total,
            task_class.replay_success_total
        );
        assert_eq!(
            summary.replay_task_classes[0].replay_failure_total,
            task_class.replay_failure_total
        );
        assert_eq!(
            summary.replay_task_classes[0].reasoning_avoided_tokens_total,
            task_class.reasoning_avoided_tokens_total
        );
        assert_eq!(
            summary.replay_task_classes[0].replay_fallback_cost_total,
            task_class.replay_fallback_cost_total
        );
    }
7320
    /// The release-gate summary must aggregate replay outcomes both by task
    /// class and by remote source sender, and the JSON rendering must carry
    /// the same totals.
    #[tokio::test]
    async fn replay_roi_release_gate_summary_aggregates_task_class_and_remote_source() {
        let (evo, _) = build_test_evo("roi-summary", "run-roi-summary", command_validator());
        // Import a remote capsule so a later hit is attributed to "node-roi".
        let envelope = remote_publish_envelope(
            "node-roi",
            "run-remote-roi",
            "gene-roi",
            "capsule-roi",
            "mutation-roi",
            "roi-signal",
            "ROI.md",
            "# roi",
        );
        evo.import_remote_envelope(&envelope).unwrap();

        // A query with no signal overlap must miss and fall back to planning.
        let miss = evo
            .replay_or_fallback(replay_input("entropy-hash-12345-no-overlap"))
            .await
            .unwrap();
        assert!(!miss.used_capsule);
        assert!(miss.fallback_to_planner);
        assert!(miss.select_evidence.candidates.is_empty());
        assert!(miss
            .detect_evidence
            .mismatch_reasons
            .iter()
            .any(|reason| reason == "no_candidate_after_select"));

        // The matching signal must hit the imported capsule.
        let hit = evo
            .replay_or_fallback(replay_input("roi-signal"))
            .await
            .unwrap();
        assert!(hit.used_capsule);
        assert!(!hit.select_evidence.candidates.is_empty());
        assert_eq!(
            hit.select_evidence.selected_capsule_id.as_deref(),
            hit.capsule_id.as_deref()
        );

        // One miss + one hit within the one-hour window.
        let summary = evo.replay_roi_release_gate_summary(60 * 60).unwrap();
        assert_eq!(summary.replay_attempts_total, 2);
        assert_eq!(summary.replay_success_total, 1);
        assert_eq!(summary.replay_failure_total, 1);
        assert!(summary.reasoning_avoided_tokens_total > 0);
        assert!(summary.replay_fallback_cost_total > 0);
        assert!(summary
            .replay_task_classes
            .iter()
            .any(|entry| { entry.replay_success_total == 1 && entry.replay_failure_total == 0 }));
        // The hit must be credited to the remote sender that published it.
        assert!(summary.replay_sources.iter().any(|source| {
            source.source_sender_id == "node-roi" && source.replay_success_total == 1
        }));

        let rendered = evo
            .render_replay_roi_release_gate_summary_json(60 * 60)
            .unwrap();
        assert!(rendered.contains("\"replay_attempts_total\": 2"));
        assert!(rendered.contains("\"source_sender_id\": \"node-roi\""));
    }
7380
    /// With only two replay attempts on record, the release-gate contract
    /// must echo the summary metrics in its input section and fail closed on
    /// the default thresholds — too few attempts, hit rate too low,
    /// false-replay rate too high — citing the summary as evidence.
    #[tokio::test]
    async fn replay_roi_release_gate_summary_contract_exposes_core_metrics_and_fail_closed_defaults(
    ) {
        let (evo, _) = build_test_evo("roi-contract", "run-roi-contract", command_validator());
        let envelope = remote_publish_envelope(
            "node-contract",
            "run-remote-contract",
            "gene-contract",
            "capsule-contract",
            "mutation-contract",
            "contract-signal",
            "CONTRACT.md",
            "# contract",
        );
        evo.import_remote_envelope(&envelope).unwrap();

        // One miss and one hit to populate replay history.
        let miss = evo
            .replay_or_fallback(replay_input("entropy-hash-contract-no-overlap"))
            .await
            .unwrap();
        assert!(!miss.used_capsule);
        assert!(miss.fallback_to_planner);

        let hit = evo
            .replay_or_fallback(replay_input("contract-signal"))
            .await
            .unwrap();
        assert!(hit.used_capsule);

        let summary = evo.replay_roi_release_gate_summary(60 * 60).unwrap();
        let contract = evo
            .replay_roi_release_gate_contract(60 * 60, ReplayRoiReleaseGateThresholds::default())
            .unwrap();

        // Contract input must mirror the summary's core metrics.
        assert_eq!(contract.input.replay_attempts_total, 2);
        assert_eq!(contract.input.replay_success_total, 1);
        assert_eq!(contract.input.replay_failure_total, 1);
        assert_eq!(
            contract.input.reasoning_avoided_tokens,
            summary.reasoning_avoided_tokens_total
        );
        assert_eq!(
            contract.input.replay_fallback_cost_total,
            summary.replay_fallback_cost_total
        );
        // 1 hit / 2 attempts = 0.5 for both hit and false-replay rates.
        assert!((contract.input.replay_hit_rate - 0.5).abs() < f64::EPSILON);
        assert!((contract.input.false_replay_rate - 0.5).abs() < f64::EPSILON);
        assert!((contract.input.replay_roi - summary.replay_roi).abs() < f64::EPSILON);
        assert!(contract.input.replay_safety);
        assert_eq!(
            contract.input.aggregation_dimensions,
            REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
                .iter()
                .map(|dimension| (*dimension).to_string())
                .collect::<Vec<_>>()
        );
        assert_eq!(
            contract.input.thresholds,
            ReplayRoiReleaseGateThresholds::default()
        );
        assert_eq!(
            contract.input.fail_closed_policy,
            ReplayRoiReleaseGateFailClosedPolicy::default()
        );
        // Defaults demand more evidence than two attempts: fail closed with
        // each violated threshold named.
        assert_eq!(
            contract.output.status,
            ReplayRoiReleaseGateStatus::FailClosed
        );
        assert!(contract
            .output
            .failed_checks
            .iter()
            .any(|check| check == "min_replay_attempts_below_threshold"));
        assert!(contract
            .output
            .failed_checks
            .iter()
            .any(|check| check == "replay_hit_rate_below_threshold"));
        assert!(contract
            .output
            .failed_checks
            .iter()
            .any(|check| check == "false_replay_rate_above_threshold"));
        assert!(contract
            .output
            .evidence_refs
            .iter()
            .any(|evidence| evidence == "replay_roi_release_gate_summary"));
        assert!(contract.output.summary.contains("release gate fail_closed"));
    }
7471
7472 #[tokio::test]
7473 async fn replay_roi_release_gate_summary_contract_accepts_custom_thresholds_and_json() {
7474 let (evo, _) = build_test_evo(
7475 "roi-contract-thresholds",
7476 "run-roi-contract-thresholds",
7477 command_validator(),
7478 );
7479 let thresholds = ReplayRoiReleaseGateThresholds {
7480 min_replay_attempts: 8,
7481 min_replay_hit_rate: 0.75,
7482 max_false_replay_rate: 0.10,
7483 min_reasoning_avoided_tokens: 600,
7484 min_replay_roi: 0.30,
7485 require_replay_safety: true,
7486 };
7487 let contract = evo
7488 .replay_roi_release_gate_contract(60 * 60, thresholds.clone())
7489 .unwrap();
7490 assert_eq!(contract.input.thresholds, thresholds.clone());
7491 assert_eq!(contract.input.replay_attempts_total, 0);
7492 assert_eq!(contract.input.replay_hit_rate, 0.0);
7493 assert_eq!(contract.input.false_replay_rate, 0.0);
7494 assert!(!contract.input.replay_safety_signal.has_replay_activity);
7495 assert!(!contract.input.replay_safety);
7496 assert_eq!(
7497 contract.output.status,
7498 ReplayRoiReleaseGateStatus::Indeterminate
7499 );
7500 assert!(contract
7501 .output
7502 .failed_checks
7503 .iter()
7504 .any(|check| check == "missing_replay_attempts"));
7505 assert!(contract
7506 .output
7507 .summary
7508 .contains("indeterminate (fail-closed)"));
7509
7510 let rendered = evo
7511 .render_replay_roi_release_gate_contract_json(60 * 60, thresholds)
7512 .unwrap();
7513 assert!(rendered.contains("\"min_replay_attempts\": 8"));
7514 assert!(rendered.contains("\"min_replay_hit_rate\": 0.75"));
7515 assert!(rendered.contains("\"status\": \"indeterminate\""));
7516 }
7517
7518 #[tokio::test]
7519 async fn replay_roi_release_gate_summary_window_boundary_filters_old_events() {
7520 let (evo, _) = build_test_evo("roi-window", "run-roi-window", command_validator());
7521 let envelope = remote_publish_envelope(
7522 "node-window",
7523 "run-remote-window",
7524 "gene-window",
7525 "capsule-window",
7526 "mutation-window",
7527 "window-signal",
7528 "WINDOW.md",
7529 "# window",
7530 );
7531 evo.import_remote_envelope(&envelope).unwrap();
7532
7533 let miss = evo
7534 .replay_or_fallback(replay_input("window-no-match-signal"))
7535 .await
7536 .unwrap();
7537 assert!(!miss.used_capsule);
7538 assert!(miss.fallback_to_planner);
7539
7540 let first_hit = evo
7541 .replay_or_fallback(replay_input("window-signal"))
7542 .await
7543 .unwrap();
7544 assert!(first_hit.used_capsule);
7545
7546 std::thread::sleep(std::time::Duration::from_secs(2));
7547
7548 let second_hit = evo
7549 .replay_or_fallback(replay_input("window-signal"))
7550 .await
7551 .unwrap();
7552 assert!(second_hit.used_capsule);
7553
7554 let narrow = evo.replay_roi_release_gate_summary(1).unwrap();
7555 assert_eq!(narrow.replay_attempts_total, 1);
7556 assert_eq!(narrow.replay_success_total, 1);
7557 assert_eq!(narrow.replay_failure_total, 0);
7558
7559 let all = evo.replay_roi_release_gate_summary(0).unwrap();
7560 assert_eq!(all.replay_attempts_total, 3);
7561 assert_eq!(all.replay_success_total, 2);
7562 assert_eq!(all.replay_failure_total, 1);
7563 }
7564
7565 fn fixed_release_gate_pass_fixture() -> ReplayRoiReleaseGateInputContract {
7566 ReplayRoiReleaseGateInputContract {
7567 generated_at: "2026-03-13T00:00:00Z".to_string(),
7568 window_seconds: 86_400,
7569 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7570 .iter()
7571 .map(|dimension| (*dimension).to_string())
7572 .collect(),
7573 replay_attempts_total: 4,
7574 replay_success_total: 3,
7575 replay_failure_total: 1,
7576 replay_hit_rate: 0.75,
7577 false_replay_rate: 0.25,
7578 reasoning_avoided_tokens: 480,
7579 replay_fallback_cost_total: 64,
7580 replay_roi: compute_replay_roi(480, 64),
7581 replay_safety: true,
7582 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7583 fail_closed_default: true,
7584 rollback_ready: true,
7585 audit_trail_complete: true,
7586 has_replay_activity: true,
7587 },
7588 thresholds: ReplayRoiReleaseGateThresholds::default(),
7589 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7590 }
7591 }
7592
7593 fn fixed_release_gate_fail_fixture() -> ReplayRoiReleaseGateInputContract {
7594 ReplayRoiReleaseGateInputContract {
7595 generated_at: "2026-03-13T00:00:00Z".to_string(),
7596 window_seconds: 86_400,
7597 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7598 .iter()
7599 .map(|dimension| (*dimension).to_string())
7600 .collect(),
7601 replay_attempts_total: 10,
7602 replay_success_total: 4,
7603 replay_failure_total: 6,
7604 replay_hit_rate: 0.4,
7605 false_replay_rate: 0.6,
7606 reasoning_avoided_tokens: 80,
7607 replay_fallback_cost_total: 400,
7608 replay_roi: compute_replay_roi(80, 400),
7609 replay_safety: false,
7610 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7611 fail_closed_default: true,
7612 rollback_ready: true,
7613 audit_trail_complete: true,
7614 has_replay_activity: true,
7615 },
7616 thresholds: ReplayRoiReleaseGateThresholds::default(),
7617 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7618 }
7619 }
7620
7621 fn fixed_release_gate_borderline_fixture() -> ReplayRoiReleaseGateInputContract {
7622 ReplayRoiReleaseGateInputContract {
7623 generated_at: "2026-03-13T00:00:00Z".to_string(),
7624 window_seconds: 3_600,
7625 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7626 .iter()
7627 .map(|dimension| (*dimension).to_string())
7628 .collect(),
7629 replay_attempts_total: 4,
7630 replay_success_total: 3,
7631 replay_failure_total: 1,
7632 replay_hit_rate: 0.75,
7633 false_replay_rate: 0.25,
7634 reasoning_avoided_tokens: 192,
7635 replay_fallback_cost_total: 173,
7636 replay_roi: 0.05,
7637 replay_safety: true,
7638 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7639 fail_closed_default: true,
7640 rollback_ready: true,
7641 audit_trail_complete: true,
7642 has_replay_activity: true,
7643 },
7644 thresholds: ReplayRoiReleaseGateThresholds {
7645 min_replay_attempts: 4,
7646 min_replay_hit_rate: 0.75,
7647 max_false_replay_rate: 0.25,
7648 min_reasoning_avoided_tokens: 192,
7649 min_replay_roi: 0.05,
7650 require_replay_safety: true,
7651 },
7652 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7653 }
7654 }
7655
7656 #[test]
7657 fn replay_roi_release_gate_summary_fixed_fixtures_cover_pass_fail_and_borderline() {
7658 let pass =
7659 evaluate_replay_roi_release_gate_contract_input(&fixed_release_gate_pass_fixture());
7660 let fail =
7661 evaluate_replay_roi_release_gate_contract_input(&fixed_release_gate_fail_fixture());
7662 let borderline = evaluate_replay_roi_release_gate_contract_input(
7663 &fixed_release_gate_borderline_fixture(),
7664 );
7665
7666 assert_eq!(pass.status, ReplayRoiReleaseGateStatus::Pass);
7667 assert!(pass.failed_checks.is_empty());
7668 assert_eq!(fail.status, ReplayRoiReleaseGateStatus::FailClosed);
7669 assert!(!fail.failed_checks.is_empty());
7670 assert_eq!(borderline.status, ReplayRoiReleaseGateStatus::Pass);
7671 assert!(borderline.failed_checks.is_empty());
7672 }
7673
    /// The machine-readable gate output must be deterministic: failed checks
    /// and evidence refs come back lexicographically sorted, and serializing
    /// the same output twice yields identical JSON that begins with the
    /// status field.
    #[test]
    fn replay_roi_release_gate_summary_machine_readable_output_is_stable_and_sorted() {
        let output =
            evaluate_replay_roi_release_gate_contract_input(&fixed_release_gate_fail_fixture());

        // Every violated default threshold, in sorted order.
        assert_eq!(
            output.failed_checks,
            vec![
                "false_replay_rate_above_threshold".to_string(),
                "reasoning_avoided_tokens_below_threshold".to_string(),
                "replay_hit_rate_below_threshold".to_string(),
                "replay_roi_below_threshold".to_string(),
                "replay_safety_required".to_string(),
            ]
        );
        // Evidence refs pair each failing metric with its threshold, plus the
        // summary source, timestamp, and window — also in sorted order.
        assert_eq!(
            output.evidence_refs,
            vec![
                "generated_at:2026-03-13T00:00:00Z".to_string(),
                "metric:false_replay_rate".to_string(),
                "metric:reasoning_avoided_tokens".to_string(),
                "metric:replay_hit_rate".to_string(),
                "metric:replay_roi".to_string(),
                "metric:replay_safety".to_string(),
                "replay_roi_release_gate_summary".to_string(),
                "threshold:max_false_replay_rate".to_string(),
                "threshold:min_reasoning_avoided_tokens".to_string(),
                "threshold:min_replay_hit_rate".to_string(),
                "threshold:min_replay_roi".to_string(),
                "threshold:require_replay_safety".to_string(),
                "window_seconds:86400".to_string(),
            ]
        );

        // Serialization is byte-stable and leads with the status field.
        let rendered = serde_json::to_string(&output).unwrap();
        assert!(rendered.starts_with("{\"status\":\"fail_closed\",\"failed_checks\":"));
        assert_eq!(rendered, serde_json::to_string(&output).unwrap());
    }
7712
7713 #[test]
7714 fn replay_roi_release_gate_summary_evaluator_passes_with_threshold_compliance() {
7715 let input = ReplayRoiReleaseGateInputContract {
7716 generated_at: Utc::now().to_rfc3339(),
7717 window_seconds: 86_400,
7718 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7719 .iter()
7720 .map(|dimension| (*dimension).to_string())
7721 .collect(),
7722 replay_attempts_total: 10,
7723 replay_success_total: 9,
7724 replay_failure_total: 1,
7725 replay_hit_rate: 0.9,
7726 false_replay_rate: 0.1,
7727 reasoning_avoided_tokens: 960,
7728 replay_fallback_cost_total: 64,
7729 replay_roi: compute_replay_roi(960, 64),
7730 replay_safety: true,
7731 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7732 fail_closed_default: true,
7733 rollback_ready: true,
7734 audit_trail_complete: true,
7735 has_replay_activity: true,
7736 },
7737 thresholds: ReplayRoiReleaseGateThresholds::default(),
7738 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7739 };
7740
7741 let output = evaluate_replay_roi_release_gate_contract_input(&input);
7742 assert_eq!(output.status, ReplayRoiReleaseGateStatus::Pass);
7743 assert!(output.failed_checks.is_empty());
7744 assert!(output.summary.contains("release gate pass"));
7745 }
7746
7747 #[test]
7748 fn replay_roi_release_gate_summary_evaluator_fail_closed_on_threshold_violations() {
7749 let input = ReplayRoiReleaseGateInputContract {
7750 generated_at: Utc::now().to_rfc3339(),
7751 window_seconds: 86_400,
7752 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7753 .iter()
7754 .map(|dimension| (*dimension).to_string())
7755 .collect(),
7756 replay_attempts_total: 10,
7757 replay_success_total: 4,
7758 replay_failure_total: 6,
7759 replay_hit_rate: 0.4,
7760 false_replay_rate: 0.6,
7761 reasoning_avoided_tokens: 80,
7762 replay_fallback_cost_total: 400,
7763 replay_roi: compute_replay_roi(80, 400),
7764 replay_safety: false,
7765 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7766 fail_closed_default: true,
7767 rollback_ready: true,
7768 audit_trail_complete: true,
7769 has_replay_activity: true,
7770 },
7771 thresholds: ReplayRoiReleaseGateThresholds::default(),
7772 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7773 };
7774
7775 let output = evaluate_replay_roi_release_gate_contract_input(&input);
7776 assert_eq!(output.status, ReplayRoiReleaseGateStatus::FailClosed);
7777 assert!(output
7778 .failed_checks
7779 .iter()
7780 .any(|check| check == "replay_hit_rate_below_threshold"));
7781 assert!(output
7782 .failed_checks
7783 .iter()
7784 .any(|check| check == "false_replay_rate_above_threshold"));
7785 assert!(output
7786 .failed_checks
7787 .iter()
7788 .any(|check| check == "replay_roi_below_threshold"));
7789 assert!(output.summary.contains("release gate fail_closed"));
7790 }
7791
    /// Inputs with no timestamp and zero attempts must be reported as
    /// `Indeterminate` — a fail-closed verdict with explicit missing-data
    /// checks — rather than silently passing.
    #[test]
    fn replay_roi_release_gate_summary_evaluator_marks_missing_data_indeterminate() {
        // Deliberately empty input: no timestamp, no attempts, no activity.
        let input = ReplayRoiReleaseGateInputContract {
            generated_at: String::new(),
            window_seconds: 86_400,
            aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
                .iter()
                .map(|dimension| (*dimension).to_string())
                .collect(),
            replay_attempts_total: 0,
            replay_success_total: 0,
            replay_failure_total: 0,
            replay_hit_rate: 0.0,
            false_replay_rate: 0.0,
            reasoning_avoided_tokens: 0,
            replay_fallback_cost_total: 0,
            replay_roi: 0.0,
            replay_safety: false,
            replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
                fail_closed_default: true,
                rollback_ready: true,
                audit_trail_complete: true,
                has_replay_activity: false,
            },
            thresholds: ReplayRoiReleaseGateThresholds::default(),
            fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
        };

        let output = evaluate_replay_roi_release_gate_contract_input(&input);
        assert_eq!(output.status, ReplayRoiReleaseGateStatus::Indeterminate);
        // Each missing prerequisite is reported as its own check.
        assert!(output
            .failed_checks
            .iter()
            .any(|check| check == "missing_generated_at"));
        assert!(output
            .failed_checks
            .iter()
            .any(|check| check == "missing_replay_attempts"));
        assert!(output
            .summary
            .contains("release gate indeterminate (fail-closed)"));
    }
7834
    /// A promoted gene whose last update is 48 hours old must have its
    /// decayed confidence drop below the replay floor and be flagged for
    /// revalidation together with its capsule.
    #[test]
    fn stale_replay_targets_require_confidence_revalidation() {
        let now = Utc::now();
        // Hand-built projection: one promoted gene/capsule pair whose
        // `last_updated_at` entry is 48h in the past.
        let projection = EvolutionProjection {
            genes: vec![Gene {
                id: "gene-stale".into(),
                signals: vec!["missing readme".into()],
                strategy: vec!["README.md".into()],
                validation: vec!["test".into()],
                state: AssetState::Promoted,
            }],
            capsules: vec![Capsule {
                id: "capsule-stale".into(),
                gene_id: "gene-stale".into(),
                mutation_id: "mutation-stale".into(),
                run_id: "run-stale".into(),
                diff_hash: "hash".into(),
                confidence: 0.8,
                env: replay_input("missing readme").env,
                outcome: Outcome {
                    success: true,
                    validation_profile: "test".into(),
                    validation_duration_ms: 1,
                    changed_files: vec!["README.md".into()],
                    validator_hash: "validator".into(),
                    lines_changed: 1,
                    replay_verified: false,
                },
                state: AssetState::Promoted,
            }],
            reuse_counts: BTreeMap::from([("gene-stale".into(), 1)]),
            attempt_counts: BTreeMap::from([("gene-stale".into(), 1)]),
            last_updated_at: BTreeMap::from([(
                "gene-stale".into(),
                (now - Duration::hours(48)).to_rfc3339(),
            )]),
            spec_ids_by_gene: BTreeMap::new(),
        };

        let targets = stale_replay_revalidation_targets(&projection, now);

        // Exactly one stale target, carrying its capsule ids.
        assert_eq!(targets.len(), 1);
        assert_eq!(targets[0].gene_id, "gene-stale");
        assert_eq!(targets[0].capsule_ids, vec!["capsule-stale".to_string()]);
        // 48h of decay must push the initial 0.8 confidence under the floor.
        assert!(targets[0].decayed_confidence < MIN_REPLAY_CONFIDENCE);
    }
7881
    /// When two remote capsules match the same signal, replay must select the
    /// one whose environment fingerprint matches the local input rather than
    /// the one from a divergent environment.
    #[tokio::test]
    async fn remote_replay_prefers_closest_environment_match() {
        let (evo, _) = build_test_evo("remote-env", "run-remote-env", command_validator());
        let input = replay_input("env-signal");

        // Envelope A shares the local input's environment fingerprint.
        let envelope_a = remote_publish_envelope_with_env(
            "node-a",
            "run-remote-a",
            "gene-a",
            "capsule-a",
            "mutation-a",
            "env-signal",
            "A.md",
            "# from a",
            input.env.clone(),
        );
        // Envelope B advertises a deliberately mismatched environment
        // (different rustc, lockfile, triple, and OS).
        let envelope_b = remote_publish_envelope_with_env(
            "node-b",
            "run-remote-b",
            "gene-b",
            "capsule-b",
            "mutation-b",
            "env-signal",
            "B.md",
            "# from b",
            EnvFingerprint {
                rustc_version: "old-rustc".into(),
                cargo_lock_hash: "other-lock".into(),
                target_triple: "aarch64-apple-darwin".into(),
                os: "linux".into(),
            },
        );

        evo.import_remote_envelope(&envelope_a).unwrap();
        evo.import_remote_envelope(&envelope_b).unwrap();

        let decision = evo.replay_or_fallback(input).await.unwrap();

        // The environment-matched capsule must win, with no planner fallback.
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-a".into()));
        assert!(!decision.fallback_to_planner);
    }
7924
    /// Cold-start scoring of quarantined remote candidates must cap query
    /// coverage at 1.0: a single exact-signal match and a multi-token overlap
    /// both score 1.0, and no candidate may exceed the cap.
    #[test]
    fn remote_cold_start_scoring_caps_distinct_query_coverage() {
        let (evo, _) = build_test_evo("remote-score", "run-remote-score", command_validator());
        let input = replay_input("missing readme");

        // Candidate whose single signal equals the query exactly.
        let exact = remote_publish_envelope_with_signals(
            "node-exact",
            "run-remote-exact",
            "gene-exact",
            "capsule-exact",
            "mutation-exact",
            vec!["missing readme".into()],
            vec!["missing readme".into()],
            "EXACT.md",
            "# exact",
            input.env.clone(),
        );
        // Candidate whose signals cover the query as two separate tokens;
        // without the cap this could over-count coverage.
        let overlapping = remote_publish_envelope_with_signals(
            "node-overlap",
            "run-remote-overlap",
            "gene-overlap",
            "capsule-overlap",
            "mutation-overlap",
            vec!["missing readme".into()],
            vec!["missing".into(), "readme".into()],
            "OVERLAP.md",
            "# overlap",
            input.env.clone(),
        );

        evo.import_remote_envelope(&exact).unwrap();
        evo.import_remote_envelope(&overlapping).unwrap();

        let candidates = quarantined_remote_exact_match_candidates(evo.store.as_ref(), &input);
        let exact_candidate = candidates
            .iter()
            .find(|candidate| candidate.gene.id == "gene-exact")
            .unwrap();
        let overlap_candidate = candidates
            .iter()
            .find(|candidate| candidate.gene.id == "gene-overlap")
            .unwrap();

        // Exact float equality is intentional: capped scores are expected to
        // be exactly 1.0, not approximately.
        assert_eq!(exact_candidate.score, 1.0);
        assert_eq!(overlap_candidate.score, 1.0);
        assert!(candidates.iter().all(|candidate| candidate.score <= 1.0));
    }
7972
    /// A capsule whose mutation carries no inline spec id must still qualify
    /// as an exact-match candidate when a later `SpecLinked` event ties the
    /// mutation to the spec id the query asks for.
    #[test]
    fn exact_match_candidates_respect_spec_linked_events() {
        let (evo, _) = build_test_evo(
            "spec-linked-filter",
            "run-spec-linked-filter",
            command_validator(),
        );
        let mut input = replay_input("missing readme");
        input.spec_id = Some("spec-readme".into());

        // The mutation deliberately has no spec id of its own; the link is
        // established only through the SpecLinked event appended below.
        let mut mutation = sample_mutation();
        mutation.intent.id = "mutation-spec-linked".into();
        mutation.intent.spec_id = None;
        let gene = Gene {
            id: "gene-spec-linked".into(),
            signals: vec!["missing readme".into()],
            strategy: vec!["README.md".into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        };
        let capsule = Capsule {
            id: "capsule-spec-linked".into(),
            gene_id: gene.id.clone(),
            mutation_id: mutation.intent.id.clone(),
            run_id: "run-spec-linked".into(),
            diff_hash: mutation.artifact.content_hash.clone(),
            confidence: 0.9,
            env: input.env.clone(),
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec!["README.md".into()],
                validator_hash: "validator-hash".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        };

        // Seed the event log: mutation, gene, capsule, then the spec link.
        evo.store
            .append_event(EvolutionEvent::MutationDeclared { mutation })
            .unwrap();
        evo.store
            .append_event(EvolutionEvent::GeneProjected { gene })
            .unwrap();
        evo.store
            .append_event(EvolutionEvent::CapsuleCommitted { capsule })
            .unwrap();
        evo.store
            .append_event(EvolutionEvent::SpecLinked {
                mutation_id: "mutation-spec-linked".into(),
                spec_id: "spec-readme".into(),
            })
            .unwrap();

        let candidates = exact_match_candidates(evo.store.as_ref(), &input);
        assert_eq!(candidates.len(), 1);
        assert_eq!(candidates[0].gene.id, "gene-spec-linked");
    }
8033
    /// Remote-imported assets must progress `Quarantined` ->
    /// `ShadowValidated` (after one successful replay) -> `Promoted` (after a
    /// second), and the promoted-asset export must stay empty until promotion
    /// completes.
    #[tokio::test]
    async fn remote_capsule_advances_from_quarantine_to_shadow_then_promoted() {
        let (evo, store) = build_test_evo(
            "remote-quarantine",
            "run-remote-quarantine",
            command_validator(),
        );
        let envelope = remote_publish_envelope(
            "node-remote",
            "run-remote-quarantine",
            "gene-remote",
            "capsule-remote",
            "mutation-remote",
            "remote-signal",
            "REMOTE.md",
            "# from remote",
        );

        evo.import_remote_envelope(&envelope).unwrap();

        // Stage 1: freshly imported assets are quarantined and not exported.
        let before_replay = store.rebuild_projection().unwrap();
        let imported_gene = before_replay
            .genes
            .iter()
            .find(|gene| gene.id == "gene-remote")
            .unwrap();
        let imported_capsule = before_replay
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-remote")
            .unwrap();
        assert_eq!(imported_gene.state, AssetState::Quarantined);
        assert_eq!(imported_capsule.state, AssetState::Quarantined);
        let exported_before_replay =
            export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
        assert!(exported_before_replay.assets.is_empty());

        // Stage 2: one successful replay lifts both assets to shadow state,
        // still withheld from export.
        let first_decision = evo
            .replay_or_fallback(replay_input("remote-signal"))
            .await
            .unwrap();

        assert!(first_decision.used_capsule);
        assert_eq!(first_decision.capsule_id, Some("capsule-remote".into()));

        let after_first_replay = store.rebuild_projection().unwrap();
        let shadow_gene = after_first_replay
            .genes
            .iter()
            .find(|gene| gene.id == "gene-remote")
            .unwrap();
        let shadow_capsule = after_first_replay
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-remote")
            .unwrap();
        assert_eq!(shadow_gene.state, AssetState::ShadowValidated);
        assert_eq!(shadow_capsule.state, AssetState::ShadowValidated);
        let exported_after_first_replay =
            export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
        assert!(exported_after_first_replay.assets.is_empty());

        // Stage 3: a second successful replay promotes the assets and makes
        // them exportable, including the mutation payload.
        let second_decision = evo
            .replay_or_fallback(replay_input("remote-signal"))
            .await
            .unwrap();
        assert!(second_decision.used_capsule);
        assert_eq!(second_decision.capsule_id, Some("capsule-remote".into()));

        let after_second_replay = store.rebuild_projection().unwrap();
        let promoted_gene = after_second_replay
            .genes
            .iter()
            .find(|gene| gene.id == "gene-remote")
            .unwrap();
        let promoted_capsule = after_second_replay
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-remote")
            .unwrap();
        assert_eq!(promoted_gene.state, AssetState::Promoted);
        assert_eq!(promoted_capsule.state, AssetState::Promoted);
        let exported_after_second_replay =
            export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
        assert_eq!(exported_after_second_replay.assets.len(), 3);
        assert!(exported_after_second_replay
            .assets
            .iter()
            .any(|asset| matches!(
                asset,
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared { .. }
                }
            )));
    }
8129
8130 #[tokio::test]
8131 async fn publish_local_assets_include_mutation_payload_for_remote_replay() {
8132 let (source, source_store) = build_test_evo(
8133 "remote-publish-export",
8134 "run-remote-publish-export",
8135 command_validator(),
8136 );
8137 source
8138 .capture_successful_mutation(&"run-remote-publish-export".into(), sample_mutation())
8139 .await
8140 .unwrap();
8141 let envelope = EvolutionNetworkNode::new(source_store.clone())
8142 .publish_local_assets("node-source")
8143 .unwrap();
8144 assert!(envelope.assets.iter().any(|asset| matches!(
8145 asset,
8146 NetworkAsset::EvolutionEvent {
8147 event: EvolutionEvent::MutationDeclared { mutation }
8148 } if mutation.intent.id == "mutation-1"
8149 )));
8150
8151 let (remote, _) = build_test_evo(
8152 "remote-publish-import",
8153 "run-remote-publish-import",
8154 command_validator(),
8155 );
8156 remote.import_remote_envelope(&envelope).unwrap();
8157
8158 let decision = remote
8159 .replay_or_fallback(replay_input("missing readme"))
8160 .await
8161 .unwrap();
8162
8163 assert!(decision.used_capsule);
8164 assert!(!decision.fallback_to_planner);
8165 }
8166
8167 #[tokio::test]
8168 async fn import_remote_envelope_records_manifest_validation_event() {
8169 let (source, source_store) = build_test_evo(
8170 "remote-manifest-success-source",
8171 "run-remote-manifest-success-source",
8172 command_validator(),
8173 );
8174 source
8175 .capture_successful_mutation(
8176 &"run-remote-manifest-success-source".into(),
8177 sample_mutation(),
8178 )
8179 .await
8180 .unwrap();
8181 let envelope = EvolutionNetworkNode::new(source_store.clone())
8182 .publish_local_assets("node-source")
8183 .unwrap();
8184
8185 let (remote, remote_store) = build_test_evo(
8186 "remote-manifest-success-remote",
8187 "run-remote-manifest-success-remote",
8188 command_validator(),
8189 );
8190 remote.import_remote_envelope(&envelope).unwrap();
8191
8192 let events = remote_store.scan(1).unwrap();
8193 assert!(events.iter().any(|stored| matches!(
8194 &stored.event,
8195 EvolutionEvent::ManifestValidated {
8196 accepted: true,
8197 reason,
8198 sender_id: Some(sender_id),
8199 publisher: Some(publisher),
8200 asset_ids,
8201 } if reason == "manifest validated"
8202 && sender_id == "node-source"
8203 && publisher == "node-source"
8204 && !asset_ids.is_empty()
8205 )));
8206 }
8207
8208 #[test]
8209 fn import_remote_envelope_rejects_invalid_manifest_and_records_audit_event() {
8210 let (remote, remote_store) = build_test_evo(
8211 "remote-manifest-invalid",
8212 "run-remote-manifest-invalid",
8213 command_validator(),
8214 );
8215 let mut envelope = remote_publish_envelope(
8216 "node-remote",
8217 "run-remote-manifest-invalid",
8218 "gene-remote",
8219 "capsule-remote",
8220 "mutation-remote",
8221 "manifest-signal",
8222 "MANIFEST.md",
8223 "# drift",
8224 );
8225 if let Some(manifest) = envelope.manifest.as_mut() {
8226 manifest.asset_hash = "tampered-hash".to_string();
8227 }
8228 envelope.content_hash = envelope.compute_content_hash();
8229
8230 let error = remote.import_remote_envelope(&envelope).unwrap_err();
8231 assert!(error.to_string().contains("manifest"));
8232
8233 let events = remote_store.scan(1).unwrap();
8234 assert!(events.iter().any(|stored| matches!(
8235 &stored.event,
8236 EvolutionEvent::ManifestValidated {
8237 accepted: false,
8238 reason,
8239 sender_id: Some(sender_id),
8240 publisher: Some(publisher),
8241 asset_ids,
8242 } if reason.contains("manifest asset_hash mismatch")
8243 && sender_id == "node-remote"
8244 && publisher == "node-remote"
8245 && !asset_ids.is_empty()
8246 )));
8247 }
8248
8249 #[tokio::test]
8250 async fn fetch_assets_include_mutation_payload_for_remote_replay() {
8251 let (evo, store) = build_test_evo(
8252 "remote-fetch-export",
8253 "run-remote-fetch",
8254 command_validator(),
8255 );
8256 evo.capture_successful_mutation(&"run-remote-fetch".into(), sample_mutation())
8257 .await
8258 .unwrap();
8259
8260 let response = EvolutionNetworkNode::new(store.clone())
8261 .fetch_assets(
8262 "node-source",
8263 &FetchQuery {
8264 sender_id: "node-client".into(),
8265 signals: vec!["missing readme".into()],
8266 since_cursor: None,
8267 resume_token: None,
8268 },
8269 )
8270 .unwrap();
8271
8272 assert!(response.assets.iter().any(|asset| matches!(
8273 asset,
8274 NetworkAsset::EvolutionEvent {
8275 event: EvolutionEvent::MutationDeclared { mutation }
8276 } if mutation.intent.id == "mutation-1"
8277 )));
8278 assert!(response
8279 .assets
8280 .iter()
8281 .any(|asset| matches!(asset, NetworkAsset::Gene { .. })));
8282 assert!(response
8283 .assets
8284 .iter()
8285 .any(|asset| matches!(asset, NetworkAsset::Capsule { .. })));
8286 }
8287
    /// Delta sync: a follow-up fetch using either the `next_cursor` or the
    /// `resume_token` from an earlier response must return only assets
    /// recorded after that point, even across a node restart.
    #[test]
    fn fetch_assets_delta_sync_supports_since_cursor_and_resume_token() {
        let store_root =
            std::env::temp_dir().join(format!("oris-evokernel-fetch-delta-store-{}", next_id("t")));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let node = EvolutionNetworkNode::new(store.clone());
        // First recorded experience — should be visible to a cold fetch.
        node.record_reported_experience(
            "delta-agent",
            "gene-delta-a",
            vec!["delta.signal".into()],
            vec![
                "task_class=delta.signal".into(),
                "task_label=delta replay".into(),
            ],
            vec!["a2a.tasks.report".into()],
        )
        .unwrap();

        // Cold fetch captures the baseline cursor and resume token.
        let first = node
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "delta-agent".into(),
                    signals: vec!["delta.signal".into()],
                    since_cursor: None,
                    resume_token: None,
                },
            )
            .unwrap();
        let first_cursor = first.next_cursor.clone().expect("first next_cursor");
        let first_token = first.resume_token.clone().expect("first resume_token");
        assert!(first.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-a")
        ));

        // Simulate a restart by constructing a new node over the same store,
        // then record a second experience after the baseline.
        let restarted = EvolutionNetworkNode::new(store.clone());
        restarted
            .record_reported_experience(
                "delta-agent",
                "gene-delta-b",
                vec!["delta.signal".into()],
                vec![
                    "task_class=delta.signal".into(),
                    "task_label=delta replay".into(),
                ],
                vec!["a2a.tasks.report".into()],
            )
            .unwrap();

        // Resume-token fetch: only the post-baseline gene comes back, and
        // the sync audit reflects the cursor implied by the token.
        let from_token = restarted
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "delta-agent".into(),
                    signals: vec!["delta.signal".into()],
                    since_cursor: None,
                    resume_token: Some(first_token),
                },
            )
            .unwrap();
        assert!(from_token.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-b")
        ));
        assert!(!from_token.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-a")
        ));
        assert_eq!(
            from_token.sync_audit.requested_cursor,
            Some(first_cursor.clone())
        );
        assert!(from_token.sync_audit.applied_count >= 1);

        // Explicit since_cursor fetch behaves the same as the resume token.
        let from_cursor = restarted
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "delta-agent".into(),
                    signals: vec!["delta.signal".into()],
                    since_cursor: Some(first_cursor),
                    resume_token: None,
                },
            )
            .unwrap();
        assert!(from_cursor.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-b")
        ));
    }
8379
8380 #[test]
8381 fn partial_remote_import_keeps_publisher_for_already_imported_assets() {
8382 let store_root = std::env::temp_dir().join(format!(
8383 "oris-evokernel-remote-partial-store-{}",
8384 std::process::id()
8385 ));
8386 if store_root.exists() {
8387 fs::remove_dir_all(&store_root).unwrap();
8388 }
8389 let store: Arc<dyn EvolutionStore> = Arc::new(FailOnAppendStore::new(store_root, 5));
8390 let evo = build_test_evo_with_store(
8391 "remote-partial",
8392 "run-remote-partial",
8393 command_validator(),
8394 store.clone(),
8395 );
8396 let envelope = remote_publish_envelope(
8397 "node-partial",
8398 "run-remote-partial",
8399 "gene-partial",
8400 "capsule-partial",
8401 "mutation-partial",
8402 "partial-signal",
8403 "PARTIAL.md",
8404 "# partial",
8405 );
8406
8407 let result = evo.import_remote_envelope(&envelope);
8408
8409 assert!(matches!(result, Err(EvoKernelError::Store(_))));
8410 let projection = store.rebuild_projection().unwrap();
8411 assert!(projection
8412 .genes
8413 .iter()
8414 .any(|gene| gene.id == "gene-partial"));
8415 assert!(projection.capsules.is_empty());
8416 let publishers = evo.remote_publishers.lock().unwrap();
8417 assert_eq!(
8418 publishers.get("gene-partial").map(String::as_str),
8419 Some("node-partial")
8420 );
8421 }
8422
    /// Retrying an import after a partial failure must be idempotent: only
    /// the assets that failed the first time are imported on the retry, and
    /// each event (mutation, gene, capsule) ends up in the store exactly once.
    #[test]
    fn retry_remote_import_after_partial_failure_only_imports_missing_assets() {
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-remote-partial-retry-store-{}",
            next_id("t")
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        // Fail on the 5th append so the first import stops after the gene
        // but before the capsule.
        let store: Arc<dyn EvolutionStore> = Arc::new(FailOnAppendStore::new(store_root, 5));
        let evo = build_test_evo_with_store(
            "remote-partial-retry",
            "run-remote-partial-retry",
            command_validator(),
            store.clone(),
        );
        let envelope = remote_publish_envelope(
            "node-partial",
            "run-remote-partial-retry",
            "gene-partial-retry",
            "capsule-partial-retry",
            "mutation-partial-retry",
            "partial-retry-signal",
            "PARTIAL_RETRY.md",
            "# partial retry",
        );

        // First attempt fails part-way through.
        let first = evo.import_remote_envelope(&envelope);
        assert!(matches!(first, Err(EvoKernelError::Store(_))));

        // The retry imports only the asset that is still missing.
        let retry = evo.import_remote_envelope(&envelope).unwrap();

        assert_eq!(retry.imported_asset_ids, vec!["capsule-partial-retry"]);
        // Both assets are now present and quarantined, with a single
        // recorded import attempt for the gene.
        let projection = store.rebuild_projection().unwrap();
        let gene = projection
            .genes
            .iter()
            .find(|gene| gene.id == "gene-partial-retry")
            .unwrap();
        assert_eq!(gene.state, AssetState::Quarantined);
        let capsule = projection
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-partial-retry")
            .unwrap();
        assert_eq!(capsule.state, AssetState::Quarantined);
        assert_eq!(projection.attempt_counts["gene-partial-retry"], 1);

        // No duplicated events: exactly one MutationDeclared, one
        // GeneProjected, one CapsuleCommitted across both attempts.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::MutationDeclared { mutation }
                            if mutation.intent.id == "mutation-partial-retry"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::GeneProjected { gene } if gene.id == "gene-partial-retry"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::CapsuleCommitted { capsule }
                            if capsule.id == "capsule-partial-retry"
                    )
                })
                .count(),
            1
        );
    }
8511
    /// Importing the same envelope twice must be a no-op the second time:
    /// assets that have already advanced past quarantine via local replays
    /// keep their state and attempt counts, no duplicate events are appended,
    /// and the trust ladder (Shadow → Promoted) continues uninterrupted.
    #[tokio::test]
    async fn duplicate_remote_import_does_not_requarantine_locally_validated_assets() {
        let (evo, store) = build_test_evo(
            "remote-idempotent",
            "run-remote-idempotent",
            command_validator(),
        );
        let envelope = remote_publish_envelope(
            "node-idempotent",
            "run-remote-idempotent",
            "gene-idempotent",
            "capsule-idempotent",
            "mutation-idempotent",
            "idempotent-signal",
            "IDEMPOTENT.md",
            "# idempotent",
        );

        // First import brings in both assets.
        let first = evo.import_remote_envelope(&envelope).unwrap();
        assert_eq!(
            first.imported_asset_ids,
            vec!["gene-idempotent", "capsule-idempotent"]
        );

        // One successful replay advances the assets to ShadowValidated.
        let decision = evo
            .replay_or_fallback(replay_input("idempotent-signal"))
            .await
            .unwrap();
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-idempotent".into()));

        let projection_before = store.rebuild_projection().unwrap();
        let attempts_before = projection_before.attempt_counts["gene-idempotent"];
        let gene_before = projection_before
            .genes
            .iter()
            .find(|gene| gene.id == "gene-idempotent")
            .unwrap();
        assert_eq!(gene_before.state, AssetState::ShadowValidated);
        let capsule_before = projection_before
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-idempotent")
            .unwrap();
        assert_eq!(capsule_before.state, AssetState::ShadowValidated);

        // Duplicate import: nothing new is imported.
        let second = evo.import_remote_envelope(&envelope).unwrap();
        assert!(second.imported_asset_ids.is_empty());

        // State and attempt counts are untouched by the duplicate import.
        let projection_after = store.rebuild_projection().unwrap();
        assert_eq!(
            projection_after.attempt_counts["gene-idempotent"],
            attempts_before
        );
        let gene_after = projection_after
            .genes
            .iter()
            .find(|gene| gene.id == "gene-idempotent")
            .unwrap();
        assert_eq!(gene_after.state, AssetState::ShadowValidated);
        let capsule_after = projection_after
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-idempotent")
            .unwrap();
        assert_eq!(capsule_after.state, AssetState::ShadowValidated);

        // A second successful replay still promotes the assets: the trust
        // ladder was not reset by the duplicate import.
        let third_decision = evo
            .replay_or_fallback(replay_input("idempotent-signal"))
            .await
            .unwrap();
        assert!(third_decision.used_capsule);
        assert_eq!(third_decision.capsule_id, Some("capsule-idempotent".into()));

        let projection_promoted = store.rebuild_projection().unwrap();
        let promoted_gene = projection_promoted
            .genes
            .iter()
            .find(|gene| gene.id == "gene-idempotent")
            .unwrap();
        let promoted_capsule = projection_promoted
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-idempotent")
            .unwrap();
        assert_eq!(promoted_gene.state, AssetState::Promoted);
        assert_eq!(promoted_capsule.state, AssetState::Promoted);

        // Exactly one of each event across both imports.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::MutationDeclared { mutation }
                            if mutation.intent.id == "mutation-idempotent"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::GeneProjected { gene } if gene.id == "gene-idempotent"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::CapsuleCommitted { capsule }
                            if capsule.id == "capsule-idempotent"
                    )
                })
                .count(),
            1
        );

        // The sync audits agree: the first import scanned everything with no
        // failures; the duplicate applied nothing and skipped everything,
        // while still handing back a resume token.
        assert_eq!(first.sync_audit.scanned_count, envelope.assets.len());
        assert_eq!(first.sync_audit.failed_count, 0);
        assert_eq!(second.sync_audit.applied_count, 0);
        assert_eq!(second.sync_audit.skipped_count, envelope.assets.len());
        assert!(second.resume_token.is_some());
    }
8646
8647 #[tokio::test]
8648 async fn insufficient_evu_blocks_publish_but_not_local_replay() {
8649 let (evo, _) = build_test_evo("stake-gate", "run-stake", command_validator());
8650 let capsule = evo
8651 .capture_successful_mutation(&"run-stake".into(), sample_mutation())
8652 .await
8653 .unwrap();
8654 let publish = evo.export_promoted_assets("node-local");
8655 assert!(matches!(publish, Err(EvoKernelError::Validation(_))));
8656
8657 let decision = evo
8658 .replay_or_fallback(replay_input("missing readme"))
8659 .await
8660 .unwrap();
8661 assert!(decision.used_capsule);
8662 assert_eq!(decision.capsule_id, Some(capsule.id));
8663 }
8664
8665 #[tokio::test]
8666 async fn second_replay_validation_failure_revokes_gene_immediately() {
8667 let (capturer, store) = build_test_evo("revoke-replay", "run-capture", command_validator());
8668 let capsule = capturer
8669 .capture_successful_mutation(&"run-capture".into(), sample_mutation())
8670 .await
8671 .unwrap();
8672
8673 let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
8674 let failing_replay = build_test_evo_with_store(
8675 "revoke-replay",
8676 "run-replay-fail",
8677 failing_validator,
8678 store.clone(),
8679 );
8680
8681 let first = failing_replay
8682 .replay_or_fallback(replay_input("missing readme"))
8683 .await
8684 .unwrap();
8685 let second = failing_replay
8686 .replay_or_fallback(replay_input("missing readme"))
8687 .await
8688 .unwrap();
8689
8690 assert!(!first.used_capsule);
8691 assert!(first.fallback_to_planner);
8692 assert!(!second.used_capsule);
8693 assert!(second.fallback_to_planner);
8694
8695 let projection = store.rebuild_projection().unwrap();
8696 let gene = projection
8697 .genes
8698 .iter()
8699 .find(|gene| gene.id == capsule.gene_id)
8700 .unwrap();
8701 assert_eq!(gene.state, AssetState::Promoted);
8702 let committed_capsule = projection
8703 .capsules
8704 .iter()
8705 .find(|current| current.id == capsule.id)
8706 .unwrap();
8707 assert_eq!(committed_capsule.state, AssetState::Promoted);
8708
8709 let events = store.scan(1).unwrap();
8710 assert_eq!(
8711 events
8712 .iter()
8713 .filter(|stored| {
8714 matches!(
8715 &stored.event,
8716 EvolutionEvent::ValidationFailed {
8717 gene_id: Some(gene_id),
8718 ..
8719 } if gene_id == &capsule.gene_id
8720 )
8721 })
8722 .count(),
8723 1
8724 );
8725 assert!(!events.iter().any(|stored| {
8726 matches!(
8727 &stored.event,
8728 EvolutionEvent::GeneRevoked { gene_id, .. } if gene_id == &capsule.gene_id
8729 )
8730 }));
8731
8732 let recovered = build_test_evo_with_store(
8733 "revoke-replay",
8734 "run-replay-check",
8735 command_validator(),
8736 store.clone(),
8737 );
8738 let after_revoke = recovered
8739 .replay_or_fallback(replay_input("missing readme"))
8740 .await
8741 .unwrap();
8742 assert!(!after_revoke.used_capsule);
8743 assert!(after_revoke.fallback_to_planner);
8744 assert!(after_revoke.reason.contains("below replay threshold"));
8745 }
8746
8747 #[tokio::test]
8748 async fn remote_reuse_success_rewards_publisher_and_biases_selection() {
8749 let ledger = Arc::new(Mutex::new(EvuLedger {
8750 accounts: vec![],
8751 reputations: vec![
8752 oris_economics::ReputationRecord {
8753 node_id: "node-a".into(),
8754 publish_success_rate: 0.4,
8755 validator_accuracy: 0.4,
8756 reuse_impact: 0,
8757 },
8758 oris_economics::ReputationRecord {
8759 node_id: "node-b".into(),
8760 publish_success_rate: 0.95,
8761 validator_accuracy: 0.95,
8762 reuse_impact: 8,
8763 },
8764 ],
8765 }));
8766 let (evo, _) = build_test_evo("remote-success", "run-remote", command_validator());
8767 let evo = evo.with_economics(ledger.clone());
8768
8769 let envelope_a = remote_publish_envelope(
8770 "node-a",
8771 "run-remote-a",
8772 "gene-a",
8773 "capsule-a",
8774 "mutation-a",
8775 "shared-signal",
8776 "A.md",
8777 "# from a",
8778 );
8779 let envelope_b = remote_publish_envelope(
8780 "node-b",
8781 "run-remote-b",
8782 "gene-b",
8783 "capsule-b",
8784 "mutation-b",
8785 "shared-signal",
8786 "B.md",
8787 "# from b",
8788 );
8789
8790 evo.import_remote_envelope(&envelope_a).unwrap();
8791 evo.import_remote_envelope(&envelope_b).unwrap();
8792
8793 let decision = evo
8794 .replay_or_fallback(replay_input("shared-signal"))
8795 .await
8796 .unwrap();
8797
8798 assert!(decision.used_capsule);
8799 assert_eq!(decision.capsule_id, Some("capsule-b".into()));
8800 let locked = ledger.lock().unwrap();
8801 let rewarded = locked
8802 .accounts
8803 .iter()
8804 .find(|item| item.node_id == "node-b")
8805 .unwrap();
8806 assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
8807 assert!(
8808 locked.selector_reputation_bias()["node-b"]
8809 > locked.selector_reputation_bias()["node-a"]
8810 );
8811 }
8812
    /// When two publishers ship capsules for the SAME gene, reuse settlement
    /// must credit the publisher of the capsule that was actually selected —
    /// here the one whose env fingerprint matches the replay input — and pay
    /// nothing to the other publisher.
    #[tokio::test]
    async fn remote_reuse_settlement_tracks_selected_capsule_publisher_for_shared_gene() {
        let ledger = Arc::new(Mutex::new(EvuLedger::default()));
        let (evo, _) = build_test_evo(
            "remote-shared-publisher",
            "run-remote-shared-publisher",
            command_validator(),
        );
        let evo = evo.with_economics(ledger.clone());
        let input = replay_input("shared-signal");
        // node-a's capsule shares the replay input's env fingerprint, so it
        // should be preferred.
        let preferred = remote_publish_envelope_with_env(
            "node-a",
            "run-remote-a",
            "gene-shared",
            "capsule-preferred",
            "mutation-preferred",
            "shared-signal",
            "A.md",
            "# from a",
            input.env.clone(),
        );
        // node-b's capsule targets a deliberately mismatched environment.
        let fallback = remote_publish_envelope_with_env(
            "node-b",
            "run-remote-b",
            "gene-shared",
            "capsule-fallback",
            "mutation-fallback",
            "shared-signal",
            "B.md",
            "# from b",
            EnvFingerprint {
                rustc_version: "old-rustc".into(),
                cargo_lock_hash: "other-lock".into(),
                target_triple: "aarch64-apple-darwin".into(),
                os: "linux".into(),
            },
        );

        evo.import_remote_envelope(&preferred).unwrap();
        evo.import_remote_envelope(&fallback).unwrap();

        let decision = evo.replay_or_fallback(input).await.unwrap();

        // The env-matching capsule is selected and only its publisher
        // (node-a) is rewarded; node-b never gains an account entry.
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-preferred".into()));
        let locked = ledger.lock().unwrap();
        let rewarded = locked
            .accounts
            .iter()
            .find(|item| item.node_id == "node-a")
            .unwrap();
        assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
        assert!(locked.accounts.iter().all(|item| item.node_id != "node-b"));
    }
8867
8868 #[test]
8869 fn select_candidates_surfaces_ranked_remote_cold_start_candidates() {
8870 let ledger = Arc::new(Mutex::new(EvuLedger {
8871 accounts: vec![],
8872 reputations: vec![
8873 oris_economics::ReputationRecord {
8874 node_id: "node-a".into(),
8875 publish_success_rate: 0.4,
8876 validator_accuracy: 0.4,
8877 reuse_impact: 0,
8878 },
8879 oris_economics::ReputationRecord {
8880 node_id: "node-b".into(),
8881 publish_success_rate: 0.95,
8882 validator_accuracy: 0.95,
8883 reuse_impact: 8,
8884 },
8885 ],
8886 }));
8887 let (evo, _) = build_test_evo("remote-select", "run-remote-select", command_validator());
8888 let evo = evo.with_economics(ledger);
8889
8890 let envelope_a = remote_publish_envelope(
8891 "node-a",
8892 "run-remote-a",
8893 "gene-a",
8894 "capsule-a",
8895 "mutation-a",
8896 "shared-signal",
8897 "A.md",
8898 "# from a",
8899 );
8900 let envelope_b = remote_publish_envelope(
8901 "node-b",
8902 "run-remote-b",
8903 "gene-b",
8904 "capsule-b",
8905 "mutation-b",
8906 "shared-signal",
8907 "B.md",
8908 "# from b",
8909 );
8910
8911 evo.import_remote_envelope(&envelope_a).unwrap();
8912 evo.import_remote_envelope(&envelope_b).unwrap();
8913
8914 let candidates = evo.select_candidates(&replay_input("shared-signal"));
8915
8916 assert_eq!(candidates.len(), 1);
8917 assert_eq!(candidates[0].gene.id, "gene-b");
8918 assert_eq!(candidates[0].capsules[0].id, "capsule-b");
8919 }
8920
    /// Publisher attribution and reputation bias must be durable: after
    /// rebuilding the kernel from the same on-disk store, replay still
    /// selects the higher-reputation publisher's capsule and settles the
    /// reuse reward to that publisher.
    #[tokio::test]
    async fn remote_reuse_publisher_bias_survives_restart() {
        let ledger = Arc::new(Mutex::new(EvuLedger {
            accounts: vec![],
            reputations: vec![
                oris_economics::ReputationRecord {
                    node_id: "node-a".into(),
                    publish_success_rate: 0.4,
                    validator_accuracy: 0.4,
                    reuse_impact: 0,
                },
                oris_economics::ReputationRecord {
                    node_id: "node-b".into(),
                    publish_success_rate: 0.95,
                    validator_accuracy: 0.95,
                    reuse_impact: 8,
                },
            ],
        }));
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-remote-restart-store-{}",
            next_id("t")
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        // Import competing envelopes on the original kernel instance.
        let evo = build_test_evo_with_store(
            "remote-success-restart-source",
            "run-remote-restart-source",
            command_validator(),
            store.clone(),
        )
        .with_economics(ledger.clone());

        let envelope_a = remote_publish_envelope(
            "node-a",
            "run-remote-a",
            "gene-a",
            "capsule-a",
            "mutation-a",
            "shared-signal",
            "A.md",
            "# from a",
        );
        let envelope_b = remote_publish_envelope(
            "node-b",
            "run-remote-b",
            "gene-b",
            "capsule-b",
            "mutation-b",
            "shared-signal",
            "B.md",
            "# from b",
        );

        evo.import_remote_envelope(&envelope_a).unwrap();
        evo.import_remote_envelope(&envelope_b).unwrap();

        // Simulate a restart: a fresh kernel instance over the same store.
        let recovered = build_test_evo_with_store(
            "remote-success-restart-recovered",
            "run-remote-restart-recovered",
            command_validator(),
            store.clone(),
        )
        .with_economics(ledger.clone());

        let decision = recovered
            .replay_or_fallback(replay_input("shared-signal"))
            .await
            .unwrap();

        // Bias and attribution survived: node-b's capsule is selected and
        // node-b receives the reuse reward.
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-b".into()));
        let locked = ledger.lock().unwrap();
        let rewarded = locked
            .accounts
            .iter()
            .find(|item| item.node_id == "node-b")
            .unwrap();
        assert_eq!(rewarded.balance, recovered.stake_policy.reuse_reward);
    }
9004
9005 #[tokio::test]
9006 async fn remote_reuse_failure_penalizes_remote_reputation() {
9007 let ledger = Arc::new(Mutex::new(EvuLedger::default()));
9008 let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
9009 let (evo, _) = build_test_evo("remote-failure", "run-failure", failing_validator);
9010 let evo = evo.with_economics(ledger.clone());
9011
9012 let envelope = remote_publish_envelope(
9013 "node-remote",
9014 "run-remote-failed",
9015 "gene-remote",
9016 "capsule-remote",
9017 "mutation-remote",
9018 "failure-signal",
9019 "FAILED.md",
9020 "# from remote",
9021 );
9022 evo.import_remote_envelope(&envelope).unwrap();
9023
9024 let decision = evo
9025 .replay_or_fallback(replay_input("failure-signal"))
9026 .await
9027 .unwrap();
9028
9029 assert!(!decision.used_capsule);
9030 assert!(decision.fallback_to_planner);
9031
9032 let signal = evo.economics_signal("node-remote").unwrap();
9033 assert_eq!(signal.available_evu, 0);
9034 assert!(signal.publish_success_rate < 0.5);
9035 assert!(signal.validator_accuracy < 0.5);
9036 }
9037
9038 #[test]
9039 fn ensure_builtin_experience_assets_is_idempotent_and_fetchable() {
9040 let store_root = std::env::temp_dir().join(format!(
9041 "oris-evokernel-builtin-experience-store-{}",
9042 next_id("t")
9043 ));
9044 if store_root.exists() {
9045 fs::remove_dir_all(&store_root).unwrap();
9046 }
9047 let store: Arc<dyn EvolutionStore> =
9048 Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
9049 let node = EvolutionNetworkNode::new(store.clone());
9050
9051 let first = node
9052 .ensure_builtin_experience_assets("runtime-bootstrap")
9053 .unwrap();
9054 assert!(!first.imported_asset_ids.is_empty());
9055
9056 let second = node
9057 .ensure_builtin_experience_assets("runtime-bootstrap")
9058 .unwrap();
9059 assert!(second.imported_asset_ids.is_empty());
9060
9061 let fetch = node
9062 .fetch_assets(
9063 "execution-api",
9064 &FetchQuery {
9065 sender_id: "compat-agent".into(),
9066 signals: vec!["error".into()],
9067 since_cursor: None,
9068 resume_token: None,
9069 },
9070 )
9071 .unwrap();
9072
9073 let mut has_builtin_evomap = false;
9074 for asset in fetch.assets {
9075 if let NetworkAsset::Gene { gene } = asset {
9076 if strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
9077 == Some("builtin_evomap")
9078 && gene.state == AssetState::Promoted
9079 {
9080 has_builtin_evomap = true;
9081 break;
9082 }
9083 }
9084 }
9085 assert!(has_builtin_evomap);
9086 }
9087
    /// Retention for reported experiences is capped: recording four
    /// experiences for the same task class keeps the latest three promoted
    /// and revokes the oldest, while builtin assets for other origins are
    /// untouched. Fetch still surfaces the retained genes.
    #[test]
    fn reported_experience_retention_keeps_latest_three_and_preserves_builtin_assets() {
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-reported-retention-store-{}",
            next_id("t")
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let node = EvolutionNetworkNode::new(store.clone());

        // Seed builtin assets first so retention of reported experiences can
        // be shown not to affect them.
        node.ensure_builtin_experience_assets("runtime-bootstrap")
            .unwrap();

        // Record four reported experiences for the same task class — one
        // more than the retention limit of three.
        for idx in 0..4 {
            node.record_reported_experience(
                "reporter-a",
                format!("reported-docs-rewrite-v{}", idx + 1),
                vec!["docs.rewrite".into(), format!("task-{}", idx + 1)],
                vec![
                    "task_class=docs.rewrite".into(),
                    format!("task_label=Docs rewrite v{}", idx + 1),
                    format!("summary=reported replay {}", idx + 1),
                ],
                vec!["a2a.tasks.report".into()],
            )
            .unwrap();
        }

        // Count promoted/revoked reported genes for the task class, plus
        // surviving promoted builtin genes.
        let (_, projection) = store.scan_projection().unwrap();
        let reported_promoted = projection
            .genes
            .iter()
            .filter(|gene| {
                gene.state == AssetState::Promoted
                    && strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
                        == Some("reported_experience")
                    && strategy_metadata_value(&gene.strategy, "task_class").as_deref()
                        == Some("docs.rewrite")
            })
            .count();
        let reported_revoked = projection
            .genes
            .iter()
            .filter(|gene| {
                gene.state == AssetState::Revoked
                    && strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
                        == Some("reported_experience")
                    && strategy_metadata_value(&gene.strategy, "task_class").as_deref()
                        == Some("docs.rewrite")
            })
            .count();
        let builtin_promoted = projection
            .genes
            .iter()
            .filter(|gene| {
                gene.state == AssetState::Promoted
                    && matches!(
                        strategy_metadata_value(&gene.strategy, "asset_origin").as_deref(),
                        Some("builtin") | Some("builtin_evomap")
                    )
            })
            .count();

        // Latest three retained, oldest revoked, builtins preserved.
        assert_eq!(reported_promoted, 3);
        assert_eq!(reported_revoked, 1);
        assert!(builtin_promoted >= 1);

        // Fetching the task-class signal returns at least the three retained
        // docs.rewrite genes.
        let fetch = node
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "consumer-b".into(),
                    signals: vec!["docs.rewrite".into()],
                    since_cursor: None,
                    resume_token: None,
                },
            )
            .unwrap();
        let docs_genes = fetch
            .assets
            .into_iter()
            .filter_map(|asset| match asset {
                NetworkAsset::Gene { gene } => Some(gene),
                _ => None,
            })
            .filter(|gene| {
                strategy_metadata_value(&gene.strategy, "task_class").as_deref()
                    == Some("docs.rewrite")
            })
            .collect::<Vec<_>>();
        assert!(docs_genes.len() >= 3);
    }
9183}