1use std::collections::{BTreeMap, BTreeSet};
4use std::fs;
5use std::path::{Path, PathBuf};
6use std::process::Command;
7use std::sync::{Arc, Mutex};
8
9use async_trait::async_trait;
10use chrono::{DateTime, Duration, Utc};
11use oris_agent_contract::{
12 infer_mutation_needed_failure_reason_code, infer_replay_fallback_reason_code,
13 normalize_mutation_needed_failure_contract, normalize_replay_fallback_contract, AgentRole,
14 BoundedTaskClass, CoordinationMessage, CoordinationPlan, CoordinationPrimitive,
15 CoordinationResult, CoordinationTask, ExecutionFeedback, MutationNeededFailureContract,
16 MutationNeededFailureReasonCode, MutationProposal as AgentMutationProposal, ReplayFeedback,
17 ReplayPlannerDirective, SupervisedDevloopOutcome, SupervisedDevloopRequest,
18 SupervisedDevloopStatus,
19};
20use oris_economics::{EconomicsSignal, EvuLedger, StakePolicy};
21use oris_evolution::{
22 compute_artifact_hash, decayed_replay_confidence, next_id, stable_hash_json, AssetState,
23 BlastRadius, CandidateSource, Capsule, CapsuleId, EnvFingerprint, EvolutionError,
24 EvolutionEvent, EvolutionProjection, EvolutionStore, Gene, GeneCandidate, MutationId,
25 PreparedMutation, ReplayRoiEvidence, ReplayRoiReasonCode, Selector, SelectorInput,
26 StoreBackedSelector, StoredEvolutionEvent, TransitionEvidence, TransitionReasonCode,
27 ValidationSnapshot, MIN_REPLAY_CONFIDENCE,
28};
29use oris_evolution_network::{EvolutionEnvelope, NetworkAsset, SyncAudit};
30use oris_governor::{DefaultGovernor, Governor, GovernorDecision, GovernorInput};
31use oris_kernel::{Kernel, KernelState, RunId};
32use oris_sandbox::{
33 compute_blast_radius, execute_allowed_command, Sandbox, SandboxPolicy, SandboxReceipt,
34};
35use oris_spec::CompiledMutationPlan;
36use serde::{Deserialize, Serialize};
37use serde_json::Value;
38use thiserror::Error;
39
40pub use oris_evolution::{
41 default_store_root, ArtifactEncoding, AssetState as EvoAssetState,
42 BlastRadius as EvoBlastRadius, CandidateSource as EvoCandidateSource,
43 EnvFingerprint as EvoEnvFingerprint, EvolutionStore as EvoEvolutionStore, JsonlEvolutionStore,
44 MutationArtifact, MutationIntent, MutationTarget, Outcome, RiskLevel,
45 SelectorInput as EvoSelectorInput, TransitionReasonCode as EvoTransitionReasonCode,
46};
47pub use oris_evolution_network::{
48 FetchQuery, FetchResponse, MessageType, PublishRequest, RevokeNotice,
49};
50pub use oris_governor::{CoolingWindow, GovernorConfig, RevocationReason};
51pub use oris_sandbox::{LocalProcessSandbox, SandboxPolicy as EvoSandboxPolicy};
52pub use oris_spec::{SpecCompileError, SpecCompiler, SpecDocument};
53
/// A named, ordered set of validation stages to run against a sandboxed change.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationPlan {
    /// Identifier for this validation profile (e.g. "oris-default").
    pub profile: String,
    /// Stages executed in order; `CommandValidator` stops at the first failure.
    pub stages: Vec<ValidationStage>,
}
59
60impl ValidationPlan {
61 pub fn oris_default() -> Self {
62 Self {
63 profile: "oris-default".into(),
64 stages: vec![
65 ValidationStage::Command {
66 program: "cargo".into(),
67 args: vec!["fmt".into(), "--all".into(), "--check".into()],
68 timeout_ms: 60_000,
69 },
70 ValidationStage::Command {
71 program: "cargo".into(),
72 args: vec!["check".into(), "--workspace".into()],
73 timeout_ms: 180_000,
74 },
75 ValidationStage::Command {
76 program: "cargo".into(),
77 args: vec![
78 "test".into(),
79 "-p".into(),
80 "oris-kernel".into(),
81 "-p".into(),
82 "oris-evolution".into(),
83 "-p".into(),
84 "oris-sandbox".into(),
85 "-p".into(),
86 "oris-evokernel".into(),
87 "--lib".into(),
88 ],
89 timeout_ms: 300_000,
90 },
91 ValidationStage::Command {
92 program: "cargo".into(),
93 args: vec![
94 "test".into(),
95 "-p".into(),
96 "oris-runtime".into(),
97 "--lib".into(),
98 ],
99 timeout_ms: 300_000,
100 },
101 ],
102 }
103 }
104}
105
/// A single validation step. Currently only external command execution is
/// supported; additional stage kinds can be added as new variants.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum ValidationStage {
    /// Run `program` with `args`; `timeout_ms` is handed to the sandbox
    /// command executor.
    Command {
        program: String,
        args: Vec<String>,
        timeout_ms: u64,
    },
}
114
/// Result of a single executed validation stage.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationStageReport {
    /// Stage label: the program name followed by its arguments.
    pub stage: String,
    pub success: bool,
    /// Process exit code; `None` when the command could not be executed at all.
    pub exit_code: Option<i32>,
    /// Stage duration; 0 when execution itself failed.
    pub duration_ms: u64,
    pub stdout: String,
    pub stderr: String,
}
124
/// Aggregated result of running a [`ValidationPlan`].
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationReport {
    /// True only when every executed stage succeeded.
    pub success: bool,
    /// Wall-clock duration of the whole run.
    pub duration_ms: u64,
    pub stages: Vec<ValidationStageReport>,
    /// Concatenated stdout/stderr of all executed stages, newline-separated.
    pub logs: String,
}
132
/// Raw material from which deterministic replay signals are extracted
/// (see [`extract_deterministic_signals`]).
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionInput {
    pub patch_diff: String,
    pub intent: String,
    pub expected_effect: String,
    /// Signals explicitly declared by the author; these contribute a
    /// normalized phrase in addition to their tokens.
    pub declared_signals: Vec<String>,
    pub changed_files: Vec<String>,
    /// Overall validation verdict; folded into the signal set as the literal
    /// "validation passed" / "validation failed".
    pub validation_success: bool,
    pub validation_logs: String,
    pub stage_outputs: Vec<String>,
}
144
/// Deterministic, sorted signal set plus a stable hash over it.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionOutput {
    /// Sorted, deduplicated signals, capped at 32 entries.
    pub values: Vec<String>,
    /// Stable JSON hash of `values`; falls back to an artifact hash of the
    /// newline-joined values if JSON hashing fails.
    pub hash: String,
}
150
/// Template for seeding the evolution store with an initial gene/capsule.
/// NOTE(review): the consumer of these templates is outside this chunk —
/// confirm field semantics against the bootstrap path.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SeedTemplate {
    pub id: String,
    pub intent: String,
    pub signals: Vec<String>,
    pub diff_payload: String,
    /// Name of the validation profile to associate with the seed.
    pub validation_profile: String,
}
159
/// Outcome of a bootstrap/seeding pass over the evolution store.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct BootstrapReport {
    /// Whether any seeding was performed.
    pub seeded: bool,
    pub genes_added: usize,
    pub capsules_added: usize,
}
166
// Retention cap for reported experiences (consumer is outside this chunk).
const REPORTED_EXPERIENCE_RETENTION_LIMIT: usize = 3;
// Shadow-asset promotion gates: minimum replay attempts, success rate,
// environment match and decayed confidence (floor shared with oris_evolution).
const SHADOW_PROMOTION_MIN_REPLAY_ATTEMPTS: u64 = 2;
const SHADOW_PROMOTION_MIN_SUCCESS_RATE: f32 = 0.70;
const SHADOW_PROMOTION_MIN_ENV_MATCH: f32 = 0.75;
const SHADOW_PROMOTION_MIN_DECAYED_CONFIDENCE: f32 = MIN_REPLAY_CONFIDENCE;
// Reasoning-token accounting for replays: fixed floor plus per-signal weight.
const REPLAY_REASONING_TOKEN_FLOOR: u64 = 192;
const REPLAY_REASONING_TOKEN_SIGNAL_WEIGHT: u64 = 24;
// Score penalty applied to candidates found via exact-match (cold-start)
// lookup rather than selector ranking (see `build_select_evidence`).
const COLD_START_LOOKUP_PENALTY: f32 = 0.05;
// Upper bounds for accepting a mutation-needed failure contract
// (enforcement lives elsewhere in this crate).
const MUTATION_NEEDED_MAX_DIFF_BYTES: usize = 128 * 1024;
const MUTATION_NEEDED_MAX_CHANGED_LINES: usize = 600;
const MUTATION_NEEDED_MAX_SANDBOX_DURATION_MS: u64 = 120_000;
const MUTATION_NEEDED_MAX_VALIDATION_BUDGET_MS: u64 = 900_000;
// Cap on documentation files a supervised devloop run may touch.
const SUPERVISED_DEVLOOP_MAX_DOC_FILES: usize = 3;
/// Dimensions along which replay release-gate metrics are aggregated.
pub const REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS: [&str; 2] =
    ["task_class", "source_sender_id"];
182
/// Checklist evaluation of a repair plan (produced by
/// [`evaluate_repair_quality_gate`]).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct RepairQualityGateReport {
    /// Plan names a root cause.
    pub root_cause: bool,
    /// Plan names fix/remediation steps.
    pub fix: bool,
    /// Plan names verification steps.
    pub verification: bool,
    /// Plan names a rollback path.
    pub rollback: bool,
    /// Plan references the originating incident ("unknown command" etc.).
    pub incident_anchor: bool,
    /// How many of root_cause/fix/verification/rollback are present (0-4).
    pub structure_score: usize,
    /// Plan contains a runnable tool invocation (cargo/git/python/...).
    pub has_actionable_command: bool,
}
193
194impl RepairQualityGateReport {
195 pub fn passes(&self) -> bool {
196 self.incident_anchor
197 && self.structure_score >= 3
198 && (self.has_actionable_command || self.verification)
199 }
200
201 pub fn failed_checks(&self) -> Vec<String> {
202 let mut failed = Vec::new();
203 if !self.incident_anchor {
204 failed.push("包含unknown command故障上下文".to_string());
205 }
206 if self.structure_score < 3 {
207 failed.push("结构化修复信息至少满足3项(根因/修复/验证/回滚)".to_string());
208 }
209 if !(self.has_actionable_command || self.verification) {
210 failed.push("包含可执行验证命令或验证计划".to_string());
211 }
212 failed
213 }
214}
215
216pub fn evaluate_repair_quality_gate(plan: &str) -> RepairQualityGateReport {
217 fn contains_any(haystack: &str, needles: &[&str]) -> bool {
218 needles.iter().any(|needle| haystack.contains(needle))
219 }
220
221 let lower = plan.to_ascii_lowercase();
222 let root_cause = contains_any(
223 plan,
224 &["根因", "原因分析", "问题定位", "原因定位", "根本原因"],
225 ) || contains_any(
226 &lower,
227 &[
228 "root cause",
229 "cause analysis",
230 "problem diagnosis",
231 "diagnosis",
232 ],
233 );
234 let fix = contains_any(
235 plan,
236 &["修复步骤", "修复方案", "处理步骤", "修复建议", "整改方案"],
237 ) || contains_any(
238 &lower,
239 &[
240 "fix",
241 "remediation",
242 "mitigation",
243 "resolution",
244 "repair steps",
245 ],
246 );
247 let verification = contains_any(
248 plan,
249 &["验证命令", "验证步骤", "回归测试", "验证方式", "验收步骤"],
250 ) || contains_any(
251 &lower,
252 &[
253 "verification",
254 "validate",
255 "regression test",
256 "smoke test",
257 "test command",
258 ],
259 );
260 let rollback = contains_any(plan, &["回滚方案", "回滚步骤", "恢复方案", "撤销方案"])
261 || contains_any(&lower, &["rollback", "revert", "fallback plan", "undo"]);
262 let incident_anchor = contains_any(
263 &lower,
264 &[
265 "unknown command",
266 "process",
267 "proccess",
268 "command not found",
269 ],
270 ) || contains_any(plan, &["命令不存在", "命令未找到", "未知命令"]);
271 let structure_score = [root_cause, fix, verification, rollback]
272 .into_iter()
273 .filter(|ok| *ok)
274 .count();
275 let has_actionable_command = contains_any(
276 &lower,
277 &[
278 "cargo ", "git ", "python ", "pip ", "npm ", "pnpm ", "yarn ", "bash ", "make ",
279 ],
280 );
281
282 RepairQualityGateReport {
283 root_cause,
284 fix,
285 verification,
286 rollback,
287 incident_anchor,
288 structure_score,
289 has_actionable_command,
290 }
291}
292
293impl ValidationReport {
294 pub fn to_snapshot(&self, profile: &str) -> ValidationSnapshot {
295 ValidationSnapshot {
296 success: self.success,
297 profile: profile.to_string(),
298 duration_ms: self.duration_ms,
299 summary: if self.success {
300 "validation passed".into()
301 } else {
302 "validation failed".into()
303 },
304 }
305 }
306}
307
308pub fn extract_deterministic_signals(input: &SignalExtractionInput) -> SignalExtractionOutput {
309 let mut signals = BTreeSet::new();
310
311 for declared in &input.declared_signals {
312 if let Some(phrase) = normalize_signal_phrase(declared) {
313 signals.insert(phrase);
314 }
315 extend_signal_tokens(&mut signals, declared);
316 }
317
318 for text in [
319 input.patch_diff.as_str(),
320 input.intent.as_str(),
321 input.expected_effect.as_str(),
322 input.validation_logs.as_str(),
323 ] {
324 extend_signal_tokens(&mut signals, text);
325 }
326
327 for changed_file in &input.changed_files {
328 extend_signal_tokens(&mut signals, changed_file);
329 }
330
331 for stage_output in &input.stage_outputs {
332 extend_signal_tokens(&mut signals, stage_output);
333 }
334
335 signals.insert(if input.validation_success {
336 "validation passed".into()
337 } else {
338 "validation failed".into()
339 });
340
341 let values = signals.into_iter().take(32).collect::<Vec<_>>();
342 let hash =
343 stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
344 SignalExtractionOutput { values, hash }
345}
346
/// Errors surfaced by a [`Validator`] run.
#[derive(Debug, Error)]
pub enum ValidationError {
    /// Any failure while executing the validation stages.
    #[error("validation execution failed: {0}")]
    Execution(String),
}
352
/// Runs a [`ValidationPlan`] against a sandboxed change and reports the
/// aggregated outcome.
#[async_trait]
pub trait Validator: Send + Sync {
    /// Executes `plan` in the workspace described by `receipt`.
    async fn run(
        &self,
        receipt: &SandboxReceipt,
        plan: &ValidationPlan,
    ) -> Result<ValidationReport, ValidationError>;
}
361
/// [`Validator`] implementation that shells out to allow-listed commands
/// inside the sandbox working directory.
pub struct CommandValidator {
    /// Sandbox policy used to authorize each validation command.
    policy: SandboxPolicy,
}

impl CommandValidator {
    /// Creates a validator bound to the given sandbox policy.
    pub fn new(policy: SandboxPolicy) -> Self {
        Self { policy }
    }
}
371
372#[async_trait]
373impl Validator for CommandValidator {
374 async fn run(
375 &self,
376 receipt: &SandboxReceipt,
377 plan: &ValidationPlan,
378 ) -> Result<ValidationReport, ValidationError> {
379 let started = std::time::Instant::now();
380 let mut stages = Vec::new();
381 let mut success = true;
382 let mut logs = String::new();
383
384 for stage in &plan.stages {
385 match stage {
386 ValidationStage::Command {
387 program,
388 args,
389 timeout_ms,
390 } => {
391 let result = execute_allowed_command(
392 &self.policy,
393 &receipt.workdir,
394 program,
395 args,
396 *timeout_ms,
397 )
398 .await;
399 let report = match result {
400 Ok(output) => ValidationStageReport {
401 stage: format!("{program} {}", args.join(" ")),
402 success: output.success,
403 exit_code: output.exit_code,
404 duration_ms: output.duration_ms,
405 stdout: output.stdout,
406 stderr: output.stderr,
407 },
408 Err(err) => ValidationStageReport {
409 stage: format!("{program} {}", args.join(" ")),
410 success: false,
411 exit_code: None,
412 duration_ms: 0,
413 stdout: String::new(),
414 stderr: err.to_string(),
415 },
416 };
417 if !report.success {
418 success = false;
419 }
420 if !report.stdout.is_empty() {
421 logs.push_str(&report.stdout);
422 logs.push('\n');
423 }
424 if !report.stderr.is_empty() {
425 logs.push_str(&report.stderr);
426 logs.push('\n');
427 }
428 stages.push(report);
429 if !success {
430 break;
431 }
432 }
433 }
434 }
435
436 Ok(ValidationReport {
437 success,
438 duration_ms: started.elapsed().as_millis() as u64,
439 stages,
440 logs,
441 })
442 }
443}
444
/// Evidence gathered while detecting whether a task matches a known
/// replayable task class.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct ReplayDetectEvidence {
    pub task_class_id: String,
    pub task_label: String,
    /// Signals that matched the task class.
    pub matched_signals: Vec<String>,
    /// Reasons the match was imperfect or rejected.
    pub mismatch_reasons: Vec<String>,
}
452
/// Per-candidate scoring row recorded in the replay selection evidence.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayCandidateEvidence {
    /// 1-based position in the ranked candidate list.
    pub rank: usize,
    pub gene_id: String,
    /// Id of the candidate's top capsule, when it has one.
    pub capsule_id: Option<String>,
    /// Raw selector score before any penalty.
    pub match_quality: f32,
    /// Confidence of the top capsule, when one exists.
    pub confidence: Option<f32>,
    pub environment_match_factor: Option<f32>,
    /// Penalty applied when the candidate came from an exact-match lookup.
    pub cold_start_penalty: f32,
    /// `match_quality * (1.0 - cold_start_penalty)`.
    pub final_score: f32,
}
464
/// Evidence of how replay candidates were ranked and which one was selected.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplaySelectEvidence {
    /// True when candidates came from exact-match lookup rather than the
    /// selector's ranked search.
    pub exact_match_lookup: bool,
    /// Gene/capsule of the top-ranked candidate, if any.
    pub selected_gene_id: Option<String>,
    pub selected_capsule_id: Option<String>,
    pub candidates: Vec<ReplayCandidateEvidence>,
}
472
/// Full outcome of a replay attempt, including audit evidence for each
/// phase (detect, select, economics).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayDecision {
    /// Whether a stored capsule was actually applied.
    pub used_capsule: bool,
    pub capsule_id: Option<CapsuleId>,
    /// True when the caller should fall back to full planning.
    pub fallback_to_planner: bool,
    /// Human-readable explanation of the decision.
    pub reason: String,
    pub detect_evidence: ReplayDetectEvidence,
    pub select_evidence: ReplaySelectEvidence,
    pub economics_evidence: ReplayRoiEvidence,
}
483
/// Replay ROI counters aggregated per task class.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayTaskClassMetrics {
    pub task_class_id: String,
    pub task_label: String,
    pub replay_success_total: u64,
    pub replay_failure_total: u64,
    pub reasoning_steps_avoided_total: u64,
    pub reasoning_avoided_tokens_total: u64,
    pub replay_fallback_cost_total: u64,
    /// Net return-on-investment for replays in this class; the formula is
    /// computed elsewhere in this crate (TODO confirm definition).
    pub replay_roi: f64,
}
495
/// Replay ROI counters aggregated per originating sender.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplaySourceRoiMetrics {
    /// Sender id the replayed assets were sourced from.
    pub source_sender_id: String,
    pub replay_success_total: u64,
    pub replay_failure_total: u64,
    pub reasoning_avoided_tokens_total: u64,
    pub replay_fallback_cost_total: u64,
    pub replay_roi: f64,
}
505
/// Windowed replay ROI summary with per-dimension breakdowns
/// (see [`REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS`]).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiWindowSummary {
    /// Timestamp string set by the producer (format: TODO confirm RFC 3339).
    pub generated_at: String,
    /// Length of the aggregation window, in seconds.
    pub window_seconds: u64,
    pub replay_attempts_total: u64,
    pub replay_success_total: u64,
    pub replay_failure_total: u64,
    pub reasoning_avoided_tokens_total: u64,
    pub replay_fallback_cost_total: u64,
    pub replay_roi: f64,
    /// Breakdown by task class.
    pub replay_task_classes: Vec<ReplayTaskClassMetrics>,
    /// Breakdown by source sender.
    pub replay_sources: Vec<ReplaySourceRoiMetrics>,
}
519
/// Thresholds a replay ROI window must satisfy for a release to proceed.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiReleaseGateThresholds {
    pub min_replay_attempts: u64,
    pub min_replay_hit_rate: f64,
    pub max_false_replay_rate: f64,
    pub min_reasoning_avoided_tokens: u64,
    pub min_replay_roi: f64,
    /// When true the safety signal is also required (enforcement lives
    /// outside this chunk).
    pub require_replay_safety: bool,
}

impl Default for ReplayRoiReleaseGateThresholds {
    /// Conservative defaults; the token minimum is shared with replay
    /// reasoning-token accounting via [`REPLAY_REASONING_TOKEN_FLOOR`].
    fn default() -> Self {
        Self {
            min_replay_attempts: 3,
            min_replay_hit_rate: 0.60,
            max_false_replay_rate: 0.25,
            min_reasoning_avoided_tokens: REPLAY_REASONING_TOKEN_FLOOR,
            min_replay_roi: 0.05,
            require_replay_safety: true,
        }
    }
}
542
/// Action taken when the replay ROI release gate trips. Only hard blocking
/// exists today; softer policies would be added as new variants.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum ReplayRoiReleaseGateAction {
    BlockRelease,
}
548
/// Fail-closed policy: the action to take for each class of gate problem.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct ReplayRoiReleaseGateFailClosedPolicy {
    /// A metric violated its threshold.
    pub on_threshold_violation: ReplayRoiReleaseGateAction,
    /// Required metrics were absent.
    pub on_missing_metrics: ReplayRoiReleaseGateAction,
    /// Metrics were present but unusable.
    pub on_invalid_metrics: ReplayRoiReleaseGateAction,
}

impl Default for ReplayRoiReleaseGateFailClosedPolicy {
    /// Strict fail-closed: every problem class blocks the release.
    fn default() -> Self {
        Self {
            on_threshold_violation: ReplayRoiReleaseGateAction::BlockRelease,
            on_missing_metrics: ReplayRoiReleaseGateAction::BlockRelease,
            on_invalid_metrics: ReplayRoiReleaseGateAction::BlockRelease,
        }
    }
}
565
/// Boolean safety posture accompanying the gate metrics.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct ReplayRoiReleaseGateSafetySignal {
    /// System defaults to fail-closed behavior.
    pub fail_closed_default: bool,
    /// A rollback path is in place.
    pub rollback_ready: bool,
    /// Audit trail for the window is complete.
    pub audit_trail_complete: bool,
    /// The window actually contained replay activity.
    pub has_replay_activity: bool,
}
573
/// Serialized input contract handed to the replay ROI release gate.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiReleaseGateInputContract {
    /// Timestamp string set by the producer (format: TODO confirm RFC 3339).
    pub generated_at: String,
    pub window_seconds: u64,
    /// Expected to mirror [`REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS`].
    pub aggregation_dimensions: Vec<String>,
    pub replay_attempts_total: u64,
    pub replay_success_total: u64,
    pub replay_failure_total: u64,
    pub replay_hit_rate: f64,
    pub false_replay_rate: f64,
    pub reasoning_avoided_tokens: u64,
    pub replay_fallback_cost_total: u64,
    pub replay_roi: f64,
    /// Overall safety verdict; detailed breakdown in `replay_safety_signal`.
    pub replay_safety: bool,
    pub replay_safety_signal: ReplayRoiReleaseGateSafetySignal,
    pub thresholds: ReplayRoiReleaseGateThresholds,
    pub fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy,
}
592
/// Verdict of the replay ROI release gate.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum ReplayRoiReleaseGateStatus {
    /// All checks passed.
    Pass,
    /// A check failed (or inputs were unusable) under fail-closed policy.
    FailClosed,
    /// No verdict could be reached.
    Indeterminate,
}
600
/// Serialized output contract emitted by the replay ROI release gate.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiReleaseGateOutputContract {
    pub status: ReplayRoiReleaseGateStatus,
    /// Names of the checks that did not pass.
    pub failed_checks: Vec<String>,
    /// References to supporting evidence artifacts.
    pub evidence_refs: Vec<String>,
    pub summary: String,
}
608
/// Input/output pair capturing one full release-gate evaluation.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiReleaseGateContract {
    pub input: ReplayRoiReleaseGateInputContract,
    pub output: ReplayRoiReleaseGateOutputContract,
}
614
/// Internal readiness classification of a coordinated task, derived from the
/// state of its dependencies (see `MultiAgentCoordinator::classify_task`).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum CoordinationTaskState {
    /// All prerequisites satisfied; eligible to run now.
    Ready,
    /// Has dependencies that are not yet resolved.
    Waiting,
    /// A dependency failed or was skipped, blocking this task.
    BlockedByFailure,
    /// Prerequisites can never be satisfied (e.g. unknown dependency id).
    PermanentlyBlocked,
}
622
/// Stateless, deterministic coordinator that drives a `CoordinationPlan`
/// over role-typed tasks (planner/coder/repair/optimizer).
#[derive(Clone, Debug, Default)]
pub struct MultiAgentCoordinator;
625
626impl MultiAgentCoordinator {
    /// Creates a coordinator; the type is stateless, so this is free.
    pub fn new() -> Self {
        Self
    }
630
    /// Drives a [`CoordinationPlan`] to completion and returns which tasks
    /// completed or failed plus the full coordination message log.
    ///
    /// Task execution is simulated via [`Self::simulate_task_failure`];
    /// ordering is deterministic because all bookkeeping uses B-tree
    /// collections. `timeout_ms` is reported in the summary but not enforced
    /// here.
    pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
        let primitive = plan.primitive.clone();
        let root_goal = plan.root_goal.clone();
        let timeout_ms = plan.timeout_ms;
        let max_retries = plan.max_retries;
        let mut tasks = BTreeMap::new();
        for task in plan.tasks {
            // First occurrence wins on duplicate task ids.
            tasks.entry(task.id.clone()).or_insert(task);
        }

        let mut pending = tasks.keys().cloned().collect::<BTreeSet<_>>();
        let mut completed = BTreeSet::new();
        let mut failed = BTreeSet::new();
        // *_order vectors preserve completion/failure sequence (sets do not).
        let mut completed_order = Vec::new();
        let mut failed_order = Vec::new();
        let mut skipped = BTreeSet::new();
        // task id -> number of failures so far, for retry accounting.
        let mut attempts = BTreeMap::new();
        let mut messages = Vec::new();

        loop {
            // Conditional mode prunes tasks whose dependency chain already
            // failed before computing the ready set.
            if matches!(primitive, CoordinationPrimitive::Conditional) {
                self.apply_conditional_skips(
                    &tasks,
                    &mut pending,
                    &completed,
                    &failed,
                    &mut skipped,
                    &mut messages,
                );
            }

            let mut ready = self.ready_task_ids(&tasks, &pending, &completed, &failed, &skipped);
            if ready.is_empty() {
                break;
            }
            if matches!(primitive, CoordinationPrimitive::Sequential) {
                // Sequential mode executes at most one task per round.
                ready.truncate(1);
            }

            for task_id in ready {
                let Some(task) = tasks.get(&task_id) else {
                    continue;
                };
                if !pending.contains(&task_id) {
                    continue;
                }
                self.record_handoff_messages(task, &tasks, &completed, &failed, &mut messages);

                let prior_failures = attempts.get(&task_id).copied().unwrap_or(0);
                if Self::simulate_task_failure(task, prior_failures) {
                    let failure_count = prior_failures + 1;
                    attempts.insert(task_id.clone(), failure_count);
                    // A task may fail up to `max_retries` times and still
                    // retry; it is only retired once the budget is exhausted.
                    let will_retry = failure_count <= max_retries;
                    messages.push(CoordinationMessage {
                        from_role: task.role.clone(),
                        to_role: task.role.clone(),
                        task_id: task_id.clone(),
                        content: if will_retry {
                            format!("task {task_id} failed on attempt {failure_count} and will retry")
                        } else {
                            format!(
                                "task {task_id} failed on attempt {failure_count} and exhausted retries"
                            )
                        },
                    });
                    if !will_retry {
                        pending.remove(&task_id);
                        if failed.insert(task_id.clone()) {
                            failed_order.push(task_id);
                        }
                    }
                    continue;
                }

                pending.remove(&task_id);
                if completed.insert(task_id.clone()) {
                    completed_order.push(task_id);
                }
            }
        }

        // Anything still pending is unrunnable; explain why and mark failed.
        let blocked_ids = pending.into_iter().collect::<Vec<_>>();
        for task_id in blocked_ids {
            let Some(task) = tasks.get(&task_id) else {
                continue;
            };
            let state = self.classify_task(task, &tasks, &completed, &failed, &skipped);
            let content = match state {
                CoordinationTaskState::BlockedByFailure => {
                    format!("task {task_id} blocked by failed dependencies")
                }
                CoordinationTaskState::PermanentlyBlocked => {
                    format!("task {task_id} has invalid coordination prerequisites")
                }
                CoordinationTaskState::Waiting => {
                    format!("task {task_id} has unresolved dependencies")
                }
                // Should be unreachable: ready tasks are drained above.
                CoordinationTaskState::Ready => {
                    format!("task {task_id} was left pending unexpectedly")
                }
            };
            messages.push(CoordinationMessage {
                from_role: task.role.clone(),
                to_role: task.role.clone(),
                task_id: task_id.clone(),
                content,
            });
            if failed.insert(task_id.clone()) {
                failed_order.push(task_id);
            }
        }

        CoordinationResult {
            completed_tasks: completed_order,
            failed_tasks: failed_order,
            messages,
            summary: format!(
                "goal '{}' completed {} tasks, failed {}, skipped {} using {:?} coordination (timeout={}ms, max_retries={})",
                root_goal,
                completed.len(),
                failed.len(),
                skipped.len(),
                primitive,
                timeout_ms,
                max_retries
            ),
        }
    }
759
760 fn ready_task_ids(
761 &self,
762 tasks: &BTreeMap<String, CoordinationTask>,
763 pending: &BTreeSet<String>,
764 completed: &BTreeSet<String>,
765 failed: &BTreeSet<String>,
766 skipped: &BTreeSet<String>,
767 ) -> Vec<String> {
768 pending
769 .iter()
770 .filter_map(|task_id| {
771 let task = tasks.get(task_id)?;
772 (self.classify_task(task, tasks, completed, failed, skipped)
773 == CoordinationTaskState::Ready)
774 .then(|| task_id.clone())
775 })
776 .collect()
777 }
778
779 fn apply_conditional_skips(
780 &self,
781 tasks: &BTreeMap<String, CoordinationTask>,
782 pending: &mut BTreeSet<String>,
783 completed: &BTreeSet<String>,
784 failed: &BTreeSet<String>,
785 skipped: &mut BTreeSet<String>,
786 messages: &mut Vec<CoordinationMessage>,
787 ) {
788 let skip_ids = pending
789 .iter()
790 .filter_map(|task_id| {
791 let task = tasks.get(task_id)?;
792 (self.classify_task(task, tasks, completed, failed, skipped)
793 == CoordinationTaskState::BlockedByFailure)
794 .then(|| task_id.clone())
795 })
796 .collect::<Vec<_>>();
797
798 for task_id in skip_ids {
799 let Some(task) = tasks.get(&task_id) else {
800 continue;
801 };
802 pending.remove(&task_id);
803 skipped.insert(task_id.clone());
804 messages.push(CoordinationMessage {
805 from_role: task.role.clone(),
806 to_role: task.role.clone(),
807 task_id: task_id.clone(),
808 content: format!("task {task_id} skipped due to failed dependency chain"),
809 });
810 }
811 }
812
    /// Classifies a task's readiness from the state of its dependencies.
    ///
    /// Role-specific rules:
    /// - Planner/Coder: ready once all dependencies completed; blocked if any
    ///   failed or was skipped.
    /// - Repair: exists to fix a Coder failure — ready only when a Coder
    ///   dependency has failed; permanently blocked if it has no Coder
    ///   dependency or nothing to repair.
    /// - Optimizer: ready when at least one implementation dependency
    ///   (Coder/Repair) completed and nothing is still outstanding.
    fn classify_task(
        &self,
        task: &CoordinationTask,
        tasks: &BTreeMap<String, CoordinationTask>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &BTreeSet<String>,
    ) -> CoordinationTaskState {
        match task.role {
            AgentRole::Planner | AgentRole::Coder => {
                let mut waiting = false;
                for dependency_id in &task.depends_on {
                    // A dependency on an unknown task can never resolve.
                    if !tasks.contains_key(dependency_id) {
                        return CoordinationTaskState::PermanentlyBlocked;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if waiting {
                    CoordinationTaskState::Waiting
                } else {
                    CoordinationTaskState::Ready
                }
            }
            AgentRole::Repair => {
                let mut waiting = false;
                let mut has_coder_dependency = false;
                let mut has_failed_coder = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_coder = matches!(dependency.role, AgentRole::Coder);
                    if is_coder {
                        has_coder_dependency = true;
                    }
                    if skipped.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if failed.contains(dependency_id) {
                        // A failed Coder is the repair's trigger; any other
                        // failed dependency blocks it.
                        if is_coder {
                            has_failed_coder = true;
                        } else {
                            return CoordinationTaskState::BlockedByFailure;
                        }
                        continue;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if !has_coder_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_failed_coder {
                    CoordinationTaskState::Ready
                } else {
                    // All Coder dependencies succeeded: nothing to repair.
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
            AgentRole::Optimizer => {
                let mut waiting = false;
                let mut has_impl_dependency = false;
                let mut has_completed_impl = false;
                let mut has_failed_impl = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    // "Implementation" dependencies are Coder or Repair tasks.
                    let is_impl = matches!(dependency.role, AgentRole::Coder | AgentRole::Repair);
                    if is_impl {
                        has_impl_dependency = true;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        if is_impl {
                            // Tolerated for now; decided after the loop.
                            has_failed_impl = true;
                            continue;
                        }
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if completed.contains(dependency_id) {
                        if is_impl {
                            has_completed_impl = true;
                        }
                        continue;
                    }
                    waiting = true;
                }
                if !has_impl_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_completed_impl {
                    CoordinationTaskState::Ready
                } else if has_failed_impl {
                    CoordinationTaskState::BlockedByFailure
                } else {
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
        }
    }
920
921 fn record_handoff_messages(
922 &self,
923 task: &CoordinationTask,
924 tasks: &BTreeMap<String, CoordinationTask>,
925 completed: &BTreeSet<String>,
926 failed: &BTreeSet<String>,
927 messages: &mut Vec<CoordinationMessage>,
928 ) {
929 let mut dependency_ids = task.depends_on.clone();
930 dependency_ids.sort();
931 dependency_ids.dedup();
932
933 for dependency_id in dependency_ids {
934 let Some(dependency) = tasks.get(&dependency_id) else {
935 continue;
936 };
937 if completed.contains(&dependency_id) {
938 messages.push(CoordinationMessage {
939 from_role: dependency.role.clone(),
940 to_role: task.role.clone(),
941 task_id: task.id.clone(),
942 content: format!("handoff from {dependency_id} to {}", task.id),
943 });
944 } else if failed.contains(&dependency_id) {
945 messages.push(CoordinationMessage {
946 from_role: dependency.role.clone(),
947 to_role: task.role.clone(),
948 task_id: task.id.clone(),
949 content: format!("failed dependency {dependency_id} routed to {}", task.id),
950 });
951 }
952 }
953 }
954
955 fn simulate_task_failure(task: &CoordinationTask, prior_failures: u32) -> bool {
956 let normalized = task.description.to_ascii_lowercase();
957 normalized.contains("force-fail")
958 || (normalized.contains("fail-once") && prior_failures == 0)
959 }
960}
961
/// Errors surfaced while attempting a replay.
#[derive(Debug, Error)]
pub enum ReplayError {
    /// Evolution-store read/write failure.
    #[error("store error: {0}")]
    Store(String),
    /// Sandbox provisioning or execution failure.
    #[error("sandbox error: {0}")]
    Sandbox(String),
    /// Validation pipeline failure.
    #[error("validation error: {0}")]
    Validation(String),
}
971
/// Attempts to satisfy a task by replaying a stored capsule instead of
/// planning from scratch.
#[async_trait]
pub trait ReplayExecutor: Send + Sync {
    /// Runs replay detection/selection for `input`, executing under `policy`
    /// and validating with `validation`.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError>;

    /// Run-scoped variant; the default implementation ignores `run_id` and
    /// delegates to [`ReplayExecutor::try_replay`].
    async fn try_replay_for_run(
        &self,
        run_id: &RunId,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        let _ = run_id;
        self.try_replay(input, policy, validation).await
    }
}
992
/// Store-backed [`ReplayExecutor`]: selects gene candidates from the
/// evolution store, with optional economics/reputation reranking.
pub struct StoreReplayExecutor {
    pub sandbox: Arc<dyn Sandbox>,
    pub validator: Arc<dyn Validator>,
    pub store: Arc<dyn EvolutionStore>,
    pub selector: Arc<dyn Selector>,
    pub governor: Arc<dyn Governor>,
    /// Optional EVU ledger; when present together with `remote_publishers`,
    /// candidate selection widens its limit (see `collect_replay_candidates`).
    pub economics: Option<Arc<Mutex<EvuLedger>>>,
    /// Optional remote publisher map (key/value schema: TODO confirm against
    /// the network sync path).
    pub remote_publishers: Option<Arc<Mutex<BTreeMap<String, String>>>>,
    pub stake_policy: StakePolicy,
}
1003
/// Internal result of candidate collection: the ranked candidates plus
/// whether they came from an exact-match (cold-start) lookup.
struct ReplayCandidates {
    candidates: Vec<GeneCandidate>,
    exact_match: bool,
}
1008
#[async_trait]
impl ReplayExecutor for StoreReplayExecutor {
    /// Replay without a run scope; delegates to the shared inner path.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        self.try_replay_inner(None, input, policy, validation).await
    }

    /// Replay scoped to `run_id`; delegates to the shared inner path.
    async fn try_replay_for_run(
        &self,
        run_id: &RunId,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        self.try_replay_inner(Some(run_id), input, policy, validation)
            .await
    }
}
1031
1032impl StoreReplayExecutor {
    /// Gathers replay candidates for `input`, trying progressively colder
    /// sources: selector ranking, then local exact-match lookup, then
    /// quarantined remote exact matches. Each tier is reputation-reranked.
    fn collect_replay_candidates(&self, input: &SelectorInput) -> ReplayCandidates {
        // Quarantine stale-confidence assets first so they cannot be picked.
        self.apply_confidence_revalidation();
        let mut selector_input = input.clone();
        if self.economics.is_some() && self.remote_publishers.is_some() {
            // With economics enabled, widen the pool so reputation
            // reranking has something to reorder.
            selector_input.limit = selector_input.limit.max(4);
        }
        let mut candidates = self.selector.select(&selector_input);
        self.rerank_with_reputation_bias(&mut candidates);
        let mut exact_match = false;
        if candidates.is_empty() {
            let mut exact_candidates = exact_match_candidates(self.store.as_ref(), input);
            self.rerank_with_reputation_bias(&mut exact_candidates);
            if !exact_candidates.is_empty() {
                candidates = exact_candidates;
                exact_match = true;
            }
        }
        if candidates.is_empty() {
            let mut remote_candidates =
                quarantined_remote_exact_match_candidates(self.store.as_ref(), input);
            self.rerank_with_reputation_bias(&mut remote_candidates);
            if !remote_candidates.is_empty() {
                candidates = remote_candidates;
                exact_match = true;
            }
        }
        // Never truncate below one slot, even if the caller asked for zero.
        candidates.truncate(input.limit.max(1));
        ReplayCandidates {
            candidates,
            exact_match,
        }
    }
1065
1066 fn build_select_evidence(
1067 &self,
1068 input: &SelectorInput,
1069 candidates: &[GeneCandidate],
1070 exact_match: bool,
1071 ) -> ReplaySelectEvidence {
1072 let cold_start_penalty = if exact_match {
1073 COLD_START_LOOKUP_PENALTY
1074 } else {
1075 0.0
1076 };
1077 let candidate_rows = candidates
1078 .iter()
1079 .enumerate()
1080 .map(|(idx, candidate)| {
1081 let top_capsule = candidate.capsules.first();
1082 let environment_match_factor = top_capsule
1083 .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env));
1084 let final_score = candidate.score * (1.0 - cold_start_penalty);
1085 ReplayCandidateEvidence {
1086 rank: idx + 1,
1087 gene_id: candidate.gene.id.clone(),
1088 capsule_id: top_capsule.map(|capsule| capsule.id.clone()),
1089 match_quality: candidate.score,
1090 confidence: top_capsule.map(|capsule| capsule.confidence),
1091 environment_match_factor,
1092 cold_start_penalty,
1093 final_score,
1094 }
1095 })
1096 .collect::<Vec<_>>();
1097
1098 ReplaySelectEvidence {
1099 exact_match_lookup: exact_match,
1100 selected_gene_id: candidate_rows
1101 .first()
1102 .map(|candidate| candidate.gene_id.clone()),
1103 selected_capsule_id: candidate_rows
1104 .first()
1105 .and_then(|candidate| candidate.capsule_id.clone()),
1106 candidates: candidate_rows,
1107 }
1108 }
1109
    /// Quarantine promoted assets whose replay confidence has decayed below
    /// `MIN_REPLAY_CONFIDENCE`, so they must be revalidated before replay.
    ///
    /// Best-effort: if the projection cannot be loaded, or an event append
    /// fails, the affected target is skipped rather than surfacing an error.
    fn apply_confidence_revalidation(&self) {
        let Ok(projection) = projection_snapshot(self.store.as_ref()) else {
            return;
        };
        for target in stale_replay_revalidation_targets(&projection, Utc::now()) {
            let reason = format!(
                "confidence decayed to {:.3}; revalidation required before replay",
                target.decayed_confidence
            );
            // Decayed confidence relative to the historical peak; 0.0 when
            // there is no positive peak to compare against.
            let confidence_decay_ratio = if target.peak_confidence > 0.0 {
                (target.decayed_confidence / target.peak_confidence).clamp(0.0, 1.0)
            } else {
                0.0
            };
            // If the gene-level quarantine event cannot be recorded, skip the
            // capsule quarantines too and move on to the next target.
            if self
                .store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: target.gene_id.clone(),
                    state: AssetState::Quarantined,
                    reason: reason.clone(),
                    reason_code: TransitionReasonCode::RevalidationConfidenceDecay,
                    evidence: Some(TransitionEvidence {
                        replay_attempts: None,
                        replay_successes: None,
                        replay_success_rate: None,
                        environment_match_factor: None,
                        decayed_confidence: Some(target.decayed_confidence),
                        confidence_decay_ratio: Some(confidence_decay_ratio),
                        summary: Some(format!(
                            "phase=confidence_revalidation; decayed_confidence={:.3}; confidence_decay_ratio={:.3}",
                            target.decayed_confidence, confidence_decay_ratio
                        )),
                    }),
                })
                .is_err()
            {
                continue;
            }
            // Quarantine each of the target's capsules; stop at the first
            // append failure.
            for capsule_id in target.capsule_ids {
                if self
                    .store
                    .append_event(EvolutionEvent::CapsuleQuarantined { capsule_id })
                    .is_err()
                {
                    break;
                }
            }
        }
    }
1159
1160 fn build_replay_economics_evidence(
1161 &self,
1162 input: &SelectorInput,
1163 candidate: Option<&GeneCandidate>,
1164 source_sender_id: Option<&str>,
1165 success: bool,
1166 reason_code: ReplayRoiReasonCode,
1167 reason: &str,
1168 ) -> ReplayRoiEvidence {
1169 let (task_class_id, task_label) =
1170 replay_descriptor_from_candidate_or_input(candidate, input);
1171 let signal_source = candidate
1172 .map(|best| best.gene.signals.as_slice())
1173 .unwrap_or(input.signals.as_slice());
1174 let baseline_tokens = estimated_reasoning_tokens(signal_source);
1175 let reasoning_avoided_tokens = if success { baseline_tokens } else { 0 };
1176 let replay_fallback_cost = if success { 0 } else { baseline_tokens };
1177 let asset_origin =
1178 candidate.and_then(|best| strategy_metadata_value(&best.gene.strategy, "asset_origin"));
1179 let mut context_dimensions = vec![
1180 format!(
1181 "outcome={}",
1182 if success {
1183 "replay_hit"
1184 } else {
1185 "planner_fallback"
1186 }
1187 ),
1188 format!("reason={reason}"),
1189 format!("task_class_id={task_class_id}"),
1190 format!("task_label={task_label}"),
1191 ];
1192 if let Some(asset_origin) = asset_origin.as_deref() {
1193 context_dimensions.push(format!("asset_origin={asset_origin}"));
1194 }
1195 if let Some(source_sender_id) = source_sender_id {
1196 context_dimensions.push(format!("source_sender_id={source_sender_id}"));
1197 }
1198 ReplayRoiEvidence {
1199 success,
1200 reason_code,
1201 task_class_id,
1202 task_label,
1203 reasoning_avoided_tokens,
1204 replay_fallback_cost,
1205 replay_roi: compute_replay_roi(reasoning_avoided_tokens, replay_fallback_cost),
1206 asset_origin,
1207 source_sender_id: source_sender_id.map(ToOwned::to_owned),
1208 context_dimensions,
1209 }
1210 }
1211
1212 fn record_replay_economics(
1213 &self,
1214 replay_run_id: Option<&RunId>,
1215 candidate: Option<&GeneCandidate>,
1216 capsule_id: Option<&str>,
1217 evidence: ReplayRoiEvidence,
1218 ) -> Result<(), ReplayError> {
1219 self.store
1220 .append_event(EvolutionEvent::ReplayEconomicsRecorded {
1221 gene_id: candidate.map(|best| best.gene.id.clone()),
1222 capsule_id: capsule_id.map(ToOwned::to_owned),
1223 replay_run_id: replay_run_id.cloned(),
1224 evidence,
1225 })
1226 .map_err(|err| ReplayError::Store(err.to_string()))?;
1227 Ok(())
1228 }
1229
    /// Attempt to fulfil a request by replaying a previously captured capsule
    /// instead of invoking the planner.
    ///
    /// Pipeline: collect candidates, pick the best, load its top capsule and
    /// declared mutation, apply the patch in the sandbox, validate, advance
    /// the remote shadow-validation state machine when applicable, then
    /// record the reuse. Every exit path — hit or miss — records
    /// replay-economics evidence; misses are reported as planner fallbacks
    /// inside the `ReplayDecision`, not as errors.
    ///
    /// # Errors
    /// Returns `ReplayError` only for store/validator infrastructure
    /// failures.
    async fn try_replay_inner(
        &self,
        replay_run_id: Option<&RunId>,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        let ReplayCandidates {
            candidates,
            exact_match,
        } = self.collect_replay_candidates(input);
        let mut detect_evidence = replay_detect_evidence_from_input(input);
        let select_evidence = self.build_select_evidence(input, &candidates, exact_match);
        // Miss: the selector (and fallbacks) produced no candidates at all.
        let Some(best) = candidates.into_iter().next() else {
            detect_evidence
                .mismatch_reasons
                .push("no_candidate_after_select".to_string());
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                None,
                None,
                false,
                ReplayRoiReasonCode::ReplayMissNoMatchingGene,
                "no matching gene",
            );
            self.record_replay_economics(replay_run_id, None, None, economics_evidence.clone())?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "no matching gene".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        };
        // Fill in the detection evidence from the winning candidate.
        let (detected_task_class_id, detected_task_label) =
            replay_descriptor_from_candidate_or_input(Some(&best), input);
        detect_evidence.task_class_id = detected_task_class_id;
        detect_evidence.task_label = detected_task_label;
        detect_evidence.matched_signals =
            matched_replay_signals(&input.signals, &best.gene.signals);
        // Miss: fuzzy match scored below the replay threshold. Exact-match
        // lookups bypass this gate.
        if !exact_match && best.score < 0.82 {
            detect_evidence
                .mismatch_reasons
                .push("score_below_threshold".to_string());
            let reason = format!("best gene score {:.3} below replay threshold", best.score);
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                None,
                false,
                ReplayRoiReasonCode::ReplayMissScoreBelowThreshold,
                &reason,
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                None,
                economics_evidence.clone(),
            )?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason,
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        }

        // Miss: the candidate gene exists but carries no capsule to replay.
        let Some(capsule) = best.capsules.first().cloned() else {
            detect_evidence
                .mismatch_reasons
                .push("candidate_has_no_capsule".to_string());
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                None,
                false,
                ReplayRoiReasonCode::ReplayMissCandidateHasNoCapsule,
                "candidate gene has no capsule",
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                None,
                economics_evidence.clone(),
            )?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "candidate gene has no capsule".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        };
        // Remote publisher (if any) is used for economics settlement and for
        // the shadow-validation progression below.
        let remote_publisher = self.publisher_for_capsule(&capsule.id);

        // Miss: the declared mutation payload backing the capsule is gone.
        let Some(mutation) = find_declared_mutation(self.store.as_ref(), &capsule.mutation_id)
            .map_err(|err| ReplayError::Store(err.to_string()))?
        else {
            detect_evidence
                .mismatch_reasons
                .push("mutation_payload_missing".to_string());
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                remote_publisher.as_deref(),
                false,
                ReplayRoiReasonCode::ReplayMissMutationPayloadMissing,
                "mutation payload missing from store",
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                Some(&capsule.id),
                economics_evidence.clone(),
            )?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "mutation payload missing from store".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        };

        // Apply the stored patch in the sandbox. A failed apply settles the
        // remote publisher's stake negatively and falls back to the planner.
        let receipt = match self.sandbox.apply(&mutation, policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                self.record_reuse_settlement(remote_publisher.as_deref(), false);
                let reason = format!("replay patch apply failed: {err}");
                let economics_evidence = self.build_replay_economics_evidence(
                    input,
                    Some(&best),
                    remote_publisher.as_deref(),
                    false,
                    ReplayRoiReasonCode::ReplayMissPatchApplyFailed,
                    &reason,
                );
                self.record_replay_economics(
                    replay_run_id,
                    Some(&best),
                    Some(&capsule.id),
                    economics_evidence.clone(),
                )?;
                detect_evidence
                    .mismatch_reasons
                    .push("patch_apply_failed".to_string());
                return Ok(ReplayDecision {
                    used_capsule: false,
                    capsule_id: Some(capsule.id.clone()),
                    fallback_to_planner: true,
                    reason,
                    detect_evidence,
                    select_evidence,
                    economics_evidence,
                });
            }
        };

        // Validate the applied patch. A failing report may revoke the gene
        // (see record_replay_validation_failure) and falls back to planner.
        let report = self
            .validator
            .run(&receipt, validation)
            .await
            .map_err(|err| ReplayError::Validation(err.to_string()))?;
        if !report.success {
            self.record_replay_validation_failure(&best, &capsule, validation, &report)?;
            self.record_reuse_settlement(remote_publisher.as_deref(), false);
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                remote_publisher.as_deref(),
                false,
                ReplayRoiReasonCode::ReplayMissValidationFailed,
                "replay validation failed",
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                Some(&capsule.id),
                economics_evidence.clone(),
            )?;
            detect_evidence
                .mismatch_reasons
                .push("validation_failed".to_string());
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: Some(capsule.id.clone()),
                fallback_to_planner: true,
                reason: "replay validation failed".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        }

        // Remote assets progress Quarantined -> ShadowValidated -> Promoted
        // as local replay evidence accumulates; local assets skip this.
        let requires_shadow_progression = remote_publisher.is_some()
            && matches!(
                capsule.state,
                AssetState::Quarantined | AssetState::ShadowValidated
            );
        if requires_shadow_progression {
            self.store
                .append_event(EvolutionEvent::ValidationPassed {
                    mutation_id: capsule.mutation_id.clone(),
                    report: report.to_snapshot(&validation.profile),
                    gene_id: Some(best.gene.id.clone()),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            let evidence = self.shadow_transition_evidence(&best.gene.id, &capsule, &input.env)?;
            // Decide the next state: first successful replay moves quarantine
            // to shadow; thereafter shadow either promotes (gate passed) or
            // holds while more evidence accumulates.
            let (target_state, reason_code, reason, promote_now, phase) =
                if matches!(best.gene.state, AssetState::Quarantined) {
                    (
                        AssetState::ShadowValidated,
                        TransitionReasonCode::PromotionShadowValidationPassed,
                        "remote asset passed first local replay and entered shadow validation"
                            .into(),
                        false,
                        "quarantine_to_shadow",
                    )
                } else if shadow_promotion_gate_passed(&evidence) {
                    (
                        AssetState::Promoted,
                        TransitionReasonCode::PromotionRemoteReplayValidated,
                        "shadow validation thresholds satisfied; remote asset promoted".into(),
                        true,
                        "shadow_to_promoted",
                    )
                } else {
                    (
                        AssetState::ShadowValidated,
                        TransitionReasonCode::ShadowCollectingReplayEvidence,
                        "shadow validation collecting additional replay evidence".into(),
                        false,
                        "shadow_hold",
                    )
                };
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: best.gene.id.clone(),
                    state: target_state.clone(),
                    reason,
                    reason_code,
                    evidence: Some(evidence.to_transition_evidence(shadow_evidence_summary(
                        &evidence,
                        promote_now,
                        phase,
                    ))),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            if promote_now {
                self.store
                    .append_event(EvolutionEvent::GenePromoted {
                        gene_id: best.gene.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
            self.store
                .append_event(EvolutionEvent::CapsuleReleased {
                    capsule_id: capsule.id.clone(),
                    state: target_state,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
        }

        // Record the successful reuse and settle economics positively.
        self.store
            .append_event(EvolutionEvent::CapsuleReused {
                capsule_id: capsule.id.clone(),
                gene_id: capsule.gene_id.clone(),
                run_id: capsule.run_id.clone(),
                replay_run_id: replay_run_id.cloned(),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        self.record_reuse_settlement(remote_publisher.as_deref(), true);
        let reason = if exact_match {
            "replayed via cold-start lookup".to_string()
        } else {
            "replayed via selector".to_string()
        };
        let economics_evidence = self.build_replay_economics_evidence(
            input,
            Some(&best),
            remote_publisher.as_deref(),
            true,
            ReplayRoiReasonCode::ReplayHit,
            &reason,
        );
        self.record_replay_economics(
            replay_run_id,
            Some(&best),
            Some(&capsule.id),
            economics_evidence.clone(),
        )?;

        Ok(ReplayDecision {
            used_capsule: true,
            capsule_id: Some(capsule.id),
            fallback_to_planner: false,
            reason,
            detect_evidence,
            select_evidence,
            economics_evidence,
        })
    }
1541
1542 fn rerank_with_reputation_bias(&self, candidates: &mut [GeneCandidate]) {
1543 let Some(ledger) = self.economics.as_ref() else {
1544 return;
1545 };
1546 let reputation_bias = ledger
1547 .lock()
1548 .ok()
1549 .map(|locked| locked.selector_reputation_bias())
1550 .unwrap_or_default();
1551 if reputation_bias.is_empty() {
1552 return;
1553 }
1554 let required_assets = candidates
1555 .iter()
1556 .filter_map(|candidate| {
1557 candidate
1558 .capsules
1559 .first()
1560 .map(|capsule| capsule.id.as_str())
1561 })
1562 .collect::<Vec<_>>();
1563 let publisher_map = self.remote_publishers_snapshot(&required_assets);
1564 if publisher_map.is_empty() {
1565 return;
1566 }
1567 candidates.sort_by(|left, right| {
1568 effective_candidate_score(right, &publisher_map, &reputation_bias)
1569 .partial_cmp(&effective_candidate_score(
1570 left,
1571 &publisher_map,
1572 &reputation_bias,
1573 ))
1574 .unwrap_or(std::cmp::Ordering::Equal)
1575 .then_with(|| left.gene.id.cmp(&right.gene.id))
1576 });
1577 }
1578
1579 fn publisher_for_capsule(&self, capsule_id: &str) -> Option<String> {
1580 self.remote_publishers_snapshot(&[capsule_id])
1581 .get(capsule_id)
1582 .cloned()
1583 }
1584
1585 fn remote_publishers_snapshot(&self, required_assets: &[&str]) -> BTreeMap<String, String> {
1586 let cached = self
1587 .remote_publishers
1588 .as_ref()
1589 .and_then(|remote_publishers| {
1590 remote_publishers.lock().ok().map(|locked| locked.clone())
1591 })
1592 .unwrap_or_default();
1593 if !cached.is_empty()
1594 && required_assets
1595 .iter()
1596 .all(|asset_id| cached.contains_key(*asset_id))
1597 {
1598 return cached;
1599 }
1600
1601 let persisted = remote_publishers_by_asset_from_store(self.store.as_ref());
1602 if persisted.is_empty() {
1603 return cached;
1604 }
1605
1606 let mut merged = cached;
1607 for (asset_id, sender_id) in persisted {
1608 merged.entry(asset_id).or_insert(sender_id);
1609 }
1610
1611 if let Some(remote_publishers) = self.remote_publishers.as_ref() {
1612 if let Ok(mut locked) = remote_publishers.lock() {
1613 for (asset_id, sender_id) in &merged {
1614 locked.entry(asset_id.clone()).or_insert(sender_id.clone());
1615 }
1616 }
1617 }
1618
1619 merged
1620 }
1621
1622 fn record_reuse_settlement(&self, publisher_id: Option<&str>, success: bool) {
1623 let Some(publisher_id) = publisher_id else {
1624 return;
1625 };
1626 let Some(ledger) = self.economics.as_ref() else {
1627 return;
1628 };
1629 if let Ok(mut locked) = ledger.lock() {
1630 locked.settle_remote_reuse(publisher_id, success, &self.stake_policy);
1631 }
1632 }
1633
    /// Record a failed replay validation and, if the governor decides so,
    /// revoke the gene and quarantine all of its capsules.
    ///
    /// Sequence: append `ValidationFailed`, count this gene's accumulated
    /// validation failures, ask the governor for a verdict, and — only when
    /// the verdict is `Revoked` — append the promotion-evaluation, gene
    /// revocation, and per-capsule quarantine events.
    ///
    /// # Errors
    /// Returns `ReplayError::Store` when any store read or append fails.
    fn record_replay_validation_failure(
        &self,
        best: &GeneCandidate,
        capsule: &Capsule,
        validation: &ValidationPlan,
        report: &ValidationReport,
    ) -> Result<(), ReplayError> {
        let projection = projection_snapshot(self.store.as_ref())
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            Self::confidence_context(&projection, &best.gene.id);

        self.store
            .append_event(EvolutionEvent::ValidationFailed {
                mutation_id: capsule.mutation_id.clone(),
                report: report.to_snapshot(&validation.profile),
                gene_id: Some(best.gene.id.clone()),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;

        // Counted after the append above, so this failure is included.
        let replay_failures = self.replay_failure_count(&best.gene.id)?;
        let source_sender_id = self.publisher_for_capsule(&capsule.id);
        let governor_decision = self.governor.evaluate(GovernorInput {
            candidate_source: if source_sender_id.is_some() {
                CandidateSource::Remote
            } else {
                CandidateSource::Local
            },
            success_count: 0,
            blast_radius: BlastRadius {
                files_changed: capsule.outcome.changed_files.len(),
                lines_changed: capsule.outcome.lines_changed,
            },
            replay_failures,
            recent_mutation_ages_secs: Vec::new(),
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        // Only a Revoked verdict triggers side effects; any other target
        // state leaves the gene untouched.
        if matches!(governor_decision.target_state, AssetState::Revoked) {
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: best.gene.id.clone(),
                    state: AssetState::Revoked,
                    reason: governor_decision.reason.clone(),
                    reason_code: governor_decision.reason_code.clone(),
                    evidence: Some(TransitionEvidence {
                        replay_attempts: Some(replay_failures),
                        replay_successes: None,
                        replay_success_rate: None,
                        environment_match_factor: None,
                        decayed_confidence: Some(current_confidence),
                        confidence_decay_ratio: if historical_peak_confidence > 0.0 {
                            Some((current_confidence / historical_peak_confidence).clamp(0.0, 1.0))
                        } else {
                            None
                        },
                        summary: Some(replay_failure_revocation_summary(
                            replay_failures,
                            current_confidence,
                            historical_peak_confidence,
                            source_sender_id.as_deref(),
                        )),
                    }),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: best.gene.id.clone(),
                    reason: governor_decision.reason,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            // Quarantine every capsule of the revoked gene, not only the one
            // that just failed.
            for related in &best.capsules {
                self.store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: related.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
        }

        Ok(())
    }
1718
1719 fn confidence_context(
1720 projection: &EvolutionProjection,
1721 gene_id: &str,
1722 ) -> (f32, f32, Option<u64>) {
1723 let peak_confidence = projection
1724 .capsules
1725 .iter()
1726 .filter(|capsule| capsule.gene_id == gene_id)
1727 .map(|capsule| capsule.confidence)
1728 .fold(0.0_f32, f32::max);
1729 let age_secs = projection
1730 .last_updated_at
1731 .get(gene_id)
1732 .and_then(|timestamp| Self::seconds_since_timestamp(timestamp, Utc::now()));
1733 (peak_confidence, peak_confidence, age_secs)
1734 }
1735
1736 fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
1737 let parsed = DateTime::parse_from_rfc3339(timestamp)
1738 .ok()?
1739 .with_timezone(&Utc);
1740 let elapsed = now.signed_duration_since(parsed);
1741 if elapsed < Duration::zero() {
1742 Some(0)
1743 } else {
1744 u64::try_from(elapsed.num_seconds()).ok()
1745 }
1746 }
1747
1748 fn replay_failure_count(&self, gene_id: &str) -> Result<u64, ReplayError> {
1749 Ok(self
1750 .store
1751 .scan(1)
1752 .map_err(|err| ReplayError::Store(err.to_string()))?
1753 .into_iter()
1754 .filter(|stored| {
1755 matches!(
1756 &stored.event,
1757 EvolutionEvent::ValidationFailed {
1758 gene_id: Some(current_gene_id),
1759 ..
1760 } if current_gene_id == gene_id
1761 )
1762 })
1763 .count() as u64)
1764 }
1765
1766 fn shadow_transition_evidence(
1767 &self,
1768 gene_id: &str,
1769 capsule: &Capsule,
1770 input_env: &EnvFingerprint,
1771 ) -> Result<ShadowTransitionEvidence, ReplayError> {
1772 let events = self
1773 .store
1774 .scan(1)
1775 .map_err(|err| ReplayError::Store(err.to_string()))?;
1776 let (replay_attempts, replay_successes) = events.iter().fold(
1777 (0_u64, 0_u64),
1778 |(attempts, successes), stored| match &stored.event {
1779 EvolutionEvent::ValidationPassed {
1780 gene_id: Some(current_gene_id),
1781 ..
1782 } if current_gene_id == gene_id => (attempts + 1, successes + 1),
1783 EvolutionEvent::ValidationFailed {
1784 gene_id: Some(current_gene_id),
1785 ..
1786 } if current_gene_id == gene_id => (attempts + 1, successes),
1787 _ => (attempts, successes),
1788 },
1789 );
1790 let replay_success_rate = safe_ratio(replay_successes, replay_attempts) as f32;
1791 let environment_match_factor = replay_environment_match_factor(input_env, &capsule.env);
1792 let projection = projection_snapshot(self.store.as_ref())
1793 .map_err(|err| ReplayError::Store(err.to_string()))?;
1794 let age_secs = projection
1795 .last_updated_at
1796 .get(gene_id)
1797 .and_then(|timestamp| Self::seconds_since_timestamp(timestamp, Utc::now()));
1798 let decayed_confidence = decayed_replay_confidence(capsule.confidence, age_secs);
1799 let confidence_decay_ratio = if capsule.confidence > 0.0 {
1800 (decayed_confidence / capsule.confidence).clamp(0.0, 1.0)
1801 } else {
1802 0.0
1803 };
1804
1805 Ok(ShadowTransitionEvidence {
1806 replay_attempts,
1807 replay_successes,
1808 replay_success_rate,
1809 environment_match_factor,
1810 decayed_confidence,
1811 confidence_decay_ratio,
1812 })
1813 }
1814}
1815
/// Replay-history evidence backing a shadow-validation state transition.
#[derive(Clone, Debug)]
struct ShadowTransitionEvidence {
    // Total validation attempts (passes + failures) recorded for the gene.
    replay_attempts: u64,
    // Validation passes recorded for the gene.
    replay_successes: u64,
    // Ratio of successes to attempts (computed via `safe_ratio`).
    replay_success_rate: f32,
    // How closely the capsule's captured environment matches the current one.
    environment_match_factor: f32,
    // Capsule confidence after time-based decay.
    decayed_confidence: f32,
    // decayed_confidence / stored confidence, clamped to [0, 1].
    confidence_decay_ratio: f32,
}
1825
1826impl ShadowTransitionEvidence {
1827 fn to_transition_evidence(&self, summary: String) -> TransitionEvidence {
1828 TransitionEvidence {
1829 replay_attempts: Some(self.replay_attempts),
1830 replay_successes: Some(self.replay_successes),
1831 replay_success_rate: Some(self.replay_success_rate),
1832 environment_match_factor: Some(self.environment_match_factor),
1833 decayed_confidence: Some(self.decayed_confidence),
1834 confidence_decay_ratio: Some(self.confidence_decay_ratio),
1835 summary: Some(summary),
1836 }
1837 }
1838}
1839
1840fn shadow_promotion_gate_passed(evidence: &ShadowTransitionEvidence) -> bool {
1841 evidence.replay_attempts >= SHADOW_PROMOTION_MIN_REPLAY_ATTEMPTS
1842 && evidence.replay_success_rate >= SHADOW_PROMOTION_MIN_SUCCESS_RATE
1843 && evidence.environment_match_factor >= SHADOW_PROMOTION_MIN_ENV_MATCH
1844 && evidence.decayed_confidence >= SHADOW_PROMOTION_MIN_DECAYED_CONFIDENCE
1845}
1846
/// Render shadow-validation evidence as a single `key=value; ...` summary
/// line for transition records, tagged with the pipeline phase and whether
/// the gene is being promoted now.
fn shadow_evidence_summary(
    evidence: &ShadowTransitionEvidence,
    promoted: bool,
    phase: &str,
) -> String {
    format!(
        "phase={phase}; replay_attempts={}; replay_successes={}; replay_success_rate={:.3}; environment_match_factor={:.3}; decayed_confidence={:.3}; confidence_decay_ratio={:.3}; promote={promoted}",
        evidence.replay_attempts,
        evidence.replay_successes,
        evidence.replay_success_rate,
        evidence.environment_match_factor,
        evidence.decayed_confidence,
        evidence.confidence_decay_ratio,
    )
}
1862
/// A promoted gene whose decayed replay confidence fell below the minimum,
/// together with the promoted capsules that must be quarantined with it.
#[derive(Clone, Debug, PartialEq)]
struct ConfidenceRevalidationTarget {
    gene_id: String,
    // Ids of the gene's promoted capsules at evaluation time.
    capsule_ids: Vec<String>,
    // Highest stored confidence across those capsules.
    peak_confidence: f32,
    // Highest confidence after time-based decay (below MIN_REPLAY_CONFIDENCE).
    decayed_confidence: f32,
}
1870
1871fn stale_replay_revalidation_targets(
1872 projection: &EvolutionProjection,
1873 now: DateTime<Utc>,
1874) -> Vec<ConfidenceRevalidationTarget> {
1875 projection
1876 .genes
1877 .iter()
1878 .filter(|gene| gene.state == AssetState::Promoted)
1879 .filter_map(|gene| {
1880 let promoted_capsules = projection
1881 .capsules
1882 .iter()
1883 .filter(|capsule| {
1884 capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
1885 })
1886 .collect::<Vec<_>>();
1887 if promoted_capsules.is_empty() {
1888 return None;
1889 }
1890 let age_secs = projection
1891 .last_updated_at
1892 .get(&gene.id)
1893 .and_then(|timestamp| seconds_since_timestamp_for_confidence(timestamp, now));
1894 let decayed_confidence = promoted_capsules
1895 .iter()
1896 .map(|capsule| decayed_replay_confidence(capsule.confidence, age_secs))
1897 .fold(0.0_f32, f32::max);
1898 if decayed_confidence >= MIN_REPLAY_CONFIDENCE {
1899 return None;
1900 }
1901 let peak_confidence = promoted_capsules
1902 .iter()
1903 .map(|capsule| capsule.confidence)
1904 .fold(0.0_f32, f32::max);
1905 Some(ConfidenceRevalidationTarget {
1906 gene_id: gene.id.clone(),
1907 capsule_ids: promoted_capsules
1908 .into_iter()
1909 .map(|capsule| capsule.id.clone())
1910 .collect(),
1911 peak_confidence,
1912 decayed_confidence,
1913 })
1914 })
1915 .collect()
1916}
1917
1918fn seconds_since_timestamp_for_confidence(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
1919 let parsed = DateTime::parse_from_rfc3339(timestamp)
1920 .ok()?
1921 .with_timezone(&Utc);
1922 let elapsed = now.signed_duration_since(parsed);
1923 if elapsed < Duration::zero() {
1924 Some(0)
1925 } else {
1926 u64::try_from(elapsed.num_seconds()).ok()
1927 }
1928}
1929
/// Errors surfaced by the evolution kernel and network-node operations.
#[derive(Debug, Error)]
pub enum EvoKernelError {
    /// The sandbox layer reported a failure.
    #[error("sandbox error: {0}")]
    Sandbox(String),
    /// A validation step failed to run, or a result could not be serialized.
    #[error("validation error: {0}")]
    Validation(String),
    /// The validator ran and reported failure; carries the full report.
    #[error("validation failed")]
    ValidationFailed(ValidationReport),
    /// The evolution event store rejected a read or append.
    #[error("store error: {0}")]
    Store(String),
}
1941
/// Outcome of a capture operation: the stored capsule, its gene, and the
/// governor's accompanying decision.
#[derive(Clone, Debug)]
pub struct CaptureOutcome {
    pub capsule: Capsule,
    pub gene: Gene,
    pub governor_decision: GovernorDecision,
}
1948
/// Result of importing remote assets (or builtin/reported experiences) into
/// the local store.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ImportOutcome {
    /// Ids of the assets imported by this operation.
    pub imported_asset_ids: Vec<String>,
    /// Whether the import as a whole was accepted.
    pub accepted: bool,
    /// Cursor for fetching the next page, when pagination applies.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub next_cursor: Option<String>,
    /// Opaque token allowing the sender to resume an interrupted sync.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resume_token: Option<String>,
    /// Audit trail of the synchronization exchange.
    #[serde(default)]
    pub sync_audit: SyncAudit,
}
1960
/// Aggregated evolution metrics, suitable for JSON export or Prometheus
/// rendering (see `render_evolution_metrics_prometheus`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct EvolutionMetricsSnapshot {
    // Replay outcome counters.
    pub replay_attempts_total: u64,
    pub replay_success_total: u64,
    pub replay_success_rate: f64,
    pub confidence_revalidations_total: u64,
    // Replay ROI accounting (token counts avoided vs. fallback cost).
    pub replay_reasoning_avoided_total: u64,
    pub reasoning_avoided_tokens_total: u64,
    pub replay_fallback_cost_total: u64,
    pub replay_roi: f64,
    // Per-task-class and per-source ROI breakdowns.
    pub replay_task_classes: Vec<ReplayTaskClassMetrics>,
    pub replay_sources: Vec<ReplaySourceRoiMetrics>,
    // Mutation lifecycle counters.
    pub mutation_declared_total: u64,
    pub promoted_mutations_total: u64,
    pub promotion_ratio: f64,
    pub gene_revocations_total: u64,
    // Rolling one-hour activity windows.
    pub mutation_velocity_last_hour: u64,
    pub revoke_frequency_last_hour: u64,
    // Current promoted population and event-log position.
    pub promoted_genes: u64,
    pub promoted_capsules: u64,
    pub last_event_seq: u64,
}
1983
/// Compact health view derived from the metrics snapshot
/// (see `evolution_health_snapshot`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct EvolutionHealthSnapshot {
    /// Overall health status label.
    pub status: String,
    /// Sequence number of the most recent event in the log.
    pub last_event_seq: u64,
    /// Currently promoted gene count.
    pub promoted_genes: u64,
    /// Currently promoted capsule count.
    pub promoted_capsules: u64,
}
1991
/// A node in the evolution network: wraps an event store and exposes
/// publish/fetch/revoke, experience-recording, and metrics operations on it.
#[derive(Clone)]
pub struct EvolutionNetworkNode {
    pub store: Arc<dyn EvolutionStore>,
}
1996
impl EvolutionNetworkNode {
    /// Build a node over an existing evolution store.
    pub fn new(store: Arc<dyn EvolutionStore>) -> Self {
        Self { store }
    }

    /// Build a node backed by a JSONL store rooted at the default location.
    pub fn with_default_store() -> Self {
        Self {
            store: Arc::new(JsonlEvolutionStore::new(default_store_root())),
        }
    }

    /// Import the assets from a publish request into the local store.
    ///
    /// Resolves the effective cursor from the request's `since_cursor` /
    /// `resume_token` pair before importing the wrapped envelope.
    pub fn accept_publish_request(
        &self,
        request: &PublishRequest,
    ) -> Result<ImportOutcome, EvoKernelError> {
        let requested_cursor = resolve_requested_cursor(
            &request.sender_id,
            request.since_cursor.as_deref(),
            request.resume_token.as_deref(),
        )?;
        import_remote_envelope_into_store(
            self.store.as_ref(),
            &EvolutionEnvelope::publish(request.sender_id.clone(), request.assets.clone()),
            None,
            requested_cursor,
        )
    }

    /// Seed the store with the builtin experience assets for `sender_id`.
    pub fn ensure_builtin_experience_assets(
        &self,
        sender_id: impl Into<String>,
    ) -> Result<ImportOutcome, EvoKernelError> {
        ensure_builtin_experience_assets_in_store(self.store.as_ref(), sender_id.into())
    }

    /// Record an externally reported experience (signals, strategy, and
    /// validation steps) for a gene into the store.
    pub fn record_reported_experience(
        &self,
        sender_id: impl Into<String>,
        gene_id: impl Into<String>,
        signals: Vec<String>,
        strategy: Vec<String>,
        validation: Vec<String>,
    ) -> Result<ImportOutcome, EvoKernelError> {
        record_reported_experience_in_store(
            self.store.as_ref(),
            sender_id.into(),
            gene_id.into(),
            signals,
            strategy,
            validation,
        )
    }

    /// Export this node's promoted assets as a publish envelope.
    pub fn publish_local_assets(
        &self,
        sender_id: impl Into<String>,
    ) -> Result<EvolutionEnvelope, EvoKernelError> {
        export_promoted_assets_from_store(self.store.as_ref(), sender_id)
    }

    /// Answer a fetch query against this node's store.
    pub fn fetch_assets(
        &self,
        responder_id: impl Into<String>,
        query: &FetchQuery,
    ) -> Result<FetchResponse, EvoKernelError> {
        fetch_assets_from_store(self.store.as_ref(), responder_id, query)
    }

    /// Apply a revocation notice to this node's store.
    pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
        revoke_assets_in_store(self.store.as_ref(), notice)
    }

    /// Compute the current metrics snapshot from the store.
    pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
        evolution_metrics_snapshot(self.store.as_ref())
    }

    /// Summarize replay ROI over the trailing `window_seconds` window for
    /// release gating.
    pub fn replay_roi_release_gate_summary(
        &self,
        window_seconds: u64,
    ) -> Result<ReplayRoiWindowSummary, EvoKernelError> {
        replay_roi_release_gate_summary(self.store.as_ref(), window_seconds)
    }

    /// Render the ROI release-gate summary as pretty-printed JSON.
    pub fn render_replay_roi_release_gate_summary_json(
        &self,
        window_seconds: u64,
    ) -> Result<String, EvoKernelError> {
        let summary = self.replay_roi_release_gate_summary(window_seconds)?;
        // Serialization failures are reported as validation errors.
        serde_json::to_string_pretty(&summary)
            .map_err(|err| EvoKernelError::Validation(err.to_string()))
    }

    /// Evaluate the windowed ROI summary against the given thresholds.
    pub fn replay_roi_release_gate_contract(
        &self,
        window_seconds: u64,
        thresholds: ReplayRoiReleaseGateThresholds,
    ) -> Result<ReplayRoiReleaseGateContract, EvoKernelError> {
        let summary = self.replay_roi_release_gate_summary(window_seconds)?;
        Ok(replay_roi_release_gate_contract(&summary, thresholds))
    }

    /// Render the ROI release-gate contract as pretty-printed JSON.
    pub fn render_replay_roi_release_gate_contract_json(
        &self,
        window_seconds: u64,
        thresholds: ReplayRoiReleaseGateThresholds,
    ) -> Result<String, EvoKernelError> {
        let contract = self.replay_roi_release_gate_contract(window_seconds, thresholds)?;
        serde_json::to_string_pretty(&contract)
            .map_err(|err| EvoKernelError::Validation(err.to_string()))
    }

    /// Render the metrics snapshot (plus derived health) in Prometheus
    /// exposition format.
    pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
        self.metrics_snapshot().map(|snapshot| {
            let health = evolution_health_snapshot(&snapshot);
            render_evolution_metrics_prometheus(&snapshot, &health)
        })
    }

    /// Derive the compact health view from the current metrics snapshot.
    pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
        self.metrics_snapshot()
            .map(|snapshot| evolution_health_snapshot(&snapshot))
    }
}
2120
/// The evolution kernel: couples a base kernel with the sandbox, validator,
/// event store, selector, governor, and economics machinery used to capture
/// and replay mutations.
pub struct EvoKernel<S: KernelState> {
    pub kernel: Arc<Kernel<S>>,
    pub sandbox: Arc<dyn Sandbox>,
    pub validator: Arc<dyn Validator>,
    pub store: Arc<dyn EvolutionStore>,
    pub selector: Arc<dyn Selector>,
    pub governor: Arc<dyn Governor>,
    /// EVU ledger used for replay/reuse economics settlement.
    pub economics: Arc<Mutex<EvuLedger>>,
    /// Cache mapping asset/capsule ids to their remote publisher (sender) id.
    pub remote_publishers: Arc<Mutex<BTreeMap<String, String>>>,
    pub stake_policy: StakePolicy,
    pub sandbox_policy: SandboxPolicy,
    pub validation_plan: ValidationPlan,
}
2134
2135impl<S: KernelState> EvoKernel<S> {
2136 fn recent_prior_mutation_ages_secs(
2137 &self,
2138 exclude_mutation_id: Option<&str>,
2139 ) -> Result<Vec<u64>, EvolutionError> {
2140 let now = Utc::now();
2141 let mut ages = self
2142 .store
2143 .scan(1)?
2144 .into_iter()
2145 .filter_map(|stored| match stored.event {
2146 EvolutionEvent::MutationDeclared { mutation }
2147 if exclude_mutation_id != Some(mutation.intent.id.as_str()) =>
2148 {
2149 Self::seconds_since_timestamp(&stored.timestamp, now)
2150 }
2151 _ => None,
2152 })
2153 .collect::<Vec<_>>();
2154 ages.sort_unstable();
2155 Ok(ages)
2156 }
2157
2158 fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
2159 let parsed = DateTime::parse_from_rfc3339(timestamp)
2160 .ok()?
2161 .with_timezone(&Utc);
2162 let elapsed = now.signed_duration_since(parsed);
2163 if elapsed < Duration::zero() {
2164 Some(0)
2165 } else {
2166 u64::try_from(elapsed.num_seconds()).ok()
2167 }
2168 }
2169
2170 pub fn new(
2171 kernel: Arc<Kernel<S>>,
2172 sandbox: Arc<dyn Sandbox>,
2173 validator: Arc<dyn Validator>,
2174 store: Arc<dyn EvolutionStore>,
2175 ) -> Self {
2176 let selector: Arc<dyn Selector> = Arc::new(StoreBackedSelector::new(store.clone()));
2177 Self {
2178 kernel,
2179 sandbox,
2180 validator,
2181 store,
2182 selector,
2183 governor: Arc::new(DefaultGovernor::default()),
2184 economics: Arc::new(Mutex::new(EvuLedger::default())),
2185 remote_publishers: Arc::new(Mutex::new(BTreeMap::new())),
2186 stake_policy: StakePolicy::default(),
2187 sandbox_policy: SandboxPolicy::oris_default(),
2188 validation_plan: ValidationPlan::oris_default(),
2189 }
2190 }
2191
2192 pub fn with_selector(mut self, selector: Arc<dyn Selector>) -> Self {
2193 self.selector = selector;
2194 self
2195 }
2196
2197 pub fn with_sandbox_policy(mut self, policy: SandboxPolicy) -> Self {
2198 self.sandbox_policy = policy;
2199 self
2200 }
2201
2202 pub fn with_governor(mut self, governor: Arc<dyn Governor>) -> Self {
2203 self.governor = governor;
2204 self
2205 }
2206
2207 pub fn with_economics(mut self, economics: Arc<Mutex<EvuLedger>>) -> Self {
2208 self.economics = economics;
2209 self
2210 }
2211
2212 pub fn with_stake_policy(mut self, policy: StakePolicy) -> Self {
2213 self.stake_policy = policy;
2214 self
2215 }
2216
2217 pub fn with_validation_plan(mut self, plan: ValidationPlan) -> Self {
2218 self.validation_plan = plan;
2219 self
2220 }
2221
2222 pub fn select_candidates(&self, input: &SelectorInput) -> Vec<GeneCandidate> {
2223 let executor = StoreReplayExecutor {
2224 sandbox: self.sandbox.clone(),
2225 validator: self.validator.clone(),
2226 store: self.store.clone(),
2227 selector: self.selector.clone(),
2228 governor: self.governor.clone(),
2229 economics: Some(self.economics.clone()),
2230 remote_publishers: Some(self.remote_publishers.clone()),
2231 stake_policy: self.stake_policy.clone(),
2232 };
2233 executor.collect_replay_candidates(input).candidates
2234 }
2235
    /// Seeds the evolution store with built-in templates when no genes exist
    /// yet; a no-op (default report) otherwise.
    ///
    /// Each seed is recorded as a full event sequence — declare, extract
    /// signals, project gene, quarantine decision, commit capsule, quarantine
    /// capsule — so bootstrap assets share the same audit trail as real
    /// mutations. Seeds always start quarantined: they must pass local
    /// validation before they become replayable.
    pub fn bootstrap_if_empty(&self, run_id: &RunId) -> Result<BootstrapReport, EvoKernelError> {
        let projection = projection_snapshot(self.store.as_ref())?;
        if !projection.genes.is_empty() {
            // Store already has genes; nothing to seed.
            return Ok(BootstrapReport::default());
        }

        let templates = built_in_seed_templates();
        for template in &templates {
            // Build the artifacts first so a failure leaves no partial events.
            let mutation = build_seed_mutation(template);
            let extracted = extract_seed_signals(template);
            let gene = build_bootstrap_gene(template, &extracted)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
            let capsule = build_bootstrap_capsule(run_id, template, &mutation, &gene)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;

            // Event order mirrors the live capture pipeline.
            self.store
                .append_event(EvolutionEvent::MutationDeclared {
                    mutation: mutation.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::SignalsExtracted {
                    mutation_id: mutation.intent.id.clone(),
                    hash: extracted.hash.clone(),
                    signals: extracted.values.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: gene.id.clone(),
                    state: AssetState::Quarantined,
                    reason: "bootstrap seeds require local validation before replay".into(),
                    reason_code: TransitionReasonCode::DowngradeBootstrapRequiresLocalValidation,
                    evidence: None,
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleCommitted {
                    capsule: capsule.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id,
                })
                .map_err(store_err)?;
        }

        Ok(BootstrapReport {
            seeded: true,
            genes_added: templates.len(),
            capsules_added: templates.len(),
        })
    }
2293
2294 pub async fn capture_successful_mutation(
2295 &self,
2296 run_id: &RunId,
2297 mutation: PreparedMutation,
2298 ) -> Result<Capsule, EvoKernelError> {
2299 Ok(self
2300 .capture_mutation_with_governor(run_id, mutation)
2301 .await?
2302 .capsule)
2303 }
2304
    /// Full capture pipeline for a prepared mutation: declare, sandbox-apply,
    /// validate, extract deterministic signals, project a gene, consult the
    /// governor, and commit a capsule.
    ///
    /// Every step appends an `EvolutionEvent`, so the store remains a complete
    /// audit trail even when the pipeline fails partway. Each failure path
    /// appends a `MutationRejected` event (carrying a normalized failure
    /// contract) before returning the error — the event order is part of the
    /// store's contract; do not reorder.
    pub async fn capture_mutation_with_governor(
        &self,
        run_id: &RunId,
        mutation: PreparedMutation,
    ) -> Result<CaptureOutcome, EvoKernelError> {
        // 1. Declare the mutation before attempting anything.
        self.store
            .append_event(EvolutionEvent::MutationDeclared {
                mutation: mutation.clone(),
            })
            .map_err(store_err)?;

        // 2. Apply under the sandbox policy; rejection is audited first.
        let receipt = match self.sandbox.apply(&mutation, &self.sandbox_policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                let message = err.to_string();
                let contract = mutation_needed_contract_for_error_message(&message);
                self.store
                    .append_event(EvolutionEvent::MutationRejected {
                        mutation_id: mutation.intent.id.clone(),
                        reason: contract.failure_reason,
                        reason_code: Some(
                            mutation_needed_reason_code_key(contract.reason_code).to_string(),
                        ),
                        recovery_hint: Some(contract.recovery_hint),
                        fail_closed: contract.fail_closed,
                    })
                    .map_err(store_err)?;
                return Err(EvoKernelError::Sandbox(message));
            }
        };

        self.store
            .append_event(EvolutionEvent::MutationApplied {
                mutation_id: mutation.intent.id.clone(),
                patch_hash: receipt.patch_hash.clone(),
                changed_files: receipt
                    .changed_files
                    .iter()
                    .map(|path| path.to_string_lossy().to_string())
                    .collect(),
            })
            .map_err(store_err)?;

        // 3. Run the validation plan. A validator *execution* error is
        //    distinct from a validation *failure* (handled below).
        let report = match self.validator.run(&receipt, &self.validation_plan).await {
            Ok(report) => report,
            Err(err) => {
                let message = format!("mutation-needed validation execution error: {err}");
                let contract = mutation_needed_contract_for_error_message(&message);
                self.store
                    .append_event(EvolutionEvent::MutationRejected {
                        mutation_id: mutation.intent.id.clone(),
                        reason: contract.failure_reason,
                        reason_code: Some(
                            mutation_needed_reason_code_key(contract.reason_code).to_string(),
                        ),
                        recovery_hint: Some(contract.recovery_hint),
                        fail_closed: contract.fail_closed,
                    })
                    .map_err(store_err)?;
                return Err(EvoKernelError::Validation(message));
            }
        };
        if !report.success {
            // Validation ran but failed: record the failure, then the rejection.
            self.store
                .append_event(EvolutionEvent::ValidationFailed {
                    mutation_id: mutation.intent.id.clone(),
                    report: report.to_snapshot(&self.validation_plan.profile),
                    gene_id: None,
                })
                .map_err(store_err)?;
            let contract = mutation_needed_contract_for_validation_failure(
                &self.validation_plan.profile,
                &report,
            );
            self.store
                .append_event(EvolutionEvent::MutationRejected {
                    mutation_id: mutation.intent.id.clone(),
                    reason: contract.failure_reason,
                    reason_code: Some(
                        mutation_needed_reason_code_key(contract.reason_code).to_string(),
                    ),
                    recovery_hint: Some(contract.recovery_hint),
                    fail_closed: contract.fail_closed,
                })
                .map_err(store_err)?;
            return Err(EvoKernelError::ValidationFailed(report));
        }

        self.store
            .append_event(EvolutionEvent::ValidationPassed {
                mutation_id: mutation.intent.id.clone(),
                report: report.to_snapshot(&self.validation_plan.profile),
                gene_id: None,
            })
            .map_err(store_err)?;

        // 4. Extract deterministic signals from the diff, intent, and
        //    validation output, and record them.
        let extracted_signals = extract_deterministic_signals(&SignalExtractionInput {
            patch_diff: mutation.artifact.payload.clone(),
            intent: mutation.intent.intent.clone(),
            expected_effect: mutation.intent.expected_effect.clone(),
            declared_signals: mutation.intent.signals.clone(),
            changed_files: receipt
                .changed_files
                .iter()
                .map(|path| path.to_string_lossy().to_string())
                .collect(),
            validation_success: report.success,
            validation_logs: report.logs.clone(),
            stage_outputs: report
                .stages
                .iter()
                .flat_map(|stage| [stage.stdout.clone(), stage.stderr.clone()])
                .filter(|value| !value.is_empty())
                .collect(),
        });
        self.store
            .append_event(EvolutionEvent::SignalsExtracted {
                mutation_id: mutation.intent.id.clone(),
                hash: extracted_signals.hash.clone(),
                signals: extracted_signals.values.clone(),
            })
            .map_err(store_err)?;

        // 5. Gather governor inputs: projection state, blast radius, ages of
        //    prior mutations (excluding this one), and confidence history.
        let projection = projection_snapshot(self.store.as_ref())?;
        let blast_radius = compute_blast_radius(&mutation.artifact.payload);
        let recent_mutation_ages_secs = self
            .recent_prior_mutation_ages_secs(Some(mutation.intent.id.as_str()))
            .map_err(store_err)?;
        let mut gene = derive_gene(
            &mutation,
            &receipt,
            &self.validation_plan.profile,
            &extracted_signals.values,
        );
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            StoreReplayExecutor::confidence_context(&projection, &gene.id);
        // Success count = prior capsules committed for this gene, plus this one.
        let success_count = projection
            .genes
            .iter()
            .find(|existing| existing.id == gene.id)
            .map(|existing| {
                projection
                    .capsules
                    .iter()
                    .filter(|capsule| capsule.gene_id == existing.id)
                    .count() as u64
            })
            .unwrap_or(0)
            + 1;
        let governor_decision = self.governor.evaluate(GovernorInput {
            candidate_source: CandidateSource::Local,
            success_count,
            blast_radius: blast_radius.clone(),
            replay_failures: 0,
            recent_mutation_ages_secs,
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        // 6. Project the gene in the governor-decided state and record the
        //    promotion evaluation (plus promote/revoke events as applicable).
        gene.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
            .map_err(store_err)?;
        self.store
            .append_event(EvolutionEvent::PromotionEvaluated {
                gene_id: gene.id.clone(),
                state: governor_decision.target_state.clone(),
                reason: governor_decision.reason.clone(),
                reason_code: governor_decision.reason_code.clone(),
                evidence: None,
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Promoted) {
            self.store
                .append_event(EvolutionEvent::GenePromoted {
                    gene_id: gene.id.clone(),
                })
                .map_err(store_err)?;
        }
        if matches!(governor_decision.target_state, AssetState::Revoked) {
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: gene.id.clone(),
                    reason: governor_decision.reason.clone(),
                })
                .map_err(store_err)?;
        }
        if let Some(spec_id) = &mutation.intent.spec_id {
            // Link back to the originating spec when the intent carries one.
            self.store
                .append_event(EvolutionEvent::SpecLinked {
                    mutation_id: mutation.intent.id.clone(),
                    spec_id: spec_id.clone(),
                })
                .map_err(store_err)?;
        }

        // 7. Commit the capsule in the same governor-decided state.
        let mut capsule = build_capsule(
            run_id,
            &mutation,
            &receipt,
            &report,
            &self.validation_plan.profile,
            &gene,
            &blast_radius,
        )
        .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
        capsule.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::CapsuleCommitted {
                capsule: capsule.clone(),
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Quarantined) {
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id.clone(),
                })
                .map_err(store_err)?;
        }

        Ok(CaptureOutcome {
            capsule,
            gene,
            governor_decision,
        })
    }
2532
2533 pub async fn capture_from_proposal(
2534 &self,
2535 run_id: &RunId,
2536 proposal: &AgentMutationProposal,
2537 diff_payload: String,
2538 base_revision: Option<String>,
2539 ) -> Result<CaptureOutcome, EvoKernelError> {
2540 let intent = MutationIntent {
2541 id: next_id("proposal"),
2542 intent: proposal.intent.clone(),
2543 target: MutationTarget::Paths {
2544 allow: proposal.files.clone(),
2545 },
2546 expected_effect: proposal.expected_effect.clone(),
2547 risk: RiskLevel::Low,
2548 signals: proposal.files.clone(),
2549 spec_id: None,
2550 };
2551 self.capture_mutation_with_governor(
2552 run_id,
2553 prepare_mutation(intent, diff_payload, base_revision),
2554 )
2555 .await
2556 }
2557
2558 pub fn feedback_for_agent(outcome: &CaptureOutcome) -> ExecutionFeedback {
2559 ExecutionFeedback {
2560 accepted: !matches!(outcome.governor_decision.target_state, AssetState::Revoked),
2561 asset_state: Some(format!("{:?}", outcome.governor_decision.target_state)),
2562 summary: outcome.governor_decision.reason.clone(),
2563 }
2564 }
2565
    /// Builds agent-facing replay feedback from a replay decision.
    ///
    /// When a capsule was reused the planner is told to skip; otherwise a
    /// normalized fallback contract (reason, repair hint, next action,
    /// confidence) is attached so the agent can plan a recovery.
    pub fn replay_feedback_for_agent(
        signals: &[String],
        decision: &ReplayDecision,
    ) -> ReplayFeedback {
        let (task_class_id, task_label) = replay_task_descriptor(signals);
        let planner_directive = if decision.used_capsule {
            ReplayPlannerDirective::SkipPlanner
        } else {
            ReplayPlannerDirective::PlanFallback
        };
        // One reasoning step is saved exactly when the capsule was reused.
        let reasoning_steps_avoided = u64::from(decision.used_capsule);
        // Use the first detect-stage mismatch (if any) to hint the reason code.
        let reason_code_hint = decision
            .detect_evidence
            .mismatch_reasons
            .first()
            .and_then(|reason| infer_replay_fallback_reason_code(reason));
        let fallback_contract = normalize_replay_fallback_contract(
            &planner_directive,
            decision
                .fallback_to_planner
                .then_some(decision.reason.as_str()),
            reason_code_hint,
            None,
            None,
            None,
        );
        let summary = if decision.used_capsule {
            format!("reused prior capsule for task class '{task_label}'; skip planner")
        } else {
            format!(
                "planner fallback required for task class '{task_label}': {}",
                decision.reason
            )
        };

        ReplayFeedback {
            used_capsule: decision.used_capsule,
            capsule_id: decision.capsule_id.clone(),
            planner_directive,
            reasoning_steps_avoided,
            // The contract fields are only populated on the fallback path.
            fallback_reason: fallback_contract
                .as_ref()
                .map(|contract| contract.fallback_reason.clone()),
            reason_code: fallback_contract
                .as_ref()
                .map(|contract| contract.reason_code),
            repair_hint: fallback_contract
                .as_ref()
                .map(|contract| contract.repair_hint.clone()),
            next_action: fallback_contract
                .as_ref()
                .map(|contract| contract.next_action),
            confidence: fallback_contract
                .as_ref()
                .map(|contract| contract.confidence),
            task_class_id,
            task_label,
            summary,
        }
    }
2626
    /// Builds a failure outcome for the supervised devloop, optionally
    /// auditing a `MutationRejected` event first (when a mutation id is
    /// available for the audit trail).
    fn mutation_needed_failure_outcome(
        &self,
        request: &SupervisedDevloopRequest,
        task_class: Option<BoundedTaskClass>,
        status: SupervisedDevloopStatus,
        contract: MutationNeededFailureContract,
        mutation_id_for_audit: Option<String>,
    ) -> Result<SupervisedDevloopOutcome, EvoKernelError> {
        if let Some(mutation_id) = mutation_id_for_audit {
            // Record the rejection in the store before shaping the outcome.
            self.store
                .append_event(EvolutionEvent::MutationRejected {
                    mutation_id,
                    reason: contract.failure_reason.clone(),
                    reason_code: Some(
                        mutation_needed_reason_code_key(contract.reason_code).to_string(),
                    ),
                    recovery_hint: Some(contract.recovery_hint.clone()),
                    fail_closed: contract.fail_closed,
                })
                .map_err(store_err)?;
        }
        // Human-readable status label used in the summary line.
        let status_label = match status {
            SupervisedDevloopStatus::AwaitingApproval => "awaiting_approval",
            SupervisedDevloopStatus::RejectedByPolicy => "rejected_by_policy",
            SupervisedDevloopStatus::FailedClosed => "failed_closed",
            SupervisedDevloopStatus::Executed => "executed",
        };
        let reason_code_key = mutation_needed_reason_code_key(contract.reason_code);
        Ok(SupervisedDevloopOutcome {
            task_id: request.task.id.clone(),
            task_class,
            status,
            execution_feedback: None,
            failure_contract: Some(contract.clone()),
            summary: format!(
                "supervised devloop {status_label} task '{}' [{reason_code_key}]: {}",
                request.task.id, contract.failure_reason
            ),
        })
    }
2667
    /// Runs one supervised devloop iteration: a sequence of fail-closed policy
    /// gates, then (only if all pass and the human approved) a full capture of
    /// the proposed mutation.
    ///
    /// Gate order: task-class classification → human approval → diff byte
    /// budget → changed-line budget → sandbox duration budget → validation
    /// timeout budget → capture. Each gate failure returns a normalized
    /// mutation-needed failure contract; the gate order is deliberate — do not
    /// reorder without revisiting the audit semantics.
    pub async fn run_supervised_devloop(
        &self,
        run_id: &RunId,
        request: &SupervisedDevloopRequest,
        diff_payload: String,
        base_revision: Option<String>,
    ) -> Result<SupervisedDevloopOutcome, EvoKernelError> {
        let audit_mutation_id = mutation_needed_audit_mutation_id(request);
        // Gate 1: the task must fall inside the bounded task classes.
        let task_class = classify_supervised_devloop_request(request);
        let Some(task_class) = task_class else {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "supervised devloop rejected task '{}' because it is an unsupported task outside the bounded scope",
                    request.task.id
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                None,
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        };

        // Gate 2: explicit human approval is required before any execution.
        if !request.approval.approved {
            return Ok(SupervisedDevloopOutcome {
                task_id: request.task.id.clone(),
                task_class: Some(task_class),
                status: SupervisedDevloopStatus::AwaitingApproval,
                execution_feedback: None,
                failure_contract: None,
                summary: format!(
                    "supervised devloop paused task '{}' until explicit human approval is granted",
                    request.task.id
                ),
            });
        }

        // Gate 3: bounded diff size in bytes.
        if diff_payload.len() > MUTATION_NEEDED_MAX_DIFF_BYTES {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed diff payload exceeds bounded byte budget (size={}, max={})",
                    diff_payload.len(),
                    MUTATION_NEEDED_MAX_DIFF_BYTES
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        }

        // Gate 4: bounded blast radius (changed lines); fails closed.
        let blast_radius = compute_blast_radius(&diff_payload);
        if blast_radius.lines_changed > MUTATION_NEEDED_MAX_CHANGED_LINES {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed patch exceeds bounded changed-line budget (lines_changed={}, max={})",
                    blast_radius.lines_changed,
                    MUTATION_NEEDED_MAX_CHANGED_LINES
                )),
                Some(MutationNeededFailureReasonCode::UnsafePatch),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::FailedClosed,
                contract,
                Some(audit_mutation_id),
            );
        }

        // Gate 5: the configured sandbox budget itself must be bounded.
        if self.sandbox_policy.max_duration_ms > MUTATION_NEEDED_MAX_SANDBOX_DURATION_MS {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed sandbox duration budget exceeds bounded policy (configured={}ms, max={}ms)",
                    self.sandbox_policy.max_duration_ms,
                    MUTATION_NEEDED_MAX_SANDBOX_DURATION_MS
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        }

        // Gate 6: the validation plan's total timeout budget must be bounded.
        let validation_budget_ms = validation_plan_timeout_budget_ms(&self.validation_plan);
        if validation_budget_ms > MUTATION_NEEDED_MAX_VALIDATION_BUDGET_MS {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed validation timeout budget exceeds bounded policy (configured={}ms, max={}ms)",
                    validation_budget_ms,
                    MUTATION_NEEDED_MAX_VALIDATION_BUDGET_MS
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        }

        // All gates passed: run the capture pipeline. Capture-time failures
        // are mapped back to failure contracts; the capture path has already
        // audited its own MutationRejected event, so no audit id is passed.
        let capture = match self
            .capture_from_proposal(run_id, &request.proposal, diff_payload, base_revision)
            .await
        {
            Ok(capture) => capture,
            Err(EvoKernelError::Sandbox(message)) => {
                let contract = mutation_needed_contract_for_error_message(&message);
                let status = mutation_needed_status_from_reason_code(contract.reason_code);
                return self.mutation_needed_failure_outcome(
                    request,
                    Some(task_class),
                    status,
                    contract,
                    None,
                );
            }
            Err(EvoKernelError::ValidationFailed(report)) => {
                let contract = mutation_needed_contract_for_validation_failure(
                    &self.validation_plan.profile,
                    &report,
                );
                let status = mutation_needed_status_from_reason_code(contract.reason_code);
                return self.mutation_needed_failure_outcome(
                    request,
                    Some(task_class),
                    status,
                    contract,
                    None,
                );
            }
            Err(EvoKernelError::Validation(message)) => {
                let contract = mutation_needed_contract_for_error_message(&message);
                let status = mutation_needed_status_from_reason_code(contract.reason_code);
                return self.mutation_needed_failure_outcome(
                    request,
                    Some(task_class),
                    status,
                    contract,
                    None,
                );
            }
            Err(err) => return Err(err),
        };
        let approver = request
            .approval
            .approver
            .as_deref()
            .unwrap_or("unknown approver");

        Ok(SupervisedDevloopOutcome {
            task_id: request.task.id.clone(),
            task_class: Some(task_class),
            status: SupervisedDevloopStatus::Executed,
            execution_feedback: Some(Self::feedback_for_agent(&capture)),
            failure_contract: None,
            summary: format!(
                "supervised devloop executed task '{}' with explicit approval from {approver}",
                request.task.id
            ),
        })
    }
2843 pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
2844 MultiAgentCoordinator::new().coordinate(plan)
2845 }
2846
2847 pub fn export_promoted_assets(
2848 &self,
2849 sender_id: impl Into<String>,
2850 ) -> Result<EvolutionEnvelope, EvoKernelError> {
2851 let sender_id = sender_id.into();
2852 let envelope = export_promoted_assets_from_store(self.store.as_ref(), sender_id.clone())?;
2853 if !envelope.assets.is_empty() {
2854 let mut ledger = self
2855 .economics
2856 .lock()
2857 .map_err(|_| EvoKernelError::Validation("economics ledger lock poisoned".into()))?;
2858 if ledger
2859 .reserve_publish_stake(&sender_id, &self.stake_policy)
2860 .is_none()
2861 {
2862 return Err(EvoKernelError::Validation(
2863 "insufficient EVU for remote publish".into(),
2864 ));
2865 }
2866 }
2867 Ok(envelope)
2868 }
2869
2870 pub fn import_remote_envelope(
2871 &self,
2872 envelope: &EvolutionEnvelope,
2873 ) -> Result<ImportOutcome, EvoKernelError> {
2874 import_remote_envelope_into_store(
2875 self.store.as_ref(),
2876 envelope,
2877 Some(self.remote_publishers.as_ref()),
2878 None,
2879 )
2880 }
2881
2882 pub fn fetch_assets(
2883 &self,
2884 responder_id: impl Into<String>,
2885 query: &FetchQuery,
2886 ) -> Result<FetchResponse, EvoKernelError> {
2887 fetch_assets_from_store(self.store.as_ref(), responder_id, query)
2888 }
2889
2890 pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
2891 revoke_assets_in_store(self.store.as_ref(), notice)
2892 }
2893
2894 pub async fn replay_or_fallback(
2895 &self,
2896 input: SelectorInput,
2897 ) -> Result<ReplayDecision, EvoKernelError> {
2898 let replay_run_id = next_id("replay");
2899 self.replay_or_fallback_for_run(&replay_run_id, input).await
2900 }
2901
2902 pub async fn replay_or_fallback_for_run(
2903 &self,
2904 run_id: &RunId,
2905 input: SelectorInput,
2906 ) -> Result<ReplayDecision, EvoKernelError> {
2907 let executor = StoreReplayExecutor {
2908 sandbox: self.sandbox.clone(),
2909 validator: self.validator.clone(),
2910 store: self.store.clone(),
2911 selector: self.selector.clone(),
2912 governor: self.governor.clone(),
2913 economics: Some(self.economics.clone()),
2914 remote_publishers: Some(self.remote_publishers.clone()),
2915 stake_policy: self.stake_policy.clone(),
2916 };
2917 executor
2918 .try_replay_for_run(run_id, &input, &self.sandbox_policy, &self.validation_plan)
2919 .await
2920 .map_err(|err| EvoKernelError::Validation(err.to_string()))
2921 }
2922
2923 pub fn economics_signal(&self, node_id: &str) -> Option<EconomicsSignal> {
2924 self.economics.lock().ok()?.governor_signal(node_id)
2925 }
2926
2927 pub fn selector_reputation_bias(&self) -> BTreeMap<String, f32> {
2928 self.economics
2929 .lock()
2930 .ok()
2931 .map(|locked| locked.selector_reputation_bias())
2932 .unwrap_or_default()
2933 }
2934
2935 pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
2936 evolution_metrics_snapshot(self.store.as_ref())
2937 }
2938
2939 pub fn replay_roi_release_gate_summary(
2940 &self,
2941 window_seconds: u64,
2942 ) -> Result<ReplayRoiWindowSummary, EvoKernelError> {
2943 replay_roi_release_gate_summary(self.store.as_ref(), window_seconds)
2944 }
2945
2946 pub fn render_replay_roi_release_gate_summary_json(
2947 &self,
2948 window_seconds: u64,
2949 ) -> Result<String, EvoKernelError> {
2950 let summary = self.replay_roi_release_gate_summary(window_seconds)?;
2951 serde_json::to_string_pretty(&summary)
2952 .map_err(|err| EvoKernelError::Validation(err.to_string()))
2953 }
2954
2955 pub fn replay_roi_release_gate_contract(
2956 &self,
2957 window_seconds: u64,
2958 thresholds: ReplayRoiReleaseGateThresholds,
2959 ) -> Result<ReplayRoiReleaseGateContract, EvoKernelError> {
2960 let summary = self.replay_roi_release_gate_summary(window_seconds)?;
2961 Ok(replay_roi_release_gate_contract(&summary, thresholds))
2962 }
2963
2964 pub fn render_replay_roi_release_gate_contract_json(
2965 &self,
2966 window_seconds: u64,
2967 thresholds: ReplayRoiReleaseGateThresholds,
2968 ) -> Result<String, EvoKernelError> {
2969 let contract = self.replay_roi_release_gate_contract(window_seconds, thresholds)?;
2970 serde_json::to_string_pretty(&contract)
2971 .map_err(|err| EvoKernelError::Validation(err.to_string()))
2972 }
2973
2974 pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
2975 self.metrics_snapshot().map(|snapshot| {
2976 let health = evolution_health_snapshot(&snapshot);
2977 render_evolution_metrics_prometheus(&snapshot, &health)
2978 })
2979 }
2980
2981 pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
2982 self.metrics_snapshot()
2983 .map(|snapshot| evolution_health_snapshot(&snapshot))
2984 }
2985}
2986
2987pub fn prepare_mutation(
2988 intent: MutationIntent,
2989 diff_payload: String,
2990 base_revision: Option<String>,
2991) -> PreparedMutation {
2992 PreparedMutation {
2993 intent,
2994 artifact: MutationArtifact {
2995 encoding: ArtifactEncoding::UnifiedDiff,
2996 content_hash: compute_artifact_hash(&diff_payload),
2997 payload: diff_payload,
2998 base_revision,
2999 },
3000 }
3001}
3002
3003pub fn prepare_mutation_from_spec(
3004 plan: CompiledMutationPlan,
3005 diff_payload: String,
3006 base_revision: Option<String>,
3007) -> PreparedMutation {
3008 prepare_mutation(plan.mutation_intent, diff_payload, base_revision)
3009}
3010
3011pub fn default_evolution_store() -> Arc<dyn EvolutionStore> {
3012 Arc::new(oris_evolution::JsonlEvolutionStore::new(
3013 default_store_root(),
3014 ))
3015}
3016
/// Returns the built-in bootstrap seed templates used by
/// `bootstrap_if_empty`. Each template carries a small, deterministic unified
/// diff; the diff text is part of the stored artifact hash, so it must not be
/// altered.
fn built_in_seed_templates() -> Vec<SeedTemplate> {
    vec![
        // Documentation recovery seed: creates a minimal README.
        SeedTemplate {
            id: "bootstrap-readme".into(),
            intent: "Seed a baseline README recovery pattern".into(),
            signals: vec!["bootstrap readme".into(), "missing readme".into()],
            diff_payload: "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1,3 @@
+# Oris
+Bootstrap documentation seed
+"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Test stabilization seed: adds a deterministic helper.
        SeedTemplate {
            id: "bootstrap-test-fix".into(),
            intent: "Seed a deterministic test stabilization pattern".into(),
            signals: vec!["bootstrap test fix".into(), "failing tests".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 1111111..2222222 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,2 @@
 pub fn demo() -> usize { 1 }
+pub fn normalize_test_output() -> bool { true }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Low-risk refactor seed.
        SeedTemplate {
            id: "bootstrap-refactor".into(),
            intent: "Seed a low-risk refactor capsule".into(),
            signals: vec!["bootstrap refactor".into(), "small refactor".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 2222222..3333333 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,3 @@
 pub fn demo() -> usize { 1 }
+
+fn extract_strategy_key(input: &str) -> &str { input }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Structured logging seed.
        SeedTemplate {
            id: "bootstrap-logging".into(),
            intent: "Seed a baseline structured logging mutation".into(),
            signals: vec!["bootstrap logging".into(), "structured logs".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 3333333..4444444 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,3 @@
 pub fn demo() -> usize { 1 }
+
+fn emit_bootstrap_log() { println!(\"bootstrap-log\"); }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
    ]
}
3088
3089fn build_seed_mutation(template: &SeedTemplate) -> PreparedMutation {
3090 let changed_files = seed_changed_files(&template.diff_payload);
3091 let target = if changed_files.is_empty() {
3092 MutationTarget::WorkspaceRoot
3093 } else {
3094 MutationTarget::Paths {
3095 allow: changed_files,
3096 }
3097 };
3098 prepare_mutation(
3099 MutationIntent {
3100 id: stable_hash_json(&("bootstrap-mutation", &template.id))
3101 .unwrap_or_else(|_| format!("bootstrap-mutation-{}", template.id)),
3102 intent: template.intent.clone(),
3103 target,
3104 expected_effect: format!("seed {}", template.id),
3105 risk: RiskLevel::Low,
3106 signals: template.signals.clone(),
3107 spec_id: None,
3108 },
3109 template.diff_payload.clone(),
3110 None,
3111 )
3112}
3113
3114fn extract_seed_signals(template: &SeedTemplate) -> SignalExtractionOutput {
3115 let mut signals = BTreeSet::new();
3116 for declared in &template.signals {
3117 if let Some(phrase) = normalize_signal_phrase(declared) {
3118 signals.insert(phrase);
3119 }
3120 extend_signal_tokens(&mut signals, declared);
3121 }
3122 extend_signal_tokens(&mut signals, &template.intent);
3123 extend_signal_tokens(&mut signals, &template.diff_payload);
3124 for changed_file in seed_changed_files(&template.diff_payload) {
3125 extend_signal_tokens(&mut signals, &changed_file);
3126 }
3127 let values = signals.into_iter().take(32).collect::<Vec<_>>();
3128 let hash =
3129 stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
3130 SignalExtractionOutput { values, hash }
3131}
3132
/// Collect the sorted, deduplicated set of file paths a unified diff touches.
///
/// Only `+++ b/<path>` headers are considered, matching the `diff --git`
/// payload format used by the seed templates; blank paths are discarded.
fn seed_changed_files(diff_payload: &str) -> Vec<String> {
    diff_payload
        .lines()
        .filter_map(|line| line.strip_prefix("+++ b/"))
        .map(str::trim)
        .filter(|path| !path.is_empty())
        .map(str::to_string)
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect()
}
3145
3146fn build_bootstrap_gene(
3147 template: &SeedTemplate,
3148 extracted: &SignalExtractionOutput,
3149) -> Result<Gene, EvolutionError> {
3150 let strategy = vec![template.id.clone(), "bootstrap".into()];
3151 let id = stable_hash_json(&(
3152 "bootstrap-gene",
3153 &template.id,
3154 &extracted.values,
3155 &template.validation_profile,
3156 ))?;
3157 Ok(Gene {
3158 id,
3159 signals: extracted.values.clone(),
3160 strategy,
3161 validation: vec![template.validation_profile.clone()],
3162 state: AssetState::Quarantined,
3163 })
3164}
3165
3166fn build_bootstrap_capsule(
3167 run_id: &RunId,
3168 template: &SeedTemplate,
3169 mutation: &PreparedMutation,
3170 gene: &Gene,
3171) -> Result<Capsule, EvolutionError> {
3172 let cwd = std::env::current_dir().unwrap_or_else(|_| Path::new(".").to_path_buf());
3173 let env = current_env_fingerprint(&cwd);
3174 let diff_hash = mutation.artifact.content_hash.clone();
3175 let changed_files = seed_changed_files(&template.diff_payload);
3176 let validator_hash = stable_hash_json(&(
3177 "bootstrap-validator",
3178 &template.id,
3179 &template.validation_profile,
3180 &diff_hash,
3181 ))?;
3182 let id = stable_hash_json(&(
3183 "bootstrap-capsule",
3184 &template.id,
3185 run_id,
3186 &gene.id,
3187 &diff_hash,
3188 &env,
3189 ))?;
3190 Ok(Capsule {
3191 id,
3192 gene_id: gene.id.clone(),
3193 mutation_id: mutation.intent.id.clone(),
3194 run_id: run_id.clone(),
3195 diff_hash,
3196 confidence: 0.0,
3197 env,
3198 outcome: Outcome {
3199 success: false,
3200 validation_profile: template.validation_profile.clone(),
3201 validation_duration_ms: 0,
3202 changed_files,
3203 validator_hash,
3204 lines_changed: compute_blast_radius(&template.diff_payload).lines_changed,
3205 replay_verified: false,
3206 },
3207 state: AssetState::Quarantined,
3208 })
3209}
3210
3211fn derive_gene(
3212 mutation: &PreparedMutation,
3213 receipt: &SandboxReceipt,
3214 validation_profile: &str,
3215 extracted_signals: &[String],
3216) -> Gene {
3217 let mut strategy = BTreeSet::new();
3218 for file in &receipt.changed_files {
3219 if let Some(component) = file.components().next() {
3220 strategy.insert(component.as_os_str().to_string_lossy().to_string());
3221 }
3222 }
3223 for token in mutation
3224 .artifact
3225 .payload
3226 .split(|ch: char| !ch.is_ascii_alphanumeric())
3227 {
3228 if token.len() == 5
3229 && token.starts_with('E')
3230 && token[1..].chars().all(|ch| ch.is_ascii_digit())
3231 {
3232 strategy.insert(token.to_string());
3233 }
3234 }
3235 for token in mutation.intent.intent.split_whitespace().take(8) {
3236 strategy.insert(token.to_ascii_lowercase());
3237 }
3238 let strategy = strategy.into_iter().collect::<Vec<_>>();
3239 let id = stable_hash_json(&(extracted_signals, &strategy, validation_profile))
3240 .unwrap_or_else(|_| next_id("gene"));
3241 Gene {
3242 id,
3243 signals: extracted_signals.to_vec(),
3244 strategy,
3245 validation: vec![validation_profile.to_string()],
3246 state: AssetState::Promoted,
3247 }
3248}
3249
3250fn build_capsule(
3251 run_id: &RunId,
3252 mutation: &PreparedMutation,
3253 receipt: &SandboxReceipt,
3254 report: &ValidationReport,
3255 validation_profile: &str,
3256 gene: &Gene,
3257 blast_radius: &BlastRadius,
3258) -> Result<Capsule, EvolutionError> {
3259 let env = current_env_fingerprint(&receipt.workdir);
3260 let validator_hash = stable_hash_json(report)?;
3261 let diff_hash = mutation.artifact.content_hash.clone();
3262 let id = stable_hash_json(&(run_id, &gene.id, &diff_hash, &mutation.intent.id))?;
3263 Ok(Capsule {
3264 id,
3265 gene_id: gene.id.clone(),
3266 mutation_id: mutation.intent.id.clone(),
3267 run_id: run_id.clone(),
3268 diff_hash,
3269 confidence: 0.7,
3270 env,
3271 outcome: oris_evolution::Outcome {
3272 success: true,
3273 validation_profile: validation_profile.to_string(),
3274 validation_duration_ms: report.duration_ms,
3275 changed_files: receipt
3276 .changed_files
3277 .iter()
3278 .map(|path| path.to_string_lossy().to_string())
3279 .collect(),
3280 validator_hash,
3281 lines_changed: blast_radius.lines_changed,
3282 replay_verified: false,
3283 },
3284 state: AssetState::Promoted,
3285 })
3286}
3287
3288fn current_env_fingerprint(workdir: &Path) -> EnvFingerprint {
3289 let rustc_version = Command::new("rustc")
3290 .arg("--version")
3291 .output()
3292 .ok()
3293 .filter(|output| output.status.success())
3294 .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
3295 .unwrap_or_else(|| "rustc unknown".into());
3296 let cargo_lock_hash = fs::read(workdir.join("Cargo.lock"))
3297 .ok()
3298 .map(|bytes| {
3299 let value = String::from_utf8_lossy(&bytes);
3300 compute_artifact_hash(&value)
3301 })
3302 .unwrap_or_else(|| "missing-cargo-lock".into());
3303 let target_triple = format!(
3304 "{}-unknown-{}",
3305 std::env::consts::ARCH,
3306 std::env::consts::OS
3307 );
3308 EnvFingerprint {
3309 rustc_version,
3310 cargo_lock_hash,
3311 target_triple,
3312 os: std::env::consts::OS.to_string(),
3313 }
3314}
3315
3316fn extend_signal_tokens(out: &mut BTreeSet<String>, input: &str) {
3317 for raw in input.split(|ch: char| !ch.is_ascii_alphanumeric()) {
3318 let trimmed = raw.trim();
3319 if trimmed.is_empty() {
3320 continue;
3321 }
3322 let normalized = if is_rust_error_code(trimmed) {
3323 let mut chars = trimmed.chars();
3324 let prefix = chars
3325 .next()
3326 .map(|ch| ch.to_ascii_uppercase())
3327 .unwrap_or('E');
3328 format!("{prefix}{}", chars.as_str())
3329 } else {
3330 trimmed.to_ascii_lowercase()
3331 };
3332 if normalized.len() < 3 {
3333 continue;
3334 }
3335 out.insert(normalized);
3336 }
3337}
3338
3339fn normalize_signal_phrase(input: &str) -> Option<String> {
3340 let normalized = input
3341 .split(|ch: char| !ch.is_ascii_alphanumeric())
3342 .filter_map(|raw| {
3343 let trimmed = raw.trim();
3344 if trimmed.is_empty() {
3345 return None;
3346 }
3347 let normalized = if is_rust_error_code(trimmed) {
3348 let mut chars = trimmed.chars();
3349 let prefix = chars
3350 .next()
3351 .map(|ch| ch.to_ascii_uppercase())
3352 .unwrap_or('E');
3353 format!("{prefix}{}", chars.as_str())
3354 } else {
3355 trimmed.to_ascii_lowercase()
3356 };
3357 if normalized.len() < 3 {
3358 None
3359 } else {
3360 Some(normalized)
3361 }
3362 })
3363 .collect::<Vec<_>>()
3364 .join(" ");
3365 if normalized.is_empty() {
3366 None
3367 } else {
3368 Some(normalized)
3369 }
3370}
3371
3372fn replay_task_descriptor(signals: &[String]) -> (String, String) {
3373 let normalized = signals
3374 .iter()
3375 .filter_map(|signal| normalize_signal_phrase(signal))
3376 .collect::<BTreeSet<_>>()
3377 .into_iter()
3378 .collect::<Vec<_>>();
3379 if normalized.is_empty() {
3380 return ("unknown".into(), "unknown".into());
3381 }
3382 let task_label = normalized
3383 .iter()
3384 .find(|value| {
3385 value.as_str() != "validation passed" && value.as_str() != "validation failed"
3386 })
3387 .cloned()
3388 .unwrap_or_else(|| normalized[0].clone());
3389 let task_class_id = stable_hash_json(&normalized)
3390 .unwrap_or_else(|_| compute_artifact_hash(&normalized.join("\n")));
3391 (task_class_id, task_label)
3392}
3393
3394fn normalized_signal_values(signals: &[String]) -> Vec<String> {
3395 signals
3396 .iter()
3397 .filter_map(|signal| normalize_signal_phrase(signal))
3398 .collect::<BTreeSet<_>>()
3399 .into_iter()
3400 .collect::<Vec<_>>()
3401}
3402
3403fn matched_replay_signals(input_signals: &[String], candidate_signals: &[String]) -> Vec<String> {
3404 let normalized_input = normalized_signal_values(input_signals);
3405 if normalized_input.is_empty() {
3406 return Vec::new();
3407 }
3408 let normalized_candidate = normalized_signal_values(candidate_signals);
3409 if normalized_candidate.is_empty() {
3410 return normalized_input;
3411 }
3412 let matched = normalized_input
3413 .iter()
3414 .filter(|signal| {
3415 normalized_candidate
3416 .iter()
3417 .any(|candidate| candidate.contains(signal.as_str()) || signal.contains(candidate))
3418 })
3419 .cloned()
3420 .collect::<Vec<_>>();
3421 if matched.is_empty() {
3422 normalized_input
3423 } else {
3424 matched
3425 }
3426}
3427
3428fn replay_detect_evidence_from_input(input: &SelectorInput) -> ReplayDetectEvidence {
3429 let (task_class_id, task_label) = replay_task_descriptor(&input.signals);
3430 ReplayDetectEvidence {
3431 task_class_id,
3432 task_label,
3433 matched_signals: normalized_signal_values(&input.signals),
3434 mismatch_reasons: Vec::new(),
3435 }
3436}
3437
3438fn replay_descriptor_from_candidate_or_input(
3439 candidate: Option<&GeneCandidate>,
3440 input: &SelectorInput,
3441) -> (String, String) {
3442 if let Some(candidate) = candidate {
3443 let task_class_id = strategy_metadata_value(&candidate.gene.strategy, "task_class");
3444 let task_label = strategy_metadata_value(&candidate.gene.strategy, "task_label");
3445 if let Some(task_class_id) = task_class_id {
3446 return (
3447 task_class_id.clone(),
3448 task_label.unwrap_or_else(|| task_class_id.clone()),
3449 );
3450 }
3451 return replay_task_descriptor(&candidate.gene.signals);
3452 }
3453 replay_task_descriptor(&input.signals)
3454}
3455
3456fn estimated_reasoning_tokens(signals: &[String]) -> u64 {
3457 let normalized = signals
3458 .iter()
3459 .filter_map(|signal| normalize_signal_phrase(signal))
3460 .collect::<BTreeSet<_>>();
3461 let signal_count = normalized.len() as u64;
3462 REPLAY_REASONING_TOKEN_FLOOR + REPLAY_REASONING_TOKEN_SIGNAL_WEIGHT * signal_count.max(1)
3463}
3464
/// Compute replay ROI in `[-1.0, 1.0]`: the normalized difference between
/// tokens avoided by replay and tokens spent on fallback.
///
/// Returns `0.0` when both inputs are zero. Arithmetic is done in `f64`
/// throughout so that `avoided + cost` cannot overflow `u64` (the previous
/// integer sum would panic in debug builds for very large inputs).
fn compute_replay_roi(reasoning_avoided_tokens: u64, replay_fallback_cost: u64) -> f64 {
    let avoided = reasoning_avoided_tokens as f64;
    let cost = replay_fallback_cost as f64;
    let total = avoided + cost;
    if total == 0.0 {
        return 0.0;
    }
    (avoided - cost) / total
}
3472
/// True when `value` looks like a rustc diagnostic code: a leading `e`/`E`
/// followed by exactly four ASCII digits (e.g. "E0308").
fn is_rust_error_code(value: &str) -> bool {
    let bytes = value.as_bytes();
    if bytes.len() != 5 || !matches!(bytes[0], b'e' | b'E') {
        return false;
    }
    // The leading byte is ASCII, so byte index 1 is a valid char boundary.
    value[1..].chars().all(|ch| ch.is_ascii_digit())
}
3478
3479fn validation_plan_timeout_budget_ms(plan: &ValidationPlan) -> u64 {
3480 plan.stages.iter().fold(0_u64, |acc, stage| match stage {
3481 ValidationStage::Command { timeout_ms, .. } => acc.saturating_add(*timeout_ms),
3482 })
3483}
3484
3485fn mutation_needed_reason_code_key(reason_code: MutationNeededFailureReasonCode) -> &'static str {
3486 match reason_code {
3487 MutationNeededFailureReasonCode::PolicyDenied => "policy_denied",
3488 MutationNeededFailureReasonCode::ValidationFailed => "validation_failed",
3489 MutationNeededFailureReasonCode::UnsafePatch => "unsafe_patch",
3490 MutationNeededFailureReasonCode::Timeout => "timeout",
3491 MutationNeededFailureReasonCode::MutationPayloadMissing => "mutation_payload_missing",
3492 MutationNeededFailureReasonCode::UnknownFailClosed => "unknown_fail_closed",
3493 }
3494}
3495
3496fn mutation_needed_status_from_reason_code(
3497 reason_code: MutationNeededFailureReasonCode,
3498) -> SupervisedDevloopStatus {
3499 if matches!(reason_code, MutationNeededFailureReasonCode::PolicyDenied) {
3500 SupervisedDevloopStatus::RejectedByPolicy
3501 } else {
3502 SupervisedDevloopStatus::FailedClosed
3503 }
3504}
3505
3506fn mutation_needed_contract_for_validation_failure(
3507 profile: &str,
3508 report: &ValidationReport,
3509) -> MutationNeededFailureContract {
3510 let lower_logs = report.logs.to_ascii_lowercase();
3511 if lower_logs.contains("timed out") {
3512 normalize_mutation_needed_failure_contract(
3513 Some(&format!(
3514 "mutation-needed validation command timed out under profile '{profile}'"
3515 )),
3516 Some(MutationNeededFailureReasonCode::Timeout),
3517 )
3518 } else {
3519 normalize_mutation_needed_failure_contract(
3520 Some(&format!(
3521 "mutation-needed validation failed under profile '{profile}'"
3522 )),
3523 Some(MutationNeededFailureReasonCode::ValidationFailed),
3524 )
3525 }
3526}
3527
3528fn mutation_needed_contract_for_error_message(message: &str) -> MutationNeededFailureContract {
3529 let reason_code = infer_mutation_needed_failure_reason_code(message);
3530 normalize_mutation_needed_failure_contract(Some(message), reason_code)
3531}
3532
3533fn mutation_needed_audit_mutation_id(request: &SupervisedDevloopRequest) -> String {
3534 stable_hash_json(&(
3535 "mutation-needed-audit",
3536 &request.task.id,
3537 &request.proposal.intent,
3538 &request.proposal.files,
3539 ))
3540 .map(|hash| format!("mutation-needed-{hash}"))
3541 .unwrap_or_else(|_| format!("mutation-needed-{}", request.task.id))
3542}
3543
3544fn classify_supervised_devloop_request(
3545 request: &SupervisedDevloopRequest,
3546) -> Option<BoundedTaskClass> {
3547 let file_count = normalized_supervised_devloop_docs_files(&request.proposal.files)?.len();
3548 match file_count {
3549 1 => Some(BoundedTaskClass::DocsSingleFile),
3550 2..=SUPERVISED_DEVLOOP_MAX_DOC_FILES => Some(BoundedTaskClass::DocsMultiFile),
3551 _ => None,
3552 }
3553}
3554
3555fn normalized_supervised_devloop_docs_files(files: &[String]) -> Option<Vec<String>> {
3556 if files.is_empty() || files.len() > SUPERVISED_DEVLOOP_MAX_DOC_FILES {
3557 return None;
3558 }
3559
3560 let mut normalized_files = Vec::with_capacity(files.len());
3561 let mut seen = BTreeSet::new();
3562
3563 for path in files {
3564 let normalized = path.trim().replace('\\', "/");
3565 if normalized.is_empty()
3566 || !normalized.starts_with("docs/")
3567 || !normalized.ends_with(".md")
3568 || !seen.insert(normalized.clone())
3569 {
3570 return None;
3571 }
3572 normalized_files.push(normalized);
3573 }
3574
3575 Some(normalized_files)
3576}
3577
3578fn find_declared_mutation(
3579 store: &dyn EvolutionStore,
3580 mutation_id: &MutationId,
3581) -> Result<Option<PreparedMutation>, EvolutionError> {
3582 for stored in store.scan(1)? {
3583 if let EvolutionEvent::MutationDeclared { mutation } = stored.event {
3584 if &mutation.intent.id == mutation_id {
3585 return Ok(Some(mutation));
3586 }
3587 }
3588 }
3589 Ok(None)
3590}
3591
/// Select promoted genes whose (lowercased) signal set exactly equals the
/// input's signal set, paired with their promoted capsules.
///
/// When the input names a spec id, genes are additionally filtered to those
/// linked to that spec (case-insensitive). Capsules within a candidate are
/// ordered by environment match, then confidence, then id; candidates are
/// ordered by score (best environment match of their top capsule), then
/// gene id. A store scan failure yields an empty result rather than an error.
fn exact_match_candidates(store: &dyn EvolutionStore, input: &SelectorInput) -> Vec<GeneCandidate> {
    let Ok(projection) = projection_snapshot(store) else {
        return Vec::new();
    };
    let capsules = projection.capsules.clone();
    let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
    // Treat a blank spec id the same as no spec filter at all.
    let requested_spec_id = input
        .spec_id
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty());
    // Case-insensitive signal comparison via lowercased sets.
    let signal_set = input
        .signals
        .iter()
        .map(|signal| signal.to_ascii_lowercase())
        .collect::<BTreeSet<_>>();
    let mut candidates = projection
        .genes
        .into_iter()
        .filter_map(|gene| {
            // Only promoted genes are eligible for exact-match replay.
            if gene.state != AssetState::Promoted {
                return None;
            }
            if let Some(spec_id) = requested_spec_id {
                let matches_spec = spec_ids_by_gene
                    .get(&gene.id)
                    .map(|values| {
                        values
                            .iter()
                            .any(|value| value.eq_ignore_ascii_case(spec_id))
                    })
                    .unwrap_or(false);
                if !matches_spec {
                    return None;
                }
            }
            let gene_signals = gene
                .signals
                .iter()
                .map(|signal| signal.to_ascii_lowercase())
                .collect::<BTreeSet<_>>();
            // Exact set equality — partial overlap is handled elsewhere.
            if gene_signals == signal_set {
                let mut matched_capsules = capsules
                    .iter()
                    .filter(|capsule| {
                        capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
                    })
                    .cloned()
                    .collect::<Vec<_>>();
                // Best environment match first, then highest confidence,
                // then lexicographic id as a deterministic tie-break.
                matched_capsules.sort_by(|left, right| {
                    replay_environment_match_factor(&input.env, &right.env)
                        .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
                        .unwrap_or(std::cmp::Ordering::Equal)
                        .then_with(|| {
                            right
                                .confidence
                                .partial_cmp(&left.confidence)
                                .unwrap_or(std::cmp::Ordering::Equal)
                        })
                        .then_with(|| left.id.cmp(&right.id))
                });
                if matched_capsules.is_empty() {
                    None
                } else {
                    // Score the candidate by its best capsule's env match.
                    let score = matched_capsules
                        .first()
                        .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
                        .unwrap_or(0.0);
                    Some(GeneCandidate {
                        gene,
                        score,
                        capsules: matched_capsules,
                    })
                }
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    // Highest score first; gene id as a deterministic tie-break.
    candidates.sort_by(|left, right| {
        right
            .score
            .partial_cmp(&left.score)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| left.gene.id.cmp(&right.gene.id))
    });
    candidates
}
3680
/// Select remote-imported gene candidates whose normalized signals overlap
/// the selector input, pairing each with its quarantined/shadow-validated
/// remote capsules.
///
/// Unlike [`exact_match_candidates`] this uses fuzzy, containment-based
/// signal overlap (any overlap qualifies) and only considers capsules whose
/// ids appeared in a `RemoteAssetImported` event. Scoring takes the max of
/// signal-overlap ratio and environment match. Store failures yield an empty
/// result rather than an error.
fn quarantined_remote_exact_match_candidates(
    store: &dyn EvolutionStore,
    input: &SelectorInput,
) -> Vec<GeneCandidate> {
    // Ids of every asset that arrived via a remote import event; only
    // capsules in this set are eligible below.
    let remote_asset_ids = store
        .scan(1)
        .ok()
        .map(|events| {
            events
                .into_iter()
                .filter_map(|stored| match stored.event {
                    EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids,
                        ..
                    } => Some(asset_ids),
                    _ => None,
                })
                .flatten()
                .collect::<BTreeSet<_>>()
        })
        .unwrap_or_default();
    if remote_asset_ids.is_empty() {
        return Vec::new();
    }

    let Ok(projection) = projection_snapshot(store) else {
        return Vec::new();
    };
    let capsules = projection.capsules.clone();
    let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
    // Blank spec ids are treated as no spec filter.
    let requested_spec_id = input
        .spec_id
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty());
    let normalized_signals = input
        .signals
        .iter()
        .filter_map(|signal| normalize_signal_phrase(signal))
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect::<Vec<_>>();
    if normalized_signals.is_empty() {
        return Vec::new();
    }
    let mut candidates = projection
        .genes
        .into_iter()
        .filter_map(|gene| {
            // Remote genes may sit in any of these states pre-promotion.
            if !matches!(
                gene.state,
                AssetState::Promoted | AssetState::Quarantined | AssetState::ShadowValidated
            ) {
                return None;
            }
            if let Some(spec_id) = requested_spec_id {
                let matches_spec = spec_ids_by_gene
                    .get(&gene.id)
                    .map(|values| {
                        values
                            .iter()
                            .any(|value| value.eq_ignore_ascii_case(spec_id))
                    })
                    .unwrap_or(false);
                if !matches_spec {
                    return None;
                }
            }
            let normalized_gene_signals = gene
                .signals
                .iter()
                .filter_map(|candidate| normalize_signal_phrase(candidate))
                .collect::<Vec<_>>();
            // Fuzzy overlap: containment in either direction counts.
            let matched_query_count = normalized_signals
                .iter()
                .filter(|signal| {
                    normalized_gene_signals.iter().any(|candidate| {
                        candidate.contains(signal.as_str()) || signal.contains(candidate)
                    })
                })
                .count();
            if matched_query_count == 0 {
                return None;
            }

            // Only remote, not-yet-promoted capsules qualify here.
            let mut matched_capsules = capsules
                .iter()
                .filter(|capsule| {
                    capsule.gene_id == gene.id
                        && matches!(
                            capsule.state,
                            AssetState::Quarantined | AssetState::ShadowValidated
                        )
                        && remote_asset_ids.contains(&capsule.id)
                })
                .cloned()
                .collect::<Vec<_>>();
            // Best environment match first, then confidence, then id.
            matched_capsules.sort_by(|left, right| {
                replay_environment_match_factor(&input.env, &right.env)
                    .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
                    .unwrap_or(std::cmp::Ordering::Equal)
                    .then_with(|| {
                        right
                            .confidence
                            .partial_cmp(&left.confidence)
                            .unwrap_or(std::cmp::Ordering::Equal)
                    })
                    .then_with(|| left.id.cmp(&right.id))
            });
            if matched_capsules.is_empty() {
                None
            } else {
                // Score is the better of signal-overlap ratio and the top
                // capsule's environment match factor.
                let overlap = matched_query_count as f32 / normalized_signals.len() as f32;
                let env_score = matched_capsules
                    .first()
                    .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
                    .unwrap_or(0.0);
                Some(GeneCandidate {
                    gene,
                    score: overlap.max(env_score),
                    capsules: matched_capsules,
                })
            }
        })
        .collect::<Vec<_>>();
    // Highest score first; gene id as a deterministic tie-break.
    candidates.sort_by(|left, right| {
        right
            .score
            .partial_cmp(&left.score)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| left.gene.id.cmp(&right.gene.id))
    });
    candidates
}
3816
3817fn replay_environment_match_factor(input: &EnvFingerprint, candidate: &EnvFingerprint) -> f32 {
3818 let fields = [
3819 input
3820 .rustc_version
3821 .eq_ignore_ascii_case(&candidate.rustc_version),
3822 input
3823 .cargo_lock_hash
3824 .eq_ignore_ascii_case(&candidate.cargo_lock_hash),
3825 input
3826 .target_triple
3827 .eq_ignore_ascii_case(&candidate.target_triple),
3828 input.os.eq_ignore_ascii_case(&candidate.os),
3829 ];
3830 let matched_fields = fields.into_iter().filter(|matched| *matched).count() as f32;
3831 0.5 + ((matched_fields / 4.0) * 0.5)
3832}
3833
3834fn effective_candidate_score(
3835 candidate: &GeneCandidate,
3836 publishers_by_asset: &BTreeMap<String, String>,
3837 reputation_bias: &BTreeMap<String, f32>,
3838) -> f32 {
3839 let bias = candidate
3840 .capsules
3841 .first()
3842 .and_then(|capsule| publishers_by_asset.get(&capsule.id))
3843 .and_then(|publisher| reputation_bias.get(publisher))
3844 .copied()
3845 .unwrap_or(0.0)
3846 .clamp(0.0, 1.0);
3847 candidate.score * (1.0 + (bias * 0.1))
3848}
3849
3850fn export_promoted_assets_from_store(
3851 store: &dyn EvolutionStore,
3852 sender_id: impl Into<String>,
3853) -> Result<EvolutionEnvelope, EvoKernelError> {
3854 let (events, projection) = scan_projection(store)?;
3855 let genes = projection
3856 .genes
3857 .into_iter()
3858 .filter(|gene| gene.state == AssetState::Promoted)
3859 .collect::<Vec<_>>();
3860 let capsules = projection
3861 .capsules
3862 .into_iter()
3863 .filter(|capsule| capsule.state == AssetState::Promoted)
3864 .collect::<Vec<_>>();
3865 let assets = replay_export_assets(&events, genes, capsules);
3866 Ok(EvolutionEnvelope::publish(sender_id, assets))
3867}
3868
3869fn scan_projection(
3870 store: &dyn EvolutionStore,
3871) -> Result<(Vec<StoredEvolutionEvent>, EvolutionProjection), EvoKernelError> {
3872 store.scan_projection().map_err(store_err)
3873}
3874
3875fn projection_snapshot(store: &dyn EvolutionStore) -> Result<EvolutionProjection, EvoKernelError> {
3876 scan_projection(store).map(|(_, projection)| projection)
3877}
3878
3879fn replay_export_assets(
3880 events: &[StoredEvolutionEvent],
3881 genes: Vec<Gene>,
3882 capsules: Vec<Capsule>,
3883) -> Vec<NetworkAsset> {
3884 let mutation_ids = capsules
3885 .iter()
3886 .map(|capsule| capsule.mutation_id.clone())
3887 .collect::<BTreeSet<_>>();
3888 let mut assets = replay_export_events_for_mutations(events, &mutation_ids);
3889 for gene in genes {
3890 assets.push(NetworkAsset::Gene { gene });
3891 }
3892 for capsule in capsules {
3893 assets.push(NetworkAsset::Capsule { capsule });
3894 }
3895 assets
3896}
3897
3898fn replay_export_events_for_mutations(
3899 events: &[StoredEvolutionEvent],
3900 mutation_ids: &BTreeSet<String>,
3901) -> Vec<NetworkAsset> {
3902 if mutation_ids.is_empty() {
3903 return Vec::new();
3904 }
3905
3906 let mut assets = Vec::new();
3907 let mut seen_mutations = BTreeSet::new();
3908 let mut seen_spec_links = BTreeSet::new();
3909 for stored in events {
3910 match &stored.event {
3911 EvolutionEvent::MutationDeclared { mutation }
3912 if mutation_ids.contains(mutation.intent.id.as_str())
3913 && seen_mutations.insert(mutation.intent.id.clone()) =>
3914 {
3915 assets.push(NetworkAsset::EvolutionEvent {
3916 event: EvolutionEvent::MutationDeclared {
3917 mutation: mutation.clone(),
3918 },
3919 });
3920 }
3921 EvolutionEvent::SpecLinked {
3922 mutation_id,
3923 spec_id,
3924 } if mutation_ids.contains(mutation_id.as_str())
3925 && seen_spec_links.insert((mutation_id.clone(), spec_id.clone())) =>
3926 {
3927 assets.push(NetworkAsset::EvolutionEvent {
3928 event: EvolutionEvent::SpecLinked {
3929 mutation_id: mutation_id.clone(),
3930 spec_id: spec_id.clone(),
3931 },
3932 });
3933 }
3934 _ => {}
3935 }
3936 }
3937
3938 assets
3939}
3940
// Prefix for sequence-based sync cursors, e.g. "seq:42".
const SYNC_CURSOR_PREFIX: &str = "seq:";
// Versioned resume-token prefix; the full token format is
// "gep-rt1|<sender>|<cursor>".
const SYNC_RESUME_TOKEN_PREFIX: &str = "gep-rt1|";

/// Ids of assets touched by events after a given sync cursor, grouped by
/// asset kind; used to scope an incremental sync export.
#[derive(Clone, Debug)]
struct DeltaWindow {
    changed_gene_ids: BTreeSet<String>,
    changed_capsule_ids: BTreeSet<String>,
    changed_mutation_ids: BTreeSet<String>,
}
3950
/// Trim an optional cursor/token string, mapping empty or whitespace-only
/// values to `None`.
fn normalize_sync_value(value: Option<&str>) -> Option<String> {
    match value.map(str::trim) {
        Some(trimmed) if !trimmed.is_empty() => Some(trimmed.to_owned()),
        _ => None,
    }
}
3957
3958fn parse_sync_cursor_seq(cursor: &str) -> Option<u64> {
3959 let trimmed = cursor.trim();
3960 if trimmed.is_empty() {
3961 return None;
3962 }
3963 let raw = trimmed.strip_prefix(SYNC_CURSOR_PREFIX).unwrap_or(trimmed);
3964 raw.parse::<u64>().ok()
3965}
3966
3967fn format_sync_cursor(seq: u64) -> String {
3968 format!("{SYNC_CURSOR_PREFIX}{seq}")
3969}
3970
3971fn encode_resume_token(sender_id: &str, cursor: &str) -> String {
3972 format!("{SYNC_RESUME_TOKEN_PREFIX}{sender_id}|{cursor}")
3973}
3974
3975fn decode_resume_token(sender_id: &str, token: &str) -> Result<String, EvoKernelError> {
3976 let token = token.trim();
3977 let Some(encoded) = token.strip_prefix(SYNC_RESUME_TOKEN_PREFIX) else {
3978 return Ok(token.to_string());
3979 };
3980 let (token_sender, cursor) = encoded.split_once('|').ok_or_else(|| {
3981 EvoKernelError::Validation(
3982 "invalid resume_token format; expected gep-rt1|<sender>|<seq>".into(),
3983 )
3984 })?;
3985 if token_sender != sender_id.trim() {
3986 return Err(EvoKernelError::Validation(
3987 "resume_token sender mismatch".into(),
3988 ));
3989 }
3990 Ok(cursor.to_string())
3991}
3992
3993fn resolve_requested_cursor(
3994 sender_id: &str,
3995 since_cursor: Option<&str>,
3996 resume_token: Option<&str>,
3997) -> Result<Option<String>, EvoKernelError> {
3998 let cursor = if let Some(token) = normalize_sync_value(resume_token) {
3999 Some(decode_resume_token(sender_id, &token)?)
4000 } else {
4001 normalize_sync_value(since_cursor)
4002 };
4003
4004 let Some(cursor) = cursor else {
4005 return Ok(None);
4006 };
4007 let seq = parse_sync_cursor_seq(&cursor).ok_or_else(|| {
4008 EvoKernelError::Validation("invalid since_cursor/resume_token cursor format".into())
4009 })?;
4010 Ok(Some(format_sync_cursor(seq)))
4011}
4012
4013fn latest_store_cursor(store: &dyn EvolutionStore) -> Result<Option<String>, EvoKernelError> {
4014 let events = store.scan(1).map_err(store_err)?;
4015 Ok(events.last().map(|stored| format_sync_cursor(stored.seq)))
4016}
4017
/// Summarize which asset ids changed after `since_seq` in the event log.
///
/// Walks every stored event with `seq > since_seq` and records the gene,
/// capsule, and mutation ids it touches; the result scopes an incremental
/// sync export to the assets that actually changed.
fn delta_window(events: &[StoredEvolutionEvent], since_seq: u64) -> DeltaWindow {
    let mut changed_gene_ids = BTreeSet::new();
    let mut changed_capsule_ids = BTreeSet::new();
    let mut changed_mutation_ids = BTreeSet::new();

    for stored in events {
        // Events at or before the cursor are already known to the peer.
        if stored.seq <= since_seq {
            continue;
        }
        match &stored.event {
            EvolutionEvent::MutationDeclared { mutation } => {
                changed_mutation_ids.insert(mutation.intent.id.clone());
            }
            EvolutionEvent::SpecLinked { mutation_id, .. } => {
                changed_mutation_ids.insert(mutation_id.clone());
            }
            EvolutionEvent::GeneProjected { gene } => {
                changed_gene_ids.insert(gene.id.clone());
            }
            EvolutionEvent::GenePromoted { gene_id }
            | EvolutionEvent::GeneRevoked { gene_id, .. }
            | EvolutionEvent::PromotionEvaluated { gene_id, .. } => {
                changed_gene_ids.insert(gene_id.clone());
            }
            EvolutionEvent::CapsuleCommitted { capsule } => {
                // A committed capsule implicates its gene and mutation too.
                changed_capsule_ids.insert(capsule.id.clone());
                changed_gene_ids.insert(capsule.gene_id.clone());
                changed_mutation_ids.insert(capsule.mutation_id.clone());
            }
            EvolutionEvent::CapsuleReleased { capsule_id, .. }
            | EvolutionEvent::CapsuleQuarantined { capsule_id } => {
                changed_capsule_ids.insert(capsule_id.clone());
            }
            EvolutionEvent::RemoteAssetImported { asset_ids, .. } => {
                // Imported ids may name genes or capsules; the event does not
                // distinguish the kind, so record each id in both sets.
                for asset_id in asset_ids {
                    changed_gene_ids.insert(asset_id.clone());
                    changed_capsule_ids.insert(asset_id.clone());
                }
            }
            _ => {}
        }
    }

    DeltaWindow {
        changed_gene_ids,
        changed_capsule_ids,
        changed_mutation_ids,
    }
}
4067
4068fn import_remote_envelope_into_store(
4069 store: &dyn EvolutionStore,
4070 envelope: &EvolutionEnvelope,
4071 remote_publishers: Option<&Mutex<BTreeMap<String, String>>>,
4072 requested_cursor: Option<String>,
4073) -> Result<ImportOutcome, EvoKernelError> {
4074 if !envelope.verify_content_hash() {
4075 record_manifest_validation(store, envelope, false, "invalid evolution envelope hash")?;
4076 return Err(EvoKernelError::Validation(
4077 "invalid evolution envelope hash".into(),
4078 ));
4079 }
4080 if let Err(reason) = envelope.verify_manifest() {
4081 record_manifest_validation(
4082 store,
4083 envelope,
4084 false,
4085 format!("manifest validation failed: {reason}"),
4086 )?;
4087 return Err(EvoKernelError::Validation(format!(
4088 "invalid evolution envelope manifest: {reason}"
4089 )));
4090 }
4091 record_manifest_validation(store, envelope, true, "manifest validated")?;
4092
4093 let sender_id = normalized_sender_id(&envelope.sender_id);
4094 let (events, projection) = scan_projection(store)?;
4095 let mut known_gene_ids = projection
4096 .genes
4097 .into_iter()
4098 .map(|gene| gene.id)
4099 .collect::<BTreeSet<_>>();
4100 let mut known_capsule_ids = projection
4101 .capsules
4102 .into_iter()
4103 .map(|capsule| capsule.id)
4104 .collect::<BTreeSet<_>>();
4105 let mut known_mutation_ids = BTreeSet::new();
4106 let mut known_spec_links = BTreeSet::new();
4107 for stored in &events {
4108 match &stored.event {
4109 EvolutionEvent::MutationDeclared { mutation } => {
4110 known_mutation_ids.insert(mutation.intent.id.clone());
4111 }
4112 EvolutionEvent::SpecLinked {
4113 mutation_id,
4114 spec_id,
4115 } => {
4116 known_spec_links.insert((mutation_id.clone(), spec_id.clone()));
4117 }
4118 _ => {}
4119 }
4120 }
4121 let mut imported_asset_ids = Vec::new();
4122 let mut applied_count = 0usize;
4123 let mut skipped_count = 0usize;
4124 for asset in &envelope.assets {
4125 match asset {
4126 NetworkAsset::Gene { gene } => {
4127 if !known_gene_ids.insert(gene.id.clone()) {
4128 skipped_count += 1;
4129 continue;
4130 }
4131 imported_asset_ids.push(gene.id.clone());
4132 applied_count += 1;
4133 let mut quarantined_gene = gene.clone();
4134 quarantined_gene.state = AssetState::Quarantined;
4135 store
4136 .append_event(EvolutionEvent::RemoteAssetImported {
4137 source: CandidateSource::Remote,
4138 asset_ids: vec![gene.id.clone()],
4139 sender_id: sender_id.clone(),
4140 })
4141 .map_err(store_err)?;
4142 store
4143 .append_event(EvolutionEvent::GeneProjected {
4144 gene: quarantined_gene.clone(),
4145 })
4146 .map_err(store_err)?;
4147 record_remote_publisher_for_asset(remote_publishers, &envelope.sender_id, asset);
4148 store
4149 .append_event(EvolutionEvent::PromotionEvaluated {
4150 gene_id: quarantined_gene.id,
4151 state: AssetState::Quarantined,
4152 reason: "remote asset requires local validation before promotion".into(),
4153 reason_code: TransitionReasonCode::DowngradeRemoteRequiresLocalValidation,
4154 evidence: Some(TransitionEvidence {
4155 replay_attempts: None,
4156 replay_successes: None,
4157 replay_success_rate: None,
4158 environment_match_factor: None,
4159 decayed_confidence: None,
4160 confidence_decay_ratio: None,
4161 summary: Some("phase=remote_import; source=remote; action=quarantine_before_shadow_validation".into()),
4162 }),
4163 })
4164 .map_err(store_err)?;
4165 }
4166 NetworkAsset::Capsule { capsule } => {
4167 if !known_capsule_ids.insert(capsule.id.clone()) {
4168 skipped_count += 1;
4169 continue;
4170 }
4171 imported_asset_ids.push(capsule.id.clone());
4172 applied_count += 1;
4173 store
4174 .append_event(EvolutionEvent::RemoteAssetImported {
4175 source: CandidateSource::Remote,
4176 asset_ids: vec![capsule.id.clone()],
4177 sender_id: sender_id.clone(),
4178 })
4179 .map_err(store_err)?;
4180 let mut quarantined = capsule.clone();
4181 quarantined.state = AssetState::Quarantined;
4182 store
4183 .append_event(EvolutionEvent::CapsuleCommitted {
4184 capsule: quarantined.clone(),
4185 })
4186 .map_err(store_err)?;
4187 record_remote_publisher_for_asset(remote_publishers, &envelope.sender_id, asset);
4188 store
4189 .append_event(EvolutionEvent::CapsuleQuarantined {
4190 capsule_id: quarantined.id,
4191 })
4192 .map_err(store_err)?;
4193 }
4194 NetworkAsset::EvolutionEvent { event } => {
4195 let should_append = match event {
4196 EvolutionEvent::MutationDeclared { mutation } => {
4197 known_mutation_ids.insert(mutation.intent.id.clone())
4198 }
4199 EvolutionEvent::SpecLinked {
4200 mutation_id,
4201 spec_id,
4202 } => known_spec_links.insert((mutation_id.clone(), spec_id.clone())),
4203 _ if should_import_remote_event(event) => true,
4204 _ => false,
4205 };
4206 if should_append {
4207 store.append_event(event.clone()).map_err(store_err)?;
4208 applied_count += 1;
4209 } else {
4210 skipped_count += 1;
4211 }
4212 }
4213 }
4214 }
4215 let next_cursor = latest_store_cursor(store)?;
4216 let resume_token = next_cursor.as_ref().and_then(|cursor| {
4217 normalized_sender_id(&envelope.sender_id).map(|sender| encode_resume_token(&sender, cursor))
4218 });
4219
4220 Ok(ImportOutcome {
4221 imported_asset_ids,
4222 accepted: true,
4223 next_cursor: next_cursor.clone(),
4224 resume_token,
4225 sync_audit: SyncAudit {
4226 batch_id: next_id("sync-import"),
4227 requested_cursor,
4228 scanned_count: envelope.assets.len(),
4229 applied_count,
4230 skipped_count,
4231 failed_count: 0,
4232 failure_reasons: Vec::new(),
4233 },
4234 })
4235}
4236
/// Directory (relative to `CARGO_MANIFEST_DIR`) holding the bundled EvoMap snapshot files.
const EVOMAP_SNAPSHOT_ROOT: &str = "assets/gep/evomap_snapshot";
/// Snapshot file with the serialized gene documents.
const EVOMAP_SNAPSHOT_GENES_FILE: &str = "genes.json";
/// Snapshot file with the serialized capsule documents.
const EVOMAP_SNAPSHOT_CAPSULES_FILE: &str = "capsules.json";
/// Synthetic run id stamped on capsules seeded from the built-in snapshot.
const EVOMAP_BUILTIN_RUN_ID: &str = "builtin-evomap-seed";
4241
/// Top-level shape of the bundled `genes.json` EvoMap snapshot.
#[derive(Debug, Deserialize)]
struct EvoMapGeneDocument {
    /// Gene entries; defaults to empty when the key is absent.
    #[serde(default)]
    genes: Vec<EvoMapGeneAsset>,
}
4247
/// One gene entry from the EvoMap snapshot. Every field except `id` is
/// optional so partially populated snapshots still deserialize.
#[derive(Debug, Deserialize)]
struct EvoMapGeneAsset {
    /// Snapshot gene identifier; rejected if empty after trimming.
    id: String,
    /// Recorded as `evomap_category` strategy metadata ("unknown" when absent).
    #[serde(default)]
    category: Option<String>,
    /// Matching signals; arbitrary JSON values rendered into signal strings.
    #[serde(default)]
    signals_match: Vec<Value>,
    /// Free-form strategy entries, some in `key=value` metadata form.
    #[serde(default)]
    strategy: Vec<String>,
    /// Validation markers carried over to the local gene.
    #[serde(default)]
    validation: Vec<String>,
    /// Optional constraint block folded into strategy metadata.
    #[serde(default)]
    constraints: Option<EvoMapConstraintAsset>,
    /// Recorded as `evomap_model_name` metadata ("unknown" when absent).
    #[serde(default)]
    model_name: Option<String>,
    /// Recorded as `evomap_schema_version` metadata (defaults to "1.5.0").
    #[serde(default)]
    schema_version: Option<String>,
    /// Compatibility state: a bare string or an object with a `state` field;
    /// drives the imported gene's `AssetState`.
    #[serde(default)]
    compatibility: Option<Value>,
}
4268
/// Optional gene constraints; both fields become strategy metadata entries.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapConstraintAsset {
    /// Maximum file count; `0` is recorded when absent.
    #[serde(default)]
    max_files: Option<usize>,
    /// Paths the gene must not touch; joined with `|` in metadata.
    #[serde(default)]
    forbidden_paths: Vec<String>,
}
4276
/// Top-level shape of the bundled `capsules.json` EvoMap snapshot.
#[derive(Debug, Deserialize)]
struct EvoMapCapsuleDocument {
    /// Capsule entries; defaults to empty when the key is absent.
    #[serde(default)]
    capsules: Vec<EvoMapCapsuleAsset>,
}
4282
/// One capsule entry from the EvoMap snapshot. Only `id` and `gene` are
/// required; everything else has a default so sparse snapshots load.
#[derive(Debug, Deserialize)]
struct EvoMapCapsuleAsset {
    /// Capsule identifier, reused as the local capsule id.
    id: String,
    /// Id of the gene this capsule belongs to; must exist in the snapshot.
    gene: String,
    /// Trigger signals, lowercased into the seeded mutation's signals.
    #[serde(default)]
    trigger: Vec<String>,
    /// Human summary; used as the mutation intent when non-empty.
    #[serde(default)]
    summary: String,
    /// Optional unified diff payload (possibly wrapped in a code fence);
    /// a synthetic diff is fabricated when missing or blank.
    #[serde(default)]
    diff: Option<String>,
    /// Confidence score; falls back to `outcome.score`, then 0.6.
    #[serde(default)]
    confidence: Option<f32>,
    /// Recorded outcome; `status == "success"` (default true) marks success.
    #[serde(default)]
    outcome: Option<EvoMapOutcomeAsset>,
    /// Blast-radius stats; `lines` feeds the seeded outcome's line count.
    #[serde(default)]
    blast_radius: Option<EvoMapBlastRadiusAsset>,
    /// Changed-file listing; parsed from the diff when absent.
    #[serde(default)]
    content: Option<EvoMapCapsuleContentAsset>,
    /// Loose environment object mapped onto a local `EnvFingerprint`.
    #[serde(default)]
    env_fingerprint: Option<Value>,
    /// Present in the snapshot but not used when seeding capsules.
    #[serde(default)]
    model_name: Option<String>,
    /// Present in the snapshot but not used when seeding capsules.
    #[serde(default)]
    schema_version: Option<String>,
    /// Compatibility state driving the imported capsule's `AssetState`.
    #[serde(default)]
    compatibility: Option<Value>,
}
4310
/// Recorded capsule outcome from the snapshot.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapOutcomeAsset {
    /// Outcome status; compared case-insensitively against "success".
    #[serde(default)]
    status: Option<String>,
    /// Outcome score; used as a confidence fallback.
    #[serde(default)]
    score: Option<f32>,
}
4318
/// Blast-radius statistics attached to a snapshot capsule.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapBlastRadiusAsset {
    /// Number of lines touched; copied to the seeded outcome's `lines_changed`.
    #[serde(default)]
    lines: usize,
}
4324
/// Content listing for a snapshot capsule.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapCapsuleContentAsset {
    /// Files the capsule changes; entries are trimmed and blanks dropped.
    #[serde(default)]
    changed_files: Vec<String>,
}
4330
/// A capsule seeded from the built-in EvoMap snapshot, paired with the
/// prepared mutation declared alongside it so replays can resolve the diff.
#[derive(Debug)]
struct BuiltinCapsuleSeed {
    /// Capsule committed into the local store.
    capsule: Capsule,
    /// Mutation (intent + diff artifact) the capsule's `mutation_id` refers to.
    mutation: PreparedMutation,
}
4336
/// Collection of built-in assets to seed into a store: hard-coded experience
/// genes plus any genes/capsules loaded from the EvoMap snapshot.
#[derive(Debug)]
struct BuiltinAssetBundle {
    /// Genes to project (and possibly promote) locally.
    genes: Vec<Gene>,
    /// Capsule seeds, each with its backing mutation.
    capsules: Vec<BuiltinCapsuleSeed>,
}
4342
4343fn built_in_experience_genes() -> Vec<Gene> {
4344 vec![
4345 Gene {
4346 id: "builtin-experience-docs-rewrite-v1".into(),
4347 signals: vec!["docs.rewrite".into(), "docs".into(), "rewrite".into()],
4348 strategy: vec![
4349 "asset_origin=builtin".into(),
4350 "task_class=docs.rewrite".into(),
4351 "task_label=Docs rewrite".into(),
4352 "template_id=builtin-docs-rewrite-v1".into(),
4353 "summary=baseline docs rewrite experience".into(),
4354 ],
4355 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4356 state: AssetState::Promoted,
4357 },
4358 Gene {
4359 id: "builtin-experience-ci-fix-v1".into(),
4360 signals: vec![
4361 "ci.fix".into(),
4362 "ci".into(),
4363 "test".into(),
4364 "failure".into(),
4365 ],
4366 strategy: vec![
4367 "asset_origin=builtin".into(),
4368 "task_class=ci.fix".into(),
4369 "task_label=CI fix".into(),
4370 "template_id=builtin-ci-fix-v1".into(),
4371 "summary=baseline ci stabilization experience".into(),
4372 ],
4373 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4374 state: AssetState::Promoted,
4375 },
4376 Gene {
4377 id: "builtin-experience-task-decomposition-v1".into(),
4378 signals: vec![
4379 "task.decomposition".into(),
4380 "task".into(),
4381 "decomposition".into(),
4382 "planning".into(),
4383 ],
4384 strategy: vec![
4385 "asset_origin=builtin".into(),
4386 "task_class=task.decomposition".into(),
4387 "task_label=Task decomposition".into(),
4388 "template_id=builtin-task-decomposition-v1".into(),
4389 "summary=baseline task decomposition and routing experience".into(),
4390 ],
4391 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4392 state: AssetState::Promoted,
4393 },
4394 Gene {
4395 id: "builtin-experience-project-workflow-v1".into(),
4396 signals: vec![
4397 "project.workflow".into(),
4398 "project".into(),
4399 "workflow".into(),
4400 "milestone".into(),
4401 ],
4402 strategy: vec![
4403 "asset_origin=builtin".into(),
4404 "task_class=project.workflow".into(),
4405 "task_label=Project workflow".into(),
4406 "template_id=builtin-project-workflow-v1".into(),
4407 "summary=baseline project proposal and merge workflow experience".into(),
4408 ],
4409 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4410 state: AssetState::Promoted,
4411 },
4412 Gene {
4413 id: "builtin-experience-service-bid-v1".into(),
4414 signals: vec![
4415 "service.bid".into(),
4416 "service".into(),
4417 "bid".into(),
4418 "economics".into(),
4419 ],
4420 strategy: vec![
4421 "asset_origin=builtin".into(),
4422 "task_class=service.bid".into(),
4423 "task_label=Service bid".into(),
4424 "template_id=builtin-service-bid-v1".into(),
4425 "summary=baseline service bidding and settlement experience".into(),
4426 ],
4427 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4428 state: AssetState::Promoted,
4429 },
4430 ]
4431}
4432
4433fn evomap_snapshot_path(file_name: &str) -> PathBuf {
4434 PathBuf::from(env!("CARGO_MANIFEST_DIR"))
4435 .join(EVOMAP_SNAPSHOT_ROOT)
4436 .join(file_name)
4437}
4438
4439fn read_evomap_snapshot(file_name: &str) -> Result<Option<String>, EvoKernelError> {
4440 let path = evomap_snapshot_path(file_name);
4441 if !path.exists() {
4442 return Ok(None);
4443 }
4444 fs::read_to_string(&path).map(Some).map_err(|err| {
4445 EvoKernelError::Validation(format!(
4446 "failed to read EvoMap snapshot {}: {err}",
4447 path.display()
4448 ))
4449 })
4450}
4451
4452fn compatibility_state_from_value(value: Option<&Value>) -> Option<String> {
4453 let value = value?;
4454 if let Some(state) = value.as_str() {
4455 let normalized = state.trim().to_ascii_lowercase();
4456 if normalized.is_empty() {
4457 return None;
4458 }
4459 return Some(normalized);
4460 }
4461 value
4462 .get("state")
4463 .and_then(Value::as_str)
4464 .map(str::trim)
4465 .filter(|state| !state.is_empty())
4466 .map(|state| state.to_ascii_lowercase())
4467}
4468
4469fn map_evomap_state(value: Option<&Value>) -> AssetState {
4470 match compatibility_state_from_value(value).as_deref() {
4471 Some("promoted") => AssetState::Promoted,
4472 Some("candidate") => AssetState::Candidate,
4473 Some("quarantined") => AssetState::Quarantined,
4474 Some("shadow_validated") => AssetState::ShadowValidated,
4475 Some("revoked") => AssetState::Revoked,
4476 Some("rejected") => AssetState::Archived,
4477 Some("archived") => AssetState::Archived,
4478 _ => AssetState::Candidate,
4479 }
4480}
4481
4482fn value_as_signal_string(value: &Value) -> Option<String> {
4483 match value {
4484 Value::String(raw) => {
4485 let normalized = raw.trim();
4486 if normalized.is_empty() {
4487 None
4488 } else {
4489 Some(normalized.to_string())
4490 }
4491 }
4492 Value::Object(_) => {
4493 let serialized = serde_json::to_string(value).ok()?;
4494 let normalized = serialized.trim();
4495 if normalized.is_empty() {
4496 None
4497 } else {
4498 Some(normalized.to_string())
4499 }
4500 }
4501 Value::Null => None,
4502 other => {
4503 let rendered = other.to_string();
4504 let normalized = rendered.trim();
4505 if normalized.is_empty() {
4506 None
4507 } else {
4508 Some(normalized.to_string())
4509 }
4510 }
4511 }
4512}
4513
/// Extracts the set of changed file paths from a unified diff.
///
/// Paths come from `+++ b/<path>` hunks (skipping `/dev/null`) and from
/// `diff --git a/<old> b/<new>` headers (taking the new side). The result is
/// de-duplicated and sorted.
fn parse_diff_changed_files(payload: &str) -> Vec<String> {
    let mut changed_files = BTreeSet::new();
    for raw_line in payload.lines() {
        let line = raw_line.trim();
        if let Some(rest) = line.strip_prefix("+++ b/") {
            let path = rest.trim();
            if !path.is_empty() && path != "/dev/null" {
                changed_files.insert(path.to_string());
            }
        } else if let Some(rest) = line.strip_prefix("diff --git a/") {
            // Keep only the post-change ("b/") side of the header.
            if let Some((_, new_side)) = rest.split_once(" b/") {
                let new_side = new_side.trim();
                if !new_side.is_empty() {
                    changed_files.insert(new_side.to_string());
                }
            }
        }
    }
    changed_files.into_iter().collect()
}
4536
/// Removes a surrounding markdown code fence (```…```) from a diff payload.
///
/// Non-fenced input is returned trimmed. For fenced input, the opening fence
/// line (including any language tag) and a trailing ``` line are dropped.
fn strip_diff_code_fence(payload: &str) -> String {
    let trimmed = payload.trim();
    if !trimmed.starts_with("```") {
        return trimmed.to_string();
    }
    // Drop the opening fence line, then a closing fence line if present.
    let mut inner: Vec<&str> = trimmed.lines().skip(1).collect();
    if matches!(inner.last(), Some(last) if last.trim() == "```") {
        inner.pop();
    }
    inner.join("\n").trim().to_string()
}
4556
4557fn synthetic_diff_for_capsule(capsule: &EvoMapCapsuleAsset) -> String {
4558 let file_path = format!("docs/evomap_builtin_capsules/{}.md", capsule.id);
4559 let mut content = Vec::new();
4560 content.push(format!("# EvoMap Builtin Capsule {}", capsule.id));
4561 if capsule.summary.trim().is_empty() {
4562 content.push("summary: missing".to_string());
4563 } else {
4564 content.push(format!("summary: {}", capsule.summary.trim()));
4565 }
4566 if !capsule.trigger.is_empty() {
4567 content.push(format!("trigger: {}", capsule.trigger.join(", ")));
4568 }
4569 content.push(format!("gene: {}", capsule.gene));
4570 let added = content
4571 .into_iter()
4572 .map(|line| format!("+{}", line.replace('\r', "")))
4573 .collect::<Vec<_>>()
4574 .join("\n");
4575 format!(
4576 "diff --git a/{file_path} b/{file_path}\nnew file mode 100644\nindex 0000000..1111111\n--- /dev/null\n+++ b/{file_path}\n@@ -0,0 +1,{line_count} @@\n{added}\n",
4577 line_count = added.lines().count()
4578 )
4579}
4580
4581fn normalized_diff_payload(capsule: &EvoMapCapsuleAsset) -> String {
4582 if let Some(raw) = capsule.diff.as_deref() {
4583 let normalized = strip_diff_code_fence(raw);
4584 if !normalized.trim().is_empty() {
4585 return normalized;
4586 }
4587 }
4588 synthetic_diff_for_capsule(capsule)
4589}
4590
4591fn env_field(value: Option<&Value>, keys: &[&str]) -> Option<String> {
4592 let object = value?.as_object()?;
4593 keys.iter().find_map(|key| {
4594 object
4595 .get(*key)
4596 .and_then(Value::as_str)
4597 .map(str::trim)
4598 .filter(|value| !value.is_empty())
4599 .map(|value| value.to_string())
4600 })
4601}
4602
4603fn map_evomap_env_fingerprint(value: Option<&Value>) -> EnvFingerprint {
4604 let os =
4605 env_field(value, &["os", "platform", "os_release"]).unwrap_or_else(|| "unknown".into());
4606 let target_triple = env_field(value, &["target_triple"]).unwrap_or_else(|| {
4607 let arch = env_field(value, &["arch"]).unwrap_or_else(|| "unknown".into());
4608 format!("{arch}-unknown-{os}")
4609 });
4610 EnvFingerprint {
4611 rustc_version: env_field(value, &["runtime", "rustc_version", "node_version"])
4612 .unwrap_or_else(|| "unknown".into()),
4613 cargo_lock_hash: env_field(value, &["cargo_lock_hash"]).unwrap_or_else(|| "unknown".into()),
4614 target_triple,
4615 os,
4616 }
4617}
4618
4619fn load_evomap_builtin_assets() -> Result<Option<BuiltinAssetBundle>, EvoKernelError> {
4620 let genes_raw = read_evomap_snapshot(EVOMAP_SNAPSHOT_GENES_FILE)?;
4621 let capsules_raw = read_evomap_snapshot(EVOMAP_SNAPSHOT_CAPSULES_FILE)?;
4622 let (Some(genes_raw), Some(capsules_raw)) = (genes_raw, capsules_raw) else {
4623 return Ok(None);
4624 };
4625
4626 let genes_doc: EvoMapGeneDocument = serde_json::from_str(&genes_raw).map_err(|err| {
4627 EvoKernelError::Validation(format!("failed to parse EvoMap genes snapshot: {err}"))
4628 })?;
4629 let capsules_doc: EvoMapCapsuleDocument =
4630 serde_json::from_str(&capsules_raw).map_err(|err| {
4631 EvoKernelError::Validation(format!("failed to parse EvoMap capsules snapshot: {err}"))
4632 })?;
4633
4634 let mut genes = Vec::new();
4635 let mut known_gene_ids = BTreeSet::new();
4636 for source in genes_doc.genes {
4637 let EvoMapGeneAsset {
4638 id,
4639 category,
4640 signals_match,
4641 strategy,
4642 validation,
4643 constraints,
4644 model_name,
4645 schema_version,
4646 compatibility,
4647 } = source;
4648 let gene_id = id.trim();
4649 if gene_id.is_empty() {
4650 return Err(EvoKernelError::Validation(
4651 "EvoMap snapshot gene id must not be empty".into(),
4652 ));
4653 }
4654 if !known_gene_ids.insert(gene_id.to_string()) {
4655 continue;
4656 }
4657
4658 let mut seen_signals = BTreeSet::new();
4659 let mut signals = Vec::new();
4660 for signal in signals_match {
4661 let Some(normalized) = value_as_signal_string(&signal) else {
4662 continue;
4663 };
4664 if seen_signals.insert(normalized.clone()) {
4665 signals.push(normalized);
4666 }
4667 }
4668 if signals.is_empty() {
4669 signals.push(format!("gene:{}", gene_id.to_ascii_lowercase()));
4670 }
4671
4672 let mut strategy = strategy
4673 .into_iter()
4674 .map(|item| item.trim().to_string())
4675 .filter(|item| !item.is_empty())
4676 .collect::<Vec<_>>();
4677 if strategy.is_empty() {
4678 strategy.push("evomap strategy missing in snapshot".into());
4679 }
4680 let constraint = constraints.unwrap_or_default();
4681 let compat_state = compatibility_state_from_value(compatibility.as_ref())
4682 .unwrap_or_else(|| "candidate".to_string());
4683 ensure_strategy_metadata(&mut strategy, "asset_origin", "builtin_evomap");
4684 ensure_strategy_metadata(
4685 &mut strategy,
4686 "evomap_category",
4687 category.as_deref().unwrap_or("unknown"),
4688 );
4689 ensure_strategy_metadata(
4690 &mut strategy,
4691 "evomap_constraints_max_files",
4692 &constraint.max_files.unwrap_or_default().to_string(),
4693 );
4694 ensure_strategy_metadata(
4695 &mut strategy,
4696 "evomap_constraints_forbidden_paths",
4697 &constraint.forbidden_paths.join("|"),
4698 );
4699 ensure_strategy_metadata(
4700 &mut strategy,
4701 "evomap_model_name",
4702 model_name.as_deref().unwrap_or("unknown"),
4703 );
4704 ensure_strategy_metadata(
4705 &mut strategy,
4706 "evomap_schema_version",
4707 schema_version.as_deref().unwrap_or("1.5.0"),
4708 );
4709 ensure_strategy_metadata(&mut strategy, "evomap_compatibility_state", &compat_state);
4710
4711 let mut validation = validation
4712 .into_iter()
4713 .map(|item| item.trim().to_string())
4714 .filter(|item| !item.is_empty())
4715 .collect::<Vec<_>>();
4716 if validation.is_empty() {
4717 validation.push("evomap-builtin-seed".into());
4718 }
4719
4720 genes.push(Gene {
4721 id: gene_id.to_string(),
4722 signals,
4723 strategy,
4724 validation,
4725 state: map_evomap_state(compatibility.as_ref()),
4726 });
4727 }
4728
4729 let mut capsules = Vec::new();
4730 let known_gene_ids = genes
4731 .iter()
4732 .map(|gene| gene.id.clone())
4733 .collect::<BTreeSet<_>>();
4734 for source in capsules_doc.capsules {
4735 let EvoMapCapsuleAsset {
4736 id,
4737 gene,
4738 trigger,
4739 summary,
4740 diff,
4741 confidence,
4742 outcome,
4743 blast_radius,
4744 content,
4745 env_fingerprint,
4746 model_name: _model_name,
4747 schema_version: _schema_version,
4748 compatibility,
4749 } = source;
4750 let source_for_diff = EvoMapCapsuleAsset {
4751 id: id.clone(),
4752 gene: gene.clone(),
4753 trigger: trigger.clone(),
4754 summary: summary.clone(),
4755 diff,
4756 confidence,
4757 outcome: outcome.clone(),
4758 blast_radius: blast_radius.clone(),
4759 content: content.clone(),
4760 env_fingerprint: env_fingerprint.clone(),
4761 model_name: None,
4762 schema_version: None,
4763 compatibility: compatibility.clone(),
4764 };
4765 if !known_gene_ids.contains(gene.as_str()) {
4766 return Err(EvoKernelError::Validation(format!(
4767 "EvoMap capsule {} references unknown gene {}",
4768 id, gene
4769 )));
4770 }
4771 let normalized_diff = normalized_diff_payload(&source_for_diff);
4772 if normalized_diff.trim().is_empty() {
4773 return Err(EvoKernelError::Validation(format!(
4774 "EvoMap capsule {} has empty normalized diff payload",
4775 id
4776 )));
4777 }
4778 let mut changed_files = content
4779 .as_ref()
4780 .map(|content| {
4781 content
4782 .changed_files
4783 .iter()
4784 .map(|item| item.trim().to_string())
4785 .filter(|item| !item.is_empty())
4786 .collect::<Vec<_>>()
4787 })
4788 .unwrap_or_default();
4789 if changed_files.is_empty() {
4790 changed_files = parse_diff_changed_files(&normalized_diff);
4791 }
4792 if changed_files.is_empty() {
4793 changed_files.push(format!("docs/evomap_builtin_capsules/{}.md", id));
4794 }
4795
4796 let confidence = confidence
4797 .or_else(|| outcome.as_ref().and_then(|outcome| outcome.score))
4798 .unwrap_or(0.6)
4799 .clamp(0.0, 1.0);
4800 let status_success = outcome
4801 .as_ref()
4802 .and_then(|outcome| outcome.status.as_deref())
4803 .map(|status| status.eq_ignore_ascii_case("success"))
4804 .unwrap_or(true);
4805 let blast_radius = blast_radius.unwrap_or_default();
4806 let mutation_id = format!("builtin-evomap-mutation-{}", id);
4807 let intent = MutationIntent {
4808 id: mutation_id.clone(),
4809 intent: if summary.trim().is_empty() {
4810 format!("apply EvoMap capsule {}", id)
4811 } else {
4812 summary.trim().to_string()
4813 },
4814 target: MutationTarget::Paths {
4815 allow: changed_files.clone(),
4816 },
4817 expected_effect: format!("seed replay candidate from EvoMap capsule {}", id),
4818 risk: RiskLevel::Low,
4819 signals: if trigger.is_empty() {
4820 vec![format!("capsule:{}", id.to_ascii_lowercase())]
4821 } else {
4822 trigger
4823 .iter()
4824 .map(|signal| signal.trim().to_ascii_lowercase())
4825 .filter(|signal| !signal.is_empty())
4826 .collect::<Vec<_>>()
4827 },
4828 spec_id: None,
4829 };
4830 let mutation = PreparedMutation {
4831 intent,
4832 artifact: oris_evolution::MutationArtifact {
4833 encoding: ArtifactEncoding::UnifiedDiff,
4834 payload: normalized_diff.clone(),
4835 base_revision: None,
4836 content_hash: compute_artifact_hash(&normalized_diff),
4837 },
4838 };
4839 let capsule = Capsule {
4840 id: id.clone(),
4841 gene_id: gene.clone(),
4842 mutation_id,
4843 run_id: EVOMAP_BUILTIN_RUN_ID.to_string(),
4844 diff_hash: compute_artifact_hash(&normalized_diff),
4845 confidence,
4846 env: map_evomap_env_fingerprint(env_fingerprint.as_ref()),
4847 outcome: Outcome {
4848 success: status_success,
4849 validation_profile: "evomap-builtin-seed".into(),
4850 validation_duration_ms: 0,
4851 changed_files,
4852 validator_hash: "builtin-evomap".into(),
4853 lines_changed: blast_radius.lines,
4854 replay_verified: false,
4855 },
4856 state: map_evomap_state(compatibility.as_ref()),
4857 };
4858 capsules.push(BuiltinCapsuleSeed { capsule, mutation });
4859 }
4860
4861 Ok(Some(BuiltinAssetBundle { genes, capsules }))
4862}
4863
/// Seeds the built-in experience genes (and any EvoMap snapshot assets) into
/// `store`, skipping anything the store already knows, and reports the result
/// as an [`ImportOutcome`] with a sync audit.
///
/// Event append order follows the same sequence as remote imports:
/// import → project/commit → promotion/quarantine transition.
fn ensure_builtin_experience_assets_in_store(
    store: &dyn EvolutionStore,
    sender_id: String,
) -> Result<ImportOutcome, EvoKernelError> {
    // Snapshot what the store already contains so seeding is idempotent.
    let (events, projection) = scan_projection(store)?;
    let mut known_gene_ids = projection
        .genes
        .into_iter()
        .map(|gene| gene.id)
        .collect::<BTreeSet<_>>();
    let mut known_capsule_ids = projection
        .capsules
        .into_iter()
        .map(|capsule| capsule.id)
        .collect::<BTreeSet<_>>();
    // Mutations are tracked via declaration events, not the projection.
    let mut known_mutation_ids = BTreeSet::new();
    for stored in &events {
        if let EvolutionEvent::MutationDeclared { mutation } = &stored.event {
            known_mutation_ids.insert(mutation.intent.id.clone());
        }
    }
    let normalized_sender = normalized_sender_id(&sender_id);
    let mut imported_asset_ids = Vec::new();
    // Start from the hard-coded genes; extend with snapshot assets if present.
    let mut bundle = BuiltinAssetBundle {
        genes: built_in_experience_genes(),
        capsules: Vec::new(),
    };
    if let Some(snapshot_bundle) = load_evomap_builtin_assets()? {
        bundle.genes.extend(snapshot_bundle.genes);
        bundle.capsules.extend(snapshot_bundle.capsules);
    }
    let scanned_count = bundle.genes.len() + bundle.capsules.len();

    for gene in bundle.genes {
        // `insert` returning false means the gene already exists — skip it.
        if !known_gene_ids.insert(gene.id.clone()) {
            continue;
        }

        store
            .append_event(EvolutionEvent::RemoteAssetImported {
                source: CandidateSource::Local,
                asset_ids: vec![gene.id.clone()],
                sender_id: normalized_sender.clone(),
            })
            .map_err(store_err)?;
        store
            .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
            .map_err(store_err)?;
        // Map the seed's declared state onto a local transition.
        match gene.state {
            // Dead states get no transition event.
            AssetState::Revoked | AssetState::Archived => {}
            AssetState::Quarantined | AssetState::ShadowValidated => {
                store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: gene.id.clone(),
                        state: AssetState::Quarantined,
                        reason:
                            "built-in EvoMap asset requires additional validation before promotion"
                                .into(),
                        reason_code: TransitionReasonCode::DowngradeBuiltinRequiresValidation,
                        evidence: None,
                    })
                    .map_err(store_err)?;
            }
            // Candidates are promoted immediately: built-ins must be usable
            // on a cold start.
            AssetState::Promoted | AssetState::Candidate => {
                store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: gene.id.clone(),
                        state: AssetState::Promoted,
                        reason: "built-in experience asset promoted for cold-start compatibility"
                            .into(),
                        reason_code: TransitionReasonCode::PromotionBuiltinColdStartCompatibility,
                        evidence: None,
                    })
                    .map_err(store_err)?;
                store
                    .append_event(EvolutionEvent::GenePromoted {
                        gene_id: gene.id.clone(),
                    })
                    .map_err(store_err)?;
            }
        }
        imported_asset_ids.push(gene.id.clone());
    }

    for seed in bundle.capsules {
        // Gene set now includes both pre-existing and freshly seeded genes.
        if !known_gene_ids.contains(seed.capsule.gene_id.as_str()) {
            return Err(EvoKernelError::Validation(format!(
                "built-in capsule {} references unknown gene {}",
                seed.capsule.id, seed.capsule.gene_id
            )));
        }
        // Declare the backing mutation once, even if the capsule is skipped.
        if known_mutation_ids.insert(seed.mutation.intent.id.clone()) {
            store
                .append_event(EvolutionEvent::MutationDeclared {
                    mutation: seed.mutation.clone(),
                })
                .map_err(store_err)?;
        }
        if !known_capsule_ids.insert(seed.capsule.id.clone()) {
            continue;
        }
        store
            .append_event(EvolutionEvent::RemoteAssetImported {
                source: CandidateSource::Local,
                asset_ids: vec![seed.capsule.id.clone()],
                sender_id: normalized_sender.clone(),
            })
            .map_err(store_err)?;
        store
            .append_event(EvolutionEvent::CapsuleCommitted {
                capsule: seed.capsule.clone(),
            })
            .map_err(store_err)?;
        match seed.capsule.state {
            AssetState::Revoked | AssetState::Archived => {}
            AssetState::Quarantined | AssetState::ShadowValidated => {
                store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: seed.capsule.id.clone(),
                    })
                    .map_err(store_err)?;
            }
            AssetState::Promoted | AssetState::Candidate => {
                store
                    .append_event(EvolutionEvent::CapsuleReleased {
                        capsule_id: seed.capsule.id.clone(),
                        state: AssetState::Promoted,
                    })
                    .map_err(store_err)?;
            }
        }
        imported_asset_ids.push(seed.capsule.id.clone());
    }

    let next_cursor = latest_store_cursor(store)?;
    // A resume token is only meaningful with both a cursor and a sender id.
    let resume_token = next_cursor.as_ref().and_then(|cursor| {
        normalized_sender
            .as_deref()
            .map(|sender| encode_resume_token(sender, cursor))
    });
    let applied_count = imported_asset_ids.len();
    let skipped_count = scanned_count.saturating_sub(applied_count);

    Ok(ImportOutcome {
        imported_asset_ids,
        accepted: true,
        next_cursor: next_cursor.clone(),
        resume_token,
        sync_audit: SyncAudit {
            batch_id: next_id("sync-import"),
            requested_cursor: None,
            scanned_count,
            applied_count,
            skipped_count,
            failed_count: 0,
            failure_reasons: Vec::new(),
        },
    })
}
5025
/// Finds the value of a `key=value` strategy metadata entry.
///
/// Keys are matched case-insensitively after trimming; entries whose value is
/// blank are passed over so a later entry for the same key can still match.
fn strategy_metadata_value(strategy: &[String], key: &str) -> Option<String> {
    for entry in strategy {
        let Some((entry_key, entry_value)) = entry.split_once('=') else {
            continue;
        };
        if !entry_key.trim().eq_ignore_ascii_case(key) {
            continue;
        }
        let trimmed = entry_value.trim();
        if !trimmed.is_empty() {
            return Some(trimmed.to_string());
        }
    }
    None
}
5041
5042fn ensure_strategy_metadata(strategy: &mut Vec<String>, key: &str, value: &str) {
5043 let normalized = value.trim();
5044 if normalized.is_empty() || strategy_metadata_value(strategy, key).is_some() {
5045 return;
5046 }
5047 strategy.push(format!("{key}={normalized}"));
5048}
5049
5050fn enforce_reported_experience_retention(
5051 store: &dyn EvolutionStore,
5052 task_class: &str,
5053 keep_latest: usize,
5054) -> Result<(), EvoKernelError> {
5055 let task_class = task_class.trim();
5056 if task_class.is_empty() || keep_latest == 0 {
5057 return Ok(());
5058 }
5059
5060 let (_, projection) = scan_projection(store)?;
5061 let mut candidates = projection
5062 .genes
5063 .iter()
5064 .filter(|gene| gene.state == AssetState::Promoted)
5065 .filter_map(|gene| {
5066 let origin = strategy_metadata_value(&gene.strategy, "asset_origin")?;
5067 if !origin.eq_ignore_ascii_case("reported_experience") {
5068 return None;
5069 }
5070 let gene_task_class = strategy_metadata_value(&gene.strategy, "task_class")?;
5071 if !gene_task_class.eq_ignore_ascii_case(task_class) {
5072 return None;
5073 }
5074 let updated_at = projection
5075 .last_updated_at
5076 .get(&gene.id)
5077 .cloned()
5078 .unwrap_or_default();
5079 Some((gene.id.clone(), updated_at))
5080 })
5081 .collect::<Vec<_>>();
5082 if candidates.len() <= keep_latest {
5083 return Ok(());
5084 }
5085
5086 candidates.sort_by(|left, right| right.1.cmp(&left.1).then_with(|| right.0.cmp(&left.0)));
5087 let stale_gene_ids = candidates
5088 .into_iter()
5089 .skip(keep_latest)
5090 .map(|(gene_id, _)| gene_id)
5091 .collect::<BTreeSet<_>>();
5092 if stale_gene_ids.is_empty() {
5093 return Ok(());
5094 }
5095
5096 let reason =
5097 format!("reported experience retention limit exceeded for task_class={task_class}");
5098 for gene_id in &stale_gene_ids {
5099 store
5100 .append_event(EvolutionEvent::GeneRevoked {
5101 gene_id: gene_id.clone(),
5102 reason: reason.clone(),
5103 })
5104 .map_err(store_err)?;
5105 }
5106
5107 let stale_capsule_ids = projection
5108 .capsules
5109 .iter()
5110 .filter(|capsule| stale_gene_ids.contains(&capsule.gene_id))
5111 .map(|capsule| capsule.id.clone())
5112 .collect::<BTreeSet<_>>();
5113 for capsule_id in stale_capsule_ids {
5114 store
5115 .append_event(EvolutionEvent::CapsuleQuarantined { capsule_id })
5116 .map_err(store_err)?;
5117 }
5118 Ok(())
5119}
5120
5121fn record_reported_experience_in_store(
5122 store: &dyn EvolutionStore,
5123 sender_id: String,
5124 gene_id: String,
5125 signals: Vec<String>,
5126 strategy: Vec<String>,
5127 validation: Vec<String>,
5128) -> Result<ImportOutcome, EvoKernelError> {
5129 let gene_id = gene_id.trim();
5130 if gene_id.is_empty() {
5131 return Err(EvoKernelError::Validation(
5132 "reported experience gene_id must not be empty".into(),
5133 ));
5134 }
5135
5136 let mut unique_signals = BTreeSet::new();
5137 let mut normalized_signals = Vec::new();
5138 for signal in signals {
5139 let normalized = signal.trim().to_ascii_lowercase();
5140 if normalized.is_empty() {
5141 continue;
5142 }
5143 if unique_signals.insert(normalized.clone()) {
5144 normalized_signals.push(normalized);
5145 }
5146 }
5147 if normalized_signals.is_empty() {
5148 return Err(EvoKernelError::Validation(
5149 "reported experience signals must not be empty".into(),
5150 ));
5151 }
5152
5153 let mut unique_strategy = BTreeSet::new();
5154 let mut normalized_strategy = Vec::new();
5155 for entry in strategy {
5156 let normalized = entry.trim().to_string();
5157 if normalized.is_empty() {
5158 continue;
5159 }
5160 if unique_strategy.insert(normalized.clone()) {
5161 normalized_strategy.push(normalized);
5162 }
5163 }
5164 if normalized_strategy.is_empty() {
5165 normalized_strategy.push("reported local replay experience".into());
5166 }
5167 let task_class_id = strategy_metadata_value(&normalized_strategy, "task_class")
5168 .or_else(|| normalized_signals.first().cloned())
5169 .unwrap_or_else(|| "reported-experience".into());
5170 let task_label = strategy_metadata_value(&normalized_strategy, "task_label")
5171 .or_else(|| normalized_signals.first().cloned())
5172 .unwrap_or_else(|| task_class_id.clone());
5173 ensure_strategy_metadata(
5174 &mut normalized_strategy,
5175 "asset_origin",
5176 "reported_experience",
5177 );
5178 ensure_strategy_metadata(&mut normalized_strategy, "task_class", &task_class_id);
5179 ensure_strategy_metadata(&mut normalized_strategy, "task_label", &task_label);
5180
5181 let mut unique_validation = BTreeSet::new();
5182 let mut normalized_validation = Vec::new();
5183 for entry in validation {
5184 let normalized = entry.trim().to_string();
5185 if normalized.is_empty() {
5186 continue;
5187 }
5188 if unique_validation.insert(normalized.clone()) {
5189 normalized_validation.push(normalized);
5190 }
5191 }
5192 if normalized_validation.is_empty() {
5193 normalized_validation.push("a2a.tasks.report".into());
5194 }
5195
5196 let gene = Gene {
5197 id: gene_id.to_string(),
5198 signals: normalized_signals,
5199 strategy: normalized_strategy,
5200 validation: normalized_validation,
5201 state: AssetState::Promoted,
5202 };
5203 let normalized_sender = normalized_sender_id(&sender_id);
5204
5205 store
5206 .append_event(EvolutionEvent::RemoteAssetImported {
5207 source: CandidateSource::Local,
5208 asset_ids: vec![gene.id.clone()],
5209 sender_id: normalized_sender.clone(),
5210 })
5211 .map_err(store_err)?;
5212 store
5213 .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
5214 .map_err(store_err)?;
5215 store
5216 .append_event(EvolutionEvent::PromotionEvaluated {
5217 gene_id: gene.id.clone(),
5218 state: AssetState::Promoted,
5219 reason: "trusted local report promoted reusable experience".into(),
5220 reason_code: TransitionReasonCode::PromotionTrustedLocalReport,
5221 evidence: None,
5222 })
5223 .map_err(store_err)?;
5224 store
5225 .append_event(EvolutionEvent::GenePromoted {
5226 gene_id: gene.id.clone(),
5227 })
5228 .map_err(store_err)?;
5229 enforce_reported_experience_retention(
5230 store,
5231 &task_class_id,
5232 REPORTED_EXPERIENCE_RETENTION_LIMIT,
5233 )?;
5234
5235 let imported_asset_ids = vec![gene.id];
5236 let next_cursor = latest_store_cursor(store)?;
5237 let resume_token = next_cursor.as_ref().and_then(|cursor| {
5238 normalized_sender
5239 .as_deref()
5240 .map(|sender| encode_resume_token(sender, cursor))
5241 });
5242 Ok(ImportOutcome {
5243 imported_asset_ids,
5244 accepted: true,
5245 next_cursor,
5246 resume_token,
5247 sync_audit: SyncAudit {
5248 batch_id: next_id("sync-import"),
5249 requested_cursor: None,
5250 scanned_count: 1,
5251 applied_count: 1,
5252 skipped_count: 0,
5253 failed_count: 0,
5254 failure_reasons: Vec::new(),
5255 },
5256 })
5257}
5258
/// Trims a sender id and converts it to an owned `String`, treating a
/// blank/whitespace-only id as absent (`None`).
fn normalized_sender_id(sender_id: &str) -> Option<String> {
    match sender_id.trim() {
        "" => None,
        trimmed => Some(trimmed.to_string()),
    }
}
5267
/// Trims each asset id, drops blank entries, and deduplicates the rest into
/// an ordered set.
fn normalized_asset_ids(asset_ids: &[String]) -> BTreeSet<String> {
    let mut normalized = BTreeSet::new();
    for raw in asset_ids {
        let trimmed = raw.trim();
        if !trimmed.is_empty() {
            normalized.insert(trimmed.to_string());
        }
    }
    normalized
}
5275
5276fn validate_remote_revoke_notice_assets(
5277 store: &dyn EvolutionStore,
5278 notice: &RevokeNotice,
5279) -> Result<(String, BTreeSet<String>), EvoKernelError> {
5280 let sender_id = normalized_sender_id(¬ice.sender_id).ok_or_else(|| {
5281 EvoKernelError::Validation("revoke notice sender_id must not be empty".into())
5282 })?;
5283 let requested = normalized_asset_ids(¬ice.asset_ids);
5284 if requested.is_empty() {
5285 return Ok((sender_id, requested));
5286 }
5287
5288 let remote_publishers = remote_publishers_by_asset_from_store(store);
5289 let has_remote_assets = requested
5290 .iter()
5291 .any(|asset_id| remote_publishers.contains_key(asset_id));
5292 if !has_remote_assets {
5293 return Ok((sender_id, requested));
5294 }
5295
5296 let unauthorized = requested
5297 .iter()
5298 .filter(|asset_id| {
5299 remote_publishers.get(*asset_id).map(String::as_str) != Some(sender_id.as_str())
5300 })
5301 .cloned()
5302 .collect::<Vec<_>>();
5303 if !unauthorized.is_empty() {
5304 return Err(EvoKernelError::Validation(format!(
5305 "remote revoke notice contains assets not owned by sender {sender_id}: {}",
5306 unauthorized.join(", ")
5307 )));
5308 }
5309
5310 Ok((sender_id, requested))
5311}
5312
/// Renders a single-line audit summary for a replay-failure-driven
/// revocation. A missing source sender is rendered as `unavailable`;
/// confidences are printed with three decimal places.
fn replay_failure_revocation_summary(
    replay_failures: u64,
    current_confidence: f32,
    historical_peak_confidence: f32,
    source_sender_id: Option<&str>,
) -> String {
    let source_sender_id = match source_sender_id {
        Some(sender) => sender,
        None => "unavailable",
    };
    format!(
        "phase=replay_failure_revocation; source_sender_id={source_sender_id}; replay_failures={replay_failures}; current_confidence={current_confidence:.3}; historical_peak_confidence={historical_peak_confidence:.3}"
    )
}
5324
5325fn record_manifest_validation(
5326 store: &dyn EvolutionStore,
5327 envelope: &EvolutionEnvelope,
5328 accepted: bool,
5329 reason: impl Into<String>,
5330) -> Result<(), EvoKernelError> {
5331 let manifest = envelope.manifest.as_ref();
5332 let sender_id = manifest
5333 .and_then(|value| normalized_sender_id(&value.sender_id))
5334 .or_else(|| normalized_sender_id(&envelope.sender_id));
5335 let publisher = manifest.and_then(|value| normalized_sender_id(&value.publisher));
5336 let asset_ids = manifest
5337 .map(|value| value.asset_ids.clone())
5338 .unwrap_or_else(|| EvolutionEnvelope::manifest_asset_ids(&envelope.assets));
5339
5340 store
5341 .append_event(EvolutionEvent::ManifestValidated {
5342 accepted,
5343 reason: reason.into(),
5344 sender_id,
5345 publisher,
5346 asset_ids,
5347 })
5348 .map_err(store_err)?;
5349 Ok(())
5350}
5351
5352fn record_remote_publisher_for_asset(
5353 remote_publishers: Option<&Mutex<BTreeMap<String, String>>>,
5354 sender_id: &str,
5355 asset: &NetworkAsset,
5356) {
5357 let Some(remote_publishers) = remote_publishers else {
5358 return;
5359 };
5360 let sender_id = sender_id.trim();
5361 if sender_id.is_empty() {
5362 return;
5363 }
5364 let Ok(mut publishers) = remote_publishers.lock() else {
5365 return;
5366 };
5367 match asset {
5368 NetworkAsset::Gene { gene } => {
5369 publishers.insert(gene.id.clone(), sender_id.to_string());
5370 }
5371 NetworkAsset::Capsule { capsule } => {
5372 publishers.insert(capsule.id.clone(), sender_id.to_string());
5373 }
5374 NetworkAsset::EvolutionEvent { .. } => {}
5375 }
5376}
5377
5378fn remote_publishers_by_asset_from_store(store: &dyn EvolutionStore) -> BTreeMap<String, String> {
5379 let Ok(events) = store.scan(1) else {
5380 return BTreeMap::new();
5381 };
5382 remote_publishers_by_asset_from_events(&events)
5383}
5384
/// Rebuilds the asset-id -> remote publisher (sender id) map from a scanned
/// event log.
///
/// Only `RemoteAssetImported` events with `source == CandidateSource::Remote`
/// establish ownership, and an asset id only appears in the result once the
/// asset is confirmed to exist via a `GeneProjected` or `CapsuleCommitted`
/// event — the import and the confirmation may arrive in either order. A
/// later import of the same asset id overwrites the recorded sender.
fn remote_publishers_by_asset_from_events(
    events: &[StoredEvolutionEvent],
) -> BTreeMap<String, String> {
    // Publisher per imported asset id, pending confirmation the asset exists.
    let mut imported_asset_publishers = BTreeMap::<String, String>::new();
    let mut known_gene_ids = BTreeSet::<String>::new();
    let mut known_capsule_ids = BTreeSet::<String>::new();
    // Final result: only assets both imported remotely and known locally.
    let mut publishers_by_asset = BTreeMap::<String, String>::new();

    for stored in events {
        match &stored.event {
            EvolutionEvent::RemoteAssetImported {
                source: CandidateSource::Remote,
                asset_ids,
                sender_id,
            } => {
                // Imports with a missing/blank sender cannot establish ownership.
                let Some(sender_id) = sender_id.as_deref().and_then(normalized_sender_id) else {
                    continue;
                };
                for asset_id in asset_ids {
                    imported_asset_publishers.insert(asset_id.clone(), sender_id.clone());
                    // Asset already projected/committed: ownership effective now.
                    if known_gene_ids.contains(asset_id) || known_capsule_ids.contains(asset_id) {
                        publishers_by_asset.insert(asset_id.clone(), sender_id.clone());
                    }
                }
            }
            EvolutionEvent::GeneProjected { gene } => {
                known_gene_ids.insert(gene.id.clone());
                // Import seen earlier: ownership becomes effective on projection.
                if let Some(sender_id) = imported_asset_publishers.get(&gene.id) {
                    publishers_by_asset.insert(gene.id.clone(), sender_id.clone());
                }
            }
            EvolutionEvent::CapsuleCommitted { capsule } => {
                known_capsule_ids.insert(capsule.id.clone());
                // Same confirmation rule as genes, keyed by capsule id.
                if let Some(sender_id) = imported_asset_publishers.get(&capsule.id) {
                    publishers_by_asset.insert(capsule.id.clone(), sender_id.clone());
                }
            }
            _ => {}
        }
    }

    publishers_by_asset
}
5428
5429fn should_import_remote_event(event: &EvolutionEvent) -> bool {
5430 matches!(
5431 event,
5432 EvolutionEvent::MutationDeclared { .. } | EvolutionEvent::SpecLinked { .. }
5433 )
5434}
5435
/// Serves a remote fetch request from the local evolution store.
///
/// Selection pipeline:
/// 1. resolve the caller's position (explicit `since_cursor` or
///    `resume_token`),
/// 2. keep only promoted genes whose signals overlap the query's signals
///    (case-insensitive, bidirectional substring match; an empty signal list
///    matches everything), plus promoted capsules belonging to those genes,
/// 3. when a cursor was supplied, narrow to assets touched by events after
///    that cursor (delta sync); otherwise return the full match set.
///
/// The returned audit reports the full match set as `scanned_count` and the
/// delta-selected subset as `applied_count`; the new cursor/resume token
/// point at the latest event seen in this scan.
fn fetch_assets_from_store(
    store: &dyn EvolutionStore,
    responder_id: impl Into<String>,
    query: &FetchQuery,
) -> Result<FetchResponse, EvoKernelError> {
    let (events, projection) = scan_projection(store)?;
    let requested_cursor = resolve_requested_cursor(
        &query.sender_id,
        query.since_cursor.as_deref(),
        query.resume_token.as_deref(),
    )?;
    // Missing/unparseable cursor falls back to seq 0 (i.e. everything).
    let since_seq = requested_cursor
        .as_deref()
        .and_then(parse_sync_cursor_seq)
        .unwrap_or(0);
    let normalized_signals: Vec<String> = query
        .signals
        .iter()
        .map(|signal| signal.trim().to_ascii_lowercase())
        .filter(|signal| !signal.is_empty())
        .collect();
    // Bidirectional substring match: either string may contain the other.
    let matches_any_signal = |candidate: &str| {
        if normalized_signals.is_empty() {
            return true;
        }
        let candidate = candidate.to_ascii_lowercase();
        normalized_signals
            .iter()
            .any(|signal| candidate.contains(signal) || signal.contains(&candidate))
    };

    // Only promoted genes are eligible for export.
    let matched_genes: Vec<Gene> = projection
        .genes
        .into_iter()
        .filter(|gene| gene.state == AssetState::Promoted)
        .filter(|gene| gene.signals.iter().any(|signal| matches_any_signal(signal)))
        .collect();
    let matched_gene_ids: BTreeSet<String> =
        matched_genes.iter().map(|gene| gene.id.clone()).collect();
    // Capsules follow their owning gene's selection.
    let matched_capsules: Vec<Capsule> = projection
        .capsules
        .into_iter()
        .filter(|capsule| capsule.state == AssetState::Promoted)
        .filter(|capsule| matched_gene_ids.contains(&capsule.gene_id))
        .collect();
    // Full match set; used below for scanned/skipped audit accounting.
    let all_assets = replay_export_assets(&events, matched_genes.clone(), matched_capsules.clone());
    let (selected_genes, selected_capsules) = if requested_cursor.is_some() {
        // Delta sync: only assets touched by events after the caller's cursor.
        let delta = delta_window(&events, since_seq);
        let selected_capsules = matched_capsules
            .into_iter()
            .filter(|capsule| {
                delta.changed_capsule_ids.contains(&capsule.id)
                    || delta.changed_mutation_ids.contains(&capsule.mutation_id)
            })
            .collect::<Vec<_>>();
        // A gene is selected if it changed itself or owns a selected capsule.
        let selected_gene_ids = selected_capsules
            .iter()
            .map(|capsule| capsule.gene_id.clone())
            .collect::<BTreeSet<_>>();
        let selected_genes = matched_genes
            .into_iter()
            .filter(|gene| {
                delta.changed_gene_ids.contains(&gene.id) || selected_gene_ids.contains(&gene.id)
            })
            .collect::<Vec<_>>();
        (selected_genes, selected_capsules)
    } else {
        // No cursor: full export of the match set.
        (matched_genes, matched_capsules)
    };
    let assets = replay_export_assets(&events, selected_genes, selected_capsules);
    let next_cursor = events.last().map(|stored| format_sync_cursor(stored.seq));
    let resume_token = next_cursor
        .as_ref()
        .map(|cursor| encode_resume_token(&query.sender_id, cursor));
    let applied_count = assets.len();
    let skipped_count = all_assets.len().saturating_sub(applied_count);

    Ok(FetchResponse {
        sender_id: responder_id.into(),
        assets,
        next_cursor: next_cursor.clone(),
        resume_token,
        sync_audit: SyncAudit {
            batch_id: next_id("sync-fetch"),
            requested_cursor,
            scanned_count: all_assets.len(),
            applied_count,
            skipped_count,
            failed_count: 0,
            failure_reasons: Vec::new(),
        },
    })
}
5529
/// Applies a (possibly remote) revoke notice to the local store.
///
/// Revocation cascades: a requested gene is revoked directly; a requested
/// capsule is quarantined and its owning gene revoked; then every capsule of
/// any revoked gene is quarantined as well. Emits one `GeneRevoked` /
/// `CapsuleQuarantined` event per affected asset and returns a notice
/// listing all affected asset ids (sorted, deduplicated) for propagation.
fn revoke_assets_in_store(
    store: &dyn EvolutionStore,
    notice: &RevokeNotice,
) -> Result<RevokeNotice, EvoKernelError> {
    // Snapshot current state before appending any revocation events.
    let projection = projection_snapshot(store)?;
    let (sender_id, requested) = validate_remote_revoke_notice_assets(store, notice)?;
    let mut revoked_gene_ids = BTreeSet::new();
    let mut quarantined_capsule_ids = BTreeSet::new();

    // Directly requested genes.
    for gene in &projection.genes {
        if requested.contains(&gene.id) {
            revoked_gene_ids.insert(gene.id.clone());
        }
    }
    // Directly requested capsules also revoke their owning gene.
    for capsule in &projection.capsules {
        if requested.contains(&capsule.id) {
            quarantined_capsule_ids.insert(capsule.id.clone());
            revoked_gene_ids.insert(capsule.gene_id.clone());
        }
    }
    // Second pass: quarantine every capsule of any revoked gene.
    for capsule in &projection.capsules {
        if revoked_gene_ids.contains(&capsule.gene_id) {
            quarantined_capsule_ids.insert(capsule.id.clone());
        }
    }

    for gene_id in &revoked_gene_ids {
        store
            .append_event(EvolutionEvent::GeneRevoked {
                gene_id: gene_id.clone(),
                reason: notice.reason.clone(),
            })
            .map_err(store_err)?;
    }
    for capsule_id in &quarantined_capsule_ids {
        store
            .append_event(EvolutionEvent::CapsuleQuarantined {
                capsule_id: capsule_id.clone(),
            })
            .map_err(store_err)?;
    }

    // Merge both id sets into one sorted, deduplicated list.
    let mut affected_ids: Vec<String> = revoked_gene_ids.into_iter().collect();
    affected_ids.extend(quarantined_capsule_ids);
    affected_ids.sort();
    affected_ids.dedup();

    Ok(RevokeNotice {
        sender_id,
        asset_ids: affected_ids,
        reason: notice.reason.clone(),
    })
}
5583
/// Builds a point-in-time metrics snapshot over the whole evolution event
/// log and its projection.
///
/// Replay metrics come from `collect_replay_roi_aggregate` with no time
/// cutoff (full history); velocity counters use a fixed one-hour window
/// ending now; promoted gene/capsule totals come from the projection state.
fn evolution_metrics_snapshot(
    store: &dyn EvolutionStore,
) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
    let (events, projection) = scan_projection(store)?;
    // No cutoff: aggregate over the entire event history.
    let replay = collect_replay_roi_aggregate(&events, &projection, None);
    // One reasoning pass is avoided per successful replay.
    let replay_reasoning_avoided_total = replay.replay_success_total;
    let confidence_revalidations_total = events
        .iter()
        .filter(|stored| is_confidence_revalidation_event(&stored.event))
        .count() as u64;
    let mutation_declared_total = events
        .iter()
        .filter(|stored| matches!(stored.event, EvolutionEvent::MutationDeclared { .. }))
        .count() as u64;
    let promoted_mutations_total = events
        .iter()
        .filter(|stored| matches!(stored.event, EvolutionEvent::GenePromoted { .. }))
        .count() as u64;
    let gene_revocations_total = events
        .iter()
        .filter(|stored| matches!(stored.event, EvolutionEvent::GeneRevoked { .. }))
        .count() as u64;
    // Velocity window: the trailing hour.
    let cutoff = Utc::now() - Duration::hours(1);
    let mutation_velocity_last_hour = count_recent_events(&events, cutoff, |event| {
        matches!(event, EvolutionEvent::MutationDeclared { .. })
    });
    let revoke_frequency_last_hour = count_recent_events(&events, cutoff, |event| {
        matches!(event, EvolutionEvent::GeneRevoked { .. })
    });
    let promoted_genes = projection
        .genes
        .iter()
        .filter(|gene| gene.state == AssetState::Promoted)
        .count() as u64;
    let promoted_capsules = projection
        .capsules
        .iter()
        .filter(|capsule| capsule.state == AssetState::Promoted)
        .count() as u64;

    Ok(EvolutionMetricsSnapshot {
        replay_attempts_total: replay.replay_attempts_total,
        replay_success_total: replay.replay_success_total,
        replay_success_rate: safe_ratio(replay.replay_success_total, replay.replay_attempts_total),
        confidence_revalidations_total,
        replay_reasoning_avoided_total,
        reasoning_avoided_tokens_total: replay.reasoning_avoided_tokens_total,
        replay_fallback_cost_total: replay.replay_fallback_cost_total,
        replay_roi: compute_replay_roi(
            replay.reasoning_avoided_tokens_total,
            replay.replay_fallback_cost_total,
        ),
        replay_task_classes: replay.replay_task_classes,
        replay_sources: replay.replay_sources,
        mutation_declared_total,
        promoted_mutations_total,
        promotion_ratio: safe_ratio(promoted_mutations_total, mutation_declared_total),
        gene_revocations_total,
        mutation_velocity_last_hour,
        revoke_frequency_last_hour,
        promoted_genes,
        promoted_capsules,
        // 0 stands for "empty log" — event seqs start above 0.
        last_event_seq: events.last().map(|stored| stored.seq).unwrap_or(0),
    })
}
5649
/// Roll-up of replay economics computed from the event log by
/// `collect_replay_roi_aggregate`.
struct ReplayRoiAggregate {
    // Invariant: attempts == success + failure (by construction).
    replay_attempts_total: u64,
    replay_success_total: u64,
    replay_failure_total: u64,
    // Estimated reasoning tokens saved by successful replays.
    reasoning_avoided_tokens_total: u64,
    // Cost accrued when a replay failed and execution fell back.
    replay_fallback_cost_total: u64,
    // Breakdown per (task_class_id, task_label).
    replay_task_classes: Vec<ReplayTaskClassMetrics>,
    // Breakdown per source sender; only populated from recorded evidence.
    replay_sources: Vec<ReplaySourceRoiMetrics>,
}
5659
/// Aggregates replay ROI economics from the event log, optionally limited to
/// events at or after `cutoff`.
///
/// Two data paths:
/// - **Evidence path** (preferred): when in-scope
///   `ReplayEconomicsRecorded` events exist, their evidence provides exact
///   success/failure counts, avoided-token and fallback-cost totals, and both
///   per-task-class and per-source breakdowns.
/// - **Fallback path**: with no recorded evidence, successes are inferred
///   from `CapsuleReused` events and failures from replay-validation-failure
///   events, with tokens estimated at `REPLAY_REASONING_TOKEN_FLOOR` per
///   attempt. Per-source breakdowns are unavailable on this path.
fn collect_replay_roi_aggregate(
    events: &[StoredEvolutionEvent],
    projection: &EvolutionProjection,
    cutoff: Option<DateTime<Utc>>,
) -> ReplayRoiAggregate {
    let replay_evidences = events
        .iter()
        .filter(|stored| replay_event_in_scope(stored, cutoff))
        .filter_map(|stored| match &stored.event {
            EvolutionEvent::ReplayEconomicsRecorded { evidence, .. } => Some(evidence.clone()),
            _ => None,
        })
        .collect::<Vec<_>>();

    // Accumulator tuples: (success, failure, avoided_tokens, fallback_cost).
    let mut task_totals = BTreeMap::<(String, String), (u64, u64, u64, u64)>::new();
    let mut source_totals = BTreeMap::<String, (u64, u64, u64, u64)>::new();

    let (
        replay_success_total,
        replay_failure_total,
        reasoning_avoided_tokens_total,
        replay_fallback_cost_total,
    ) = if replay_evidences.is_empty() {
        // Fallback path: derive counts from reuse/failure events.
        let gene_task_classes = projection
            .genes
            .iter()
            .map(|gene| (gene.id.clone(), replay_task_descriptor(&gene.signals)))
            .collect::<BTreeMap<_, _>>();
        let mut replay_success_total = 0_u64;
        let mut replay_failure_total = 0_u64;

        for stored in events
            .iter()
            .filter(|stored| replay_event_in_scope(stored, cutoff))
        {
            match &stored.event {
                EvolutionEvent::CapsuleReused { gene_id, .. } => {
                    replay_success_total += 1;
                    // Task-class attribution only possible for known genes.
                    if let Some((task_class_id, task_label)) = gene_task_classes.get(gene_id) {
                        let entry = task_totals
                            .entry((task_class_id.clone(), task_label.clone()))
                            .or_insert((0, 0, 0, 0));
                        entry.0 += 1;
                        entry.2 += REPLAY_REASONING_TOKEN_FLOOR;
                    }
                }
                event if is_replay_validation_failure(event) => {
                    replay_failure_total += 1;
                }
                _ => {}
            }
        }

        // Tokens/costs are estimated with the per-attempt floor constant.
        (
            replay_success_total,
            replay_failure_total,
            replay_success_total * REPLAY_REASONING_TOKEN_FLOOR,
            replay_failure_total * REPLAY_REASONING_TOKEN_FLOOR,
        )
    } else {
        // Evidence path: exact figures from recorded evidence.
        let mut replay_success_total = 0_u64;
        let mut replay_failure_total = 0_u64;
        let mut reasoning_avoided_tokens_total = 0_u64;
        let mut replay_fallback_cost_total = 0_u64;

        for evidence in &replay_evidences {
            if evidence.success {
                replay_success_total += 1;
            } else {
                replay_failure_total += 1;
            }
            reasoning_avoided_tokens_total += evidence.reasoning_avoided_tokens;
            replay_fallback_cost_total += evidence.replay_fallback_cost;

            let entry = task_totals
                .entry((evidence.task_class_id.clone(), evidence.task_label.clone()))
                .or_insert((0, 0, 0, 0));
            if evidence.success {
                entry.0 += 1;
            } else {
                entry.1 += 1;
            }
            entry.2 += evidence.reasoning_avoided_tokens;
            entry.3 += evidence.replay_fallback_cost;

            // Per-source attribution only when evidence names a sender.
            if let Some(source_sender_id) = evidence.source_sender_id.as_deref() {
                let source_entry = source_totals
                    .entry(source_sender_id.to_string())
                    .or_insert((0, 0, 0, 0));
                if evidence.success {
                    source_entry.0 += 1;
                } else {
                    source_entry.1 += 1;
                }
                source_entry.2 += evidence.reasoning_avoided_tokens;
                source_entry.3 += evidence.replay_fallback_cost;
            }
        }

        (
            replay_success_total,
            replay_failure_total,
            reasoning_avoided_tokens_total,
            replay_fallback_cost_total,
        )
    };

    let replay_task_classes = task_totals
        .into_iter()
        .map(
            |(
                (task_class_id, task_label),
                (
                    replay_success_total,
                    replay_failure_total,
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            )| ReplayTaskClassMetrics {
                task_class_id,
                task_label,
                replay_success_total,
                replay_failure_total,
                // One reasoning pass avoided per successful replay.
                reasoning_steps_avoided_total: replay_success_total,
                reasoning_avoided_tokens_total,
                replay_fallback_cost_total,
                replay_roi: compute_replay_roi(
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            },
        )
        .collect::<Vec<_>>();
    let replay_sources = source_totals
        .into_iter()
        .map(
            |(
                source_sender_id,
                (
                    replay_success_total,
                    replay_failure_total,
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            )| ReplaySourceRoiMetrics {
                source_sender_id,
                replay_success_total,
                replay_failure_total,
                reasoning_avoided_tokens_total,
                replay_fallback_cost_total,
                replay_roi: compute_replay_roi(
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            },
        )
        .collect::<Vec<_>>();

    ReplayRoiAggregate {
        replay_attempts_total: replay_success_total + replay_failure_total,
        replay_success_total,
        replay_failure_total,
        reasoning_avoided_tokens_total,
        replay_fallback_cost_total,
        replay_task_classes,
        replay_sources,
    }
}
5828
5829fn replay_event_in_scope(stored: &StoredEvolutionEvent, cutoff: Option<DateTime<Utc>>) -> bool {
5830 match cutoff {
5831 Some(cutoff) => parse_event_timestamp(&stored.timestamp)
5832 .map(|timestamp| timestamp >= cutoff)
5833 .unwrap_or(false),
5834 None => true,
5835 }
5836}
5837
5838fn replay_roi_release_gate_summary(
5839 store: &dyn EvolutionStore,
5840 window_seconds: u64,
5841) -> Result<ReplayRoiWindowSummary, EvoKernelError> {
5842 let (events, projection) = scan_projection(store)?;
5843 let now = Utc::now();
5844 let cutoff = if window_seconds == 0 {
5845 None
5846 } else {
5847 let seconds = i64::try_from(window_seconds).unwrap_or(i64::MAX);
5848 Some(now - Duration::seconds(seconds))
5849 };
5850 let replay = collect_replay_roi_aggregate(&events, &projection, cutoff);
5851
5852 Ok(ReplayRoiWindowSummary {
5853 generated_at: now.to_rfc3339(),
5854 window_seconds,
5855 replay_attempts_total: replay.replay_attempts_total,
5856 replay_success_total: replay.replay_success_total,
5857 replay_failure_total: replay.replay_failure_total,
5858 reasoning_avoided_tokens_total: replay.reasoning_avoided_tokens_total,
5859 replay_fallback_cost_total: replay.replay_fallback_cost_total,
5860 replay_roi: compute_replay_roi(
5861 replay.reasoning_avoided_tokens_total,
5862 replay.replay_fallback_cost_total,
5863 ),
5864 replay_task_classes: replay.replay_task_classes,
5865 replay_sources: replay.replay_sources,
5866 })
5867}
5868
5869fn replay_roi_release_gate_contract(
5870 summary: &ReplayRoiWindowSummary,
5871 thresholds: ReplayRoiReleaseGateThresholds,
5872) -> ReplayRoiReleaseGateContract {
5873 let input = replay_roi_release_gate_input_contract(summary, thresholds);
5874 let output = evaluate_replay_roi_release_gate_contract_input(&input);
5875 ReplayRoiReleaseGateContract { input, output }
5876}
5877
5878fn replay_roi_release_gate_input_contract(
5879 summary: &ReplayRoiWindowSummary,
5880 thresholds: ReplayRoiReleaseGateThresholds,
5881) -> ReplayRoiReleaseGateInputContract {
5882 let replay_safety_signal = replay_roi_release_gate_safety_signal(summary);
5883 let replay_safety = replay_safety_signal.fail_closed_default
5884 && replay_safety_signal.rollback_ready
5885 && replay_safety_signal.audit_trail_complete
5886 && replay_safety_signal.has_replay_activity;
5887 ReplayRoiReleaseGateInputContract {
5888 generated_at: summary.generated_at.clone(),
5889 window_seconds: summary.window_seconds,
5890 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
5891 .iter()
5892 .map(|dimension| (*dimension).to_string())
5893 .collect(),
5894 replay_attempts_total: summary.replay_attempts_total,
5895 replay_success_total: summary.replay_success_total,
5896 replay_failure_total: summary.replay_failure_total,
5897 replay_hit_rate: safe_ratio(summary.replay_success_total, summary.replay_attempts_total),
5898 false_replay_rate: safe_ratio(summary.replay_failure_total, summary.replay_attempts_total),
5899 reasoning_avoided_tokens: summary.reasoning_avoided_tokens_total,
5900 replay_fallback_cost_total: summary.replay_fallback_cost_total,
5901 replay_roi: summary.replay_roi,
5902 replay_safety,
5903 replay_safety_signal,
5904 thresholds,
5905 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
5906 }
5907}
5908
5909fn replay_roi_release_gate_safety_signal(
5910 summary: &ReplayRoiWindowSummary,
5911) -> ReplayRoiReleaseGateSafetySignal {
5912 ReplayRoiReleaseGateSafetySignal {
5913 fail_closed_default: true,
5914 rollback_ready: summary.replay_failure_total == 0 || summary.replay_fallback_cost_total > 0,
5915 audit_trail_complete: summary.replay_attempts_total
5916 == summary.replay_success_total + summary.replay_failure_total,
5917 has_replay_activity: summary.replay_attempts_total > 0,
5918 }
5919}
5920
/// Evaluates a replay ROI release-gate input contract and returns the gate
/// verdict.
///
/// Checks run in two classes:
/// - **validity checks** (missing timestamp, inconsistent attempt
///   accounting, out-of-range rates, non-finite ROI, malformed thresholds,
///   zero attempts) — any failure marks the result indeterminate;
/// - **threshold checks** (minimum attempts, hit rate, false-replay rate,
///   avoided tokens, ROI, required safety) — failures alone yield
///   `FailClosed`.
///
/// Status precedence: no failed checks → `Pass`; any validity failure →
/// `Indeterminate` (itself fail-closed); otherwise → `FailClosed`. Failed
/// checks and evidence refs are deduplicated and sorted for deterministic
/// output, and the summary line always embeds the key metrics.
pub fn evaluate_replay_roi_release_gate_contract_input(
    input: &ReplayRoiReleaseGateInputContract,
) -> ReplayRoiReleaseGateOutputContract {
    let mut failed_checks = Vec::new();
    let mut evidence_refs = Vec::new();
    // Set by validity failures only; threshold failures leave it false.
    let mut indeterminate = false;

    replay_release_gate_push_unique(&mut evidence_refs, "replay_roi_release_gate_summary");
    replay_release_gate_push_unique(
        &mut evidence_refs,
        format!("window_seconds:{}", input.window_seconds),
    );
    if input.generated_at.trim().is_empty() {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "missing_generated_at",
            &["field:generated_at"],
        );
        indeterminate = true;
    } else {
        replay_release_gate_push_unique(
            &mut evidence_refs,
            format!("generated_at:{}", input.generated_at),
        );
    }

    // Validity: attempts must equal success + failure.
    let expected_attempts_total = input.replay_success_total + input.replay_failure_total;
    if input.replay_attempts_total != expected_attempts_total {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_attempt_accounting",
            &[
                "metric:replay_attempts_total",
                "metric:replay_success_total",
                "metric:replay_failure_total",
            ],
        );
        indeterminate = true;
    }

    // Validity: the gate cannot judge a window with no replay activity.
    if input.replay_attempts_total == 0 {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "missing_replay_attempts",
            &["metric:replay_attempts_total"],
        );
        indeterminate = true;
    }

    // Validity: rates must be finite and within [0, 1].
    if !replay_release_gate_rate_valid(input.replay_hit_rate) {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_replay_hit_rate",
            &["metric:replay_hit_rate"],
        );
        indeterminate = true;
    }
    if !replay_release_gate_rate_valid(input.false_replay_rate) {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_false_replay_rate",
            &["metric:false_replay_rate"],
        );
        indeterminate = true;
    }

    if !input.replay_roi.is_finite() {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_replay_roi",
            &["metric:replay_roi"],
        );
        indeterminate = true;
    }

    // Validity: declared rates must match the raw counters (tolerance 1e-9).
    let expected_hit_rate = safe_ratio(input.replay_success_total, input.replay_attempts_total);
    let expected_false_rate = safe_ratio(input.replay_failure_total, input.replay_attempts_total);
    if input.replay_attempts_total > 0
        && !replay_release_gate_float_eq(input.replay_hit_rate, expected_hit_rate)
    {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_replay_hit_rate_consistency",
            &["metric:replay_hit_rate", "metric:replay_success_total"],
        );
        indeterminate = true;
    }
    if input.replay_attempts_total > 0
        && !replay_release_gate_float_eq(input.false_replay_rate, expected_false_rate)
    {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_false_replay_rate_consistency",
            &["metric:false_replay_rate", "metric:replay_failure_total"],
        );
        indeterminate = true;
    }

    // Validity: thresholds themselves must be well-formed.
    if !(0.0..=1.0).contains(&input.thresholds.min_replay_hit_rate) {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_threshold_min_replay_hit_rate",
            &["threshold:min_replay_hit_rate"],
        );
        indeterminate = true;
    }
    if !(0.0..=1.0).contains(&input.thresholds.max_false_replay_rate) {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_threshold_max_false_replay_rate",
            &["threshold:max_false_replay_rate"],
        );
        indeterminate = true;
    }
    if !input.thresholds.min_replay_roi.is_finite() {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_threshold_min_replay_roi",
            &["threshold:min_replay_roi"],
        );
        indeterminate = true;
    }

    // Threshold checks below do not set `indeterminate`.
    if input.replay_attempts_total < input.thresholds.min_replay_attempts {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "min_replay_attempts_below_threshold",
            &[
                "threshold:min_replay_attempts",
                "metric:replay_attempts_total",
            ],
        );
    }
    // Rate thresholds only apply when there was replay activity.
    if input.replay_attempts_total > 0
        && input.replay_hit_rate < input.thresholds.min_replay_hit_rate
    {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "replay_hit_rate_below_threshold",
            &["threshold:min_replay_hit_rate", "metric:replay_hit_rate"],
        );
    }
    if input.replay_attempts_total > 0
        && input.false_replay_rate > input.thresholds.max_false_replay_rate
    {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "false_replay_rate_above_threshold",
            &[
                "threshold:max_false_replay_rate",
                "metric:false_replay_rate",
            ],
        );
    }
    if input.reasoning_avoided_tokens < input.thresholds.min_reasoning_avoided_tokens {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "reasoning_avoided_tokens_below_threshold",
            &[
                "threshold:min_reasoning_avoided_tokens",
                "metric:reasoning_avoided_tokens",
            ],
        );
    }
    if input.replay_roi < input.thresholds.min_replay_roi {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "replay_roi_below_threshold",
            &["threshold:min_replay_roi", "metric:replay_roi"],
        );
    }
    if input.thresholds.require_replay_safety && !input.replay_safety {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "replay_safety_required",
            &["metric:replay_safety", "threshold:require_replay_safety"],
        );
    }

    // Deterministic ordering for stable contract output.
    failed_checks.sort();
    evidence_refs.sort();

    let status = if failed_checks.is_empty() {
        ReplayRoiReleaseGateStatus::Pass
    } else if indeterminate {
        // Any validity failure dominates threshold failures.
        ReplayRoiReleaseGateStatus::Indeterminate
    } else {
        ReplayRoiReleaseGateStatus::FailClosed
    };
    let joined_checks = if failed_checks.is_empty() {
        "none".to_string()
    } else {
        failed_checks.join(",")
    };
    let summary = match status {
        ReplayRoiReleaseGateStatus::Pass => format!(
            "release gate pass: attempts={} hit_rate={:.3} false_replay_rate={:.3} reasoning_avoided_tokens={} replay_roi={:.3} replay_safety={}",
            input.replay_attempts_total,
            input.replay_hit_rate,
            input.false_replay_rate,
            input.reasoning_avoided_tokens,
            input.replay_roi,
            input.replay_safety
        ),
        ReplayRoiReleaseGateStatus::FailClosed => format!(
            "release gate fail_closed: failed_checks=[{}] attempts={} hit_rate={:.3} false_replay_rate={:.3} reasoning_avoided_tokens={} replay_roi={:.3} replay_safety={}",
            joined_checks,
            input.replay_attempts_total,
            input.replay_hit_rate,
            input.false_replay_rate,
            input.reasoning_avoided_tokens,
            input.replay_roi,
            input.replay_safety
        ),
        ReplayRoiReleaseGateStatus::Indeterminate => format!(
            "release gate indeterminate (fail-closed): failed_checks=[{}] attempts={} hit_rate={:.3} false_replay_rate={:.3} reasoning_avoided_tokens={} replay_roi={:.3} replay_safety={}",
            joined_checks,
            input.replay_attempts_total,
            input.replay_hit_rate,
            input.false_replay_rate,
            input.reasoning_avoided_tokens,
            input.replay_roi,
            input.replay_safety
        ),
    };

    ReplayRoiReleaseGateOutputContract {
        status,
        failed_checks,
        evidence_refs,
        summary,
    }
}
6171
6172fn replay_release_gate_record_failed_check(
6173 failed_checks: &mut Vec<String>,
6174 evidence_refs: &mut Vec<String>,
6175 check: &str,
6176 refs: &[&str],
6177) {
6178 replay_release_gate_push_unique(failed_checks, check.to_string());
6179 for entry in refs {
6180 replay_release_gate_push_unique(evidence_refs, (*entry).to_string());
6181 }
6182}
6183
/// Appends `entry` to `values` unless an equal string is already present,
/// preserving insertion order.
fn replay_release_gate_push_unique(values: &mut Vec<String>, entry: impl Into<String>) {
    let entry = entry.into();
    if !values.contains(&entry) {
        values.push(entry);
    }
}
6190
/// A rate is valid when it is a finite number inside the closed unit
/// interval [0.0, 1.0]; NaN and infinities are rejected.
fn replay_release_gate_rate_valid(value: f64) -> bool {
    if !value.is_finite() {
        return false;
    }
    (0.0..=1.0).contains(&value)
}
6194
/// Approximate float equality within a fixed absolute tolerance; NaN never
/// compares equal (the difference of NaNs is NaN, which fails `<=`).
fn replay_release_gate_float_eq(left: f64, right: f64) -> bool {
    const TOLERANCE: f64 = 1e-9;
    (left - right).abs() <= TOLERANCE
}
6198
6199fn evolution_health_snapshot(snapshot: &EvolutionMetricsSnapshot) -> EvolutionHealthSnapshot {
6200 EvolutionHealthSnapshot {
6201 status: "ok".into(),
6202 last_event_seq: snapshot.last_event_seq,
6203 promoted_genes: snapshot.promoted_genes,
6204 promoted_capsules: snapshot.promoted_capsules,
6205 }
6206}
6207
6208fn render_evolution_metrics_prometheus(
6209 snapshot: &EvolutionMetricsSnapshot,
6210 health: &EvolutionHealthSnapshot,
6211) -> String {
6212 let mut out = String::new();
6213 out.push_str(
6214 "# HELP oris_evolution_replay_attempts_total Total replay attempts that reached validation.\n",
6215 );
6216 out.push_str("# TYPE oris_evolution_replay_attempts_total counter\n");
6217 out.push_str(&format!(
6218 "oris_evolution_replay_attempts_total {}\n",
6219 snapshot.replay_attempts_total
6220 ));
6221 out.push_str("# HELP oris_evolution_replay_success_total Total replay attempts that reused a capsule successfully.\n");
6222 out.push_str("# TYPE oris_evolution_replay_success_total counter\n");
6223 out.push_str(&format!(
6224 "oris_evolution_replay_success_total {}\n",
6225 snapshot.replay_success_total
6226 ));
6227 out.push_str("# HELP oris_evolution_replay_reasoning_avoided_total Total planner steps avoided by successful replay.\n");
6228 out.push_str("# TYPE oris_evolution_replay_reasoning_avoided_total counter\n");
6229 out.push_str(&format!(
6230 "oris_evolution_replay_reasoning_avoided_total {}\n",
6231 snapshot.replay_reasoning_avoided_total
6232 ));
6233 out.push_str("# HELP oris_evolution_reasoning_avoided_tokens_total Estimated reasoning tokens avoided by replay hits.\n");
6234 out.push_str("# TYPE oris_evolution_reasoning_avoided_tokens_total counter\n");
6235 out.push_str(&format!(
6236 "oris_evolution_reasoning_avoided_tokens_total {}\n",
6237 snapshot.reasoning_avoided_tokens_total
6238 ));
6239 out.push_str("# HELP oris_evolution_replay_fallback_cost_total Estimated reasoning token cost spent on replay fallbacks.\n");
6240 out.push_str("# TYPE oris_evolution_replay_fallback_cost_total counter\n");
6241 out.push_str(&format!(
6242 "oris_evolution_replay_fallback_cost_total {}\n",
6243 snapshot.replay_fallback_cost_total
6244 ));
6245 out.push_str("# HELP oris_evolution_replay_roi Net replay ROI in token space ((avoided - fallback_cost) / total).\n");
6246 out.push_str("# TYPE oris_evolution_replay_roi gauge\n");
6247 out.push_str(&format!(
6248 "oris_evolution_replay_roi {:.6}\n",
6249 snapshot.replay_roi
6250 ));
6251 out.push_str("# HELP oris_evolution_replay_utilization_by_task_class_total Successful replay reuse counts grouped by deterministic task class.\n");
6252 out.push_str("# TYPE oris_evolution_replay_utilization_by_task_class_total counter\n");
6253 for task_class in &snapshot.replay_task_classes {
6254 out.push_str(&format!(
6255 "oris_evolution_replay_utilization_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
6256 prometheus_label_value(&task_class.task_class_id),
6257 prometheus_label_value(&task_class.task_label),
6258 task_class.replay_success_total
6259 ));
6260 }
6261 out.push_str("# HELP oris_evolution_replay_reasoning_avoided_by_task_class_total Planner steps avoided by successful replay grouped by deterministic task class.\n");
6262 out.push_str("# TYPE oris_evolution_replay_reasoning_avoided_by_task_class_total counter\n");
6263 for task_class in &snapshot.replay_task_classes {
6264 out.push_str(&format!(
6265 "oris_evolution_replay_reasoning_avoided_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
6266 prometheus_label_value(&task_class.task_class_id),
6267 prometheus_label_value(&task_class.task_label),
6268 task_class.reasoning_steps_avoided_total
6269 ));
6270 }
6271 out.push_str("# HELP oris_evolution_reasoning_avoided_tokens_by_task_class_total Estimated reasoning tokens avoided by replay hits grouped by deterministic task class.\n");
6272 out.push_str("# TYPE oris_evolution_reasoning_avoided_tokens_by_task_class_total counter\n");
6273 for task_class in &snapshot.replay_task_classes {
6274 out.push_str(&format!(
6275 "oris_evolution_reasoning_avoided_tokens_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
6276 prometheus_label_value(&task_class.task_class_id),
6277 prometheus_label_value(&task_class.task_label),
6278 task_class.reasoning_avoided_tokens_total
6279 ));
6280 }
6281 out.push_str("# HELP oris_evolution_replay_fallback_cost_by_task_class_total Estimated fallback token cost grouped by deterministic task class.\n");
6282 out.push_str("# TYPE oris_evolution_replay_fallback_cost_by_task_class_total counter\n");
6283 for task_class in &snapshot.replay_task_classes {
6284 out.push_str(&format!(
6285 "oris_evolution_replay_fallback_cost_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
6286 prometheus_label_value(&task_class.task_class_id),
6287 prometheus_label_value(&task_class.task_label),
6288 task_class.replay_fallback_cost_total
6289 ));
6290 }
6291 out.push_str("# HELP oris_evolution_replay_roi_by_task_class Replay ROI in token space grouped by deterministic task class.\n");
6292 out.push_str("# TYPE oris_evolution_replay_roi_by_task_class gauge\n");
6293 for task_class in &snapshot.replay_task_classes {
6294 out.push_str(&format!(
6295 "oris_evolution_replay_roi_by_task_class{{task_class_id=\"{}\",task_label=\"{}\"}} {:.6}\n",
6296 prometheus_label_value(&task_class.task_class_id),
6297 prometheus_label_value(&task_class.task_label),
6298 task_class.replay_roi
6299 ));
6300 }
6301 out.push_str("# HELP oris_evolution_replay_roi_by_source Replay ROI in token space grouped by remote sender id for cross-node reconciliation.\n");
6302 out.push_str("# TYPE oris_evolution_replay_roi_by_source gauge\n");
6303 for source in &snapshot.replay_sources {
6304 out.push_str(&format!(
6305 "oris_evolution_replay_roi_by_source{{source_sender_id=\"{}\"}} {:.6}\n",
6306 prometheus_label_value(&source.source_sender_id),
6307 source.replay_roi
6308 ));
6309 }
6310 out.push_str("# HELP oris_evolution_reasoning_avoided_tokens_by_source_total Estimated reasoning tokens avoided grouped by remote sender id.\n");
6311 out.push_str("# TYPE oris_evolution_reasoning_avoided_tokens_by_source_total counter\n");
6312 for source in &snapshot.replay_sources {
6313 out.push_str(&format!(
6314 "oris_evolution_reasoning_avoided_tokens_by_source_total{{source_sender_id=\"{}\"}} {}\n",
6315 prometheus_label_value(&source.source_sender_id),
6316 source.reasoning_avoided_tokens_total
6317 ));
6318 }
6319 out.push_str("# HELP oris_evolution_replay_fallback_cost_by_source_total Estimated replay fallback token cost grouped by remote sender id.\n");
6320 out.push_str("# TYPE oris_evolution_replay_fallback_cost_by_source_total counter\n");
6321 for source in &snapshot.replay_sources {
6322 out.push_str(&format!(
6323 "oris_evolution_replay_fallback_cost_by_source_total{{source_sender_id=\"{}\"}} {}\n",
6324 prometheus_label_value(&source.source_sender_id),
6325 source.replay_fallback_cost_total
6326 ));
6327 }
6328 out.push_str("# HELP oris_evolution_replay_success_rate Successful replay attempts divided by replay attempts that reached validation.\n");
6329 out.push_str("# TYPE oris_evolution_replay_success_rate gauge\n");
6330 out.push_str(&format!(
6331 "oris_evolution_replay_success_rate {:.6}\n",
6332 snapshot.replay_success_rate
6333 ));
6334 out.push_str("# HELP oris_evolution_confidence_revalidations_total Total confidence-driven demotions that require revalidation before replay.\n");
6335 out.push_str("# TYPE oris_evolution_confidence_revalidations_total counter\n");
6336 out.push_str(&format!(
6337 "oris_evolution_confidence_revalidations_total {}\n",
6338 snapshot.confidence_revalidations_total
6339 ));
6340 out.push_str(
6341 "# HELP oris_evolution_mutation_declared_total Total declared mutations recorded in the evolution log.\n",
6342 );
6343 out.push_str("# TYPE oris_evolution_mutation_declared_total counter\n");
6344 out.push_str(&format!(
6345 "oris_evolution_mutation_declared_total {}\n",
6346 snapshot.mutation_declared_total
6347 ));
6348 out.push_str("# HELP oris_evolution_promoted_mutations_total Total mutations promoted by the governor.\n");
6349 out.push_str("# TYPE oris_evolution_promoted_mutations_total counter\n");
6350 out.push_str(&format!(
6351 "oris_evolution_promoted_mutations_total {}\n",
6352 snapshot.promoted_mutations_total
6353 ));
6354 out.push_str(
6355 "# HELP oris_evolution_promotion_ratio Promoted mutations divided by declared mutations.\n",
6356 );
6357 out.push_str("# TYPE oris_evolution_promotion_ratio gauge\n");
6358 out.push_str(&format!(
6359 "oris_evolution_promotion_ratio {:.6}\n",
6360 snapshot.promotion_ratio
6361 ));
6362 out.push_str("# HELP oris_evolution_gene_revocations_total Total gene revocations recorded in the evolution log.\n");
6363 out.push_str("# TYPE oris_evolution_gene_revocations_total counter\n");
6364 out.push_str(&format!(
6365 "oris_evolution_gene_revocations_total {}\n",
6366 snapshot.gene_revocations_total
6367 ));
6368 out.push_str("# HELP oris_evolution_mutation_velocity_last_hour Declared mutations observed in the last hour.\n");
6369 out.push_str("# TYPE oris_evolution_mutation_velocity_last_hour gauge\n");
6370 out.push_str(&format!(
6371 "oris_evolution_mutation_velocity_last_hour {}\n",
6372 snapshot.mutation_velocity_last_hour
6373 ));
6374 out.push_str("# HELP oris_evolution_revoke_frequency_last_hour Gene revocations observed in the last hour.\n");
6375 out.push_str("# TYPE oris_evolution_revoke_frequency_last_hour gauge\n");
6376 out.push_str(&format!(
6377 "oris_evolution_revoke_frequency_last_hour {}\n",
6378 snapshot.revoke_frequency_last_hour
6379 ));
6380 out.push_str("# HELP oris_evolution_promoted_genes Current promoted genes in the evolution projection.\n");
6381 out.push_str("# TYPE oris_evolution_promoted_genes gauge\n");
6382 out.push_str(&format!(
6383 "oris_evolution_promoted_genes {}\n",
6384 snapshot.promoted_genes
6385 ));
6386 out.push_str("# HELP oris_evolution_promoted_capsules Current promoted capsules in the evolution projection.\n");
6387 out.push_str("# TYPE oris_evolution_promoted_capsules gauge\n");
6388 out.push_str(&format!(
6389 "oris_evolution_promoted_capsules {}\n",
6390 snapshot.promoted_capsules
6391 ));
6392 out.push_str("# HELP oris_evolution_store_last_event_seq Last visible append-only evolution event sequence.\n");
6393 out.push_str("# TYPE oris_evolution_store_last_event_seq gauge\n");
6394 out.push_str(&format!(
6395 "oris_evolution_store_last_event_seq {}\n",
6396 snapshot.last_event_seq
6397 ));
6398 out.push_str(
6399 "# HELP oris_evolution_health Evolution observability store health (1 = healthy).\n",
6400 );
6401 out.push_str("# TYPE oris_evolution_health gauge\n");
6402 out.push_str(&format!(
6403 "oris_evolution_health {}\n",
6404 u8::from(health.status == "ok")
6405 ));
6406 out
6407}
6408
6409fn count_recent_events(
6410 events: &[StoredEvolutionEvent],
6411 cutoff: DateTime<Utc>,
6412 predicate: impl Fn(&EvolutionEvent) -> bool,
6413) -> u64 {
6414 events
6415 .iter()
6416 .filter(|stored| {
6417 predicate(&stored.event)
6418 && parse_event_timestamp(&stored.timestamp)
6419 .map(|timestamp| timestamp >= cutoff)
6420 .unwrap_or(false)
6421 })
6422 .count() as u64
6423}
6424
6425fn prometheus_label_value(input: &str) -> String {
6426 input
6427 .replace('\\', "\\\\")
6428 .replace('\n', "\\n")
6429 .replace('"', "\\\"")
6430}
6431
6432fn parse_event_timestamp(raw: &str) -> Option<DateTime<Utc>> {
6433 DateTime::parse_from_rfc3339(raw)
6434 .ok()
6435 .map(|parsed| parsed.with_timezone(&Utc))
6436}
6437
6438fn is_replay_validation_failure(event: &EvolutionEvent) -> bool {
6439 matches!(
6440 event,
6441 EvolutionEvent::ValidationFailed {
6442 gene_id: Some(_),
6443 ..
6444 }
6445 )
6446}
6447
6448fn is_confidence_revalidation_event(event: &EvolutionEvent) -> bool {
6449 matches!(
6450 event,
6451 EvolutionEvent::PromotionEvaluated {
6452 state,
6453 reason,
6454 reason_code,
6455 ..
6456 }
6457 if *state == AssetState::Quarantined
6458 && (reason_code == &TransitionReasonCode::RevalidationConfidenceDecay
6459 || (reason_code == &TransitionReasonCode::Unspecified
6460 && reason.contains("confidence decayed")))
6461 )
6462}
6463
/// Division that treats a zero denominator as a 0.0 ratio instead of
/// producing NaN/infinity.
fn safe_ratio(numerator: u64, denominator: u64) -> f64 {
    match denominator {
        0 => 0.0,
        nonzero => numerator as f64 / nonzero as f64,
    }
}
6471
6472fn store_err(err: EvolutionError) -> EvoKernelError {
6473 EvoKernelError::Store(err.to_string())
6474}
6475
6476#[cfg(test)]
6477mod tests {
6478 use super::*;
6479 use oris_agent_contract::{
6480 AgentRole, CoordinationPlan, CoordinationPrimitive, CoordinationTask,
6481 };
6482 use oris_kernel::{
6483 AllowAllPolicy, InMemoryEventStore, KernelMode, KernelState, NoopActionExecutor,
6484 NoopStepFn, StateUpdatedOnlyReducer,
6485 };
6486 use serde::{Deserialize, Serialize};
6487
    /// Zero-sized kernel state used by these tests; carries no data.
    #[derive(Clone, Debug, Default, Serialize, Deserialize)]
    struct TestState;
6490
    impl KernelState for TestState {
        // The tests only need a stable, constant state version.
        fn version(&self) -> u32 {
            1
        }
    }
6496
    #[test]
    fn repair_quality_gate_accepts_semantic_variants() {
        // Plan text (in Chinese) uses variant section headings but still
        // covers root cause, fix suggestion, verification, and recovery;
        // the gate should accept these semantic equivalents.
        let plan = r#"
根本原因:脚本中拼写错误导致 unknown command 'process'。
修复建议:将 `proccess` 更正为 `process`,并统一命令入口。
验证方式:执行 `cargo check -p oris-runtime` 与回归测试。
恢复方案:若新入口异常,立即回滚到旧命令映射。
"#;
        let report = evaluate_repair_quality_gate(plan);
        assert!(report.passes());
        assert!(report.failed_checks().is_empty());
    }
6509
    #[test]
    fn repair_quality_gate_rejects_missing_incident_anchor() {
        // Plan has all four sections but never quotes the concrete incident
        // text; the failed check mentioning "unknown command" presumably
        // names the missing anchor — confirm against the gate implementation.
        let plan = r#"
原因分析:逻辑分支覆盖不足。
修复方案:补充分支与日志。
验证命令:cargo check -p oris-runtime
回滚方案:git revert HEAD
"#;
        let report = evaluate_repair_quality_gate(plan);
        assert!(!report.passes());
        assert!(report
            .failed_checks()
            .iter()
            .any(|check| check.contains("unknown command")));
    }
6525
6526 fn temp_workspace(name: &str) -> std::path::PathBuf {
6527 let root =
6528 std::env::temp_dir().join(format!("oris-evokernel-{name}-{}", std::process::id()));
6529 if root.exists() {
6530 fs::remove_dir_all(&root).unwrap();
6531 }
6532 fs::create_dir_all(root.join("src")).unwrap();
6533 fs::write(
6534 root.join("Cargo.toml"),
6535 "[package]\nname = \"sample\"\nversion = \"0.1.0\"\nedition = \"2021\"\n",
6536 )
6537 .unwrap();
6538 fs::write(root.join("Cargo.lock"), "# lock\n").unwrap();
6539 fs::write(root.join("src/lib.rs"), "pub fn demo() -> usize { 1 }\n").unwrap();
6540 root
6541 }
6542
    /// Builds a minimal kernel wired entirely with no-op components, so
    /// EvoKernel tests exercise evolution logic without real kernel behavior.
    fn test_kernel() -> Arc<Kernel<TestState>> {
        Arc::new(Kernel::<TestState> {
            events: Box::new(InMemoryEventStore::new()),
            snaps: None,
            reducer: Box::new(StateUpdatedOnlyReducer),
            exec: Box::new(NoopActionExecutor),
            step: Box::new(NoopStepFn),
            policy: Box::new(AllowAllPolicy),
            effect_sink: None,
            mode: KernelMode::Normal,
        })
    }
6555
6556 fn lightweight_plan() -> ValidationPlan {
6557 ValidationPlan {
6558 profile: "test".into(),
6559 stages: vec![ValidationStage::Command {
6560 program: "git".into(),
6561 args: vec!["--version".into()],
6562 timeout_ms: 5_000,
6563 }],
6564 }
6565 }
6566
    /// Builds a small prepared mutation that adds a one-line `README.md`.
    ///
    /// The diff literal feeds the artifact content hash, so its exact bytes
    /// must stay stable.
    fn sample_mutation() -> PreparedMutation {
        prepare_mutation(
            MutationIntent {
                id: "mutation-1".into(),
                intent: "add README".into(),
                target: MutationTarget::Paths {
                    allow: vec!["README.md".into()],
                },
                expected_effect: "repo still builds".into(),
                risk: RiskLevel::Low,
                signals: vec!["missing readme".into()],
                spec_id: None,
            },
            // The leading `\` joins the literal to the next line so the diff
            // text starts directly with `diff --git`.
            "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1 @@
+# sample
"
            .into(),
            Some("HEAD".into()),
        )
    }
6593
6594 fn base_sandbox_policy() -> SandboxPolicy {
6595 SandboxPolicy {
6596 allowed_programs: vec!["git".into()],
6597 max_duration_ms: 60_000,
6598 max_output_bytes: 1024 * 1024,
6599 denied_env_prefixes: Vec::new(),
6600 }
6601 }
6602
6603 fn command_validator() -> Arc<dyn Validator> {
6604 Arc::new(CommandValidator::new(base_sandbox_policy()))
6605 }
6606
6607 fn replay_input(signal: &str) -> SelectorInput {
6608 let rustc_version = std::process::Command::new("rustc")
6609 .arg("--version")
6610 .output()
6611 .ok()
6612 .filter(|output| output.status.success())
6613 .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
6614 .unwrap_or_else(|| "rustc unknown".into());
6615 SelectorInput {
6616 signals: vec![signal.into()],
6617 env: EnvFingerprint {
6618 rustc_version,
6619 cargo_lock_hash: compute_artifact_hash("# lock\n"),
6620 target_triple: format!(
6621 "{}-unknown-{}",
6622 std::env::consts::ARCH,
6623 std::env::consts::OS
6624 ),
6625 os: std::env::consts::OS.into(),
6626 },
6627 spec_id: None,
6628 limit: 1,
6629 }
6630 }
6631
6632 fn build_test_evo_with_store(
6633 name: &str,
6634 run_id: &str,
6635 validator: Arc<dyn Validator>,
6636 store: Arc<dyn EvolutionStore>,
6637 ) -> EvoKernel<TestState> {
6638 let workspace = temp_workspace(name);
6639 let sandbox: Arc<dyn Sandbox> = Arc::new(oris_sandbox::LocalProcessSandbox::new(
6640 run_id,
6641 &workspace,
6642 std::env::temp_dir(),
6643 ));
6644 EvoKernel::new(test_kernel(), sandbox, validator, store)
6645 .with_governor(Arc::new(DefaultGovernor::new(
6646 oris_governor::GovernorConfig {
6647 promote_after_successes: 1,
6648 ..Default::default()
6649 },
6650 )))
6651 .with_validation_plan(lightweight_plan())
6652 .with_sandbox_policy(base_sandbox_policy())
6653 }
6654
6655 fn build_test_evo(
6656 name: &str,
6657 run_id: &str,
6658 validator: Arc<dyn Validator>,
6659 ) -> (EvoKernel<TestState>, Arc<dyn EvolutionStore>) {
6660 let store_root = std::env::temp_dir().join(format!(
6661 "oris-evokernel-{name}-store-{}",
6662 std::process::id()
6663 ));
6664 if store_root.exists() {
6665 fs::remove_dir_all(&store_root).unwrap();
6666 }
6667 let store: Arc<dyn EvolutionStore> =
6668 Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
6669 let evo = build_test_evo_with_store(name, run_id, validator, store.clone());
6670 (evo, store)
6671 }
6672
6673 fn remote_publish_envelope(
6674 sender_id: &str,
6675 run_id: &str,
6676 gene_id: &str,
6677 capsule_id: &str,
6678 mutation_id: &str,
6679 signal: &str,
6680 file_name: &str,
6681 line: &str,
6682 ) -> EvolutionEnvelope {
6683 remote_publish_envelope_with_env(
6684 sender_id,
6685 run_id,
6686 gene_id,
6687 capsule_id,
6688 mutation_id,
6689 signal,
6690 file_name,
6691 line,
6692 replay_input(signal).env,
6693 )
6694 }
6695
6696 fn remote_publish_envelope_with_env(
6697 sender_id: &str,
6698 run_id: &str,
6699 gene_id: &str,
6700 capsule_id: &str,
6701 mutation_id: &str,
6702 signal: &str,
6703 file_name: &str,
6704 line: &str,
6705 env: EnvFingerprint,
6706 ) -> EvolutionEnvelope {
6707 let mutation = prepare_mutation(
6708 MutationIntent {
6709 id: mutation_id.into(),
6710 intent: format!("add {file_name}"),
6711 target: MutationTarget::Paths {
6712 allow: vec![file_name.into()],
6713 },
6714 expected_effect: "replay should still validate".into(),
6715 risk: RiskLevel::Low,
6716 signals: vec![signal.into()],
6717 spec_id: None,
6718 },
6719 format!(
6720 "\
6721diff --git a/{file_name} b/{file_name}
6722new file mode 100644
6723index 0000000..1111111
6724--- /dev/null
6725+++ b/{file_name}
6726@@ -0,0 +1 @@
6727+{line}
6728"
6729 ),
6730 Some("HEAD".into()),
6731 );
6732 let gene = Gene {
6733 id: gene_id.into(),
6734 signals: vec![signal.into()],
6735 strategy: vec![file_name.into()],
6736 validation: vec!["test".into()],
6737 state: AssetState::Promoted,
6738 };
6739 let capsule = Capsule {
6740 id: capsule_id.into(),
6741 gene_id: gene_id.into(),
6742 mutation_id: mutation_id.into(),
6743 run_id: run_id.into(),
6744 diff_hash: mutation.artifact.content_hash.clone(),
6745 confidence: 0.9,
6746 env,
6747 outcome: Outcome {
6748 success: true,
6749 validation_profile: "test".into(),
6750 validation_duration_ms: 1,
6751 changed_files: vec![file_name.into()],
6752 validator_hash: "validator-hash".into(),
6753 lines_changed: 1,
6754 replay_verified: false,
6755 },
6756 state: AssetState::Promoted,
6757 };
6758 EvolutionEnvelope::publish(
6759 sender_id,
6760 vec![
6761 NetworkAsset::EvolutionEvent {
6762 event: EvolutionEvent::MutationDeclared { mutation },
6763 },
6764 NetworkAsset::Gene { gene: gene.clone() },
6765 NetworkAsset::Capsule {
6766 capsule: capsule.clone(),
6767 },
6768 NetworkAsset::EvolutionEvent {
6769 event: EvolutionEvent::CapsuleReleased {
6770 capsule_id: capsule.id.clone(),
6771 state: AssetState::Promoted,
6772 },
6773 },
6774 ],
6775 )
6776 }
6777
    /// Builds a remote publish envelope whose mutation and gene advertise
    /// independently chosen signal lists.
    ///
    /// Envelope asset order: MutationDeclared event, promoted Gene,
    /// promoted Capsule, then a CapsuleReleased event for that capsule.
    fn remote_publish_envelope_with_signals(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        mutation_signals: Vec<String>,
        gene_signals: Vec<String>,
        file_name: &str,
        line: &str,
        env: EnvFingerprint,
    ) -> EvolutionEnvelope {
        // Mutation adding `file_name` with the single content `line`; the
        // diff text feeds the artifact hash, so its bytes must stay stable.
        let mutation = prepare_mutation(
            MutationIntent {
                id: mutation_id.into(),
                intent: format!("add {file_name}"),
                target: MutationTarget::Paths {
                    allow: vec![file_name.into()],
                },
                expected_effect: "replay should still validate".into(),
                risk: RiskLevel::Low,
                signals: mutation_signals,
                spec_id: None,
            },
            format!(
                "\
diff --git a/{file_name} b/{file_name}
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/{file_name}
@@ -0,0 +1 @@
+{line}
"
            ),
            Some("HEAD".into()),
        );
        let gene = Gene {
            id: gene_id.into(),
            signals: gene_signals,
            strategy: vec![file_name.into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        };
        // Capsule arrives already promoted with a fixed 0.9 confidence so
        // replay selection can pick it without local validation history.
        let capsule = Capsule {
            id: capsule_id.into(),
            gene_id: gene_id.into(),
            mutation_id: mutation_id.into(),
            run_id: run_id.into(),
            diff_hash: mutation.artifact.content_hash.clone(),
            confidence: 0.9,
            env,
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec![file_name.into()],
                validator_hash: "validator-hash".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        };
        EvolutionEnvelope::publish(
            sender_id,
            vec![
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared { mutation },
                },
                NetworkAsset::Gene { gene: gene.clone() },
                NetworkAsset::Capsule {
                    capsule: capsule.clone(),
                },
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::CapsuleReleased {
                        capsule_id: capsule.id.clone(),
                        state: AssetState::Promoted,
                    },
                },
            ],
        )
    }
6860
    /// Validator stub whose reports always carry the configured flag.
    struct FixedValidator {
        // When false, every validation run reports failure.
        success: bool,
    }
6864
6865 #[async_trait]
6866 impl Validator for FixedValidator {
6867 async fn run(
6868 &self,
6869 _receipt: &SandboxReceipt,
6870 plan: &ValidationPlan,
6871 ) -> Result<ValidationReport, ValidationError> {
6872 Ok(ValidationReport {
6873 success: self.success,
6874 duration_ms: 1,
6875 stages: Vec::new(),
6876 logs: if self.success {
6877 format!("{} ok", plan.profile)
6878 } else {
6879 format!("{} failed", plan.profile)
6880 },
6881 })
6882 }
6883 }
6884
    /// Evolution store wrapper that injects a single failure on a chosen
    /// `append_event` call and otherwise delegates to a JSONL store.
    struct FailOnAppendStore {
        inner: JsonlEvolutionStore,
        // 1-based index of the append call that should fail.
        fail_on_call: usize,
        // Number of append calls observed so far.
        call_count: Mutex<usize>,
    }
6890
6891 impl FailOnAppendStore {
6892 fn new(root_dir: std::path::PathBuf, fail_on_call: usize) -> Self {
6893 Self {
6894 inner: JsonlEvolutionStore::new(root_dir),
6895 fail_on_call,
6896 call_count: Mutex::new(0),
6897 }
6898 }
6899 }
6900
6901 impl EvolutionStore for FailOnAppendStore {
6902 fn append_event(&self, event: EvolutionEvent) -> Result<u64, EvolutionError> {
6903 let mut call_count = self
6904 .call_count
6905 .lock()
6906 .map_err(|_| EvolutionError::Io("test store lock poisoned".into()))?;
6907 *call_count += 1;
6908 if *call_count == self.fail_on_call {
6909 return Err(EvolutionError::Io("injected append failure".into()));
6910 }
6911 self.inner.append_event(event)
6912 }
6913
6914 fn scan(&self, from_seq: u64) -> Result<Vec<StoredEvolutionEvent>, EvolutionError> {
6915 self.inner.scan(from_seq)
6916 }
6917
6918 fn rebuild_projection(&self) -> Result<EvolutionProjection, EvolutionError> {
6919 self.inner.rebuild_projection()
6920 }
6921 }
6922
    #[test]
    fn coordination_planner_to_coder_handoff_is_deterministic() {
        // Sequential plan: planner first, then a coder depending on it.
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "ship feature".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "planner".into(),
                    role: AgentRole::Planner,
                    description: "split the work".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "implement the patch".into(),
                    depends_on: vec!["planner".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        // Both tasks complete in dependency order and a planner-to-coder
        // handoff message targets the coder task.
        assert_eq!(result.completed_tasks, vec!["planner", "coder"]);
        assert!(result.failed_tasks.is_empty());
        assert!(result.messages.iter().any(|message| {
            message.from_role == AgentRole::Planner
                && message.to_role == AgentRole::Coder
                && message.task_id == "coder"
        }));
    }
6954
    #[test]
    fn coordination_repair_runs_only_after_coder_failure() {
        // The "force-fail" wording in the description appears to be the
        // trigger the coordinator uses to fail a task — confirm against the
        // MultiAgentCoordinator implementation.
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "fix broken implementation".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "force-fail initial implementation".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "repair".into(),
                    role: AgentRole::Repair,
                    description: "patch the failed implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        // Only the repair task completes; the coder failure is recorded and
        // a coder-to-repair handoff message targets the repair task.
        assert_eq!(result.completed_tasks, vec!["repair"]);
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert!(result.messages.iter().any(|message| {
            message.from_role == AgentRole::Coder
                && message.to_role == AgentRole::Repair
                && message.task_id == "repair"
        }));
    }
6986
    #[test]
    fn coordination_optimizer_runs_after_successful_implementation_step() {
        // Sequential plan: a successful coder step followed by an optimizer
        // that depends on it; both should complete in order.
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "ship optimized patch".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "implement a working patch".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "optimizer".into(),
                    role: AgentRole::Optimizer,
                    description: "tighten the implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["coder", "optimizer"]);
        assert!(result.failed_tasks.is_empty());
    }
7013
    #[test]
    fn coordination_parallel_waves_preserve_sorted_merge_order() {
        // Two independent tasks plus one depending on both. The assertion
        // expects the first wave merged in sorted id order ("a-task" before
        // "z-task") with the dependent task in a later wave.
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "parallelize safe tasks".into(),
            primitive: CoordinationPrimitive::Parallel,
            tasks: vec![
                CoordinationTask {
                    id: "z-task".into(),
                    role: AgentRole::Planner,
                    description: "analyze z".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "a-task".into(),
                    role: AgentRole::Coder,
                    description: "implement a".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "mid-task".into(),
                    role: AgentRole::Optimizer,
                    description: "polish after both".into(),
                    depends_on: vec!["z-task".into(), "a-task".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["a-task", "z-task", "mid-task"]);
        assert!(result.failed_tasks.is_empty());
    }
7046
    #[test]
    fn coordination_retries_stop_at_max_retries() {
        // A permanently failing task with max_retries = 1: the coordinator
        // should attempt it twice (initial + one retry) and then give up.
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "retry then stop".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![CoordinationTask {
                id: "coder".into(),
                role: AgentRole::Coder,
                description: "force-fail this task".into(),
                depends_on: Vec::new(),
            }],
            timeout_ms: 5_000,
            max_retries: 1,
        });

        assert!(result.completed_tasks.is_empty());
        assert_eq!(result.failed_tasks, vec!["coder"]);
        // Exactly two failure messages: one per attempt.
        assert_eq!(
            result
                .messages
                .iter()
                .filter(|message| message.task_id == "coder" && message.content.contains("failed"))
                .count(),
            2
        );
    }
7073
    #[test]
    fn coordination_conditional_mode_skips_downstream_tasks_on_failure() {
        // Conditional mode: when a dependency fails, downstream tasks are
        // skipped (with an explanatory message) rather than marked failed.
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "skip blocked follow-up work".into(),
            primitive: CoordinationPrimitive::Conditional,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "force-fail the implementation".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "optimizer".into(),
                    role: AgentRole::Optimizer,
                    description: "only optimize a successful implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert!(result.completed_tasks.is_empty());
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert!(result.messages.iter().any(|message| {
            message.task_id == "optimizer"
                && message
                    .content
                    .contains("skipped due to failed dependency chain")
        }));
        // The skipped optimizer must not appear among the failed tasks.
        assert!(!result
            .failed_tasks
            .iter()
            .any(|task_id| task_id == "optimizer"));
    }
7110
    #[tokio::test]
    async fn command_validator_aggregates_stage_reports() {
        // Receipt pointing at a throwaway workspace; log paths are not read
        // by this test.
        let workspace = temp_workspace("validator");
        let receipt = SandboxReceipt {
            mutation_id: "m".into(),
            workdir: workspace,
            applied: true,
            changed_files: Vec::new(),
            patch_hash: "hash".into(),
            stdout_log: std::env::temp_dir().join("stdout.log"),
            stderr_log: std::env::temp_dir().join("stderr.log"),
        };
        let validator = CommandValidator::new(SandboxPolicy {
            allowed_programs: vec!["git".into()],
            max_duration_ms: 1_000,
            max_output_bytes: 1024,
            denied_env_prefixes: Vec::new(),
        });
        // A plan with a single `git --version` stage should produce exactly
        // one stage entry in the aggregated report.
        let report = validator
            .run(
                &receipt,
                &ValidationPlan {
                    profile: "test".into(),
                    stages: vec![ValidationStage::Command {
                        program: "git".into(),
                        args: vec!["--version".into()],
                        timeout_ms: 1_000,
                    }],
                },
            )
            .await
            .unwrap();
        assert_eq!(report.stages.len(), 1);
    }
7145
    #[tokio::test]
    async fn capture_successful_mutation_appends_capsule() {
        let (evo, store) = build_test_evo("capture", "run-1", command_validator());
        // Capturing a validated mutation should append a CapsuleCommitted
        // event to the evolution log and return a capsule with a real id.
        let capsule = evo
            .capture_successful_mutation(&"run-1".into(), sample_mutation())
            .await
            .unwrap();
        let events = store.scan(1).unwrap();
        assert!(events
            .iter()
            .any(|stored| matches!(stored.event, EvolutionEvent::CapsuleCommitted { .. })));
        assert!(!capsule.id.is_empty());
    }
7159
    #[tokio::test]
    async fn replay_hit_records_capsule_reused() {
        let (evo, store) = build_test_evo("replay", "run-2", command_validator());
        // Seed one captured capsule, then ask for a replay with the same
        // signal under a different run id.
        let capsule = evo
            .capture_successful_mutation(&"run-2".into(), sample_mutation())
            .await
            .unwrap();
        let replay_run_id = "run-replay".to_string();
        let decision = evo
            .replay_or_fallback_for_run(&replay_run_id, replay_input("missing readme"))
            .await
            .unwrap();
        // The decision must reuse the seeded capsule and carry populated
        // detect/select evidence with no mismatches.
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some(capsule.id));
        assert!(!decision.detect_evidence.task_class_id.is_empty());
        assert!(!decision.detect_evidence.matched_signals.is_empty());
        assert!(decision.detect_evidence.mismatch_reasons.is_empty());
        assert!(!decision.select_evidence.candidates.is_empty());
        assert!(!decision.select_evidence.exact_match_lookup);
        assert_eq!(
            decision.select_evidence.selected_capsule_id.as_deref(),
            decision.capsule_id.as_deref()
        );
        // The reuse event must keep the original capture run id ("run-2")
        // and record the replay run id separately.
        assert!(store.scan(1).unwrap().iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::CapsuleReused {
                run_id,
                replay_run_id: Some(current_replay_run_id),
                ..
            } if run_id == "run-2" && current_replay_run_id == &replay_run_id
        )));
    }
7192
    /// The legacy `StoreReplayExecutor::try_replay` API (no explicit replay
    /// run id) should still reuse a stored capsule, but the logged
    /// `CapsuleReused` event keeps the original capture run id and carries no
    /// replay run id.
    #[tokio::test]
    async fn legacy_replay_executor_api_preserves_original_capsule_run_id() {
        let capture_run_id = "run-legacy-capture".to_string();
        let (evo, store) = build_test_evo("replay-legacy", &capture_run_id, command_validator());
        let capsule = evo
            .capture_successful_mutation(&capture_run_id, sample_mutation())
            .await
            .unwrap();
        // Build the executor directly from the evo's components, mirroring how
        // the legacy call path wires it up.
        let executor = StoreReplayExecutor {
            sandbox: evo.sandbox.clone(),
            validator: evo.validator.clone(),
            store: evo.store.clone(),
            selector: evo.selector.clone(),
            governor: evo.governor.clone(),
            economics: Some(evo.economics.clone()),
            remote_publishers: Some(evo.remote_publishers.clone()),
            stake_policy: evo.stake_policy.clone(),
        };

        let decision = executor
            .try_replay(
                &replay_input("missing readme"),
                &evo.sandbox_policy,
                &evo.validation_plan,
            )
            .await
            .unwrap();

        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some(capsule.id));
        // Legacy API: the event's run_id is the capture run and replay_run_id
        // stays None.
        assert!(store.scan(1).unwrap().iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::CapsuleReused {
                run_id,
                replay_run_id: None,
                ..
            } if run_id == &capture_run_id
        )));
    }
7232
7233 #[tokio::test]
7234 async fn metrics_snapshot_tracks_replay_promotion_and_revocation_signals() {
7235 let (evo, _) = build_test_evo("metrics", "run-metrics", command_validator());
7236 let capsule = evo
7237 .capture_successful_mutation(&"run-metrics".into(), sample_mutation())
7238 .await
7239 .unwrap();
7240 let decision = evo
7241 .replay_or_fallback(replay_input("missing readme"))
7242 .await
7243 .unwrap();
7244 assert!(decision.used_capsule);
7245
7246 evo.revoke_assets(&RevokeNotice {
7247 sender_id: "node-metrics".into(),
7248 asset_ids: vec![capsule.id.clone()],
7249 reason: "manual test revoke".into(),
7250 })
7251 .unwrap();
7252
7253 let snapshot = evo.metrics_snapshot().unwrap();
7254 assert_eq!(snapshot.replay_attempts_total, 1);
7255 assert_eq!(snapshot.replay_success_total, 1);
7256 assert_eq!(snapshot.replay_success_rate, 1.0);
7257 assert_eq!(snapshot.confidence_revalidations_total, 0);
7258 assert_eq!(snapshot.replay_reasoning_avoided_total, 1);
7259 assert_eq!(
7260 snapshot.reasoning_avoided_tokens_total,
7261 decision.economics_evidence.reasoning_avoided_tokens
7262 );
7263 assert_eq!(snapshot.replay_fallback_cost_total, 0);
7264 assert_eq!(snapshot.replay_roi, 1.0);
7265 assert_eq!(snapshot.replay_task_classes.len(), 1);
7266 assert_eq!(snapshot.replay_task_classes[0].replay_success_total, 1);
7267 assert_eq!(snapshot.replay_task_classes[0].replay_failure_total, 0);
7268 assert_eq!(
7269 snapshot.replay_task_classes[0].reasoning_steps_avoided_total,
7270 1
7271 );
7272 assert_eq!(
7273 snapshot.replay_task_classes[0].replay_fallback_cost_total,
7274 0
7275 );
7276 assert_eq!(snapshot.replay_task_classes[0].replay_roi, 1.0);
7277 assert!(snapshot.replay_sources.is_empty());
7278 assert_eq!(snapshot.confidence_revalidations_total, 0);
7279 assert_eq!(snapshot.mutation_declared_total, 1);
7280 assert_eq!(snapshot.promoted_mutations_total, 1);
7281 assert_eq!(snapshot.promotion_ratio, 1.0);
7282 assert_eq!(snapshot.gene_revocations_total, 1);
7283 assert_eq!(snapshot.mutation_velocity_last_hour, 1);
7284 assert_eq!(snapshot.revoke_frequency_last_hour, 1);
7285 assert_eq!(snapshot.promoted_genes, 0);
7286 assert_eq!(snapshot.promoted_capsules, 0);
7287
7288 let rendered = evo.render_metrics_prometheus().unwrap();
7289 assert!(rendered.contains("oris_evolution_replay_reasoning_avoided_total 1"));
7290 assert!(rendered.contains("oris_evolution_reasoning_avoided_tokens_total"));
7291 assert!(rendered.contains("oris_evolution_replay_fallback_cost_total"));
7292 assert!(rendered.contains("oris_evolution_replay_roi 1.000000"));
7293 assert!(rendered.contains("oris_evolution_replay_utilization_by_task_class_total"));
7294 assert!(rendered.contains("oris_evolution_replay_reasoning_avoided_by_task_class_total"));
7295 assert!(rendered.contains("oris_evolution_replay_success_rate 1.000000"));
7296 assert!(rendered.contains("oris_evolution_confidence_revalidations_total 0"));
7297 assert!(rendered.contains("oris_evolution_promotion_ratio 1.000000"));
7298 assert!(rendered.contains("oris_evolution_revoke_frequency_last_hour 1"));
7299 assert!(rendered.contains("oris_evolution_mutation_velocity_last_hour 1"));
7300 assert!(rendered.contains("oris_evolution_health 1"));
7301 }
7302
    /// Seeds legacy-style replay history (a raw `CapsuleReused` event plus a
    /// gene-linked `ValidationFailed` event appended directly to the store)
    /// and checks that the release-gate summary agrees with the metrics
    /// snapshot on every shared counter, globally and per task class.
    #[tokio::test]
    async fn replay_roi_release_gate_summary_matches_metrics_snapshot_for_legacy_replay_history() {
        let (evo, _) = build_test_evo("roi-legacy", "run-roi-legacy", command_validator());
        let capsule = evo
            .capture_successful_mutation(&"run-roi-legacy".into(), sample_mutation())
            .await
            .unwrap();

        // Legacy replay success: recorded as a raw CapsuleReused event.
        evo.store
            .append_event(EvolutionEvent::CapsuleReused {
                capsule_id: capsule.id.clone(),
                gene_id: capsule.gene_id.clone(),
                run_id: capsule.run_id.clone(),
                replay_run_id: Some("run-roi-legacy-replay".into()),
            })
            .unwrap();
        // Legacy replay failure: a ValidationFailed event tied to the same gene.
        evo.store
            .append_event(EvolutionEvent::ValidationFailed {
                mutation_id: "legacy-replay-failure".into(),
                report: ValidationSnapshot {
                    success: false,
                    profile: "test".into(),
                    duration_ms: 1,
                    summary: "legacy replay validation failed".into(),
                },
                gene_id: Some(capsule.gene_id.clone()),
            })
            .unwrap();

        let metrics = evo.metrics_snapshot().unwrap();
        let summary = evo.replay_roi_release_gate_summary(0).unwrap();
        let task_class = &metrics.replay_task_classes[0];

        // One success and one failure from the seeded legacy events.
        assert_eq!(metrics.replay_attempts_total, 2);
        assert_eq!(metrics.replay_success_total, 1);
        // Summary totals must mirror the snapshot, with failures derived as
        // attempts minus successes.
        assert_eq!(summary.replay_attempts_total, metrics.replay_attempts_total);
        assert_eq!(summary.replay_success_total, metrics.replay_success_total);
        assert_eq!(
            summary.replay_failure_total,
            metrics.replay_attempts_total - metrics.replay_success_total
        );
        assert_eq!(
            summary.reasoning_avoided_tokens_total,
            metrics.reasoning_avoided_tokens_total
        );
        assert_eq!(
            summary.replay_fallback_cost_total,
            metrics.replay_fallback_cost_total
        );
        assert_eq!(summary.replay_roi, metrics.replay_roi);
        // Per-task-class entries line up one-to-one with the snapshot.
        assert_eq!(summary.replay_task_classes.len(), 1);
        assert_eq!(
            summary.replay_task_classes[0].task_class_id,
            task_class.task_class_id
        );
        assert_eq!(
            summary.replay_task_classes[0].replay_success_total,
            task_class.replay_success_total
        );
        assert_eq!(
            summary.replay_task_classes[0].replay_failure_total,
            task_class.replay_failure_total
        );
        assert_eq!(
            summary.replay_task_classes[0].reasoning_avoided_tokens_total,
            task_class.reasoning_avoided_tokens_total
        );
        assert_eq!(
            summary.replay_task_classes[0].replay_fallback_cost_total,
            task_class.replay_fallback_cost_total
        );
    }
7375
    /// One miss plus one hit against an imported remote capsule should show up
    /// in the windowed release-gate summary aggregated both by task class and
    /// by remote source, and in its JSON rendering.
    #[tokio::test]
    async fn replay_roi_release_gate_summary_aggregates_task_class_and_remote_source() {
        let (evo, _) = build_test_evo("roi-summary", "run-roi-summary", command_validator());
        let envelope = remote_publish_envelope(
            "node-roi",
            "run-remote-roi",
            "gene-roi",
            "capsule-roi",
            "mutation-roi",
            "roi-signal",
            "ROI.md",
            "# roi",
        );
        evo.import_remote_envelope(&envelope).unwrap();

        // Miss: the goal shares no signals with the imported capsule, so the
        // selector yields no candidates and we fall back to the planner.
        let miss = evo
            .replay_or_fallback(replay_input("entropy-hash-12345-no-overlap"))
            .await
            .unwrap();
        assert!(!miss.used_capsule);
        assert!(miss.fallback_to_planner);
        assert!(miss.select_evidence.candidates.is_empty());
        assert!(miss
            .detect_evidence
            .mismatch_reasons
            .iter()
            .any(|reason| reason == "no_candidate_after_select"));

        // Hit: the goal matches the remote capsule's signal.
        let hit = evo
            .replay_or_fallback(replay_input("roi-signal"))
            .await
            .unwrap();
        assert!(hit.used_capsule);
        assert!(!hit.select_evidence.candidates.is_empty());
        assert_eq!(
            hit.select_evidence.selected_capsule_id.as_deref(),
            hit.capsule_id.as_deref()
        );

        // A one-hour window covers both attempts.
        let summary = evo.replay_roi_release_gate_summary(60 * 60).unwrap();
        assert_eq!(summary.replay_attempts_total, 2);
        assert_eq!(summary.replay_success_total, 1);
        assert_eq!(summary.replay_failure_total, 1);
        assert!(summary.reasoning_avoided_tokens_total > 0);
        assert!(summary.replay_fallback_cost_total > 0);
        // Task-class aggregation isolates the successful class from the miss.
        assert!(summary
            .replay_task_classes
            .iter()
            .any(|entry| { entry.replay_success_total == 1 && entry.replay_failure_total == 0 }));
        // Remote-source aggregation credits the publishing node.
        assert!(summary.replay_sources.iter().any(|source| {
            source.source_sender_id == "node-roi" && source.replay_success_total == 1
        }));

        let rendered = evo
            .render_replay_roi_release_gate_summary_json(60 * 60)
            .unwrap();
        assert!(rendered.contains("\"replay_attempts_total\": 2"));
        assert!(rendered.contains("\"source_sender_id\": \"node-roi\""));
    }
7435
    /// The release-gate contract built from live history must expose the same
    /// core metrics as the summary, carry the default thresholds and
    /// fail-closed policy, and — with too few attempts for the defaults —
    /// evaluate to `FailClosed` with each violated check named.
    #[tokio::test]
    async fn replay_roi_release_gate_summary_contract_exposes_core_metrics_and_fail_closed_defaults(
    ) {
        let (evo, _) = build_test_evo("roi-contract", "run-roi-contract", command_validator());
        let envelope = remote_publish_envelope(
            "node-contract",
            "run-remote-contract",
            "gene-contract",
            "capsule-contract",
            "mutation-contract",
            "contract-signal",
            "CONTRACT.md",
            "# contract",
        );
        evo.import_remote_envelope(&envelope).unwrap();

        // Produce one miss and one hit so both rates land at exactly 0.5.
        let miss = evo
            .replay_or_fallback(replay_input("entropy-hash-contract-no-overlap"))
            .await
            .unwrap();
        assert!(!miss.used_capsule);
        assert!(miss.fallback_to_planner);

        let hit = evo
            .replay_or_fallback(replay_input("contract-signal"))
            .await
            .unwrap();
        assert!(hit.used_capsule);

        let summary = evo.replay_roi_release_gate_summary(60 * 60).unwrap();
        let contract = evo
            .replay_roi_release_gate_contract(60 * 60, ReplayRoiReleaseGateThresholds::default())
            .unwrap();

        // Contract input mirrors the summary's counters.
        assert_eq!(contract.input.replay_attempts_total, 2);
        assert_eq!(contract.input.replay_success_total, 1);
        assert_eq!(contract.input.replay_failure_total, 1);
        assert_eq!(
            contract.input.reasoning_avoided_tokens,
            summary.reasoning_avoided_tokens_total
        );
        assert_eq!(
            contract.input.replay_fallback_cost_total,
            summary.replay_fallback_cost_total
        );
        assert!((contract.input.replay_hit_rate - 0.5).abs() < f64::EPSILON);
        assert!((contract.input.false_replay_rate - 0.5).abs() < f64::EPSILON);
        assert!((contract.input.replay_roi - summary.replay_roi).abs() < f64::EPSILON);
        assert!(contract.input.replay_safety);
        // Aggregation dimensions and policies come from the shared defaults.
        assert_eq!(
            contract.input.aggregation_dimensions,
            REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
                .iter()
                .map(|dimension| (*dimension).to_string())
                .collect::<Vec<_>>()
        );
        assert_eq!(
            contract.input.thresholds,
            ReplayRoiReleaseGateThresholds::default()
        );
        assert_eq!(
            contract.input.fail_closed_policy,
            ReplayRoiReleaseGateFailClosedPolicy::default()
        );
        // Two attempts with 0.5 hit/false rates violate the default gate.
        assert_eq!(
            contract.output.status,
            ReplayRoiReleaseGateStatus::FailClosed
        );
        assert!(contract
            .output
            .failed_checks
            .iter()
            .any(|check| check == "min_replay_attempts_below_threshold"));
        assert!(contract
            .output
            .failed_checks
            .iter()
            .any(|check| check == "replay_hit_rate_below_threshold"));
        assert!(contract
            .output
            .failed_checks
            .iter()
            .any(|check| check == "false_replay_rate_above_threshold"));
        assert!(contract
            .output
            .evidence_refs
            .iter()
            .any(|evidence| evidence == "replay_roi_release_gate_summary"));
        assert!(contract.output.summary.contains("release gate fail_closed"));
    }
7526
7527 #[tokio::test]
7528 async fn replay_roi_release_gate_summary_contract_accepts_custom_thresholds_and_json() {
7529 let (evo, _) = build_test_evo(
7530 "roi-contract-thresholds",
7531 "run-roi-contract-thresholds",
7532 command_validator(),
7533 );
7534 let thresholds = ReplayRoiReleaseGateThresholds {
7535 min_replay_attempts: 8,
7536 min_replay_hit_rate: 0.75,
7537 max_false_replay_rate: 0.10,
7538 min_reasoning_avoided_tokens: 600,
7539 min_replay_roi: 0.30,
7540 require_replay_safety: true,
7541 };
7542 let contract = evo
7543 .replay_roi_release_gate_contract(60 * 60, thresholds.clone())
7544 .unwrap();
7545 assert_eq!(contract.input.thresholds, thresholds.clone());
7546 assert_eq!(contract.input.replay_attempts_total, 0);
7547 assert_eq!(contract.input.replay_hit_rate, 0.0);
7548 assert_eq!(contract.input.false_replay_rate, 0.0);
7549 assert!(!contract.input.replay_safety_signal.has_replay_activity);
7550 assert!(!contract.input.replay_safety);
7551 assert_eq!(
7552 contract.output.status,
7553 ReplayRoiReleaseGateStatus::Indeterminate
7554 );
7555 assert!(contract
7556 .output
7557 .failed_checks
7558 .iter()
7559 .any(|check| check == "missing_replay_attempts"));
7560 assert!(contract
7561 .output
7562 .summary
7563 .contains("indeterminate (fail-closed)"));
7564
7565 let rendered = evo
7566 .render_replay_roi_release_gate_contract_json(60 * 60, thresholds)
7567 .unwrap();
7568 assert!(rendered.contains("\"min_replay_attempts\": 8"));
7569 assert!(rendered.contains("\"min_replay_hit_rate\": 0.75"));
7570 assert!(rendered.contains("\"status\": \"indeterminate\""));
7571 }
7572
7573 #[tokio::test]
7574 async fn replay_roi_release_gate_summary_window_boundary_filters_old_events() {
7575 let (evo, _) = build_test_evo("roi-window", "run-roi-window", command_validator());
7576 let envelope = remote_publish_envelope(
7577 "node-window",
7578 "run-remote-window",
7579 "gene-window",
7580 "capsule-window",
7581 "mutation-window",
7582 "window-signal",
7583 "WINDOW.md",
7584 "# window",
7585 );
7586 evo.import_remote_envelope(&envelope).unwrap();
7587
7588 let miss = evo
7589 .replay_or_fallback(replay_input("window-no-match-signal"))
7590 .await
7591 .unwrap();
7592 assert!(!miss.used_capsule);
7593 assert!(miss.fallback_to_planner);
7594
7595 let first_hit = evo
7596 .replay_or_fallback(replay_input("window-signal"))
7597 .await
7598 .unwrap();
7599 assert!(first_hit.used_capsule);
7600
7601 std::thread::sleep(std::time::Duration::from_secs(2));
7602
7603 let second_hit = evo
7604 .replay_or_fallback(replay_input("window-signal"))
7605 .await
7606 .unwrap();
7607 assert!(second_hit.used_capsule);
7608
7609 let narrow = evo.replay_roi_release_gate_summary(1).unwrap();
7610 assert_eq!(narrow.replay_attempts_total, 1);
7611 assert_eq!(narrow.replay_success_total, 1);
7612 assert_eq!(narrow.replay_failure_total, 0);
7613
7614 let all = evo.replay_roi_release_gate_summary(0).unwrap();
7615 assert_eq!(all.replay_attempts_total, 3);
7616 assert_eq!(all.replay_success_total, 2);
7617 assert_eq!(all.replay_failure_total, 1);
7618 }
7619
7620 fn fixed_release_gate_pass_fixture() -> ReplayRoiReleaseGateInputContract {
7621 ReplayRoiReleaseGateInputContract {
7622 generated_at: "2026-03-13T00:00:00Z".to_string(),
7623 window_seconds: 86_400,
7624 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7625 .iter()
7626 .map(|dimension| (*dimension).to_string())
7627 .collect(),
7628 replay_attempts_total: 4,
7629 replay_success_total: 3,
7630 replay_failure_total: 1,
7631 replay_hit_rate: 0.75,
7632 false_replay_rate: 0.25,
7633 reasoning_avoided_tokens: 480,
7634 replay_fallback_cost_total: 64,
7635 replay_roi: compute_replay_roi(480, 64),
7636 replay_safety: true,
7637 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7638 fail_closed_default: true,
7639 rollback_ready: true,
7640 audit_trail_complete: true,
7641 has_replay_activity: true,
7642 },
7643 thresholds: ReplayRoiReleaseGateThresholds::default(),
7644 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7645 }
7646 }
7647
7648 fn fixed_release_gate_fail_fixture() -> ReplayRoiReleaseGateInputContract {
7649 ReplayRoiReleaseGateInputContract {
7650 generated_at: "2026-03-13T00:00:00Z".to_string(),
7651 window_seconds: 86_400,
7652 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7653 .iter()
7654 .map(|dimension| (*dimension).to_string())
7655 .collect(),
7656 replay_attempts_total: 10,
7657 replay_success_total: 4,
7658 replay_failure_total: 6,
7659 replay_hit_rate: 0.4,
7660 false_replay_rate: 0.6,
7661 reasoning_avoided_tokens: 80,
7662 replay_fallback_cost_total: 400,
7663 replay_roi: compute_replay_roi(80, 400),
7664 replay_safety: false,
7665 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7666 fail_closed_default: true,
7667 rollback_ready: true,
7668 audit_trail_complete: true,
7669 has_replay_activity: true,
7670 },
7671 thresholds: ReplayRoiReleaseGateThresholds::default(),
7672 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7673 }
7674 }
7675
7676 fn fixed_release_gate_borderline_fixture() -> ReplayRoiReleaseGateInputContract {
7677 ReplayRoiReleaseGateInputContract {
7678 generated_at: "2026-03-13T00:00:00Z".to_string(),
7679 window_seconds: 3_600,
7680 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7681 .iter()
7682 .map(|dimension| (*dimension).to_string())
7683 .collect(),
7684 replay_attempts_total: 4,
7685 replay_success_total: 3,
7686 replay_failure_total: 1,
7687 replay_hit_rate: 0.75,
7688 false_replay_rate: 0.25,
7689 reasoning_avoided_tokens: 192,
7690 replay_fallback_cost_total: 173,
7691 replay_roi: 0.05,
7692 replay_safety: true,
7693 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7694 fail_closed_default: true,
7695 rollback_ready: true,
7696 audit_trail_complete: true,
7697 has_replay_activity: true,
7698 },
7699 thresholds: ReplayRoiReleaseGateThresholds {
7700 min_replay_attempts: 4,
7701 min_replay_hit_rate: 0.75,
7702 max_false_replay_rate: 0.25,
7703 min_reasoning_avoided_tokens: 192,
7704 min_replay_roi: 0.05,
7705 require_replay_safety: true,
7706 },
7707 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7708 }
7709 }
7710
7711 #[test]
7712 fn replay_roi_release_gate_summary_fixed_fixtures_cover_pass_fail_and_borderline() {
7713 let pass =
7714 evaluate_replay_roi_release_gate_contract_input(&fixed_release_gate_pass_fixture());
7715 let fail =
7716 evaluate_replay_roi_release_gate_contract_input(&fixed_release_gate_fail_fixture());
7717 let borderline = evaluate_replay_roi_release_gate_contract_input(
7718 &fixed_release_gate_borderline_fixture(),
7719 );
7720
7721 assert_eq!(pass.status, ReplayRoiReleaseGateStatus::Pass);
7722 assert!(pass.failed_checks.is_empty());
7723 assert_eq!(fail.status, ReplayRoiReleaseGateStatus::FailClosed);
7724 assert!(!fail.failed_checks.is_empty());
7725 assert_eq!(borderline.status, ReplayRoiReleaseGateStatus::Pass);
7726 assert!(borderline.failed_checks.is_empty());
7727 }
7728
    /// The machine-readable gate output must be deterministic: failed checks
    /// and evidence refs come back lexicographically sorted, and the JSON
    /// serialization is byte-stable with `status` as the first field.
    #[test]
    fn replay_roi_release_gate_summary_machine_readable_output_is_stable_and_sorted() {
        let output =
            evaluate_replay_roi_release_gate_contract_input(&fixed_release_gate_fail_fixture());

        // Sorted list of every threshold the fail fixture violates.
        assert_eq!(
            output.failed_checks,
            vec![
                "false_replay_rate_above_threshold".to_string(),
                "reasoning_avoided_tokens_below_threshold".to_string(),
                "replay_hit_rate_below_threshold".to_string(),
                "replay_roi_below_threshold".to_string(),
                "replay_safety_required".to_string(),
            ]
        );
        // Evidence refs pair each metric with its threshold, plus provenance
        // entries (generation timestamp, summary source, window), sorted.
        assert_eq!(
            output.evidence_refs,
            vec![
                "generated_at:2026-03-13T00:00:00Z".to_string(),
                "metric:false_replay_rate".to_string(),
                "metric:reasoning_avoided_tokens".to_string(),
                "metric:replay_hit_rate".to_string(),
                "metric:replay_roi".to_string(),
                "metric:replay_safety".to_string(),
                "replay_roi_release_gate_summary".to_string(),
                "threshold:max_false_replay_rate".to_string(),
                "threshold:min_reasoning_avoided_tokens".to_string(),
                "threshold:min_replay_hit_rate".to_string(),
                "threshold:min_replay_roi".to_string(),
                "threshold:require_replay_safety".to_string(),
                "window_seconds:86400".to_string(),
            ]
        );

        // JSON field order is fixed and repeated serialization is identical.
        let rendered = serde_json::to_string(&output).unwrap();
        assert!(rendered.starts_with("{\"status\":\"fail_closed\",\"failed_checks\":"));
        assert_eq!(rendered, serde_json::to_string(&output).unwrap());
    }
7767
7768 #[test]
7769 fn replay_roi_release_gate_summary_evaluator_passes_with_threshold_compliance() {
7770 let input = ReplayRoiReleaseGateInputContract {
7771 generated_at: Utc::now().to_rfc3339(),
7772 window_seconds: 86_400,
7773 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7774 .iter()
7775 .map(|dimension| (*dimension).to_string())
7776 .collect(),
7777 replay_attempts_total: 10,
7778 replay_success_total: 9,
7779 replay_failure_total: 1,
7780 replay_hit_rate: 0.9,
7781 false_replay_rate: 0.1,
7782 reasoning_avoided_tokens: 960,
7783 replay_fallback_cost_total: 64,
7784 replay_roi: compute_replay_roi(960, 64),
7785 replay_safety: true,
7786 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7787 fail_closed_default: true,
7788 rollback_ready: true,
7789 audit_trail_complete: true,
7790 has_replay_activity: true,
7791 },
7792 thresholds: ReplayRoiReleaseGateThresholds::default(),
7793 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7794 };
7795
7796 let output = evaluate_replay_roi_release_gate_contract_input(&input);
7797 assert_eq!(output.status, ReplayRoiReleaseGateStatus::Pass);
7798 assert!(output.failed_checks.is_empty());
7799 assert!(output.summary.contains("release gate pass"));
7800 }
7801
    /// An input with a 40% hit rate, 60% false replays, weak ROI, and safety
    /// off must fail closed, reporting each violated threshold by name.
    #[test]
    fn replay_roi_release_gate_summary_evaluator_fail_closed_on_threshold_violations() {
        let input = ReplayRoiReleaseGateInputContract {
            generated_at: Utc::now().to_rfc3339(),
            window_seconds: 86_400,
            aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
                .iter()
                .map(|dimension| (*dimension).to_string())
                .collect(),
            replay_attempts_total: 10,
            replay_success_total: 4,
            replay_failure_total: 6,
            replay_hit_rate: 0.4,
            false_replay_rate: 0.6,
            reasoning_avoided_tokens: 80,
            replay_fallback_cost_total: 400,
            replay_roi: compute_replay_roi(80, 400),
            replay_safety: false,
            replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
                fail_closed_default: true,
                rollback_ready: true,
                audit_trail_complete: true,
                has_replay_activity: true,
            },
            thresholds: ReplayRoiReleaseGateThresholds::default(),
            fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
        };

        let output = evaluate_replay_roi_release_gate_contract_input(&input);
        assert_eq!(output.status, ReplayRoiReleaseGateStatus::FailClosed);
        // Each threshold violation is surfaced as a named failed check.
        assert!(output
            .failed_checks
            .iter()
            .any(|check| check == "replay_hit_rate_below_threshold"));
        assert!(output
            .failed_checks
            .iter()
            .any(|check| check == "false_replay_rate_above_threshold"));
        assert!(output
            .failed_checks
            .iter()
            .any(|check| check == "replay_roi_below_threshold"));
        assert!(output.summary.contains("release gate fail_closed"));
    }
7846
    /// Empty inputs (no timestamp, zero attempts, no replay activity) must not
    /// pass silently: the evaluator marks the gate `Indeterminate`, which is
    /// still treated as fail-closed.
    #[test]
    fn replay_roi_release_gate_summary_evaluator_marks_missing_data_indeterminate() {
        let input = ReplayRoiReleaseGateInputContract {
            generated_at: String::new(),
            window_seconds: 86_400,
            aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
                .iter()
                .map(|dimension| (*dimension).to_string())
                .collect(),
            replay_attempts_total: 0,
            replay_success_total: 0,
            replay_failure_total: 0,
            replay_hit_rate: 0.0,
            false_replay_rate: 0.0,
            reasoning_avoided_tokens: 0,
            replay_fallback_cost_total: 0,
            replay_roi: 0.0,
            replay_safety: false,
            replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
                fail_closed_default: true,
                rollback_ready: true,
                audit_trail_complete: true,
                has_replay_activity: false,
            },
            thresholds: ReplayRoiReleaseGateThresholds::default(),
            fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
        };

        let output = evaluate_replay_roi_release_gate_contract_input(&input);
        assert_eq!(output.status, ReplayRoiReleaseGateStatus::Indeterminate);
        // Both missing-data conditions are reported individually.
        assert!(output
            .failed_checks
            .iter()
            .any(|check| check == "missing_generated_at"));
        assert!(output
            .failed_checks
            .iter()
            .any(|check| check == "missing_replay_attempts"));
        assert!(output
            .summary
            .contains("release gate indeterminate (fail-closed)"));
    }
7889
    /// A promoted gene/capsule pair whose last update is 48h old should be
    /// flagged for confidence revalidation: time decay pushes its effective
    /// confidence below `MIN_REPLAY_CONFIDENCE`.
    #[test]
    fn stale_replay_targets_require_confidence_revalidation() {
        let now = Utc::now();
        // Hand-built projection: one promoted gene with one promoted capsule
        // at stored confidence 0.8, last touched 48 hours before `now`.
        let projection = EvolutionProjection {
            genes: vec![Gene {
                id: "gene-stale".into(),
                signals: vec!["missing readme".into()],
                strategy: vec!["README.md".into()],
                validation: vec!["test".into()],
                state: AssetState::Promoted,
            }],
            capsules: vec![Capsule {
                id: "capsule-stale".into(),
                gene_id: "gene-stale".into(),
                mutation_id: "mutation-stale".into(),
                run_id: "run-stale".into(),
                diff_hash: "hash".into(),
                confidence: 0.8,
                env: replay_input("missing readme").env,
                outcome: Outcome {
                    success: true,
                    validation_profile: "test".into(),
                    validation_duration_ms: 1,
                    changed_files: vec!["README.md".into()],
                    validator_hash: "validator".into(),
                    lines_changed: 1,
                    replay_verified: false,
                },
                state: AssetState::Promoted,
            }],
            reuse_counts: BTreeMap::from([("gene-stale".into(), 1)]),
            attempt_counts: BTreeMap::from([("gene-stale".into(), 1)]),
            last_updated_at: BTreeMap::from([(
                "gene-stale".into(),
                (now - Duration::hours(48)).to_rfc3339(),
            )]),
            spec_ids_by_gene: BTreeMap::new(),
        };

        let targets = stale_replay_revalidation_targets(&projection, now);

        // Exactly the stale gene and its capsule are targeted.
        assert_eq!(targets.len(), 1);
        assert_eq!(targets[0].gene_id, "gene-stale");
        assert_eq!(targets[0].capsule_ids, vec!["capsule-stale".to_string()]);
        // Decay over 48h must drop the confidence under the replay floor.
        assert!(targets[0].decayed_confidence < MIN_REPLAY_CONFIDENCE);
    }
7936
    /// When two remote capsules match the same signal, replay should select
    /// the one whose environment fingerprint is closest to the request's.
    #[tokio::test]
    async fn remote_replay_prefers_closest_environment_match() {
        let (evo, _) = build_test_evo("remote-env", "run-remote-env", command_validator());
        let input = replay_input("env-signal");

        // Candidate A shares the request's exact environment fingerprint.
        let envelope_a = remote_publish_envelope_with_env(
            "node-a",
            "run-remote-a",
            "gene-a",
            "capsule-a",
            "mutation-a",
            "env-signal",
            "A.md",
            "# from a",
            input.env.clone(),
        );
        // Candidate B matches the signal but comes from a divergent
        // toolchain/OS fingerprint.
        let envelope_b = remote_publish_envelope_with_env(
            "node-b",
            "run-remote-b",
            "gene-b",
            "capsule-b",
            "mutation-b",
            "env-signal",
            "B.md",
            "# from b",
            EnvFingerprint {
                rustc_version: "old-rustc".into(),
                cargo_lock_hash: "other-lock".into(),
                target_triple: "aarch64-apple-darwin".into(),
                os: "linux".into(),
            },
        );

        evo.import_remote_envelope(&envelope_a).unwrap();
        evo.import_remote_envelope(&envelope_b).unwrap();

        let decision = evo.replay_or_fallback(input).await.unwrap();

        // The environment-matched capsule wins; no planner fallback.
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-a".into()));
        assert!(!decision.fallback_to_planner);
    }
7979
    /// Cold-start scoring of quarantined remote candidates: splitting a query
    /// into several overlapping signals must not score higher than a single
    /// exact-match signal — coverage is capped at 1.0.
    #[test]
    fn remote_cold_start_scoring_caps_distinct_query_coverage() {
        let (evo, _) = build_test_evo("remote-score", "run-remote-score", command_validator());
        let input = replay_input("missing readme");

        // One capsule advertises the query verbatim as a single signal.
        let exact = remote_publish_envelope_with_signals(
            "node-exact",
            "run-remote-exact",
            "gene-exact",
            "capsule-exact",
            "mutation-exact",
            vec!["missing readme".into()],
            vec!["missing readme".into()],
            "EXACT.md",
            "# exact",
            input.env.clone(),
        );
        // Another splits the same query into two overlapping signal fragments.
        let overlapping = remote_publish_envelope_with_signals(
            "node-overlap",
            "run-remote-overlap",
            "gene-overlap",
            "capsule-overlap",
            "mutation-overlap",
            vec!["missing readme".into()],
            vec!["missing".into(), "readme".into()],
            "OVERLAP.md",
            "# overlap",
            input.env.clone(),
        );

        evo.import_remote_envelope(&exact).unwrap();
        evo.import_remote_envelope(&overlapping).unwrap();

        let candidates = quarantined_remote_exact_match_candidates(evo.store.as_ref(), &input);
        let exact_candidate = candidates
            .iter()
            .find(|candidate| candidate.gene.id == "gene-exact")
            .unwrap();
        let overlap_candidate = candidates
            .iter()
            .find(|candidate| candidate.gene.id == "gene-overlap")
            .unwrap();

        // Both strategies top out at the 1.0 coverage cap.
        assert_eq!(exact_candidate.score, 1.0);
        assert_eq!(overlap_candidate.score, 1.0);
        assert!(candidates.iter().all(|candidate| candidate.score <= 1.0));
    }
8027
    /// A mutation declared without a spec id can still satisfy a spec-scoped
    /// replay query when a later `SpecLinked` event ties the mutation to the
    /// requested spec.
    #[test]
    fn exact_match_candidates_respect_spec_linked_events() {
        let (evo, _) = build_test_evo(
            "spec-linked-filter",
            "run-spec-linked-filter",
            command_validator(),
        );
        // The query is scoped to a specific spec id.
        let mut input = replay_input("missing readme");
        input.spec_id = Some("spec-readme".into());

        // The mutation itself carries no spec id; the link arrives later as a
        // separate SpecLinked event.
        let mut mutation = sample_mutation();
        mutation.intent.id = "mutation-spec-linked".into();
        mutation.intent.spec_id = None;
        let gene = Gene {
            id: "gene-spec-linked".into(),
            signals: vec!["missing readme".into()],
            strategy: vec!["README.md".into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        };
        let capsule = Capsule {
            id: "capsule-spec-linked".into(),
            gene_id: gene.id.clone(),
            mutation_id: mutation.intent.id.clone(),
            run_id: "run-spec-linked".into(),
            diff_hash: mutation.artifact.content_hash.clone(),
            confidence: 0.9,
            env: input.env.clone(),
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec!["README.md".into()],
                validator_hash: "validator-hash".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        };

        // Replay the event history: declare, project, commit, then link.
        evo.store
            .append_event(EvolutionEvent::MutationDeclared { mutation })
            .unwrap();
        evo.store
            .append_event(EvolutionEvent::GeneProjected { gene })
            .unwrap();
        evo.store
            .append_event(EvolutionEvent::CapsuleCommitted { capsule })
            .unwrap();
        evo.store
            .append_event(EvolutionEvent::SpecLinked {
                mutation_id: "mutation-spec-linked".into(),
                spec_id: "spec-readme".into(),
            })
            .unwrap();

        // The spec-scoped lookup must admit the linked mutation's capsule.
        let candidates = exact_match_candidates(evo.store.as_ref(), &input);
        assert_eq!(candidates.len(), 1);
        assert_eq!(candidates[0].gene.id, "gene-spec-linked");
    }
8088
    #[tokio::test]
    async fn remote_capsule_advances_from_quarantine_to_shadow_then_promoted() {
        // Trust ladder for imported remote assets: Quarantined on import,
        // ShadowValidated after one successful local replay, Promoted after a
        // second. Assets only become exportable at the final rung.
        let (evo, store) = build_test_evo(
            "remote-quarantine",
            "run-remote-quarantine",
            command_validator(),
        );
        let envelope = remote_publish_envelope(
            "node-remote",
            "run-remote-quarantine",
            "gene-remote",
            "capsule-remote",
            "mutation-remote",
            "remote-signal",
            "REMOTE.md",
            "# from remote",
        );

        evo.import_remote_envelope(&envelope).unwrap();

        // Stage 1: freshly imported gene and capsule are quarantined, and the
        // promoted-asset export is empty.
        let before_replay = store.rebuild_projection().unwrap();
        let imported_gene = before_replay
            .genes
            .iter()
            .find(|gene| gene.id == "gene-remote")
            .unwrap();
        let imported_capsule = before_replay
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-remote")
            .unwrap();
        assert_eq!(imported_gene.state, AssetState::Quarantined);
        assert_eq!(imported_capsule.state, AssetState::Quarantined);
        let exported_before_replay =
            export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
        assert!(exported_before_replay.assets.is_empty());

        // Stage 2: first successful replay uses the quarantined capsule and
        // lifts both assets to ShadowValidated; still nothing exportable.
        let first_decision = evo
            .replay_or_fallback(replay_input("remote-signal"))
            .await
            .unwrap();

        assert!(first_decision.used_capsule);
        assert_eq!(first_decision.capsule_id, Some("capsule-remote".into()));

        let after_first_replay = store.rebuild_projection().unwrap();
        let shadow_gene = after_first_replay
            .genes
            .iter()
            .find(|gene| gene.id == "gene-remote")
            .unwrap();
        let shadow_capsule = after_first_replay
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-remote")
            .unwrap();
        assert_eq!(shadow_gene.state, AssetState::ShadowValidated);
        assert_eq!(shadow_capsule.state, AssetState::ShadowValidated);
        let exported_after_first_replay =
            export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
        assert!(exported_after_first_replay.assets.is_empty());

        // Stage 3: second successful replay promotes both assets.
        let second_decision = evo
            .replay_or_fallback(replay_input("remote-signal"))
            .await
            .unwrap();
        assert!(second_decision.used_capsule);
        assert_eq!(second_decision.capsule_id, Some("capsule-remote".into()));

        let after_second_replay = store.rebuild_projection().unwrap();
        let promoted_gene = after_second_replay
            .genes
            .iter()
            .find(|gene| gene.id == "gene-remote")
            .unwrap();
        let promoted_capsule = after_second_replay
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-remote")
            .unwrap();
        assert_eq!(promoted_gene.state, AssetState::Promoted);
        assert_eq!(promoted_capsule.state, AssetState::Promoted);
        // The export now carries three assets — presumably the mutation event,
        // the gene, and the capsule; only the mutation event is asserted here.
        let exported_after_second_replay =
            export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
        assert_eq!(exported_after_second_replay.assets.len(), 3);
        assert!(exported_after_second_replay
            .assets
            .iter()
            .any(|asset| matches!(
                asset,
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared { .. }
                }
            )));
    }
8184
8185 #[tokio::test]
8186 async fn publish_local_assets_include_mutation_payload_for_remote_replay() {
8187 let (source, source_store) = build_test_evo(
8188 "remote-publish-export",
8189 "run-remote-publish-export",
8190 command_validator(),
8191 );
8192 source
8193 .capture_successful_mutation(&"run-remote-publish-export".into(), sample_mutation())
8194 .await
8195 .unwrap();
8196 let envelope = EvolutionNetworkNode::new(source_store.clone())
8197 .publish_local_assets("node-source")
8198 .unwrap();
8199 assert!(envelope.assets.iter().any(|asset| matches!(
8200 asset,
8201 NetworkAsset::EvolutionEvent {
8202 event: EvolutionEvent::MutationDeclared { mutation }
8203 } if mutation.intent.id == "mutation-1"
8204 )));
8205
8206 let (remote, _) = build_test_evo(
8207 "remote-publish-import",
8208 "run-remote-publish-import",
8209 command_validator(),
8210 );
8211 remote.import_remote_envelope(&envelope).unwrap();
8212
8213 let decision = remote
8214 .replay_or_fallback(replay_input("missing readme"))
8215 .await
8216 .unwrap();
8217
8218 assert!(decision.used_capsule);
8219 assert!(!decision.fallback_to_planner);
8220 }
8221
    #[tokio::test]
    async fn import_remote_envelope_records_manifest_validation_event() {
        // A successful import must append a ManifestValidated audit event that
        // records acceptance, the sender/publisher identity, and the imported
        // asset ids.
        let (source, source_store) = build_test_evo(
            "remote-manifest-success-source",
            "run-remote-manifest-success-source",
            command_validator(),
        );
        source
            .capture_successful_mutation(
                &"run-remote-manifest-success-source".into(),
                sample_mutation(),
            )
            .await
            .unwrap();
        let envelope = EvolutionNetworkNode::new(source_store.clone())
            .publish_local_assets("node-source")
            .unwrap();

        let (remote, remote_store) = build_test_evo(
            "remote-manifest-success-remote",
            "run-remote-manifest-success-remote",
            command_validator(),
        );
        remote.import_remote_envelope(&envelope).unwrap();

        // Scan the remote log from the first sequence number and look for the
        // acceptance audit record with full attribution.
        let events = remote_store.scan(1).unwrap();
        assert!(events.iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::ManifestValidated {
                accepted: true,
                reason,
                sender_id: Some(sender_id),
                publisher: Some(publisher),
                asset_ids,
            } if reason == "manifest validated"
                && sender_id == "node-source"
                && publisher == "node-source"
                && !asset_ids.is_empty()
        )));
    }
8262
    #[test]
    fn import_remote_envelope_rejects_invalid_manifest_and_records_audit_event() {
        // Tampering with the manifest's asset hash (while keeping the outer
        // content hash consistent) must fail the import and append a rejection
        // audit event naming the sender, publisher, and affected asset ids.
        let (remote, remote_store) = build_test_evo(
            "remote-manifest-invalid",
            "run-remote-manifest-invalid",
            command_validator(),
        );
        let mut envelope = remote_publish_envelope(
            "node-remote",
            "run-remote-manifest-invalid",
            "gene-remote",
            "capsule-remote",
            "mutation-remote",
            "manifest-signal",
            "MANIFEST.md",
            "# drift",
        );
        if let Some(manifest) = envelope.manifest.as_mut() {
            manifest.asset_hash = "tampered-hash".to_string();
        }
        // Recompute the outer hash so the rejection is attributable to the
        // manifest check, not to content-hash verification.
        envelope.content_hash = envelope.compute_content_hash();

        let error = remote.import_remote_envelope(&envelope).unwrap_err();
        assert!(error.to_string().contains("manifest"));

        let events = remote_store.scan(1).unwrap();
        assert!(events.iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::ManifestValidated {
                accepted: false,
                reason,
                sender_id: Some(sender_id),
                publisher: Some(publisher),
                asset_ids,
            } if reason.contains("manifest asset_hash mismatch")
                && sender_id == "node-remote"
                && publisher == "node-remote"
                && !asset_ids.is_empty()
        )));
    }
8303
8304 #[tokio::test]
8305 async fn fetch_assets_include_mutation_payload_for_remote_replay() {
8306 let (evo, store) = build_test_evo(
8307 "remote-fetch-export",
8308 "run-remote-fetch",
8309 command_validator(),
8310 );
8311 evo.capture_successful_mutation(&"run-remote-fetch".into(), sample_mutation())
8312 .await
8313 .unwrap();
8314
8315 let response = EvolutionNetworkNode::new(store.clone())
8316 .fetch_assets(
8317 "node-source",
8318 &FetchQuery {
8319 sender_id: "node-client".into(),
8320 signals: vec!["missing readme".into()],
8321 since_cursor: None,
8322 resume_token: None,
8323 },
8324 )
8325 .unwrap();
8326
8327 assert!(response.assets.iter().any(|asset| matches!(
8328 asset,
8329 NetworkAsset::EvolutionEvent {
8330 event: EvolutionEvent::MutationDeclared { mutation }
8331 } if mutation.intent.id == "mutation-1"
8332 )));
8333 assert!(response
8334 .assets
8335 .iter()
8336 .any(|asset| matches!(asset, NetworkAsset::Gene { .. })));
8337 assert!(response
8338 .assets
8339 .iter()
8340 .any(|asset| matches!(asset, NetworkAsset::Capsule { .. })));
8341 }
8342
    #[test]
    fn fetch_assets_delta_sync_supports_since_cursor_and_resume_token() {
        // Delta sync: a client may resume fetching either from an explicit
        // cursor or from an opaque resume token, and then only sees assets
        // recorded after that point.
        let store_root =
            std::env::temp_dir().join(format!("oris-evokernel-fetch-delta-store-{}", next_id("t")));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let node = EvolutionNetworkNode::new(store.clone());
        node.record_reported_experience(
            "delta-agent",
            "gene-delta-a",
            vec!["delta.signal".into()],
            vec![
                "task_class=delta.signal".into(),
                "task_label=delta replay".into(),
            ],
            vec!["a2a.tasks.report".into()],
        )
        .unwrap();

        // Initial fetch: returns gene-delta-a plus a cursor and resume token.
        let first = node
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "delta-agent".into(),
                    signals: vec!["delta.signal".into()],
                    since_cursor: None,
                    resume_token: None,
                },
            )
            .unwrap();
        let first_cursor = first.next_cursor.clone().expect("first next_cursor");
        let first_token = first.resume_token.clone().expect("first resume_token");
        assert!(first.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-a")
        ));

        // Simulate a process restart by building a fresh node over the same
        // persistent store, then record a second experience.
        let restarted = EvolutionNetworkNode::new(store.clone());
        restarted
            .record_reported_experience(
                "delta-agent",
                "gene-delta-b",
                vec!["delta.signal".into()],
                vec![
                    "task_class=delta.signal".into(),
                    "task_label=delta replay".into(),
                ],
                vec!["a2a.tasks.report".into()],
            )
            .unwrap();

        // Resuming via the pre-restart token yields only the new gene, and the
        // sync audit reports the cursor the token resolved to.
        let from_token = restarted
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "delta-agent".into(),
                    signals: vec!["delta.signal".into()],
                    since_cursor: None,
                    resume_token: Some(first_token),
                },
            )
            .unwrap();
        assert!(from_token.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-b")
        ));
        assert!(!from_token.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-a")
        ));
        assert_eq!(
            from_token.sync_audit.requested_cursor,
            Some(first_cursor.clone())
        );
        assert!(from_token.sync_audit.applied_count >= 1);

        // Resuming via an explicit since_cursor behaves the same way.
        let from_cursor = restarted
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "delta-agent".into(),
                    signals: vec!["delta.signal".into()],
                    since_cursor: Some(first_cursor),
                    resume_token: None,
                },
            )
            .unwrap();
        assert!(from_cursor.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-b")
        ));
    }
8434
8435 #[test]
8436 fn partial_remote_import_keeps_publisher_for_already_imported_assets() {
8437 let store_root = std::env::temp_dir().join(format!(
8438 "oris-evokernel-remote-partial-store-{}",
8439 std::process::id()
8440 ));
8441 if store_root.exists() {
8442 fs::remove_dir_all(&store_root).unwrap();
8443 }
8444 let store: Arc<dyn EvolutionStore> = Arc::new(FailOnAppendStore::new(store_root, 5));
8445 let evo = build_test_evo_with_store(
8446 "remote-partial",
8447 "run-remote-partial",
8448 command_validator(),
8449 store.clone(),
8450 );
8451 let envelope = remote_publish_envelope(
8452 "node-partial",
8453 "run-remote-partial",
8454 "gene-partial",
8455 "capsule-partial",
8456 "mutation-partial",
8457 "partial-signal",
8458 "PARTIAL.md",
8459 "# partial",
8460 );
8461
8462 let result = evo.import_remote_envelope(&envelope);
8463
8464 assert!(matches!(result, Err(EvoKernelError::Store(_))));
8465 let projection = store.rebuild_projection().unwrap();
8466 assert!(projection
8467 .genes
8468 .iter()
8469 .any(|gene| gene.id == "gene-partial"));
8470 assert!(projection.capsules.is_empty());
8471 let publishers = evo.remote_publishers.lock().unwrap();
8472 assert_eq!(
8473 publishers.get("gene-partial").map(String::as_str),
8474 Some("node-partial")
8475 );
8476 }
8477
    #[test]
    fn retry_remote_import_after_partial_failure_only_imports_missing_assets() {
        // After a mid-import store failure, retrying the same envelope must
        // import only the assets that did not land the first time — with no
        // duplicate log events and no double-counted gene attempts.
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-remote-partial-retry-store-{}",
            next_id("t")
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        // Fails the 5th append, cutting the first import short.
        let store: Arc<dyn EvolutionStore> = Arc::new(FailOnAppendStore::new(store_root, 5));
        let evo = build_test_evo_with_store(
            "remote-partial-retry",
            "run-remote-partial-retry",
            command_validator(),
            store.clone(),
        );
        let envelope = remote_publish_envelope(
            "node-partial",
            "run-remote-partial-retry",
            "gene-partial-retry",
            "capsule-partial-retry",
            "mutation-partial-retry",
            "partial-retry-signal",
            "PARTIAL_RETRY.md",
            "# partial retry",
        );

        let first = evo.import_remote_envelope(&envelope);
        assert!(matches!(first, Err(EvoKernelError::Store(_))));

        // The retry succeeds and reports only the capsule that was missing.
        let retry = evo.import_remote_envelope(&envelope).unwrap();

        assert_eq!(retry.imported_asset_ids, vec!["capsule-partial-retry"]);
        let projection = store.rebuild_projection().unwrap();
        let gene = projection
            .genes
            .iter()
            .find(|gene| gene.id == "gene-partial-retry")
            .unwrap();
        assert_eq!(gene.state, AssetState::Quarantined);
        let capsule = projection
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-partial-retry")
            .unwrap();
        assert_eq!(capsule.state, AssetState::Quarantined);
        // The retry must not bump the gene attempt counter a second time.
        assert_eq!(projection.attempt_counts["gene-partial-retry"], 1);

        // Exactly one event of each kind in the log: the retry appended no
        // duplicates for assets that had already been imported.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::MutationDeclared { mutation }
                        if mutation.intent.id == "mutation-partial-retry"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::GeneProjected { gene } if gene.id == "gene-partial-retry"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::CapsuleCommitted { capsule }
                        if capsule.id == "capsule-partial-retry"
                    )
                })
                .count(),
            1
        );
    }
8566
    #[tokio::test]
    async fn duplicate_remote_import_does_not_requarantine_locally_validated_assets() {
        // Re-importing an envelope whose assets were already imported and then
        // locally shadow-validated must be a no-op: no regression back to
        // Quarantined, no attempt-count bump, no duplicate log events, and the
        // promotion ladder keeps working afterwards.
        let (evo, store) = build_test_evo(
            "remote-idempotent",
            "run-remote-idempotent",
            command_validator(),
        );
        let envelope = remote_publish_envelope(
            "node-idempotent",
            "run-remote-idempotent",
            "gene-idempotent",
            "capsule-idempotent",
            "mutation-idempotent",
            "idempotent-signal",
            "IDEMPOTENT.md",
            "# idempotent",
        );

        let first = evo.import_remote_envelope(&envelope).unwrap();
        assert_eq!(
            first.imported_asset_ids,
            vec!["gene-idempotent", "capsule-idempotent"]
        );

        // One successful replay lifts the imported assets to ShadowValidated.
        let decision = evo
            .replay_or_fallback(replay_input("idempotent-signal"))
            .await
            .unwrap();
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-idempotent".into()));

        let projection_before = store.rebuild_projection().unwrap();
        let attempts_before = projection_before.attempt_counts["gene-idempotent"];
        let gene_before = projection_before
            .genes
            .iter()
            .find(|gene| gene.id == "gene-idempotent")
            .unwrap();
        assert_eq!(gene_before.state, AssetState::ShadowValidated);
        let capsule_before = projection_before
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-idempotent")
            .unwrap();
        assert_eq!(capsule_before.state, AssetState::ShadowValidated);

        // Duplicate import: nothing newly imported, states and attempt count
        // stay exactly as they were.
        let second = evo.import_remote_envelope(&envelope).unwrap();
        assert!(second.imported_asset_ids.is_empty());

        let projection_after = store.rebuild_projection().unwrap();
        assert_eq!(
            projection_after.attempt_counts["gene-idempotent"],
            attempts_before
        );
        let gene_after = projection_after
            .genes
            .iter()
            .find(|gene| gene.id == "gene-idempotent")
            .unwrap();
        assert_eq!(gene_after.state, AssetState::ShadowValidated);
        let capsule_after = projection_after
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-idempotent")
            .unwrap();
        assert_eq!(capsule_after.state, AssetState::ShadowValidated);

        // A further successful replay still advances the assets to Promoted.
        let third_decision = evo
            .replay_or_fallback(replay_input("idempotent-signal"))
            .await
            .unwrap();
        assert!(third_decision.used_capsule);
        assert_eq!(third_decision.capsule_id, Some("capsule-idempotent".into()));

        let projection_promoted = store.rebuild_projection().unwrap();
        let promoted_gene = projection_promoted
            .genes
            .iter()
            .find(|gene| gene.id == "gene-idempotent")
            .unwrap();
        let promoted_capsule = projection_promoted
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-idempotent")
            .unwrap();
        assert_eq!(promoted_gene.state, AssetState::Promoted);
        assert_eq!(promoted_capsule.state, AssetState::Promoted);

        // The log holds exactly one event per asset: the duplicate import
        // appended nothing.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::MutationDeclared { mutation }
                        if mutation.intent.id == "mutation-idempotent"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::GeneProjected { gene } if gene.id == "gene-idempotent"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::CapsuleCommitted { capsule }
                        if capsule.id == "capsule-idempotent"
                    )
                })
                .count(),
            1
        );

        // Sync-audit bookkeeping: the first pass scanned everything with no
        // failures; the duplicate pass skipped every asset yet still issued a
        // resume token.
        assert_eq!(first.sync_audit.scanned_count, envelope.assets.len());
        assert_eq!(first.sync_audit.failed_count, 0);
        assert_eq!(second.sync_audit.applied_count, 0);
        assert_eq!(second.sync_audit.skipped_count, envelope.assets.len());
        assert!(second.resume_token.is_some());
    }
8701
8702 #[tokio::test]
8703 async fn insufficient_evu_blocks_publish_but_not_local_replay() {
8704 let (evo, _) = build_test_evo("stake-gate", "run-stake", command_validator());
8705 let capsule = evo
8706 .capture_successful_mutation(&"run-stake".into(), sample_mutation())
8707 .await
8708 .unwrap();
8709 let publish = evo.export_promoted_assets("node-local");
8710 assert!(matches!(publish, Err(EvoKernelError::Validation(_))));
8711
8712 let decision = evo
8713 .replay_or_fallback(replay_input("missing readme"))
8714 .await
8715 .unwrap();
8716 assert!(decision.used_capsule);
8717 assert_eq!(decision.capsule_id, Some(capsule.id));
8718 }
8719
    #[tokio::test]
    async fn second_replay_validation_failure_revokes_gene_immediately() {
        // NOTE(review): the test name claims the gene is revoked, but the
        // assertions below verify the opposite — after two failed replays the
        // gene and capsule remain Promoted, exactly one ValidationFailed event
        // is logged, and no GeneRevoked event exists; the final replay is
        // refused only because confidence fell below the replay threshold.
        // Confirm the intended policy and rename the test (or tighten the
        // assertions) accordingly.
        let (capturer, store) = build_test_evo("revoke-replay", "run-capture", command_validator());
        let capsule = capturer
            .capture_successful_mutation(&"run-capture".into(), sample_mutation())
            .await
            .unwrap();

        // Same store, but every replay validation fails.
        let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
        let failing_replay = build_test_evo_with_store(
            "revoke-replay",
            "run-replay-fail",
            failing_validator,
            store.clone(),
        );

        let first = failing_replay
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();
        let second = failing_replay
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();

        // Both failed replays fall back to the planner.
        assert!(!first.used_capsule);
        assert!(first.fallback_to_planner);
        assert!(!second.used_capsule);
        assert!(second.fallback_to_planner);

        // Asset states are untouched by the validation failures.
        let projection = store.rebuild_projection().unwrap();
        let gene = projection
            .genes
            .iter()
            .find(|gene| gene.id == capsule.gene_id)
            .unwrap();
        assert_eq!(gene.state, AssetState::Promoted);
        let committed_capsule = projection
            .capsules
            .iter()
            .find(|current| current.id == capsule.id)
            .unwrap();
        assert_eq!(committed_capsule.state, AssetState::Promoted);

        // Exactly one ValidationFailed event is recorded even though two
        // replays failed, and no GeneRevoked event exists at all.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::ValidationFailed {
                            gene_id: Some(gene_id),
                            ..
                        } if gene_id == &capsule.gene_id
                    )
                })
                .count(),
            1
        );
        assert!(!events.iter().any(|stored| {
            matches!(
                &stored.event,
                EvolutionEvent::GeneRevoked { gene_id, .. } if gene_id == &capsule.gene_id
            )
        }));

        // Even with a healthy validator, a fresh instance refuses the replay
        // because the recorded failures pushed confidence below the threshold.
        let recovered = build_test_evo_with_store(
            "revoke-replay",
            "run-replay-check",
            command_validator(),
            store.clone(),
        );
        let after_revoke = recovered
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();
        assert!(!after_revoke.used_capsule);
        assert!(after_revoke.fallback_to_planner);
        assert!(after_revoke.reason.contains("below replay threshold"));
    }
8801
    #[tokio::test]
    async fn remote_reuse_success_rewards_publisher_and_biases_selection() {
        // Two publishers offer capsules for the same signal; the one with the
        // stronger reputation record (node-b) must win selection, earn the
        // reuse reward, and end up with a larger selector bias than node-a.
        let ledger = Arc::new(Mutex::new(EvuLedger {
            accounts: vec![],
            reputations: vec![
                oris_economics::ReputationRecord {
                    node_id: "node-a".into(),
                    publish_success_rate: 0.4,
                    validator_accuracy: 0.4,
                    reuse_impact: 0,
                },
                oris_economics::ReputationRecord {
                    node_id: "node-b".into(),
                    publish_success_rate: 0.95,
                    validator_accuracy: 0.95,
                    reuse_impact: 8,
                },
            ],
        }));
        let (evo, _) = build_test_evo("remote-success", "run-remote", command_validator());
        let evo = evo.with_economics(ledger.clone());

        let envelope_a = remote_publish_envelope(
            "node-a",
            "run-remote-a",
            "gene-a",
            "capsule-a",
            "mutation-a",
            "shared-signal",
            "A.md",
            "# from a",
        );
        let envelope_b = remote_publish_envelope(
            "node-b",
            "run-remote-b",
            "gene-b",
            "capsule-b",
            "mutation-b",
            "shared-signal",
            "B.md",
            "# from b",
        );

        evo.import_remote_envelope(&envelope_a).unwrap();
        evo.import_remote_envelope(&envelope_b).unwrap();

        let decision = evo
            .replay_or_fallback(replay_input("shared-signal"))
            .await
            .unwrap();

        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-b".into()));
        // The winning publisher is credited exactly the configured reuse reward.
        let locked = ledger.lock().unwrap();
        let rewarded = locked
            .accounts
            .iter()
            .find(|item| item.node_id == "node-b")
            .unwrap();
        assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
        // Successful reuse widens node-b's selection bias relative to node-a.
        assert!(
            locked.selector_reputation_bias()["node-b"]
                > locked.selector_reputation_bias()["node-a"]
        );
    }
8867
    #[tokio::test]
    async fn remote_reuse_settlement_tracks_selected_capsule_publisher_for_shared_gene() {
        // Two publishers share the same gene id but ship different capsules.
        // The capsule whose env fingerprint matches the replay input wins, and
        // only its publisher (node-a) is settled — node-b must not appear in
        // the ledger at all.
        let ledger = Arc::new(Mutex::new(EvuLedger::default()));
        let (evo, _) = build_test_evo(
            "remote-shared-publisher",
            "run-remote-shared-publisher",
            command_validator(),
        );
        let evo = evo.with_economics(ledger.clone());
        let input = replay_input("shared-signal");
        // Preferred capsule: env fingerprint identical to the replay input's.
        let preferred = remote_publish_envelope_with_env(
            "node-a",
            "run-remote-a",
            "gene-shared",
            "capsule-preferred",
            "mutation-preferred",
            "shared-signal",
            "A.md",
            "# from a",
            input.env.clone(),
        );
        // Competing capsule: deliberately mismatched environment fingerprint.
        let fallback = remote_publish_envelope_with_env(
            "node-b",
            "run-remote-b",
            "gene-shared",
            "capsule-fallback",
            "mutation-fallback",
            "shared-signal",
            "B.md",
            "# from b",
            EnvFingerprint {
                rustc_version: "old-rustc".into(),
                cargo_lock_hash: "other-lock".into(),
                target_triple: "aarch64-apple-darwin".into(),
                os: "linux".into(),
            },
        );

        evo.import_remote_envelope(&preferred).unwrap();
        evo.import_remote_envelope(&fallback).unwrap();

        let decision = evo.replay_or_fallback(input).await.unwrap();

        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-preferred".into()));
        // Settlement credits node-a only; node-b has no account entry.
        let locked = ledger.lock().unwrap();
        let rewarded = locked
            .accounts
            .iter()
            .find(|item| item.node_id == "node-a")
            .unwrap();
        assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
        assert!(locked.accounts.iter().all(|item| item.node_id != "node-b"));
    }
8922
8923 #[test]
8924 fn select_candidates_surfaces_ranked_remote_cold_start_candidates() {
8925 let ledger = Arc::new(Mutex::new(EvuLedger {
8926 accounts: vec![],
8927 reputations: vec![
8928 oris_economics::ReputationRecord {
8929 node_id: "node-a".into(),
8930 publish_success_rate: 0.4,
8931 validator_accuracy: 0.4,
8932 reuse_impact: 0,
8933 },
8934 oris_economics::ReputationRecord {
8935 node_id: "node-b".into(),
8936 publish_success_rate: 0.95,
8937 validator_accuracy: 0.95,
8938 reuse_impact: 8,
8939 },
8940 ],
8941 }));
8942 let (evo, _) = build_test_evo("remote-select", "run-remote-select", command_validator());
8943 let evo = evo.with_economics(ledger);
8944
8945 let envelope_a = remote_publish_envelope(
8946 "node-a",
8947 "run-remote-a",
8948 "gene-a",
8949 "capsule-a",
8950 "mutation-a",
8951 "shared-signal",
8952 "A.md",
8953 "# from a",
8954 );
8955 let envelope_b = remote_publish_envelope(
8956 "node-b",
8957 "run-remote-b",
8958 "gene-b",
8959 "capsule-b",
8960 "mutation-b",
8961 "shared-signal",
8962 "B.md",
8963 "# from b",
8964 );
8965
8966 evo.import_remote_envelope(&envelope_a).unwrap();
8967 evo.import_remote_envelope(&envelope_b).unwrap();
8968
8969 let candidates = evo.select_candidates(&replay_input("shared-signal"));
8970
8971 assert_eq!(candidates.len(), 1);
8972 assert_eq!(candidates[0].gene.id, "gene-b");
8973 assert_eq!(candidates[0].capsules[0].id, "capsule-b");
8974 }
8975
    #[tokio::test]
    async fn remote_reuse_publisher_bias_survives_restart() {
        // Reputation-driven selection must not depend on in-memory state:
        // after rebuilding the kernel over the same persistent store, replay
        // still prefers the high-reputation publisher and settles the reuse
        // reward to it.
        let ledger = Arc::new(Mutex::new(EvuLedger {
            accounts: vec![],
            reputations: vec![
                oris_economics::ReputationRecord {
                    node_id: "node-a".into(),
                    publish_success_rate: 0.4,
                    validator_accuracy: 0.4,
                    reuse_impact: 0,
                },
                oris_economics::ReputationRecord {
                    node_id: "node-b".into(),
                    publish_success_rate: 0.95,
                    validator_accuracy: 0.95,
                    reuse_impact: 8,
                },
            ],
        }));
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-remote-restart-store-{}",
            next_id("t")
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let evo = build_test_evo_with_store(
            "remote-success-restart-source",
            "run-remote-restart-source",
            command_validator(),
            store.clone(),
        )
        .with_economics(ledger.clone());

        let envelope_a = remote_publish_envelope(
            "node-a",
            "run-remote-a",
            "gene-a",
            "capsule-a",
            "mutation-a",
            "shared-signal",
            "A.md",
            "# from a",
        );
        let envelope_b = remote_publish_envelope(
            "node-b",
            "run-remote-b",
            "gene-b",
            "capsule-b",
            "mutation-b",
            "shared-signal",
            "B.md",
            "# from b",
        );

        evo.import_remote_envelope(&envelope_a).unwrap();
        evo.import_remote_envelope(&envelope_b).unwrap();

        // "Restart": a fresh kernel instance over the same store and ledger.
        let recovered = build_test_evo_with_store(
            "remote-success-restart-recovered",
            "run-remote-restart-recovered",
            command_validator(),
            store.clone(),
        )
        .with_economics(ledger.clone());

        let decision = recovered
            .replay_or_fallback(replay_input("shared-signal"))
            .await
            .unwrap();

        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-b".into()));
        let locked = ledger.lock().unwrap();
        let rewarded = locked
            .accounts
            .iter()
            .find(|item| item.node_id == "node-b")
            .unwrap();
        assert_eq!(rewarded.balance, recovered.stake_policy.reuse_reward);
    }
9059
9060 #[tokio::test]
9061 async fn remote_reuse_failure_penalizes_remote_reputation() {
9062 let ledger = Arc::new(Mutex::new(EvuLedger::default()));
9063 let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
9064 let (evo, _) = build_test_evo("remote-failure", "run-failure", failing_validator);
9065 let evo = evo.with_economics(ledger.clone());
9066
9067 let envelope = remote_publish_envelope(
9068 "node-remote",
9069 "run-remote-failed",
9070 "gene-remote",
9071 "capsule-remote",
9072 "mutation-remote",
9073 "failure-signal",
9074 "FAILED.md",
9075 "# from remote",
9076 );
9077 evo.import_remote_envelope(&envelope).unwrap();
9078
9079 let decision = evo
9080 .replay_or_fallback(replay_input("failure-signal"))
9081 .await
9082 .unwrap();
9083
9084 assert!(!decision.used_capsule);
9085 assert!(decision.fallback_to_planner);
9086
9087 let signal = evo.economics_signal("node-remote").unwrap();
9088 assert_eq!(signal.available_evu, 0);
9089 assert!(signal.publish_success_rate < 0.5);
9090 assert!(signal.validator_accuracy < 0.5);
9091 }
9092
9093 #[test]
9094 fn ensure_builtin_experience_assets_is_idempotent_and_fetchable() {
9095 let store_root = std::env::temp_dir().join(format!(
9096 "oris-evokernel-builtin-experience-store-{}",
9097 next_id("t")
9098 ));
9099 if store_root.exists() {
9100 fs::remove_dir_all(&store_root).unwrap();
9101 }
9102 let store: Arc<dyn EvolutionStore> =
9103 Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
9104 let node = EvolutionNetworkNode::new(store.clone());
9105
9106 let first = node
9107 .ensure_builtin_experience_assets("runtime-bootstrap")
9108 .unwrap();
9109 assert!(!first.imported_asset_ids.is_empty());
9110
9111 let second = node
9112 .ensure_builtin_experience_assets("runtime-bootstrap")
9113 .unwrap();
9114 assert!(second.imported_asset_ids.is_empty());
9115
9116 let fetch = node
9117 .fetch_assets(
9118 "execution-api",
9119 &FetchQuery {
9120 sender_id: "compat-agent".into(),
9121 signals: vec!["error".into()],
9122 since_cursor: None,
9123 resume_token: None,
9124 },
9125 )
9126 .unwrap();
9127
9128 let mut has_builtin_evomap = false;
9129 for asset in fetch.assets {
9130 if let NetworkAsset::Gene { gene } = asset {
9131 if strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
9132 == Some("builtin_evomap")
9133 && gene.state == AssetState::Promoted
9134 {
9135 has_builtin_evomap = true;
9136 break;
9137 }
9138 }
9139 }
9140 assert!(has_builtin_evomap);
9141 }
9142
    #[test]
    fn reported_experience_retention_keeps_latest_three_and_preserves_builtin_assets() {
        // Retention policy for reported experiences: after four reports for the
        // same task class, three genes remain Promoted and one is Revoked
        // (presumably the oldest — only the counts are asserted here), while
        // builtin assets are untouched by the retention sweep.
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-reported-retention-store-{}",
            next_id("t")
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let node = EvolutionNetworkNode::new(store.clone());

        node.ensure_builtin_experience_assets("runtime-bootstrap")
            .unwrap();

        // Report four experiences, all under task_class=docs.rewrite.
        for idx in 0..4 {
            node.record_reported_experience(
                "reporter-a",
                format!("reported-docs-rewrite-v{}", idx + 1),
                vec!["docs.rewrite".into(), format!("task-{}", idx + 1)],
                vec![
                    "task_class=docs.rewrite".into(),
                    format!("task_label=Docs rewrite v{}", idx + 1),
                    format!("summary=reported replay {}", idx + 1),
                ],
                vec!["a2a.tasks.report".into()],
            )
            .unwrap();
        }

        // Count reported genes by state, and promoted builtin genes, using the
        // strategy metadata tags to classify asset origin.
        let (_, projection) = store.scan_projection().unwrap();
        let reported_promoted = projection
            .genes
            .iter()
            .filter(|gene| {
                gene.state == AssetState::Promoted
                    && strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
                        == Some("reported_experience")
                    && strategy_metadata_value(&gene.strategy, "task_class").as_deref()
                        == Some("docs.rewrite")
            })
            .count();
        let reported_revoked = projection
            .genes
            .iter()
            .filter(|gene| {
                gene.state == AssetState::Revoked
                    && strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
                        == Some("reported_experience")
                    && strategy_metadata_value(&gene.strategy, "task_class").as_deref()
                        == Some("docs.rewrite")
            })
            .count();
        let builtin_promoted = projection
            .genes
            .iter()
            .filter(|gene| {
                gene.state == AssetState::Promoted
                    && matches!(
                        strategy_metadata_value(&gene.strategy, "asset_origin").as_deref(),
                        Some("builtin") | Some("builtin_evomap")
                    )
            })
            .count();

        assert_eq!(reported_promoted, 3);
        assert_eq!(reported_revoked, 1);
        assert!(builtin_promoted >= 1);

        // A consumer fetching the task class still sees at least the three
        // retained genes.
        let fetch = node
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "consumer-b".into(),
                    signals: vec!["docs.rewrite".into()],
                    since_cursor: None,
                    resume_token: None,
                },
            )
            .unwrap();
        let docs_genes = fetch
            .assets
            .into_iter()
            .filter_map(|asset| match asset {
                NetworkAsset::Gene { gene } => Some(gene),
                _ => None,
            })
            .filter(|gene| {
                strategy_metadata_value(&gene.strategy, "task_class").as_deref()
                    == Some("docs.rewrite")
            })
            .collect::<Vec<_>>();
        assert!(docs_genes.len() >= 3);
    }
9238}