1use std::collections::{BTreeMap, BTreeSet};
4use std::fs;
5use std::path::{Path, PathBuf};
6use std::process::Command;
7use std::sync::{Arc, Mutex};
8
9use async_trait::async_trait;
10use chrono::{DateTime, Duration, Utc};
11use oris_agent_contract::{
12 infer_mutation_needed_failure_reason_code, infer_replay_fallback_reason_code,
13 normalize_mutation_needed_failure_contract, normalize_replay_fallback_contract, AgentRole,
14 BoundedTaskClass, CoordinationMessage, CoordinationPlan, CoordinationPrimitive,
15 CoordinationResult, CoordinationTask, ExecutionFeedback, MutationNeededFailureContract,
16 MutationNeededFailureReasonCode, MutationProposal as AgentMutationProposal, ReplayFeedback,
17 ReplayPlannerDirective, SupervisedDevloopOutcome, SupervisedDevloopRequest,
18 SupervisedDevloopStatus,
19};
20use oris_economics::{EconomicsSignal, EvuLedger, StakePolicy};
21use oris_evolution::{
22 compute_artifact_hash, decayed_replay_confidence, next_id, stable_hash_json, AssetState,
23 BlastRadius, CandidateSource, Capsule, CapsuleId, EnvFingerprint, EvolutionError,
24 EvolutionEvent, EvolutionProjection, EvolutionStore, Gene, GeneCandidate, MutationId,
25 PreparedMutation, ReplayRoiEvidence, ReplayRoiReasonCode, Selector, SelectorInput,
26 StoreBackedSelector, StoredEvolutionEvent, TransitionEvidence, TransitionReasonCode,
27 ValidationSnapshot, MIN_REPLAY_CONFIDENCE,
28};
29use oris_evolution_network::{EvolutionEnvelope, NetworkAsset, SyncAudit};
30use oris_governor::{DefaultGovernor, Governor, GovernorDecision, GovernorInput};
31use oris_kernel::{Kernel, KernelState, RunId};
32use oris_sandbox::{
33 compute_blast_radius, execute_allowed_command, Sandbox, SandboxPolicy, SandboxReceipt,
34};
35use oris_spec::CompiledMutationPlan;
36use serde::{Deserialize, Serialize};
37use serde_json::Value;
38use thiserror::Error;
39
40pub use oris_evolution::{
41 default_store_root, ArtifactEncoding, AssetState as EvoAssetState,
42 BlastRadius as EvoBlastRadius, CandidateSource as EvoCandidateSource,
43 EnvFingerprint as EvoEnvFingerprint, EvolutionStore as EvoEvolutionStore, JsonlEvolutionStore,
44 MutationArtifact, MutationIntent, MutationTarget, Outcome, RiskLevel,
45 SelectorInput as EvoSelectorInput, TransitionReasonCode as EvoTransitionReasonCode,
46};
47pub use oris_evolution_network::{
48 FetchQuery, FetchResponse, MessageType, PublishRequest, RevokeNotice,
49};
50pub use oris_governor::{CoolingWindow, GovernorConfig, RevocationReason};
51pub use oris_sandbox::{LocalProcessSandbox, SandboxPolicy as EvoSandboxPolicy};
52pub use oris_spec::{SpecCompileError, SpecCompiler, SpecDocument};
53
/// An ordered validation pipeline identified by a profile name.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationPlan {
    /// Name of the validation profile this plan implements (e.g. "oris-default").
    pub profile: String,
    /// Stages executed in order; `CommandValidator::run` stops at the first failure.
    pub stages: Vec<ValidationStage>,
}
59
60impl ValidationPlan {
61 pub fn oris_default() -> Self {
62 Self {
63 profile: "oris-default".into(),
64 stages: vec![
65 ValidationStage::Command {
66 program: "cargo".into(),
67 args: vec!["fmt".into(), "--all".into(), "--check".into()],
68 timeout_ms: 60_000,
69 },
70 ValidationStage::Command {
71 program: "cargo".into(),
72 args: vec!["check".into(), "--workspace".into()],
73 timeout_ms: 180_000,
74 },
75 ValidationStage::Command {
76 program: "cargo".into(),
77 args: vec![
78 "test".into(),
79 "-p".into(),
80 "oris-kernel".into(),
81 "-p".into(),
82 "oris-evolution".into(),
83 "-p".into(),
84 "oris-sandbox".into(),
85 "-p".into(),
86 "oris-evokernel".into(),
87 "--lib".into(),
88 ],
89 timeout_ms: 300_000,
90 },
91 ValidationStage::Command {
92 program: "cargo".into(),
93 args: vec![
94 "test".into(),
95 "-p".into(),
96 "oris-runtime".into(),
97 "--lib".into(),
98 ],
99 timeout_ms: 300_000,
100 },
101 ],
102 }
103 }
104}
105
/// A single validation step. Currently the only form is running an external
/// command inside the sandbox policy's allow-list.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum ValidationStage {
    /// Run `program` with `args`, aborting after `timeout_ms` milliseconds.
    Command {
        program: String,
        args: Vec<String>,
        timeout_ms: u64,
    },
}
114
/// Outcome of one executed validation stage.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationStageReport {
    /// Human-readable stage label ("program arg1 arg2 ...").
    pub stage: String,
    /// Whether the stage command succeeded.
    pub success: bool,
    /// Process exit code, if one was observed (None when spawning failed).
    pub exit_code: Option<i32>,
    /// Wall-clock duration of the stage; 0 when execution errored out.
    pub duration_ms: u64,
    /// Captured standard output.
    pub stdout: String,
    /// Captured standard error (or the execution error message).
    pub stderr: String,
}
124
/// Aggregate result of running a `ValidationPlan`.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationReport {
    /// True only if every executed stage succeeded.
    pub success: bool,
    /// Total wall-clock time for the whole plan.
    pub duration_ms: u64,
    /// Per-stage reports, in execution order (stops after first failure).
    pub stages: Vec<ValidationStageReport>,
    /// Concatenated stdout/stderr of all executed stages.
    pub logs: String,
}
132
/// All textual material fed into `extract_deterministic_signals`.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionInput {
    /// Unified diff of the mutation under consideration.
    pub patch_diff: String,
    /// Stated intent of the mutation.
    pub intent: String,
    /// Declared expected effect of the mutation.
    pub expected_effect: String,
    /// Signals explicitly declared by the author; normalized and tokenized.
    pub declared_signals: Vec<String>,
    /// Paths touched by the change; tokenized for signals.
    pub changed_files: Vec<String>,
    /// Whether validation passed; becomes a "validation passed/failed" signal.
    pub validation_success: bool,
    /// Raw validation log text; tokenized for signals.
    pub validation_logs: String,
    /// Per-stage output blobs; tokenized for signals.
    pub stage_outputs: Vec<String>,
}
144
/// Deterministic signal set derived from a `SignalExtractionInput`.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionOutput {
    /// Up to 32 deduplicated signals in lexicographic (BTreeSet) order.
    pub values: Vec<String>,
    /// Stable hash over `values` (JSON hash, falling back to artifact hash).
    pub hash: String,
}
150
151#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
152pub struct SeedTemplate {
153 pub id: String,
154 pub intent: String,
155 pub signals: Vec<String>,
156 pub diff_payload: String,
157 pub validation_profile: String,
158}
159
160#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
161pub struct BootstrapReport {
162 pub seeded: bool,
163 pub genes_added: usize,
164 pub capsules_added: usize,
165}
166
// Retention cap for reported experiences (consumer not visible in this chunk).
const REPORTED_EXPERIENCE_RETENTION_LIMIT: usize = 3;
// Shadow-asset promotion thresholds — NOTE(review): used by promotion logic
// outside this chunk; confirm semantics there.
const SHADOW_PROMOTION_MIN_REPLAY_ATTEMPTS: u64 = 2;
const SHADOW_PROMOTION_MIN_SUCCESS_RATE: f32 = 0.70;
const SHADOW_PROMOTION_MIN_ENV_MATCH: f32 = 0.75;
const SHADOW_PROMOTION_MIN_DECAYED_CONFIDENCE: f32 = MIN_REPLAY_CONFIDENCE;
// Floor for reasoning-token accounting; also the default for the release
// gate's `min_reasoning_avoided_tokens` threshold (see Default impl below).
const REPLAY_REASONING_TOKEN_FLOOR: u64 = 192;
const REPLAY_REASONING_TOKEN_SIGNAL_WEIGHT: u64 = 24;
// Score penalty applied when a candidate came from an exact-match (cold-start)
// lookup rather than the selector; see `build_select_evidence`.
const COLD_START_LOOKUP_PENALTY: f32 = 0.05;
// Size/budget ceilings for mutation-needed failure classification
// (consumers not visible in this chunk).
const MUTATION_NEEDED_MAX_DIFF_BYTES: usize = 128 * 1024;
const MUTATION_NEEDED_MAX_CHANGED_LINES: usize = 600;
const MUTATION_NEEDED_MAX_SANDBOX_DURATION_MS: u64 = 120_000;
const MUTATION_NEEDED_MAX_VALIDATION_BUDGET_MS: u64 = 900_000;
/// Dimensions along which replay-ROI release-gate metrics are aggregated.
pub const REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS: [&str; 2] =
    ["task_class", "source_sender_id"];
181
182#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
183pub struct RepairQualityGateReport {
184 pub root_cause: bool,
185 pub fix: bool,
186 pub verification: bool,
187 pub rollback: bool,
188 pub incident_anchor: bool,
189 pub structure_score: usize,
190 pub has_actionable_command: bool,
191}
192
193impl RepairQualityGateReport {
194 pub fn passes(&self) -> bool {
195 self.incident_anchor
196 && self.structure_score >= 3
197 && (self.has_actionable_command || self.verification)
198 }
199
200 pub fn failed_checks(&self) -> Vec<String> {
201 let mut failed = Vec::new();
202 if !self.incident_anchor {
203 failed.push("包含unknown command故障上下文".to_string());
204 }
205 if self.structure_score < 3 {
206 failed.push("结构化修复信息至少满足3项(根因/修复/验证/回滚)".to_string());
207 }
208 if !(self.has_actionable_command || self.verification) {
209 failed.push("包含可执行验证命令或验证计划".to_string());
210 }
211 failed
212 }
213}
214
215pub fn evaluate_repair_quality_gate(plan: &str) -> RepairQualityGateReport {
216 fn contains_any(haystack: &str, needles: &[&str]) -> bool {
217 needles.iter().any(|needle| haystack.contains(needle))
218 }
219
220 let lower = plan.to_ascii_lowercase();
221 let root_cause = contains_any(
222 plan,
223 &["根因", "原因分析", "问题定位", "原因定位", "根本原因"],
224 ) || contains_any(
225 &lower,
226 &[
227 "root cause",
228 "cause analysis",
229 "problem diagnosis",
230 "diagnosis",
231 ],
232 );
233 let fix = contains_any(
234 plan,
235 &["修复步骤", "修复方案", "处理步骤", "修复建议", "整改方案"],
236 ) || contains_any(
237 &lower,
238 &[
239 "fix",
240 "remediation",
241 "mitigation",
242 "resolution",
243 "repair steps",
244 ],
245 );
246 let verification = contains_any(
247 plan,
248 &["验证命令", "验证步骤", "回归测试", "验证方式", "验收步骤"],
249 ) || contains_any(
250 &lower,
251 &[
252 "verification",
253 "validate",
254 "regression test",
255 "smoke test",
256 "test command",
257 ],
258 );
259 let rollback = contains_any(plan, &["回滚方案", "回滚步骤", "恢复方案", "撤销方案"])
260 || contains_any(&lower, &["rollback", "revert", "fallback plan", "undo"]);
261 let incident_anchor = contains_any(
262 &lower,
263 &[
264 "unknown command",
265 "process",
266 "proccess",
267 "command not found",
268 ],
269 ) || contains_any(plan, &["命令不存在", "命令未找到", "未知命令"]);
270 let structure_score = [root_cause, fix, verification, rollback]
271 .into_iter()
272 .filter(|ok| *ok)
273 .count();
274 let has_actionable_command = contains_any(
275 &lower,
276 &[
277 "cargo ", "git ", "python ", "pip ", "npm ", "pnpm ", "yarn ", "bash ", "make ",
278 ],
279 );
280
281 RepairQualityGateReport {
282 root_cause,
283 fix,
284 verification,
285 rollback,
286 incident_anchor,
287 structure_score,
288 has_actionable_command,
289 }
290}
291
292impl ValidationReport {
293 pub fn to_snapshot(&self, profile: &str) -> ValidationSnapshot {
294 ValidationSnapshot {
295 success: self.success,
296 profile: profile.to_string(),
297 duration_ms: self.duration_ms,
298 summary: if self.success {
299 "validation passed".into()
300 } else {
301 "validation failed".into()
302 },
303 }
304 }
305}
306
307pub fn extract_deterministic_signals(input: &SignalExtractionInput) -> SignalExtractionOutput {
308 let mut signals = BTreeSet::new();
309
310 for declared in &input.declared_signals {
311 if let Some(phrase) = normalize_signal_phrase(declared) {
312 signals.insert(phrase);
313 }
314 extend_signal_tokens(&mut signals, declared);
315 }
316
317 for text in [
318 input.patch_diff.as_str(),
319 input.intent.as_str(),
320 input.expected_effect.as_str(),
321 input.validation_logs.as_str(),
322 ] {
323 extend_signal_tokens(&mut signals, text);
324 }
325
326 for changed_file in &input.changed_files {
327 extend_signal_tokens(&mut signals, changed_file);
328 }
329
330 for stage_output in &input.stage_outputs {
331 extend_signal_tokens(&mut signals, stage_output);
332 }
333
334 signals.insert(if input.validation_success {
335 "validation passed".into()
336 } else {
337 "validation failed".into()
338 });
339
340 let values = signals.into_iter().take(32).collect::<Vec<_>>();
341 let hash =
342 stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
343 SignalExtractionOutput { values, hash }
344}
345
/// Error surfaced by a `Validator` implementation.
#[derive(Debug, Error)]
pub enum ValidationError {
    /// The validation pipeline itself could not be executed.
    #[error("validation execution failed: {0}")]
    Execution(String),
}
351
/// Runs a `ValidationPlan` against the working tree referenced by a sandbox
/// receipt and reports per-stage outcomes.
#[async_trait]
pub trait Validator: Send + Sync {
    /// Executes `plan` in the workdir recorded by `receipt`.
    async fn run(
        &self,
        receipt: &SandboxReceipt,
        plan: &ValidationPlan,
    ) -> Result<ValidationReport, ValidationError>;
}
360
/// `Validator` that shells out to allow-listed commands under a sandbox policy.
pub struct CommandValidator {
    // Policy controlling which commands may run and in what environment.
    policy: SandboxPolicy,
}
364
365impl CommandValidator {
366 pub fn new(policy: SandboxPolicy) -> Self {
367 Self { policy }
368 }
369}
370
371#[async_trait]
372impl Validator for CommandValidator {
373 async fn run(
374 &self,
375 receipt: &SandboxReceipt,
376 plan: &ValidationPlan,
377 ) -> Result<ValidationReport, ValidationError> {
378 let started = std::time::Instant::now();
379 let mut stages = Vec::new();
380 let mut success = true;
381 let mut logs = String::new();
382
383 for stage in &plan.stages {
384 match stage {
385 ValidationStage::Command {
386 program,
387 args,
388 timeout_ms,
389 } => {
390 let result = execute_allowed_command(
391 &self.policy,
392 &receipt.workdir,
393 program,
394 args,
395 *timeout_ms,
396 )
397 .await;
398 let report = match result {
399 Ok(output) => ValidationStageReport {
400 stage: format!("{program} {}", args.join(" ")),
401 success: output.success,
402 exit_code: output.exit_code,
403 duration_ms: output.duration_ms,
404 stdout: output.stdout,
405 stderr: output.stderr,
406 },
407 Err(err) => ValidationStageReport {
408 stage: format!("{program} {}", args.join(" ")),
409 success: false,
410 exit_code: None,
411 duration_ms: 0,
412 stdout: String::new(),
413 stderr: err.to_string(),
414 },
415 };
416 if !report.success {
417 success = false;
418 }
419 if !report.stdout.is_empty() {
420 logs.push_str(&report.stdout);
421 logs.push('\n');
422 }
423 if !report.stderr.is_empty() {
424 logs.push_str(&report.stderr);
425 logs.push('\n');
426 }
427 stages.push(report);
428 if !success {
429 break;
430 }
431 }
432 }
433 }
434
435 Ok(ValidationReport {
436 success,
437 duration_ms: started.elapsed().as_millis() as u64,
438 stages,
439 logs,
440 })
441 }
442}
443
444#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
445pub struct ReplayDetectEvidence {
446 pub task_class_id: String,
447 pub task_label: String,
448 pub matched_signals: Vec<String>,
449 pub mismatch_reasons: Vec<String>,
450}
451
452#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
453pub struct ReplayCandidateEvidence {
454 pub rank: usize,
455 pub gene_id: String,
456 pub capsule_id: Option<String>,
457 pub match_quality: f32,
458 pub confidence: Option<f32>,
459 pub environment_match_factor: Option<f32>,
460 pub cold_start_penalty: f32,
461 pub final_score: f32,
462}
463
464#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
465pub struct ReplaySelectEvidence {
466 pub exact_match_lookup: bool,
467 pub selected_gene_id: Option<String>,
468 pub selected_capsule_id: Option<String>,
469 pub candidates: Vec<ReplayCandidateEvidence>,
470}
471
472#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
473pub struct ReplayDecision {
474 pub used_capsule: bool,
475 pub capsule_id: Option<CapsuleId>,
476 pub fallback_to_planner: bool,
477 pub reason: String,
478 pub detect_evidence: ReplayDetectEvidence,
479 pub select_evidence: ReplaySelectEvidence,
480 pub economics_evidence: ReplayRoiEvidence,
481}
482
483#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
484pub struct ReplayTaskClassMetrics {
485 pub task_class_id: String,
486 pub task_label: String,
487 pub replay_success_total: u64,
488 pub replay_failure_total: u64,
489 pub reasoning_steps_avoided_total: u64,
490 pub reasoning_avoided_tokens_total: u64,
491 pub replay_fallback_cost_total: u64,
492 pub replay_roi: f64,
493}
494
495#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
496pub struct ReplaySourceRoiMetrics {
497 pub source_sender_id: String,
498 pub replay_success_total: u64,
499 pub replay_failure_total: u64,
500 pub reasoning_avoided_tokens_total: u64,
501 pub replay_fallback_cost_total: u64,
502 pub replay_roi: f64,
503}
504
505#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
506pub struct ReplayRoiWindowSummary {
507 pub generated_at: String,
508 pub window_seconds: u64,
509 pub replay_attempts_total: u64,
510 pub replay_success_total: u64,
511 pub replay_failure_total: u64,
512 pub reasoning_avoided_tokens_total: u64,
513 pub replay_fallback_cost_total: u64,
514 pub replay_roi: f64,
515 pub replay_task_classes: Vec<ReplayTaskClassMetrics>,
516 pub replay_sources: Vec<ReplaySourceRoiMetrics>,
517}
518
519#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
520pub struct ReplayRoiReleaseGateThresholds {
521 pub min_replay_attempts: u64,
522 pub min_replay_hit_rate: f64,
523 pub max_false_replay_rate: f64,
524 pub min_reasoning_avoided_tokens: u64,
525 pub min_replay_roi: f64,
526 pub require_replay_safety: bool,
527}
528
impl Default for ReplayRoiReleaseGateThresholds {
    /// Shipped defaults: ≥3 attempts, ≥60% hit rate, ≤25% false replays,
    /// token floor from `REPLAY_REASONING_TOKEN_FLOOR`, ≥0.05 ROI, and
    /// safety required.
    fn default() -> Self {
        Self {
            min_replay_attempts: 3,
            min_replay_hit_rate: 0.60,
            max_false_replay_rate: 0.25,
            min_reasoning_avoided_tokens: REPLAY_REASONING_TOKEN_FLOOR,
            min_replay_roi: 0.05,
            require_replay_safety: true,
        }
    }
}
541
542#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
543#[serde(rename_all = "snake_case")]
544pub enum ReplayRoiReleaseGateAction {
545 BlockRelease,
546}
547
548#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
549pub struct ReplayRoiReleaseGateFailClosedPolicy {
550 pub on_threshold_violation: ReplayRoiReleaseGateAction,
551 pub on_missing_metrics: ReplayRoiReleaseGateAction,
552 pub on_invalid_metrics: ReplayRoiReleaseGateAction,
553}
554
impl Default for ReplayRoiReleaseGateFailClosedPolicy {
    /// Fail closed everywhere: any violation, missing, or invalid metrics
    /// blocks the release.
    fn default() -> Self {
        Self {
            on_threshold_violation: ReplayRoiReleaseGateAction::BlockRelease,
            on_missing_metrics: ReplayRoiReleaseGateAction::BlockRelease,
            on_invalid_metrics: ReplayRoiReleaseGateAction::BlockRelease,
        }
    }
}
564
565#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
566pub struct ReplayRoiReleaseGateSafetySignal {
567 pub fail_closed_default: bool,
568 pub rollback_ready: bool,
569 pub audit_trail_complete: bool,
570 pub has_replay_activity: bool,
571}
572
573#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
574pub struct ReplayRoiReleaseGateInputContract {
575 pub generated_at: String,
576 pub window_seconds: u64,
577 pub aggregation_dimensions: Vec<String>,
578 pub replay_attempts_total: u64,
579 pub replay_success_total: u64,
580 pub replay_failure_total: u64,
581 pub replay_hit_rate: f64,
582 pub false_replay_rate: f64,
583 pub reasoning_avoided_tokens: u64,
584 pub replay_fallback_cost_total: u64,
585 pub replay_roi: f64,
586 pub replay_safety: bool,
587 pub replay_safety_signal: ReplayRoiReleaseGateSafetySignal,
588 pub thresholds: ReplayRoiReleaseGateThresholds,
589 pub fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy,
590}
591
592#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
593#[serde(rename_all = "snake_case")]
594pub enum ReplayRoiReleaseGateStatus {
595 Pass,
596 FailClosed,
597 Indeterminate,
598}
599
600#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
601pub struct ReplayRoiReleaseGateOutputContract {
602 pub status: ReplayRoiReleaseGateStatus,
603 pub failed_checks: Vec<String>,
604 pub evidence_refs: Vec<String>,
605 pub summary: String,
606}
607
608#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
609pub struct ReplayRoiReleaseGateContract {
610 pub input: ReplayRoiReleaseGateInputContract,
611 pub output: ReplayRoiReleaseGateOutputContract,
612}
613
614#[derive(Clone, Copy, Debug, Eq, PartialEq)]
615enum CoordinationTaskState {
616 Ready,
617 Waiting,
618 BlockedByFailure,
619 PermanentlyBlocked,
620}
621
/// Stateless scheduler that executes a `CoordinationPlan` of role-tagged,
/// dependency-ordered tasks.
#[derive(Clone, Debug, Default)]
pub struct MultiAgentCoordinator;
624
impl MultiAgentCoordinator {
    /// Constructs the coordinator; the type carries no state.
    pub fn new() -> Self {
        Self
    }

    /// Drives the plan's tasks to a terminal state and returns the ordered
    /// completion/failure lists plus every coordination message produced.
    ///
    /// Each round: (Conditional mode only) skip tasks whose dependency chain
    /// failed, collect ready tasks (Sequential mode runs at most one per
    /// round), emit handoff messages, simulate execution, and retry failures
    /// up to `max_retries`. When no task is ready the loop ends and any task
    /// still pending is reported as failed with a reason message.
    pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
        let primitive = plan.primitive.clone();
        let root_goal = plan.root_goal.clone();
        let timeout_ms = plan.timeout_ms;
        let max_retries = plan.max_retries;
        // Index tasks by id; on duplicate ids the first occurrence wins.
        let mut tasks = BTreeMap::new();
        for task in plan.tasks {
            tasks.entry(task.id.clone()).or_insert(task);
        }

        let mut pending = tasks.keys().cloned().collect::<BTreeSet<_>>();
        let mut completed = BTreeSet::new();
        let mut failed = BTreeSet::new();
        // Order-preserving mirrors of the completed/failed sets.
        let mut completed_order = Vec::new();
        let mut failed_order = Vec::new();
        let mut skipped = BTreeSet::new();
        // Per-task failure counters used for retry accounting.
        let mut attempts = BTreeMap::new();
        let mut messages = Vec::new();

        loop {
            // Conditional coordination proactively skips tasks blocked by a
            // failed dependency chain instead of leaving them pending.
            if matches!(primitive, CoordinationPrimitive::Conditional) {
                self.apply_conditional_skips(
                    &tasks,
                    &mut pending,
                    &completed,
                    &failed,
                    &mut skipped,
                    &mut messages,
                );
            }

            let mut ready = self.ready_task_ids(&tasks, &pending, &completed, &failed, &skipped);
            if ready.is_empty() {
                break;
            }
            // Sequential mode executes only the first ready task per round.
            if matches!(primitive, CoordinationPrimitive::Sequential) {
                ready.truncate(1);
            }

            for task_id in ready {
                let Some(task) = tasks.get(&task_id) else {
                    continue;
                };
                if !pending.contains(&task_id) {
                    continue;
                }
                self.record_handoff_messages(task, &tasks, &completed, &failed, &mut messages);

                let prior_failures = attempts.get(&task_id).copied().unwrap_or(0);
                if Self::simulate_task_failure(task, prior_failures) {
                    let failure_count = prior_failures + 1;
                    attempts.insert(task_id.clone(), failure_count);
                    // A failing task stays pending (and retries next round)
                    // until it has failed more than `max_retries` times.
                    let will_retry = failure_count <= max_retries;
                    messages.push(CoordinationMessage {
                        from_role: task.role.clone(),
                        to_role: task.role.clone(),
                        task_id: task_id.clone(),
                        content: if will_retry {
                            format!("task {task_id} failed on attempt {failure_count} and will retry")
                        } else {
                            format!(
                                "task {task_id} failed on attempt {failure_count} and exhausted retries"
                            )
                        },
                    });
                    if !will_retry {
                        pending.remove(&task_id);
                        if failed.insert(task_id.clone()) {
                            failed_order.push(task_id);
                        }
                    }
                    continue;
                }

                pending.remove(&task_id);
                if completed.insert(task_id.clone()) {
                    completed_order.push(task_id);
                }
            }
        }

        // Anything still pending is unreachable: classify why and fail it.
        let blocked_ids = pending.into_iter().collect::<Vec<_>>();
        for task_id in blocked_ids {
            let Some(task) = tasks.get(&task_id) else {
                continue;
            };
            let state = self.classify_task(task, &tasks, &completed, &failed, &skipped);
            let content = match state {
                CoordinationTaskState::BlockedByFailure => {
                    format!("task {task_id} blocked by failed dependencies")
                }
                CoordinationTaskState::PermanentlyBlocked => {
                    format!("task {task_id} has invalid coordination prerequisites")
                }
                CoordinationTaskState::Waiting => {
                    format!("task {task_id} has unresolved dependencies")
                }
                CoordinationTaskState::Ready => {
                    format!("task {task_id} was left pending unexpectedly")
                }
            };
            messages.push(CoordinationMessage {
                from_role: task.role.clone(),
                to_role: task.role.clone(),
                task_id: task_id.clone(),
                content,
            });
            if failed.insert(task_id.clone()) {
                failed_order.push(task_id);
            }
        }

        CoordinationResult {
            completed_tasks: completed_order,
            failed_tasks: failed_order,
            messages,
            summary: format!(
                "goal '{}' completed {} tasks, failed {}, skipped {} using {:?} coordination (timeout={}ms, max_retries={})",
                root_goal,
                completed.len(),
                failed.len(),
                skipped.len(),
                primitive,
                timeout_ms,
                max_retries
            ),
        }
    }

    /// Ids of pending tasks currently classified as `Ready`, in the pending
    /// set's (sorted) iteration order.
    fn ready_task_ids(
        &self,
        tasks: &BTreeMap<String, CoordinationTask>,
        pending: &BTreeSet<String>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &BTreeSet<String>,
    ) -> Vec<String> {
        pending
            .iter()
            .filter_map(|task_id| {
                let task = tasks.get(task_id)?;
                (self.classify_task(task, tasks, completed, failed, skipped)
                    == CoordinationTaskState::Ready)
                    .then(|| task_id.clone())
            })
            .collect()
    }

    /// Moves every pending task that is `BlockedByFailure` into the skipped
    /// set, emitting a skip message for each (Conditional mode only).
    fn apply_conditional_skips(
        &self,
        tasks: &BTreeMap<String, CoordinationTask>,
        pending: &mut BTreeSet<String>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &mut BTreeSet<String>,
        messages: &mut Vec<CoordinationMessage>,
    ) {
        // Collect first: classification borrows `skipped` immutably.
        let skip_ids = pending
            .iter()
            .filter_map(|task_id| {
                let task = tasks.get(task_id)?;
                (self.classify_task(task, tasks, completed, failed, skipped)
                    == CoordinationTaskState::BlockedByFailure)
                    .then(|| task_id.clone())
            })
            .collect::<Vec<_>>();

        for task_id in skip_ids {
            let Some(task) = tasks.get(&task_id) else {
                continue;
            };
            pending.remove(&task_id);
            skipped.insert(task_id.clone());
            messages.push(CoordinationMessage {
                from_role: task.role.clone(),
                to_role: task.role.clone(),
                task_id: task_id.clone(),
                content: format!("task {task_id} skipped due to failed dependency chain"),
            });
        }
    }

    /// Role-specific readiness rules:
    /// - Planner/Coder: ready when all dependencies completed; blocked if any
    ///   failed/skipped; permanently blocked on unknown dependency ids.
    /// - Repair: requires at least one Coder dependency and becomes ready only
    ///   when a Coder dependency has FAILED (it exists to repair failures);
    ///   if all dependencies succeeded it is permanently blocked.
    /// - Optimizer: requires an implementation dependency (Coder or Repair)
    ///   and is ready once at least one implementation dependency completed,
    ///   tolerating failed implementation deps alongside a completed one.
    fn classify_task(
        &self,
        task: &CoordinationTask,
        tasks: &BTreeMap<String, CoordinationTask>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &BTreeSet<String>,
    ) -> CoordinationTaskState {
        match task.role {
            AgentRole::Planner | AgentRole::Coder => {
                let mut waiting = false;
                for dependency_id in &task.depends_on {
                    if !tasks.contains_key(dependency_id) {
                        return CoordinationTaskState::PermanentlyBlocked;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if waiting {
                    CoordinationTaskState::Waiting
                } else {
                    CoordinationTaskState::Ready
                }
            }
            AgentRole::Repair => {
                let mut waiting = false;
                let mut has_coder_dependency = false;
                let mut has_failed_coder = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_coder = matches!(dependency.role, AgentRole::Coder);
                    if is_coder {
                        has_coder_dependency = true;
                    }
                    if skipped.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if failed.contains(dependency_id) {
                        // A failed Coder is the trigger condition; any other
                        // failed dependency blocks the repair task.
                        if is_coder {
                            has_failed_coder = true;
                        } else {
                            return CoordinationTaskState::BlockedByFailure;
                        }
                        continue;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if !has_coder_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_failed_coder {
                    CoordinationTaskState::Ready
                } else {
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
            AgentRole::Optimizer => {
                let mut waiting = false;
                let mut has_impl_dependency = false;
                let mut has_completed_impl = false;
                let mut has_failed_impl = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_impl = matches!(dependency.role, AgentRole::Coder | AgentRole::Repair);
                    if is_impl {
                        has_impl_dependency = true;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        if is_impl {
                            has_failed_impl = true;
                            continue;
                        }
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if completed.contains(dependency_id) {
                        if is_impl {
                            has_completed_impl = true;
                        }
                        continue;
                    }
                    waiting = true;
                }
                if !has_impl_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_completed_impl {
                    CoordinationTaskState::Ready
                } else if has_failed_impl {
                    CoordinationTaskState::BlockedByFailure
                } else {
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
        }
    }

    /// Emits one handoff (or failure-routing) message per resolved dependency
    /// of `task`, deduplicated and in sorted dependency order.
    fn record_handoff_messages(
        &self,
        task: &CoordinationTask,
        tasks: &BTreeMap<String, CoordinationTask>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        messages: &mut Vec<CoordinationMessage>,
    ) {
        let mut dependency_ids = task.depends_on.clone();
        dependency_ids.sort();
        dependency_ids.dedup();

        for dependency_id in dependency_ids {
            let Some(dependency) = tasks.get(&dependency_id) else {
                continue;
            };
            if completed.contains(&dependency_id) {
                messages.push(CoordinationMessage {
                    from_role: dependency.role.clone(),
                    to_role: task.role.clone(),
                    task_id: task.id.clone(),
                    content: format!("handoff from {dependency_id} to {}", task.id),
                });
            } else if failed.contains(&dependency_id) {
                messages.push(CoordinationMessage {
                    from_role: dependency.role.clone(),
                    to_role: task.role.clone(),
                    task_id: task.id.clone(),
                    content: format!("failed dependency {dependency_id} routed to {}", task.id),
                });
            }
        }
    }

    /// Deterministic test-style failure injection driven by the task
    /// description: "force-fail" always fails; "fail-once" fails only on the
    /// first attempt.
    fn simulate_task_failure(task: &CoordinationTask, prior_failures: u32) -> bool {
        let normalized = task.description.to_ascii_lowercase();
        normalized.contains("force-fail")
            || (normalized.contains("fail-once") && prior_failures == 0)
    }
}
960
/// Error surfaced by a `ReplayExecutor` implementation, split by subsystem.
#[derive(Debug, Error)]
pub enum ReplayError {
    /// Evolution store access failed.
    #[error("store error: {0}")]
    Store(String),
    /// Sandbox execution failed.
    #[error("sandbox error: {0}")]
    Sandbox(String),
    /// Validation pipeline failed to run.
    #[error("validation error: {0}")]
    Validation(String),
}
970
971#[async_trait]
972pub trait ReplayExecutor: Send + Sync {
973 async fn try_replay(
974 &self,
975 input: &SelectorInput,
976 policy: &SandboxPolicy,
977 validation: &ValidationPlan,
978 ) -> Result<ReplayDecision, ReplayError>;
979
980 async fn try_replay_for_run(
981 &self,
982 run_id: &RunId,
983 input: &SelectorInput,
984 policy: &SandboxPolicy,
985 validation: &ValidationPlan,
986 ) -> Result<ReplayDecision, ReplayError> {
987 let _ = run_id;
988 self.try_replay(input, policy, validation).await
989 }
990}
991
/// `ReplayExecutor` backed by the evolution store: selects candidate genes,
/// replays their capsules in a sandbox, and validates the result.
pub struct StoreReplayExecutor {
    /// Sandbox used to apply and execute replayed mutations.
    pub sandbox: Arc<dyn Sandbox>,
    /// Validator run against the sandbox output.
    pub validator: Arc<dyn Validator>,
    /// Evolution event store (candidates, events, projections).
    pub store: Arc<dyn EvolutionStore>,
    /// Candidate selector queried first, before exact-match fallbacks.
    pub selector: Arc<dyn Selector>,
    /// Governor consulted for replay decisions.
    pub governor: Arc<dyn Governor>,
    /// Optional EVU ledger; when present (with publishers) the candidate
    /// limit is widened — see `collect_replay_candidates`.
    pub economics: Option<Arc<Mutex<EvuLedger>>>,
    /// Optional map of remote publishers (id → descriptor).
    pub remote_publishers: Option<Arc<Mutex<BTreeMap<String, String>>>>,
    /// Staking policy applied to replay economics.
    pub stake_policy: StakePolicy,
}
1002
/// Internal result of candidate collection: the ranked candidates plus
/// whether they came from an exact-match fallback lookup.
struct ReplayCandidates {
    candidates: Vec<GeneCandidate>,
    exact_match: bool,
}
1007
#[async_trait]
impl ReplayExecutor for StoreReplayExecutor {
    /// Delegates to `try_replay_inner` without a run scope.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        self.try_replay_inner(None, input, policy, validation).await
    }

    /// Delegates to `try_replay_inner` with the run id attached.
    async fn try_replay_for_run(
        &self,
        run_id: &RunId,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        self.try_replay_inner(Some(run_id), input, policy, validation)
            .await
    }
}
1030
1031impl StoreReplayExecutor {
    /// Gathers replay candidates for `input` with a three-tier fallback:
    /// selector results first, then local exact-match lookup, then
    /// quarantined remote exact matches. Runs confidence revalidation first
    /// and reranks every tier with the reputation bias.
    fn collect_replay_candidates(&self, input: &SelectorInput) -> ReplayCandidates {
        // Quarantine decayed-confidence assets before selecting from them.
        self.apply_confidence_revalidation();
        let mut selector_input = input.clone();
        // With both economics and remote publishers active, widen the
        // selection window so remote options can compete.
        if self.economics.is_some() && self.remote_publishers.is_some() {
            selector_input.limit = selector_input.limit.max(4);
        }
        let mut candidates = self.selector.select(&selector_input);
        self.rerank_with_reputation_bias(&mut candidates);
        let mut exact_match = false;
        if candidates.is_empty() {
            // Tier 2: exact-match lookup against the local store.
            let mut exact_candidates = exact_match_candidates(self.store.as_ref(), input);
            self.rerank_with_reputation_bias(&mut exact_candidates);
            if !exact_candidates.is_empty() {
                candidates = exact_candidates;
                exact_match = true;
            }
        }
        if candidates.is_empty() {
            // Tier 3: exact matches among quarantined remote assets.
            let mut remote_candidates =
                quarantined_remote_exact_match_candidates(self.store.as_ref(), input);
            self.rerank_with_reputation_bias(&mut remote_candidates);
            if !remote_candidates.is_empty() {
                candidates = remote_candidates;
                exact_match = true;
            }
        }
        // Honor the caller's limit but always allow at least one candidate.
        candidates.truncate(input.limit.max(1));
        ReplayCandidates {
            candidates,
            exact_match,
        }
    }
1064
    /// Converts the ranked candidate list into auditable select-phase
    /// evidence. Exact-match (cold-start) lookups carry a flat score penalty
    /// (`COLD_START_LOOKUP_PENALTY`); the first row is recorded as the
    /// selected gene/capsule.
    fn build_select_evidence(
        &self,
        input: &SelectorInput,
        candidates: &[GeneCandidate],
        exact_match: bool,
    ) -> ReplaySelectEvidence {
        let cold_start_penalty = if exact_match {
            COLD_START_LOOKUP_PENALTY
        } else {
            0.0
        };
        let candidate_rows = candidates
            .iter()
            .enumerate()
            .map(|(idx, candidate)| {
                // Evidence for each candidate is derived from its top capsule.
                let top_capsule = candidate.capsules.first();
                let environment_match_factor = top_capsule
                    .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env));
                let final_score = candidate.score * (1.0 - cold_start_penalty);
                ReplayCandidateEvidence {
                    rank: idx + 1,
                    gene_id: candidate.gene.id.clone(),
                    capsule_id: top_capsule.map(|capsule| capsule.id.clone()),
                    match_quality: candidate.score,
                    confidence: top_capsule.map(|capsule| capsule.confidence),
                    environment_match_factor,
                    cold_start_penalty,
                    final_score,
                }
            })
            .collect::<Vec<_>>();

        ReplaySelectEvidence {
            exact_match_lookup: exact_match,
            selected_gene_id: candidate_rows
                .first()
                .map(|candidate| candidate.gene_id.clone()),
            selected_capsule_id: candidate_rows
                .first()
                .and_then(|candidate| candidate.capsule_id.clone()),
            candidates: candidate_rows,
        }
    }
1108
    /// Quarantines promoted genes whose time-decayed replay confidence has
    /// fallen below the replay floor, along with their promoted capsules.
    ///
    /// Best-effort by design: a failed projection read aborts the whole pass,
    /// and a failed gene-level append skips that gene's capsule quarantine,
    /// without surfacing an error to the caller.
    fn apply_confidence_revalidation(&self) {
        let Ok(projection) = projection_snapshot(self.store.as_ref()) else {
            return;
        };
        for target in stale_replay_revalidation_targets(&projection, Utc::now()) {
            let reason = format!(
                "confidence decayed to {:.3}; revalidation required before replay",
                target.decayed_confidence
            );
            // Ratio of decayed vs. peak confidence, clamped to [0, 1];
            // 0.0 when there is no positive peak to compare against.
            let confidence_decay_ratio = if target.peak_confidence > 0.0 {
                (target.decayed_confidence / target.peak_confidence).clamp(0.0, 1.0)
            } else {
                0.0
            };
            if self
                .store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: target.gene_id.clone(),
                    state: AssetState::Quarantined,
                    reason: reason.clone(),
                    reason_code: TransitionReasonCode::RevalidationConfidenceDecay,
                    evidence: Some(TransitionEvidence {
                        replay_attempts: None,
                        replay_successes: None,
                        replay_success_rate: None,
                        environment_match_factor: None,
                        decayed_confidence: Some(target.decayed_confidence),
                        confidence_decay_ratio: Some(confidence_decay_ratio),
                        summary: Some(format!(
                            "phase=confidence_revalidation; decayed_confidence={:.3}; confidence_decay_ratio={:.3}",
                            target.decayed_confidence, confidence_decay_ratio
                        )),
                    }),
                })
                .is_err()
            {
                // Gene-level event failed to persist: don't quarantine capsules
                // for a transition that was never recorded.
                continue;
            }
            for capsule_id in target.capsule_ids {
                if self
                    .store
                    .append_event(EvolutionEvent::CapsuleQuarantined { capsule_id })
                    .is_err()
                {
                    break;
                }
            }
        }
    }
1158
1159 fn build_replay_economics_evidence(
1160 &self,
1161 input: &SelectorInput,
1162 candidate: Option<&GeneCandidate>,
1163 source_sender_id: Option<&str>,
1164 success: bool,
1165 reason_code: ReplayRoiReasonCode,
1166 reason: &str,
1167 ) -> ReplayRoiEvidence {
1168 let (task_class_id, task_label) =
1169 replay_descriptor_from_candidate_or_input(candidate, input);
1170 let signal_source = candidate
1171 .map(|best| best.gene.signals.as_slice())
1172 .unwrap_or(input.signals.as_slice());
1173 let baseline_tokens = estimated_reasoning_tokens(signal_source);
1174 let reasoning_avoided_tokens = if success { baseline_tokens } else { 0 };
1175 let replay_fallback_cost = if success { 0 } else { baseline_tokens };
1176 let asset_origin =
1177 candidate.and_then(|best| strategy_metadata_value(&best.gene.strategy, "asset_origin"));
1178 let mut context_dimensions = vec![
1179 format!(
1180 "outcome={}",
1181 if success {
1182 "replay_hit"
1183 } else {
1184 "planner_fallback"
1185 }
1186 ),
1187 format!("reason={reason}"),
1188 format!("task_class_id={task_class_id}"),
1189 format!("task_label={task_label}"),
1190 ];
1191 if let Some(asset_origin) = asset_origin.as_deref() {
1192 context_dimensions.push(format!("asset_origin={asset_origin}"));
1193 }
1194 if let Some(source_sender_id) = source_sender_id {
1195 context_dimensions.push(format!("source_sender_id={source_sender_id}"));
1196 }
1197 ReplayRoiEvidence {
1198 success,
1199 reason_code,
1200 task_class_id,
1201 task_label,
1202 reasoning_avoided_tokens,
1203 replay_fallback_cost,
1204 replay_roi: compute_replay_roi(reasoning_avoided_tokens, replay_fallback_cost),
1205 asset_origin,
1206 source_sender_id: source_sender_id.map(ToOwned::to_owned),
1207 context_dimensions,
1208 }
1209 }
1210
1211 fn record_replay_economics(
1212 &self,
1213 replay_run_id: Option<&RunId>,
1214 candidate: Option<&GeneCandidate>,
1215 capsule_id: Option<&str>,
1216 evidence: ReplayRoiEvidence,
1217 ) -> Result<(), ReplayError> {
1218 self.store
1219 .append_event(EvolutionEvent::ReplayEconomicsRecorded {
1220 gene_id: candidate.map(|best| best.gene.id.clone()),
1221 capsule_id: capsule_id.map(ToOwned::to_owned),
1222 replay_run_id: replay_run_id.cloned(),
1223 evidence,
1224 })
1225 .map_err(|err| ReplayError::Store(err.to_string()))?;
1226 Ok(())
1227 }
1228
    /// Core replay pipeline: select a candidate gene/capsule, apply its
    /// mutation in the sandbox, validate the result, and record economics
    /// evidence for every outcome (hit or planner fallback).
    ///
    /// Misses are reported as `Ok(ReplayDecision { fallback_to_planner: true, .. })`;
    /// `Err` is reserved for store/validator infrastructure failures.
    async fn try_replay_inner(
        &self,
        replay_run_id: Option<&RunId>,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        // Phase 1: detect/select — gather candidates and audit evidence.
        let ReplayCandidates {
            candidates,
            exact_match,
        } = self.collect_replay_candidates(input);
        let mut detect_evidence = replay_detect_evidence_from_input(input);
        let select_evidence = self.build_select_evidence(input, &candidates, exact_match);
        // Miss: nothing matched at all.
        let Some(best) = candidates.into_iter().next() else {
            detect_evidence
                .mismatch_reasons
                .push("no_candidate_after_select".to_string());
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                None,
                None,
                false,
                ReplayRoiReasonCode::ReplayMissNoMatchingGene,
                "no matching gene",
            );
            self.record_replay_economics(replay_run_id, None, None, economics_evidence.clone())?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "no matching gene".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        };
        let (detected_task_class_id, detected_task_label) =
            replay_descriptor_from_candidate_or_input(Some(&best), input);
        detect_evidence.task_class_id = detected_task_class_id;
        detect_evidence.task_label = detected_task_label;
        detect_evidence.matched_signals =
            matched_replay_signals(&input.signals, &best.gene.signals);
        // Phase 2: score gate — selector hits below 0.82 fall back to the
        // planner; exact-match lookups bypass the threshold.
        if !exact_match && best.score < 0.82 {
            detect_evidence
                .mismatch_reasons
                .push("score_below_threshold".to_string());
            let reason = format!("best gene score {:.3} below replay threshold", best.score);
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                None,
                false,
                ReplayRoiReasonCode::ReplayMissScoreBelowThreshold,
                &reason,
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                None,
                economics_evidence.clone(),
            )?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason,
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        }

        // Miss: gene exists but carries no replayable capsule.
        let Some(capsule) = best.capsules.first().cloned() else {
            detect_evidence
                .mismatch_reasons
                .push("candidate_has_no_capsule".to_string());
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                None,
                false,
                ReplayRoiReasonCode::ReplayMissCandidateHasNoCapsule,
                "candidate gene has no capsule",
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                None,
                economics_evidence.clone(),
            )?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "candidate gene has no capsule".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        };
        // Remote publisher id (if any) drives settlement and shadow progression.
        let remote_publisher = self.publisher_for_capsule(&capsule.id);

        // Miss: capsule references a mutation the store no longer holds.
        let Some(mutation) = find_declared_mutation(self.store.as_ref(), &capsule.mutation_id)
            .map_err(|err| ReplayError::Store(err.to_string()))?
        else {
            detect_evidence
                .mismatch_reasons
                .push("mutation_payload_missing".to_string());
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                remote_publisher.as_deref(),
                false,
                ReplayRoiReasonCode::ReplayMissMutationPayloadMissing,
                "mutation payload missing from store",
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                Some(&capsule.id),
                economics_evidence.clone(),
            )?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "mutation payload missing from store".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        };

        // Phase 3: sandbox apply — a failed patch also settles reuse
        // economics against the remote publisher (if any).
        let receipt = match self.sandbox.apply(&mutation, policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                self.record_reuse_settlement(remote_publisher.as_deref(), false);
                let reason = format!("replay patch apply failed: {err}");
                let economics_evidence = self.build_replay_economics_evidence(
                    input,
                    Some(&best),
                    remote_publisher.as_deref(),
                    false,
                    ReplayRoiReasonCode::ReplayMissPatchApplyFailed,
                    &reason,
                );
                self.record_replay_economics(
                    replay_run_id,
                    Some(&best),
                    Some(&capsule.id),
                    economics_evidence.clone(),
                )?;
                detect_evidence
                    .mismatch_reasons
                    .push("patch_apply_failed".to_string());
                return Ok(ReplayDecision {
                    used_capsule: false,
                    capsule_id: Some(capsule.id.clone()),
                    fallback_to_planner: true,
                    reason,
                    detect_evidence,
                    select_evidence,
                    economics_evidence,
                });
            }
        };

        // Phase 4: validation — failure triggers the governor path in
        // `record_replay_validation_failure` plus negative settlement.
        let report = self
            .validator
            .run(&receipt, validation)
            .await
            .map_err(|err| ReplayError::Validation(err.to_string()))?;
        if !report.success {
            self.record_replay_validation_failure(&best, &capsule, validation, &report)?;
            self.record_reuse_settlement(remote_publisher.as_deref(), false);
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                remote_publisher.as_deref(),
                false,
                ReplayRoiReasonCode::ReplayMissValidationFailed,
                "replay validation failed",
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                Some(&capsule.id),
                economics_evidence.clone(),
            )?;
            detect_evidence
                .mismatch_reasons
                .push("validation_failed".to_string());
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: Some(capsule.id.clone()),
                fallback_to_planner: true,
                reason: "replay validation failed".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        }

        // Phase 5: shadow progression for remote assets — quarantined assets
        // enter shadow validation on their first local pass; shadow-validated
        // assets promote once the evidence gate is satisfied, else hold.
        let requires_shadow_progression = remote_publisher.is_some()
            && matches!(
                capsule.state,
                AssetState::Quarantined | AssetState::ShadowValidated
            );
        if requires_shadow_progression {
            self.store
                .append_event(EvolutionEvent::ValidationPassed {
                    mutation_id: capsule.mutation_id.clone(),
                    report: report.to_snapshot(&validation.profile),
                    gene_id: Some(best.gene.id.clone()),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            let evidence = self.shadow_transition_evidence(&best.gene.id, &capsule, &input.env)?;
            let (target_state, reason_code, reason, promote_now, phase) =
                if matches!(best.gene.state, AssetState::Quarantined) {
                    (
                        AssetState::ShadowValidated,
                        TransitionReasonCode::PromotionShadowValidationPassed,
                        "remote asset passed first local replay and entered shadow validation"
                            .into(),
                        false,
                        "quarantine_to_shadow",
                    )
                } else if shadow_promotion_gate_passed(&evidence) {
                    (
                        AssetState::Promoted,
                        TransitionReasonCode::PromotionRemoteReplayValidated,
                        "shadow validation thresholds satisfied; remote asset promoted".into(),
                        true,
                        "shadow_to_promoted",
                    )
                } else {
                    (
                        AssetState::ShadowValidated,
                        TransitionReasonCode::ShadowCollectingReplayEvidence,
                        "shadow validation collecting additional replay evidence".into(),
                        false,
                        "shadow_hold",
                    )
                };
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: best.gene.id.clone(),
                    state: target_state.clone(),
                    reason,
                    reason_code,
                    evidence: Some(evidence.to_transition_evidence(shadow_evidence_summary(
                        &evidence,
                        promote_now,
                        phase,
                    ))),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            if promote_now {
                self.store
                    .append_event(EvolutionEvent::GenePromoted {
                        gene_id: best.gene.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
            self.store
                .append_event(EvolutionEvent::CapsuleReleased {
                    capsule_id: capsule.id.clone(),
                    state: target_state,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
        }

        // Phase 6: record the successful reuse, settle economics positively,
        // and report the hit.
        self.store
            .append_event(EvolutionEvent::CapsuleReused {
                capsule_id: capsule.id.clone(),
                gene_id: capsule.gene_id.clone(),
                run_id: capsule.run_id.clone(),
                replay_run_id: replay_run_id.cloned(),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        self.record_reuse_settlement(remote_publisher.as_deref(), true);
        let reason = if exact_match {
            "replayed via cold-start lookup".to_string()
        } else {
            "replayed via selector".to_string()
        };
        let economics_evidence = self.build_replay_economics_evidence(
            input,
            Some(&best),
            remote_publisher.as_deref(),
            true,
            ReplayRoiReasonCode::ReplayHit,
            &reason,
        );
        self.record_replay_economics(
            replay_run_id,
            Some(&best),
            Some(&capsule.id),
            economics_evidence.clone(),
        )?;

        Ok(ReplayDecision {
            used_capsule: true,
            capsule_id: Some(capsule.id),
            fallback_to_planner: false,
            reason,
            detect_evidence,
            select_evidence,
            economics_evidence,
        })
    }
1540
1541 fn rerank_with_reputation_bias(&self, candidates: &mut [GeneCandidate]) {
1542 let Some(ledger) = self.economics.as_ref() else {
1543 return;
1544 };
1545 let reputation_bias = ledger
1546 .lock()
1547 .ok()
1548 .map(|locked| locked.selector_reputation_bias())
1549 .unwrap_or_default();
1550 if reputation_bias.is_empty() {
1551 return;
1552 }
1553 let required_assets = candidates
1554 .iter()
1555 .filter_map(|candidate| {
1556 candidate
1557 .capsules
1558 .first()
1559 .map(|capsule| capsule.id.as_str())
1560 })
1561 .collect::<Vec<_>>();
1562 let publisher_map = self.remote_publishers_snapshot(&required_assets);
1563 if publisher_map.is_empty() {
1564 return;
1565 }
1566 candidates.sort_by(|left, right| {
1567 effective_candidate_score(right, &publisher_map, &reputation_bias)
1568 .partial_cmp(&effective_candidate_score(
1569 left,
1570 &publisher_map,
1571 &reputation_bias,
1572 ))
1573 .unwrap_or(std::cmp::Ordering::Equal)
1574 .then_with(|| left.gene.id.cmp(&right.gene.id))
1575 });
1576 }
1577
1578 fn publisher_for_capsule(&self, capsule_id: &str) -> Option<String> {
1579 self.remote_publishers_snapshot(&[capsule_id])
1580 .get(capsule_id)
1581 .cloned()
1582 }
1583
1584 fn remote_publishers_snapshot(&self, required_assets: &[&str]) -> BTreeMap<String, String> {
1585 let cached = self
1586 .remote_publishers
1587 .as_ref()
1588 .and_then(|remote_publishers| {
1589 remote_publishers.lock().ok().map(|locked| locked.clone())
1590 })
1591 .unwrap_or_default();
1592 if !cached.is_empty()
1593 && required_assets
1594 .iter()
1595 .all(|asset_id| cached.contains_key(*asset_id))
1596 {
1597 return cached;
1598 }
1599
1600 let persisted = remote_publishers_by_asset_from_store(self.store.as_ref());
1601 if persisted.is_empty() {
1602 return cached;
1603 }
1604
1605 let mut merged = cached;
1606 for (asset_id, sender_id) in persisted {
1607 merged.entry(asset_id).or_insert(sender_id);
1608 }
1609
1610 if let Some(remote_publishers) = self.remote_publishers.as_ref() {
1611 if let Ok(mut locked) = remote_publishers.lock() {
1612 for (asset_id, sender_id) in &merged {
1613 locked.entry(asset_id.clone()).or_insert(sender_id.clone());
1614 }
1615 }
1616 }
1617
1618 merged
1619 }
1620
1621 fn record_reuse_settlement(&self, publisher_id: Option<&str>, success: bool) {
1622 let Some(publisher_id) = publisher_id else {
1623 return;
1624 };
1625 let Some(ledger) = self.economics.as_ref() else {
1626 return;
1627 };
1628 if let Ok(mut locked) = ledger.lock() {
1629 locked.settle_remote_reuse(publisher_id, success, &self.stake_policy);
1630 }
1631 }
1632
    /// Persists a replay validation failure, asks the governor whether the
    /// gene should be revoked, and — on a revoke verdict — appends the
    /// revocation events and quarantines every capsule of the gene.
    fn record_replay_validation_failure(
        &self,
        best: &GeneCandidate,
        capsule: &Capsule,
        validation: &ValidationPlan,
        report: &ValidationReport,
    ) -> Result<(), ReplayError> {
        let projection = projection_snapshot(self.store.as_ref())
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        // Current/peak confidence plus staleness feed the governor's decision.
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            Self::confidence_context(&projection, &best.gene.id);

        self.store
            .append_event(EvolutionEvent::ValidationFailed {
                mutation_id: capsule.mutation_id.clone(),
                report: report.to_snapshot(&validation.profile),
                gene_id: Some(best.gene.id.clone()),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;

        // Failure count includes the event appended just above.
        let replay_failures = self.replay_failure_count(&best.gene.id)?;
        let governor_decision = self.governor.evaluate(GovernorInput {
            // A known remote publisher marks the candidate as remote-sourced.
            candidate_source: if self.publisher_for_capsule(&capsule.id).is_some() {
                CandidateSource::Remote
            } else {
                CandidateSource::Local
            },
            success_count: 0,
            blast_radius: BlastRadius {
                files_changed: capsule.outcome.changed_files.len(),
                lines_changed: capsule.outcome.lines_changed,
            },
            replay_failures,
            recent_mutation_ages_secs: Vec::new(),
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        if matches!(governor_decision.target_state, AssetState::Revoked) {
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: best.gene.id.clone(),
                    state: AssetState::Revoked,
                    reason: governor_decision.reason.clone(),
                    reason_code: governor_decision.reason_code.clone(),
                    evidence: Some(TransitionEvidence {
                        replay_attempts: Some(replay_failures),
                        replay_successes: None,
                        replay_success_rate: None,
                        environment_match_factor: None,
                        decayed_confidence: Some(current_confidence),
                        // Undefined (None) when there is no positive peak.
                        confidence_decay_ratio: if historical_peak_confidence > 0.0 {
                            Some((current_confidence / historical_peak_confidence).clamp(0.0, 1.0))
                        } else {
                            None
                        },
                        summary: Some(format!(
                            "phase=replay_failure_revocation; replay_failures={replay_failures}; current_confidence={:.3}; historical_peak_confidence={:.3}",
                            current_confidence, historical_peak_confidence
                        )),
                    }),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: best.gene.id.clone(),
                    reason: governor_decision.reason,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            // Quarantine all capsules of the revoked gene, not just the one
            // that failed.
            for related in &best.capsules {
                self.store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: related.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
        }

        Ok(())
    }
1714
1715 fn confidence_context(
1716 projection: &EvolutionProjection,
1717 gene_id: &str,
1718 ) -> (f32, f32, Option<u64>) {
1719 let peak_confidence = projection
1720 .capsules
1721 .iter()
1722 .filter(|capsule| capsule.gene_id == gene_id)
1723 .map(|capsule| capsule.confidence)
1724 .fold(0.0_f32, f32::max);
1725 let age_secs = projection
1726 .last_updated_at
1727 .get(gene_id)
1728 .and_then(|timestamp| Self::seconds_since_timestamp(timestamp, Utc::now()));
1729 (peak_confidence, peak_confidence, age_secs)
1730 }
1731
1732 fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
1733 let parsed = DateTime::parse_from_rfc3339(timestamp)
1734 .ok()?
1735 .with_timezone(&Utc);
1736 let elapsed = now.signed_duration_since(parsed);
1737 if elapsed < Duration::zero() {
1738 Some(0)
1739 } else {
1740 u64::try_from(elapsed.num_seconds()).ok()
1741 }
1742 }
1743
1744 fn replay_failure_count(&self, gene_id: &str) -> Result<u64, ReplayError> {
1745 Ok(self
1746 .store
1747 .scan(1)
1748 .map_err(|err| ReplayError::Store(err.to_string()))?
1749 .into_iter()
1750 .filter(|stored| {
1751 matches!(
1752 &stored.event,
1753 EvolutionEvent::ValidationFailed {
1754 gene_id: Some(current_gene_id),
1755 ..
1756 } if current_gene_id == gene_id
1757 )
1758 })
1759 .count() as u64)
1760 }
1761
    /// Gathers shadow-validation evidence for `gene_id`: replay attempt and
    /// success counts from the event log, environment match between the
    /// request and `capsule`, and the capsule's time-decayed confidence.
    fn shadow_transition_evidence(
        &self,
        gene_id: &str,
        capsule: &Capsule,
        input_env: &EnvFingerprint,
    ) -> Result<ShadowTransitionEvidence, ReplayError> {
        let events = self
            .store
            .scan(1)
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        // Single pass: a pass counts as attempt+success, a failure as attempt
        // only; events tagged with other genes (or untagged) are ignored.
        let (replay_attempts, replay_successes) = events.iter().fold(
            (0_u64, 0_u64),
            |(attempts, successes), stored| match &stored.event {
                EvolutionEvent::ValidationPassed {
                    gene_id: Some(current_gene_id),
                    ..
                } if current_gene_id == gene_id => (attempts + 1, successes + 1),
                EvolutionEvent::ValidationFailed {
                    gene_id: Some(current_gene_id),
                    ..
                } if current_gene_id == gene_id => (attempts + 1, successes),
                _ => (attempts, successes),
            },
        );
        let replay_success_rate = safe_ratio(replay_successes, replay_attempts) as f32;
        let environment_match_factor = replay_environment_match_factor(input_env, &capsule.env);
        let projection = projection_snapshot(self.store.as_ref())
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        // Gene staleness drives confidence decay; a missing or unparseable
        // timestamp leaves the decay input as None.
        let age_secs = projection
            .last_updated_at
            .get(gene_id)
            .and_then(|timestamp| Self::seconds_since_timestamp(timestamp, Utc::now()));
        let decayed_confidence = decayed_replay_confidence(capsule.confidence, age_secs);
        // Decay ratio is 0.0 (not 1.0) when the capsule has no positive
        // confidence to decay from.
        let confidence_decay_ratio = if capsule.confidence > 0.0 {
            (decayed_confidence / capsule.confidence).clamp(0.0, 1.0)
        } else {
            0.0
        };

        Ok(ShadowTransitionEvidence {
            replay_attempts,
            replay_successes,
            replay_success_rate,
            environment_match_factor,
            decayed_confidence,
            confidence_decay_ratio,
        })
    }
1810}
1811
/// Replay metrics gathered for a gene while a remote asset is in shadow
/// validation; consumed by the promotion gate and transition-evidence records.
#[derive(Clone, Debug)]
struct ShadowTransitionEvidence {
    /// Local replay validations observed for the gene (passes + failures).
    replay_attempts: u64,
    /// Subset of attempts that passed validation.
    replay_successes: u64,
    /// successes / attempts (0.0 when there were no attempts).
    replay_success_rate: f32,
    /// How closely the request environment matches the capsule's environment.
    environment_match_factor: f32,
    /// Capsule confidence after time decay.
    decayed_confidence: f32,
    /// decayed / raw confidence, clamped to [0, 1].
    confidence_decay_ratio: f32,
}
1821
impl ShadowTransitionEvidence {
    /// Converts the collected shadow metrics into the store-level
    /// `TransitionEvidence` shape, attaching a pre-rendered summary line.
    fn to_transition_evidence(&self, summary: String) -> TransitionEvidence {
        TransitionEvidence {
            replay_attempts: Some(self.replay_attempts),
            replay_successes: Some(self.replay_successes),
            replay_success_rate: Some(self.replay_success_rate),
            environment_match_factor: Some(self.environment_match_factor),
            decayed_confidence: Some(self.decayed_confidence),
            confidence_decay_ratio: Some(self.confidence_decay_ratio),
            summary: Some(summary),
        }
    }
}
1835
1836fn shadow_promotion_gate_passed(evidence: &ShadowTransitionEvidence) -> bool {
1837 evidence.replay_attempts >= SHADOW_PROMOTION_MIN_REPLAY_ATTEMPTS
1838 && evidence.replay_success_rate >= SHADOW_PROMOTION_MIN_SUCCESS_RATE
1839 && evidence.environment_match_factor >= SHADOW_PROMOTION_MIN_ENV_MATCH
1840 && evidence.decayed_confidence >= SHADOW_PROMOTION_MIN_DECAYED_CONFIDENCE
1841}
1842
1843fn shadow_evidence_summary(
1844 evidence: &ShadowTransitionEvidence,
1845 promoted: bool,
1846 phase: &str,
1847) -> String {
1848 format!(
1849 "phase={phase}; replay_attempts={}; replay_successes={}; replay_success_rate={:.3}; environment_match_factor={:.3}; decayed_confidence={:.3}; confidence_decay_ratio={:.3}; promote={promoted}",
1850 evidence.replay_attempts,
1851 evidence.replay_successes,
1852 evidence.replay_success_rate,
1853 evidence.environment_match_factor,
1854 evidence.decayed_confidence,
1855 evidence.confidence_decay_ratio,
1856 )
1857}
1858
/// A promoted gene whose time-decayed replay confidence fell below the
/// replay floor and therefore requires revalidation.
#[derive(Clone, Debug, PartialEq)]
struct ConfidenceRevalidationTarget {
    /// Gene to quarantine pending revalidation.
    gene_id: String,
    /// Ids of the gene's promoted capsules (quarantined alongside it).
    capsule_ids: Vec<String>,
    /// Highest raw confidence among the promoted capsules.
    peak_confidence: f32,
    /// Highest time-decayed confidence among the promoted capsules.
    decayed_confidence: f32,
}
1866
1867fn stale_replay_revalidation_targets(
1868 projection: &EvolutionProjection,
1869 now: DateTime<Utc>,
1870) -> Vec<ConfidenceRevalidationTarget> {
1871 projection
1872 .genes
1873 .iter()
1874 .filter(|gene| gene.state == AssetState::Promoted)
1875 .filter_map(|gene| {
1876 let promoted_capsules = projection
1877 .capsules
1878 .iter()
1879 .filter(|capsule| {
1880 capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
1881 })
1882 .collect::<Vec<_>>();
1883 if promoted_capsules.is_empty() {
1884 return None;
1885 }
1886 let age_secs = projection
1887 .last_updated_at
1888 .get(&gene.id)
1889 .and_then(|timestamp| seconds_since_timestamp_for_confidence(timestamp, now));
1890 let decayed_confidence = promoted_capsules
1891 .iter()
1892 .map(|capsule| decayed_replay_confidence(capsule.confidence, age_secs))
1893 .fold(0.0_f32, f32::max);
1894 if decayed_confidence >= MIN_REPLAY_CONFIDENCE {
1895 return None;
1896 }
1897 let peak_confidence = promoted_capsules
1898 .iter()
1899 .map(|capsule| capsule.confidence)
1900 .fold(0.0_f32, f32::max);
1901 Some(ConfidenceRevalidationTarget {
1902 gene_id: gene.id.clone(),
1903 capsule_ids: promoted_capsules
1904 .into_iter()
1905 .map(|capsule| capsule.id.clone())
1906 .collect(),
1907 peak_confidence,
1908 decayed_confidence,
1909 })
1910 })
1911 .collect()
1912}
1913
1914fn seconds_since_timestamp_for_confidence(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
1915 let parsed = DateTime::parse_from_rfc3339(timestamp)
1916 .ok()?
1917 .with_timezone(&Utc);
1918 let elapsed = now.signed_duration_since(parsed);
1919 if elapsed < Duration::zero() {
1920 Some(0)
1921 } else {
1922 u64::try_from(elapsed.num_seconds()).ok()
1923 }
1924}
1925
/// Errors surfaced by the evolution kernel and network-node entry points.
#[derive(Debug, Error)]
pub enum EvoKernelError {
    /// Sandbox execution or mutation application failed.
    #[error("sandbox error: {0}")]
    Sandbox(String),
    /// Validation infrastructure failed before producing a report.
    #[error("validation error: {0}")]
    Validation(String),
    /// Validation completed but the report indicates failure.
    #[error("validation failed")]
    ValidationFailed(ValidationReport),
    /// The underlying evolution store rejected a read or append.
    #[error("store error: {0}")]
    Store(String),
}
1937
/// Result of capturing a mutation as a reusable evolution asset.
#[derive(Clone, Debug)]
pub struct CaptureOutcome {
    /// Capsule persisted for future replay.
    pub capsule: Capsule,
    /// Gene the capsule belongs to.
    pub gene: Gene,
    /// Governor verdict produced during capture.
    pub governor_decision: GovernorDecision,
}
1944
/// Outcome of importing remote evolution assets into the local store.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ImportOutcome {
    /// Ids of the assets imported by this call.
    pub imported_asset_ids: Vec<String>,
    /// Whether the import was accepted.
    pub accepted: bool,
    /// Cursor for the next incremental sync, when more data remains.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub next_cursor: Option<String>,
    /// Token allowing the sender to resume an interrupted sync.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resume_token: Option<String>,
    /// Audit record describing the sync operation.
    #[serde(default)]
    pub sync_audit: SyncAudit,
}
1956
/// Point-in-time metrics derived from the evolution event log, covering
/// replay outcomes, ROI accounting, mutation lifecycle, and promotion state.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct EvolutionMetricsSnapshot {
    /// Total replay attempts observed.
    pub replay_attempts_total: u64,
    /// Replay attempts that succeeded.
    pub replay_success_total: u64,
    /// replay_success_total / replay_attempts_total.
    pub replay_success_rate: f64,
    /// Confidence-decay revalidation passes recorded.
    pub confidence_revalidations_total: u64,
    /// Replays that avoided fresh reasoning.
    pub replay_reasoning_avoided_total: u64,
    /// Reasoning tokens avoided across all replay hits.
    pub reasoning_avoided_tokens_total: u64,
    /// Tokens booked as planner-fallback cost across replay misses.
    pub replay_fallback_cost_total: u64,
    /// Aggregate replay return-on-investment.
    pub replay_roi: f64,
    /// Per-task-class replay metric breakdown.
    pub replay_task_classes: Vec<ReplayTaskClassMetrics>,
    /// Per-source (publisher) replay ROI breakdown.
    pub replay_sources: Vec<ReplaySourceRoiMetrics>,
    /// Mutations declared in the log.
    pub mutation_declared_total: u64,
    /// Mutations that reached promotion.
    pub promoted_mutations_total: u64,
    /// promoted_mutations_total / mutation_declared_total.
    pub promotion_ratio: f64,
    /// Gene revocations recorded.
    pub gene_revocations_total: u64,
    /// Mutations declared within the trailing hour.
    pub mutation_velocity_last_hour: u64,
    /// Revocations within the trailing hour.
    pub revoke_frequency_last_hour: u64,
    /// Genes currently in the promoted state.
    pub promoted_genes: u64,
    /// Capsules currently in the promoted state.
    pub promoted_capsules: u64,
    /// Sequence number of the last event folded into this snapshot.
    pub last_event_seq: u64,
}
1979
/// Minimal health view derived from `EvolutionMetricsSnapshot`.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct EvolutionHealthSnapshot {
    /// Overall health label.
    pub status: String,
    /// Sequence number of the last event seen.
    pub last_event_seq: u64,
    /// Genes currently promoted.
    pub promoted_genes: u64,
    /// Capsules currently promoted.
    pub promoted_capsules: u64,
}
1987
/// Thin facade over an `EvolutionStore` exposing network sync, metrics, and
/// release-gate entry points.
#[derive(Clone)]
pub struct EvolutionNetworkNode {
    /// Shared append-only evolution event store.
    pub store: Arc<dyn EvolutionStore>,
}
1992
impl EvolutionNetworkNode {
    /// Wraps an existing evolution store.
    pub fn new(store: Arc<dyn EvolutionStore>) -> Self {
        Self { store }
    }

    /// Convenience constructor backed by a JSONL store at the default root.
    pub fn with_default_store() -> Self {
        Self {
            store: Arc::new(JsonlEvolutionStore::new(default_store_root())),
        }
    }

    /// Imports assets pushed by a remote peer, resuming from the sender's
    /// cursor or resume token when one is provided.
    pub fn accept_publish_request(
        &self,
        request: &PublishRequest,
    ) -> Result<ImportOutcome, EvoKernelError> {
        let requested_cursor = resolve_requested_cursor(
            &request.sender_id,
            request.since_cursor.as_deref(),
            request.resume_token.as_deref(),
        )?;
        import_remote_envelope_into_store(
            self.store.as_ref(),
            &EvolutionEnvelope::publish(request.sender_id.clone(), request.assets.clone()),
            None,
            requested_cursor,
        )
    }

    /// Seeds the store with the built-in experience assets for `sender_id`.
    pub fn ensure_builtin_experience_assets(
        &self,
        sender_id: impl Into<String>,
    ) -> Result<ImportOutcome, EvoKernelError> {
        ensure_builtin_experience_assets_in_store(self.store.as_ref(), sender_id.into())
    }

    /// Records an externally reported experience (signals, strategy steps,
    /// validation steps) against a gene.
    pub fn record_reported_experience(
        &self,
        sender_id: impl Into<String>,
        gene_id: impl Into<String>,
        signals: Vec<String>,
        strategy: Vec<String>,
        validation: Vec<String>,
    ) -> Result<ImportOutcome, EvoKernelError> {
        record_reported_experience_in_store(
            self.store.as_ref(),
            sender_id.into(),
            gene_id.into(),
            signals,
            strategy,
            validation,
        )
    }

    /// Exports locally promoted assets as a publish envelope for peers.
    pub fn publish_local_assets(
        &self,
        sender_id: impl Into<String>,
    ) -> Result<EvolutionEnvelope, EvoKernelError> {
        export_promoted_assets_from_store(self.store.as_ref(), sender_id)
    }

    /// Answers a peer's fetch query from the local store.
    pub fn fetch_assets(
        &self,
        responder_id: impl Into<String>,
        query: &FetchQuery,
    ) -> Result<FetchResponse, EvoKernelError> {
        fetch_assets_from_store(self.store.as_ref(), responder_id, query)
    }

    /// Applies a revocation notice against the local store.
    pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
        revoke_assets_in_store(self.store.as_ref(), notice)
    }

    /// Computes the current metrics snapshot from the event log.
    pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
        evolution_metrics_snapshot(self.store.as_ref())
    }

    /// Summarizes replay ROI over the trailing `window_seconds` for release gating.
    pub fn replay_roi_release_gate_summary(
        &self,
        window_seconds: u64,
    ) -> Result<ReplayRoiWindowSummary, EvoKernelError> {
        replay_roi_release_gate_summary(self.store.as_ref(), window_seconds)
    }

    /// Pretty-printed JSON rendering of `replay_roi_release_gate_summary`.
    pub fn render_replay_roi_release_gate_summary_json(
        &self,
        window_seconds: u64,
    ) -> Result<String, EvoKernelError> {
        let summary = self.replay_roi_release_gate_summary(window_seconds)?;
        serde_json::to_string_pretty(&summary)
            .map_err(|err| EvoKernelError::Validation(err.to_string()))
    }

    /// Evaluates the windowed ROI summary against release-gate thresholds.
    pub fn replay_roi_release_gate_contract(
        &self,
        window_seconds: u64,
        thresholds: ReplayRoiReleaseGateThresholds,
    ) -> Result<ReplayRoiReleaseGateContract, EvoKernelError> {
        let summary = self.replay_roi_release_gate_summary(window_seconds)?;
        Ok(replay_roi_release_gate_contract(&summary, thresholds))
    }

    /// Pretty-printed JSON rendering of `replay_roi_release_gate_contract`.
    pub fn render_replay_roi_release_gate_contract_json(
        &self,
        window_seconds: u64,
        thresholds: ReplayRoiReleaseGateThresholds,
    ) -> Result<String, EvoKernelError> {
        let contract = self.replay_roi_release_gate_contract(window_seconds, thresholds)?;
        serde_json::to_string_pretty(&contract)
            .map_err(|err| EvoKernelError::Validation(err.to_string()))
    }

    /// Prometheus text rendering of the metrics snapshot plus derived health.
    pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
        self.metrics_snapshot().map(|snapshot| {
            let health = evolution_health_snapshot(&snapshot);
            render_evolution_metrics_prometheus(&snapshot, &health)
        })
    }

    /// Health view derived from the metrics snapshot.
    pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
        self.metrics_snapshot()
            .map(|snapshot| evolution_health_snapshot(&snapshot))
    }
}
2116
/// Evolution-enabled kernel: couples the execution kernel with sandboxing,
/// validation, the evolution store/selector, governance, and economics.
pub struct EvoKernel<S: KernelState> {
    /// Underlying execution kernel.
    pub kernel: Arc<Kernel<S>>,
    /// Applies mutations in isolation and produces receipts.
    pub sandbox: Arc<dyn Sandbox>,
    /// Runs validation plans against sandbox receipts.
    pub validator: Arc<dyn Validator>,
    /// Append-only evolution event store.
    pub store: Arc<dyn EvolutionStore>,
    /// Ranks replay candidates for selector input.
    pub selector: Arc<dyn Selector>,
    /// Decides asset-state transitions (promote/quarantine/revoke).
    pub governor: Arc<dyn Governor>,
    /// Shared EVU ledger for replay/reuse economics.
    pub economics: Arc<Mutex<EvuLedger>>,
    /// Cache mapping asset (capsule) ids to remote publisher ids.
    pub remote_publishers: Arc<Mutex<BTreeMap<String, String>>>,
    /// Staking rules applied when settling remote reuse.
    pub stake_policy: StakePolicy,
    /// Policy governing sandboxed mutation application.
    pub sandbox_policy: SandboxPolicy,
    /// Default validation plan.
    pub validation_plan: ValidationPlan,
}
2130
2131impl<S: KernelState> EvoKernel<S> {
2132 fn recent_prior_mutation_ages_secs(
2133 &self,
2134 exclude_mutation_id: Option<&str>,
2135 ) -> Result<Vec<u64>, EvolutionError> {
2136 let now = Utc::now();
2137 let mut ages = self
2138 .store
2139 .scan(1)?
2140 .into_iter()
2141 .filter_map(|stored| match stored.event {
2142 EvolutionEvent::MutationDeclared { mutation }
2143 if exclude_mutation_id != Some(mutation.intent.id.as_str()) =>
2144 {
2145 Self::seconds_since_timestamp(&stored.timestamp, now)
2146 }
2147 _ => None,
2148 })
2149 .collect::<Vec<_>>();
2150 ages.sort_unstable();
2151 Ok(ages)
2152 }
2153
2154 fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
2155 let parsed = DateTime::parse_from_rfc3339(timestamp)
2156 .ok()?
2157 .with_timezone(&Utc);
2158 let elapsed = now.signed_duration_since(parsed);
2159 if elapsed < Duration::zero() {
2160 Some(0)
2161 } else {
2162 u64::try_from(elapsed.num_seconds()).ok()
2163 }
2164 }
2165
2166 pub fn new(
2167 kernel: Arc<Kernel<S>>,
2168 sandbox: Arc<dyn Sandbox>,
2169 validator: Arc<dyn Validator>,
2170 store: Arc<dyn EvolutionStore>,
2171 ) -> Self {
2172 let selector: Arc<dyn Selector> = Arc::new(StoreBackedSelector::new(store.clone()));
2173 Self {
2174 kernel,
2175 sandbox,
2176 validator,
2177 store,
2178 selector,
2179 governor: Arc::new(DefaultGovernor::default()),
2180 economics: Arc::new(Mutex::new(EvuLedger::default())),
2181 remote_publishers: Arc::new(Mutex::new(BTreeMap::new())),
2182 stake_policy: StakePolicy::default(),
2183 sandbox_policy: SandboxPolicy::oris_default(),
2184 validation_plan: ValidationPlan::oris_default(),
2185 }
2186 }
2187
2188 pub fn with_selector(mut self, selector: Arc<dyn Selector>) -> Self {
2189 self.selector = selector;
2190 self
2191 }
2192
2193 pub fn with_sandbox_policy(mut self, policy: SandboxPolicy) -> Self {
2194 self.sandbox_policy = policy;
2195 self
2196 }
2197
2198 pub fn with_governor(mut self, governor: Arc<dyn Governor>) -> Self {
2199 self.governor = governor;
2200 self
2201 }
2202
2203 pub fn with_economics(mut self, economics: Arc<Mutex<EvuLedger>>) -> Self {
2204 self.economics = economics;
2205 self
2206 }
2207
2208 pub fn with_stake_policy(mut self, policy: StakePolicy) -> Self {
2209 self.stake_policy = policy;
2210 self
2211 }
2212
2213 pub fn with_validation_plan(mut self, plan: ValidationPlan) -> Self {
2214 self.validation_plan = plan;
2215 self
2216 }
2217
2218 pub fn select_candidates(&self, input: &SelectorInput) -> Vec<GeneCandidate> {
2219 let executor = StoreReplayExecutor {
2220 sandbox: self.sandbox.clone(),
2221 validator: self.validator.clone(),
2222 store: self.store.clone(),
2223 selector: self.selector.clone(),
2224 governor: self.governor.clone(),
2225 economics: Some(self.economics.clone()),
2226 remote_publishers: Some(self.remote_publishers.clone()),
2227 stake_policy: self.stake_policy.clone(),
2228 };
2229 executor.collect_replay_candidates(input).candidates
2230 }
2231
    /// Seed the evolution store with built-in templates when no genes exist
    /// yet. Each seed is recorded as a declared→signals→projected event
    /// sequence and then quarantined pending local validation. Returns a
    /// report describing whether seeding happened and how many assets landed.
    pub fn bootstrap_if_empty(&self, run_id: &RunId) -> Result<BootstrapReport, EvoKernelError> {
        let projection = projection_snapshot(self.store.as_ref())?;
        // Any existing gene means the store is already populated: no-op.
        if !projection.genes.is_empty() {
            return Ok(BootstrapReport::default());
        }

        let templates = built_in_seed_templates();
        for template in &templates {
            let mutation = build_seed_mutation(template);
            let extracted = extract_seed_signals(template);
            let gene = build_bootstrap_gene(template, &extracted)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
            let capsule = build_bootstrap_capsule(run_id, template, &mutation, &gene)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;

            // Event order mirrors the live capture flow so projections built
            // from the log treat seeds like ordinary captured mutations.
            self.store
                .append_event(EvolutionEvent::MutationDeclared {
                    mutation: mutation.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::SignalsExtracted {
                    mutation_id: mutation.intent.id.clone(),
                    hash: extracted.hash.clone(),
                    signals: extracted.values.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
                .map_err(store_err)?;
            // Seeds are never promoted directly; they must earn promotion
            // through local validation first.
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: gene.id.clone(),
                    state: AssetState::Quarantined,
                    reason: "bootstrap seeds require local validation before replay".into(),
                    reason_code: TransitionReasonCode::DowngradeBootstrapRequiresLocalValidation,
                    evidence: None,
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleCommitted {
                    capsule: capsule.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id,
                })
                .map_err(store_err)?;
        }

        Ok(BootstrapReport {
            seeded: true,
            genes_added: templates.len(),
            capsules_added: templates.len(),
        })
    }
2289
2290 pub async fn capture_successful_mutation(
2291 &self,
2292 run_id: &RunId,
2293 mutation: PreparedMutation,
2294 ) -> Result<Capsule, EvoKernelError> {
2295 Ok(self
2296 .capture_mutation_with_governor(run_id, mutation)
2297 .await?
2298 .capsule)
2299 }
2300
    /// Full capture pipeline for a prepared mutation: declare → sandbox-apply
    /// → validate → extract signals → project gene → governor decision →
    /// commit capsule. Every step is journaled to the evolution store, and
    /// every failure path records a `MutationRejected` event with a
    /// normalized failure contract before surfacing the error.
    pub async fn capture_mutation_with_governor(
        &self,
        run_id: &RunId,
        mutation: PreparedMutation,
    ) -> Result<CaptureOutcome, EvoKernelError> {
        // Journal the intent first so even failed attempts are auditable.
        self.store
            .append_event(EvolutionEvent::MutationDeclared {
                mutation: mutation.clone(),
            })
            .map_err(store_err)?;

        // Apply the patch inside the sandbox under the configured policy.
        let receipt = match self.sandbox.apply(&mutation, &self.sandbox_policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                let message = err.to_string();
                let contract = mutation_needed_contract_for_error_message(&message);
                self.store
                    .append_event(EvolutionEvent::MutationRejected {
                        mutation_id: mutation.intent.id.clone(),
                        reason: contract.failure_reason,
                        reason_code: Some(
                            mutation_needed_reason_code_key(contract.reason_code).to_string(),
                        ),
                        recovery_hint: Some(contract.recovery_hint),
                        fail_closed: contract.fail_closed,
                    })
                    .map_err(store_err)?;
                return Err(EvoKernelError::Sandbox(message));
            }
        };

        self.store
            .append_event(EvolutionEvent::MutationApplied {
                mutation_id: mutation.intent.id.clone(),
                patch_hash: receipt.patch_hash.clone(),
                changed_files: receipt
                    .changed_files
                    .iter()
                    .map(|path| path.to_string_lossy().to_string())
                    .collect(),
            })
            .map_err(store_err)?;

        // Run the validation plan; an execution error (validator could not
        // run) is distinct from a validation failure (ran and failed).
        let report = match self.validator.run(&receipt, &self.validation_plan).await {
            Ok(report) => report,
            Err(err) => {
                let message = format!("mutation-needed validation execution error: {err}");
                let contract = mutation_needed_contract_for_error_message(&message);
                self.store
                    .append_event(EvolutionEvent::MutationRejected {
                        mutation_id: mutation.intent.id.clone(),
                        reason: contract.failure_reason,
                        reason_code: Some(
                            mutation_needed_reason_code_key(contract.reason_code).to_string(),
                        ),
                        recovery_hint: Some(contract.recovery_hint),
                        fail_closed: contract.fail_closed,
                    })
                    .map_err(store_err)?;
                return Err(EvoKernelError::Validation(message));
            }
        };
        if !report.success {
            self.store
                .append_event(EvolutionEvent::ValidationFailed {
                    mutation_id: mutation.intent.id.clone(),
                    report: report.to_snapshot(&self.validation_plan.profile),
                    gene_id: None,
                })
                .map_err(store_err)?;
            let contract = mutation_needed_contract_for_validation_failure(
                &self.validation_plan.profile,
                &report,
            );
            self.store
                .append_event(EvolutionEvent::MutationRejected {
                    mutation_id: mutation.intent.id.clone(),
                    reason: contract.failure_reason,
                    reason_code: Some(
                        mutation_needed_reason_code_key(contract.reason_code).to_string(),
                    ),
                    recovery_hint: Some(contract.recovery_hint),
                    fail_closed: contract.fail_closed,
                })
                .map_err(store_err)?;
            return Err(EvoKernelError::ValidationFailed(report));
        }

        self.store
            .append_event(EvolutionEvent::ValidationPassed {
                mutation_id: mutation.intent.id.clone(),
                report: report.to_snapshot(&self.validation_plan.profile),
                gene_id: None,
            })
            .map_err(store_err)?;

        // Mine deterministic signals from the diff, intent, file list, and
        // validation output so gene matching is reproducible.
        let extracted_signals = extract_deterministic_signals(&SignalExtractionInput {
            patch_diff: mutation.artifact.payload.clone(),
            intent: mutation.intent.intent.clone(),
            expected_effect: mutation.intent.expected_effect.clone(),
            declared_signals: mutation.intent.signals.clone(),
            changed_files: receipt
                .changed_files
                .iter()
                .map(|path| path.to_string_lossy().to_string())
                .collect(),
            validation_success: report.success,
            validation_logs: report.logs.clone(),
            stage_outputs: report
                .stages
                .iter()
                .flat_map(|stage| [stage.stdout.clone(), stage.stderr.clone()])
                .filter(|value| !value.is_empty())
                .collect(),
        });
        self.store
            .append_event(EvolutionEvent::SignalsExtracted {
                mutation_id: mutation.intent.id.clone(),
                hash: extracted_signals.hash.clone(),
                signals: extracted_signals.values.clone(),
            })
            .map_err(store_err)?;

        // Gather the inputs the governor weighs: projection state, blast
        // radius, mutation cadence, and prior confidence for this gene.
        let projection = projection_snapshot(self.store.as_ref())?;
        let blast_radius = compute_blast_radius(&mutation.artifact.payload);
        let recent_mutation_ages_secs = self
            .recent_prior_mutation_ages_secs(Some(mutation.intent.id.as_str()))
            .map_err(store_err)?;
        let mut gene = derive_gene(
            &mutation,
            &receipt,
            &self.validation_plan.profile,
            &extracted_signals.values,
        );
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            StoreReplayExecutor::confidence_context(&projection, &gene.id);
        // Prior capsules for this gene count as successes; this capture adds one.
        let success_count = projection
            .genes
            .iter()
            .find(|existing| existing.id == gene.id)
            .map(|existing| {
                projection
                    .capsules
                    .iter()
                    .filter(|capsule| capsule.gene_id == existing.id)
                    .count() as u64
            })
            .unwrap_or(0)
            + 1;
        let governor_decision = self.governor.evaluate(GovernorInput {
            candidate_source: CandidateSource::Local,
            success_count,
            blast_radius: blast_radius.clone(),
            replay_failures: 0,
            recent_mutation_ages_secs,
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        // Project the gene in the governor-chosen state, then record the
        // evaluation and any promote/revoke transition events.
        gene.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
            .map_err(store_err)?;
        self.store
            .append_event(EvolutionEvent::PromotionEvaluated {
                gene_id: gene.id.clone(),
                state: governor_decision.target_state.clone(),
                reason: governor_decision.reason.clone(),
                reason_code: governor_decision.reason_code.clone(),
                evidence: None,
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Promoted) {
            self.store
                .append_event(EvolutionEvent::GenePromoted {
                    gene_id: gene.id.clone(),
                })
                .map_err(store_err)?;
        }
        if matches!(governor_decision.target_state, AssetState::Revoked) {
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: gene.id.clone(),
                    reason: governor_decision.reason.clone(),
                })
                .map_err(store_err)?;
        }
        if let Some(spec_id) = &mutation.intent.spec_id {
            self.store
                .append_event(EvolutionEvent::SpecLinked {
                    mutation_id: mutation.intent.id.clone(),
                    spec_id: spec_id.clone(),
                })
                .map_err(store_err)?;
        }

        // Commit the capsule in the same state the governor chose for the gene.
        let mut capsule = build_capsule(
            run_id,
            &mutation,
            &receipt,
            &report,
            &self.validation_plan.profile,
            &gene,
            &blast_radius,
        )
        .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
        capsule.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::CapsuleCommitted {
                capsule: capsule.clone(),
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Quarantined) {
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id.clone(),
                })
                .map_err(store_err)?;
        }

        Ok(CaptureOutcome {
            capsule,
            gene,
            governor_decision,
        })
    }
2528
2529 pub async fn capture_from_proposal(
2530 &self,
2531 run_id: &RunId,
2532 proposal: &AgentMutationProposal,
2533 diff_payload: String,
2534 base_revision: Option<String>,
2535 ) -> Result<CaptureOutcome, EvoKernelError> {
2536 let intent = MutationIntent {
2537 id: next_id("proposal"),
2538 intent: proposal.intent.clone(),
2539 target: MutationTarget::Paths {
2540 allow: proposal.files.clone(),
2541 },
2542 expected_effect: proposal.expected_effect.clone(),
2543 risk: RiskLevel::Low,
2544 signals: proposal.files.clone(),
2545 spec_id: None,
2546 };
2547 self.capture_mutation_with_governor(
2548 run_id,
2549 prepare_mutation(intent, diff_payload, base_revision),
2550 )
2551 .await
2552 }
2553
2554 pub fn feedback_for_agent(outcome: &CaptureOutcome) -> ExecutionFeedback {
2555 ExecutionFeedback {
2556 accepted: !matches!(outcome.governor_decision.target_state, AssetState::Revoked),
2557 asset_state: Some(format!("{:?}", outcome.governor_decision.target_state)),
2558 summary: outcome.governor_decision.reason.clone(),
2559 }
2560 }
2561
    /// Translate a replay decision into agent-facing feedback. When a capsule
    /// was reused the planner is skipped; otherwise a normalized fallback
    /// contract explains why the planner must run, including a repair hint
    /// and a machine-readable reason code.
    pub fn replay_feedback_for_agent(
        signals: &[String],
        decision: &ReplayDecision,
    ) -> ReplayFeedback {
        let (task_class_id, task_label) = replay_task_descriptor(signals);
        let planner_directive = if decision.used_capsule {
            ReplayPlannerDirective::SkipPlanner
        } else {
            ReplayPlannerDirective::PlanFallback
        };
        // A reused capsule avoids exactly one planner reasoning pass.
        let reasoning_steps_avoided = u64::from(decision.used_capsule);
        // Use the first detect-evidence mismatch (if any) to hint the code.
        let reason_code_hint = decision
            .detect_evidence
            .mismatch_reasons
            .first()
            .and_then(|reason| infer_replay_fallback_reason_code(reason));
        // Contract is only populated when the decision fell back to planning.
        let fallback_contract = normalize_replay_fallback_contract(
            &planner_directive,
            decision
                .fallback_to_planner
                .then_some(decision.reason.as_str()),
            reason_code_hint,
            None,
            None,
            None,
        );
        let summary = if decision.used_capsule {
            format!("reused prior capsule for task class '{task_label}'; skip planner")
        } else {
            format!(
                "planner fallback required for task class '{task_label}': {}",
                decision.reason
            )
        };

        ReplayFeedback {
            used_capsule: decision.used_capsule,
            capsule_id: decision.capsule_id.clone(),
            planner_directive,
            reasoning_steps_avoided,
            fallback_reason: fallback_contract
                .as_ref()
                .map(|contract| contract.fallback_reason.clone()),
            reason_code: fallback_contract
                .as_ref()
                .map(|contract| contract.reason_code),
            repair_hint: fallback_contract
                .as_ref()
                .map(|contract| contract.repair_hint.clone()),
            next_action: fallback_contract
                .as_ref()
                .map(|contract| contract.next_action),
            confidence: fallback_contract
                .as_ref()
                .map(|contract| contract.confidence),
            task_class_id,
            task_label,
            summary,
        }
    }
2622
    /// Build a failed supervised-devloop outcome from a normalized failure
    /// contract, optionally journaling a `MutationRejected` audit event when
    /// a mutation id is supplied (callers pass `None` when the capture
    /// pipeline already recorded the rejection itself).
    fn mutation_needed_failure_outcome(
        &self,
        request: &SupervisedDevloopRequest,
        task_class: Option<BoundedTaskClass>,
        status: SupervisedDevloopStatus,
        contract: MutationNeededFailureContract,
        mutation_id_for_audit: Option<String>,
    ) -> Result<SupervisedDevloopOutcome, EvoKernelError> {
        if let Some(mutation_id) = mutation_id_for_audit {
            self.store
                .append_event(EvolutionEvent::MutationRejected {
                    mutation_id,
                    reason: contract.failure_reason.clone(),
                    reason_code: Some(
                        mutation_needed_reason_code_key(contract.reason_code).to_string(),
                    ),
                    recovery_hint: Some(contract.recovery_hint.clone()),
                    fail_closed: contract.fail_closed,
                })
                .map_err(store_err)?;
        }
        // Human-readable status tag for the summary line.
        let status_label = match status {
            SupervisedDevloopStatus::AwaitingApproval => "awaiting_approval",
            SupervisedDevloopStatus::RejectedByPolicy => "rejected_by_policy",
            SupervisedDevloopStatus::FailedClosed => "failed_closed",
            SupervisedDevloopStatus::Executed => "executed",
        };
        let reason_code_key = mutation_needed_reason_code_key(contract.reason_code);
        Ok(SupervisedDevloopOutcome {
            task_id: request.task.id.clone(),
            task_class,
            status,
            execution_feedback: None,
            failure_contract: Some(contract.clone()),
            summary: format!(
                "supervised devloop {status_label} task '{}' [{reason_code_key}]: {}",
                request.task.id, contract.failure_reason
            ),
        })
    }
2663
    /// Run one bounded, human-supervised devloop iteration. Gates execute in
    /// order: task must classify into the bounded scope → explicit human
    /// approval → diff byte budget → changed-line budget → sandbox duration
    /// budget → validation timeout budget → full capture pipeline. Each gate
    /// failure returns a normalized failure outcome instead of an `Err`.
    pub async fn run_supervised_devloop(
        &self,
        run_id: &RunId,
        request: &SupervisedDevloopRequest,
        diff_payload: String,
        base_revision: Option<String>,
    ) -> Result<SupervisedDevloopOutcome, EvoKernelError> {
        let audit_mutation_id = mutation_needed_audit_mutation_id(request);
        // Gate 1: only tasks inside the bounded task classes are allowed.
        let task_class = classify_supervised_devloop_request(request);
        let Some(task_class) = task_class else {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "supervised devloop rejected task '{}' because it is an unsupported task outside the bounded scope",
                    request.task.id
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                None,
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        };

        // Gate 2: pause (not fail) until a human has approved the task.
        if !request.approval.approved {
            return Ok(SupervisedDevloopOutcome {
                task_id: request.task.id.clone(),
                task_class: Some(task_class),
                status: SupervisedDevloopStatus::AwaitingApproval,
                execution_feedback: None,
                failure_contract: None,
                summary: format!(
                    "supervised devloop paused task '{}' until explicit human approval is granted",
                    request.task.id
                ),
            });
        }

        // Gate 3: raw diff size budget.
        if diff_payload.len() > MUTATION_NEEDED_MAX_DIFF_BYTES {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed diff payload exceeds bounded byte budget (size={}, max={})",
                    diff_payload.len(),
                    MUTATION_NEEDED_MAX_DIFF_BYTES
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        }

        // Gate 4: changed-line budget — an oversized patch fails closed as
        // unsafe rather than merely being rejected by policy.
        let blast_radius = compute_blast_radius(&diff_payload);
        if blast_radius.lines_changed > MUTATION_NEEDED_MAX_CHANGED_LINES {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed patch exceeds bounded changed-line budget (lines_changed={}, max={})",
                    blast_radius.lines_changed,
                    MUTATION_NEEDED_MAX_CHANGED_LINES
                )),
                Some(MutationNeededFailureReasonCode::UnsafePatch),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::FailedClosed,
                contract,
                Some(audit_mutation_id),
            );
        }

        // Gate 5: the configured sandbox budget must itself be bounded.
        if self.sandbox_policy.max_duration_ms > MUTATION_NEEDED_MAX_SANDBOX_DURATION_MS {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed sandbox duration budget exceeds bounded policy (configured={}ms, max={}ms)",
                    self.sandbox_policy.max_duration_ms,
                    MUTATION_NEEDED_MAX_SANDBOX_DURATION_MS
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        }

        // Gate 6: same bound applies to the validation timeout budget.
        let validation_budget_ms = validation_plan_timeout_budget_ms(&self.validation_plan);
        if validation_budget_ms > MUTATION_NEEDED_MAX_VALIDATION_BUDGET_MS {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed validation timeout budget exceeds bounded policy (configured={}ms, max={}ms)",
                    validation_budget_ms,
                    MUTATION_NEEDED_MAX_VALIDATION_BUDGET_MS
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        }

        // All gates passed: run the capture pipeline. Failure paths pass
        // `None` for the audit id because capture already journals the
        // rejection event itself.
        let capture = match self
            .capture_from_proposal(run_id, &request.proposal, diff_payload, base_revision)
            .await
        {
            Ok(capture) => capture,
            Err(EvoKernelError::Sandbox(message)) => {
                let contract = mutation_needed_contract_for_error_message(&message);
                let status = mutation_needed_status_from_reason_code(contract.reason_code);
                return self.mutation_needed_failure_outcome(
                    request,
                    Some(task_class),
                    status,
                    contract,
                    None,
                );
            }
            Err(EvoKernelError::ValidationFailed(report)) => {
                let contract = mutation_needed_contract_for_validation_failure(
                    &self.validation_plan.profile,
                    &report,
                );
                let status = mutation_needed_status_from_reason_code(contract.reason_code);
                return self.mutation_needed_failure_outcome(
                    request,
                    Some(task_class),
                    status,
                    contract,
                    None,
                );
            }
            Err(EvoKernelError::Validation(message)) => {
                let contract = mutation_needed_contract_for_error_message(&message);
                let status = mutation_needed_status_from_reason_code(contract.reason_code);
                return self.mutation_needed_failure_outcome(
                    request,
                    Some(task_class),
                    status,
                    contract,
                    None,
                );
            }
            Err(err) => return Err(err),
        };
        let approver = request
            .approval
            .approver
            .as_deref()
            .unwrap_or("unknown approver");

        Ok(SupervisedDevloopOutcome {
            task_id: request.task.id.clone(),
            task_class: Some(task_class),
            status: SupervisedDevloopStatus::Executed,
            execution_feedback: Some(Self::feedback_for_agent(&capture)),
            failure_contract: None,
            summary: format!(
                "supervised devloop executed task '{}' with explicit approval from {approver}",
                request.task.id
            ),
        })
    }
2839 pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
2840 MultiAgentCoordinator::new().coordinate(plan)
2841 }
2842
    /// Export all promoted assets as a signed-by-`sender_id` envelope. When
    /// the envelope is non-empty, an EVU publish stake is reserved under the
    /// configured stake policy; an empty export costs nothing. Fails when the
    /// ledger lock is poisoned or the sender cannot afford the stake.
    pub fn export_promoted_assets(
        &self,
        sender_id: impl Into<String>,
    ) -> Result<EvolutionEnvelope, EvoKernelError> {
        let sender_id = sender_id.into();
        let envelope = export_promoted_assets_from_store(self.store.as_ref(), sender_id.clone())?;
        if !envelope.assets.is_empty() {
            let mut ledger = self
                .economics
                .lock()
                .map_err(|_| EvoKernelError::Validation("economics ledger lock poisoned".into()))?;
            // `None` from the ledger means the stake could not be reserved.
            if ledger
                .reserve_publish_stake(&sender_id, &self.stake_policy)
                .is_none()
            {
                return Err(EvoKernelError::Validation(
                    "insufficient EVU for remote publish".into(),
                ));
            }
        }
        Ok(envelope)
    }
2865
    /// Import a remote evolution envelope into the local store, recording
    /// its publisher in the shared registry. The trailing `None` leaves the
    /// helper's optional argument at its default (see its definition).
    pub fn import_remote_envelope(
        &self,
        envelope: &EvolutionEnvelope,
    ) -> Result<ImportOutcome, EvoKernelError> {
        import_remote_envelope_into_store(
            self.store.as_ref(),
            envelope,
            Some(self.remote_publishers.as_ref()),
            None,
        )
    }
2877
    /// Answer a network fetch query from the local store, attributing the
    /// response to `responder_id`.
    pub fn fetch_assets(
        &self,
        responder_id: impl Into<String>,
        query: &FetchQuery,
    ) -> Result<FetchResponse, EvoKernelError> {
        fetch_assets_from_store(self.store.as_ref(), responder_id, query)
    }
2885
    /// Apply a revocation notice against the local store and return the
    /// (possibly adjusted) notice produced by the store-level helper.
    pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
        revoke_assets_in_store(self.store.as_ref(), notice)
    }
2889
2890 pub async fn replay_or_fallback(
2891 &self,
2892 input: SelectorInput,
2893 ) -> Result<ReplayDecision, EvoKernelError> {
2894 let replay_run_id = next_id("replay");
2895 self.replay_or_fallback_for_run(&replay_run_id, input).await
2896 }
2897
    /// Attempt to replay a prior capsule for `input` under `run_id`; on any
    /// executor error the result is mapped into a `Validation` error. The
    /// executor is wired with this kernel's full collaborator set so replay
    /// attempts are journaled and economics-aware.
    pub async fn replay_or_fallback_for_run(
        &self,
        run_id: &RunId,
        input: SelectorInput,
    ) -> Result<ReplayDecision, EvoKernelError> {
        let executor = StoreReplayExecutor {
            sandbox: self.sandbox.clone(),
            validator: self.validator.clone(),
            store: self.store.clone(),
            selector: self.selector.clone(),
            governor: self.governor.clone(),
            economics: Some(self.economics.clone()),
            remote_publishers: Some(self.remote_publishers.clone()),
            stake_policy: self.stake_policy.clone(),
        };
        executor
            .try_replay_for_run(run_id, &input, &self.sandbox_policy, &self.validation_plan)
            .await
            .map_err(|err| EvoKernelError::Validation(err.to_string()))
    }
2918
2919 pub fn economics_signal(&self, node_id: &str) -> Option<EconomicsSignal> {
2920 self.economics.lock().ok()?.governor_signal(node_id)
2921 }
2922
2923 pub fn selector_reputation_bias(&self) -> BTreeMap<String, f32> {
2924 self.economics
2925 .lock()
2926 .ok()
2927 .map(|locked| locked.selector_reputation_bias())
2928 .unwrap_or_default()
2929 }
2930
    /// Compute an evolution metrics snapshot from the backing store.
    pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
        evolution_metrics_snapshot(self.store.as_ref())
    }
2934
    /// Summarize replay ROI release-gate activity over the trailing
    /// `window_seconds` window by delegating to the store-level helper.
    pub fn replay_roi_release_gate_summary(
        &self,
        window_seconds: u64,
    ) -> Result<ReplayRoiWindowSummary, EvoKernelError> {
        replay_roi_release_gate_summary(self.store.as_ref(), window_seconds)
    }
2941
2942 pub fn render_replay_roi_release_gate_summary_json(
2943 &self,
2944 window_seconds: u64,
2945 ) -> Result<String, EvoKernelError> {
2946 let summary = self.replay_roi_release_gate_summary(window_seconds)?;
2947 serde_json::to_string_pretty(&summary)
2948 .map_err(|err| EvoKernelError::Validation(err.to_string()))
2949 }
2950
2951 pub fn replay_roi_release_gate_contract(
2952 &self,
2953 window_seconds: u64,
2954 thresholds: ReplayRoiReleaseGateThresholds,
2955 ) -> Result<ReplayRoiReleaseGateContract, EvoKernelError> {
2956 let summary = self.replay_roi_release_gate_summary(window_seconds)?;
2957 Ok(replay_roi_release_gate_contract(&summary, thresholds))
2958 }
2959
2960 pub fn render_replay_roi_release_gate_contract_json(
2961 &self,
2962 window_seconds: u64,
2963 thresholds: ReplayRoiReleaseGateThresholds,
2964 ) -> Result<String, EvoKernelError> {
2965 let contract = self.replay_roi_release_gate_contract(window_seconds, thresholds)?;
2966 serde_json::to_string_pretty(&contract)
2967 .map_err(|err| EvoKernelError::Validation(err.to_string()))
2968 }
2969
2970 pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
2971 self.metrics_snapshot().map(|snapshot| {
2972 let health = evolution_health_snapshot(&snapshot);
2973 render_evolution_metrics_prometheus(&snapshot, &health)
2974 })
2975 }
2976
2977 pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
2978 self.metrics_snapshot()
2979 .map(|snapshot| evolution_health_snapshot(&snapshot))
2980 }
2981}
2982
2983pub fn prepare_mutation(
2984 intent: MutationIntent,
2985 diff_payload: String,
2986 base_revision: Option<String>,
2987) -> PreparedMutation {
2988 PreparedMutation {
2989 intent,
2990 artifact: MutationArtifact {
2991 encoding: ArtifactEncoding::UnifiedDiff,
2992 content_hash: compute_artifact_hash(&diff_payload),
2993 payload: diff_payload,
2994 base_revision,
2995 },
2996 }
2997}
2998
/// Prepare a mutation from a compiled spec plan, reusing the plan's
/// already-compiled mutation intent.
pub fn prepare_mutation_from_spec(
    plan: CompiledMutationPlan,
    diff_payload: String,
    base_revision: Option<String>,
) -> PreparedMutation {
    prepare_mutation(plan.mutation_intent, diff_payload, base_revision)
}
3006
3007pub fn default_evolution_store() -> Arc<dyn EvolutionStore> {
3008 Arc::new(oris_evolution::JsonlEvolutionStore::new(
3009 default_store_root(),
3010 ))
3011}
3012
/// The built-in seed templates used by `bootstrap_if_empty`. Each carries a
/// deterministic unified diff so seeded mutations hash identically across
/// runs; all seeds validate under the "bootstrap-seed" profile.
fn built_in_seed_templates() -> Vec<SeedTemplate> {
    vec![
        // New-file README recovery seed.
        SeedTemplate {
            id: "bootstrap-readme".into(),
            intent: "Seed a baseline README recovery pattern".into(),
            signals: vec!["bootstrap readme".into(), "missing readme".into()],
            diff_payload: "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1,3 @@
+# Oris
+Bootstrap documentation seed
+"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Failing-test stabilization seed.
        SeedTemplate {
            id: "bootstrap-test-fix".into(),
            intent: "Seed a deterministic test stabilization pattern".into(),
            signals: vec!["bootstrap test fix".into(), "failing tests".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 1111111..2222222 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,2 @@
 pub fn demo() -> usize { 1 }
+pub fn normalize_test_output() -> bool { true }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Low-risk refactor seed.
        SeedTemplate {
            id: "bootstrap-refactor".into(),
            intent: "Seed a low-risk refactor capsule".into(),
            signals: vec!["bootstrap refactor".into(), "small refactor".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 2222222..3333333 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,3 @@
 pub fn demo() -> usize { 1 }
+
+fn extract_strategy_key(input: &str) -> &str { input }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Structured-logging seed.
        SeedTemplate {
            id: "bootstrap-logging".into(),
            intent: "Seed a baseline structured logging mutation".into(),
            signals: vec!["bootstrap logging".into(), "structured logs".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 3333333..4444444 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,3 @@
 pub fn demo() -> usize { 1 }
+
+fn emit_bootstrap_log() { println!(\"bootstrap-log\"); }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
    ]
}
3084
3085fn build_seed_mutation(template: &SeedTemplate) -> PreparedMutation {
3086 let changed_files = seed_changed_files(&template.diff_payload);
3087 let target = if changed_files.is_empty() {
3088 MutationTarget::WorkspaceRoot
3089 } else {
3090 MutationTarget::Paths {
3091 allow: changed_files,
3092 }
3093 };
3094 prepare_mutation(
3095 MutationIntent {
3096 id: stable_hash_json(&("bootstrap-mutation", &template.id))
3097 .unwrap_or_else(|_| format!("bootstrap-mutation-{}", template.id)),
3098 intent: template.intent.clone(),
3099 target,
3100 expected_effect: format!("seed {}", template.id),
3101 risk: RiskLevel::Low,
3102 signals: template.signals.clone(),
3103 spec_id: None,
3104 },
3105 template.diff_payload.clone(),
3106 None,
3107 )
3108}
3109
/// Derive the deterministic signal set for a seed template: normalized
/// declared phrases plus tokens mined from the declared signals, the intent
/// text, the diff payload, and each changed file path. The BTreeSet keeps
/// the set sorted and deduplicated, so the 32-signal cap is deterministic.
fn extract_seed_signals(template: &SeedTemplate) -> SignalExtractionOutput {
    let mut signals = BTreeSet::new();
    for declared in &template.signals {
        if let Some(phrase) = normalize_signal_phrase(declared) {
            signals.insert(phrase);
        }
        extend_signal_tokens(&mut signals, declared);
    }
    extend_signal_tokens(&mut signals, &template.intent);
    extend_signal_tokens(&mut signals, &template.diff_payload);
    for changed_file in seed_changed_files(&template.diff_payload) {
        extend_signal_tokens(&mut signals, &changed_file);
    }
    let values = signals.into_iter().take(32).collect::<Vec<_>>();
    // Fall back to hashing the joined values if JSON hashing fails.
    let hash =
        stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
    SignalExtractionOutput { values, hash }
}
3128
/// Lists the files a unified diff touches, derived from its `+++ b/` headers.
///
/// Paths are trimmed, deduplicated, and returned in sorted order. Headers
/// like `+++ /dev/null` (deletions) do not carry the `b/` prefix and are
/// therefore skipped.
fn seed_changed_files(diff_payload: &str) -> Vec<String> {
    diff_payload
        .lines()
        .filter_map(|line| line.strip_prefix("+++ b/"))
        .map(str::trim)
        .filter(|path| !path.is_empty())
        .map(ToOwned::to_owned)
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect()
}
3141
3142fn build_bootstrap_gene(
3143 template: &SeedTemplate,
3144 extracted: &SignalExtractionOutput,
3145) -> Result<Gene, EvolutionError> {
3146 let strategy = vec![template.id.clone(), "bootstrap".into()];
3147 let id = stable_hash_json(&(
3148 "bootstrap-gene",
3149 &template.id,
3150 &extracted.values,
3151 &template.validation_profile,
3152 ))?;
3153 Ok(Gene {
3154 id,
3155 signals: extracted.values.clone(),
3156 strategy,
3157 validation: vec![template.validation_profile.clone()],
3158 state: AssetState::Quarantined,
3159 })
3160}
3161
3162fn build_bootstrap_capsule(
3163 run_id: &RunId,
3164 template: &SeedTemplate,
3165 mutation: &PreparedMutation,
3166 gene: &Gene,
3167) -> Result<Capsule, EvolutionError> {
3168 let cwd = std::env::current_dir().unwrap_or_else(|_| Path::new(".").to_path_buf());
3169 let env = current_env_fingerprint(&cwd);
3170 let diff_hash = mutation.artifact.content_hash.clone();
3171 let changed_files = seed_changed_files(&template.diff_payload);
3172 let validator_hash = stable_hash_json(&(
3173 "bootstrap-validator",
3174 &template.id,
3175 &template.validation_profile,
3176 &diff_hash,
3177 ))?;
3178 let id = stable_hash_json(&(
3179 "bootstrap-capsule",
3180 &template.id,
3181 run_id,
3182 &gene.id,
3183 &diff_hash,
3184 &env,
3185 ))?;
3186 Ok(Capsule {
3187 id,
3188 gene_id: gene.id.clone(),
3189 mutation_id: mutation.intent.id.clone(),
3190 run_id: run_id.clone(),
3191 diff_hash,
3192 confidence: 0.0,
3193 env,
3194 outcome: Outcome {
3195 success: false,
3196 validation_profile: template.validation_profile.clone(),
3197 validation_duration_ms: 0,
3198 changed_files,
3199 validator_hash,
3200 lines_changed: compute_blast_radius(&template.diff_payload).lines_changed,
3201 replay_verified: false,
3202 },
3203 state: AssetState::Quarantined,
3204 })
3205}
3206
3207fn derive_gene(
3208 mutation: &PreparedMutation,
3209 receipt: &SandboxReceipt,
3210 validation_profile: &str,
3211 extracted_signals: &[String],
3212) -> Gene {
3213 let mut strategy = BTreeSet::new();
3214 for file in &receipt.changed_files {
3215 if let Some(component) = file.components().next() {
3216 strategy.insert(component.as_os_str().to_string_lossy().to_string());
3217 }
3218 }
3219 for token in mutation
3220 .artifact
3221 .payload
3222 .split(|ch: char| !ch.is_ascii_alphanumeric())
3223 {
3224 if token.len() == 5
3225 && token.starts_with('E')
3226 && token[1..].chars().all(|ch| ch.is_ascii_digit())
3227 {
3228 strategy.insert(token.to_string());
3229 }
3230 }
3231 for token in mutation.intent.intent.split_whitespace().take(8) {
3232 strategy.insert(token.to_ascii_lowercase());
3233 }
3234 let strategy = strategy.into_iter().collect::<Vec<_>>();
3235 let id = stable_hash_json(&(extracted_signals, &strategy, validation_profile))
3236 .unwrap_or_else(|_| next_id("gene"));
3237 Gene {
3238 id,
3239 signals: extracted_signals.to_vec(),
3240 strategy,
3241 validation: vec![validation_profile.to_string()],
3242 state: AssetState::Promoted,
3243 }
3244}
3245
3246fn build_capsule(
3247 run_id: &RunId,
3248 mutation: &PreparedMutation,
3249 receipt: &SandboxReceipt,
3250 report: &ValidationReport,
3251 validation_profile: &str,
3252 gene: &Gene,
3253 blast_radius: &BlastRadius,
3254) -> Result<Capsule, EvolutionError> {
3255 let env = current_env_fingerprint(&receipt.workdir);
3256 let validator_hash = stable_hash_json(report)?;
3257 let diff_hash = mutation.artifact.content_hash.clone();
3258 let id = stable_hash_json(&(run_id, &gene.id, &diff_hash, &mutation.intent.id))?;
3259 Ok(Capsule {
3260 id,
3261 gene_id: gene.id.clone(),
3262 mutation_id: mutation.intent.id.clone(),
3263 run_id: run_id.clone(),
3264 diff_hash,
3265 confidence: 0.7,
3266 env,
3267 outcome: oris_evolution::Outcome {
3268 success: true,
3269 validation_profile: validation_profile.to_string(),
3270 validation_duration_ms: report.duration_ms,
3271 changed_files: receipt
3272 .changed_files
3273 .iter()
3274 .map(|path| path.to_string_lossy().to_string())
3275 .collect(),
3276 validator_hash,
3277 lines_changed: blast_radius.lines_changed,
3278 replay_verified: false,
3279 },
3280 state: AssetState::Promoted,
3281 })
3282}
3283
3284fn current_env_fingerprint(workdir: &Path) -> EnvFingerprint {
3285 let rustc_version = Command::new("rustc")
3286 .arg("--version")
3287 .output()
3288 .ok()
3289 .filter(|output| output.status.success())
3290 .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
3291 .unwrap_or_else(|| "rustc unknown".into());
3292 let cargo_lock_hash = fs::read(workdir.join("Cargo.lock"))
3293 .ok()
3294 .map(|bytes| {
3295 let value = String::from_utf8_lossy(&bytes);
3296 compute_artifact_hash(&value)
3297 })
3298 .unwrap_or_else(|| "missing-cargo-lock".into());
3299 let target_triple = format!(
3300 "{}-unknown-{}",
3301 std::env::consts::ARCH,
3302 std::env::consts::OS
3303 );
3304 EnvFingerprint {
3305 rustc_version,
3306 cargo_lock_hash,
3307 target_triple,
3308 os: std::env::consts::OS.to_string(),
3309 }
3310}
3311
3312fn extend_signal_tokens(out: &mut BTreeSet<String>, input: &str) {
3313 for raw in input.split(|ch: char| !ch.is_ascii_alphanumeric()) {
3314 let trimmed = raw.trim();
3315 if trimmed.is_empty() {
3316 continue;
3317 }
3318 let normalized = if is_rust_error_code(trimmed) {
3319 let mut chars = trimmed.chars();
3320 let prefix = chars
3321 .next()
3322 .map(|ch| ch.to_ascii_uppercase())
3323 .unwrap_or('E');
3324 format!("{prefix}{}", chars.as_str())
3325 } else {
3326 trimmed.to_ascii_lowercase()
3327 };
3328 if normalized.len() < 3 {
3329 continue;
3330 }
3331 out.insert(normalized);
3332 }
3333}
3334
3335fn normalize_signal_phrase(input: &str) -> Option<String> {
3336 let normalized = input
3337 .split(|ch: char| !ch.is_ascii_alphanumeric())
3338 .filter_map(|raw| {
3339 let trimmed = raw.trim();
3340 if trimmed.is_empty() {
3341 return None;
3342 }
3343 let normalized = if is_rust_error_code(trimmed) {
3344 let mut chars = trimmed.chars();
3345 let prefix = chars
3346 .next()
3347 .map(|ch| ch.to_ascii_uppercase())
3348 .unwrap_or('E');
3349 format!("{prefix}{}", chars.as_str())
3350 } else {
3351 trimmed.to_ascii_lowercase()
3352 };
3353 if normalized.len() < 3 {
3354 None
3355 } else {
3356 Some(normalized)
3357 }
3358 })
3359 .collect::<Vec<_>>()
3360 .join(" ");
3361 if normalized.is_empty() {
3362 None
3363 } else {
3364 Some(normalized)
3365 }
3366}
3367
3368fn replay_task_descriptor(signals: &[String]) -> (String, String) {
3369 let normalized = signals
3370 .iter()
3371 .filter_map(|signal| normalize_signal_phrase(signal))
3372 .collect::<BTreeSet<_>>()
3373 .into_iter()
3374 .collect::<Vec<_>>();
3375 if normalized.is_empty() {
3376 return ("unknown".into(), "unknown".into());
3377 }
3378 let task_label = normalized
3379 .iter()
3380 .find(|value| {
3381 value.as_str() != "validation passed" && value.as_str() != "validation failed"
3382 })
3383 .cloned()
3384 .unwrap_or_else(|| normalized[0].clone());
3385 let task_class_id = stable_hash_json(&normalized)
3386 .unwrap_or_else(|_| compute_artifact_hash(&normalized.join("\n")));
3387 (task_class_id, task_label)
3388}
3389
3390fn normalized_signal_values(signals: &[String]) -> Vec<String> {
3391 signals
3392 .iter()
3393 .filter_map(|signal| normalize_signal_phrase(signal))
3394 .collect::<BTreeSet<_>>()
3395 .into_iter()
3396 .collect::<Vec<_>>()
3397}
3398
3399fn matched_replay_signals(input_signals: &[String], candidate_signals: &[String]) -> Vec<String> {
3400 let normalized_input = normalized_signal_values(input_signals);
3401 if normalized_input.is_empty() {
3402 return Vec::new();
3403 }
3404 let normalized_candidate = normalized_signal_values(candidate_signals);
3405 if normalized_candidate.is_empty() {
3406 return normalized_input;
3407 }
3408 let matched = normalized_input
3409 .iter()
3410 .filter(|signal| {
3411 normalized_candidate
3412 .iter()
3413 .any(|candidate| candidate.contains(signal.as_str()) || signal.contains(candidate))
3414 })
3415 .cloned()
3416 .collect::<Vec<_>>();
3417 if matched.is_empty() {
3418 normalized_input
3419 } else {
3420 matched
3421 }
3422}
3423
3424fn replay_detect_evidence_from_input(input: &SelectorInput) -> ReplayDetectEvidence {
3425 let (task_class_id, task_label) = replay_task_descriptor(&input.signals);
3426 ReplayDetectEvidence {
3427 task_class_id,
3428 task_label,
3429 matched_signals: normalized_signal_values(&input.signals),
3430 mismatch_reasons: Vec::new(),
3431 }
3432}
3433
3434fn replay_descriptor_from_candidate_or_input(
3435 candidate: Option<&GeneCandidate>,
3436 input: &SelectorInput,
3437) -> (String, String) {
3438 if let Some(candidate) = candidate {
3439 let task_class_id = strategy_metadata_value(&candidate.gene.strategy, "task_class");
3440 let task_label = strategy_metadata_value(&candidate.gene.strategy, "task_label");
3441 if let Some(task_class_id) = task_class_id {
3442 return (
3443 task_class_id.clone(),
3444 task_label.unwrap_or_else(|| task_class_id.clone()),
3445 );
3446 }
3447 return replay_task_descriptor(&candidate.gene.signals);
3448 }
3449 replay_task_descriptor(&input.signals)
3450}
3451
3452fn estimated_reasoning_tokens(signals: &[String]) -> u64 {
3453 let normalized = signals
3454 .iter()
3455 .filter_map(|signal| normalize_signal_phrase(signal))
3456 .collect::<BTreeSet<_>>();
3457 let signal_count = normalized.len() as u64;
3458 REPLAY_REASONING_TOKEN_FLOOR + REPLAY_REASONING_TOKEN_SIGNAL_WEIGHT * signal_count.max(1)
3459}
3460
/// Computes replay return-on-investment in `[-1.0, 1.0]`.
///
/// ROI is `(avoided - fallback) / (avoided + fallback)`: `1.0` means pure
/// savings, `-1.0` pure fallback cost, and `0.0` either a perfect wash or no
/// activity at all.
///
/// The arithmetic is done entirely in `f64` so that `avoided + fallback`
/// cannot overflow `u64` for extreme inputs (the previous integer addition
/// panicked in debug builds and wrapped in release builds).
fn compute_replay_roi(reasoning_avoided_tokens: u64, replay_fallback_cost: u64) -> f64 {
    let avoided = reasoning_avoided_tokens as f64;
    let fallback = replay_fallback_cost as f64;
    let total = avoided + fallback;
    if total == 0.0 {
        return 0.0;
    }
    (avoided - fallback) / total
}
3468
/// Reports whether `value` looks like a rustc error code: a leading `e`/`E`
/// followed by exactly four ASCII digits (e.g. `E0308`).
fn is_rust_error_code(value: &str) -> bool {
    let bytes = value.as_bytes();
    if bytes.len() != 5 || !matches!(bytes[0], b'e' | b'E') {
        return false;
    }
    // The first byte is ASCII, so slicing at index 1 is char-boundary safe.
    value[1..].chars().all(|ch| ch.is_ascii_digit())
}
3474
3475fn validation_plan_timeout_budget_ms(plan: &ValidationPlan) -> u64 {
3476 plan.stages.iter().fold(0_u64, |acc, stage| match stage {
3477 ValidationStage::Command { timeout_ms, .. } => acc.saturating_add(*timeout_ms),
3478 })
3479}
3480
/// Maps a mutation-needed failure reason code to its stable snake_case key.
///
/// These strings serve as identifiers downstream; treat them as a contract
/// and do not rename without a migration.
fn mutation_needed_reason_code_key(reason_code: MutationNeededFailureReasonCode) -> &'static str {
    match reason_code {
        MutationNeededFailureReasonCode::PolicyDenied => "policy_denied",
        MutationNeededFailureReasonCode::ValidationFailed => "validation_failed",
        MutationNeededFailureReasonCode::UnsafePatch => "unsafe_patch",
        MutationNeededFailureReasonCode::Timeout => "timeout",
        MutationNeededFailureReasonCode::MutationPayloadMissing => "mutation_payload_missing",
        MutationNeededFailureReasonCode::UnknownFailClosed => "unknown_fail_closed",
    }
}
3491
3492fn mutation_needed_status_from_reason_code(
3493 reason_code: MutationNeededFailureReasonCode,
3494) -> SupervisedDevloopStatus {
3495 if matches!(reason_code, MutationNeededFailureReasonCode::PolicyDenied) {
3496 SupervisedDevloopStatus::RejectedByPolicy
3497 } else {
3498 SupervisedDevloopStatus::FailedClosed
3499 }
3500}
3501
3502fn mutation_needed_contract_for_validation_failure(
3503 profile: &str,
3504 report: &ValidationReport,
3505) -> MutationNeededFailureContract {
3506 let lower_logs = report.logs.to_ascii_lowercase();
3507 if lower_logs.contains("timed out") {
3508 normalize_mutation_needed_failure_contract(
3509 Some(&format!(
3510 "mutation-needed validation command timed out under profile '{profile}'"
3511 )),
3512 Some(MutationNeededFailureReasonCode::Timeout),
3513 )
3514 } else {
3515 normalize_mutation_needed_failure_contract(
3516 Some(&format!(
3517 "mutation-needed validation failed under profile '{profile}'"
3518 )),
3519 Some(MutationNeededFailureReasonCode::ValidationFailed),
3520 )
3521 }
3522}
3523
3524fn mutation_needed_contract_for_error_message(message: &str) -> MutationNeededFailureContract {
3525 let reason_code = infer_mutation_needed_failure_reason_code(message);
3526 normalize_mutation_needed_failure_contract(Some(message), reason_code)
3527}
3528
3529fn mutation_needed_audit_mutation_id(request: &SupervisedDevloopRequest) -> String {
3530 stable_hash_json(&(
3531 "mutation-needed-audit",
3532 &request.task.id,
3533 &request.proposal.intent,
3534 &request.proposal.files,
3535 ))
3536 .map(|hash| format!("mutation-needed-{hash}"))
3537 .unwrap_or_else(|_| format!("mutation-needed-{}", request.task.id))
3538}
3539
3540fn classify_supervised_devloop_request(
3541 request: &SupervisedDevloopRequest,
3542) -> Option<BoundedTaskClass> {
3543 let path = request.proposal.files.first()?.trim();
3544 if request.proposal.files.len() != 1 || path.is_empty() {
3545 return None;
3546 }
3547 let normalized = path.replace('\\', "/");
3548 if normalized.starts_with("docs/") && normalized.ends_with(".md") {
3549 Some(BoundedTaskClass::DocsSingleFile)
3550 } else {
3551 None
3552 }
3553}
3554
3555fn find_declared_mutation(
3556 store: &dyn EvolutionStore,
3557 mutation_id: &MutationId,
3558) -> Result<Option<PreparedMutation>, EvolutionError> {
3559 for stored in store.scan(1)? {
3560 if let EvolutionEvent::MutationDeclared { mutation } = stored.event {
3561 if &mutation.intent.id == mutation_id {
3562 return Ok(Some(mutation));
3563 }
3564 }
3565 }
3566 Ok(None)
3567}
3568
/// Finds promoted genes whose signal set exactly equals the input's signals.
///
/// Matching is case-insensitive *set equality* (no fuzzy overlap). When the
/// input carries a spec id, genes must also be linked to that spec. Each
/// surviving gene's promoted capsules are ordered best-first (environment
/// match desc, then confidence desc, then id asc), and the candidate's score
/// is the best capsule's environment match factor. Store errors degrade to
/// an empty result rather than propagating.
fn exact_match_candidates(store: &dyn EvolutionStore, input: &SelectorInput) -> Vec<GeneCandidate> {
    let Ok(projection) = projection_snapshot(store) else {
        return Vec::new();
    };
    let capsules = projection.capsules.clone();
    let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
    let requested_spec_id = input
        .spec_id
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty());
    // Lowercased signal set used for the exact-equality comparison below.
    let signal_set = input
        .signals
        .iter()
        .map(|signal| signal.to_ascii_lowercase())
        .collect::<BTreeSet<_>>();
    let mut candidates = projection
        .genes
        .into_iter()
        .filter_map(|gene| {
            // Only promoted genes are eligible.
            if gene.state != AssetState::Promoted {
                return None;
            }
            // Spec filter: gene must be linked to the requested spec id.
            if let Some(spec_id) = requested_spec_id {
                let matches_spec = spec_ids_by_gene
                    .get(&gene.id)
                    .map(|values| {
                        values
                            .iter()
                            .any(|value| value.eq_ignore_ascii_case(spec_id))
                    })
                    .unwrap_or(false);
                if !matches_spec {
                    return None;
                }
            }
            let gene_signals = gene
                .signals
                .iter()
                .map(|signal| signal.to_ascii_lowercase())
                .collect::<BTreeSet<_>>();
            if gene_signals == signal_set {
                let mut matched_capsules = capsules
                    .iter()
                    .filter(|capsule| {
                        capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
                    })
                    .cloned()
                    .collect::<Vec<_>>();
                // Best capsule first: env match desc, confidence desc, id asc
                // as a deterministic tie-break.
                matched_capsules.sort_by(|left, right| {
                    replay_environment_match_factor(&input.env, &right.env)
                        .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
                        .unwrap_or(std::cmp::Ordering::Equal)
                        .then_with(|| {
                            right
                                .confidence
                                .partial_cmp(&left.confidence)
                                .unwrap_or(std::cmp::Ordering::Equal)
                        })
                        .then_with(|| left.id.cmp(&right.id))
                });
                if matched_capsules.is_empty() {
                    None
                } else {
                    let score = matched_capsules
                        .first()
                        .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
                        .unwrap_or(0.0);
                    Some(GeneCandidate {
                        gene,
                        score,
                        capsules: matched_capsules,
                    })
                }
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    // Highest-scoring candidates first; gene id breaks ties.
    candidates.sort_by(|left, right| {
        right
            .score
            .partial_cmp(&left.score)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| left.gene.id.cmp(&right.gene.id))
    });
    candidates
}
3657
/// Finds quarantined/shadow-validated genes imported from remote peers that
/// overlap the input's signals.
///
/// Unlike `exact_match_candidates`, matching here is fuzzy: bidirectional
/// substring containment between normalized phrases. Only capsules that were
/// recorded via `RemoteAssetImported` with `CandidateSource::Remote` — and
/// are still `Quarantined` or `ShadowValidated` — qualify. A candidate's
/// score is the max of its signal-overlap ratio and its best capsule's
/// environment match factor. Store errors degrade to an empty result.
fn quarantined_remote_exact_match_candidates(
    store: &dyn EvolutionStore,
    input: &SelectorInput,
) -> Vec<GeneCandidate> {
    // Ids of every asset that ever arrived via a remote import event.
    let remote_asset_ids = store
        .scan(1)
        .ok()
        .map(|events| {
            events
                .into_iter()
                .filter_map(|stored| match stored.event {
                    EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids,
                        ..
                    } => Some(asset_ids),
                    _ => None,
                })
                .flatten()
                .collect::<BTreeSet<_>>()
        })
        .unwrap_or_default();
    if remote_asset_ids.is_empty() {
        return Vec::new();
    }

    let Ok(projection) = projection_snapshot(store) else {
        return Vec::new();
    };
    let capsules = projection.capsules.clone();
    let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
    let requested_spec_id = input
        .spec_id
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty());
    let normalized_signals = input
        .signals
        .iter()
        .filter_map(|signal| normalize_signal_phrase(signal))
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect::<Vec<_>>();
    if normalized_signals.is_empty() {
        return Vec::new();
    }
    let mut candidates = projection
        .genes
        .into_iter()
        .filter_map(|gene| {
            if !matches!(
                gene.state,
                AssetState::Promoted | AssetState::Quarantined | AssetState::ShadowValidated
            ) {
                return None;
            }
            // Spec filter mirrors exact_match_candidates.
            if let Some(spec_id) = requested_spec_id {
                let matches_spec = spec_ids_by_gene
                    .get(&gene.id)
                    .map(|values| {
                        values
                            .iter()
                            .any(|value| value.eq_ignore_ascii_case(spec_id))
                    })
                    .unwrap_or(false);
                if !matches_spec {
                    return None;
                }
            }
            let normalized_gene_signals = gene
                .signals
                .iter()
                .filter_map(|candidate| normalize_signal_phrase(candidate))
                .collect::<Vec<_>>();
            // Count query signals with a fuzzy (substring) match on the gene.
            let matched_query_count = normalized_signals
                .iter()
                .filter(|signal| {
                    normalized_gene_signals.iter().any(|candidate| {
                        candidate.contains(signal.as_str()) || signal.contains(candidate)
                    })
                })
                .count();
            if matched_query_count == 0 {
                return None;
            }

            // Only remotely imported, not-yet-promoted capsules qualify.
            let mut matched_capsules = capsules
                .iter()
                .filter(|capsule| {
                    capsule.gene_id == gene.id
                        && matches!(
                            capsule.state,
                            AssetState::Quarantined | AssetState::ShadowValidated
                        )
                        && remote_asset_ids.contains(&capsule.id)
                })
                .cloned()
                .collect::<Vec<_>>();
            // Best capsule first: env match desc, confidence desc, id asc.
            matched_capsules.sort_by(|left, right| {
                replay_environment_match_factor(&input.env, &right.env)
                    .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
                    .unwrap_or(std::cmp::Ordering::Equal)
                    .then_with(|| {
                        right
                            .confidence
                            .partial_cmp(&left.confidence)
                            .unwrap_or(std::cmp::Ordering::Equal)
                    })
                    .then_with(|| left.id.cmp(&right.id))
            });
            if matched_capsules.is_empty() {
                None
            } else {
                let overlap = matched_query_count as f32 / normalized_signals.len() as f32;
                let env_score = matched_capsules
                    .first()
                    .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
                    .unwrap_or(0.0);
                Some(GeneCandidate {
                    gene,
                    score: overlap.max(env_score),
                    capsules: matched_capsules,
                })
            }
        })
        .collect::<Vec<_>>();
    // Highest-scoring candidates first; gene id breaks ties.
    candidates.sort_by(|left, right| {
        right
            .score
            .partial_cmp(&left.score)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| left.gene.id.cmp(&right.gene.id))
    });
    candidates
}
3793
3794fn replay_environment_match_factor(input: &EnvFingerprint, candidate: &EnvFingerprint) -> f32 {
3795 let fields = [
3796 input
3797 .rustc_version
3798 .eq_ignore_ascii_case(&candidate.rustc_version),
3799 input
3800 .cargo_lock_hash
3801 .eq_ignore_ascii_case(&candidate.cargo_lock_hash),
3802 input
3803 .target_triple
3804 .eq_ignore_ascii_case(&candidate.target_triple),
3805 input.os.eq_ignore_ascii_case(&candidate.os),
3806 ];
3807 let matched_fields = fields.into_iter().filter(|matched| *matched).count() as f32;
3808 0.5 + ((matched_fields / 4.0) * 0.5)
3809}
3810
3811fn effective_candidate_score(
3812 candidate: &GeneCandidate,
3813 publishers_by_asset: &BTreeMap<String, String>,
3814 reputation_bias: &BTreeMap<String, f32>,
3815) -> f32 {
3816 let bias = candidate
3817 .capsules
3818 .first()
3819 .and_then(|capsule| publishers_by_asset.get(&capsule.id))
3820 .and_then(|publisher| reputation_bias.get(publisher))
3821 .copied()
3822 .unwrap_or(0.0)
3823 .clamp(0.0, 1.0);
3824 candidate.score * (1.0 + (bias * 0.1))
3825}
3826
3827fn export_promoted_assets_from_store(
3828 store: &dyn EvolutionStore,
3829 sender_id: impl Into<String>,
3830) -> Result<EvolutionEnvelope, EvoKernelError> {
3831 let (events, projection) = scan_projection(store)?;
3832 let genes = projection
3833 .genes
3834 .into_iter()
3835 .filter(|gene| gene.state == AssetState::Promoted)
3836 .collect::<Vec<_>>();
3837 let capsules = projection
3838 .capsules
3839 .into_iter()
3840 .filter(|capsule| capsule.state == AssetState::Promoted)
3841 .collect::<Vec<_>>();
3842 let assets = replay_export_assets(&events, genes, capsules);
3843 Ok(EvolutionEnvelope::publish(sender_id, assets))
3844}
3845
/// Scans the full event log and its folded projection in one call, mapping
/// store errors into kernel errors.
fn scan_projection(
    store: &dyn EvolutionStore,
) -> Result<(Vec<StoredEvolutionEvent>, EvolutionProjection), EvoKernelError> {
    store.scan_projection().map_err(store_err)
}
3851
/// Convenience wrapper over `scan_projection` for callers that only need the
/// projection and not the raw events.
fn projection_snapshot(store: &dyn EvolutionStore) -> Result<EvolutionProjection, EvoKernelError> {
    scan_projection(store).map(|(_, projection)| projection)
}
3855
3856fn replay_export_assets(
3857 events: &[StoredEvolutionEvent],
3858 genes: Vec<Gene>,
3859 capsules: Vec<Capsule>,
3860) -> Vec<NetworkAsset> {
3861 let mutation_ids = capsules
3862 .iter()
3863 .map(|capsule| capsule.mutation_id.clone())
3864 .collect::<BTreeSet<_>>();
3865 let mut assets = replay_export_events_for_mutations(events, &mutation_ids);
3866 for gene in genes {
3867 assets.push(NetworkAsset::Gene { gene });
3868 }
3869 for capsule in capsules {
3870 assets.push(NetworkAsset::Capsule { capsule });
3871 }
3872 assets
3873}
3874
/// Emits the `MutationDeclared` and `SpecLinked` events backing the given
/// mutation ids, deduplicated, in log order.
///
/// Only events referencing an id in `mutation_ids` are exported. The
/// `insert` calls inside the match guards are intentional side effects: a
/// guard that fails on the dedup insert skips the arm, so each declaration
/// and each (mutation, spec) link is emitted at most once.
fn replay_export_events_for_mutations(
    events: &[StoredEvolutionEvent],
    mutation_ids: &BTreeSet<String>,
) -> Vec<NetworkAsset> {
    if mutation_ids.is_empty() {
        return Vec::new();
    }

    let mut assets = Vec::new();
    let mut seen_mutations = BTreeSet::new();
    let mut seen_spec_links = BTreeSet::new();
    for stored in events {
        match &stored.event {
            EvolutionEvent::MutationDeclared { mutation }
                if mutation_ids.contains(mutation.intent.id.as_str())
                    && seen_mutations.insert(mutation.intent.id.clone()) =>
            {
                assets.push(NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared {
                        mutation: mutation.clone(),
                    },
                });
            }
            EvolutionEvent::SpecLinked {
                mutation_id,
                spec_id,
            } if mutation_ids.contains(mutation_id.as_str())
                && seen_spec_links.insert((mutation_id.clone(), spec_id.clone())) =>
            {
                assets.push(NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::SpecLinked {
                        mutation_id: mutation_id.clone(),
                        spec_id: spec_id.clone(),
                    },
                });
            }
            _ => {}
        }
    }

    assets
}
3917
/// Prefix for canonical sync cursors: `seq:<event-sequence-number>`.
const SYNC_CURSOR_PREFIX: &str = "seq:";
/// Versioned resume-token prefix; full form is `gep-rt1|<sender>|<cursor>`.
const SYNC_RESUME_TOKEN_PREFIX: &str = "gep-rt1|";
3920
/// Ids touched by events after a given sequence number; used to scope
/// incremental sync work (see `delta_window`).
#[derive(Clone, Debug)]
struct DeltaWindow {
    // Genes changed directly or via committed capsules / remote imports.
    changed_gene_ids: BTreeSet<String>,
    // Capsules committed, released, quarantined, or remotely imported.
    changed_capsule_ids: BTreeSet<String>,
    // Mutations declared, spec-linked, or referenced by committed capsules.
    changed_mutation_ids: BTreeSet<String>,
}
3927
/// Trims an optional sync parameter, mapping blank values to `None`.
fn normalize_sync_value(value: Option<&str>) -> Option<String> {
    match value.map(str::trim) {
        Some(trimmed) if !trimmed.is_empty() => Some(trimmed.to_string()),
        _ => None,
    }
}
3934
3935fn parse_sync_cursor_seq(cursor: &str) -> Option<u64> {
3936 let trimmed = cursor.trim();
3937 if trimmed.is_empty() {
3938 return None;
3939 }
3940 let raw = trimmed.strip_prefix(SYNC_CURSOR_PREFIX).unwrap_or(trimmed);
3941 raw.parse::<u64>().ok()
3942}
3943
/// Formats a sequence number into the canonical `seq:<n>` cursor form.
fn format_sync_cursor(seq: u64) -> String {
    format!("{SYNC_CURSOR_PREFIX}{seq}")
}
3947
/// Encodes a sender-bound resume token: `gep-rt1|<sender>|<cursor>`.
fn encode_resume_token(sender_id: &str, cursor: &str) -> String {
    format!("{SYNC_RESUME_TOKEN_PREFIX}{sender_id}|{cursor}")
}
3951
3952fn decode_resume_token(sender_id: &str, token: &str) -> Result<String, EvoKernelError> {
3953 let token = token.trim();
3954 let Some(encoded) = token.strip_prefix(SYNC_RESUME_TOKEN_PREFIX) else {
3955 return Ok(token.to_string());
3956 };
3957 let (token_sender, cursor) = encoded.split_once('|').ok_or_else(|| {
3958 EvoKernelError::Validation(
3959 "invalid resume_token format; expected gep-rt1|<sender>|<seq>".into(),
3960 )
3961 })?;
3962 if token_sender != sender_id.trim() {
3963 return Err(EvoKernelError::Validation(
3964 "resume_token sender mismatch".into(),
3965 ));
3966 }
3967 Ok(cursor.to_string())
3968}
3969
3970fn resolve_requested_cursor(
3971 sender_id: &str,
3972 since_cursor: Option<&str>,
3973 resume_token: Option<&str>,
3974) -> Result<Option<String>, EvoKernelError> {
3975 let cursor = if let Some(token) = normalize_sync_value(resume_token) {
3976 Some(decode_resume_token(sender_id, &token)?)
3977 } else {
3978 normalize_sync_value(since_cursor)
3979 };
3980
3981 let Some(cursor) = cursor else {
3982 return Ok(None);
3983 };
3984 let seq = parse_sync_cursor_seq(&cursor).ok_or_else(|| {
3985 EvoKernelError::Validation("invalid since_cursor/resume_token cursor format".into())
3986 })?;
3987 Ok(Some(format_sync_cursor(seq)))
3988}
3989
/// Returns the cursor of the newest event in the store, or `None` when the
/// event log is empty.
fn latest_store_cursor(store: &dyn EvolutionStore) -> Result<Option<String>, EvoKernelError> {
    let events = store.scan(1).map_err(store_err)?;
    Ok(events.last().map(|stored| format_sync_cursor(stored.seq)))
}
3994
/// Folds every event with `seq > since_seq` into the sets of gene, capsule,
/// and mutation ids it touched.
///
/// Capsule commits propagate to their gene and mutation ids as well, and
/// remote imports conservatively mark each imported id as both a gene and a
/// capsule change (the event does not distinguish asset kinds).
fn delta_window(events: &[StoredEvolutionEvent], since_seq: u64) -> DeltaWindow {
    let mut changed_gene_ids = BTreeSet::new();
    let mut changed_capsule_ids = BTreeSet::new();
    let mut changed_mutation_ids = BTreeSet::new();

    for stored in events {
        // Only events strictly after the requested position count.
        if stored.seq <= since_seq {
            continue;
        }
        match &stored.event {
            EvolutionEvent::MutationDeclared { mutation } => {
                changed_mutation_ids.insert(mutation.intent.id.clone());
            }
            EvolutionEvent::SpecLinked { mutation_id, .. } => {
                changed_mutation_ids.insert(mutation_id.clone());
            }
            EvolutionEvent::GeneProjected { gene } => {
                changed_gene_ids.insert(gene.id.clone());
            }
            EvolutionEvent::GenePromoted { gene_id }
            | EvolutionEvent::GeneRevoked { gene_id, .. }
            | EvolutionEvent::PromotionEvaluated { gene_id, .. } => {
                changed_gene_ids.insert(gene_id.clone());
            }
            EvolutionEvent::CapsuleCommitted { capsule } => {
                // A capsule commit also touches its gene and mutation.
                changed_capsule_ids.insert(capsule.id.clone());
                changed_gene_ids.insert(capsule.gene_id.clone());
                changed_mutation_ids.insert(capsule.mutation_id.clone());
            }
            EvolutionEvent::CapsuleReleased { capsule_id, .. }
            | EvolutionEvent::CapsuleQuarantined { capsule_id } => {
                changed_capsule_ids.insert(capsule_id.clone());
            }
            EvolutionEvent::RemoteAssetImported { asset_ids, .. } => {
                // Asset kind is unknown here; mark both id spaces.
                for asset_id in asset_ids {
                    changed_gene_ids.insert(asset_id.clone());
                    changed_capsule_ids.insert(asset_id.clone());
                }
            }
            _ => {}
        }
    }

    DeltaWindow {
        changed_gene_ids,
        changed_capsule_ids,
        changed_mutation_ids,
    }
}
4044
/// Imports a remote evolution envelope into the local store.
///
/// Pipeline: (1) verify the envelope's content hash and manifest, recording
/// the manifest-validation outcome either way and failing closed on any
/// mismatch; (2) snapshot the locally known gene/capsule/mutation/spec-link
/// ids for dedup; (3) apply each asset — genes and capsules are always
/// re-committed in `Quarantined` state (remote assets must pass local
/// validation before promotion), while embedded events are appended only if
/// unseen or explicitly allowed by `should_import_remote_event`; (4) return
/// an `ImportOutcome` with the new cursor, a sender-bound resume token, and
/// a `SyncAudit` of applied/skipped counts.
fn import_remote_envelope_into_store(
    store: &dyn EvolutionStore,
    envelope: &EvolutionEnvelope,
    remote_publishers: Option<&Mutex<BTreeMap<String, String>>>,
    requested_cursor: Option<String>,
) -> Result<ImportOutcome, EvoKernelError> {
    // Fail closed on any integrity problem, but record the attempt first.
    if !envelope.verify_content_hash() {
        record_manifest_validation(store, envelope, false, "invalid evolution envelope hash")?;
        return Err(EvoKernelError::Validation(
            "invalid evolution envelope hash".into(),
        ));
    }
    if let Err(reason) = envelope.verify_manifest() {
        record_manifest_validation(
            store,
            envelope,
            false,
            format!("manifest validation failed: {reason}"),
        )?;
        return Err(EvoKernelError::Validation(format!(
            "invalid evolution envelope manifest: {reason}"
        )));
    }
    record_manifest_validation(store, envelope, true, "manifest validated")?;

    let sender_id = normalized_sender_id(&envelope.sender_id);
    // Snapshot locally known ids so re-imports are skipped, not duplicated.
    let (events, projection) = scan_projection(store)?;
    let mut known_gene_ids = projection
        .genes
        .into_iter()
        .map(|gene| gene.id)
        .collect::<BTreeSet<_>>();
    let mut known_capsule_ids = projection
        .capsules
        .into_iter()
        .map(|capsule| capsule.id)
        .collect::<BTreeSet<_>>();
    let mut known_mutation_ids = BTreeSet::new();
    let mut known_spec_links = BTreeSet::new();
    for stored in &events {
        match &stored.event {
            EvolutionEvent::MutationDeclared { mutation } => {
                known_mutation_ids.insert(mutation.intent.id.clone());
            }
            EvolutionEvent::SpecLinked {
                mutation_id,
                spec_id,
            } => {
                known_spec_links.insert((mutation_id.clone(), spec_id.clone()));
            }
            _ => {}
        }
    }
    let mut imported_asset_ids = Vec::new();
    let mut applied_count = 0usize;
    let mut skipped_count = 0usize;
    for asset in &envelope.assets {
        match asset {
            NetworkAsset::Gene { gene } => {
                // `insert` returning false means we already know this gene.
                if !known_gene_ids.insert(gene.id.clone()) {
                    skipped_count += 1;
                    continue;
                }
                imported_asset_ids.push(gene.id.clone());
                applied_count += 1;
                // Remote genes always land quarantined regardless of their
                // state at the sender.
                let mut quarantined_gene = gene.clone();
                quarantined_gene.state = AssetState::Quarantined;
                store
                    .append_event(EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids: vec![gene.id.clone()],
                        sender_id: sender_id.clone(),
                    })
                    .map_err(store_err)?;
                store
                    .append_event(EvolutionEvent::GeneProjected {
                        gene: quarantined_gene.clone(),
                    })
                    .map_err(store_err)?;
                record_remote_publisher_for_asset(remote_publishers, &envelope.sender_id, asset);
                store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: quarantined_gene.id,
                        state: AssetState::Quarantined,
                        reason: "remote asset requires local validation before promotion".into(),
                        reason_code: TransitionReasonCode::DowngradeRemoteRequiresLocalValidation,
                        evidence: Some(TransitionEvidence {
                            replay_attempts: None,
                            replay_successes: None,
                            replay_success_rate: None,
                            environment_match_factor: None,
                            decayed_confidence: None,
                            confidence_decay_ratio: None,
                            summary: Some("phase=remote_import; source=remote; action=quarantine_before_shadow_validation".into()),
                        }),
                    })
                    .map_err(store_err)?;
            }
            NetworkAsset::Capsule { capsule } => {
                if !known_capsule_ids.insert(capsule.id.clone()) {
                    skipped_count += 1;
                    continue;
                }
                imported_asset_ids.push(capsule.id.clone());
                applied_count += 1;
                store
                    .append_event(EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids: vec![capsule.id.clone()],
                        sender_id: sender_id.clone(),
                    })
                    .map_err(store_err)?;
                // Remote capsules are likewise committed in quarantine.
                let mut quarantined = capsule.clone();
                quarantined.state = AssetState::Quarantined;
                store
                    .append_event(EvolutionEvent::CapsuleCommitted {
                        capsule: quarantined.clone(),
                    })
                    .map_err(store_err)?;
                record_remote_publisher_for_asset(remote_publishers, &envelope.sender_id, asset);
                store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: quarantined.id,
                    })
                    .map_err(store_err)?;
            }
            NetworkAsset::EvolutionEvent { event } => {
                // Declarations and spec links are deduped via the known-id
                // sets; everything else passes through a policy check.
                let should_append = match event {
                    EvolutionEvent::MutationDeclared { mutation } => {
                        known_mutation_ids.insert(mutation.intent.id.clone())
                    }
                    EvolutionEvent::SpecLinked {
                        mutation_id,
                        spec_id,
                    } => known_spec_links.insert((mutation_id.clone(), spec_id.clone())),
                    _ if should_import_remote_event(event) => true,
                    _ => false,
                };
                if should_append {
                    store.append_event(event.clone()).map_err(store_err)?;
                    applied_count += 1;
                } else {
                    skipped_count += 1;
                }
            }
        }
    }
    let next_cursor = latest_store_cursor(store)?;
    let resume_token = next_cursor.as_ref().and_then(|cursor| {
        normalized_sender_id(&envelope.sender_id).map(|sender| encode_resume_token(&sender, cursor))
    });

    Ok(ImportOutcome {
        imported_asset_ids,
        accepted: true,
        next_cursor: next_cursor.clone(),
        resume_token,
        sync_audit: SyncAudit {
            batch_id: next_id("sync-import"),
            requested_cursor,
            scanned_count: envelope.assets.len(),
            applied_count,
            skipped_count,
            failed_count: 0,
            failure_reasons: Vec::new(),
        },
    })
}
4213
/// Crate-relative directory that holds the bundled EvoMap snapshot files.
const EVOMAP_SNAPSHOT_ROOT: &str = "assets/gep/evomap_snapshot";
/// Gene snapshot document inside `EVOMAP_SNAPSHOT_ROOT`.
const EVOMAP_SNAPSHOT_GENES_FILE: &str = "genes.json";
/// Capsule snapshot document inside `EVOMAP_SNAPSHOT_ROOT`.
const EVOMAP_SNAPSHOT_CAPSULES_FILE: &str = "capsules.json";
/// Synthetic run id attached to capsules seeded from the built-in snapshot.
const EVOMAP_BUILTIN_RUN_ID: &str = "builtin-evomap-seed";
4218
/// Top-level shape of the bundled `genes.json` snapshot.
#[derive(Debug, Deserialize)]
struct EvoMapGeneDocument {
    #[serde(default)]
    genes: Vec<EvoMapGeneAsset>,
}

/// One gene entry as serialized in the EvoMap snapshot. Every field except
/// `id` is optional so partially-populated snapshots still deserialize.
#[derive(Debug, Deserialize)]
struct EvoMapGeneAsset {
    id: String,
    #[serde(default)]
    category: Option<String>,
    // Signals may be strings or structured JSON; normalized later by
    // `value_as_signal_string`.
    #[serde(default)]
    signals_match: Vec<Value>,
    #[serde(default)]
    strategy: Vec<String>,
    #[serde(default)]
    validation: Vec<String>,
    #[serde(default)]
    constraints: Option<EvoMapConstraintAsset>,
    #[serde(default)]
    model_name: Option<String>,
    #[serde(default)]
    schema_version: Option<String>,
    // Either a bare state string or an object with a `state` field; see
    // `compatibility_state_from_value`.
    #[serde(default)]
    compatibility: Option<Value>,
}

/// Optional per-gene mutation constraints carried by the snapshot.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapConstraintAsset {
    #[serde(default)]
    max_files: Option<usize>,
    #[serde(default)]
    forbidden_paths: Vec<String>,
}

/// Top-level shape of the bundled `capsules.json` snapshot.
#[derive(Debug, Deserialize)]
struct EvoMapCapsuleDocument {
    #[serde(default)]
    capsules: Vec<EvoMapCapsuleAsset>,
}

/// One capsule entry as serialized in the EvoMap snapshot.
#[derive(Debug, Deserialize)]
struct EvoMapCapsuleAsset {
    id: String,
    // Id of the gene this capsule belongs to; validated against the gene set.
    gene: String,
    #[serde(default)]
    trigger: Vec<String>,
    #[serde(default)]
    summary: String,
    // Unified diff payload, possibly wrapped in a markdown code fence; a
    // synthetic diff is generated when absent or empty.
    #[serde(default)]
    diff: Option<String>,
    #[serde(default)]
    confidence: Option<f32>,
    #[serde(default)]
    outcome: Option<EvoMapOutcomeAsset>,
    #[serde(default)]
    blast_radius: Option<EvoMapBlastRadiusAsset>,
    #[serde(default)]
    content: Option<EvoMapCapsuleContentAsset>,
    #[serde(default)]
    env_fingerprint: Option<Value>,
    #[serde(default)]
    model_name: Option<String>,
    #[serde(default)]
    schema_version: Option<String>,
    #[serde(default)]
    compatibility: Option<Value>,
}

/// Recorded outcome of the capsule's original run (status + optional score).
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapOutcomeAsset {
    #[serde(default)]
    status: Option<String>,
    #[serde(default)]
    score: Option<f32>,
}

/// Size of the change described by a capsule, in changed lines.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapBlastRadiusAsset {
    #[serde(default)]
    lines: usize,
}

/// File-level content metadata for a capsule (explicit changed-file list).
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapCapsuleContentAsset {
    #[serde(default)]
    changed_files: Vec<String>,
}
4307
/// A capsule converted from the snapshot, paired with the prepared mutation
/// that makes it replayable.
#[derive(Debug)]
struct BuiltinCapsuleSeed {
    capsule: Capsule,
    mutation: PreparedMutation,
}

/// All built-in assets to seed into the store: hard-coded genes plus any
/// snapshot-derived genes and capsule seeds.
#[derive(Debug)]
struct BuiltinAssetBundle {
    genes: Vec<Gene>,
    capsules: Vec<BuiltinCapsuleSeed>,
}
4319
4320fn built_in_experience_genes() -> Vec<Gene> {
4321 vec![
4322 Gene {
4323 id: "builtin-experience-docs-rewrite-v1".into(),
4324 signals: vec!["docs.rewrite".into(), "docs".into(), "rewrite".into()],
4325 strategy: vec![
4326 "asset_origin=builtin".into(),
4327 "task_class=docs.rewrite".into(),
4328 "task_label=Docs rewrite".into(),
4329 "template_id=builtin-docs-rewrite-v1".into(),
4330 "summary=baseline docs rewrite experience".into(),
4331 ],
4332 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4333 state: AssetState::Promoted,
4334 },
4335 Gene {
4336 id: "builtin-experience-ci-fix-v1".into(),
4337 signals: vec![
4338 "ci.fix".into(),
4339 "ci".into(),
4340 "test".into(),
4341 "failure".into(),
4342 ],
4343 strategy: vec![
4344 "asset_origin=builtin".into(),
4345 "task_class=ci.fix".into(),
4346 "task_label=CI fix".into(),
4347 "template_id=builtin-ci-fix-v1".into(),
4348 "summary=baseline ci stabilization experience".into(),
4349 ],
4350 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4351 state: AssetState::Promoted,
4352 },
4353 Gene {
4354 id: "builtin-experience-task-decomposition-v1".into(),
4355 signals: vec![
4356 "task.decomposition".into(),
4357 "task".into(),
4358 "decomposition".into(),
4359 "planning".into(),
4360 ],
4361 strategy: vec![
4362 "asset_origin=builtin".into(),
4363 "task_class=task.decomposition".into(),
4364 "task_label=Task decomposition".into(),
4365 "template_id=builtin-task-decomposition-v1".into(),
4366 "summary=baseline task decomposition and routing experience".into(),
4367 ],
4368 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4369 state: AssetState::Promoted,
4370 },
4371 Gene {
4372 id: "builtin-experience-project-workflow-v1".into(),
4373 signals: vec![
4374 "project.workflow".into(),
4375 "project".into(),
4376 "workflow".into(),
4377 "milestone".into(),
4378 ],
4379 strategy: vec![
4380 "asset_origin=builtin".into(),
4381 "task_class=project.workflow".into(),
4382 "task_label=Project workflow".into(),
4383 "template_id=builtin-project-workflow-v1".into(),
4384 "summary=baseline project proposal and merge workflow experience".into(),
4385 ],
4386 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4387 state: AssetState::Promoted,
4388 },
4389 Gene {
4390 id: "builtin-experience-service-bid-v1".into(),
4391 signals: vec![
4392 "service.bid".into(),
4393 "service".into(),
4394 "bid".into(),
4395 "economics".into(),
4396 ],
4397 strategy: vec![
4398 "asset_origin=builtin".into(),
4399 "task_class=service.bid".into(),
4400 "task_label=Service bid".into(),
4401 "template_id=builtin-service-bid-v1".into(),
4402 "summary=baseline service bidding and settlement experience".into(),
4403 ],
4404 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4405 state: AssetState::Promoted,
4406 },
4407 ]
4408}
4409
4410fn evomap_snapshot_path(file_name: &str) -> PathBuf {
4411 PathBuf::from(env!("CARGO_MANIFEST_DIR"))
4412 .join(EVOMAP_SNAPSHOT_ROOT)
4413 .join(file_name)
4414}
4415
4416fn read_evomap_snapshot(file_name: &str) -> Result<Option<String>, EvoKernelError> {
4417 let path = evomap_snapshot_path(file_name);
4418 if !path.exists() {
4419 return Ok(None);
4420 }
4421 fs::read_to_string(&path).map(Some).map_err(|err| {
4422 EvoKernelError::Validation(format!(
4423 "failed to read EvoMap snapshot {}: {err}",
4424 path.display()
4425 ))
4426 })
4427}
4428
4429fn compatibility_state_from_value(value: Option<&Value>) -> Option<String> {
4430 let value = value?;
4431 if let Some(state) = value.as_str() {
4432 let normalized = state.trim().to_ascii_lowercase();
4433 if normalized.is_empty() {
4434 return None;
4435 }
4436 return Some(normalized);
4437 }
4438 value
4439 .get("state")
4440 .and_then(Value::as_str)
4441 .map(str::trim)
4442 .filter(|state| !state.is_empty())
4443 .map(|state| state.to_ascii_lowercase())
4444}
4445
4446fn map_evomap_state(value: Option<&Value>) -> AssetState {
4447 match compatibility_state_from_value(value).as_deref() {
4448 Some("promoted") => AssetState::Promoted,
4449 Some("candidate") => AssetState::Candidate,
4450 Some("quarantined") => AssetState::Quarantined,
4451 Some("shadow_validated") => AssetState::ShadowValidated,
4452 Some("revoked") => AssetState::Revoked,
4453 Some("rejected") => AssetState::Archived,
4454 Some("archived") => AssetState::Archived,
4455 _ => AssetState::Candidate,
4456 }
4457}
4458
4459fn value_as_signal_string(value: &Value) -> Option<String> {
4460 match value {
4461 Value::String(raw) => {
4462 let normalized = raw.trim();
4463 if normalized.is_empty() {
4464 None
4465 } else {
4466 Some(normalized.to_string())
4467 }
4468 }
4469 Value::Object(_) => {
4470 let serialized = serde_json::to_string(value).ok()?;
4471 let normalized = serialized.trim();
4472 if normalized.is_empty() {
4473 None
4474 } else {
4475 Some(normalized.to_string())
4476 }
4477 }
4478 Value::Null => None,
4479 other => {
4480 let rendered = other.to_string();
4481 let normalized = rendered.trim();
4482 if normalized.is_empty() {
4483 None
4484 } else {
4485 Some(normalized.to_string())
4486 }
4487 }
4488 }
4489}
4490
/// Collect changed file paths from a unified diff.
///
/// Looks at `+++ b/<path>` target headers (skipping `/dev/null` for deleted
/// files) and `diff --git a/<l> b/<r>` headers, returning the deduplicated
/// paths in `BTreeSet` (lexicographic) order.
fn parse_diff_changed_files(payload: &str) -> Vec<String> {
    let mut files = BTreeSet::new();
    for raw in payload.lines() {
        let trimmed = raw.trim();
        if let Some(rest) = trimmed.strip_prefix("+++ b/") {
            let candidate = rest.trim();
            if !(candidate.is_empty() || candidate == "/dev/null") {
                files.insert(candidate.to_owned());
            }
        } else if let Some(rest) = trimmed.strip_prefix("diff --git a/") {
            if let Some((_, after_b)) = rest.split_once(" b/") {
                let candidate = after_b.trim();
                if !candidate.is_empty() {
                    files.insert(candidate.to_owned());
                }
            }
        }
    }
    files.into_iter().collect()
}
4513
/// Remove a surrounding markdown code fence (```...```) from a diff payload.
///
/// Input that does not begin with a fence is only trimmed. Otherwise the
/// opening fence line is dropped, a trailing ``` line (if any) is dropped,
/// and the remainder is re-joined and trimmed.
fn strip_diff_code_fence(payload: &str) -> String {
    let body = payload.trim();
    if !body.starts_with("```") {
        return body.to_string();
    }
    let mut remaining: Vec<&str> = body.lines().skip(1).collect();
    let ends_with_fence = remaining
        .last()
        .map(|line| line.trim() == "```")
        .unwrap_or(false);
    if ends_with_fence {
        remaining.pop();
    }
    remaining.join("\n").trim().to_string()
}
4533
4534fn synthetic_diff_for_capsule(capsule: &EvoMapCapsuleAsset) -> String {
4535 let file_path = format!("docs/evomap_builtin_capsules/{}.md", capsule.id);
4536 let mut content = Vec::new();
4537 content.push(format!("# EvoMap Builtin Capsule {}", capsule.id));
4538 if capsule.summary.trim().is_empty() {
4539 content.push("summary: missing".to_string());
4540 } else {
4541 content.push(format!("summary: {}", capsule.summary.trim()));
4542 }
4543 if !capsule.trigger.is_empty() {
4544 content.push(format!("trigger: {}", capsule.trigger.join(", ")));
4545 }
4546 content.push(format!("gene: {}", capsule.gene));
4547 let added = content
4548 .into_iter()
4549 .map(|line| format!("+{}", line.replace('\r', "")))
4550 .collect::<Vec<_>>()
4551 .join("\n");
4552 format!(
4553 "diff --git a/{file_path} b/{file_path}\nnew file mode 100644\nindex 0000000..1111111\n--- /dev/null\n+++ b/{file_path}\n@@ -0,0 +1,{line_count} @@\n{added}\n",
4554 line_count = added.lines().count()
4555 )
4556}
4557
4558fn normalized_diff_payload(capsule: &EvoMapCapsuleAsset) -> String {
4559 if let Some(raw) = capsule.diff.as_deref() {
4560 let normalized = strip_diff_code_fence(raw);
4561 if !normalized.trim().is_empty() {
4562 return normalized;
4563 }
4564 }
4565 synthetic_diff_for_capsule(capsule)
4566}
4567
4568fn env_field(value: Option<&Value>, keys: &[&str]) -> Option<String> {
4569 let object = value?.as_object()?;
4570 keys.iter().find_map(|key| {
4571 object
4572 .get(*key)
4573 .and_then(Value::as_str)
4574 .map(str::trim)
4575 .filter(|value| !value.is_empty())
4576 .map(|value| value.to_string())
4577 })
4578}
4579
4580fn map_evomap_env_fingerprint(value: Option<&Value>) -> EnvFingerprint {
4581 let os =
4582 env_field(value, &["os", "platform", "os_release"]).unwrap_or_else(|| "unknown".into());
4583 let target_triple = env_field(value, &["target_triple"]).unwrap_or_else(|| {
4584 let arch = env_field(value, &["arch"]).unwrap_or_else(|| "unknown".into());
4585 format!("{arch}-unknown-{os}")
4586 });
4587 EnvFingerprint {
4588 rustc_version: env_field(value, &["runtime", "rustc_version", "node_version"])
4589 .unwrap_or_else(|| "unknown".into()),
4590 cargo_lock_hash: env_field(value, &["cargo_lock_hash"]).unwrap_or_else(|| "unknown".into()),
4591 target_triple,
4592 os,
4593 }
4594}
4595
4596fn load_evomap_builtin_assets() -> Result<Option<BuiltinAssetBundle>, EvoKernelError> {
4597 let genes_raw = read_evomap_snapshot(EVOMAP_SNAPSHOT_GENES_FILE)?;
4598 let capsules_raw = read_evomap_snapshot(EVOMAP_SNAPSHOT_CAPSULES_FILE)?;
4599 let (Some(genes_raw), Some(capsules_raw)) = (genes_raw, capsules_raw) else {
4600 return Ok(None);
4601 };
4602
4603 let genes_doc: EvoMapGeneDocument = serde_json::from_str(&genes_raw).map_err(|err| {
4604 EvoKernelError::Validation(format!("failed to parse EvoMap genes snapshot: {err}"))
4605 })?;
4606 let capsules_doc: EvoMapCapsuleDocument =
4607 serde_json::from_str(&capsules_raw).map_err(|err| {
4608 EvoKernelError::Validation(format!("failed to parse EvoMap capsules snapshot: {err}"))
4609 })?;
4610
4611 let mut genes = Vec::new();
4612 let mut known_gene_ids = BTreeSet::new();
4613 for source in genes_doc.genes {
4614 let EvoMapGeneAsset {
4615 id,
4616 category,
4617 signals_match,
4618 strategy,
4619 validation,
4620 constraints,
4621 model_name,
4622 schema_version,
4623 compatibility,
4624 } = source;
4625 let gene_id = id.trim();
4626 if gene_id.is_empty() {
4627 return Err(EvoKernelError::Validation(
4628 "EvoMap snapshot gene id must not be empty".into(),
4629 ));
4630 }
4631 if !known_gene_ids.insert(gene_id.to_string()) {
4632 continue;
4633 }
4634
4635 let mut seen_signals = BTreeSet::new();
4636 let mut signals = Vec::new();
4637 for signal in signals_match {
4638 let Some(normalized) = value_as_signal_string(&signal) else {
4639 continue;
4640 };
4641 if seen_signals.insert(normalized.clone()) {
4642 signals.push(normalized);
4643 }
4644 }
4645 if signals.is_empty() {
4646 signals.push(format!("gene:{}", gene_id.to_ascii_lowercase()));
4647 }
4648
4649 let mut strategy = strategy
4650 .into_iter()
4651 .map(|item| item.trim().to_string())
4652 .filter(|item| !item.is_empty())
4653 .collect::<Vec<_>>();
4654 if strategy.is_empty() {
4655 strategy.push("evomap strategy missing in snapshot".into());
4656 }
4657 let constraint = constraints.unwrap_or_default();
4658 let compat_state = compatibility_state_from_value(compatibility.as_ref())
4659 .unwrap_or_else(|| "candidate".to_string());
4660 ensure_strategy_metadata(&mut strategy, "asset_origin", "builtin_evomap");
4661 ensure_strategy_metadata(
4662 &mut strategy,
4663 "evomap_category",
4664 category.as_deref().unwrap_or("unknown"),
4665 );
4666 ensure_strategy_metadata(
4667 &mut strategy,
4668 "evomap_constraints_max_files",
4669 &constraint.max_files.unwrap_or_default().to_string(),
4670 );
4671 ensure_strategy_metadata(
4672 &mut strategy,
4673 "evomap_constraints_forbidden_paths",
4674 &constraint.forbidden_paths.join("|"),
4675 );
4676 ensure_strategy_metadata(
4677 &mut strategy,
4678 "evomap_model_name",
4679 model_name.as_deref().unwrap_or("unknown"),
4680 );
4681 ensure_strategy_metadata(
4682 &mut strategy,
4683 "evomap_schema_version",
4684 schema_version.as_deref().unwrap_or("1.5.0"),
4685 );
4686 ensure_strategy_metadata(&mut strategy, "evomap_compatibility_state", &compat_state);
4687
4688 let mut validation = validation
4689 .into_iter()
4690 .map(|item| item.trim().to_string())
4691 .filter(|item| !item.is_empty())
4692 .collect::<Vec<_>>();
4693 if validation.is_empty() {
4694 validation.push("evomap-builtin-seed".into());
4695 }
4696
4697 genes.push(Gene {
4698 id: gene_id.to_string(),
4699 signals,
4700 strategy,
4701 validation,
4702 state: map_evomap_state(compatibility.as_ref()),
4703 });
4704 }
4705
4706 let mut capsules = Vec::new();
4707 let known_gene_ids = genes
4708 .iter()
4709 .map(|gene| gene.id.clone())
4710 .collect::<BTreeSet<_>>();
4711 for source in capsules_doc.capsules {
4712 let EvoMapCapsuleAsset {
4713 id,
4714 gene,
4715 trigger,
4716 summary,
4717 diff,
4718 confidence,
4719 outcome,
4720 blast_radius,
4721 content,
4722 env_fingerprint,
4723 model_name: _model_name,
4724 schema_version: _schema_version,
4725 compatibility,
4726 } = source;
4727 let source_for_diff = EvoMapCapsuleAsset {
4728 id: id.clone(),
4729 gene: gene.clone(),
4730 trigger: trigger.clone(),
4731 summary: summary.clone(),
4732 diff,
4733 confidence,
4734 outcome: outcome.clone(),
4735 blast_radius: blast_radius.clone(),
4736 content: content.clone(),
4737 env_fingerprint: env_fingerprint.clone(),
4738 model_name: None,
4739 schema_version: None,
4740 compatibility: compatibility.clone(),
4741 };
4742 if !known_gene_ids.contains(gene.as_str()) {
4743 return Err(EvoKernelError::Validation(format!(
4744 "EvoMap capsule {} references unknown gene {}",
4745 id, gene
4746 )));
4747 }
4748 let normalized_diff = normalized_diff_payload(&source_for_diff);
4749 if normalized_diff.trim().is_empty() {
4750 return Err(EvoKernelError::Validation(format!(
4751 "EvoMap capsule {} has empty normalized diff payload",
4752 id
4753 )));
4754 }
4755 let mut changed_files = content
4756 .as_ref()
4757 .map(|content| {
4758 content
4759 .changed_files
4760 .iter()
4761 .map(|item| item.trim().to_string())
4762 .filter(|item| !item.is_empty())
4763 .collect::<Vec<_>>()
4764 })
4765 .unwrap_or_default();
4766 if changed_files.is_empty() {
4767 changed_files = parse_diff_changed_files(&normalized_diff);
4768 }
4769 if changed_files.is_empty() {
4770 changed_files.push(format!("docs/evomap_builtin_capsules/{}.md", id));
4771 }
4772
4773 let confidence = confidence
4774 .or_else(|| outcome.as_ref().and_then(|outcome| outcome.score))
4775 .unwrap_or(0.6)
4776 .clamp(0.0, 1.0);
4777 let status_success = outcome
4778 .as_ref()
4779 .and_then(|outcome| outcome.status.as_deref())
4780 .map(|status| status.eq_ignore_ascii_case("success"))
4781 .unwrap_or(true);
4782 let blast_radius = blast_radius.unwrap_or_default();
4783 let mutation_id = format!("builtin-evomap-mutation-{}", id);
4784 let intent = MutationIntent {
4785 id: mutation_id.clone(),
4786 intent: if summary.trim().is_empty() {
4787 format!("apply EvoMap capsule {}", id)
4788 } else {
4789 summary.trim().to_string()
4790 },
4791 target: MutationTarget::Paths {
4792 allow: changed_files.clone(),
4793 },
4794 expected_effect: format!("seed replay candidate from EvoMap capsule {}", id),
4795 risk: RiskLevel::Low,
4796 signals: if trigger.is_empty() {
4797 vec![format!("capsule:{}", id.to_ascii_lowercase())]
4798 } else {
4799 trigger
4800 .iter()
4801 .map(|signal| signal.trim().to_ascii_lowercase())
4802 .filter(|signal| !signal.is_empty())
4803 .collect::<Vec<_>>()
4804 },
4805 spec_id: None,
4806 };
4807 let mutation = PreparedMutation {
4808 intent,
4809 artifact: oris_evolution::MutationArtifact {
4810 encoding: ArtifactEncoding::UnifiedDiff,
4811 payload: normalized_diff.clone(),
4812 base_revision: None,
4813 content_hash: compute_artifact_hash(&normalized_diff),
4814 },
4815 };
4816 let capsule = Capsule {
4817 id: id.clone(),
4818 gene_id: gene.clone(),
4819 mutation_id,
4820 run_id: EVOMAP_BUILTIN_RUN_ID.to_string(),
4821 diff_hash: compute_artifact_hash(&normalized_diff),
4822 confidence,
4823 env: map_evomap_env_fingerprint(env_fingerprint.as_ref()),
4824 outcome: Outcome {
4825 success: status_success,
4826 validation_profile: "evomap-builtin-seed".into(),
4827 validation_duration_ms: 0,
4828 changed_files,
4829 validator_hash: "builtin-evomap".into(),
4830 lines_changed: blast_radius.lines,
4831 replay_verified: false,
4832 },
4833 state: map_evomap_state(compatibility.as_ref()),
4834 };
4835 capsules.push(BuiltinCapsuleSeed { capsule, mutation });
4836 }
4837
4838 Ok(Some(BuiltinAssetBundle { genes, capsules }))
4839}
4840
/// Seed the store with built-in experience assets (hard-coded genes plus the
/// optional EvoMap snapshot bundle), skipping anything already present.
///
/// Emits the same event shape a remote import would (`RemoteAssetImported`
/// with `CandidateSource::Local`, projection/commit, then a state
/// transition), so built-ins flow through the ordinary asset lifecycle. The
/// returned `ImportOutcome` audit counts cover only the built-in bundle.
///
/// # Errors
/// `EvoKernelError::Validation` when a built-in capsule references a gene
/// that is neither built-in nor already in the store; store append failures
/// are surfaced via `store_err`.
fn ensure_builtin_experience_assets_in_store(
    store: &dyn EvolutionStore,
    sender_id: String,
) -> Result<ImportOutcome, EvoKernelError> {
    // Snapshot current store contents so seeding is idempotent.
    let (events, projection) = scan_projection(store)?;
    let mut known_gene_ids = projection
        .genes
        .into_iter()
        .map(|gene| gene.id)
        .collect::<BTreeSet<_>>();
    let mut known_capsule_ids = projection
        .capsules
        .into_iter()
        .map(|capsule| capsule.id)
        .collect::<BTreeSet<_>>();
    // Mutation ids are tracked from the raw event log, not the projection.
    let mut known_mutation_ids = BTreeSet::new();
    for stored in &events {
        if let EvolutionEvent::MutationDeclared { mutation } = &stored.event {
            known_mutation_ids.insert(mutation.intent.id.clone());
        }
    }
    let normalized_sender = normalized_sender_id(&sender_id);
    let mut imported_asset_ids = Vec::new();
    let mut bundle = BuiltinAssetBundle {
        genes: built_in_experience_genes(),
        capsules: Vec::new(),
    };
    // Snapshot assets (when bundled) extend the hard-coded baseline.
    if let Some(snapshot_bundle) = load_evomap_builtin_assets()? {
        bundle.genes.extend(snapshot_bundle.genes);
        bundle.capsules.extend(snapshot_bundle.capsules);
    }
    let scanned_count = bundle.genes.len() + bundle.capsules.len();

    for gene in bundle.genes {
        // Already known (stored or earlier in this bundle) -> skip.
        if !known_gene_ids.insert(gene.id.clone()) {
            continue;
        }

        store
            .append_event(EvolutionEvent::RemoteAssetImported {
                source: CandidateSource::Local,
                asset_ids: vec![gene.id.clone()],
                sender_id: normalized_sender.clone(),
            })
            .map_err(store_err)?;
        store
            .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
            .map_err(store_err)?;
        // Map the seed state onto an explicit lifecycle transition.
        match gene.state {
            AssetState::Revoked | AssetState::Archived => {}
            AssetState::Quarantined | AssetState::ShadowValidated => {
                store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: gene.id.clone(),
                        state: AssetState::Quarantined,
                        reason:
                            "built-in EvoMap asset requires additional validation before promotion"
                                .into(),
                        reason_code: TransitionReasonCode::DowngradeBuiltinRequiresValidation,
                        evidence: None,
                    })
                    .map_err(store_err)?;
            }
            AssetState::Promoted | AssetState::Candidate => {
                store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: gene.id.clone(),
                        state: AssetState::Promoted,
                        reason: "built-in experience asset promoted for cold-start compatibility"
                            .into(),
                        reason_code: TransitionReasonCode::PromotionBuiltinColdStartCompatibility,
                        evidence: None,
                    })
                    .map_err(store_err)?;
                store
                    .append_event(EvolutionEvent::GenePromoted {
                        gene_id: gene.id.clone(),
                    })
                    .map_err(store_err)?;
            }
        }
        imported_asset_ids.push(gene.id.clone());
    }

    for seed in bundle.capsules {
        if !known_gene_ids.contains(seed.capsule.gene_id.as_str()) {
            return Err(EvoKernelError::Validation(format!(
                "built-in capsule {} references unknown gene {}",
                seed.capsule.id, seed.capsule.gene_id
            )));
        }
        // Declare the backing mutation once, even if the capsule is skipped.
        if known_mutation_ids.insert(seed.mutation.intent.id.clone()) {
            store
                .append_event(EvolutionEvent::MutationDeclared {
                    mutation: seed.mutation.clone(),
                })
                .map_err(store_err)?;
        }
        if !known_capsule_ids.insert(seed.capsule.id.clone()) {
            continue;
        }
        store
            .append_event(EvolutionEvent::RemoteAssetImported {
                source: CandidateSource::Local,
                asset_ids: vec![seed.capsule.id.clone()],
                sender_id: normalized_sender.clone(),
            })
            .map_err(store_err)?;
        store
            .append_event(EvolutionEvent::CapsuleCommitted {
                capsule: seed.capsule.clone(),
            })
            .map_err(store_err)?;
        match seed.capsule.state {
            AssetState::Revoked | AssetState::Archived => {}
            AssetState::Quarantined | AssetState::ShadowValidated => {
                store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: seed.capsule.id.clone(),
                    })
                    .map_err(store_err)?;
            }
            AssetState::Promoted | AssetState::Candidate => {
                store
                    .append_event(EvolutionEvent::CapsuleReleased {
                        capsule_id: seed.capsule.id.clone(),
                        state: AssetState::Promoted,
                    })
                    .map_err(store_err)?;
            }
        }
        imported_asset_ids.push(seed.capsule.id.clone());
    }

    let next_cursor = latest_store_cursor(store)?;
    let resume_token = next_cursor.as_ref().and_then(|cursor| {
        normalized_sender
            .as_deref()
            .map(|sender| encode_resume_token(sender, cursor))
    });
    let applied_count = imported_asset_ids.len();
    let skipped_count = scanned_count.saturating_sub(applied_count);

    Ok(ImportOutcome {
        imported_asset_ids,
        accepted: true,
        next_cursor: next_cursor.clone(),
        resume_token,
        sync_audit: SyncAudit {
            batch_id: next_id("sync-import"),
            requested_cursor: None,
            scanned_count,
            applied_count,
            skipped_count,
            failed_count: 0,
            failure_reasons: Vec::new(),
        },
    })
}
5002
/// Look up the first non-empty value for `key` among `key=value` strategy
/// entries. Key comparison is trimmed and ASCII-case-insensitive; entries
/// without `=` or with an empty value are skipped.
fn strategy_metadata_value(strategy: &[String], key: &str) -> Option<String> {
    for entry in strategy {
        let Some((entry_key, entry_value)) = entry.split_once('=') else {
            continue;
        };
        if !entry_key.trim().eq_ignore_ascii_case(key) {
            continue;
        }
        let trimmed = entry_value.trim();
        if !trimmed.is_empty() {
            return Some(trimmed.to_string());
        }
    }
    None
}
5018
5019fn ensure_strategy_metadata(strategy: &mut Vec<String>, key: &str, value: &str) {
5020 let normalized = value.trim();
5021 if normalized.is_empty() || strategy_metadata_value(strategy, key).is_some() {
5022 return;
5023 }
5024 strategy.push(format!("{key}={normalized}"));
5025}
5026
/// Cap the number of promoted reported-experience genes per task class.
///
/// Keeps the `keep_latest` most recently updated promoted genes whose
/// strategy metadata marks them as `asset_origin=reported_experience` for
/// `task_class`; every older one is revoked and its capsules quarantined.
/// A blank task class or a zero limit disables enforcement.
fn enforce_reported_experience_retention(
    store: &dyn EvolutionStore,
    task_class: &str,
    keep_latest: usize,
) -> Result<(), EvoKernelError> {
    let task_class = task_class.trim();
    if task_class.is_empty() || keep_latest == 0 {
        return Ok(());
    }

    let (_, projection) = scan_projection(store)?;
    // Candidate genes: promoted, reported-experience origin, matching class.
    let mut candidates = projection
        .genes
        .iter()
        .filter(|gene| gene.state == AssetState::Promoted)
        .filter_map(|gene| {
            let origin = strategy_metadata_value(&gene.strategy, "asset_origin")?;
            if !origin.eq_ignore_ascii_case("reported_experience") {
                return None;
            }
            let gene_task_class = strategy_metadata_value(&gene.strategy, "task_class")?;
            if !gene_task_class.eq_ignore_ascii_case(task_class) {
                return None;
            }
            // Missing timestamps sort as oldest (default), so untracked genes
            // are evicted first.
            let updated_at = projection
                .last_updated_at
                .get(&gene.id)
                .cloned()
                .unwrap_or_default();
            Some((gene.id.clone(), updated_at))
        })
        .collect::<Vec<_>>();
    if candidates.len() <= keep_latest {
        return Ok(());
    }

    // Newest first; ties broken by descending id for determinism.
    candidates.sort_by(|left, right| right.1.cmp(&left.1).then_with(|| right.0.cmp(&left.0)));
    let stale_gene_ids = candidates
        .into_iter()
        .skip(keep_latest)
        .map(|(gene_id, _)| gene_id)
        .collect::<BTreeSet<_>>();
    if stale_gene_ids.is_empty() {
        return Ok(());
    }

    let reason =
        format!("reported experience retention limit exceeded for task_class={task_class}");
    for gene_id in &stale_gene_ids {
        store
            .append_event(EvolutionEvent::GeneRevoked {
                gene_id: gene_id.clone(),
                reason: reason.clone(),
            })
            .map_err(store_err)?;
    }

    // Quarantine every capsule that belongs to a revoked gene.
    let stale_capsule_ids = projection
        .capsules
        .iter()
        .filter(|capsule| stale_gene_ids.contains(&capsule.gene_id))
        .map(|capsule| capsule.id.clone())
        .collect::<BTreeSet<_>>();
    for capsule_id in stale_capsule_ids {
        store
            .append_event(EvolutionEvent::CapsuleQuarantined { capsule_id })
            .map_err(store_err)?;
    }
    Ok(())
}
5097
/// Persist a locally reported reusable experience as a promoted gene.
///
/// Normalizes the reported signals (trimmed, lowercased, deduplicated),
/// strategy, and validation entries; injects
/// `asset_origin`/`task_class`/`task_label` metadata; appends the
/// import + projection + promotion event sequence; then enforces the
/// per-task-class retention cap.
///
/// # Errors
/// `EvoKernelError::Validation` when `gene_id` or the normalized signal list
/// is empty; store append failures are surfaced via `store_err`.
fn record_reported_experience_in_store(
    store: &dyn EvolutionStore,
    sender_id: String,
    gene_id: String,
    signals: Vec<String>,
    strategy: Vec<String>,
    validation: Vec<String>,
) -> Result<ImportOutcome, EvoKernelError> {
    let gene_id = gene_id.trim();
    if gene_id.is_empty() {
        return Err(EvoKernelError::Validation(
            "reported experience gene_id must not be empty".into(),
        ));
    }

    // Signals: lowercase + dedupe, keeping first-seen order.
    let mut unique_signals = BTreeSet::new();
    let mut normalized_signals = Vec::new();
    for signal in signals {
        let normalized = signal.trim().to_ascii_lowercase();
        if normalized.is_empty() {
            continue;
        }
        if unique_signals.insert(normalized.clone()) {
            normalized_signals.push(normalized);
        }
    }
    if normalized_signals.is_empty() {
        return Err(EvoKernelError::Validation(
            "reported experience signals must not be empty".into(),
        ));
    }

    // Strategy: trim + dedupe (case preserved), with a placeholder fallback.
    let mut unique_strategy = BTreeSet::new();
    let mut normalized_strategy = Vec::new();
    for entry in strategy {
        let normalized = entry.trim().to_string();
        if normalized.is_empty() {
            continue;
        }
        if unique_strategy.insert(normalized.clone()) {
            normalized_strategy.push(normalized);
        }
    }
    if normalized_strategy.is_empty() {
        normalized_strategy.push("reported local replay experience".into());
    }
    // Derive task class/label from metadata, then first signal, then fixed
    // defaults, and write them back so downstream consumers always find them.
    let task_class_id = strategy_metadata_value(&normalized_strategy, "task_class")
        .or_else(|| normalized_signals.first().cloned())
        .unwrap_or_else(|| "reported-experience".into());
    let task_label = strategy_metadata_value(&normalized_strategy, "task_label")
        .or_else(|| normalized_signals.first().cloned())
        .unwrap_or_else(|| task_class_id.clone());
    ensure_strategy_metadata(
        &mut normalized_strategy,
        "asset_origin",
        "reported_experience",
    );
    ensure_strategy_metadata(&mut normalized_strategy, "task_class", &task_class_id);
    ensure_strategy_metadata(&mut normalized_strategy, "task_label", &task_label);

    // Validation tags: trim + dedupe with a default provenance tag.
    let mut unique_validation = BTreeSet::new();
    let mut normalized_validation = Vec::new();
    for entry in validation {
        let normalized = entry.trim().to_string();
        if normalized.is_empty() {
            continue;
        }
        if unique_validation.insert(normalized.clone()) {
            normalized_validation.push(normalized);
        }
    }
    if normalized_validation.is_empty() {
        normalized_validation.push("a2a.tasks.report".into());
    }

    let gene = Gene {
        id: gene_id.to_string(),
        signals: normalized_signals,
        strategy: normalized_strategy,
        validation: normalized_validation,
        state: AssetState::Promoted,
    };
    let normalized_sender = normalized_sender_id(&sender_id);

    // Event sequence mirrors a local import followed by direct promotion.
    store
        .append_event(EvolutionEvent::RemoteAssetImported {
            source: CandidateSource::Local,
            asset_ids: vec![gene.id.clone()],
            sender_id: normalized_sender.clone(),
        })
        .map_err(store_err)?;
    store
        .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
        .map_err(store_err)?;
    store
        .append_event(EvolutionEvent::PromotionEvaluated {
            gene_id: gene.id.clone(),
            state: AssetState::Promoted,
            reason: "trusted local report promoted reusable experience".into(),
            reason_code: TransitionReasonCode::PromotionTrustedLocalReport,
            evidence: None,
        })
        .map_err(store_err)?;
    store
        .append_event(EvolutionEvent::GenePromoted {
            gene_id: gene.id.clone(),
        })
        .map_err(store_err)?;
    // Evict older reported-experience genes for this class beyond the cap.
    enforce_reported_experience_retention(
        store,
        &task_class_id,
        REPORTED_EXPERIENCE_RETENTION_LIMIT,
    )?;

    let imported_asset_ids = vec![gene.id];
    let next_cursor = latest_store_cursor(store)?;
    let resume_token = next_cursor.as_ref().and_then(|cursor| {
        normalized_sender
            .as_deref()
            .map(|sender| encode_resume_token(sender, cursor))
    });
    Ok(ImportOutcome {
        imported_asset_ids,
        accepted: true,
        next_cursor,
        resume_token,
        sync_audit: SyncAudit {
            batch_id: next_id("sync-import"),
            requested_cursor: None,
            scanned_count: 1,
            applied_count: 1,
            skipped_count: 0,
            failed_count: 0,
            failure_reasons: Vec::new(),
        },
    })
}
5235
/// Trims `sender_id` and returns the owned result, or `None` when the input
/// is empty or whitespace-only.
fn normalized_sender_id(sender_id: &str) -> Option<String> {
    let trimmed = sender_id.trim();
    (!trimmed.is_empty()).then(|| trimmed.to_string())
}
5244
5245fn record_manifest_validation(
5246 store: &dyn EvolutionStore,
5247 envelope: &EvolutionEnvelope,
5248 accepted: bool,
5249 reason: impl Into<String>,
5250) -> Result<(), EvoKernelError> {
5251 let manifest = envelope.manifest.as_ref();
5252 let sender_id = manifest
5253 .and_then(|value| normalized_sender_id(&value.sender_id))
5254 .or_else(|| normalized_sender_id(&envelope.sender_id));
5255 let publisher = manifest.and_then(|value| normalized_sender_id(&value.publisher));
5256 let asset_ids = manifest
5257 .map(|value| value.asset_ids.clone())
5258 .unwrap_or_else(|| EvolutionEnvelope::manifest_asset_ids(&envelope.assets));
5259
5260 store
5261 .append_event(EvolutionEvent::ManifestValidated {
5262 accepted,
5263 reason: reason.into(),
5264 sender_id,
5265 publisher,
5266 asset_ids,
5267 })
5268 .map_err(store_err)?;
5269 Ok(())
5270}
5271
5272fn record_remote_publisher_for_asset(
5273 remote_publishers: Option<&Mutex<BTreeMap<String, String>>>,
5274 sender_id: &str,
5275 asset: &NetworkAsset,
5276) {
5277 let Some(remote_publishers) = remote_publishers else {
5278 return;
5279 };
5280 let sender_id = sender_id.trim();
5281 if sender_id.is_empty() {
5282 return;
5283 }
5284 let Ok(mut publishers) = remote_publishers.lock() else {
5285 return;
5286 };
5287 match asset {
5288 NetworkAsset::Gene { gene } => {
5289 publishers.insert(gene.id.clone(), sender_id.to_string());
5290 }
5291 NetworkAsset::Capsule { capsule } => {
5292 publishers.insert(capsule.id.clone(), sender_id.to_string());
5293 }
5294 NetworkAsset::EvolutionEvent { .. } => {}
5295 }
5296}
5297
5298fn remote_publishers_by_asset_from_store(store: &dyn EvolutionStore) -> BTreeMap<String, String> {
5299 let Ok(events) = store.scan(1) else {
5300 return BTreeMap::new();
5301 };
5302 remote_publishers_by_asset_from_events(&events)
5303}
5304
5305fn remote_publishers_by_asset_from_events(
5306 events: &[StoredEvolutionEvent],
5307) -> BTreeMap<String, String> {
5308 let mut imported_asset_publishers = BTreeMap::<String, String>::new();
5309 let mut known_gene_ids = BTreeSet::<String>::new();
5310 let mut known_capsule_ids = BTreeSet::<String>::new();
5311 let mut publishers_by_asset = BTreeMap::<String, String>::new();
5312
5313 for stored in events {
5314 match &stored.event {
5315 EvolutionEvent::RemoteAssetImported {
5316 source: CandidateSource::Remote,
5317 asset_ids,
5318 sender_id,
5319 } => {
5320 let Some(sender_id) = sender_id.as_deref().and_then(normalized_sender_id) else {
5321 continue;
5322 };
5323 for asset_id in asset_ids {
5324 imported_asset_publishers.insert(asset_id.clone(), sender_id.clone());
5325 if known_gene_ids.contains(asset_id) || known_capsule_ids.contains(asset_id) {
5326 publishers_by_asset.insert(asset_id.clone(), sender_id.clone());
5327 }
5328 }
5329 }
5330 EvolutionEvent::GeneProjected { gene } => {
5331 known_gene_ids.insert(gene.id.clone());
5332 if let Some(sender_id) = imported_asset_publishers.get(&gene.id) {
5333 publishers_by_asset.insert(gene.id.clone(), sender_id.clone());
5334 }
5335 }
5336 EvolutionEvent::CapsuleCommitted { capsule } => {
5337 known_capsule_ids.insert(capsule.id.clone());
5338 if let Some(sender_id) = imported_asset_publishers.get(&capsule.id) {
5339 publishers_by_asset.insert(capsule.id.clone(), sender_id.clone());
5340 }
5341 }
5342 _ => {}
5343 }
5344 }
5345
5346 publishers_by_asset
5347}
5348
5349fn should_import_remote_event(event: &EvolutionEvent) -> bool {
5350 matches!(
5351 event,
5352 EvolutionEvent::MutationDeclared { .. } | EvolutionEvent::SpecLinked { .. }
5353 )
5354}
5355
/// Serves a network fetch request from the local evolution store.
///
/// Promoted genes are matched against the query's signal terms; the promoted
/// capsules that belong to a matched gene ride along. When the caller
/// supplied a cursor/resume token, the result set is narrowed to the assets
/// changed since that cursor, and the audit reports the narrowed-away count
/// as `skipped_count`.
fn fetch_assets_from_store(
    store: &dyn EvolutionStore,
    responder_id: impl Into<String>,
    query: &FetchQuery,
) -> Result<FetchResponse, EvoKernelError> {
    let (events, projection) = scan_projection(store)?;
    // Explicit cursor and resume token are reconciled by the resolver.
    let requested_cursor = resolve_requested_cursor(
        &query.sender_id,
        query.since_cursor.as_deref(),
        query.resume_token.as_deref(),
    )?;
    // Absent/unparseable cursor means "from the beginning" (seq 0).
    let since_seq = requested_cursor
        .as_deref()
        .and_then(parse_sync_cursor_seq)
        .unwrap_or(0);
    // Signals are compared trimmed and case-insensitively.
    let normalized_signals: Vec<String> = query
        .signals
        .iter()
        .map(|signal| signal.trim().to_ascii_lowercase())
        .filter(|signal| !signal.is_empty())
        .collect();
    // No signals = match everything; otherwise a bidirectional substring test
    // (either string containing the other counts as a match).
    let matches_any_signal = |candidate: &str| {
        if normalized_signals.is_empty() {
            return true;
        }
        let candidate = candidate.to_ascii_lowercase();
        normalized_signals
            .iter()
            .any(|signal| candidate.contains(signal) || signal.contains(&candidate))
    };

    // Only promoted genes whose signals match the query are exported.
    let matched_genes: Vec<Gene> = projection
        .genes
        .into_iter()
        .filter(|gene| gene.state == AssetState::Promoted)
        .filter(|gene| gene.signals.iter().any(|signal| matches_any_signal(signal)))
        .collect();
    let matched_gene_ids: BTreeSet<String> =
        matched_genes.iter().map(|gene| gene.id.clone()).collect();
    // Promoted capsules ride along only when their parent gene matched.
    let matched_capsules: Vec<Capsule> = projection
        .capsules
        .into_iter()
        .filter(|capsule| capsule.state == AssetState::Promoted)
        .filter(|capsule| matched_gene_ids.contains(&capsule.gene_id))
        .collect();
    // Full (non-windowed) export, kept for the audit's scanned/skipped counts.
    let all_assets = replay_export_assets(&events, matched_genes.clone(), matched_capsules.clone());
    let (selected_genes, selected_capsules) = if requested_cursor.is_some() {
        // Incremental fetch: keep only assets touched after `since_seq`.
        let delta = delta_window(&events, since_seq);
        let selected_capsules = matched_capsules
            .into_iter()
            .filter(|capsule| {
                delta.changed_capsule_ids.contains(&capsule.id)
                    || delta.changed_mutation_ids.contains(&capsule.mutation_id)
            })
            .collect::<Vec<_>>();
        let selected_gene_ids = selected_capsules
            .iter()
            .map(|capsule| capsule.gene_id.clone())
            .collect::<BTreeSet<_>>();
        // A gene is kept if it changed itself, or a selected capsule needs it.
        let selected_genes = matched_genes
            .into_iter()
            .filter(|gene| {
                delta.changed_gene_ids.contains(&gene.id) || selected_gene_ids.contains(&gene.id)
            })
            .collect::<Vec<_>>();
        (selected_genes, selected_capsules)
    } else {
        // Full fetch: everything that matched.
        (matched_genes, matched_capsules)
    };
    let assets = replay_export_assets(&events, selected_genes, selected_capsules);
    // Next cursor points at the latest stored seq; resume token binds it to
    // the requesting sender.
    let next_cursor = events.last().map(|stored| format_sync_cursor(stored.seq));
    let resume_token = next_cursor
        .as_ref()
        .map(|cursor| encode_resume_token(&query.sender_id, cursor));
    let applied_count = assets.len();
    let skipped_count = all_assets.len().saturating_sub(applied_count);

    Ok(FetchResponse {
        sender_id: responder_id.into(),
        assets,
        next_cursor: next_cursor.clone(),
        resume_token,
        sync_audit: SyncAudit {
            batch_id: next_id("sync-fetch"),
            requested_cursor,
            scanned_count: all_assets.len(),
            applied_count,
            skipped_count,
            failed_count: 0,
            failure_reasons: Vec::new(),
        },
    })
}
5449
5450fn revoke_assets_in_store(
5451 store: &dyn EvolutionStore,
5452 notice: &RevokeNotice,
5453) -> Result<RevokeNotice, EvoKernelError> {
5454 let projection = projection_snapshot(store)?;
5455 let requested: BTreeSet<String> = notice
5456 .asset_ids
5457 .iter()
5458 .map(|asset_id| asset_id.trim().to_string())
5459 .filter(|asset_id| !asset_id.is_empty())
5460 .collect();
5461 let mut revoked_gene_ids = BTreeSet::new();
5462 let mut quarantined_capsule_ids = BTreeSet::new();
5463
5464 for gene in &projection.genes {
5465 if requested.contains(&gene.id) {
5466 revoked_gene_ids.insert(gene.id.clone());
5467 }
5468 }
5469 for capsule in &projection.capsules {
5470 if requested.contains(&capsule.id) {
5471 quarantined_capsule_ids.insert(capsule.id.clone());
5472 revoked_gene_ids.insert(capsule.gene_id.clone());
5473 }
5474 }
5475 for capsule in &projection.capsules {
5476 if revoked_gene_ids.contains(&capsule.gene_id) {
5477 quarantined_capsule_ids.insert(capsule.id.clone());
5478 }
5479 }
5480
5481 for gene_id in &revoked_gene_ids {
5482 store
5483 .append_event(EvolutionEvent::GeneRevoked {
5484 gene_id: gene_id.clone(),
5485 reason: notice.reason.clone(),
5486 })
5487 .map_err(store_err)?;
5488 }
5489 for capsule_id in &quarantined_capsule_ids {
5490 store
5491 .append_event(EvolutionEvent::CapsuleQuarantined {
5492 capsule_id: capsule_id.clone(),
5493 })
5494 .map_err(store_err)?;
5495 }
5496
5497 let mut affected_ids: Vec<String> = revoked_gene_ids.into_iter().collect();
5498 affected_ids.extend(quarantined_capsule_ids);
5499 affected_ids.sort();
5500 affected_ids.dedup();
5501
5502 Ok(RevokeNotice {
5503 sender_id: notice.sender_id.clone(),
5504 asset_ids: affected_ids,
5505 reason: notice.reason.clone(),
5506 })
5507}
5508
5509fn evolution_metrics_snapshot(
5510 store: &dyn EvolutionStore,
5511) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
5512 let (events, projection) = scan_projection(store)?;
5513 let replay = collect_replay_roi_aggregate(&events, &projection, None);
5514 let replay_reasoning_avoided_total = replay.replay_success_total;
5515 let confidence_revalidations_total = events
5516 .iter()
5517 .filter(|stored| is_confidence_revalidation_event(&stored.event))
5518 .count() as u64;
5519 let mutation_declared_total = events
5520 .iter()
5521 .filter(|stored| matches!(stored.event, EvolutionEvent::MutationDeclared { .. }))
5522 .count() as u64;
5523 let promoted_mutations_total = events
5524 .iter()
5525 .filter(|stored| matches!(stored.event, EvolutionEvent::GenePromoted { .. }))
5526 .count() as u64;
5527 let gene_revocations_total = events
5528 .iter()
5529 .filter(|stored| matches!(stored.event, EvolutionEvent::GeneRevoked { .. }))
5530 .count() as u64;
5531 let cutoff = Utc::now() - Duration::hours(1);
5532 let mutation_velocity_last_hour = count_recent_events(&events, cutoff, |event| {
5533 matches!(event, EvolutionEvent::MutationDeclared { .. })
5534 });
5535 let revoke_frequency_last_hour = count_recent_events(&events, cutoff, |event| {
5536 matches!(event, EvolutionEvent::GeneRevoked { .. })
5537 });
5538 let promoted_genes = projection
5539 .genes
5540 .iter()
5541 .filter(|gene| gene.state == AssetState::Promoted)
5542 .count() as u64;
5543 let promoted_capsules = projection
5544 .capsules
5545 .iter()
5546 .filter(|capsule| capsule.state == AssetState::Promoted)
5547 .count() as u64;
5548
5549 Ok(EvolutionMetricsSnapshot {
5550 replay_attempts_total: replay.replay_attempts_total,
5551 replay_success_total: replay.replay_success_total,
5552 replay_success_rate: safe_ratio(replay.replay_success_total, replay.replay_attempts_total),
5553 confidence_revalidations_total,
5554 replay_reasoning_avoided_total,
5555 reasoning_avoided_tokens_total: replay.reasoning_avoided_tokens_total,
5556 replay_fallback_cost_total: replay.replay_fallback_cost_total,
5557 replay_roi: compute_replay_roi(
5558 replay.reasoning_avoided_tokens_total,
5559 replay.replay_fallback_cost_total,
5560 ),
5561 replay_task_classes: replay.replay_task_classes,
5562 replay_sources: replay.replay_sources,
5563 mutation_declared_total,
5564 promoted_mutations_total,
5565 promotion_ratio: safe_ratio(promoted_mutations_total, mutation_declared_total),
5566 gene_revocations_total,
5567 mutation_velocity_last_hour,
5568 revoke_frequency_last_hour,
5569 promoted_genes,
5570 promoted_capsules,
5571 last_event_seq: events.last().map(|stored| stored.seq).unwrap_or(0),
5572 })
5573}
5574
/// Replay return-on-investment counters aggregated from the event log by
/// `collect_replay_roi_aggregate`, plus per-task-class and per-source
/// breakdowns.
struct ReplayRoiAggregate {
    // Invariant upheld by the collector: attempts = success + failure.
    replay_attempts_total: u64,
    replay_success_total: u64,
    replay_failure_total: u64,
    // Estimated reasoning tokens saved by replay hits.
    reasoning_avoided_tokens_total: u64,
    // Estimated token cost spent on replay fallbacks.
    replay_fallback_cost_total: u64,
    replay_task_classes: Vec<ReplayTaskClassMetrics>,
    replay_sources: Vec<ReplaySourceRoiMetrics>,
}
5584
/// Aggregates replay ROI counters from the event log, optionally restricted
/// to events at or after `cutoff`.
///
/// Two data paths:
/// - Evidence path: when `ReplayEconomicsRecorded` events exist in scope,
///   totals come from their recorded token figures, broken down per task
///   class and per source sender.
/// - Legacy fallback: with no evidence events, successes are inferred from
///   `CapsuleReused`, failures from replay-validation-failure events, and
///   token figures are estimated with `REPLAY_REASONING_TOKEN_FLOOR`
///   (no per-source breakdown is possible on this path).
fn collect_replay_roi_aggregate(
    events: &[StoredEvolutionEvent],
    projection: &EvolutionProjection,
    cutoff: Option<DateTime<Utc>>,
) -> ReplayRoiAggregate {
    // All in-scope economics evidence, cloned out of the log.
    let replay_evidences = events
        .iter()
        .filter(|stored| replay_event_in_scope(stored, cutoff))
        .filter_map(|stored| match &stored.event {
            EvolutionEvent::ReplayEconomicsRecorded { evidence, .. } => Some(evidence.clone()),
            _ => None,
        })
        .collect::<Vec<_>>();

    // Tuple layout for both maps: (successes, failures, avoided_tokens, fallback_cost).
    let mut task_totals = BTreeMap::<(String, String), (u64, u64, u64, u64)>::new();
    let mut source_totals = BTreeMap::<String, (u64, u64, u64, u64)>::new();

    let (
        replay_success_total,
        replay_failure_total,
        reasoning_avoided_tokens_total,
        replay_fallback_cost_total,
    ) = if replay_evidences.is_empty() {
        // Legacy fallback path: derive task class descriptors from gene signals.
        let gene_task_classes = projection
            .genes
            .iter()
            .map(|gene| (gene.id.clone(), replay_task_descriptor(&gene.signals)))
            .collect::<BTreeMap<_, _>>();
        let mut replay_success_total = 0_u64;
        let mut replay_failure_total = 0_u64;

        for stored in events
            .iter()
            .filter(|stored| replay_event_in_scope(stored, cutoff))
        {
            match &stored.event {
                EvolutionEvent::CapsuleReused { gene_id, .. } => {
                    replay_success_total += 1;
                    // Per-task-class breakdown only when the gene is known.
                    if let Some((task_class_id, task_label)) = gene_task_classes.get(gene_id) {
                        let entry = task_totals
                            .entry((task_class_id.clone(), task_label.clone()))
                            .or_insert((0, 0, 0, 0));
                        entry.0 += 1;
                        // Estimated tokens avoided per successful reuse.
                        entry.2 += REPLAY_REASONING_TOKEN_FLOOR;
                    }
                }
                event if is_replay_validation_failure(event) => {
                    replay_failure_total += 1;
                }
                _ => {}
            }
        }

        // Token totals are floor estimates on this path.
        (
            replay_success_total,
            replay_failure_total,
            replay_success_total * REPLAY_REASONING_TOKEN_FLOOR,
            replay_failure_total * REPLAY_REASONING_TOKEN_FLOOR,
        )
    } else {
        // Evidence path: exact figures from recorded evidence.
        let mut replay_success_total = 0_u64;
        let mut replay_failure_total = 0_u64;
        let mut reasoning_avoided_tokens_total = 0_u64;
        let mut replay_fallback_cost_total = 0_u64;

        for evidence in &replay_evidences {
            if evidence.success {
                replay_success_total += 1;
            } else {
                replay_failure_total += 1;
            }
            reasoning_avoided_tokens_total += evidence.reasoning_avoided_tokens;
            replay_fallback_cost_total += evidence.replay_fallback_cost;

            // Per task class: split success/failure, sum both token figures.
            let entry = task_totals
                .entry((evidence.task_class_id.clone(), evidence.task_label.clone()))
                .or_insert((0, 0, 0, 0));
            if evidence.success {
                entry.0 += 1;
            } else {
                entry.1 += 1;
            }
            entry.2 += evidence.reasoning_avoided_tokens;
            entry.3 += evidence.replay_fallback_cost;

            // Per source sender, only when the evidence names one.
            if let Some(source_sender_id) = evidence.source_sender_id.as_deref() {
                let source_entry = source_totals
                    .entry(source_sender_id.to_string())
                    .or_insert((0, 0, 0, 0));
                if evidence.success {
                    source_entry.0 += 1;
                } else {
                    source_entry.1 += 1;
                }
                source_entry.2 += evidence.reasoning_avoided_tokens;
                source_entry.3 += evidence.replay_fallback_cost;
            }
        }

        (
            replay_success_total,
            replay_failure_total,
            reasoning_avoided_tokens_total,
            replay_fallback_cost_total,
        )
    };

    // Unpack the tuple maps into typed metric rows (BTreeMap iteration keeps
    // output deterministically ordered by key).
    let replay_task_classes = task_totals
        .into_iter()
        .map(
            |(
                (task_class_id, task_label),
                (
                    replay_success_total,
                    replay_failure_total,
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            )| ReplayTaskClassMetrics {
                task_class_id,
                task_label,
                replay_success_total,
                replay_failure_total,
                // One reasoning step avoided per successful replay.
                reasoning_steps_avoided_total: replay_success_total,
                reasoning_avoided_tokens_total,
                replay_fallback_cost_total,
                replay_roi: compute_replay_roi(
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            },
        )
        .collect::<Vec<_>>();
    let replay_sources = source_totals
        .into_iter()
        .map(
            |(
                source_sender_id,
                (
                    replay_success_total,
                    replay_failure_total,
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            )| ReplaySourceRoiMetrics {
                source_sender_id,
                replay_success_total,
                replay_failure_total,
                reasoning_avoided_tokens_total,
                replay_fallback_cost_total,
                replay_roi: compute_replay_roi(
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            },
        )
        .collect::<Vec<_>>();

    ReplayRoiAggregate {
        replay_attempts_total: replay_success_total + replay_failure_total,
        replay_success_total,
        replay_failure_total,
        reasoning_avoided_tokens_total,
        replay_fallback_cost_total,
        replay_task_classes,
        replay_sources,
    }
}
5753
5754fn replay_event_in_scope(stored: &StoredEvolutionEvent, cutoff: Option<DateTime<Utc>>) -> bool {
5755 match cutoff {
5756 Some(cutoff) => parse_event_timestamp(&stored.timestamp)
5757 .map(|timestamp| timestamp >= cutoff)
5758 .unwrap_or(false),
5759 None => true,
5760 }
5761}
5762
5763fn replay_roi_release_gate_summary(
5764 store: &dyn EvolutionStore,
5765 window_seconds: u64,
5766) -> Result<ReplayRoiWindowSummary, EvoKernelError> {
5767 let (events, projection) = scan_projection(store)?;
5768 let now = Utc::now();
5769 let cutoff = if window_seconds == 0 {
5770 None
5771 } else {
5772 let seconds = i64::try_from(window_seconds).unwrap_or(i64::MAX);
5773 Some(now - Duration::seconds(seconds))
5774 };
5775 let replay = collect_replay_roi_aggregate(&events, &projection, cutoff);
5776
5777 Ok(ReplayRoiWindowSummary {
5778 generated_at: now.to_rfc3339(),
5779 window_seconds,
5780 replay_attempts_total: replay.replay_attempts_total,
5781 replay_success_total: replay.replay_success_total,
5782 replay_failure_total: replay.replay_failure_total,
5783 reasoning_avoided_tokens_total: replay.reasoning_avoided_tokens_total,
5784 replay_fallback_cost_total: replay.replay_fallback_cost_total,
5785 replay_roi: compute_replay_roi(
5786 replay.reasoning_avoided_tokens_total,
5787 replay.replay_fallback_cost_total,
5788 ),
5789 replay_task_classes: replay.replay_task_classes,
5790 replay_sources: replay.replay_sources,
5791 })
5792}
5793
5794fn replay_roi_release_gate_contract(
5795 summary: &ReplayRoiWindowSummary,
5796 thresholds: ReplayRoiReleaseGateThresholds,
5797) -> ReplayRoiReleaseGateContract {
5798 let input = replay_roi_release_gate_input_contract(summary, thresholds);
5799 let output = evaluate_replay_roi_release_gate_contract_input(&input);
5800 ReplayRoiReleaseGateContract { input, output }
5801}
5802
5803fn replay_roi_release_gate_input_contract(
5804 summary: &ReplayRoiWindowSummary,
5805 thresholds: ReplayRoiReleaseGateThresholds,
5806) -> ReplayRoiReleaseGateInputContract {
5807 let replay_safety_signal = replay_roi_release_gate_safety_signal(summary);
5808 let replay_safety = replay_safety_signal.fail_closed_default
5809 && replay_safety_signal.rollback_ready
5810 && replay_safety_signal.audit_trail_complete
5811 && replay_safety_signal.has_replay_activity;
5812 ReplayRoiReleaseGateInputContract {
5813 generated_at: summary.generated_at.clone(),
5814 window_seconds: summary.window_seconds,
5815 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
5816 .iter()
5817 .map(|dimension| (*dimension).to_string())
5818 .collect(),
5819 replay_attempts_total: summary.replay_attempts_total,
5820 replay_success_total: summary.replay_success_total,
5821 replay_failure_total: summary.replay_failure_total,
5822 replay_hit_rate: safe_ratio(summary.replay_success_total, summary.replay_attempts_total),
5823 false_replay_rate: safe_ratio(summary.replay_failure_total, summary.replay_attempts_total),
5824 reasoning_avoided_tokens: summary.reasoning_avoided_tokens_total,
5825 replay_fallback_cost_total: summary.replay_fallback_cost_total,
5826 replay_roi: summary.replay_roi,
5827 replay_safety,
5828 replay_safety_signal,
5829 thresholds,
5830 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
5831 }
5832}
5833
5834fn replay_roi_release_gate_safety_signal(
5835 summary: &ReplayRoiWindowSummary,
5836) -> ReplayRoiReleaseGateSafetySignal {
5837 ReplayRoiReleaseGateSafetySignal {
5838 fail_closed_default: true,
5839 rollback_ready: summary.replay_failure_total == 0 || summary.replay_fallback_cost_total > 0,
5840 audit_trail_complete: summary.replay_attempts_total
5841 == summary.replay_success_total + summary.replay_failure_total,
5842 has_replay_activity: summary.replay_attempts_total > 0,
5843 }
5844}
5845
/// Evaluates a release-gate input contract and produces a fail-closed verdict.
///
/// Two categories of checks are run in order:
/// - Integrity checks (missing/inconsistent fields, invalid rates/thresholds)
///   record a failure AND set `indeterminate`, so a bad input yields
///   `Indeterminate` rather than `FailClosed`.
/// - Threshold checks record a plain failure only.
///
/// Status: no failures → `Pass`; any integrity failure → `Indeterminate`;
/// otherwise → `FailClosed`. Failed checks and evidence refs are deduplicated
/// and sorted; the summary string embeds the key metrics for audit logs.
pub fn evaluate_replay_roi_release_gate_contract_input(
    input: &ReplayRoiReleaseGateInputContract,
) -> ReplayRoiReleaseGateOutputContract {
    let mut failed_checks = Vec::new();
    let mut evidence_refs = Vec::new();
    let mut indeterminate = false;

    // Baseline evidence recorded for every evaluation.
    replay_release_gate_push_unique(&mut evidence_refs, "replay_roi_release_gate_summary");
    replay_release_gate_push_unique(
        &mut evidence_refs,
        format!("window_seconds:{}", input.window_seconds),
    );
    // Integrity: a timestamp must be present.
    if input.generated_at.trim().is_empty() {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "missing_generated_at",
            &["field:generated_at"],
        );
        indeterminate = true;
    } else {
        replay_release_gate_push_unique(
            &mut evidence_refs,
            format!("generated_at:{}", input.generated_at),
        );
    }

    // Integrity: attempts must equal successes + failures.
    let expected_attempts_total = input.replay_success_total + input.replay_failure_total;
    if input.replay_attempts_total != expected_attempts_total {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_attempt_accounting",
            &[
                "metric:replay_attempts_total",
                "metric:replay_success_total",
                "metric:replay_failure_total",
            ],
        );
        indeterminate = true;
    }

    // Integrity: zero attempts means the gate has nothing to judge.
    if input.replay_attempts_total == 0 {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "missing_replay_attempts",
            &["metric:replay_attempts_total"],
        );
        indeterminate = true;
    }

    // Integrity: rates must be finite values in [0, 1].
    if !replay_release_gate_rate_valid(input.replay_hit_rate) {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_replay_hit_rate",
            &["metric:replay_hit_rate"],
        );
        indeterminate = true;
    }
    if !replay_release_gate_rate_valid(input.false_replay_rate) {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_false_replay_rate",
            &["metric:false_replay_rate"],
        );
        indeterminate = true;
    }

    // Integrity: ROI must be a finite number.
    if !input.replay_roi.is_finite() {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_replay_roi",
            &["metric:replay_roi"],
        );
        indeterminate = true;
    }

    // Integrity: supplied rates must agree with the counters they derive from
    // (tolerance handled by replay_release_gate_float_eq).
    let expected_hit_rate = safe_ratio(input.replay_success_total, input.replay_attempts_total);
    let expected_false_rate = safe_ratio(input.replay_failure_total, input.replay_attempts_total);
    if input.replay_attempts_total > 0
        && !replay_release_gate_float_eq(input.replay_hit_rate, expected_hit_rate)
    {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_replay_hit_rate_consistency",
            &["metric:replay_hit_rate", "metric:replay_success_total"],
        );
        indeterminate = true;
    }
    if input.replay_attempts_total > 0
        && !replay_release_gate_float_eq(input.false_replay_rate, expected_false_rate)
    {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_false_replay_rate_consistency",
            &["metric:false_replay_rate", "metric:replay_failure_total"],
        );
        indeterminate = true;
    }

    // Integrity: thresholds themselves must be sane.
    if !(0.0..=1.0).contains(&input.thresholds.min_replay_hit_rate) {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_threshold_min_replay_hit_rate",
            &["threshold:min_replay_hit_rate"],
        );
        indeterminate = true;
    }
    if !(0.0..=1.0).contains(&input.thresholds.max_false_replay_rate) {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_threshold_max_false_replay_rate",
            &["threshold:max_false_replay_rate"],
        );
        indeterminate = true;
    }
    if !input.thresholds.min_replay_roi.is_finite() {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_threshold_min_replay_roi",
            &["threshold:min_replay_roi"],
        );
        indeterminate = true;
    }

    // Threshold checks (these do NOT set `indeterminate`).
    if input.replay_attempts_total < input.thresholds.min_replay_attempts {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "min_replay_attempts_below_threshold",
            &[
                "threshold:min_replay_attempts",
                "metric:replay_attempts_total",
            ],
        );
    }
    // Rate thresholds only apply when there is at least one attempt.
    if input.replay_attempts_total > 0
        && input.replay_hit_rate < input.thresholds.min_replay_hit_rate
    {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "replay_hit_rate_below_threshold",
            &["threshold:min_replay_hit_rate", "metric:replay_hit_rate"],
        );
    }
    if input.replay_attempts_total > 0
        && input.false_replay_rate > input.thresholds.max_false_replay_rate
    {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "false_replay_rate_above_threshold",
            &[
                "threshold:max_false_replay_rate",
                "metric:false_replay_rate",
            ],
        );
    }
    if input.reasoning_avoided_tokens < input.thresholds.min_reasoning_avoided_tokens {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "reasoning_avoided_tokens_below_threshold",
            &[
                "threshold:min_reasoning_avoided_tokens",
                "metric:reasoning_avoided_tokens",
            ],
        );
    }
    if input.replay_roi < input.thresholds.min_replay_roi {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "replay_roi_below_threshold",
            &["threshold:min_replay_roi", "metric:replay_roi"],
        );
    }
    if input.thresholds.require_replay_safety && !input.replay_safety {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "replay_safety_required",
            &["metric:replay_safety", "threshold:require_replay_safety"],
        );
    }

    // Deterministic output ordering for audit comparison.
    failed_checks.sort();
    evidence_refs.sort();

    // Integrity failures dominate: they force Indeterminate over FailClosed.
    let status = if failed_checks.is_empty() {
        ReplayRoiReleaseGateStatus::Pass
    } else if indeterminate {
        ReplayRoiReleaseGateStatus::Indeterminate
    } else {
        ReplayRoiReleaseGateStatus::FailClosed
    };
    let joined_checks = if failed_checks.is_empty() {
        "none".to_string()
    } else {
        failed_checks.join(",")
    };
    let summary = match status {
        ReplayRoiReleaseGateStatus::Pass => format!(
            "release gate pass: attempts={} hit_rate={:.3} false_replay_rate={:.3} reasoning_avoided_tokens={} replay_roi={:.3} replay_safety={}",
            input.replay_attempts_total,
            input.replay_hit_rate,
            input.false_replay_rate,
            input.reasoning_avoided_tokens,
            input.replay_roi,
            input.replay_safety
        ),
        ReplayRoiReleaseGateStatus::FailClosed => format!(
            "release gate fail_closed: failed_checks=[{}] attempts={} hit_rate={:.3} false_replay_rate={:.3} reasoning_avoided_tokens={} replay_roi={:.3} replay_safety={}",
            joined_checks,
            input.replay_attempts_total,
            input.replay_hit_rate,
            input.false_replay_rate,
            input.reasoning_avoided_tokens,
            input.replay_roi,
            input.replay_safety
        ),
        ReplayRoiReleaseGateStatus::Indeterminate => format!(
            "release gate indeterminate (fail-closed): failed_checks=[{}] attempts={} hit_rate={:.3} false_replay_rate={:.3} reasoning_avoided_tokens={} replay_roi={:.3} replay_safety={}",
            joined_checks,
            input.replay_attempts_total,
            input.replay_hit_rate,
            input.false_replay_rate,
            input.reasoning_avoided_tokens,
            input.replay_roi,
            input.replay_safety
        ),
    };

    ReplayRoiReleaseGateOutputContract {
        status,
        failed_checks,
        evidence_refs,
        summary,
    }
}
6096
6097fn replay_release_gate_record_failed_check(
6098 failed_checks: &mut Vec<String>,
6099 evidence_refs: &mut Vec<String>,
6100 check: &str,
6101 refs: &[&str],
6102) {
6103 replay_release_gate_push_unique(failed_checks, check.to_string());
6104 for entry in refs {
6105 replay_release_gate_push_unique(evidence_refs, (*entry).to_string());
6106 }
6107}
6108
/// Appends `entry` only when an equal string is not already present,
/// preserving insertion order.
fn replay_release_gate_push_unique(values: &mut Vec<String>, entry: impl Into<String>) {
    let entry = entry.into();
    if !values.contains(&entry) {
        values.push(entry);
    }
}
6115
/// A rate is valid when it is a finite number inside [0, 1].
/// NaN fails every comparison, so it is rejected by the range checks alone.
fn replay_release_gate_rate_valid(value: f64) -> bool {
    value >= 0.0 && value <= 1.0 && value.is_finite()
}
6119
/// Approximate float equality with an absolute tolerance of 1e-9.
/// Any NaN operand compares unequal (NaN fails the `<=` check).
fn replay_release_gate_float_eq(left: f64, right: f64) -> bool {
    let delta = left - right;
    delta.abs() <= 1e-9
}
6123
6124fn evolution_health_snapshot(snapshot: &EvolutionMetricsSnapshot) -> EvolutionHealthSnapshot {
6125 EvolutionHealthSnapshot {
6126 status: "ok".into(),
6127 last_event_seq: snapshot.last_event_seq,
6128 promoted_genes: snapshot.promoted_genes,
6129 promoted_capsules: snapshot.promoted_capsules,
6130 }
6131}
6132
6133fn render_evolution_metrics_prometheus(
6134 snapshot: &EvolutionMetricsSnapshot,
6135 health: &EvolutionHealthSnapshot,
6136) -> String {
6137 let mut out = String::new();
6138 out.push_str(
6139 "# HELP oris_evolution_replay_attempts_total Total replay attempts that reached validation.\n",
6140 );
6141 out.push_str("# TYPE oris_evolution_replay_attempts_total counter\n");
6142 out.push_str(&format!(
6143 "oris_evolution_replay_attempts_total {}\n",
6144 snapshot.replay_attempts_total
6145 ));
6146 out.push_str("# HELP oris_evolution_replay_success_total Total replay attempts that reused a capsule successfully.\n");
6147 out.push_str("# TYPE oris_evolution_replay_success_total counter\n");
6148 out.push_str(&format!(
6149 "oris_evolution_replay_success_total {}\n",
6150 snapshot.replay_success_total
6151 ));
6152 out.push_str("# HELP oris_evolution_replay_reasoning_avoided_total Total planner steps avoided by successful replay.\n");
6153 out.push_str("# TYPE oris_evolution_replay_reasoning_avoided_total counter\n");
6154 out.push_str(&format!(
6155 "oris_evolution_replay_reasoning_avoided_total {}\n",
6156 snapshot.replay_reasoning_avoided_total
6157 ));
6158 out.push_str("# HELP oris_evolution_reasoning_avoided_tokens_total Estimated reasoning tokens avoided by replay hits.\n");
6159 out.push_str("# TYPE oris_evolution_reasoning_avoided_tokens_total counter\n");
6160 out.push_str(&format!(
6161 "oris_evolution_reasoning_avoided_tokens_total {}\n",
6162 snapshot.reasoning_avoided_tokens_total
6163 ));
6164 out.push_str("# HELP oris_evolution_replay_fallback_cost_total Estimated reasoning token cost spent on replay fallbacks.\n");
6165 out.push_str("# TYPE oris_evolution_replay_fallback_cost_total counter\n");
6166 out.push_str(&format!(
6167 "oris_evolution_replay_fallback_cost_total {}\n",
6168 snapshot.replay_fallback_cost_total
6169 ));
6170 out.push_str("# HELP oris_evolution_replay_roi Net replay ROI in token space ((avoided - fallback_cost) / total).\n");
6171 out.push_str("# TYPE oris_evolution_replay_roi gauge\n");
6172 out.push_str(&format!(
6173 "oris_evolution_replay_roi {:.6}\n",
6174 snapshot.replay_roi
6175 ));
6176 out.push_str("# HELP oris_evolution_replay_utilization_by_task_class_total Successful replay reuse counts grouped by deterministic task class.\n");
6177 out.push_str("# TYPE oris_evolution_replay_utilization_by_task_class_total counter\n");
6178 for task_class in &snapshot.replay_task_classes {
6179 out.push_str(&format!(
6180 "oris_evolution_replay_utilization_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
6181 prometheus_label_value(&task_class.task_class_id),
6182 prometheus_label_value(&task_class.task_label),
6183 task_class.replay_success_total
6184 ));
6185 }
6186 out.push_str("# HELP oris_evolution_replay_reasoning_avoided_by_task_class_total Planner steps avoided by successful replay grouped by deterministic task class.\n");
6187 out.push_str("# TYPE oris_evolution_replay_reasoning_avoided_by_task_class_total counter\n");
6188 for task_class in &snapshot.replay_task_classes {
6189 out.push_str(&format!(
6190 "oris_evolution_replay_reasoning_avoided_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
6191 prometheus_label_value(&task_class.task_class_id),
6192 prometheus_label_value(&task_class.task_label),
6193 task_class.reasoning_steps_avoided_total
6194 ));
6195 }
6196 out.push_str("# HELP oris_evolution_reasoning_avoided_tokens_by_task_class_total Estimated reasoning tokens avoided by replay hits grouped by deterministic task class.\n");
6197 out.push_str("# TYPE oris_evolution_reasoning_avoided_tokens_by_task_class_total counter\n");
6198 for task_class in &snapshot.replay_task_classes {
6199 out.push_str(&format!(
6200 "oris_evolution_reasoning_avoided_tokens_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
6201 prometheus_label_value(&task_class.task_class_id),
6202 prometheus_label_value(&task_class.task_label),
6203 task_class.reasoning_avoided_tokens_total
6204 ));
6205 }
6206 out.push_str("# HELP oris_evolution_replay_fallback_cost_by_task_class_total Estimated fallback token cost grouped by deterministic task class.\n");
6207 out.push_str("# TYPE oris_evolution_replay_fallback_cost_by_task_class_total counter\n");
6208 for task_class in &snapshot.replay_task_classes {
6209 out.push_str(&format!(
6210 "oris_evolution_replay_fallback_cost_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
6211 prometheus_label_value(&task_class.task_class_id),
6212 prometheus_label_value(&task_class.task_label),
6213 task_class.replay_fallback_cost_total
6214 ));
6215 }
6216 out.push_str("# HELP oris_evolution_replay_roi_by_task_class Replay ROI in token space grouped by deterministic task class.\n");
6217 out.push_str("# TYPE oris_evolution_replay_roi_by_task_class gauge\n");
6218 for task_class in &snapshot.replay_task_classes {
6219 out.push_str(&format!(
6220 "oris_evolution_replay_roi_by_task_class{{task_class_id=\"{}\",task_label=\"{}\"}} {:.6}\n",
6221 prometheus_label_value(&task_class.task_class_id),
6222 prometheus_label_value(&task_class.task_label),
6223 task_class.replay_roi
6224 ));
6225 }
6226 out.push_str("# HELP oris_evolution_replay_roi_by_source Replay ROI in token space grouped by remote sender id for cross-node reconciliation.\n");
6227 out.push_str("# TYPE oris_evolution_replay_roi_by_source gauge\n");
6228 for source in &snapshot.replay_sources {
6229 out.push_str(&format!(
6230 "oris_evolution_replay_roi_by_source{{source_sender_id=\"{}\"}} {:.6}\n",
6231 prometheus_label_value(&source.source_sender_id),
6232 source.replay_roi
6233 ));
6234 }
6235 out.push_str("# HELP oris_evolution_reasoning_avoided_tokens_by_source_total Estimated reasoning tokens avoided grouped by remote sender id.\n");
6236 out.push_str("# TYPE oris_evolution_reasoning_avoided_tokens_by_source_total counter\n");
6237 for source in &snapshot.replay_sources {
6238 out.push_str(&format!(
6239 "oris_evolution_reasoning_avoided_tokens_by_source_total{{source_sender_id=\"{}\"}} {}\n",
6240 prometheus_label_value(&source.source_sender_id),
6241 source.reasoning_avoided_tokens_total
6242 ));
6243 }
6244 out.push_str("# HELP oris_evolution_replay_fallback_cost_by_source_total Estimated replay fallback token cost grouped by remote sender id.\n");
6245 out.push_str("# TYPE oris_evolution_replay_fallback_cost_by_source_total counter\n");
6246 for source in &snapshot.replay_sources {
6247 out.push_str(&format!(
6248 "oris_evolution_replay_fallback_cost_by_source_total{{source_sender_id=\"{}\"}} {}\n",
6249 prometheus_label_value(&source.source_sender_id),
6250 source.replay_fallback_cost_total
6251 ));
6252 }
6253 out.push_str("# HELP oris_evolution_replay_success_rate Successful replay attempts divided by replay attempts that reached validation.\n");
6254 out.push_str("# TYPE oris_evolution_replay_success_rate gauge\n");
6255 out.push_str(&format!(
6256 "oris_evolution_replay_success_rate {:.6}\n",
6257 snapshot.replay_success_rate
6258 ));
6259 out.push_str("# HELP oris_evolution_confidence_revalidations_total Total confidence-driven demotions that require revalidation before replay.\n");
6260 out.push_str("# TYPE oris_evolution_confidence_revalidations_total counter\n");
6261 out.push_str(&format!(
6262 "oris_evolution_confidence_revalidations_total {}\n",
6263 snapshot.confidence_revalidations_total
6264 ));
6265 out.push_str(
6266 "# HELP oris_evolution_mutation_declared_total Total declared mutations recorded in the evolution log.\n",
6267 );
6268 out.push_str("# TYPE oris_evolution_mutation_declared_total counter\n");
6269 out.push_str(&format!(
6270 "oris_evolution_mutation_declared_total {}\n",
6271 snapshot.mutation_declared_total
6272 ));
6273 out.push_str("# HELP oris_evolution_promoted_mutations_total Total mutations promoted by the governor.\n");
6274 out.push_str("# TYPE oris_evolution_promoted_mutations_total counter\n");
6275 out.push_str(&format!(
6276 "oris_evolution_promoted_mutations_total {}\n",
6277 snapshot.promoted_mutations_total
6278 ));
6279 out.push_str(
6280 "# HELP oris_evolution_promotion_ratio Promoted mutations divided by declared mutations.\n",
6281 );
6282 out.push_str("# TYPE oris_evolution_promotion_ratio gauge\n");
6283 out.push_str(&format!(
6284 "oris_evolution_promotion_ratio {:.6}\n",
6285 snapshot.promotion_ratio
6286 ));
6287 out.push_str("# HELP oris_evolution_gene_revocations_total Total gene revocations recorded in the evolution log.\n");
6288 out.push_str("# TYPE oris_evolution_gene_revocations_total counter\n");
6289 out.push_str(&format!(
6290 "oris_evolution_gene_revocations_total {}\n",
6291 snapshot.gene_revocations_total
6292 ));
6293 out.push_str("# HELP oris_evolution_mutation_velocity_last_hour Declared mutations observed in the last hour.\n");
6294 out.push_str("# TYPE oris_evolution_mutation_velocity_last_hour gauge\n");
6295 out.push_str(&format!(
6296 "oris_evolution_mutation_velocity_last_hour {}\n",
6297 snapshot.mutation_velocity_last_hour
6298 ));
6299 out.push_str("# HELP oris_evolution_revoke_frequency_last_hour Gene revocations observed in the last hour.\n");
6300 out.push_str("# TYPE oris_evolution_revoke_frequency_last_hour gauge\n");
6301 out.push_str(&format!(
6302 "oris_evolution_revoke_frequency_last_hour {}\n",
6303 snapshot.revoke_frequency_last_hour
6304 ));
6305 out.push_str("# HELP oris_evolution_promoted_genes Current promoted genes in the evolution projection.\n");
6306 out.push_str("# TYPE oris_evolution_promoted_genes gauge\n");
6307 out.push_str(&format!(
6308 "oris_evolution_promoted_genes {}\n",
6309 snapshot.promoted_genes
6310 ));
6311 out.push_str("# HELP oris_evolution_promoted_capsules Current promoted capsules in the evolution projection.\n");
6312 out.push_str("# TYPE oris_evolution_promoted_capsules gauge\n");
6313 out.push_str(&format!(
6314 "oris_evolution_promoted_capsules {}\n",
6315 snapshot.promoted_capsules
6316 ));
6317 out.push_str("# HELP oris_evolution_store_last_event_seq Last visible append-only evolution event sequence.\n");
6318 out.push_str("# TYPE oris_evolution_store_last_event_seq gauge\n");
6319 out.push_str(&format!(
6320 "oris_evolution_store_last_event_seq {}\n",
6321 snapshot.last_event_seq
6322 ));
6323 out.push_str(
6324 "# HELP oris_evolution_health Evolution observability store health (1 = healthy).\n",
6325 );
6326 out.push_str("# TYPE oris_evolution_health gauge\n");
6327 out.push_str(&format!(
6328 "oris_evolution_health {}\n",
6329 u8::from(health.status == "ok")
6330 ));
6331 out
6332}
6333
6334fn count_recent_events(
6335 events: &[StoredEvolutionEvent],
6336 cutoff: DateTime<Utc>,
6337 predicate: impl Fn(&EvolutionEvent) -> bool,
6338) -> u64 {
6339 events
6340 .iter()
6341 .filter(|stored| {
6342 predicate(&stored.event)
6343 && parse_event_timestamp(&stored.timestamp)
6344 .map(|timestamp| timestamp >= cutoff)
6345 .unwrap_or(false)
6346 })
6347 .count() as u64
6348}
6349
/// Escape a string for use as a Prometheus label value.
///
/// Per the text exposition format, backslash, newline, and double quote must
/// be escaped inside label values. A single pass over the characters replaces
/// the previous three chained `str::replace` calls, which each allocated a
/// fresh intermediate `String`.
fn prometheus_label_value(input: &str) -> String {
    let mut escaped = String::with_capacity(input.len());
    for ch in input.chars() {
        match ch {
            '\\' => escaped.push_str("\\\\"),
            '\n' => escaped.push_str("\\n"),
            '"' => escaped.push_str("\\\""),
            other => escaped.push(other),
        }
    }
    escaped
}
6356
6357fn parse_event_timestamp(raw: &str) -> Option<DateTime<Utc>> {
6358 DateTime::parse_from_rfc3339(raw)
6359 .ok()
6360 .map(|parsed| parsed.with_timezone(&Utc))
6361}
6362
6363fn is_replay_validation_failure(event: &EvolutionEvent) -> bool {
6364 matches!(
6365 event,
6366 EvolutionEvent::ValidationFailed {
6367 gene_id: Some(_),
6368 ..
6369 }
6370 )
6371}
6372
6373fn is_confidence_revalidation_event(event: &EvolutionEvent) -> bool {
6374 matches!(
6375 event,
6376 EvolutionEvent::PromotionEvaluated {
6377 state,
6378 reason,
6379 reason_code,
6380 ..
6381 }
6382 if *state == AssetState::Quarantined
6383 && (reason_code == &TransitionReasonCode::RevalidationConfidenceDecay
6384 || (reason_code == &TransitionReasonCode::Unspecified
6385 && reason.contains("confidence decayed")))
6386 )
6387}
6388
/// Divide `numerator` by `denominator`, yielding 0.0 instead of NaN/inf
/// when the denominator is zero.
fn safe_ratio(numerator: u64, denominator: u64) -> f64 {
    match denominator {
        0 => 0.0,
        nonzero => numerator as f64 / nonzero as f64,
    }
}
6396
6397fn store_err(err: EvolutionError) -> EvoKernelError {
6398 EvoKernelError::Store(err.to_string())
6399}
6400
6401#[cfg(test)]
6402mod tests {
6403 use super::*;
6404 use oris_agent_contract::{
6405 AgentRole, CoordinationPlan, CoordinationPrimitive, CoordinationTask,
6406 };
6407 use oris_kernel::{
6408 AllowAllPolicy, InMemoryEventStore, KernelMode, KernelState, NoopActionExecutor,
6409 NoopStepFn, StateUpdatedOnlyReducer,
6410 };
6411 use serde::{Deserialize, Serialize};
6412
    // Minimal kernel state used by the tests below; carries no data.
    #[derive(Clone, Debug, Default, Serialize, Deserialize)]
    struct TestState;

    impl KernelState for TestState {
        // Fixed schema version; the tests never migrate state.
        fn version(&self) -> u32 {
            1
        }
    }
6421
    // A repair plan phrased with semantic section variants (root cause, fix,
    // verification, rollback — here written in Chinese) should pass the gate.
    #[test]
    fn repair_quality_gate_accepts_semantic_variants() {
        let plan = r#"
根本原因:脚本中拼写错误导致 unknown command 'process'。
修复建议:将 `proccess` 更正为 `process`,并统一命令入口。
验证方式:执行 `cargo check -p oris-runtime` 与回归测试。
恢复方案:若新入口异常,立即回滚到旧命令映射。
"#;
        let report = evaluate_repair_quality_gate(plan);
        assert!(report.passes());
        assert!(report.failed_checks().is_empty());
    }
6434
    // A plan that never mentions the incident anchor ("unknown command")
    // must fail the gate, with that check listed among the failures.
    #[test]
    fn repair_quality_gate_rejects_missing_incident_anchor() {
        let plan = r#"
原因分析:逻辑分支覆盖不足。
修复方案:补充分支与日志。
验证命令:cargo check -p oris-runtime
回滚方案:git revert HEAD
"#;
        let report = evaluate_repair_quality_gate(plan);
        assert!(!report.passes());
        assert!(report
            .failed_checks()
            .iter()
            .any(|check| check.contains("unknown command")));
    }
6450
    // Create a fresh throwaway cargo-like workspace under the OS temp dir.
    // Any leftover directory from a previous run is removed first so tests
    // start from a known state. Returns the workspace root.
    fn temp_workspace(name: &str) -> std::path::PathBuf {
        let root =
            std::env::temp_dir().join(format!("oris-evokernel-{name}-{}", std::process::id()));
        if root.exists() {
            fs::remove_dir_all(&root).unwrap();
        }
        fs::create_dir_all(root.join("src")).unwrap();
        fs::write(
            root.join("Cargo.toml"),
            "[package]\nname = \"sample\"\nversion = \"0.1.0\"\nedition = \"2021\"\n",
        )
        .unwrap();
        fs::write(root.join("Cargo.lock"), "# lock\n").unwrap();
        fs::write(root.join("src/lib.rs"), "pub fn demo() -> usize { 1 }\n").unwrap();
        root
    }
6467
    // Build an in-memory kernel with no-op executors/policies — just enough
    // structure for EvoKernel tests that never exercise the kernel itself.
    fn test_kernel() -> Arc<Kernel<TestState>> {
        Arc::new(Kernel::<TestState> {
            events: Box::new(InMemoryEventStore::new()),
            snaps: None,
            reducer: Box::new(StateUpdatedOnlyReducer),
            exec: Box::new(NoopActionExecutor),
            step: Box::new(NoopStepFn),
            policy: Box::new(AllowAllPolicy),
            effect_sink: None,
            mode: KernelMode::Normal,
        })
    }
6480
    // A single-stage validation plan that only runs `git --version`, keeping
    // the validation path fast and dependency-free in tests.
    fn lightweight_plan() -> ValidationPlan {
        ValidationPlan {
            profile: "test".into(),
            stages: vec![ValidationStage::Command {
                program: "git".into(),
                args: vec!["--version".into()],
                timeout_ms: 5_000,
            }],
        }
    }
6491
    // A canonical low-risk mutation used across tests: add a one-line
    // README.md via a unified diff, scoped to that single path.
    fn sample_mutation() -> PreparedMutation {
        prepare_mutation(
            MutationIntent {
                id: "mutation-1".into(),
                intent: "add README".into(),
                target: MutationTarget::Paths {
                    allow: vec!["README.md".into()],
                },
                expected_effect: "repo still builds".into(),
                risk: RiskLevel::Low,
                signals: vec!["missing readme".into()],
                spec_id: None,
            },
            // The diff body must stay byte-stable: its content hash feeds
            // capsule identity in the replay tests.
            "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1 @@
+# sample
"
            .into(),
            Some("HEAD".into()),
        )
    }
6518
    // Sandbox policy for tests: only `git` may run, generous time/output caps.
    fn base_sandbox_policy() -> SandboxPolicy {
        SandboxPolicy {
            allowed_programs: vec!["git".into()],
            max_duration_ms: 60_000,
            max_output_bytes: 1024 * 1024,
            denied_env_prefixes: Vec::new(),
        }
    }
6527
    // A real command-running validator bound to the test sandbox policy.
    fn command_validator() -> Arc<dyn Validator> {
        Arc::new(CommandValidator::new(base_sandbox_policy()))
    }
6531
    // Build a selector input carrying `signal` plus an environment
    // fingerprint derived from the host (rustc version, arch/OS triple, and
    // the hash of the fixture Cargo.lock written by `temp_workspace`).
    fn replay_input(signal: &str) -> SelectorInput {
        // Best-effort probe; falls back to a sentinel when rustc is missing.
        let rustc_version = std::process::Command::new("rustc")
            .arg("--version")
            .output()
            .ok()
            .filter(|output| output.status.success())
            .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
            .unwrap_or_else(|| "rustc unknown".into());
        SelectorInput {
            signals: vec![signal.into()],
            env: EnvFingerprint {
                rustc_version,
                // Must match the "# lock\n" fixture content in temp_workspace.
                cargo_lock_hash: compute_artifact_hash("# lock\n"),
                target_triple: format!(
                    "{}-unknown-{}",
                    std::env::consts::ARCH,
                    std::env::consts::OS
                ),
                os: std::env::consts::OS.into(),
            },
            spec_id: None,
            limit: 1,
        }
    }
6556
    // Assemble an EvoKernel over the given store: fresh temp workspace,
    // local-process sandbox, a governor that promotes after one success,
    // and the lightweight validation plan/policy.
    fn build_test_evo_with_store(
        name: &str,
        run_id: &str,
        validator: Arc<dyn Validator>,
        store: Arc<dyn EvolutionStore>,
    ) -> EvoKernel<TestState> {
        let workspace = temp_workspace(name);
        let sandbox: Arc<dyn Sandbox> = Arc::new(oris_sandbox::LocalProcessSandbox::new(
            run_id,
            &workspace,
            std::env::temp_dir(),
        ));
        EvoKernel::new(test_kernel(), sandbox, validator, store)
            .with_governor(Arc::new(DefaultGovernor::new(
                oris_governor::GovernorConfig {
                    // Promote on the first successful validation so single-pass
                    // tests can observe promotion.
                    promote_after_successes: 1,
                    ..Default::default()
                },
            )))
            .with_validation_plan(lightweight_plan())
            .with_sandbox_policy(base_sandbox_policy())
    }
6579
    // Convenience wrapper: create a fresh JSONL store under the temp dir
    // (wiping any previous run's data) and return both the EvoKernel and
    // the store handle so tests can scan appended events directly.
    fn build_test_evo(
        name: &str,
        run_id: &str,
        validator: Arc<dyn Validator>,
    ) -> (EvoKernel<TestState>, Arc<dyn EvolutionStore>) {
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-{name}-store-{}",
            std::process::id()
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let evo = build_test_evo_with_store(name, run_id, validator, store.clone());
        (evo, store)
    }
6597
    // Shorthand for `remote_publish_envelope_with_env` using the local host's
    // environment fingerprint (from `replay_input`) for the remote capsule.
    fn remote_publish_envelope(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        signal: &str,
        file_name: &str,
        line: &str,
    ) -> EvolutionEnvelope {
        remote_publish_envelope_with_env(
            sender_id,
            run_id,
            gene_id,
            capsule_id,
            mutation_id,
            signal,
            file_name,
            line,
            replay_input(signal).env,
        )
    }
6620
    // Build a publish envelope as a remote node would: a declared mutation
    // (one-line file addition diff), a promoted gene, a promoted capsule
    // whose diff hash matches the mutation artifact, and the corresponding
    // capsule-released event. `env` controls the capsule's fingerprint so
    // tests can simulate matching or mismatching environments.
    fn remote_publish_envelope_with_env(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        signal: &str,
        file_name: &str,
        line: &str,
        env: EnvFingerprint,
    ) -> EvolutionEnvelope {
        let mutation = prepare_mutation(
            MutationIntent {
                id: mutation_id.into(),
                intent: format!("add {file_name}"),
                target: MutationTarget::Paths {
                    allow: vec![file_name.into()],
                },
                expected_effect: "replay should still validate".into(),
                risk: RiskLevel::Low,
                signals: vec![signal.into()],
                spec_id: None,
            },
            format!(
                "\
diff --git a/{file_name} b/{file_name}
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/{file_name}
@@ -0,0 +1 @@
+{line}
"
            ),
            Some("HEAD".into()),
        );
        let gene = Gene {
            id: gene_id.into(),
            signals: vec![signal.into()],
            strategy: vec![file_name.into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        };
        let capsule = Capsule {
            id: capsule_id.into(),
            gene_id: gene_id.into(),
            mutation_id: mutation_id.into(),
            run_id: run_id.into(),
            // Tie the capsule to the mutation's actual diff content.
            diff_hash: mutation.artifact.content_hash.clone(),
            confidence: 0.9,
            env,
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec![file_name.into()],
                validator_hash: "validator-hash".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        };
        EvolutionEnvelope::publish(
            sender_id,
            vec![
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared { mutation },
                },
                NetworkAsset::Gene { gene: gene.clone() },
                NetworkAsset::Capsule {
                    capsule: capsule.clone(),
                },
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::CapsuleReleased {
                        capsule_id: capsule.id.clone(),
                        state: AssetState::Promoted,
                    },
                },
            ],
        )
    }
6702
    // Variant of `remote_publish_envelope_with_env` where the mutation and
    // gene carry independently chosen signal lists, letting tests exercise
    // signal-matching asymmetries between the two asset kinds.
    fn remote_publish_envelope_with_signals(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        mutation_signals: Vec<String>,
        gene_signals: Vec<String>,
        file_name: &str,
        line: &str,
        env: EnvFingerprint,
    ) -> EvolutionEnvelope {
        let mutation = prepare_mutation(
            MutationIntent {
                id: mutation_id.into(),
                intent: format!("add {file_name}"),
                target: MutationTarget::Paths {
                    allow: vec![file_name.into()],
                },
                expected_effect: "replay should still validate".into(),
                risk: RiskLevel::Low,
                signals: mutation_signals,
                spec_id: None,
            },
            format!(
                "\
diff --git a/{file_name} b/{file_name}
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/{file_name}
@@ -0,0 +1 @@
+{line}
"
            ),
            Some("HEAD".into()),
        );
        let gene = Gene {
            id: gene_id.into(),
            signals: gene_signals,
            strategy: vec![file_name.into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        };
        let capsule = Capsule {
            id: capsule_id.into(),
            gene_id: gene_id.into(),
            mutation_id: mutation_id.into(),
            run_id: run_id.into(),
            // Tie the capsule to the mutation's actual diff content.
            diff_hash: mutation.artifact.content_hash.clone(),
            confidence: 0.9,
            env,
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec![file_name.into()],
                validator_hash: "validator-hash".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        };
        EvolutionEnvelope::publish(
            sender_id,
            vec![
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared { mutation },
                },
                NetworkAsset::Gene { gene: gene.clone() },
                NetworkAsset::Capsule {
                    capsule: capsule.clone(),
                },
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::CapsuleReleased {
                        capsule_id: capsule.id.clone(),
                        state: AssetState::Promoted,
                    },
                },
            ],
        )
    }
6785
    // Validator test double that ignores its inputs and reports a fixed
    // outcome, so tests can force success or failure deterministically.
    struct FixedValidator {
        // The outcome every `run` call reports.
        success: bool,
    }

    #[async_trait]
    impl Validator for FixedValidator {
        // Report the preconfigured outcome; the receipt is unused and the
        // plan only contributes its profile name to the logs.
        async fn run(
            &self,
            _receipt: &SandboxReceipt,
            plan: &ValidationPlan,
        ) -> Result<ValidationReport, ValidationError> {
            Ok(ValidationReport {
                success: self.success,
                duration_ms: 1,
                stages: Vec::new(),
                logs: if self.success {
                    format!("{} ok", plan.profile)
                } else {
                    format!("{} failed", plan.profile)
                },
            })
        }
    }
6809
    // Store wrapper that injects an I/O failure on the Nth `append_event`
    // call, delegating everything else to a real JSONL store. Used to test
    // error handling on partial event-log writes.
    struct FailOnAppendStore {
        inner: JsonlEvolutionStore,
        // 1-based index of the append call that should fail.
        fail_on_call: usize,
        // Number of append calls observed so far (Mutex: &self methods).
        call_count: Mutex<usize>,
    }

    impl FailOnAppendStore {
        fn new(root_dir: std::path::PathBuf, fail_on_call: usize) -> Self {
            Self {
                inner: JsonlEvolutionStore::new(root_dir),
                fail_on_call,
                call_count: Mutex::new(0),
            }
        }
    }

    impl EvolutionStore for FailOnAppendStore {
        // Count the call; fail exactly on `fail_on_call`, otherwise delegate.
        fn append_event(&self, event: EvolutionEvent) -> Result<u64, EvolutionError> {
            let mut call_count = self
                .call_count
                .lock()
                .map_err(|_| EvolutionError::Io("test store lock poisoned".into()))?;
            *call_count += 1;
            if *call_count == self.fail_on_call {
                return Err(EvolutionError::Io("injected append failure".into()));
            }
            self.inner.append_event(event)
        }

        fn scan(&self, from_seq: u64) -> Result<Vec<StoredEvolutionEvent>, EvolutionError> {
            self.inner.scan(from_seq)
        }

        fn rebuild_projection(&self) -> Result<EvolutionProjection, EvolutionError> {
            self.inner.rebuild_projection()
        }
    }
6847
    // Sequential plan: planner then coder. Both complete in order and a
    // handoff message from Planner to Coder is recorded for the coder task.
    #[test]
    fn coordination_planner_to_coder_handoff_is_deterministic() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "ship feature".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "planner".into(),
                    role: AgentRole::Planner,
                    description: "split the work".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "implement the patch".into(),
                    depends_on: vec!["planner".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["planner", "coder"]);
        assert!(result.failed_tasks.is_empty());
        assert!(result.messages.iter().any(|message| {
            message.from_role == AgentRole::Planner
                && message.to_role == AgentRole::Coder
                && message.task_id == "coder"
        }));
    }
6879
    // A coder task described as "force-fail" fails; the dependent repair
    // task then runs and completes, with a Coder→Repair message recorded.
    #[test]
    fn coordination_repair_runs_only_after_coder_failure() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "fix broken implementation".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "force-fail initial implementation".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "repair".into(),
                    role: AgentRole::Repair,
                    description: "patch the failed implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["repair"]);
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert!(result.messages.iter().any(|message| {
            message.from_role == AgentRole::Coder
                && message.to_role == AgentRole::Repair
                && message.task_id == "repair"
        }));
    }
6911
    // After a successful coder task, the dependent optimizer task runs;
    // both appear in completion order with no failures.
    #[test]
    fn coordination_optimizer_runs_after_successful_implementation_step() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "ship optimized patch".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "implement a working patch".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "optimizer".into(),
                    role: AgentRole::Optimizer,
                    description: "tighten the implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["coder", "optimizer"]);
        assert!(result.failed_tasks.is_empty());
    }
6938
    // Parallel mode: independent tasks in the same wave complete in sorted
    // id order ("a-task" before "z-task"), then the dependent task follows.
    #[test]
    fn coordination_parallel_waves_preserve_sorted_merge_order() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "parallelize safe tasks".into(),
            primitive: CoordinationPrimitive::Parallel,
            tasks: vec![
                CoordinationTask {
                    id: "z-task".into(),
                    role: AgentRole::Planner,
                    description: "analyze z".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "a-task".into(),
                    role: AgentRole::Coder,
                    description: "implement a".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "mid-task".into(),
                    role: AgentRole::Optimizer,
                    description: "polish after both".into(),
                    depends_on: vec!["z-task".into(), "a-task".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["a-task", "z-task", "mid-task"]);
        assert!(result.failed_tasks.is_empty());
    }
6971
    // A persistently failing task with max_retries = 1 is attempted twice
    // (initial try + one retry), producing two failure messages, then stops.
    #[test]
    fn coordination_retries_stop_at_max_retries() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "retry then stop".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![CoordinationTask {
                id: "coder".into(),
                role: AgentRole::Coder,
                description: "force-fail this task".into(),
                depends_on: Vec::new(),
            }],
            timeout_ms: 5_000,
            max_retries: 1,
        });

        assert!(result.completed_tasks.is_empty());
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert_eq!(
            result
                .messages
                .iter()
                .filter(|message| message.task_id == "coder" && message.content.contains("failed"))
                .count(),
            2
        );
    }
6998
    // Conditional mode: when a dependency fails, downstream tasks are
    // skipped (with an explanatory message) rather than marked failed.
    #[test]
    fn coordination_conditional_mode_skips_downstream_tasks_on_failure() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "skip blocked follow-up work".into(),
            primitive: CoordinationPrimitive::Conditional,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "force-fail the implementation".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "optimizer".into(),
                    role: AgentRole::Optimizer,
                    description: "only optimize a successful implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert!(result.completed_tasks.is_empty());
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert!(result.messages.iter().any(|message| {
            message.task_id == "optimizer"
                && message
                    .content
                    .contains("skipped due to failed dependency chain")
        }));
        assert!(!result
            .failed_tasks
            .iter()
            .any(|task_id| task_id == "optimizer"));
    }
7035
    // Running a one-stage plan through CommandValidator yields a report
    // containing exactly one stage entry.
    #[tokio::test]
    async fn command_validator_aggregates_stage_reports() {
        let workspace = temp_workspace("validator");
        // Hand-built receipt: the validator only needs the workdir/log paths.
        let receipt = SandboxReceipt {
            mutation_id: "m".into(),
            workdir: workspace,
            applied: true,
            changed_files: Vec::new(),
            patch_hash: "hash".into(),
            stdout_log: std::env::temp_dir().join("stdout.log"),
            stderr_log: std::env::temp_dir().join("stderr.log"),
        };
        let validator = CommandValidator::new(SandboxPolicy {
            allowed_programs: vec!["git".into()],
            max_duration_ms: 1_000,
            max_output_bytes: 1024,
            denied_env_prefixes: Vec::new(),
        });
        let report = validator
            .run(
                &receipt,
                &ValidationPlan {
                    profile: "test".into(),
                    stages: vec![ValidationStage::Command {
                        program: "git".into(),
                        args: vec!["--version".into()],
                        timeout_ms: 1_000,
                    }],
                },
            )
            .await
            .unwrap();
        assert_eq!(report.stages.len(), 1);
    }
7070
    // Capturing a successful mutation appends a CapsuleCommitted event to
    // the store and returns a capsule with a non-empty id.
    #[tokio::test]
    async fn capture_successful_mutation_appends_capsule() {
        let (evo, store) = build_test_evo("capture", "run-1", command_validator());
        let capsule = evo
            .capture_successful_mutation(&"run-1".into(), sample_mutation())
            .await
            .unwrap();
        let events = store.scan(1).unwrap();
        assert!(events
            .iter()
            .any(|stored| matches!(stored.event, EvolutionEvent::CapsuleCommitted { .. })));
        assert!(!capsule.id.is_empty());
    }
7084
    // A replay on a matching signal reuses the previously captured capsule,
    // produces consistent detect/select evidence, and logs a CapsuleReused
    // event carrying both the capture run id and the replay run id.
    #[tokio::test]
    async fn replay_hit_records_capsule_reused() {
        let (evo, store) = build_test_evo("replay", "run-2", command_validator());
        let capsule = evo
            .capture_successful_mutation(&"run-2".into(), sample_mutation())
            .await
            .unwrap();
        let replay_run_id = "run-replay".to_string();
        let decision = evo
            .replay_or_fallback_for_run(&replay_run_id, replay_input("missing readme"))
            .await
            .unwrap();
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some(capsule.id));
        assert!(!decision.detect_evidence.task_class_id.is_empty());
        assert!(!decision.detect_evidence.matched_signals.is_empty());
        assert!(decision.detect_evidence.mismatch_reasons.is_empty());
        assert!(!decision.select_evidence.candidates.is_empty());
        assert!(!decision.select_evidence.exact_match_lookup);
        assert_eq!(
            decision.select_evidence.selected_capsule_id.as_deref(),
            decision.capsule_id.as_deref()
        );
        assert!(store.scan(1).unwrap().iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::CapsuleReused {
                run_id,
                replay_run_id: Some(current_replay_run_id),
                ..
            } if run_id == "run-2" && current_replay_run_id == &replay_run_id
        )));
    }
7117
    // The legacy StoreReplayExecutor API (no replay run id) still reuses the
    // capsule and logs CapsuleReused with the original capture run id and
    // `replay_run_id: None`.
    #[tokio::test]
    async fn legacy_replay_executor_api_preserves_original_capsule_run_id() {
        let capture_run_id = "run-legacy-capture".to_string();
        let (evo, store) = build_test_evo("replay-legacy", &capture_run_id, command_validator());
        let capsule = evo
            .capture_successful_mutation(&capture_run_id, sample_mutation())
            .await
            .unwrap();
        // Assemble the executor from the EvoKernel's own components so the
        // legacy path shares the same store/selector/governor state.
        let executor = StoreReplayExecutor {
            sandbox: evo.sandbox.clone(),
            validator: evo.validator.clone(),
            store: evo.store.clone(),
            selector: evo.selector.clone(),
            governor: evo.governor.clone(),
            economics: Some(evo.economics.clone()),
            remote_publishers: Some(evo.remote_publishers.clone()),
            stake_policy: evo.stake_policy.clone(),
        };

        let decision = executor
            .try_replay(
                &replay_input("missing readme"),
                &evo.sandbox_policy,
                &evo.validation_plan,
            )
            .await
            .unwrap();

        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some(capsule.id));
        assert!(store.scan(1).unwrap().iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::CapsuleReused {
                run_id,
                replay_run_id: None,
                ..
            } if run_id == &capture_run_id
        )));
    }
7157
7158 #[tokio::test]
7159 async fn metrics_snapshot_tracks_replay_promotion_and_revocation_signals() {
7160 let (evo, _) = build_test_evo("metrics", "run-metrics", command_validator());
7161 let capsule = evo
7162 .capture_successful_mutation(&"run-metrics".into(), sample_mutation())
7163 .await
7164 .unwrap();
7165 let decision = evo
7166 .replay_or_fallback(replay_input("missing readme"))
7167 .await
7168 .unwrap();
7169 assert!(decision.used_capsule);
7170
7171 evo.revoke_assets(&RevokeNotice {
7172 sender_id: "node-metrics".into(),
7173 asset_ids: vec![capsule.id.clone()],
7174 reason: "manual test revoke".into(),
7175 })
7176 .unwrap();
7177
7178 let snapshot = evo.metrics_snapshot().unwrap();
7179 assert_eq!(snapshot.replay_attempts_total, 1);
7180 assert_eq!(snapshot.replay_success_total, 1);
7181 assert_eq!(snapshot.replay_success_rate, 1.0);
7182 assert_eq!(snapshot.confidence_revalidations_total, 0);
7183 assert_eq!(snapshot.replay_reasoning_avoided_total, 1);
7184 assert_eq!(
7185 snapshot.reasoning_avoided_tokens_total,
7186 decision.economics_evidence.reasoning_avoided_tokens
7187 );
7188 assert_eq!(snapshot.replay_fallback_cost_total, 0);
7189 assert_eq!(snapshot.replay_roi, 1.0);
7190 assert_eq!(snapshot.replay_task_classes.len(), 1);
7191 assert_eq!(snapshot.replay_task_classes[0].replay_success_total, 1);
7192 assert_eq!(snapshot.replay_task_classes[0].replay_failure_total, 0);
7193 assert_eq!(
7194 snapshot.replay_task_classes[0].reasoning_steps_avoided_total,
7195 1
7196 );
7197 assert_eq!(
7198 snapshot.replay_task_classes[0].replay_fallback_cost_total,
7199 0
7200 );
7201 assert_eq!(snapshot.replay_task_classes[0].replay_roi, 1.0);
7202 assert!(snapshot.replay_sources.is_empty());
7203 assert_eq!(snapshot.confidence_revalidations_total, 0);
7204 assert_eq!(snapshot.mutation_declared_total, 1);
7205 assert_eq!(snapshot.promoted_mutations_total, 1);
7206 assert_eq!(snapshot.promotion_ratio, 1.0);
7207 assert_eq!(snapshot.gene_revocations_total, 1);
7208 assert_eq!(snapshot.mutation_velocity_last_hour, 1);
7209 assert_eq!(snapshot.revoke_frequency_last_hour, 1);
7210 assert_eq!(snapshot.promoted_genes, 0);
7211 assert_eq!(snapshot.promoted_capsules, 0);
7212
7213 let rendered = evo.render_metrics_prometheus().unwrap();
7214 assert!(rendered.contains("oris_evolution_replay_reasoning_avoided_total 1"));
7215 assert!(rendered.contains("oris_evolution_reasoning_avoided_tokens_total"));
7216 assert!(rendered.contains("oris_evolution_replay_fallback_cost_total"));
7217 assert!(rendered.contains("oris_evolution_replay_roi 1.000000"));
7218 assert!(rendered.contains("oris_evolution_replay_utilization_by_task_class_total"));
7219 assert!(rendered.contains("oris_evolution_replay_reasoning_avoided_by_task_class_total"));
7220 assert!(rendered.contains("oris_evolution_replay_success_rate 1.000000"));
7221 assert!(rendered.contains("oris_evolution_confidence_revalidations_total 0"));
7222 assert!(rendered.contains("oris_evolution_promotion_ratio 1.000000"));
7223 assert!(rendered.contains("oris_evolution_revoke_frequency_last_hour 1"));
7224 assert!(rendered.contains("oris_evolution_mutation_velocity_last_hour 1"));
7225 assert!(rendered.contains("oris_evolution_health 1"));
7226 }
7227
    #[tokio::test]
    async fn replay_roi_release_gate_summary_matches_metrics_snapshot_for_legacy_replay_history() {
        // Seed the store with hand-written legacy events (one reuse, one
        // failed validation) and check that the release-gate summary agrees
        // with the metrics snapshot computed from the same history.
        let (evo, _) = build_test_evo("roi-legacy", "run-roi-legacy", command_validator());
        let capsule = evo
            .capture_successful_mutation(&"run-roi-legacy".into(), sample_mutation())
            .await
            .unwrap();

        // Legacy successful replay, appended directly as a raw event.
        evo.store
            .append_event(EvolutionEvent::CapsuleReused {
                capsule_id: capsule.id.clone(),
                gene_id: capsule.gene_id.clone(),
                run_id: capsule.run_id.clone(),
                replay_run_id: Some("run-roi-legacy-replay".into()),
            })
            .unwrap();
        // Legacy failed replay validation attributed to the same gene.
        evo.store
            .append_event(EvolutionEvent::ValidationFailed {
                mutation_id: "legacy-replay-failure".into(),
                report: ValidationSnapshot {
                    success: false,
                    profile: "test".into(),
                    duration_ms: 1,
                    summary: "legacy replay validation failed".into(),
                },
                gene_id: Some(capsule.gene_id.clone()),
            })
            .unwrap();

        let metrics = evo.metrics_snapshot().unwrap();
        // Window of 0 seconds: the full history is included.
        let summary = evo.replay_roi_release_gate_summary(0).unwrap();
        let task_class = &metrics.replay_task_classes[0];

        // One reuse + one failure = two attempts, one success.
        assert_eq!(metrics.replay_attempts_total, 2);
        assert_eq!(metrics.replay_success_total, 1);
        // Summary totals must mirror the snapshot; failures are derived as
        // attempts minus successes.
        assert_eq!(summary.replay_attempts_total, metrics.replay_attempts_total);
        assert_eq!(summary.replay_success_total, metrics.replay_success_total);
        assert_eq!(
            summary.replay_failure_total,
            metrics.replay_attempts_total - metrics.replay_success_total
        );
        assert_eq!(
            summary.reasoning_avoided_tokens_total,
            metrics.reasoning_avoided_tokens_total
        );
        assert_eq!(
            summary.replay_fallback_cost_total,
            metrics.replay_fallback_cost_total
        );
        assert_eq!(summary.replay_roi, metrics.replay_roi);
        // The single task-class entry must match field by field.
        assert_eq!(summary.replay_task_classes.len(), 1);
        assert_eq!(
            summary.replay_task_classes[0].task_class_id,
            task_class.task_class_id
        );
        assert_eq!(
            summary.replay_task_classes[0].replay_success_total,
            task_class.replay_success_total
        );
        assert_eq!(
            summary.replay_task_classes[0].replay_failure_total,
            task_class.replay_failure_total
        );
        assert_eq!(
            summary.replay_task_classes[0].reasoning_avoided_tokens_total,
            task_class.reasoning_avoided_tokens_total
        );
        assert_eq!(
            summary.replay_task_classes[0].replay_fallback_cost_total,
            task_class.replay_fallback_cost_total
        );
    }
7300
    #[tokio::test]
    async fn replay_roi_release_gate_summary_aggregates_task_class_and_remote_source() {
        // One replay miss (fallback) and one hit against a remote-imported
        // capsule: the summary should aggregate both by task class and by the
        // remote sender that published the reused capsule.
        let (evo, _) = build_test_evo("roi-summary", "run-roi-summary", command_validator());
        let envelope = remote_publish_envelope(
            "node-roi",
            "run-remote-roi",
            "gene-roi",
            "capsule-roi",
            "mutation-roi",
            "roi-signal",
            "ROI.md",
            "# roi",
        );
        evo.import_remote_envelope(&envelope).unwrap();

        // A signal with no overlap to any stored gene: must fall back.
        let miss = evo
            .replay_or_fallback(replay_input("entropy-hash-12345-no-overlap"))
            .await
            .unwrap();
        assert!(!miss.used_capsule);
        assert!(miss.fallback_to_planner);
        assert!(miss.select_evidence.candidates.is_empty());
        assert!(miss
            .detect_evidence
            .mismatch_reasons
            .iter()
            .any(|reason| reason == "no_candidate_after_select"));

        // Matching signal: the imported remote capsule is selected and reused.
        let hit = evo
            .replay_or_fallback(replay_input("roi-signal"))
            .await
            .unwrap();
        assert!(hit.used_capsule);
        assert!(!hit.select_evidence.candidates.is_empty());
        assert_eq!(
            hit.select_evidence.selected_capsule_id.as_deref(),
            hit.capsule_id.as_deref()
        );

        // One-hour window covers both attempts made just above.
        let summary = evo.replay_roi_release_gate_summary(60 * 60).unwrap();
        assert_eq!(summary.replay_attempts_total, 2);
        assert_eq!(summary.replay_success_total, 1);
        assert_eq!(summary.replay_failure_total, 1);
        assert!(summary.reasoning_avoided_tokens_total > 0);
        assert!(summary.replay_fallback_cost_total > 0);
        assert!(summary
            .replay_task_classes
            .iter()
            .any(|entry| { entry.replay_success_total == 1 && entry.replay_failure_total == 0 }));
        // The remote-source breakdown attributes the hit to the publisher.
        assert!(summary.replay_sources.iter().any(|source| {
            source.source_sender_id == "node-roi" && source.replay_success_total == 1
        }));

        // The JSON rendering exposes the same aggregates.
        let rendered = evo
            .render_replay_roi_release_gate_summary_json(60 * 60)
            .unwrap();
        assert!(rendered.contains("\"replay_attempts_total\": 2"));
        assert!(rendered.contains("\"source_sender_id\": \"node-roi\""));
    }
7360
    #[tokio::test]
    async fn replay_roi_release_gate_summary_contract_exposes_core_metrics_and_fail_closed_defaults(
    ) {
        // One miss + one hit gives a 0.5 hit rate; with default thresholds
        // that is not enough, so the gate must land on FailClosed with named
        // failed checks and evidence references.
        let (evo, _) = build_test_evo("roi-contract", "run-roi-contract", command_validator());
        let envelope = remote_publish_envelope(
            "node-contract",
            "run-remote-contract",
            "gene-contract",
            "capsule-contract",
            "mutation-contract",
            "contract-signal",
            "CONTRACT.md",
            "# contract",
        );
        evo.import_remote_envelope(&envelope).unwrap();

        let miss = evo
            .replay_or_fallback(replay_input("entropy-hash-contract-no-overlap"))
            .await
            .unwrap();
        assert!(!miss.used_capsule);
        assert!(miss.fallback_to_planner);

        let hit = evo
            .replay_or_fallback(replay_input("contract-signal"))
            .await
            .unwrap();
        assert!(hit.used_capsule);

        let summary = evo.replay_roi_release_gate_summary(60 * 60).unwrap();
        let contract = evo
            .replay_roi_release_gate_contract(60 * 60, ReplayRoiReleaseGateThresholds::default())
            .unwrap();

        // Contract input mirrors the summary's core totals and rates.
        assert_eq!(contract.input.replay_attempts_total, 2);
        assert_eq!(contract.input.replay_success_total, 1);
        assert_eq!(contract.input.replay_failure_total, 1);
        assert_eq!(
            contract.input.reasoning_avoided_tokens,
            summary.reasoning_avoided_tokens_total
        );
        assert_eq!(
            contract.input.replay_fallback_cost_total,
            summary.replay_fallback_cost_total
        );
        assert!((contract.input.replay_hit_rate - 0.5).abs() < f64::EPSILON);
        assert!((contract.input.false_replay_rate - 0.5).abs() < f64::EPSILON);
        assert!((contract.input.replay_roi - summary.replay_roi).abs() < f64::EPSILON);
        assert!(contract.input.replay_safety);
        // Aggregation dimensions and policy knobs come from the published
        // constant and the type defaults.
        assert_eq!(
            contract.input.aggregation_dimensions,
            REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
                .iter()
                .map(|dimension| (*dimension).to_string())
                .collect::<Vec<_>>()
        );
        assert_eq!(
            contract.input.thresholds,
            ReplayRoiReleaseGateThresholds::default()
        );
        assert_eq!(
            contract.input.fail_closed_policy,
            ReplayRoiReleaseGateFailClosedPolicy::default()
        );
        // Output: fail-closed, one named check per violated threshold, and a
        // pointer back to the summary as evidence.
        assert_eq!(
            contract.output.status,
            ReplayRoiReleaseGateStatus::FailClosed
        );
        assert!(contract
            .output
            .failed_checks
            .iter()
            .any(|check| check == "min_replay_attempts_below_threshold"));
        assert!(contract
            .output
            .failed_checks
            .iter()
            .any(|check| check == "replay_hit_rate_below_threshold"));
        assert!(contract
            .output
            .failed_checks
            .iter()
            .any(|check| check == "false_replay_rate_above_threshold"));
        assert!(contract
            .output
            .evidence_refs
            .iter()
            .any(|evidence| evidence == "replay_roi_release_gate_summary"));
        assert!(contract.output.summary.contains("release gate fail_closed"));
    }
7451
7452 #[tokio::test]
7453 async fn replay_roi_release_gate_summary_contract_accepts_custom_thresholds_and_json() {
7454 let (evo, _) = build_test_evo(
7455 "roi-contract-thresholds",
7456 "run-roi-contract-thresholds",
7457 command_validator(),
7458 );
7459 let thresholds = ReplayRoiReleaseGateThresholds {
7460 min_replay_attempts: 8,
7461 min_replay_hit_rate: 0.75,
7462 max_false_replay_rate: 0.10,
7463 min_reasoning_avoided_tokens: 600,
7464 min_replay_roi: 0.30,
7465 require_replay_safety: true,
7466 };
7467 let contract = evo
7468 .replay_roi_release_gate_contract(60 * 60, thresholds.clone())
7469 .unwrap();
7470 assert_eq!(contract.input.thresholds, thresholds.clone());
7471 assert_eq!(contract.input.replay_attempts_total, 0);
7472 assert_eq!(contract.input.replay_hit_rate, 0.0);
7473 assert_eq!(contract.input.false_replay_rate, 0.0);
7474 assert!(!contract.input.replay_safety_signal.has_replay_activity);
7475 assert!(!contract.input.replay_safety);
7476 assert_eq!(
7477 contract.output.status,
7478 ReplayRoiReleaseGateStatus::Indeterminate
7479 );
7480 assert!(contract
7481 .output
7482 .failed_checks
7483 .iter()
7484 .any(|check| check == "missing_replay_attempts"));
7485 assert!(contract
7486 .output
7487 .summary
7488 .contains("indeterminate (fail-closed)"));
7489
7490 let rendered = evo
7491 .render_replay_roi_release_gate_contract_json(60 * 60, thresholds)
7492 .unwrap();
7493 assert!(rendered.contains("\"min_replay_attempts\": 8"));
7494 assert!(rendered.contains("\"min_replay_hit_rate\": 0.75"));
7495 assert!(rendered.contains("\"status\": \"indeterminate\""));
7496 }
7497
7498 #[tokio::test]
7499 async fn replay_roi_release_gate_summary_window_boundary_filters_old_events() {
7500 let (evo, _) = build_test_evo("roi-window", "run-roi-window", command_validator());
7501 let envelope = remote_publish_envelope(
7502 "node-window",
7503 "run-remote-window",
7504 "gene-window",
7505 "capsule-window",
7506 "mutation-window",
7507 "window-signal",
7508 "WINDOW.md",
7509 "# window",
7510 );
7511 evo.import_remote_envelope(&envelope).unwrap();
7512
7513 let miss = evo
7514 .replay_or_fallback(replay_input("window-no-match-signal"))
7515 .await
7516 .unwrap();
7517 assert!(!miss.used_capsule);
7518 assert!(miss.fallback_to_planner);
7519
7520 let first_hit = evo
7521 .replay_or_fallback(replay_input("window-signal"))
7522 .await
7523 .unwrap();
7524 assert!(first_hit.used_capsule);
7525
7526 std::thread::sleep(std::time::Duration::from_secs(2));
7527
7528 let second_hit = evo
7529 .replay_or_fallback(replay_input("window-signal"))
7530 .await
7531 .unwrap();
7532 assert!(second_hit.used_capsule);
7533
7534 let narrow = evo.replay_roi_release_gate_summary(1).unwrap();
7535 assert_eq!(narrow.replay_attempts_total, 1);
7536 assert_eq!(narrow.replay_success_total, 1);
7537 assert_eq!(narrow.replay_failure_total, 0);
7538
7539 let all = evo.replay_roi_release_gate_summary(0).unwrap();
7540 assert_eq!(all.replay_attempts_total, 3);
7541 assert_eq!(all.replay_success_total, 2);
7542 assert_eq!(all.replay_failure_total, 1);
7543 }
7544
7545 fn fixed_release_gate_pass_fixture() -> ReplayRoiReleaseGateInputContract {
7546 ReplayRoiReleaseGateInputContract {
7547 generated_at: "2026-03-13T00:00:00Z".to_string(),
7548 window_seconds: 86_400,
7549 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7550 .iter()
7551 .map(|dimension| (*dimension).to_string())
7552 .collect(),
7553 replay_attempts_total: 4,
7554 replay_success_total: 3,
7555 replay_failure_total: 1,
7556 replay_hit_rate: 0.75,
7557 false_replay_rate: 0.25,
7558 reasoning_avoided_tokens: 480,
7559 replay_fallback_cost_total: 64,
7560 replay_roi: compute_replay_roi(480, 64),
7561 replay_safety: true,
7562 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7563 fail_closed_default: true,
7564 rollback_ready: true,
7565 audit_trail_complete: true,
7566 has_replay_activity: true,
7567 },
7568 thresholds: ReplayRoiReleaseGateThresholds::default(),
7569 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7570 }
7571 }
7572
7573 fn fixed_release_gate_fail_fixture() -> ReplayRoiReleaseGateInputContract {
7574 ReplayRoiReleaseGateInputContract {
7575 generated_at: "2026-03-13T00:00:00Z".to_string(),
7576 window_seconds: 86_400,
7577 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7578 .iter()
7579 .map(|dimension| (*dimension).to_string())
7580 .collect(),
7581 replay_attempts_total: 10,
7582 replay_success_total: 4,
7583 replay_failure_total: 6,
7584 replay_hit_rate: 0.4,
7585 false_replay_rate: 0.6,
7586 reasoning_avoided_tokens: 80,
7587 replay_fallback_cost_total: 400,
7588 replay_roi: compute_replay_roi(80, 400),
7589 replay_safety: false,
7590 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7591 fail_closed_default: true,
7592 rollback_ready: true,
7593 audit_trail_complete: true,
7594 has_replay_activity: true,
7595 },
7596 thresholds: ReplayRoiReleaseGateThresholds::default(),
7597 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7598 }
7599 }
7600
7601 fn fixed_release_gate_borderline_fixture() -> ReplayRoiReleaseGateInputContract {
7602 ReplayRoiReleaseGateInputContract {
7603 generated_at: "2026-03-13T00:00:00Z".to_string(),
7604 window_seconds: 3_600,
7605 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7606 .iter()
7607 .map(|dimension| (*dimension).to_string())
7608 .collect(),
7609 replay_attempts_total: 4,
7610 replay_success_total: 3,
7611 replay_failure_total: 1,
7612 replay_hit_rate: 0.75,
7613 false_replay_rate: 0.25,
7614 reasoning_avoided_tokens: 192,
7615 replay_fallback_cost_total: 173,
7616 replay_roi: 0.05,
7617 replay_safety: true,
7618 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7619 fail_closed_default: true,
7620 rollback_ready: true,
7621 audit_trail_complete: true,
7622 has_replay_activity: true,
7623 },
7624 thresholds: ReplayRoiReleaseGateThresholds {
7625 min_replay_attempts: 4,
7626 min_replay_hit_rate: 0.75,
7627 max_false_replay_rate: 0.25,
7628 min_reasoning_avoided_tokens: 192,
7629 min_replay_roi: 0.05,
7630 require_replay_safety: true,
7631 },
7632 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7633 }
7634 }
7635
7636 #[test]
7637 fn replay_roi_release_gate_summary_fixed_fixtures_cover_pass_fail_and_borderline() {
7638 let pass =
7639 evaluate_replay_roi_release_gate_contract_input(&fixed_release_gate_pass_fixture());
7640 let fail =
7641 evaluate_replay_roi_release_gate_contract_input(&fixed_release_gate_fail_fixture());
7642 let borderline = evaluate_replay_roi_release_gate_contract_input(
7643 &fixed_release_gate_borderline_fixture(),
7644 );
7645
7646 assert_eq!(pass.status, ReplayRoiReleaseGateStatus::Pass);
7647 assert!(pass.failed_checks.is_empty());
7648 assert_eq!(fail.status, ReplayRoiReleaseGateStatus::FailClosed);
7649 assert!(!fail.failed_checks.is_empty());
7650 assert_eq!(borderline.status, ReplayRoiReleaseGateStatus::Pass);
7651 assert!(borderline.failed_checks.is_empty());
7652 }
7653
    #[test]
    fn replay_roi_release_gate_summary_machine_readable_output_is_stable_and_sorted() {
        // The failing fixture trips every threshold, so both failed_checks
        // and evidence_refs should list all violations, in sorted order.
        let output =
            evaluate_replay_roi_release_gate_contract_input(&fixed_release_gate_fail_fixture());

        // failed_checks: lexicographically sorted, one entry per violation.
        assert_eq!(
            output.failed_checks,
            vec![
                "false_replay_rate_above_threshold".to_string(),
                "reasoning_avoided_tokens_below_threshold".to_string(),
                "replay_hit_rate_below_threshold".to_string(),
                "replay_roi_below_threshold".to_string(),
                "replay_safety_required".to_string(),
            ]
        );
        // evidence_refs: each violated metric paired with its threshold, plus
        // provenance (generated_at, summary source, window), also sorted.
        assert_eq!(
            output.evidence_refs,
            vec![
                "generated_at:2026-03-13T00:00:00Z".to_string(),
                "metric:false_replay_rate".to_string(),
                "metric:reasoning_avoided_tokens".to_string(),
                "metric:replay_hit_rate".to_string(),
                "metric:replay_roi".to_string(),
                "metric:replay_safety".to_string(),
                "replay_roi_release_gate_summary".to_string(),
                "threshold:max_false_replay_rate".to_string(),
                "threshold:min_reasoning_avoided_tokens".to_string(),
                "threshold:min_replay_hit_rate".to_string(),
                "threshold:min_replay_roi".to_string(),
                "threshold:require_replay_safety".to_string(),
                "window_seconds:86400".to_string(),
            ]
        );

        // Serialization keeps a fixed field order (status first) and is
        // deterministic across repeated calls.
        let rendered = serde_json::to_string(&output).unwrap();
        assert!(rendered.starts_with("{\"status\":\"fail_closed\",\"failed_checks\":"));
        assert_eq!(rendered, serde_json::to_string(&output).unwrap());
    }
7692
7693 #[test]
7694 fn replay_roi_release_gate_summary_evaluator_passes_with_threshold_compliance() {
7695 let input = ReplayRoiReleaseGateInputContract {
7696 generated_at: Utc::now().to_rfc3339(),
7697 window_seconds: 86_400,
7698 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7699 .iter()
7700 .map(|dimension| (*dimension).to_string())
7701 .collect(),
7702 replay_attempts_total: 10,
7703 replay_success_total: 9,
7704 replay_failure_total: 1,
7705 replay_hit_rate: 0.9,
7706 false_replay_rate: 0.1,
7707 reasoning_avoided_tokens: 960,
7708 replay_fallback_cost_total: 64,
7709 replay_roi: compute_replay_roi(960, 64),
7710 replay_safety: true,
7711 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7712 fail_closed_default: true,
7713 rollback_ready: true,
7714 audit_trail_complete: true,
7715 has_replay_activity: true,
7716 },
7717 thresholds: ReplayRoiReleaseGateThresholds::default(),
7718 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7719 };
7720
7721 let output = evaluate_replay_roi_release_gate_contract_input(&input);
7722 assert_eq!(output.status, ReplayRoiReleaseGateStatus::Pass);
7723 assert!(output.failed_checks.is_empty());
7724 assert!(output.summary.contains("release gate pass"));
7725 }
7726
    #[test]
    fn replay_roi_release_gate_summary_evaluator_fail_closed_on_threshold_violations() {
        // A weak history (4/10 hits, fallback cost above token savings,
        // safety off) must trip several default thresholds and drive the
        // gate to FailClosed.
        let input = ReplayRoiReleaseGateInputContract {
            generated_at: Utc::now().to_rfc3339(),
            window_seconds: 86_400,
            aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
                .iter()
                .map(|dimension| (*dimension).to_string())
                .collect(),
            replay_attempts_total: 10,
            replay_success_total: 4,
            replay_failure_total: 6,
            replay_hit_rate: 0.4,
            false_replay_rate: 0.6,
            reasoning_avoided_tokens: 80,
            replay_fallback_cost_total: 400,
            replay_roi: compute_replay_roi(80, 400),
            replay_safety: false,
            replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
                fail_closed_default: true,
                rollback_ready: true,
                audit_trail_complete: true,
                has_replay_activity: true,
            },
            thresholds: ReplayRoiReleaseGateThresholds::default(),
            fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
        };

        let output = evaluate_replay_roi_release_gate_contract_input(&input);
        assert_eq!(output.status, ReplayRoiReleaseGateStatus::FailClosed);
        // Each violated threshold is reported as a named failed check.
        assert!(output
            .failed_checks
            .iter()
            .any(|check| check == "replay_hit_rate_below_threshold"));
        assert!(output
            .failed_checks
            .iter()
            .any(|check| check == "false_replay_rate_above_threshold"));
        assert!(output
            .failed_checks
            .iter()
            .any(|check| check == "replay_roi_below_threshold"));
        assert!(output.summary.contains("release gate fail_closed"));
    }
7771
    #[test]
    fn replay_roi_release_gate_summary_evaluator_marks_missing_data_indeterminate() {
        // With no replay activity at all and an empty generated_at timestamp,
        // the evaluator should refuse to judge on merit and report
        // Indeterminate — which is still treated as fail-closed.
        let input = ReplayRoiReleaseGateInputContract {
            generated_at: String::new(),
            window_seconds: 86_400,
            aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
                .iter()
                .map(|dimension| (*dimension).to_string())
                .collect(),
            replay_attempts_total: 0,
            replay_success_total: 0,
            replay_failure_total: 0,
            replay_hit_rate: 0.0,
            false_replay_rate: 0.0,
            reasoning_avoided_tokens: 0,
            replay_fallback_cost_total: 0,
            replay_roi: 0.0,
            replay_safety: false,
            replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
                fail_closed_default: true,
                rollback_ready: true,
                audit_trail_complete: true,
                has_replay_activity: false,
            },
            thresholds: ReplayRoiReleaseGateThresholds::default(),
            fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
        };

        let output = evaluate_replay_roi_release_gate_contract_input(&input);
        assert_eq!(output.status, ReplayRoiReleaseGateStatus::Indeterminate);
        // Each missing prerequisite is named explicitly.
        assert!(output
            .failed_checks
            .iter()
            .any(|check| check == "missing_generated_at"));
        assert!(output
            .failed_checks
            .iter()
            .any(|check| check == "missing_replay_attempts"));
        assert!(output
            .summary
            .contains("release gate indeterminate (fail-closed)"));
    }
7814
    #[test]
    fn stale_replay_targets_require_confidence_revalidation() {
        let now = Utc::now();
        // Hand-built projection: one promoted gene/capsule pair whose last
        // update is 48 hours old, so its replay confidence has had time to
        // decay.
        let projection = EvolutionProjection {
            genes: vec![Gene {
                id: "gene-stale".into(),
                signals: vec!["missing readme".into()],
                strategy: vec!["README.md".into()],
                validation: vec!["test".into()],
                state: AssetState::Promoted,
            }],
            capsules: vec![Capsule {
                id: "capsule-stale".into(),
                gene_id: "gene-stale".into(),
                mutation_id: "mutation-stale".into(),
                run_id: "run-stale".into(),
                diff_hash: "hash".into(),
                confidence: 0.8,
                env: replay_input("missing readme").env,
                outcome: Outcome {
                    success: true,
                    validation_profile: "test".into(),
                    validation_duration_ms: 1,
                    changed_files: vec!["README.md".into()],
                    validator_hash: "validator".into(),
                    lines_changed: 1,
                    replay_verified: false,
                },
                state: AssetState::Promoted,
            }],
            reuse_counts: BTreeMap::from([("gene-stale".into(), 1)]),
            attempt_counts: BTreeMap::from([("gene-stale".into(), 1)]),
            // 48 hours stale relative to `now`.
            last_updated_at: BTreeMap::from([(
                "gene-stale".into(),
                (now - Duration::hours(48)).to_rfc3339(),
            )]),
            spec_ids_by_gene: BTreeMap::new(),
        };

        let targets = stale_replay_revalidation_targets(&projection, now);

        // The stale gene is flagged together with its capsule, and the
        // decayed confidence has dropped below the replay floor.
        assert_eq!(targets.len(), 1);
        assert_eq!(targets[0].gene_id, "gene-stale");
        assert_eq!(targets[0].capsule_ids, vec!["capsule-stale".to_string()]);
        assert!(targets[0].decayed_confidence < MIN_REPLAY_CONFIDENCE);
    }
7861
    #[tokio::test]
    async fn remote_replay_prefers_closest_environment_match() {
        let (evo, _) = build_test_evo("remote-env", "run-remote-env", command_validator());
        let input = replay_input("env-signal");

        // Two remote capsules answer the same signal: one captured in an
        // environment identical to the local input, one with a mismatched
        // fingerprint.
        let envelope_a = remote_publish_envelope_with_env(
            "node-a",
            "run-remote-a",
            "gene-a",
            "capsule-a",
            "mutation-a",
            "env-signal",
            "A.md",
            "# from a",
            input.env.clone(),
        );
        let envelope_b = remote_publish_envelope_with_env(
            "node-b",
            "run-remote-b",
            "gene-b",
            "capsule-b",
            "mutation-b",
            "env-signal",
            "B.md",
            "# from b",
            // Deliberately different rustc, lockfile, triple, and OS.
            EnvFingerprint {
                rustc_version: "old-rustc".into(),
                cargo_lock_hash: "other-lock".into(),
                target_triple: "aarch64-apple-darwin".into(),
                os: "linux".into(),
            },
        );

        evo.import_remote_envelope(&envelope_a).unwrap();
        evo.import_remote_envelope(&envelope_b).unwrap();

        let decision = evo.replay_or_fallback(input).await.unwrap();

        // The capsule whose environment matches the input must win.
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-a".into()));
        assert!(!decision.fallback_to_planner);
    }
7904
    #[test]
    fn remote_cold_start_scoring_caps_distinct_query_coverage() {
        let (evo, _) = build_test_evo("remote-score", "run-remote-score", command_validator());
        let input = replay_input("missing readme");

        // One candidate matches the query with a single exact signal; the
        // other covers it with two partial signals ("missing" + "readme").
        let exact = remote_publish_envelope_with_signals(
            "node-exact",
            "run-remote-exact",
            "gene-exact",
            "capsule-exact",
            "mutation-exact",
            vec!["missing readme".into()],
            vec!["missing readme".into()],
            "EXACT.md",
            "# exact",
            input.env.clone(),
        );
        let overlapping = remote_publish_envelope_with_signals(
            "node-overlap",
            "run-remote-overlap",
            "gene-overlap",
            "capsule-overlap",
            "mutation-overlap",
            vec!["missing readme".into()],
            vec!["missing".into(), "readme".into()],
            "OVERLAP.md",
            "# overlap",
            input.env.clone(),
        );

        evo.import_remote_envelope(&exact).unwrap();
        evo.import_remote_envelope(&overlapping).unwrap();

        let candidates = quarantined_remote_exact_match_candidates(evo.store.as_ref(), &input);
        let exact_candidate = candidates
            .iter()
            .find(|candidate| candidate.gene.id == "gene-exact")
            .unwrap();
        let overlap_candidate = candidates
            .iter()
            .find(|candidate| candidate.gene.id == "gene-overlap")
            .unwrap();

        // Coverage scoring is capped at 1.0: multi-signal overlap cannot
        // out-score an exact single-signal match.
        assert_eq!(exact_candidate.score, 1.0);
        assert_eq!(overlap_candidate.score, 1.0);
        assert!(candidates.iter().all(|candidate| candidate.score <= 1.0));
    }
7952
    #[test]
    fn exact_match_candidates_respect_spec_linked_events() {
        let (evo, _) = build_test_evo(
            "spec-linked-filter",
            "run-spec-linked-filter",
            command_validator(),
        );
        // The replay input asks for a specific spec id...
        let mut input = replay_input("missing readme");
        input.spec_id = Some("spec-readme".into());

        // ...while the declared mutation itself carries no spec id; the link
        // is only established later by a separate SpecLinked event.
        let mut mutation = sample_mutation();
        mutation.intent.id = "mutation-spec-linked".into();
        mutation.intent.spec_id = None;
        let gene = Gene {
            id: "gene-spec-linked".into(),
            signals: vec!["missing readme".into()],
            strategy: vec!["README.md".into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        };
        let capsule = Capsule {
            id: "capsule-spec-linked".into(),
            gene_id: gene.id.clone(),
            mutation_id: mutation.intent.id.clone(),
            run_id: "run-spec-linked".into(),
            diff_hash: mutation.artifact.content_hash.clone(),
            confidence: 0.9,
            env: input.env.clone(),
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec!["README.md".into()],
                validator_hash: "validator-hash".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        };

        // Append the raw event sequence: declare, project, commit, then link
        // the mutation to the spec after the fact.
        evo.store
            .append_event(EvolutionEvent::MutationDeclared { mutation })
            .unwrap();
        evo.store
            .append_event(EvolutionEvent::GeneProjected { gene })
            .unwrap();
        evo.store
            .append_event(EvolutionEvent::CapsuleCommitted { capsule })
            .unwrap();
        evo.store
            .append_event(EvolutionEvent::SpecLinked {
                mutation_id: "mutation-spec-linked".into(),
                spec_id: "spec-readme".into(),
            })
            .unwrap();

        // The late SpecLinked event is enough for the spec-filtered lookup to
        // surface the gene.
        let candidates = exact_match_candidates(evo.store.as_ref(), &input);
        assert_eq!(candidates.len(), 1);
        assert_eq!(candidates[0].gene.id, "gene-spec-linked");
    }
8013
    #[tokio::test]
    async fn remote_capsule_advances_from_quarantine_to_shadow_then_promoted() {
        // An imported remote asset must walk the full trust ladder:
        // Quarantined -> ShadowValidated (after 1st replay) -> Promoted
        // (after 2nd replay). Only promoted assets may be re-exported.
        let (evo, store) = build_test_evo(
            "remote-quarantine",
            "run-remote-quarantine",
            command_validator(),
        );
        let envelope = remote_publish_envelope(
            "node-remote",
            "run-remote-quarantine",
            "gene-remote",
            "capsule-remote",
            "mutation-remote",
            "remote-signal",
            "REMOTE.md",
            "# from remote",
        );

        evo.import_remote_envelope(&envelope).unwrap();

        // Stage 1: freshly imported assets start in quarantine and are
        // excluded from the promoted-asset export.
        let before_replay = store.rebuild_projection().unwrap();
        let imported_gene = before_replay
            .genes
            .iter()
            .find(|gene| gene.id == "gene-remote")
            .unwrap();
        let imported_capsule = before_replay
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-remote")
            .unwrap();
        assert_eq!(imported_gene.state, AssetState::Quarantined);
        assert_eq!(imported_capsule.state, AssetState::Quarantined);
        let exported_before_replay =
            export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
        assert!(exported_before_replay.assets.is_empty());

        // Stage 2: first successful replay moves both assets to
        // ShadowValidated — still not exportable.
        let first_decision = evo
            .replay_or_fallback(replay_input("remote-signal"))
            .await
            .unwrap();

        assert!(first_decision.used_capsule);
        assert_eq!(first_decision.capsule_id, Some("capsule-remote".into()));

        let after_first_replay = store.rebuild_projection().unwrap();
        let shadow_gene = after_first_replay
            .genes
            .iter()
            .find(|gene| gene.id == "gene-remote")
            .unwrap();
        let shadow_capsule = after_first_replay
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-remote")
            .unwrap();
        assert_eq!(shadow_gene.state, AssetState::ShadowValidated);
        assert_eq!(shadow_capsule.state, AssetState::ShadowValidated);
        let exported_after_first_replay =
            export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
        assert!(exported_after_first_replay.assets.is_empty());

        // Stage 3: second successful replay promotes the assets, which now
        // appear in the export (including the mutation payload event).
        let second_decision = evo
            .replay_or_fallback(replay_input("remote-signal"))
            .await
            .unwrap();
        assert!(second_decision.used_capsule);
        assert_eq!(second_decision.capsule_id, Some("capsule-remote".into()));

        let after_second_replay = store.rebuild_projection().unwrap();
        let promoted_gene = after_second_replay
            .genes
            .iter()
            .find(|gene| gene.id == "gene-remote")
            .unwrap();
        let promoted_capsule = after_second_replay
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-remote")
            .unwrap();
        assert_eq!(promoted_gene.state, AssetState::Promoted);
        assert_eq!(promoted_capsule.state, AssetState::Promoted);
        let exported_after_second_replay =
            export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
        assert_eq!(exported_after_second_replay.assets.len(), 3);
        assert!(exported_after_second_replay
            .assets
            .iter()
            .any(|asset| matches!(
                asset,
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared { .. }
                }
            )));
    }
8109
8110 #[tokio::test]
8111 async fn publish_local_assets_include_mutation_payload_for_remote_replay() {
8112 let (source, source_store) = build_test_evo(
8113 "remote-publish-export",
8114 "run-remote-publish-export",
8115 command_validator(),
8116 );
8117 source
8118 .capture_successful_mutation(&"run-remote-publish-export".into(), sample_mutation())
8119 .await
8120 .unwrap();
8121 let envelope = EvolutionNetworkNode::new(source_store.clone())
8122 .publish_local_assets("node-source")
8123 .unwrap();
8124 assert!(envelope.assets.iter().any(|asset| matches!(
8125 asset,
8126 NetworkAsset::EvolutionEvent {
8127 event: EvolutionEvent::MutationDeclared { mutation }
8128 } if mutation.intent.id == "mutation-1"
8129 )));
8130
8131 let (remote, _) = build_test_evo(
8132 "remote-publish-import",
8133 "run-remote-publish-import",
8134 command_validator(),
8135 );
8136 remote.import_remote_envelope(&envelope).unwrap();
8137
8138 let decision = remote
8139 .replay_or_fallback(replay_input("missing readme"))
8140 .await
8141 .unwrap();
8142
8143 assert!(decision.used_capsule);
8144 assert!(!decision.fallback_to_planner);
8145 }
8146
    #[tokio::test]
    async fn import_remote_envelope_records_manifest_validation_event() {
        // Publisher captures a mutation and publishes it as an envelope.
        let (source, source_store) = build_test_evo(
            "remote-manifest-success-source",
            "run-remote-manifest-success-source",
            command_validator(),
        );
        source
            .capture_successful_mutation(
                &"run-remote-manifest-success-source".into(),
                sample_mutation(),
            )
            .await
            .unwrap();
        let envelope = EvolutionNetworkNode::new(source_store.clone())
            .publish_local_assets("node-source")
            .unwrap();

        // Importing on a second node must append an audit event for the manifest check.
        let (remote, remote_store) = build_test_evo(
            "remote-manifest-success-remote",
            "run-remote-manifest-success-remote",
            command_validator(),
        );
        remote.import_remote_envelope(&envelope).unwrap();

        // The stored ManifestValidated event records acceptance, the sender and
        // publisher identity, and a non-empty list of validated asset ids.
        let events = remote_store.scan(1).unwrap();
        assert!(events.iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::ManifestValidated {
                accepted: true,
                reason,
                sender_id: Some(sender_id),
                publisher: Some(publisher),
                asset_ids,
            } if reason == "manifest validated"
                && sender_id == "node-source"
                && publisher == "node-source"
                && !asset_ids.is_empty()
        )));
    }
8187
    #[test]
    fn import_remote_envelope_rejects_invalid_manifest_and_records_audit_event() {
        let (remote, remote_store) = build_test_evo(
            "remote-manifest-invalid",
            "run-remote-manifest-invalid",
            command_validator(),
        );
        // Build a valid envelope, then tamper with the manifest's asset hash.
        let mut envelope = remote_publish_envelope(
            "node-remote",
            "run-remote-manifest-invalid",
            "gene-remote",
            "capsule-remote",
            "mutation-remote",
            "manifest-signal",
            "MANIFEST.md",
            "# drift",
        );
        if let Some(manifest) = envelope.manifest.as_mut() {
            manifest.asset_hash = "tampered-hash".to_string();
        }
        // Recompute the envelope-level hash so the import trips on the manifest
        // mismatch specifically, not on an outer content-hash failure.
        envelope.content_hash = envelope.compute_content_hash();

        // Import must fail with a manifest-related error ...
        let error = remote.import_remote_envelope(&envelope).unwrap_err();
        assert!(error.to_string().contains("manifest"));

        // ... and the rejection must still be recorded as an audit event.
        let events = remote_store.scan(1).unwrap();
        assert!(events.iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::ManifestValidated {
                accepted: false,
                reason,
                sender_id: Some(sender_id),
                publisher: Some(publisher),
                asset_ids,
            } if reason.contains("manifest asset_hash mismatch")
                && sender_id == "node-remote"
                && publisher == "node-remote"
                && !asset_ids.is_empty()
        )));
    }
8228
8229 #[tokio::test]
8230 async fn fetch_assets_include_mutation_payload_for_remote_replay() {
8231 let (evo, store) = build_test_evo(
8232 "remote-fetch-export",
8233 "run-remote-fetch",
8234 command_validator(),
8235 );
8236 evo.capture_successful_mutation(&"run-remote-fetch".into(), sample_mutation())
8237 .await
8238 .unwrap();
8239
8240 let response = EvolutionNetworkNode::new(store.clone())
8241 .fetch_assets(
8242 "node-source",
8243 &FetchQuery {
8244 sender_id: "node-client".into(),
8245 signals: vec!["missing readme".into()],
8246 since_cursor: None,
8247 resume_token: None,
8248 },
8249 )
8250 .unwrap();
8251
8252 assert!(response.assets.iter().any(|asset| matches!(
8253 asset,
8254 NetworkAsset::EvolutionEvent {
8255 event: EvolutionEvent::MutationDeclared { mutation }
8256 } if mutation.intent.id == "mutation-1"
8257 )));
8258 assert!(response
8259 .assets
8260 .iter()
8261 .any(|asset| matches!(asset, NetworkAsset::Gene { .. })));
8262 assert!(response
8263 .assets
8264 .iter()
8265 .any(|asset| matches!(asset, NetworkAsset::Capsule { .. })));
8266 }
8267
    #[test]
    fn fetch_assets_delta_sync_supports_since_cursor_and_resume_token() {
        // Fresh on-disk JSONL store so cursors start from a known-empty state.
        let store_root =
            std::env::temp_dir().join(format!("oris-evokernel-fetch-delta-store-{}", next_id("t")));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let node = EvolutionNetworkNode::new(store.clone());
        // Seed one reported experience (gene-delta-a) before the first fetch.
        node.record_reported_experience(
            "delta-agent",
            "gene-delta-a",
            vec!["delta.signal".into()],
            vec![
                "task_class=delta.signal".into(),
                "task_label=delta replay".into(),
            ],
            vec!["a2a.tasks.report".into()],
        )
        .unwrap();

        // Full fetch returns gene-delta-a plus a cursor and a resume token.
        let first = node
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "delta-agent".into(),
                    signals: vec!["delta.signal".into()],
                    since_cursor: None,
                    resume_token: None,
                },
            )
            .unwrap();
        let first_cursor = first.next_cursor.clone().expect("first next_cursor");
        let first_token = first.resume_token.clone().expect("first resume_token");
        assert!(first.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-a")
        ));

        // Simulate a restart (new node over the same store) and add a second gene.
        let restarted = EvolutionNetworkNode::new(store.clone());
        restarted
            .record_reported_experience(
                "delta-agent",
                "gene-delta-b",
                vec!["delta.signal".into()],
                vec![
                    "task_class=delta.signal".into(),
                    "task_label=delta replay".into(),
                ],
                vec!["a2a.tasks.report".into()],
            )
            .unwrap();

        // Resuming via the token must return only assets appended after the
        // first fetch (gene-delta-b, not gene-delta-a).
        let from_token = restarted
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "delta-agent".into(),
                    signals: vec!["delta.signal".into()],
                    since_cursor: None,
                    resume_token: Some(first_token),
                },
            )
            .unwrap();
        assert!(from_token.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-b")
        ));
        assert!(!from_token.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-a")
        ));
        // The sync audit echoes the cursor the resume token resolved to.
        assert_eq!(
            from_token.sync_audit.requested_cursor,
            Some(first_cursor.clone())
        );
        assert!(from_token.sync_audit.applied_count >= 1);

        // An explicit since_cursor behaves the same as the resume token.
        let from_cursor = restarted
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "delta-agent".into(),
                    signals: vec!["delta.signal".into()],
                    since_cursor: Some(first_cursor),
                    resume_token: None,
                },
            )
            .unwrap();
        assert!(from_cursor.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-b")
        ));
    }
8359
8360 #[test]
8361 fn partial_remote_import_keeps_publisher_for_already_imported_assets() {
8362 let store_root = std::env::temp_dir().join(format!(
8363 "oris-evokernel-remote-partial-store-{}",
8364 std::process::id()
8365 ));
8366 if store_root.exists() {
8367 fs::remove_dir_all(&store_root).unwrap();
8368 }
8369 let store: Arc<dyn EvolutionStore> = Arc::new(FailOnAppendStore::new(store_root, 5));
8370 let evo = build_test_evo_with_store(
8371 "remote-partial",
8372 "run-remote-partial",
8373 command_validator(),
8374 store.clone(),
8375 );
8376 let envelope = remote_publish_envelope(
8377 "node-partial",
8378 "run-remote-partial",
8379 "gene-partial",
8380 "capsule-partial",
8381 "mutation-partial",
8382 "partial-signal",
8383 "PARTIAL.md",
8384 "# partial",
8385 );
8386
8387 let result = evo.import_remote_envelope(&envelope);
8388
8389 assert!(matches!(result, Err(EvoKernelError::Store(_))));
8390 let projection = store.rebuild_projection().unwrap();
8391 assert!(projection
8392 .genes
8393 .iter()
8394 .any(|gene| gene.id == "gene-partial"));
8395 assert!(projection.capsules.is_empty());
8396 let publishers = evo.remote_publishers.lock().unwrap();
8397 assert_eq!(
8398 publishers.get("gene-partial").map(String::as_str),
8399 Some("node-partial")
8400 );
8401 }
8402
    #[test]
    fn retry_remote_import_after_partial_failure_only_imports_missing_assets() {
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-remote-partial-retry-store-{}",
            next_id("t")
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        // Store wired to fail mid-import, so only part of the envelope lands.
        let store: Arc<dyn EvolutionStore> = Arc::new(FailOnAppendStore::new(store_root, 5));
        let evo = build_test_evo_with_store(
            "remote-partial-retry",
            "run-remote-partial-retry",
            command_validator(),
            store.clone(),
        );
        let envelope = remote_publish_envelope(
            "node-partial",
            "run-remote-partial-retry",
            "gene-partial-retry",
            "capsule-partial-retry",
            "mutation-partial-retry",
            "partial-retry-signal",
            "PARTIAL_RETRY.md",
            "# partial retry",
        );

        // First attempt fails partway through with a store error.
        let first = evo.import_remote_envelope(&envelope);
        assert!(matches!(first, Err(EvoKernelError::Store(_))));

        // Retrying the identical envelope must import only what is still missing.
        let retry = evo.import_remote_envelope(&envelope).unwrap();

        assert_eq!(retry.imported_asset_ids, vec!["capsule-partial-retry"]);
        let projection = store.rebuild_projection().unwrap();
        let gene = projection
            .genes
            .iter()
            .find(|gene| gene.id == "gene-partial-retry")
            .unwrap();
        assert_eq!(gene.state, AssetState::Quarantined);
        let capsule = projection
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-partial-retry")
            .unwrap();
        assert_eq!(capsule.state, AssetState::Quarantined);
        // The retry must not double-count the gene's import attempt.
        assert_eq!(projection.attempt_counts["gene-partial-retry"], 1);

        // The event log contains exactly one declaration / projection / commit
        // per asset — the retry appended no duplicates.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::MutationDeclared { mutation }
                            if mutation.intent.id == "mutation-partial-retry"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::GeneProjected { gene } if gene.id == "gene-partial-retry"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::CapsuleCommitted { capsule }
                            if capsule.id == "capsule-partial-retry"
                    )
                })
                .count(),
            1
        );
    }
8491
    #[tokio::test]
    async fn duplicate_remote_import_does_not_requarantine_locally_validated_assets() {
        let (evo, store) = build_test_evo(
            "remote-idempotent",
            "run-remote-idempotent",
            command_validator(),
        );
        let envelope = remote_publish_envelope(
            "node-idempotent",
            "run-remote-idempotent",
            "gene-idempotent",
            "capsule-idempotent",
            "mutation-idempotent",
            "idempotent-signal",
            "IDEMPOTENT.md",
            "# idempotent",
        );

        // First import brings in both assets.
        let first = evo.import_remote_envelope(&envelope).unwrap();
        assert_eq!(
            first.imported_asset_ids,
            vec!["gene-idempotent", "capsule-idempotent"]
        );

        // One successful replay lifts the imported assets to ShadowValidated.
        let decision = evo
            .replay_or_fallback(replay_input("idempotent-signal"))
            .await
            .unwrap();
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-idempotent".into()));

        let projection_before = store.rebuild_projection().unwrap();
        let attempts_before = projection_before.attempt_counts["gene-idempotent"];
        let gene_before = projection_before
            .genes
            .iter()
            .find(|gene| gene.id == "gene-idempotent")
            .unwrap();
        assert_eq!(gene_before.state, AssetState::ShadowValidated);
        let capsule_before = projection_before
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-idempotent")
            .unwrap();
        assert_eq!(capsule_before.state, AssetState::ShadowValidated);

        // Re-importing the identical envelope is a no-op ...
        let second = evo.import_remote_envelope(&envelope).unwrap();
        assert!(second.imported_asset_ids.is_empty());

        // ... and must not reset attempt counts or knock the assets back to
        // Quarantined.
        let projection_after = store.rebuild_projection().unwrap();
        assert_eq!(
            projection_after.attempt_counts["gene-idempotent"],
            attempts_before
        );
        let gene_after = projection_after
            .genes
            .iter()
            .find(|gene| gene.id == "gene-idempotent")
            .unwrap();
        assert_eq!(gene_after.state, AssetState::ShadowValidated);
        let capsule_after = projection_after
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-idempotent")
            .unwrap();
        assert_eq!(capsule_after.state, AssetState::ShadowValidated);

        // A further successful replay still promotes normally.
        let third_decision = evo
            .replay_or_fallback(replay_input("idempotent-signal"))
            .await
            .unwrap();
        assert!(third_decision.used_capsule);
        assert_eq!(third_decision.capsule_id, Some("capsule-idempotent".into()));

        let projection_promoted = store.rebuild_projection().unwrap();
        let promoted_gene = projection_promoted
            .genes
            .iter()
            .find(|gene| gene.id == "gene-idempotent")
            .unwrap();
        let promoted_capsule = projection_promoted
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-idempotent")
            .unwrap();
        assert_eq!(promoted_gene.state, AssetState::Promoted);
        assert_eq!(promoted_capsule.state, AssetState::Promoted);

        // The duplicate import appended no duplicate declaration / projection /
        // commit events.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::MutationDeclared { mutation }
                            if mutation.intent.id == "mutation-idempotent"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::GeneProjected { gene } if gene.id == "gene-idempotent"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::CapsuleCommitted { capsule }
                            if capsule.id == "capsule-idempotent"
                    )
                })
                .count(),
            1
        );

        // Sync-audit bookkeeping: the first run scanned everything without
        // failures; the duplicate run skipped everything yet still handed back
        // a resume token.
        assert_eq!(first.sync_audit.scanned_count, envelope.assets.len());
        assert_eq!(first.sync_audit.failed_count, 0);
        assert_eq!(second.sync_audit.applied_count, 0);
        assert_eq!(second.sync_audit.skipped_count, envelope.assets.len());
        assert!(second.resume_token.is_some());
    }
8626
8627 #[tokio::test]
8628 async fn insufficient_evu_blocks_publish_but_not_local_replay() {
8629 let (evo, _) = build_test_evo("stake-gate", "run-stake", command_validator());
8630 let capsule = evo
8631 .capture_successful_mutation(&"run-stake".into(), sample_mutation())
8632 .await
8633 .unwrap();
8634 let publish = evo.export_promoted_assets("node-local");
8635 assert!(matches!(publish, Err(EvoKernelError::Validation(_))));
8636
8637 let decision = evo
8638 .replay_or_fallback(replay_input("missing readme"))
8639 .await
8640 .unwrap();
8641 assert!(decision.used_capsule);
8642 assert_eq!(decision.capsule_id, Some(capsule.id));
8643 }
8644
    #[tokio::test]
    // NOTE(review): the name says "revokes gene immediately", but the assertions
    // below require that NO GeneRevoked event is emitted and the gene stays
    // Promoted (a later replay falls back with a "below replay threshold" reason
    // instead). The name looks stale — confirm against current revocation policy.
    async fn second_replay_validation_failure_revokes_gene_immediately() {
        // Capture a capsule with a passing validator, then replay it twice with
        // a validator that always fails, sharing the same store.
        let (capturer, store) = build_test_evo("revoke-replay", "run-capture", command_validator());
        let capsule = capturer
            .capture_successful_mutation(&"run-capture".into(), sample_mutation())
            .await
            .unwrap();

        let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
        let failing_replay = build_test_evo_with_store(
            "revoke-replay",
            "run-replay-fail",
            failing_validator,
            store.clone(),
        );

        let first = failing_replay
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();
        let second = failing_replay
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();

        // Both failing replays fall back to the planner.
        assert!(!first.used_capsule);
        assert!(first.fallback_to_planner);
        assert!(!second.used_capsule);
        assert!(second.fallback_to_planner);

        // The assets keep their Promoted state despite the failures.
        let projection = store.rebuild_projection().unwrap();
        let gene = projection
            .genes
            .iter()
            .find(|gene| gene.id == capsule.gene_id)
            .unwrap();
        assert_eq!(gene.state, AssetState::Promoted);
        let committed_capsule = projection
            .capsules
            .iter()
            .find(|current| current.id == capsule.id)
            .unwrap();
        assert_eq!(committed_capsule.state, AssetState::Promoted);

        // Exactly one ValidationFailed event is logged for the gene, and no
        // GeneRevoked event at all.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::ValidationFailed {
                            gene_id: Some(gene_id),
                            ..
                        } if gene_id == &capsule.gene_id
                    )
                })
                .count(),
            1
        );
        assert!(!events.iter().any(|stored| {
            matches!(
                &stored.event,
                EvolutionEvent::GeneRevoked { gene_id, .. } if gene_id == &capsule.gene_id
            )
        }));

        // A fresh instance with a passing validator still refuses to replay:
        // the fallback reason reports confidence below the replay threshold.
        let recovered = build_test_evo_with_store(
            "revoke-replay",
            "run-replay-check",
            command_validator(),
            store.clone(),
        );
        let after_revoke = recovered
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();
        assert!(!after_revoke.used_capsule);
        assert!(after_revoke.fallback_to_planner);
        assert!(after_revoke.reason.contains("below replay threshold"));
    }
8726
    #[tokio::test]
    async fn remote_reuse_success_rewards_publisher_and_biases_selection() {
        // Ledger with two publishers: node-b has markedly better reputation.
        let ledger = Arc::new(Mutex::new(EvuLedger {
            accounts: vec![],
            reputations: vec![
                oris_economics::ReputationRecord {
                    node_id: "node-a".into(),
                    publish_success_rate: 0.4,
                    validator_accuracy: 0.4,
                    reuse_impact: 0,
                },
                oris_economics::ReputationRecord {
                    node_id: "node-b".into(),
                    publish_success_rate: 0.95,
                    validator_accuracy: 0.95,
                    reuse_impact: 8,
                },
            ],
        }));
        let (evo, _) = build_test_evo("remote-success", "run-remote", command_validator());
        let evo = evo.with_economics(ledger.clone());

        // Both publishers offer assets for the same signal.
        let envelope_a = remote_publish_envelope(
            "node-a",
            "run-remote-a",
            "gene-a",
            "capsule-a",
            "mutation-a",
            "shared-signal",
            "A.md",
            "# from a",
        );
        let envelope_b = remote_publish_envelope(
            "node-b",
            "run-remote-b",
            "gene-b",
            "capsule-b",
            "mutation-b",
            "shared-signal",
            "B.md",
            "# from b",
        );

        evo.import_remote_envelope(&envelope_a).unwrap();
        evo.import_remote_envelope(&envelope_b).unwrap();

        let decision = evo
            .replay_or_fallback(replay_input("shared-signal"))
            .await
            .unwrap();

        // Selection prefers the higher-reputation publisher's capsule.
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-b".into()));
        // Successful reuse credits node-b with the reuse reward, and its
        // selector reputation bias stays above node-a's.
        let locked = ledger.lock().unwrap();
        let rewarded = locked
            .accounts
            .iter()
            .find(|item| item.node_id == "node-b")
            .unwrap();
        assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
        assert!(
            locked.selector_reputation_bias()["node-b"]
                > locked.selector_reputation_bias()["node-a"]
        );
    }
8792
    #[tokio::test]
    async fn remote_reuse_settlement_tracks_selected_capsule_publisher_for_shared_gene() {
        let ledger = Arc::new(Mutex::new(EvuLedger::default()));
        let (evo, _) = build_test_evo(
            "remote-shared-publisher",
            "run-remote-shared-publisher",
            command_validator(),
        );
        let evo = evo.with_economics(ledger.clone());
        let input = replay_input("shared-signal");
        // Two capsules share the same gene id but come from different publishers:
        // node-a's capsule carries the replay input's env fingerprint ...
        let preferred = remote_publish_envelope_with_env(
            "node-a",
            "run-remote-a",
            "gene-shared",
            "capsule-preferred",
            "mutation-preferred",
            "shared-signal",
            "A.md",
            "# from a",
            input.env.clone(),
        );
        // ... while node-b's capsule was built under a mismatching fingerprint.
        let fallback = remote_publish_envelope_with_env(
            "node-b",
            "run-remote-b",
            "gene-shared",
            "capsule-fallback",
            "mutation-fallback",
            "shared-signal",
            "B.md",
            "# from b",
            EnvFingerprint {
                rustc_version: "old-rustc".into(),
                cargo_lock_hash: "other-lock".into(),
                target_triple: "aarch64-apple-darwin".into(),
                os: "linux".into(),
            },
        );

        evo.import_remote_envelope(&preferred).unwrap();
        evo.import_remote_envelope(&fallback).unwrap();

        let decision = evo.replay_or_fallback(input).await.unwrap();

        // The env-matching capsule is selected ...
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-preferred".into()));
        // ... and only ITS publisher (node-a) is rewarded, even though both
        // publishers contributed capsules for the same gene.
        let locked = ledger.lock().unwrap();
        let rewarded = locked
            .accounts
            .iter()
            .find(|item| item.node_id == "node-a")
            .unwrap();
        assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
        assert!(locked.accounts.iter().all(|item| item.node_id != "node-b"));
    }
8847
8848 #[test]
8849 fn select_candidates_surfaces_ranked_remote_cold_start_candidates() {
8850 let ledger = Arc::new(Mutex::new(EvuLedger {
8851 accounts: vec![],
8852 reputations: vec![
8853 oris_economics::ReputationRecord {
8854 node_id: "node-a".into(),
8855 publish_success_rate: 0.4,
8856 validator_accuracy: 0.4,
8857 reuse_impact: 0,
8858 },
8859 oris_economics::ReputationRecord {
8860 node_id: "node-b".into(),
8861 publish_success_rate: 0.95,
8862 validator_accuracy: 0.95,
8863 reuse_impact: 8,
8864 },
8865 ],
8866 }));
8867 let (evo, _) = build_test_evo("remote-select", "run-remote-select", command_validator());
8868 let evo = evo.with_economics(ledger);
8869
8870 let envelope_a = remote_publish_envelope(
8871 "node-a",
8872 "run-remote-a",
8873 "gene-a",
8874 "capsule-a",
8875 "mutation-a",
8876 "shared-signal",
8877 "A.md",
8878 "# from a",
8879 );
8880 let envelope_b = remote_publish_envelope(
8881 "node-b",
8882 "run-remote-b",
8883 "gene-b",
8884 "capsule-b",
8885 "mutation-b",
8886 "shared-signal",
8887 "B.md",
8888 "# from b",
8889 );
8890
8891 evo.import_remote_envelope(&envelope_a).unwrap();
8892 evo.import_remote_envelope(&envelope_b).unwrap();
8893
8894 let candidates = evo.select_candidates(&replay_input("shared-signal"));
8895
8896 assert_eq!(candidates.len(), 1);
8897 assert_eq!(candidates[0].gene.id, "gene-b");
8898 assert_eq!(candidates[0].capsules[0].id, "capsule-b");
8899 }
8900
    #[tokio::test]
    async fn remote_reuse_publisher_bias_survives_restart() {
        // Reputation ledger favoring node-b over node-a.
        let ledger = Arc::new(Mutex::new(EvuLedger {
            accounts: vec![],
            reputations: vec![
                oris_economics::ReputationRecord {
                    node_id: "node-a".into(),
                    publish_success_rate: 0.4,
                    validator_accuracy: 0.4,
                    reuse_impact: 0,
                },
                oris_economics::ReputationRecord {
                    node_id: "node-b".into(),
                    publish_success_rate: 0.95,
                    validator_accuracy: 0.95,
                    reuse_impact: 8,
                },
            ],
        }));
        // Durable on-disk store shared by the pre- and post-restart instances.
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-remote-restart-store-{}",
            next_id("t")
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let evo = build_test_evo_with_store(
            "remote-success-restart-source",
            "run-remote-restart-source",
            command_validator(),
            store.clone(),
        )
        .with_economics(ledger.clone());

        let envelope_a = remote_publish_envelope(
            "node-a",
            "run-remote-a",
            "gene-a",
            "capsule-a",
            "mutation-a",
            "shared-signal",
            "A.md",
            "# from a",
        );
        let envelope_b = remote_publish_envelope(
            "node-b",
            "run-remote-b",
            "gene-b",
            "capsule-b",
            "mutation-b",
            "shared-signal",
            "B.md",
            "# from b",
        );

        evo.import_remote_envelope(&envelope_a).unwrap();
        evo.import_remote_envelope(&envelope_b).unwrap();

        // A fresh instance built on the same store stands in for a restarted node.
        let recovered = build_test_evo_with_store(
            "remote-success-restart-recovered",
            "run-remote-restart-recovered",
            command_validator(),
            store.clone(),
        )
        .with_economics(ledger.clone());

        let decision = recovered
            .replay_or_fallback(replay_input("shared-signal"))
            .await
            .unwrap();

        // Post-restart selection still favors node-b's capsule, and the reuse
        // reward settles to node-b.
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-b".into()));
        let locked = ledger.lock().unwrap();
        let rewarded = locked
            .accounts
            .iter()
            .find(|item| item.node_id == "node-b")
            .unwrap();
        assert_eq!(rewarded.balance, recovered.stake_policy.reuse_reward);
    }
8984
8985 #[tokio::test]
8986 async fn remote_reuse_failure_penalizes_remote_reputation() {
8987 let ledger = Arc::new(Mutex::new(EvuLedger::default()));
8988 let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
8989 let (evo, _) = build_test_evo("remote-failure", "run-failure", failing_validator);
8990 let evo = evo.with_economics(ledger.clone());
8991
8992 let envelope = remote_publish_envelope(
8993 "node-remote",
8994 "run-remote-failed",
8995 "gene-remote",
8996 "capsule-remote",
8997 "mutation-remote",
8998 "failure-signal",
8999 "FAILED.md",
9000 "# from remote",
9001 );
9002 evo.import_remote_envelope(&envelope).unwrap();
9003
9004 let decision = evo
9005 .replay_or_fallback(replay_input("failure-signal"))
9006 .await
9007 .unwrap();
9008
9009 assert!(!decision.used_capsule);
9010 assert!(decision.fallback_to_planner);
9011
9012 let signal = evo.economics_signal("node-remote").unwrap();
9013 assert_eq!(signal.available_evu, 0);
9014 assert!(signal.publish_success_rate < 0.5);
9015 assert!(signal.validator_accuracy < 0.5);
9016 }
9017
9018 #[test]
9019 fn ensure_builtin_experience_assets_is_idempotent_and_fetchable() {
9020 let store_root = std::env::temp_dir().join(format!(
9021 "oris-evokernel-builtin-experience-store-{}",
9022 next_id("t")
9023 ));
9024 if store_root.exists() {
9025 fs::remove_dir_all(&store_root).unwrap();
9026 }
9027 let store: Arc<dyn EvolutionStore> =
9028 Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
9029 let node = EvolutionNetworkNode::new(store.clone());
9030
9031 let first = node
9032 .ensure_builtin_experience_assets("runtime-bootstrap")
9033 .unwrap();
9034 assert!(!first.imported_asset_ids.is_empty());
9035
9036 let second = node
9037 .ensure_builtin_experience_assets("runtime-bootstrap")
9038 .unwrap();
9039 assert!(second.imported_asset_ids.is_empty());
9040
9041 let fetch = node
9042 .fetch_assets(
9043 "execution-api",
9044 &FetchQuery {
9045 sender_id: "compat-agent".into(),
9046 signals: vec!["error".into()],
9047 since_cursor: None,
9048 resume_token: None,
9049 },
9050 )
9051 .unwrap();
9052
9053 let mut has_builtin_evomap = false;
9054 for asset in fetch.assets {
9055 if let NetworkAsset::Gene { gene } = asset {
9056 if strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
9057 == Some("builtin_evomap")
9058 && gene.state == AssetState::Promoted
9059 {
9060 has_builtin_evomap = true;
9061 break;
9062 }
9063 }
9064 }
9065 assert!(has_builtin_evomap);
9066 }
9067
    #[test]
    fn reported_experience_retention_keeps_latest_three_and_preserves_builtin_assets() {
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-reported-retention-store-{}",
            next_id("t")
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let node = EvolutionNetworkNode::new(store.clone());

        // Seed the built-in assets first; retention must never touch them.
        node.ensure_builtin_experience_assets("runtime-bootstrap")
            .unwrap();

        // Report four experiences for the same task_class (docs.rewrite), one
        // more than the retention limit.
        for idx in 0..4 {
            node.record_reported_experience(
                "reporter-a",
                format!("reported-docs-rewrite-v{}", idx + 1),
                vec!["docs.rewrite".into(), format!("task-{}", idx + 1)],
                vec![
                    "task_class=docs.rewrite".into(),
                    format!("task_label=Docs rewrite v{}", idx + 1),
                    format!("summary=reported replay {}", idx + 1),
                ],
                vec!["a2a.tasks.report".into()],
            )
            .unwrap();
        }

        // Count reported genes by state, and promoted built-ins, via metadata.
        let (_, projection) = store.scan_projection().unwrap();
        let reported_promoted = projection
            .genes
            .iter()
            .filter(|gene| {
                gene.state == AssetState::Promoted
                    && strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
                        == Some("reported_experience")
                    && strategy_metadata_value(&gene.strategy, "task_class").as_deref()
                        == Some("docs.rewrite")
            })
            .count();
        let reported_revoked = projection
            .genes
            .iter()
            .filter(|gene| {
                gene.state == AssetState::Revoked
                    && strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
                        == Some("reported_experience")
                    && strategy_metadata_value(&gene.strategy, "task_class").as_deref()
                        == Some("docs.rewrite")
            })
            .count();
        let builtin_promoted = projection
            .genes
            .iter()
            .filter(|gene| {
                gene.state == AssetState::Promoted
                    && matches!(
                        strategy_metadata_value(&gene.strategy, "asset_origin").as_deref(),
                        Some("builtin") | Some("builtin_evomap")
                    )
            })
            .count();

        // Retention outcome: the latest three reported genes stay promoted, one
        // is revoked, and built-ins remain promoted.
        assert_eq!(reported_promoted, 3);
        assert_eq!(reported_revoked, 1);
        assert!(builtin_promoted >= 1);

        // Fetch for the task_class still surfaces at least the retained genes.
        let fetch = node
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "consumer-b".into(),
                    signals: vec!["docs.rewrite".into()],
                    since_cursor: None,
                    resume_token: None,
                },
            )
            .unwrap();
        let docs_genes = fetch
            .assets
            .into_iter()
            .filter_map(|asset| match asset {
                NetworkAsset::Gene { gene } => Some(gene),
                _ => None,
            })
            .filter(|gene| {
                strategy_metadata_value(&gene.strategy, "task_class").as_deref()
                    == Some("docs.rewrite")
            })
            .collect::<Vec<_>>();
        assert!(docs_genes.len() >= 3);
    }
9163}