1use std::collections::{BTreeMap, BTreeSet};
4use std::fs;
5use std::path::{Path, PathBuf};
6use std::process::Command;
7use std::sync::{Arc, Mutex};
8
9use async_trait::async_trait;
10use chrono::{DateTime, Duration, Utc};
11use oris_agent_contract::{
12 accept_self_evolution_selection_decision, infer_mutation_needed_failure_reason_code,
13 infer_replay_fallback_reason_code, normalize_mutation_needed_failure_contract,
14 normalize_replay_fallback_contract, reject_self_evolution_selection_decision, AgentRole,
15 BoundedTaskClass, CoordinationMessage, CoordinationPlan, CoordinationPrimitive,
16 CoordinationResult, CoordinationTask, ExecutionFeedback, MutationNeededFailureContract,
17 MutationNeededFailureReasonCode, MutationProposal as AgentMutationProposal, ReplayFeedback,
18 ReplayPlannerDirective, SelfEvolutionCandidateIntakeRequest, SelfEvolutionSelectionDecision,
19 SelfEvolutionSelectionReasonCode, SupervisedDevloopOutcome, SupervisedDevloopRequest,
20 SupervisedDevloopStatus,
21};
22use oris_economics::{EconomicsSignal, EvuLedger, StakePolicy};
23use oris_evolution::{
24 compute_artifact_hash, decayed_replay_confidence, next_id, stable_hash_json, AssetState,
25 BlastRadius, CandidateSource, Capsule, CapsuleId, EnvFingerprint, EvolutionError,
26 EvolutionEvent, EvolutionProjection, EvolutionStore, Gene, GeneCandidate, MutationId,
27 PreparedMutation, ReplayRoiEvidence, ReplayRoiReasonCode, Selector, SelectorInput,
28 StoreBackedSelector, StoredEvolutionEvent, TransitionEvidence, TransitionReasonCode,
29 ValidationSnapshot, MIN_REPLAY_CONFIDENCE,
30};
31use oris_evolution_network::{EvolutionEnvelope, NetworkAsset, SyncAudit};
32use oris_governor::{DefaultGovernor, Governor, GovernorDecision, GovernorInput};
33use oris_kernel::{Kernel, KernelState, RunId};
34use oris_sandbox::{
35 compute_blast_radius, execute_allowed_command, Sandbox, SandboxPolicy, SandboxReceipt,
36};
37use oris_spec::CompiledMutationPlan;
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use thiserror::Error;
41
42pub use oris_evolution::{
43 default_store_root, ArtifactEncoding, AssetState as EvoAssetState,
44 BlastRadius as EvoBlastRadius, CandidateSource as EvoCandidateSource,
45 EnvFingerprint as EvoEnvFingerprint, EvolutionStore as EvoEvolutionStore, JsonlEvolutionStore,
46 MutationArtifact, MutationIntent, MutationTarget, Outcome, RiskLevel,
47 SelectorInput as EvoSelectorInput, TransitionReasonCode as EvoTransitionReasonCode,
48};
49pub use oris_evolution_network::{
50 FetchQuery, FetchResponse, MessageType, PublishRequest, RevokeNotice,
51};
52pub use oris_governor::{CoolingWindow, GovernorConfig, RevocationReason};
53pub use oris_sandbox::{LocalProcessSandbox, SandboxPolicy as EvoSandboxPolicy};
54pub use oris_spec::{SpecCompileError, SpecCompiler, SpecDocument};
55
/// An ordered validation pipeline identified by a profile name.
///
/// Stages are executed in order by `CommandValidator::run`, which stops at the
/// first failing stage (fail-fast).
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationPlan {
    /// Profile identifier, e.g. "oris-default".
    pub profile: String,
    /// Stages executed in declaration order.
    pub stages: Vec<ValidationStage>,
}
61
62impl ValidationPlan {
63 pub fn oris_default() -> Self {
64 Self {
65 profile: "oris-default".into(),
66 stages: vec![
67 ValidationStage::Command {
68 program: "cargo".into(),
69 args: vec!["fmt".into(), "--all".into(), "--check".into()],
70 timeout_ms: 60_000,
71 },
72 ValidationStage::Command {
73 program: "cargo".into(),
74 args: vec!["check".into(), "--workspace".into()],
75 timeout_ms: 180_000,
76 },
77 ValidationStage::Command {
78 program: "cargo".into(),
79 args: vec![
80 "test".into(),
81 "-p".into(),
82 "oris-kernel".into(),
83 "-p".into(),
84 "oris-evolution".into(),
85 "-p".into(),
86 "oris-sandbox".into(),
87 "-p".into(),
88 "oris-evokernel".into(),
89 "--lib".into(),
90 ],
91 timeout_ms: 300_000,
92 },
93 ValidationStage::Command {
94 program: "cargo".into(),
95 args: vec![
96 "test".into(),
97 "-p".into(),
98 "oris-runtime".into(),
99 "--lib".into(),
100 ],
101 timeout_ms: 300_000,
102 },
103 ],
104 }
105 }
106}
107
/// A single step of a validation plan.
///
/// Only external command execution exists today; new stage kinds would be
/// added as further variants.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum ValidationStage {
    /// Run `program` with `args` inside the sandbox, aborting after
    /// `timeout_ms` milliseconds.
    Command {
        program: String,
        args: Vec<String>,
        timeout_ms: u64,
    },
}
116
/// Result of executing one validation stage.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationStageReport {
    /// Human-readable label: the program followed by its joined arguments.
    pub stage: String,
    /// Whether the stage command reported success.
    pub success: bool,
    /// Process exit code; `None` when the command could not be executed at all.
    pub exit_code: Option<i32>,
    /// Wall-clock duration; 0 when execution itself failed.
    pub duration_ms: u64,
    pub stdout: String,
    pub stderr: String,
}
126
/// Aggregate outcome of running a `ValidationPlan`.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationReport {
    /// True only if every executed stage succeeded.
    pub success: bool,
    /// Total wall-clock time across all executed stages.
    pub duration_ms: u64,
    /// Per-stage reports, in execution order (may be shorter than the plan on
    /// fail-fast abort).
    pub stages: Vec<ValidationStageReport>,
    /// Concatenated non-empty stdout/stderr of all stages, newline separated.
    pub logs: String,
}
134
/// Raw material fed into `extract_deterministic_signals`.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionInput {
    pub patch_diff: String,
    pub intent: String,
    pub expected_effect: String,
    /// Caller-declared signal phrases; each is normalized and tokenized.
    pub declared_signals: Vec<String>,
    pub changed_files: Vec<String>,
    /// Folded into the signal set as "validation passed"/"validation failed".
    pub validation_success: bool,
    pub validation_logs: String,
    pub stage_outputs: Vec<String>,
}
146
/// Deterministic signal set plus a stable content hash over it.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionOutput {
    /// At most 32 signals, in lexicographic order (collected from a BTreeSet).
    pub values: Vec<String>,
    /// `stable_hash_json` over `values`; falls back to an artifact hash of the
    /// newline-joined values if JSON hashing fails.
    pub hash: String,
}
152
/// Template describing a pre-baked gene/capsule seed.
///
/// NOTE(review): the consumer is outside this view — presumably used during
/// store bootstrap (see `BootstrapReport`); confirm against the seeding code.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SeedTemplate {
    pub id: String,
    pub intent: String,
    pub signals: Vec<String>,
    pub diff_payload: String,
    /// Name of the validation profile the seed expects.
    pub validation_profile: String,
}
161
/// Summary of a bootstrap/seeding pass over the evolution store.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct BootstrapReport {
    /// Whether any seeding actually happened.
    pub seeded: bool,
    pub genes_added: usize,
    pub capsules_added: usize,
}
168
// Tuning knobs for replay/promotion behavior. Several are consumed further
// down this file; the MUTATION_NEEDED_* / SUPERVISED_DEVLOOP_* group is used
// by code outside this view — confirm call sites before changing values.

// Max reported experiences retained (consumer outside this view — confirm).
const REPORTED_EXPERIENCE_RETENTION_LIMIT: usize = 3;
// Shadow→promoted gating thresholds (consumers outside this view — confirm).
const SHADOW_PROMOTION_MIN_REPLAY_ATTEMPTS: u64 = 2;
const SHADOW_PROMOTION_MIN_SUCCESS_RATE: f32 = 0.70;
const SHADOW_PROMOTION_MIN_ENV_MATCH: f32 = 0.75;
// Tied to the store-wide floor so promotion can never require less confidence
// than replay itself does.
const SHADOW_PROMOTION_MIN_DECAYED_CONFIDENCE: f32 = MIN_REPLAY_CONFIDENCE;
// Baseline reasoning-token saving credited per replay; also the default
// `min_reasoning_avoided_tokens` release-gate threshold below.
const REPLAY_REASONING_TOKEN_FLOOR: u64 = 192;
const REPLAY_REASONING_TOKEN_SIGNAL_WEIGHT: u64 = 24;
// Score discount applied when candidates come from exact-match fallback
// lookup rather than the selector (see `build_select_evidence`).
const COLD_START_LOOKUP_PENALTY: f32 = 0.05;
// Hard limits for "mutation needed" failures (consumers outside this view).
const MUTATION_NEEDED_MAX_DIFF_BYTES: usize = 128 * 1024;
const MUTATION_NEEDED_MAX_CHANGED_LINES: usize = 600;
const MUTATION_NEEDED_MAX_SANDBOX_DURATION_MS: u64 = 120_000;
const MUTATION_NEEDED_MAX_VALIDATION_BUDGET_MS: u64 = 900_000;
const SUPERVISED_DEVLOOP_MAX_DOC_FILES: usize = 3;
/// Dimensions the replay release gate aggregates ROI metrics over.
pub const REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS: [&str; 2] =
    ["task_class", "source_sender_id"];
184
/// Structured checklist produced by `evaluate_repair_quality_gate`.
///
/// The four `root_cause`/`fix`/`verification`/`rollback` booleans feed
/// `structure_score`; `incident_anchor` and `has_actionable_command` gate the
/// overall verdict in `passes`.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct RepairQualityGateReport {
    pub root_cause: bool,
    pub fix: bool,
    pub verification: bool,
    pub rollback: bool,
    /// Plan references the original incident (e.g. "unknown command").
    pub incident_anchor: bool,
    /// Count of the four structure booleans that are set (0..=4).
    pub structure_score: usize,
    /// Plan mentions a runnable tool invocation ("cargo ", "git ", …).
    pub has_actionable_command: bool,
}
195
196impl RepairQualityGateReport {
197 pub fn passes(&self) -> bool {
198 self.incident_anchor
199 && self.structure_score >= 3
200 && (self.has_actionable_command || self.verification)
201 }
202
203 pub fn failed_checks(&self) -> Vec<String> {
204 let mut failed = Vec::new();
205 if !self.incident_anchor {
206 failed.push("包含unknown command故障上下文".to_string());
207 }
208 if self.structure_score < 3 {
209 failed.push("结构化修复信息至少满足3项(根因/修复/验证/回滚)".to_string());
210 }
211 if !(self.has_actionable_command || self.verification) {
212 failed.push("包含可执行验证命令或验证计划".to_string());
213 }
214 failed
215 }
216}
217
218pub fn evaluate_repair_quality_gate(plan: &str) -> RepairQualityGateReport {
219 fn contains_any(haystack: &str, needles: &[&str]) -> bool {
220 needles.iter().any(|needle| haystack.contains(needle))
221 }
222
223 let lower = plan.to_ascii_lowercase();
224 let root_cause = contains_any(
225 plan,
226 &["根因", "原因分析", "问题定位", "原因定位", "根本原因"],
227 ) || contains_any(
228 &lower,
229 &[
230 "root cause",
231 "cause analysis",
232 "problem diagnosis",
233 "diagnosis",
234 ],
235 );
236 let fix = contains_any(
237 plan,
238 &["修复步骤", "修复方案", "处理步骤", "修复建议", "整改方案"],
239 ) || contains_any(
240 &lower,
241 &[
242 "fix",
243 "remediation",
244 "mitigation",
245 "resolution",
246 "repair steps",
247 ],
248 );
249 let verification = contains_any(
250 plan,
251 &["验证命令", "验证步骤", "回归测试", "验证方式", "验收步骤"],
252 ) || contains_any(
253 &lower,
254 &[
255 "verification",
256 "validate",
257 "regression test",
258 "smoke test",
259 "test command",
260 ],
261 );
262 let rollback = contains_any(plan, &["回滚方案", "回滚步骤", "恢复方案", "撤销方案"])
263 || contains_any(&lower, &["rollback", "revert", "fallback plan", "undo"]);
264 let incident_anchor = contains_any(
265 &lower,
266 &[
267 "unknown command",
268 "process",
269 "proccess",
270 "command not found",
271 ],
272 ) || contains_any(plan, &["命令不存在", "命令未找到", "未知命令"]);
273 let structure_score = [root_cause, fix, verification, rollback]
274 .into_iter()
275 .filter(|ok| *ok)
276 .count();
277 let has_actionable_command = contains_any(
278 &lower,
279 &[
280 "cargo ", "git ", "python ", "pip ", "npm ", "pnpm ", "yarn ", "bash ", "make ",
281 ],
282 );
283
284 RepairQualityGateReport {
285 root_cause,
286 fix,
287 verification,
288 rollback,
289 incident_anchor,
290 structure_score,
291 has_actionable_command,
292 }
293}
294
295impl ValidationReport {
296 pub fn to_snapshot(&self, profile: &str) -> ValidationSnapshot {
297 ValidationSnapshot {
298 success: self.success,
299 profile: profile.to_string(),
300 duration_ms: self.duration_ms,
301 summary: if self.success {
302 "validation passed".into()
303 } else {
304 "validation failed".into()
305 },
306 }
307 }
308}
309
310pub fn extract_deterministic_signals(input: &SignalExtractionInput) -> SignalExtractionOutput {
311 let mut signals = BTreeSet::new();
312
313 for declared in &input.declared_signals {
314 if let Some(phrase) = normalize_signal_phrase(declared) {
315 signals.insert(phrase);
316 }
317 extend_signal_tokens(&mut signals, declared);
318 }
319
320 for text in [
321 input.patch_diff.as_str(),
322 input.intent.as_str(),
323 input.expected_effect.as_str(),
324 input.validation_logs.as_str(),
325 ] {
326 extend_signal_tokens(&mut signals, text);
327 }
328
329 for changed_file in &input.changed_files {
330 extend_signal_tokens(&mut signals, changed_file);
331 }
332
333 for stage_output in &input.stage_outputs {
334 extend_signal_tokens(&mut signals, stage_output);
335 }
336
337 signals.insert(if input.validation_success {
338 "validation passed".into()
339 } else {
340 "validation failed".into()
341 });
342
343 let values = signals.into_iter().take(32).collect::<Vec<_>>();
344 let hash =
345 stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
346 SignalExtractionOutput { values, hash }
347}
348
/// Errors surfaced by a `Validator` implementation.
#[derive(Debug, Error)]
pub enum ValidationError {
    /// Validation infrastructure itself failed to execute (distinct from a
    /// stage merely reporting a non-zero exit).
    #[error("validation execution failed: {0}")]
    Execution(String),
}
354
/// Runs a `ValidationPlan` against the workspace referenced by a sandbox
/// receipt and returns the aggregated report.
#[async_trait]
pub trait Validator: Send + Sync {
    async fn run(
        &self,
        receipt: &SandboxReceipt,
        plan: &ValidationPlan,
    ) -> Result<ValidationReport, ValidationError>;
}
363
/// `Validator` that executes each stage as a sandboxed external command.
pub struct CommandValidator {
    // Sandbox policy enforced for every stage command.
    policy: SandboxPolicy,
}
367
368impl CommandValidator {
369 pub fn new(policy: SandboxPolicy) -> Self {
370 Self { policy }
371 }
372}
373
374#[async_trait]
375impl Validator for CommandValidator {
376 async fn run(
377 &self,
378 receipt: &SandboxReceipt,
379 plan: &ValidationPlan,
380 ) -> Result<ValidationReport, ValidationError> {
381 let started = std::time::Instant::now();
382 let mut stages = Vec::new();
383 let mut success = true;
384 let mut logs = String::new();
385
386 for stage in &plan.stages {
387 match stage {
388 ValidationStage::Command {
389 program,
390 args,
391 timeout_ms,
392 } => {
393 let result = execute_allowed_command(
394 &self.policy,
395 &receipt.workdir,
396 program,
397 args,
398 *timeout_ms,
399 )
400 .await;
401 let report = match result {
402 Ok(output) => ValidationStageReport {
403 stage: format!("{program} {}", args.join(" ")),
404 success: output.success,
405 exit_code: output.exit_code,
406 duration_ms: output.duration_ms,
407 stdout: output.stdout,
408 stderr: output.stderr,
409 },
410 Err(err) => ValidationStageReport {
411 stage: format!("{program} {}", args.join(" ")),
412 success: false,
413 exit_code: None,
414 duration_ms: 0,
415 stdout: String::new(),
416 stderr: err.to_string(),
417 },
418 };
419 if !report.success {
420 success = false;
421 }
422 if !report.stdout.is_empty() {
423 logs.push_str(&report.stdout);
424 logs.push('\n');
425 }
426 if !report.stderr.is_empty() {
427 logs.push_str(&report.stderr);
428 logs.push('\n');
429 }
430 stages.push(report);
431 if !success {
432 break;
433 }
434 }
435 }
436 }
437
438 Ok(ValidationReport {
439 success,
440 duration_ms: started.elapsed().as_millis() as u64,
441 stages,
442 logs,
443 })
444 }
445}
446
/// Evidence from the replay "detect" phase: which task class matched and why
/// (or why not).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct ReplayDetectEvidence {
    pub task_class_id: String,
    pub task_label: String,
    pub matched_signals: Vec<String>,
    pub mismatch_reasons: Vec<String>,
}
454
/// One ranked replay candidate as recorded in the selection evidence.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayCandidateEvidence {
    /// 1-based rank in the candidate list.
    pub rank: usize,
    pub gene_id: String,
    /// Top capsule of the candidate gene, if it has any.
    pub capsule_id: Option<String>,
    /// Raw selector score for the gene.
    pub match_quality: f32,
    pub confidence: Option<f32>,
    pub environment_match_factor: Option<f32>,
    /// Discount applied when the candidate came from exact-match fallback
    /// lookup (see `COLD_START_LOOKUP_PENALTY`); 0.0 otherwise.
    pub cold_start_penalty: f32,
    /// `match_quality * (1.0 - cold_start_penalty)`.
    pub final_score: f32,
}
466
/// Evidence from the replay "select" phase: the ranked candidates and the
/// chosen gene/capsule (the first candidate, when any exist).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplaySelectEvidence {
    /// True when candidates came from exact-match fallback rather than the
    /// scoring selector.
    pub exact_match_lookup: bool,
    pub selected_gene_id: Option<String>,
    pub selected_capsule_id: Option<String>,
    pub candidates: Vec<ReplayCandidateEvidence>,
}
474
/// Final outcome of a replay attempt, with the evidence from each phase.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayDecision {
    /// Whether a stored capsule was actually replayed.
    pub used_capsule: bool,
    pub capsule_id: Option<CapsuleId>,
    /// True when the task must fall back to the planner instead of replaying.
    pub fallback_to_planner: bool,
    /// Human-readable explanation of the decision.
    pub reason: String,
    pub detect_evidence: ReplayDetectEvidence,
    pub select_evidence: ReplaySelectEvidence,
    pub economics_evidence: ReplayRoiEvidence,
}
485
/// Replay ROI counters aggregated per task class.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayTaskClassMetrics {
    pub task_class_id: String,
    pub task_label: String,
    pub replay_success_total: u64,
    pub replay_failure_total: u64,
    pub reasoning_steps_avoided_total: u64,
    pub reasoning_avoided_tokens_total: u64,
    pub replay_fallback_cost_total: u64,
    pub replay_roi: f64,
}
497
/// Replay ROI counters aggregated per originating sender.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplaySourceRoiMetrics {
    pub source_sender_id: String,
    pub replay_success_total: u64,
    pub replay_failure_total: u64,
    pub reasoning_avoided_tokens_total: u64,
    pub replay_fallback_cost_total: u64,
    pub replay_roi: f64,
}
507
/// Windowed replay ROI rollup: global totals plus per-task-class and
/// per-source breakdowns (the two `REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiWindowSummary {
    /// Timestamp the summary was produced (string-encoded).
    pub generated_at: String,
    pub window_seconds: u64,
    pub replay_attempts_total: u64,
    pub replay_success_total: u64,
    pub replay_failure_total: u64,
    pub reasoning_avoided_tokens_total: u64,
    pub replay_fallback_cost_total: u64,
    pub replay_roi: f64,
    pub replay_task_classes: Vec<ReplayTaskClassMetrics>,
    pub replay_sources: Vec<ReplaySourceRoiMetrics>,
}
521
/// Thresholds the replay ROI release gate checks a window summary against.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiReleaseGateThresholds {
    pub min_replay_attempts: u64,
    pub min_replay_hit_rate: f64,
    pub max_false_replay_rate: f64,
    pub min_reasoning_avoided_tokens: u64,
    pub min_replay_roi: f64,
    /// When true, the gate also requires the replay safety signal to hold.
    pub require_replay_safety: bool,
}
531
impl Default for ReplayRoiReleaseGateThresholds {
    /// Conservative defaults: at least 3 attempts, ≥60% hit rate, ≤25% false
    /// replays, token floor tied to `REPLAY_REASONING_TOKEN_FLOOR`, and the
    /// safety signal required.
    fn default() -> Self {
        Self {
            min_replay_attempts: 3,
            min_replay_hit_rate: 0.60,
            max_false_replay_rate: 0.25,
            min_reasoning_avoided_tokens: REPLAY_REASONING_TOKEN_FLOOR,
            min_replay_roi: 0.05,
            require_replay_safety: true,
        }
    }
}
544
/// Action the gate takes on a violation. Only blocking exists today, making
/// every fail-closed policy entry effectively "block".
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum ReplayRoiReleaseGateAction {
    BlockRelease,
}
550
/// Per-failure-mode action policy for the release gate (fail-closed design:
/// each mode maps to an explicit action instead of defaulting open).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct ReplayRoiReleaseGateFailClosedPolicy {
    pub on_threshold_violation: ReplayRoiReleaseGateAction,
    pub on_missing_metrics: ReplayRoiReleaseGateAction,
    pub on_invalid_metrics: ReplayRoiReleaseGateAction,
}
557
impl Default for ReplayRoiReleaseGateFailClosedPolicy {
    /// Default is fully fail-closed: every failure mode blocks the release.
    fn default() -> Self {
        Self {
            on_threshold_violation: ReplayRoiReleaseGateAction::BlockRelease,
            on_missing_metrics: ReplayRoiReleaseGateAction::BlockRelease,
            on_invalid_metrics: ReplayRoiReleaseGateAction::BlockRelease,
        }
    }
}
567
/// Operational safety preconditions feeding the gate's `replay_safety` flag.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct ReplayRoiReleaseGateSafetySignal {
    pub fail_closed_default: bool,
    pub rollback_ready: bool,
    pub audit_trail_complete: bool,
    pub has_replay_activity: bool,
}
575
/// Full input contract evaluated by the replay ROI release gate: window
/// metrics, derived rates, safety signal, thresholds, and fail-closed policy.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiReleaseGateInputContract {
    pub generated_at: String,
    pub window_seconds: u64,
    /// Expected to mirror `REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS`.
    pub aggregation_dimensions: Vec<String>,
    pub replay_attempts_total: u64,
    pub replay_success_total: u64,
    pub replay_failure_total: u64,
    pub replay_hit_rate: f64,
    pub false_replay_rate: f64,
    pub reasoning_avoided_tokens: u64,
    pub replay_fallback_cost_total: u64,
    pub replay_roi: f64,
    pub replay_safety: bool,
    pub replay_safety_signal: ReplayRoiReleaseGateSafetySignal,
    pub thresholds: ReplayRoiReleaseGateThresholds,
    pub fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy,
}
594
/// Gate verdict. `FailClosed` means a check or policy blocked the release;
/// `Indeterminate` means the gate could not reach a verdict.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum ReplayRoiReleaseGateStatus {
    Pass,
    FailClosed,
    Indeterminate,
}
602
/// Gate result: verdict, the checks that failed, and supporting evidence refs.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiReleaseGateOutputContract {
    pub status: ReplayRoiReleaseGateStatus,
    pub failed_checks: Vec<String>,
    pub evidence_refs: Vec<String>,
    pub summary: String,
}
610
/// Complete audit record of one gate evaluation: what went in, what came out.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiReleaseGateContract {
    pub input: ReplayRoiReleaseGateInputContract,
    pub output: ReplayRoiReleaseGateOutputContract,
}
616
/// Scheduling state computed per task by `MultiAgentCoordinator::classify_task`.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum CoordinationTaskState {
    /// All prerequisites satisfied; may run now.
    Ready,
    /// Has unfinished dependencies that can still complete.
    Waiting,
    /// A dependency failed or was skipped; may become skippable (Conditional).
    BlockedByFailure,
    /// Prerequisites can never be satisfied (missing dependency, or role
    /// invariants violated, e.g. Repair without a coder dependency).
    PermanentlyBlocked,
}
624
/// Stateless scheduler that drives a `CoordinationPlan` to completion; all
/// bookkeeping lives in local state inside `coordinate`.
#[derive(Clone, Debug, Default)]
pub struct MultiAgentCoordinator;
627
impl MultiAgentCoordinator {
    /// Creates the (zero-sized) coordinator.
    pub fn new() -> Self {
        Self
    }

    /// Runs the plan to quiescence and returns the execution summary.
    ///
    /// Loop shape: each pass optionally skips failure-blocked tasks
    /// (Conditional primitive only), gathers Ready tasks (limited to one for
    /// Sequential), simulates each, and retries failures until `max_retries`
    /// is exhausted. When no task is Ready the loop ends and every task still
    /// pending is reported as failed with a reason derived from its state.
    pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
        let primitive = plan.primitive.clone();
        let root_goal = plan.root_goal.clone();
        let timeout_ms = plan.timeout_ms;
        let max_retries = plan.max_retries;
        let mut tasks = BTreeMap::new();
        for task in plan.tasks {
            // Duplicate task ids: first occurrence wins, later ones dropped.
            tasks.entry(task.id.clone()).or_insert(task);
        }

        let mut pending = tasks.keys().cloned().collect::<BTreeSet<_>>();
        let mut completed = BTreeSet::new();
        let mut failed = BTreeSet::new();
        // *_order vectors preserve completion/failure order; the sets give
        // O(log n) membership checks.
        let mut completed_order = Vec::new();
        let mut failed_order = Vec::new();
        let mut skipped = BTreeSet::new();
        // task id -> number of simulated failures so far.
        let mut attempts = BTreeMap::new();
        let mut messages = Vec::new();

        loop {
            if matches!(primitive, CoordinationPrimitive::Conditional) {
                self.apply_conditional_skips(
                    &tasks,
                    &mut pending,
                    &completed,
                    &failed,
                    &mut skipped,
                    &mut messages,
                );
            }

            let mut ready = self.ready_task_ids(&tasks, &pending, &completed, &failed, &skipped);
            if ready.is_empty() {
                break;
            }
            if matches!(primitive, CoordinationPrimitive::Sequential) {
                // Sequential: run at most one task per pass.
                ready.truncate(1);
            }

            for task_id in ready {
                let Some(task) = tasks.get(&task_id) else {
                    continue;
                };
                if !pending.contains(&task_id) {
                    continue;
                }
                self.record_handoff_messages(task, &tasks, &completed, &failed, &mut messages);

                let prior_failures = attempts.get(&task_id).copied().unwrap_or(0);
                if Self::simulate_task_failure(task, prior_failures) {
                    let failure_count = prior_failures + 1;
                    attempts.insert(task_id.clone(), failure_count);
                    // A failing task stays in `pending` while retries remain,
                    // so a later pass picks it up again.
                    let will_retry = failure_count <= max_retries;
                    messages.push(CoordinationMessage {
                        from_role: task.role.clone(),
                        to_role: task.role.clone(),
                        task_id: task_id.clone(),
                        content: if will_retry {
                            format!("task {task_id} failed on attempt {failure_count} and will retry")
                        } else {
                            format!(
                                "task {task_id} failed on attempt {failure_count} and exhausted retries"
                            )
                        },
                    });
                    if !will_retry {
                        pending.remove(&task_id);
                        if failed.insert(task_id.clone()) {
                            failed_order.push(task_id);
                        }
                    }
                    continue;
                }

                pending.remove(&task_id);
                if completed.insert(task_id.clone()) {
                    completed_order.push(task_id);
                }
            }
        }

        // Anything still pending can never run; classify it for the message
        // and count it as failed.
        let blocked_ids = pending.into_iter().collect::<Vec<_>>();
        for task_id in blocked_ids {
            let Some(task) = tasks.get(&task_id) else {
                continue;
            };
            let state = self.classify_task(task, &tasks, &completed, &failed, &skipped);
            let content = match state {
                CoordinationTaskState::BlockedByFailure => {
                    format!("task {task_id} blocked by failed dependencies")
                }
                CoordinationTaskState::PermanentlyBlocked => {
                    format!("task {task_id} has invalid coordination prerequisites")
                }
                CoordinationTaskState::Waiting => {
                    format!("task {task_id} has unresolved dependencies")
                }
                CoordinationTaskState::Ready => {
                    format!("task {task_id} was left pending unexpectedly")
                }
            };
            messages.push(CoordinationMessage {
                from_role: task.role.clone(),
                to_role: task.role.clone(),
                task_id: task_id.clone(),
                content,
            });
            if failed.insert(task_id.clone()) {
                failed_order.push(task_id);
            }
        }

        CoordinationResult {
            completed_tasks: completed_order,
            failed_tasks: failed_order,
            messages,
            summary: format!(
                "goal '{}' completed {} tasks, failed {}, skipped {} using {:?} coordination (timeout={}ms, max_retries={})",
                root_goal,
                completed.len(),
                failed.len(),
                skipped.len(),
                primitive,
                timeout_ms,
                max_retries
            ),
        }
    }

    /// Pending tasks currently classified as `Ready` (BTreeSet order, so the
    /// result is deterministic).
    fn ready_task_ids(
        &self,
        tasks: &BTreeMap<String, CoordinationTask>,
        pending: &BTreeSet<String>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &BTreeSet<String>,
    ) -> Vec<String> {
        pending
            .iter()
            .filter_map(|task_id| {
                let task = tasks.get(task_id)?;
                (self.classify_task(task, tasks, completed, failed, skipped)
                    == CoordinationTaskState::Ready)
                    .then(|| task_id.clone())
            })
            .collect()
    }

    /// Conditional primitive: moves failure-blocked pending tasks to
    /// `skipped` (with a message) instead of leaving them to fail at the end.
    fn apply_conditional_skips(
        &self,
        tasks: &BTreeMap<String, CoordinationTask>,
        pending: &mut BTreeSet<String>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &mut BTreeSet<String>,
        messages: &mut Vec<CoordinationMessage>,
    ) {
        // Collect first: classification reads `skipped`, which we then mutate.
        let skip_ids = pending
            .iter()
            .filter_map(|task_id| {
                let task = tasks.get(task_id)?;
                (self.classify_task(task, tasks, completed, failed, skipped)
                    == CoordinationTaskState::BlockedByFailure)
                    .then(|| task_id.clone())
            })
            .collect::<Vec<_>>();

        for task_id in skip_ids {
            let Some(task) = tasks.get(&task_id) else {
                continue;
            };
            pending.remove(&task_id);
            skipped.insert(task_id.clone());
            messages.push(CoordinationMessage {
                from_role: task.role.clone(),
                to_role: task.role.clone(),
                task_id: task_id.clone(),
                content: format!("task {task_id} skipped due to failed dependency chain"),
            });
        }
    }

    /// Role-specific readiness rules:
    /// - Planner/Coder: ready once all dependencies completed; blocked by any
    ///   failed/skipped dependency; permanently blocked by a missing one.
    /// - Repair: requires a coder dependency and only becomes ready after
    ///   that coder has FAILED (repair exists to fix a failed coder task).
    /// - Optimizer: requires a Coder/Repair dependency and needs at least one
    ///   of them completed; non-impl dependency failures block it.
    fn classify_task(
        &self,
        task: &CoordinationTask,
        tasks: &BTreeMap<String, CoordinationTask>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &BTreeSet<String>,
    ) -> CoordinationTaskState {
        match task.role {
            AgentRole::Planner | AgentRole::Coder => {
                let mut waiting = false;
                for dependency_id in &task.depends_on {
                    if !tasks.contains_key(dependency_id) {
                        return CoordinationTaskState::PermanentlyBlocked;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if waiting {
                    CoordinationTaskState::Waiting
                } else {
                    CoordinationTaskState::Ready
                }
            }
            AgentRole::Repair => {
                let mut waiting = false;
                let mut has_coder_dependency = false;
                let mut has_failed_coder = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_coder = matches!(dependency.role, AgentRole::Coder);
                    if is_coder {
                        has_coder_dependency = true;
                    }
                    if skipped.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if failed.contains(dependency_id) {
                        // A failed coder is the trigger for repair; any other
                        // failed dependency blocks it.
                        if is_coder {
                            has_failed_coder = true;
                        } else {
                            return CoordinationTaskState::BlockedByFailure;
                        }
                        continue;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if !has_coder_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_failed_coder {
                    CoordinationTaskState::Ready
                } else {
                    // Coder succeeded: nothing to repair, so this task can
                    // never legitimately run.
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
            AgentRole::Optimizer => {
                let mut waiting = false;
                let mut has_impl_dependency = false;
                let mut has_completed_impl = false;
                let mut has_failed_impl = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_impl = matches!(dependency.role, AgentRole::Coder | AgentRole::Repair);
                    if is_impl {
                        has_impl_dependency = true;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        // Failed impl work is tolerated as long as some other
                        // impl dependency completed.
                        if is_impl {
                            has_failed_impl = true;
                            continue;
                        }
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if completed.contains(dependency_id) {
                        if is_impl {
                            has_completed_impl = true;
                        }
                        continue;
                    }
                    waiting = true;
                }
                if !has_impl_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_completed_impl {
                    CoordinationTaskState::Ready
                } else if has_failed_impl {
                    CoordinationTaskState::BlockedByFailure
                } else {
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
        }
    }

    /// Emits a handoff message for each resolved (completed or failed)
    /// dependency of a task that is about to run.
    fn record_handoff_messages(
        &self,
        task: &CoordinationTask,
        tasks: &BTreeMap<String, CoordinationTask>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        messages: &mut Vec<CoordinationMessage>,
    ) {
        let mut dependency_ids = task.depends_on.clone();
        dependency_ids.sort();
        dependency_ids.dedup();

        for dependency_id in dependency_ids {
            let Some(dependency) = tasks.get(&dependency_id) else {
                continue;
            };
            if completed.contains(&dependency_id) {
                messages.push(CoordinationMessage {
                    from_role: dependency.role.clone(),
                    to_role: task.role.clone(),
                    task_id: task.id.clone(),
                    content: format!("handoff from {dependency_id} to {}", task.id),
                });
            } else if failed.contains(&dependency_id) {
                messages.push(CoordinationMessage {
                    from_role: dependency.role.clone(),
                    to_role: task.role.clone(),
                    task_id: task.id.clone(),
                    content: format!("failed dependency {dependency_id} routed to {}", task.id),
                });
            }
        }
    }

    /// Test/simulation hook: a task fails when its description contains
    /// "force-fail" (always) or "fail-once" (first attempt only).
    fn simulate_task_failure(task: &CoordinationTask, prior_failures: u32) -> bool {
        let normalized = task.description.to_ascii_lowercase();
        normalized.contains("force-fail")
            || (normalized.contains("fail-once") && prior_failures == 0)
    }
}
963
/// Errors a `ReplayExecutor` can surface, tagged by the failing subsystem.
#[derive(Debug, Error)]
pub enum ReplayError {
    #[error("store error: {0}")]
    Store(String),
    #[error("sandbox error: {0}")]
    Sandbox(String),
    #[error("validation error: {0}")]
    Validation(String),
}
973
/// Attempts to satisfy a task by replaying a stored capsule instead of
/// re-planning.
#[async_trait]
pub trait ReplayExecutor: Send + Sync {
    /// Tries replay for the given selector input under the sandbox policy and
    /// validation plan, returning the full decision with evidence.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError>;

    /// Run-scoped variant. The default implementation ignores `run_id` and
    /// delegates to `try_replay`; implementors may use the id for attribution.
    async fn try_replay_for_run(
        &self,
        run_id: &RunId,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        let _ = run_id;
        self.try_replay(input, policy, validation).await
    }
}
994
/// `ReplayExecutor` backed by the evolution store: selects candidates via
/// `selector` (with store-based exact-match fallbacks), executes them in
/// `sandbox`, validates via `validator`, and gates through `governor`.
pub struct StoreReplayExecutor {
    pub sandbox: Arc<dyn Sandbox>,
    pub validator: Arc<dyn Validator>,
    pub store: Arc<dyn EvolutionStore>,
    pub selector: Arc<dyn Selector>,
    pub governor: Arc<dyn Governor>,
    /// Optional EVU ledger; together with `remote_publishers` it widens the
    /// candidate selection limit (see `collect_replay_candidates`).
    pub economics: Option<Arc<Mutex<EvuLedger>>>,
    /// Optional map of remote publishers — presumably sender id → endpoint;
    /// TODO confirm key/value semantics at the call sites outside this view.
    pub remote_publishers: Option<Arc<Mutex<BTreeMap<String, String>>>>,
    pub stake_policy: StakePolicy,
}
1005
/// Internal result of candidate collection: the ranked genes plus whether
/// they came from exact-match fallback lookup (vs. the scoring selector).
struct ReplayCandidates {
    candidates: Vec<GeneCandidate>,
    exact_match: bool,
}
1010
#[async_trait]
impl ReplayExecutor for StoreReplayExecutor {
    /// Delegates to `try_replay_inner` without run attribution.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        self.try_replay_inner(None, input, policy, validation).await
    }

    /// Delegates to `try_replay_inner`, attributing the attempt to `run_id`.
    async fn try_replay_for_run(
        &self,
        run_id: &RunId,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        self.try_replay_inner(Some(run_id), input, policy, validation)
            .await
    }
}
1033
1034impl StoreReplayExecutor {
1035 fn collect_replay_candidates(&self, input: &SelectorInput) -> ReplayCandidates {
1036 self.apply_confidence_revalidation();
1037 let mut selector_input = input.clone();
1038 if self.economics.is_some() && self.remote_publishers.is_some() {
1039 selector_input.limit = selector_input.limit.max(4);
1040 }
1041 let mut candidates = self.selector.select(&selector_input);
1042 self.rerank_with_reputation_bias(&mut candidates);
1043 let mut exact_match = false;
1044 if candidates.is_empty() {
1045 let mut exact_candidates = exact_match_candidates(self.store.as_ref(), input);
1046 self.rerank_with_reputation_bias(&mut exact_candidates);
1047 if !exact_candidates.is_empty() {
1048 candidates = exact_candidates;
1049 exact_match = true;
1050 }
1051 }
1052 if candidates.is_empty() {
1053 let mut remote_candidates =
1054 quarantined_remote_exact_match_candidates(self.store.as_ref(), input);
1055 self.rerank_with_reputation_bias(&mut remote_candidates);
1056 if !remote_candidates.is_empty() {
1057 candidates = remote_candidates;
1058 exact_match = true;
1059 }
1060 }
1061 candidates.truncate(input.limit.max(1));
1062 ReplayCandidates {
1063 candidates,
1064 exact_match,
1065 }
1066 }
1067
1068 fn build_select_evidence(
1069 &self,
1070 input: &SelectorInput,
1071 candidates: &[GeneCandidate],
1072 exact_match: bool,
1073 ) -> ReplaySelectEvidence {
1074 let cold_start_penalty = if exact_match {
1075 COLD_START_LOOKUP_PENALTY
1076 } else {
1077 0.0
1078 };
1079 let candidate_rows = candidates
1080 .iter()
1081 .enumerate()
1082 .map(|(idx, candidate)| {
1083 let top_capsule = candidate.capsules.first();
1084 let environment_match_factor = top_capsule
1085 .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env));
1086 let final_score = candidate.score * (1.0 - cold_start_penalty);
1087 ReplayCandidateEvidence {
1088 rank: idx + 1,
1089 gene_id: candidate.gene.id.clone(),
1090 capsule_id: top_capsule.map(|capsule| capsule.id.clone()),
1091 match_quality: candidate.score,
1092 confidence: top_capsule.map(|capsule| capsule.confidence),
1093 environment_match_factor,
1094 cold_start_penalty,
1095 final_score,
1096 }
1097 })
1098 .collect::<Vec<_>>();
1099
1100 ReplaySelectEvidence {
1101 exact_match_lookup: exact_match,
1102 selected_gene_id: candidate_rows
1103 .first()
1104 .map(|candidate| candidate.gene_id.clone()),
1105 selected_capsule_id: candidate_rows
1106 .first()
1107 .and_then(|candidate| candidate.capsule_id.clone()),
1108 candidates: candidate_rows,
1109 }
1110 }
1111
1112 fn apply_confidence_revalidation(&self) {
1113 let Ok(projection) = projection_snapshot(self.store.as_ref()) else {
1114 return;
1115 };
1116 for target in stale_replay_revalidation_targets(&projection, Utc::now()) {
1117 let reason = format!(
1118 "confidence decayed to {:.3}; revalidation required before replay",
1119 target.decayed_confidence
1120 );
1121 let confidence_decay_ratio = if target.peak_confidence > 0.0 {
1122 (target.decayed_confidence / target.peak_confidence).clamp(0.0, 1.0)
1123 } else {
1124 0.0
1125 };
1126 if self
1127 .store
1128 .append_event(EvolutionEvent::PromotionEvaluated {
1129 gene_id: target.gene_id.clone(),
1130 state: AssetState::Quarantined,
1131 reason: reason.clone(),
1132 reason_code: TransitionReasonCode::RevalidationConfidenceDecay,
1133 evidence: Some(TransitionEvidence {
1134 replay_attempts: None,
1135 replay_successes: None,
1136 replay_success_rate: None,
1137 environment_match_factor: None,
1138 decayed_confidence: Some(target.decayed_confidence),
1139 confidence_decay_ratio: Some(confidence_decay_ratio),
1140 summary: Some(format!(
1141 "phase=confidence_revalidation; decayed_confidence={:.3}; confidence_decay_ratio={:.3}",
1142 target.decayed_confidence, confidence_decay_ratio
1143 )),
1144 }),
1145 })
1146 .is_err()
1147 {
1148 continue;
1149 }
1150 for capsule_id in target.capsule_ids {
1151 if self
1152 .store
1153 .append_event(EvolutionEvent::CapsuleQuarantined { capsule_id })
1154 .is_err()
1155 {
1156 break;
1157 }
1158 }
1159 }
1160 }
1161
1162 fn build_replay_economics_evidence(
1163 &self,
1164 input: &SelectorInput,
1165 candidate: Option<&GeneCandidate>,
1166 source_sender_id: Option<&str>,
1167 success: bool,
1168 reason_code: ReplayRoiReasonCode,
1169 reason: &str,
1170 ) -> ReplayRoiEvidence {
1171 let (task_class_id, task_label) =
1172 replay_descriptor_from_candidate_or_input(candidate, input);
1173 let signal_source = candidate
1174 .map(|best| best.gene.signals.as_slice())
1175 .unwrap_or(input.signals.as_slice());
1176 let baseline_tokens = estimated_reasoning_tokens(signal_source);
1177 let reasoning_avoided_tokens = if success { baseline_tokens } else { 0 };
1178 let replay_fallback_cost = if success { 0 } else { baseline_tokens };
1179 let asset_origin =
1180 candidate.and_then(|best| strategy_metadata_value(&best.gene.strategy, "asset_origin"));
1181 let mut context_dimensions = vec![
1182 format!(
1183 "outcome={}",
1184 if success {
1185 "replay_hit"
1186 } else {
1187 "planner_fallback"
1188 }
1189 ),
1190 format!("reason={reason}"),
1191 format!("task_class_id={task_class_id}"),
1192 format!("task_label={task_label}"),
1193 ];
1194 if let Some(asset_origin) = asset_origin.as_deref() {
1195 context_dimensions.push(format!("asset_origin={asset_origin}"));
1196 }
1197 if let Some(source_sender_id) = source_sender_id {
1198 context_dimensions.push(format!("source_sender_id={source_sender_id}"));
1199 }
1200 ReplayRoiEvidence {
1201 success,
1202 reason_code,
1203 task_class_id,
1204 task_label,
1205 reasoning_avoided_tokens,
1206 replay_fallback_cost,
1207 replay_roi: compute_replay_roi(reasoning_avoided_tokens, replay_fallback_cost),
1208 asset_origin,
1209 source_sender_id: source_sender_id.map(ToOwned::to_owned),
1210 context_dimensions,
1211 }
1212 }
1213
1214 fn record_replay_economics(
1215 &self,
1216 replay_run_id: Option<&RunId>,
1217 candidate: Option<&GeneCandidate>,
1218 capsule_id: Option<&str>,
1219 evidence: ReplayRoiEvidence,
1220 ) -> Result<(), ReplayError> {
1221 self.store
1222 .append_event(EvolutionEvent::ReplayEconomicsRecorded {
1223 gene_id: candidate.map(|best| best.gene.id.clone()),
1224 capsule_id: capsule_id.map(ToOwned::to_owned),
1225 replay_run_id: replay_run_id.cloned(),
1226 evidence,
1227 })
1228 .map_err(|err| ReplayError::Store(err.to_string()))?;
1229 Ok(())
1230 }
1231
    /// Attempt to satisfy the request by replaying a stored capsule instead
    /// of running the planner.
    ///
    /// Stages: candidate selection → score gate → capsule / mutation payload
    /// resolution → sandbox patch apply → validation → shadow-validation
    /// state progression for remote assets → reuse + economics bookkeeping.
    ///
    /// Every miss path records fallback economics and returns a decision
    /// with `fallback_to_planner = true`; `Err` is reserved for store and
    /// validation infrastructure failures.
    async fn try_replay_inner(
        &self,
        replay_run_id: Option<&RunId>,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        // Stage 1: selection (includes confidence revalidation and the
        // exact-match fallbacks inside collect_replay_candidates).
        let ReplayCandidates {
            candidates,
            exact_match,
        } = self.collect_replay_candidates(input);
        let mut detect_evidence = replay_detect_evidence_from_input(input);
        let select_evidence = self.build_select_evidence(input, &candidates, exact_match);
        // Miss: nothing survived selection.
        let Some(best) = candidates.into_iter().next() else {
            detect_evidence
                .mismatch_reasons
                .push("no_candidate_after_select".to_string());
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                None,
                None,
                false,
                ReplayRoiReasonCode::ReplayMissNoMatchingGene,
                "no matching gene",
            );
            self.record_replay_economics(replay_run_id, None, None, economics_evidence.clone())?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "no matching gene".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        };
        // Fill in the detection evidence from the winning candidate.
        let (detected_task_class_id, detected_task_label) =
            replay_descriptor_from_candidate_or_input(Some(&best), input);
        detect_evidence.task_class_id = detected_task_class_id;
        detect_evidence.task_label = detected_task_label;
        detect_evidence.matched_signals =
            matched_replay_signals(&input.signals, &best.gene.signals);
        // Stage 2: score gate — exact-match lookups bypass the threshold.
        if !exact_match && best.score < 0.82 {
            detect_evidence
                .mismatch_reasons
                .push("score_below_threshold".to_string());
            let reason = format!("best gene score {:.3} below replay threshold", best.score);
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                None,
                false,
                ReplayRoiReasonCode::ReplayMissScoreBelowThreshold,
                &reason,
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                None,
                economics_evidence.clone(),
            )?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason,
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        }

        // Stage 3: resolve the candidate's leading capsule.
        let Some(capsule) = best.capsules.first().cloned() else {
            detect_evidence
                .mismatch_reasons
                .push("candidate_has_no_capsule".to_string());
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                None,
                false,
                ReplayRoiReasonCode::ReplayMissCandidateHasNoCapsule,
                "candidate gene has no capsule",
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                None,
                economics_evidence.clone(),
            )?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "candidate gene has no capsule".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        };
        // Remote provenance (if any) drives settlement and shadow handling.
        let remote_publisher = self.publisher_for_capsule(&capsule.id);

        // Stage 3b: the capsule's declared mutation payload must still exist.
        let Some(mutation) = find_declared_mutation(self.store.as_ref(), &capsule.mutation_id)
            .map_err(|err| ReplayError::Store(err.to_string()))?
        else {
            detect_evidence
                .mismatch_reasons
                .push("mutation_payload_missing".to_string());
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                remote_publisher.as_deref(),
                false,
                ReplayRoiReasonCode::ReplayMissMutationPayloadMissing,
                "mutation payload missing from store",
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                Some(&capsule.id),
                economics_evidence.clone(),
            )?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "mutation payload missing from store".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        };

        // Stage 4: apply the mutation inside the sandbox. A failed apply is
        // a miss (and a failed settlement for a remote publisher), not Err.
        let receipt = match self.sandbox.apply(&mutation, policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                self.record_reuse_settlement(remote_publisher.as_deref(), false);
                let reason = format!("replay patch apply failed: {err}");
                let economics_evidence = self.build_replay_economics_evidence(
                    input,
                    Some(&best),
                    remote_publisher.as_deref(),
                    false,
                    ReplayRoiReasonCode::ReplayMissPatchApplyFailed,
                    &reason,
                );
                self.record_replay_economics(
                    replay_run_id,
                    Some(&best),
                    Some(&capsule.id),
                    economics_evidence.clone(),
                )?;
                detect_evidence
                    .mismatch_reasons
                    .push("patch_apply_failed".to_string());
                return Ok(ReplayDecision {
                    used_capsule: false,
                    capsule_id: Some(capsule.id.clone()),
                    fallback_to_planner: true,
                    reason,
                    detect_evidence,
                    select_evidence,
                    economics_evidence,
                });
            }
        };

        // Stage 5: validate the applied mutation. Validation infrastructure
        // errors become Err; an unsuccessful report is a miss handled below.
        let report = self
            .validator
            .run(&receipt, validation)
            .await
            .map_err(|err| ReplayError::Validation(err.to_string()))?;
        if !report.success {
            // Persist the failure (may revoke the gene via the governor),
            // settle against the remote publisher, then fall back.
            self.record_replay_validation_failure(&best, &capsule, validation, &report)?;
            self.record_reuse_settlement(remote_publisher.as_deref(), false);
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                remote_publisher.as_deref(),
                false,
                ReplayRoiReasonCode::ReplayMissValidationFailed,
                "replay validation failed",
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                Some(&capsule.id),
                economics_evidence.clone(),
            )?;
            detect_evidence
                .mismatch_reasons
                .push("validation_failed".to_string());
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: Some(capsule.id.clone()),
                fallback_to_planner: true,
                reason: "replay validation failed".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        }

        // Stage 6: remote assets not yet promoted move through the shadow
        // validation state machine on each successful local replay.
        let requires_shadow_progression = remote_publisher.is_some()
            && matches!(
                capsule.state,
                AssetState::Quarantined | AssetState::ShadowValidated
            );
        if requires_shadow_progression {
            self.store
                .append_event(EvolutionEvent::ValidationPassed {
                    mutation_id: capsule.mutation_id.clone(),
                    report: report.to_snapshot(&validation.profile),
                    gene_id: Some(best.gene.id.clone()),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            let evidence = self.shadow_transition_evidence(&best.gene.id, &capsule, &input.env)?;
            // Quarantined → shadow on first pass; shadow → promoted once the
            // gate thresholds are met; otherwise hold in shadow.
            let (target_state, reason_code, reason, promote_now, phase) =
                if matches!(best.gene.state, AssetState::Quarantined) {
                    (
                        AssetState::ShadowValidated,
                        TransitionReasonCode::PromotionShadowValidationPassed,
                        "remote asset passed first local replay and entered shadow validation"
                            .into(),
                        false,
                        "quarantine_to_shadow",
                    )
                } else if shadow_promotion_gate_passed(&evidence) {
                    (
                        AssetState::Promoted,
                        TransitionReasonCode::PromotionRemoteReplayValidated,
                        "shadow validation thresholds satisfied; remote asset promoted".into(),
                        true,
                        "shadow_to_promoted",
                    )
                } else {
                    (
                        AssetState::ShadowValidated,
                        TransitionReasonCode::ShadowCollectingReplayEvidence,
                        "shadow validation collecting additional replay evidence".into(),
                        false,
                        "shadow_hold",
                    )
                };
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: best.gene.id.clone(),
                    state: target_state.clone(),
                    reason,
                    reason_code,
                    evidence: Some(evidence.to_transition_evidence(shadow_evidence_summary(
                        &evidence,
                        promote_now,
                        phase,
                    ))),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            if promote_now {
                self.store
                    .append_event(EvolutionEvent::GenePromoted {
                        gene_id: best.gene.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
            self.store
                .append_event(EvolutionEvent::CapsuleReleased {
                    capsule_id: capsule.id.clone(),
                    state: target_state,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
        }

        // Stage 7: record the successful reuse, settle economics, and return
        // the hit decision.
        self.store
            .append_event(EvolutionEvent::CapsuleReused {
                capsule_id: capsule.id.clone(),
                gene_id: capsule.gene_id.clone(),
                run_id: capsule.run_id.clone(),
                replay_run_id: replay_run_id.cloned(),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        self.record_reuse_settlement(remote_publisher.as_deref(), true);
        let reason = if exact_match {
            "replayed via cold-start lookup".to_string()
        } else {
            "replayed via selector".to_string()
        };
        let economics_evidence = self.build_replay_economics_evidence(
            input,
            Some(&best),
            remote_publisher.as_deref(),
            true,
            ReplayRoiReasonCode::ReplayHit,
            &reason,
        );
        self.record_replay_economics(
            replay_run_id,
            Some(&best),
            Some(&capsule.id),
            economics_evidence.clone(),
        )?;

        Ok(ReplayDecision {
            used_capsule: true,
            capsule_id: Some(capsule.id),
            fallback_to_planner: false,
            reason,
            detect_evidence,
            select_evidence,
            economics_evidence,
        })
    }
1543
1544 fn rerank_with_reputation_bias(&self, candidates: &mut [GeneCandidate]) {
1545 let Some(ledger) = self.economics.as_ref() else {
1546 return;
1547 };
1548 let reputation_bias = ledger
1549 .lock()
1550 .ok()
1551 .map(|locked| locked.selector_reputation_bias())
1552 .unwrap_or_default();
1553 if reputation_bias.is_empty() {
1554 return;
1555 }
1556 let required_assets = candidates
1557 .iter()
1558 .filter_map(|candidate| {
1559 candidate
1560 .capsules
1561 .first()
1562 .map(|capsule| capsule.id.as_str())
1563 })
1564 .collect::<Vec<_>>();
1565 let publisher_map = self.remote_publishers_snapshot(&required_assets);
1566 if publisher_map.is_empty() {
1567 return;
1568 }
1569 candidates.sort_by(|left, right| {
1570 effective_candidate_score(right, &publisher_map, &reputation_bias)
1571 .partial_cmp(&effective_candidate_score(
1572 left,
1573 &publisher_map,
1574 &reputation_bias,
1575 ))
1576 .unwrap_or(std::cmp::Ordering::Equal)
1577 .then_with(|| left.gene.id.cmp(&right.gene.id))
1578 });
1579 }
1580
1581 fn publisher_for_capsule(&self, capsule_id: &str) -> Option<String> {
1582 self.remote_publishers_snapshot(&[capsule_id])
1583 .get(capsule_id)
1584 .cloned()
1585 }
1586
1587 fn remote_publishers_snapshot(&self, required_assets: &[&str]) -> BTreeMap<String, String> {
1588 let cached = self
1589 .remote_publishers
1590 .as_ref()
1591 .and_then(|remote_publishers| {
1592 remote_publishers.lock().ok().map(|locked| locked.clone())
1593 })
1594 .unwrap_or_default();
1595 if !cached.is_empty()
1596 && required_assets
1597 .iter()
1598 .all(|asset_id| cached.contains_key(*asset_id))
1599 {
1600 return cached;
1601 }
1602
1603 let persisted = remote_publishers_by_asset_from_store(self.store.as_ref());
1604 if persisted.is_empty() {
1605 return cached;
1606 }
1607
1608 let mut merged = cached;
1609 for (asset_id, sender_id) in persisted {
1610 merged.entry(asset_id).or_insert(sender_id);
1611 }
1612
1613 if let Some(remote_publishers) = self.remote_publishers.as_ref() {
1614 if let Ok(mut locked) = remote_publishers.lock() {
1615 for (asset_id, sender_id) in &merged {
1616 locked.entry(asset_id.clone()).or_insert(sender_id.clone());
1617 }
1618 }
1619 }
1620
1621 merged
1622 }
1623
1624 fn record_reuse_settlement(&self, publisher_id: Option<&str>, success: bool) {
1625 let Some(publisher_id) = publisher_id else {
1626 return;
1627 };
1628 let Some(ledger) = self.economics.as_ref() else {
1629 return;
1630 };
1631 if let Ok(mut locked) = ledger.lock() {
1632 locked.settle_remote_reuse(publisher_id, success, &self.stake_policy);
1633 }
1634 }
1635
    /// Persist a replay validation failure and let the governor decide
    /// whether the failing gene must be revoked.
    ///
    /// Appends a `ValidationFailed` event, then evaluates the governor with
    /// the accumulated failure count, blast radius, and confidence context.
    /// If the governor's target state is `Revoked`, this also records the
    /// promotion transition, revokes the gene, and quarantines every capsule
    /// attached to the candidate.
    fn record_replay_validation_failure(
        &self,
        best: &GeneCandidate,
        capsule: &Capsule,
        validation: &ValidationPlan,
        report: &ValidationReport,
    ) -> Result<(), ReplayError> {
        let projection = projection_snapshot(self.store.as_ref())
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        // NOTE(review): confidence_context returns the peak value for both
        // current and historical confidence — see that method's doc.
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            Self::confidence_context(&projection, &best.gene.id);

        // Record the failed validation before consulting the governor, so
        // the failure count below includes this attempt's history.
        self.store
            .append_event(EvolutionEvent::ValidationFailed {
                mutation_id: capsule.mutation_id.clone(),
                report: report.to_snapshot(&validation.profile),
                gene_id: Some(best.gene.id.clone()),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;

        let replay_failures = self.replay_failure_count(&best.gene.id)?;
        let source_sender_id = self.publisher_for_capsule(&capsule.id);
        let governor_decision = self.governor.evaluate(GovernorInput {
            candidate_source: if source_sender_id.is_some() {
                CandidateSource::Remote
            } else {
                CandidateSource::Local
            },
            success_count: 0,
            blast_radius: BlastRadius {
                files_changed: capsule.outcome.changed_files.len(),
                lines_changed: capsule.outcome.lines_changed,
            },
            replay_failures,
            recent_mutation_ages_secs: Vec::new(),
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        // Only a Revoked verdict triggers side effects; any other target
        // state leaves the gene as-is.
        if matches!(governor_decision.target_state, AssetState::Revoked) {
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: best.gene.id.clone(),
                    state: AssetState::Revoked,
                    reason: governor_decision.reason.clone(),
                    reason_code: governor_decision.reason_code.clone(),
                    evidence: Some(TransitionEvidence {
                        replay_attempts: Some(replay_failures),
                        replay_successes: None,
                        replay_success_rate: None,
                        environment_match_factor: None,
                        decayed_confidence: Some(current_confidence),
                        confidence_decay_ratio: if historical_peak_confidence > 0.0 {
                            Some((current_confidence / historical_peak_confidence).clamp(0.0, 1.0))
                        } else {
                            None
                        },
                        summary: Some(replay_failure_revocation_summary(
                            replay_failures,
                            current_confidence,
                            historical_peak_confidence,
                            source_sender_id.as_deref(),
                        )),
                    }),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: best.gene.id.clone(),
                    reason: governor_decision.reason,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            // A revoked gene's capsules are all pulled from circulation.
            for related in &best.capsules {
                self.store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: related.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
        }

        Ok(())
    }
1720
1721 fn confidence_context(
1722 projection: &EvolutionProjection,
1723 gene_id: &str,
1724 ) -> (f32, f32, Option<u64>) {
1725 let peak_confidence = projection
1726 .capsules
1727 .iter()
1728 .filter(|capsule| capsule.gene_id == gene_id)
1729 .map(|capsule| capsule.confidence)
1730 .fold(0.0_f32, f32::max);
1731 let age_secs = projection
1732 .last_updated_at
1733 .get(gene_id)
1734 .and_then(|timestamp| Self::seconds_since_timestamp(timestamp, Utc::now()));
1735 (peak_confidence, peak_confidence, age_secs)
1736 }
1737
1738 fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
1739 let parsed = DateTime::parse_from_rfc3339(timestamp)
1740 .ok()?
1741 .with_timezone(&Utc);
1742 let elapsed = now.signed_duration_since(parsed);
1743 if elapsed < Duration::zero() {
1744 Some(0)
1745 } else {
1746 u64::try_from(elapsed.num_seconds()).ok()
1747 }
1748 }
1749
1750 fn replay_failure_count(&self, gene_id: &str) -> Result<u64, ReplayError> {
1751 Ok(self
1752 .store
1753 .scan(1)
1754 .map_err(|err| ReplayError::Store(err.to_string()))?
1755 .into_iter()
1756 .filter(|stored| {
1757 matches!(
1758 &stored.event,
1759 EvolutionEvent::ValidationFailed {
1760 gene_id: Some(current_gene_id),
1761 ..
1762 } if current_gene_id == gene_id
1763 )
1764 })
1765 .count() as u64)
1766 }
1767
1768 fn shadow_transition_evidence(
1769 &self,
1770 gene_id: &str,
1771 capsule: &Capsule,
1772 input_env: &EnvFingerprint,
1773 ) -> Result<ShadowTransitionEvidence, ReplayError> {
1774 let events = self
1775 .store
1776 .scan(1)
1777 .map_err(|err| ReplayError::Store(err.to_string()))?;
1778 let (replay_attempts, replay_successes) = events.iter().fold(
1779 (0_u64, 0_u64),
1780 |(attempts, successes), stored| match &stored.event {
1781 EvolutionEvent::ValidationPassed {
1782 gene_id: Some(current_gene_id),
1783 ..
1784 } if current_gene_id == gene_id => (attempts + 1, successes + 1),
1785 EvolutionEvent::ValidationFailed {
1786 gene_id: Some(current_gene_id),
1787 ..
1788 } if current_gene_id == gene_id => (attempts + 1, successes),
1789 _ => (attempts, successes),
1790 },
1791 );
1792 let replay_success_rate = safe_ratio(replay_successes, replay_attempts) as f32;
1793 let environment_match_factor = replay_environment_match_factor(input_env, &capsule.env);
1794 let projection = projection_snapshot(self.store.as_ref())
1795 .map_err(|err| ReplayError::Store(err.to_string()))?;
1796 let age_secs = projection
1797 .last_updated_at
1798 .get(gene_id)
1799 .and_then(|timestamp| Self::seconds_since_timestamp(timestamp, Utc::now()));
1800 let decayed_confidence = decayed_replay_confidence(capsule.confidence, age_secs);
1801 let confidence_decay_ratio = if capsule.confidence > 0.0 {
1802 (decayed_confidence / capsule.confidence).clamp(0.0, 1.0)
1803 } else {
1804 0.0
1805 };
1806
1807 Ok(ShadowTransitionEvidence {
1808 replay_attempts,
1809 replay_successes,
1810 replay_success_rate,
1811 environment_match_factor,
1812 decayed_confidence,
1813 confidence_decay_ratio,
1814 })
1815 }
1816}
1817
/// Replay-history evidence gathered while a remote asset is in shadow
/// validation; feeds both the promotion gate and the persisted
/// `TransitionEvidence`.
#[derive(Clone, Debug)]
struct ShadowTransitionEvidence {
    // Total validation runs (passed + failed) recorded for the gene.
    replay_attempts: u64,
    // Subset of attempts that passed validation.
    replay_successes: u64,
    // replay_successes / replay_attempts (0 when no attempts).
    replay_success_rate: f32,
    // Similarity between the requesting environment and the capsule's.
    environment_match_factor: f32,
    // Capsule confidence after age-based decay.
    decayed_confidence: f32,
    // decayed_confidence / original confidence, clamped to [0, 1].
    confidence_decay_ratio: f32,
}
1827
impl ShadowTransitionEvidence {
    /// Convert into the store-level `TransitionEvidence`, attaching the
    /// provided human-readable summary; every numeric field is populated.
    fn to_transition_evidence(&self, summary: String) -> TransitionEvidence {
        TransitionEvidence {
            replay_attempts: Some(self.replay_attempts),
            replay_successes: Some(self.replay_successes),
            replay_success_rate: Some(self.replay_success_rate),
            environment_match_factor: Some(self.environment_match_factor),
            decayed_confidence: Some(self.decayed_confidence),
            confidence_decay_ratio: Some(self.confidence_decay_ratio),
            summary: Some(summary),
        }
    }
}
1841
1842fn shadow_promotion_gate_passed(evidence: &ShadowTransitionEvidence) -> bool {
1843 evidence.replay_attempts >= SHADOW_PROMOTION_MIN_REPLAY_ATTEMPTS
1844 && evidence.replay_success_rate >= SHADOW_PROMOTION_MIN_SUCCESS_RATE
1845 && evidence.environment_match_factor >= SHADOW_PROMOTION_MIN_ENV_MATCH
1846 && evidence.decayed_confidence >= SHADOW_PROMOTION_MIN_DECAYED_CONFIDENCE
1847}
1848
1849fn shadow_evidence_summary(
1850 evidence: &ShadowTransitionEvidence,
1851 promoted: bool,
1852 phase: &str,
1853) -> String {
1854 format!(
1855 "phase={phase}; replay_attempts={}; replay_successes={}; replay_success_rate={:.3}; environment_match_factor={:.3}; decayed_confidence={:.3}; confidence_decay_ratio={:.3}; promote={promoted}",
1856 evidence.replay_attempts,
1857 evidence.replay_successes,
1858 evidence.replay_success_rate,
1859 evidence.environment_match_factor,
1860 evidence.decayed_confidence,
1861 evidence.confidence_decay_ratio,
1862 )
1863}
1864
/// A promoted gene flagged for revalidation because its decayed confidence
/// fell below the replay floor, together with the capsules to quarantine.
#[derive(Clone, Debug, PartialEq)]
struct ConfidenceRevalidationTarget {
    gene_id: String,
    // Ids of the gene's promoted capsules at the time of evaluation.
    capsule_ids: Vec<String>,
    // Best (undecayed) confidence among the promoted capsules.
    peak_confidence: f32,
    // Best confidence after age-based decay — below MIN_REPLAY_CONFIDENCE.
    decayed_confidence: f32,
}
1872
1873fn stale_replay_revalidation_targets(
1874 projection: &EvolutionProjection,
1875 now: DateTime<Utc>,
1876) -> Vec<ConfidenceRevalidationTarget> {
1877 projection
1878 .genes
1879 .iter()
1880 .filter(|gene| gene.state == AssetState::Promoted)
1881 .filter_map(|gene| {
1882 let promoted_capsules = projection
1883 .capsules
1884 .iter()
1885 .filter(|capsule| {
1886 capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
1887 })
1888 .collect::<Vec<_>>();
1889 if promoted_capsules.is_empty() {
1890 return None;
1891 }
1892 let age_secs = projection
1893 .last_updated_at
1894 .get(&gene.id)
1895 .and_then(|timestamp| seconds_since_timestamp_for_confidence(timestamp, now));
1896 let decayed_confidence = promoted_capsules
1897 .iter()
1898 .map(|capsule| decayed_replay_confidence(capsule.confidence, age_secs))
1899 .fold(0.0_f32, f32::max);
1900 if decayed_confidence >= MIN_REPLAY_CONFIDENCE {
1901 return None;
1902 }
1903 let peak_confidence = promoted_capsules
1904 .iter()
1905 .map(|capsule| capsule.confidence)
1906 .fold(0.0_f32, f32::max);
1907 Some(ConfidenceRevalidationTarget {
1908 gene_id: gene.id.clone(),
1909 capsule_ids: promoted_capsules
1910 .into_iter()
1911 .map(|capsule| capsule.id.clone())
1912 .collect(),
1913 peak_confidence,
1914 decayed_confidence,
1915 })
1916 })
1917 .collect()
1918}
1919
1920fn seconds_since_timestamp_for_confidence(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
1921 let parsed = DateTime::parse_from_rfc3339(timestamp)
1922 .ok()?
1923 .with_timezone(&Utc);
1924 let elapsed = now.signed_duration_since(parsed);
1925 if elapsed < Duration::zero() {
1926 Some(0)
1927 } else {
1928 u64::try_from(elapsed.num_seconds()).ok()
1929 }
1930}
1931
/// Errors surfaced by the evolution kernel / network-node operations.
#[derive(Debug, Error)]
pub enum EvoKernelError {
    /// Sandbox execution (patch apply / command run) failed.
    #[error("sandbox error: {0}")]
    Sandbox(String),
    /// The validator could not be run (infrastructure failure).
    #[error("validation error: {0}")]
    Validation(String),
    /// Validation ran but reported failure; carries the full report.
    #[error("validation failed")]
    ValidationFailed(ValidationReport),
    /// The evolution event store rejected a read or append.
    #[error("store error: {0}")]
    Store(String),
}
1943
/// Result of capturing a successful mutation as a reusable asset: the
/// persisted capsule, its owning gene, and the governor's verdict.
#[derive(Clone, Debug)]
pub struct CaptureOutcome {
    pub capsule: Capsule,
    pub gene: Gene,
    pub governor_decision: GovernorDecision,
}
1950
/// Result of importing a remote asset envelope into the local store.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ImportOutcome {
    /// Ids of the assets actually imported by this call.
    pub imported_asset_ids: Vec<String>,
    /// Whether the envelope was accepted.
    pub accepted: bool,
    /// Pagination cursor for the next fetch, when more assets remain.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub next_cursor: Option<String>,
    /// Opaque token for resuming an interrupted sync.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resume_token: Option<String>,
    /// Audit record of the synchronization.
    #[serde(default)]
    pub sync_audit: SyncAudit,
}
1962
/// Point-in-time metrics derived from the evolution event store, covering
/// replay outcomes, ROI, mutation lifecycle counters, and promotion totals.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct EvolutionMetricsSnapshot {
    pub replay_attempts_total: u64,
    pub replay_success_total: u64,
    pub replay_success_rate: f64,
    pub confidence_revalidations_total: u64,
    pub replay_reasoning_avoided_total: u64,
    pub reasoning_avoided_tokens_total: u64,
    pub replay_fallback_cost_total: u64,
    pub replay_roi: f64,
    /// Per-task-class replay breakdown.
    pub replay_task_classes: Vec<ReplayTaskClassMetrics>,
    /// Per-source (publisher) ROI breakdown.
    pub replay_sources: Vec<ReplaySourceRoiMetrics>,
    pub mutation_declared_total: u64,
    pub promoted_mutations_total: u64,
    pub promotion_ratio: f64,
    pub gene_revocations_total: u64,
    pub mutation_velocity_last_hour: u64,
    pub revoke_frequency_last_hour: u64,
    pub promoted_genes: u64,
    pub promoted_capsules: u64,
    /// Sequence number of the last event folded into this snapshot.
    pub last_event_seq: u64,
}
1985
/// Compact health view derived from `EvolutionMetricsSnapshot`, suitable for
/// health endpoints.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct EvolutionHealthSnapshot {
    /// Overall health status string.
    pub status: String,
    pub last_event_seq: u64,
    pub promoted_genes: u64,
    pub promoted_capsules: u64,
}
1993
/// A node in the evolution network: a thin facade over an evolution store
/// exposing publish/fetch/revoke, experience recording, and metrics.
#[derive(Clone)]
pub struct EvolutionNetworkNode {
    pub store: Arc<dyn EvolutionStore>,
}
1998
impl EvolutionNetworkNode {
    /// Create a node backed by the given store.
    pub fn new(store: Arc<dyn EvolutionStore>) -> Self {
        Self { store }
    }

    /// Create a node backed by a JSONL store at the default root.
    pub fn with_default_store() -> Self {
        Self {
            store: Arc::new(JsonlEvolutionStore::new(default_store_root())),
        }
    }

    /// Import the assets in a publish request, resuming from the request's
    /// cursor / resume token when supplied.
    pub fn accept_publish_request(
        &self,
        request: &PublishRequest,
    ) -> Result<ImportOutcome, EvoKernelError> {
        let requested_cursor = resolve_requested_cursor(
            &request.sender_id,
            request.since_cursor.as_deref(),
            request.resume_token.as_deref(),
        )?;
        import_remote_envelope_into_store(
            self.store.as_ref(),
            &EvolutionEnvelope::publish(request.sender_id.clone(), request.assets.clone()),
            None,
            requested_cursor,
        )
    }

    /// Seed the store with the built-in experience assets, attributed to
    /// `sender_id`.
    pub fn ensure_builtin_experience_assets(
        &self,
        sender_id: impl Into<String>,
    ) -> Result<ImportOutcome, EvoKernelError> {
        ensure_builtin_experience_assets_in_store(self.store.as_ref(), sender_id.into())
    }

    /// Record an externally reported experience (signals, strategy,
    /// validation notes) under the given gene id.
    pub fn record_reported_experience(
        &self,
        sender_id: impl Into<String>,
        gene_id: impl Into<String>,
        signals: Vec<String>,
        strategy: Vec<String>,
        validation: Vec<String>,
    ) -> Result<ImportOutcome, EvoKernelError> {
        record_reported_experience_in_store(
            self.store.as_ref(),
            sender_id.into(),
            gene_id.into(),
            signals,
            strategy,
            validation,
        )
    }

    /// Export this node's promoted assets as a publish envelope.
    pub fn publish_local_assets(
        &self,
        sender_id: impl Into<String>,
    ) -> Result<EvolutionEnvelope, EvoKernelError> {
        export_promoted_assets_from_store(self.store.as_ref(), sender_id)
    }

    /// Answer a fetch query against this node's store.
    pub fn fetch_assets(
        &self,
        responder_id: impl Into<String>,
        query: &FetchQuery,
    ) -> Result<FetchResponse, EvoKernelError> {
        fetch_assets_from_store(self.store.as_ref(), responder_id, query)
    }

    /// Apply a revoke notice to this node's store.
    pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
        revoke_assets_in_store(self.store.as_ref(), notice)
    }

    /// Compute the current metrics snapshot from the event store.
    pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
        evolution_metrics_snapshot(self.store.as_ref())
    }

    /// Summarize replay ROI over the trailing window, for release gating.
    pub fn replay_roi_release_gate_summary(
        &self,
        window_seconds: u64,
    ) -> Result<ReplayRoiWindowSummary, EvoKernelError> {
        replay_roi_release_gate_summary(self.store.as_ref(), window_seconds)
    }

    /// Same as `replay_roi_release_gate_summary`, rendered as pretty JSON.
    pub fn render_replay_roi_release_gate_summary_json(
        &self,
        window_seconds: u64,
    ) -> Result<String, EvoKernelError> {
        let summary = self.replay_roi_release_gate_summary(window_seconds)?;
        serde_json::to_string_pretty(&summary)
            .map_err(|err| EvoKernelError::Validation(err.to_string()))
    }

    /// Evaluate the windowed ROI summary against release-gate thresholds.
    pub fn replay_roi_release_gate_contract(
        &self,
        window_seconds: u64,
        thresholds: ReplayRoiReleaseGateThresholds,
    ) -> Result<ReplayRoiReleaseGateContract, EvoKernelError> {
        let summary = self.replay_roi_release_gate_summary(window_seconds)?;
        Ok(replay_roi_release_gate_contract(&summary, thresholds))
    }

    /// Same as `replay_roi_release_gate_contract`, rendered as pretty JSON.
    pub fn render_replay_roi_release_gate_contract_json(
        &self,
        window_seconds: u64,
        thresholds: ReplayRoiReleaseGateThresholds,
    ) -> Result<String, EvoKernelError> {
        let contract = self.replay_roi_release_gate_contract(window_seconds, thresholds)?;
        serde_json::to_string_pretty(&contract)
            .map_err(|err| EvoKernelError::Validation(err.to_string()))
    }

    /// Render the metrics (plus derived health) in Prometheus text format.
    pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
        self.metrics_snapshot().map(|snapshot| {
            let health = evolution_health_snapshot(&snapshot);
            render_evolution_metrics_prometheus(&snapshot, &health)
        })
    }

    /// Derive the compact health snapshot from the current metrics.
    pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
        self.metrics_snapshot()
            .map(|snapshot| evolution_health_snapshot(&snapshot))
    }
}
2122
/// Evolution-capable kernel facade: composes the core run kernel with the
/// sandbox, validation, persistence, selection, governance, and economics
/// subsystems that drive mutation capture and capsule replay.
pub struct EvoKernel<S: KernelState> {
    /// Core execution kernel the evolution layer wraps.
    pub kernel: Arc<Kernel<S>>,
    /// Isolated environment in which prepared mutations are applied.
    pub sandbox: Arc<dyn Sandbox>,
    /// Runs the validation plan against sandbox receipts.
    pub validator: Arc<dyn Validator>,
    /// Append-only event store backing projections and audit trails.
    pub store: Arc<dyn EvolutionStore>,
    /// Chooses replay candidates from stored genes/capsules.
    pub selector: Arc<dyn Selector>,
    /// Decides promotion/quarantine/revocation of captured assets.
    pub governor: Arc<dyn Governor>,
    /// Shared EVU ledger used for publish stakes and reputation bias.
    pub economics: Arc<Mutex<EvuLedger>>,
    /// Remote publisher registry handed to envelope import.
    /// NOTE(review): key/value semantics (id -> what?) are defined by the
    /// network layer — confirm before relying on the value meaning.
    pub remote_publishers: Arc<Mutex<BTreeMap<String, String>>>,
    /// Policy governing stake reservations for remote publishes.
    pub stake_policy: StakePolicy,
    /// Resource/safety limits applied to sandbox executions.
    pub sandbox_policy: SandboxPolicy,
    /// Validation stages/budgets run after a mutation applies.
    pub validation_plan: ValidationPlan,
}
2136
2137impl<S: KernelState> EvoKernel<S> {
2138 fn recent_prior_mutation_ages_secs(
2139 &self,
2140 exclude_mutation_id: Option<&str>,
2141 ) -> Result<Vec<u64>, EvolutionError> {
2142 let now = Utc::now();
2143 let mut ages = self
2144 .store
2145 .scan(1)?
2146 .into_iter()
2147 .filter_map(|stored| match stored.event {
2148 EvolutionEvent::MutationDeclared { mutation }
2149 if exclude_mutation_id != Some(mutation.intent.id.as_str()) =>
2150 {
2151 Self::seconds_since_timestamp(&stored.timestamp, now)
2152 }
2153 _ => None,
2154 })
2155 .collect::<Vec<_>>();
2156 ages.sort_unstable();
2157 Ok(ages)
2158 }
2159
2160 fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
2161 let parsed = DateTime::parse_from_rfc3339(timestamp)
2162 .ok()?
2163 .with_timezone(&Utc);
2164 let elapsed = now.signed_duration_since(parsed);
2165 if elapsed < Duration::zero() {
2166 Some(0)
2167 } else {
2168 u64::try_from(elapsed.num_seconds()).ok()
2169 }
2170 }
2171
2172 pub fn new(
2173 kernel: Arc<Kernel<S>>,
2174 sandbox: Arc<dyn Sandbox>,
2175 validator: Arc<dyn Validator>,
2176 store: Arc<dyn EvolutionStore>,
2177 ) -> Self {
2178 let selector: Arc<dyn Selector> = Arc::new(StoreBackedSelector::new(store.clone()));
2179 Self {
2180 kernel,
2181 sandbox,
2182 validator,
2183 store,
2184 selector,
2185 governor: Arc::new(DefaultGovernor::default()),
2186 economics: Arc::new(Mutex::new(EvuLedger::default())),
2187 remote_publishers: Arc::new(Mutex::new(BTreeMap::new())),
2188 stake_policy: StakePolicy::default(),
2189 sandbox_policy: SandboxPolicy::oris_default(),
2190 validation_plan: ValidationPlan::oris_default(),
2191 }
2192 }
2193
2194 pub fn with_selector(mut self, selector: Arc<dyn Selector>) -> Self {
2195 self.selector = selector;
2196 self
2197 }
2198
2199 pub fn with_sandbox_policy(mut self, policy: SandboxPolicy) -> Self {
2200 self.sandbox_policy = policy;
2201 self
2202 }
2203
2204 pub fn with_governor(mut self, governor: Arc<dyn Governor>) -> Self {
2205 self.governor = governor;
2206 self
2207 }
2208
2209 pub fn with_economics(mut self, economics: Arc<Mutex<EvuLedger>>) -> Self {
2210 self.economics = economics;
2211 self
2212 }
2213
2214 pub fn with_stake_policy(mut self, policy: StakePolicy) -> Self {
2215 self.stake_policy = policy;
2216 self
2217 }
2218
2219 pub fn with_validation_plan(mut self, plan: ValidationPlan) -> Self {
2220 self.validation_plan = plan;
2221 self
2222 }
2223
2224 pub fn select_candidates(&self, input: &SelectorInput) -> Vec<GeneCandidate> {
2225 let executor = StoreReplayExecutor {
2226 sandbox: self.sandbox.clone(),
2227 validator: self.validator.clone(),
2228 store: self.store.clone(),
2229 selector: self.selector.clone(),
2230 governor: self.governor.clone(),
2231 economics: Some(self.economics.clone()),
2232 remote_publishers: Some(self.remote_publishers.clone()),
2233 stake_policy: self.stake_policy.clone(),
2234 };
2235 executor.collect_replay_candidates(input).candidates
2236 }
2237
    /// Seed the store with the built-in bootstrap genes and capsules when no
    /// genes exist yet; a non-empty projection makes this a no-op returning a
    /// default (unseeded) report.
    pub fn bootstrap_if_empty(&self, run_id: &RunId) -> Result<BootstrapReport, EvoKernelError> {
        let projection = projection_snapshot(self.store.as_ref())?;
        if !projection.genes.is_empty() {
            return Ok(BootstrapReport::default());
        }

        let templates = built_in_seed_templates();
        for template in &templates {
            let mutation = build_seed_mutation(template);
            let extracted = extract_seed_signals(template);
            let gene = build_bootstrap_gene(template, &extracted)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
            let capsule = build_bootstrap_capsule(run_id, template, &mutation, &gene)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;

            // Event order mirrors the live capture pipeline: declare ->
            // extract signals -> project gene -> evaluate promotion ->
            // commit capsule -> quarantine. Seeds start quarantined because
            // they have never passed local validation.
            self.store
                .append_event(EvolutionEvent::MutationDeclared {
                    mutation: mutation.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::SignalsExtracted {
                    mutation_id: mutation.intent.id.clone(),
                    hash: extracted.hash.clone(),
                    signals: extracted.values.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: gene.id.clone(),
                    state: AssetState::Quarantined,
                    reason: "bootstrap seeds require local validation before replay".into(),
                    reason_code: TransitionReasonCode::DowngradeBootstrapRequiresLocalValidation,
                    evidence: None,
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleCommitted {
                    capsule: capsule.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id,
                })
                .map_err(store_err)?;
        }

        Ok(BootstrapReport {
            seeded: true,
            genes_added: templates.len(),
            capsules_added: templates.len(),
        })
    }
2295
2296 pub async fn capture_successful_mutation(
2297 &self,
2298 run_id: &RunId,
2299 mutation: PreparedMutation,
2300 ) -> Result<Capsule, EvoKernelError> {
2301 Ok(self
2302 .capture_mutation_with_governor(run_id, mutation)
2303 .await?
2304 .capsule)
2305 }
2306
    /// Full capture pipeline for one prepared mutation: declare -> sandbox
    /// apply -> validate -> extract signals -> derive gene -> governor
    /// evaluation -> commit capsule. Every step appends audit events before
    /// returning, including on failure, so the store records why a mutation
    /// was rejected. The event order is part of the audit contract — do not
    /// reorder the appends.
    pub async fn capture_mutation_with_governor(
        &self,
        run_id: &RunId,
        mutation: PreparedMutation,
    ) -> Result<CaptureOutcome, EvoKernelError> {
        // Record intent before any side effects so rejections are traceable.
        self.store
            .append_event(EvolutionEvent::MutationDeclared {
                mutation: mutation.clone(),
            })
            .map_err(store_err)?;

        // Apply the patch in the sandbox; on failure emit a normalized
        // MutationRejected contract and fail with a Sandbox error.
        let receipt = match self.sandbox.apply(&mutation, &self.sandbox_policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                let message = err.to_string();
                let contract = mutation_needed_contract_for_error_message(&message);
                self.store
                    .append_event(EvolutionEvent::MutationRejected {
                        mutation_id: mutation.intent.id.clone(),
                        reason: contract.failure_reason,
                        reason_code: Some(
                            mutation_needed_reason_code_key(contract.reason_code).to_string(),
                        ),
                        recovery_hint: Some(contract.recovery_hint),
                        fail_closed: contract.fail_closed,
                    })
                    .map_err(store_err)?;
                return Err(EvoKernelError::Sandbox(message));
            }
        };

        self.store
            .append_event(EvolutionEvent::MutationApplied {
                mutation_id: mutation.intent.id.clone(),
                patch_hash: receipt.patch_hash.clone(),
                changed_files: receipt
                    .changed_files
                    .iter()
                    .map(|path| path.to_string_lossy().to_string())
                    .collect(),
            })
            .map_err(store_err)?;

        // Run the validation plan. Distinguish "validator could not run"
        // (Validation error) from "validation ran and failed" (below).
        let report = match self.validator.run(&receipt, &self.validation_plan).await {
            Ok(report) => report,
            Err(err) => {
                let message = format!("mutation-needed validation execution error: {err}");
                let contract = mutation_needed_contract_for_error_message(&message);
                self.store
                    .append_event(EvolutionEvent::MutationRejected {
                        mutation_id: mutation.intent.id.clone(),
                        reason: contract.failure_reason,
                        reason_code: Some(
                            mutation_needed_reason_code_key(contract.reason_code).to_string(),
                        ),
                        recovery_hint: Some(contract.recovery_hint),
                        fail_closed: contract.fail_closed,
                    })
                    .map_err(store_err)?;
                return Err(EvoKernelError::Validation(message));
            }
        };
        if !report.success {
            // Validation ran but failed: record both the failed report and
            // the rejection contract before surfacing the report to callers.
            self.store
                .append_event(EvolutionEvent::ValidationFailed {
                    mutation_id: mutation.intent.id.clone(),
                    report: report.to_snapshot(&self.validation_plan.profile),
                    gene_id: None,
                })
                .map_err(store_err)?;
            let contract = mutation_needed_contract_for_validation_failure(
                &self.validation_plan.profile,
                &report,
            );
            self.store
                .append_event(EvolutionEvent::MutationRejected {
                    mutation_id: mutation.intent.id.clone(),
                    reason: contract.failure_reason,
                    reason_code: Some(
                        mutation_needed_reason_code_key(contract.reason_code).to_string(),
                    ),
                    recovery_hint: Some(contract.recovery_hint),
                    fail_closed: contract.fail_closed,
                })
                .map_err(store_err)?;
            return Err(EvoKernelError::ValidationFailed(report));
        }

        self.store
            .append_event(EvolutionEvent::ValidationPassed {
                mutation_id: mutation.intent.id.clone(),
                report: report.to_snapshot(&self.validation_plan.profile),
                gene_id: None,
            })
            .map_err(store_err)?;

        // Derive deterministic retrieval signals from the patch, intent,
        // changed files, and validation output.
        let extracted_signals = extract_deterministic_signals(&SignalExtractionInput {
            patch_diff: mutation.artifact.payload.clone(),
            intent: mutation.intent.intent.clone(),
            expected_effect: mutation.intent.expected_effect.clone(),
            declared_signals: mutation.intent.signals.clone(),
            changed_files: receipt
                .changed_files
                .iter()
                .map(|path| path.to_string_lossy().to_string())
                .collect(),
            validation_success: report.success,
            validation_logs: report.logs.clone(),
            stage_outputs: report
                .stages
                .iter()
                .flat_map(|stage| [stage.stdout.clone(), stage.stderr.clone()])
                .filter(|value| !value.is_empty())
                .collect(),
        });
        self.store
            .append_event(EvolutionEvent::SignalsExtracted {
                mutation_id: mutation.intent.id.clone(),
                hash: extracted_signals.hash.clone(),
                signals: extracted_signals.values.clone(),
            })
            .map_err(store_err)?;

        // Gather the governor's inputs: projection state, blast radius of
        // the diff, and ages of prior mutations (excluding this one).
        let projection = projection_snapshot(self.store.as_ref())?;
        let blast_radius = compute_blast_radius(&mutation.artifact.payload);
        let recent_mutation_ages_secs = self
            .recent_prior_mutation_ages_secs(Some(mutation.intent.id.as_str()))
            .map_err(store_err)?;
        let mut gene = derive_gene(
            &mutation,
            &receipt,
            &self.validation_plan.profile,
            &extracted_signals.values,
        );
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            StoreReplayExecutor::confidence_context(&projection, &gene.id);
        // Success count = prior capsules for this gene (0 if gene is new),
        // plus one for the capture happening now.
        let success_count = projection
            .genes
            .iter()
            .find(|existing| existing.id == gene.id)
            .map(|existing| {
                projection
                    .capsules
                    .iter()
                    .filter(|capsule| capsule.gene_id == existing.id)
                    .count() as u64
            })
            .unwrap_or(0)
            + 1;
        let governor_decision = self.governor.evaluate(GovernorInput {
            candidate_source: CandidateSource::Local,
            success_count,
            blast_radius: blast_radius.clone(),
            replay_failures: 0,
            recent_mutation_ages_secs,
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        // Project the gene in its governor-decided state, then record the
        // decision plus any promote/revoke side events.
        gene.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
            .map_err(store_err)?;
        self.store
            .append_event(EvolutionEvent::PromotionEvaluated {
                gene_id: gene.id.clone(),
                state: governor_decision.target_state.clone(),
                reason: governor_decision.reason.clone(),
                reason_code: governor_decision.reason_code.clone(),
                evidence: None,
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Promoted) {
            self.store
                .append_event(EvolutionEvent::GenePromoted {
                    gene_id: gene.id.clone(),
                })
                .map_err(store_err)?;
        }
        if matches!(governor_decision.target_state, AssetState::Revoked) {
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: gene.id.clone(),
                    reason: governor_decision.reason.clone(),
                })
                .map_err(store_err)?;
        }
        if let Some(spec_id) = &mutation.intent.spec_id {
            self.store
                .append_event(EvolutionEvent::SpecLinked {
                    mutation_id: mutation.intent.id.clone(),
                    spec_id: spec_id.clone(),
                })
                .map_err(store_err)?;
        }

        // Commit the capsule in the same state the governor assigned to the
        // gene, quarantining it when required.
        let mut capsule = build_capsule(
            run_id,
            &mutation,
            &receipt,
            &report,
            &self.validation_plan.profile,
            &gene,
            &blast_radius,
        )
        .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
        capsule.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::CapsuleCommitted {
                capsule: capsule.clone(),
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Quarantined) {
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id.clone(),
                })
                .map_err(store_err)?;
        }

        Ok(CaptureOutcome {
            capsule,
            gene,
            governor_decision,
        })
    }
2534
2535 pub async fn capture_from_proposal(
2536 &self,
2537 run_id: &RunId,
2538 proposal: &AgentMutationProposal,
2539 diff_payload: String,
2540 base_revision: Option<String>,
2541 ) -> Result<CaptureOutcome, EvoKernelError> {
2542 let intent = MutationIntent {
2543 id: next_id("proposal"),
2544 intent: proposal.intent.clone(),
2545 target: MutationTarget::Paths {
2546 allow: proposal.files.clone(),
2547 },
2548 expected_effect: proposal.expected_effect.clone(),
2549 risk: RiskLevel::Low,
2550 signals: proposal.files.clone(),
2551 spec_id: None,
2552 };
2553 self.capture_mutation_with_governor(
2554 run_id,
2555 prepare_mutation(intent, diff_payload, base_revision),
2556 )
2557 .await
2558 }
2559
2560 pub fn feedback_for_agent(outcome: &CaptureOutcome) -> ExecutionFeedback {
2561 ExecutionFeedback {
2562 accepted: !matches!(outcome.governor_decision.target_state, AssetState::Revoked),
2563 asset_state: Some(format!("{:?}", outcome.governor_decision.target_state)),
2564 summary: outcome.governor_decision.reason.clone(),
2565 }
2566 }
2567
    /// Translate a replay decision into agent-facing feedback: a planner
    /// directive (skip vs. fallback), a normalized fallback contract when the
    /// planner must run, and a human-readable summary keyed by task class.
    pub fn replay_feedback_for_agent(
        signals: &[String],
        decision: &ReplayDecision,
    ) -> ReplayFeedback {
        let (task_class_id, task_label) = replay_task_descriptor(signals);
        let planner_directive = if decision.used_capsule {
            ReplayPlannerDirective::SkipPlanner
        } else {
            ReplayPlannerDirective::PlanFallback
        };
        // One reasoning step is credited per capsule reuse (bool -> 0/1).
        let reasoning_steps_avoided = u64::from(decision.used_capsule);
        // Seed the contract's reason code from the first recorded mismatch,
        // when one maps onto a known fallback reason.
        let reason_code_hint = decision
            .detect_evidence
            .mismatch_reasons
            .first()
            .and_then(|reason| infer_replay_fallback_reason_code(reason));
        // The fallback reason text is only supplied when the decision
        // actually fell back to the planner.
        let fallback_contract = normalize_replay_fallback_contract(
            &planner_directive,
            decision
                .fallback_to_planner
                .then_some(decision.reason.as_str()),
            reason_code_hint,
            None,
            None,
            None,
        );
        let summary = if decision.used_capsule {
            format!("reused prior capsule for task class '{task_label}'; skip planner")
        } else {
            format!(
                "planner fallback required for task class '{task_label}': {}",
                decision.reason
            )
        };

        // Contract-derived fields are all optional: absent when replay
        // succeeded and no fallback contract was produced.
        ReplayFeedback {
            used_capsule: decision.used_capsule,
            capsule_id: decision.capsule_id.clone(),
            planner_directive,
            reasoning_steps_avoided,
            fallback_reason: fallback_contract
                .as_ref()
                .map(|contract| contract.fallback_reason.clone()),
            reason_code: fallback_contract
                .as_ref()
                .map(|contract| contract.reason_code),
            repair_hint: fallback_contract
                .as_ref()
                .map(|contract| contract.repair_hint.clone()),
            next_action: fallback_contract
                .as_ref()
                .map(|contract| contract.next_action),
            confidence: fallback_contract
                .as_ref()
                .map(|contract| contract.confidence),
            task_class_id,
            task_label,
            summary,
        }
    }
2628
    /// Build a failure outcome for the supervised devloop from a normalized
    /// failure contract, optionally recording a `MutationRejected` audit
    /// event when a mutation id is supplied for the audit trail.
    fn mutation_needed_failure_outcome(
        &self,
        request: &SupervisedDevloopRequest,
        task_class: Option<BoundedTaskClass>,
        status: SupervisedDevloopStatus,
        contract: MutationNeededFailureContract,
        mutation_id_for_audit: Option<String>,
    ) -> Result<SupervisedDevloopOutcome, EvoKernelError> {
        if let Some(mutation_id) = mutation_id_for_audit {
            self.store
                .append_event(EvolutionEvent::MutationRejected {
                    mutation_id,
                    reason: contract.failure_reason.clone(),
                    reason_code: Some(
                        mutation_needed_reason_code_key(contract.reason_code).to_string(),
                    ),
                    recovery_hint: Some(contract.recovery_hint.clone()),
                    fail_closed: contract.fail_closed,
                })
                .map_err(store_err)?;
        }
        // Stable snake_case labels for the summary string; exhaustive so a
        // new status variant forces a compile error here.
        let status_label = match status {
            SupervisedDevloopStatus::AwaitingApproval => "awaiting_approval",
            SupervisedDevloopStatus::RejectedByPolicy => "rejected_by_policy",
            SupervisedDevloopStatus::FailedClosed => "failed_closed",
            SupervisedDevloopStatus::Executed => "executed",
        };
        let reason_code_key = mutation_needed_reason_code_key(contract.reason_code);
        Ok(SupervisedDevloopOutcome {
            task_id: request.task.id.clone(),
            task_class,
            status,
            execution_feedback: None,
            failure_contract: Some(contract.clone()),
            summary: format!(
                "supervised devloop {status_label} task '{}' [{reason_code_key}]: {}",
                request.task.id, contract.failure_reason
            ),
        })
    }
2669
    /// Execute one supervised devloop task end-to-end with fail-closed policy
    /// gates: classify the task, require explicit human approval, enforce
    /// bounded diff/blast-radius/sandbox/validation budgets, then run the
    /// capture pipeline and map any capture failure back into a normalized
    /// failure outcome. Guard order matters: classification failures are
    /// audited, while capture-stage failures are already audited inside the
    /// capture pipeline (hence `None` audit ids below).
    pub async fn run_supervised_devloop(
        &self,
        run_id: &RunId,
        request: &SupervisedDevloopRequest,
        diff_payload: String,
        base_revision: Option<String>,
    ) -> Result<SupervisedDevloopOutcome, EvoKernelError> {
        let audit_mutation_id = mutation_needed_audit_mutation_id(request);
        let task_class = classify_supervised_devloop_request(request);
        // Gate 1: the task must classify into a bounded task class.
        let Some(task_class) = task_class else {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "supervised devloop rejected task '{}' because it is an unsupported task outside the bounded scope",
                    request.task.id
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                None,
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        };

        // Gate 2: pause (not fail) until a human explicitly approves.
        if !request.approval.approved {
            return Ok(SupervisedDevloopOutcome {
                task_id: request.task.id.clone(),
                task_class: Some(task_class),
                status: SupervisedDevloopStatus::AwaitingApproval,
                execution_feedback: None,
                failure_contract: None,
                summary: format!(
                    "supervised devloop paused task '{}' until explicit human approval is granted",
                    request.task.id
                ),
            });
        }

        // Gate 3: bounded diff size (bytes).
        if diff_payload.len() > MUTATION_NEEDED_MAX_DIFF_BYTES {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed diff payload exceeds bounded byte budget (size={}, max={})",
                    diff_payload.len(),
                    MUTATION_NEEDED_MAX_DIFF_BYTES
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        }

        // Gate 4: bounded blast radius (changed lines). This one fails
        // closed rather than merely rejecting by policy.
        let blast_radius = compute_blast_radius(&diff_payload);
        if blast_radius.lines_changed > MUTATION_NEEDED_MAX_CHANGED_LINES {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed patch exceeds bounded changed-line budget (lines_changed={}, max={})",
                    blast_radius.lines_changed,
                    MUTATION_NEEDED_MAX_CHANGED_LINES
                )),
                Some(MutationNeededFailureReasonCode::UnsafePatch),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::FailedClosed,
                contract,
                Some(audit_mutation_id),
            );
        }

        // Gate 5: the configured sandbox duration budget must stay bounded.
        if self.sandbox_policy.max_duration_ms > MUTATION_NEEDED_MAX_SANDBOX_DURATION_MS {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed sandbox duration budget exceeds bounded policy (configured={}ms, max={}ms)",
                    self.sandbox_policy.max_duration_ms,
                    MUTATION_NEEDED_MAX_SANDBOX_DURATION_MS
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        }

        // Gate 6: the validation plan's total timeout budget must stay
        // bounded.
        let validation_budget_ms = validation_plan_timeout_budget_ms(&self.validation_plan);
        if validation_budget_ms > MUTATION_NEEDED_MAX_VALIDATION_BUDGET_MS {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed validation timeout budget exceeds bounded policy (configured={}ms, max={}ms)",
                    validation_budget_ms,
                    MUTATION_NEEDED_MAX_VALIDATION_BUDGET_MS
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        }

        // All gates passed: run the capture pipeline. Capture-stage errors
        // were already audited inside capture, so no audit id is re-emitted.
        let capture = match self
            .capture_from_proposal(run_id, &request.proposal, diff_payload, base_revision)
            .await
        {
            Ok(capture) => capture,
            Err(EvoKernelError::Sandbox(message)) => {
                let contract = mutation_needed_contract_for_error_message(&message);
                let status = mutation_needed_status_from_reason_code(contract.reason_code);
                return self.mutation_needed_failure_outcome(
                    request,
                    Some(task_class),
                    status,
                    contract,
                    None,
                );
            }
            Err(EvoKernelError::ValidationFailed(report)) => {
                let contract = mutation_needed_contract_for_validation_failure(
                    &self.validation_plan.profile,
                    &report,
                );
                let status = mutation_needed_status_from_reason_code(contract.reason_code);
                return self.mutation_needed_failure_outcome(
                    request,
                    Some(task_class),
                    status,
                    contract,
                    None,
                );
            }
            Err(EvoKernelError::Validation(message)) => {
                let contract = mutation_needed_contract_for_error_message(&message);
                let status = mutation_needed_status_from_reason_code(contract.reason_code);
                return self.mutation_needed_failure_outcome(
                    request,
                    Some(task_class),
                    status,
                    contract,
                    None,
                );
            }
            Err(err) => return Err(err),
        };
        let approver = request
            .approval
            .approver
            .as_deref()
            .unwrap_or("unknown approver");

        Ok(SupervisedDevloopOutcome {
            task_id: request.task.id.clone(),
            task_class: Some(task_class),
            status: SupervisedDevloopStatus::Executed,
            execution_feedback: Some(Self::feedback_for_agent(&capture)),
            failure_contract: None,
            summary: format!(
                "supervised devloop executed task '{}' with explicit approval from {approver}",
                request.task.id
            ),
        })
    }
2845
    /// Decide whether a GitHub-issue-shaped intake request qualifies for
    /// bounded self-evolution. Gates run in order: issue must be open, must
    /// not carry an excluding label, must carry both the evolution and
    /// feature labels, and must classify into a supported candidate scope.
    /// Unknown states fail closed.
    pub fn select_self_evolution_candidate(
        &self,
        request: &SelfEvolutionCandidateIntakeRequest,
    ) -> Result<SelfEvolutionSelectionDecision, EvoKernelError> {
        let normalized_state = request.state.trim().to_ascii_lowercase();
        if normalized_state != "open" {
            // "closed" gets a precise reason; anything else fails closed.
            let reason_code = if normalized_state == "closed" {
                SelfEvolutionSelectionReasonCode::IssueClosed
            } else {
                SelfEvolutionSelectionReasonCode::UnknownFailClosed
            };
            return Ok(reject_self_evolution_selection_decision(
                request.issue_number,
                reason_code,
                None,
                None,
            ));
        }

        let normalized_labels = normalized_selection_labels(&request.labels);
        // Hard-exclusion labels win over everything else.
        if normalized_labels.contains("duplicate")
            || normalized_labels.contains("invalid")
            || normalized_labels.contains("wontfix")
        {
            return Ok(reject_self_evolution_selection_decision(
                request.issue_number,
                SelfEvolutionSelectionReasonCode::ExcludedByLabel,
                Some(&format!(
                    "self-evolution candidate rejected because issue #{} carries an excluded label",
                    request.issue_number
                )),
                None,
            ));
        }

        if !normalized_labels.contains("area/evolution") {
            return Ok(reject_self_evolution_selection_decision(
                request.issue_number,
                SelfEvolutionSelectionReasonCode::MissingEvolutionLabel,
                None,
                None,
            ));
        }

        if !normalized_labels.contains("type/feature") {
            return Ok(reject_self_evolution_selection_decision(
                request.issue_number,
                SelfEvolutionSelectionReasonCode::MissingFeatureLabel,
                None,
                None,
            ));
        }

        // Final gate: the request must map onto a bounded task class.
        let Some(task_class) = classify_self_evolution_candidate_request(request) else {
            return Ok(reject_self_evolution_selection_decision(
                request.issue_number,
                SelfEvolutionSelectionReasonCode::UnsupportedCandidateScope,
                Some(&format!(
                    "self-evolution candidate rejected because issue #{} declares unsupported candidate scope",
                    request.issue_number
                )),
                None,
            ));
        };

        Ok(accept_self_evolution_selection_decision(
            request.issue_number,
            task_class,
            Some(&format!(
                "selected GitHub issue #{} for bounded self-evolution intake",
                request.issue_number
            )),
        ))
    }
2920 pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
2921 MultiAgentCoordinator::new().coordinate(plan)
2922 }
2923
2924 pub fn export_promoted_assets(
2925 &self,
2926 sender_id: impl Into<String>,
2927 ) -> Result<EvolutionEnvelope, EvoKernelError> {
2928 let sender_id = sender_id.into();
2929 let envelope = export_promoted_assets_from_store(self.store.as_ref(), sender_id.clone())?;
2930 if !envelope.assets.is_empty() {
2931 let mut ledger = self
2932 .economics
2933 .lock()
2934 .map_err(|_| EvoKernelError::Validation("economics ledger lock poisoned".into()))?;
2935 if ledger
2936 .reserve_publish_stake(&sender_id, &self.stake_policy)
2937 .is_none()
2938 {
2939 return Err(EvoKernelError::Validation(
2940 "insufficient EVU for remote publish".into(),
2941 ));
2942 }
2943 }
2944 Ok(envelope)
2945 }
2946
2947 pub fn import_remote_envelope(
2948 &self,
2949 envelope: &EvolutionEnvelope,
2950 ) -> Result<ImportOutcome, EvoKernelError> {
2951 import_remote_envelope_into_store(
2952 self.store.as_ref(),
2953 envelope,
2954 Some(self.remote_publishers.as_ref()),
2955 None,
2956 )
2957 }
2958
2959 pub fn fetch_assets(
2960 &self,
2961 responder_id: impl Into<String>,
2962 query: &FetchQuery,
2963 ) -> Result<FetchResponse, EvoKernelError> {
2964 fetch_assets_from_store(self.store.as_ref(), responder_id, query)
2965 }
2966
2967 pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
2968 revoke_assets_in_store(self.store.as_ref(), notice)
2969 }
2970
2971 pub async fn replay_or_fallback(
2972 &self,
2973 input: SelectorInput,
2974 ) -> Result<ReplayDecision, EvoKernelError> {
2975 let replay_run_id = next_id("replay");
2976 self.replay_or_fallback_for_run(&replay_run_id, input).await
2977 }
2978
2979 pub async fn replay_or_fallback_for_run(
2980 &self,
2981 run_id: &RunId,
2982 input: SelectorInput,
2983 ) -> Result<ReplayDecision, EvoKernelError> {
2984 let executor = StoreReplayExecutor {
2985 sandbox: self.sandbox.clone(),
2986 validator: self.validator.clone(),
2987 store: self.store.clone(),
2988 selector: self.selector.clone(),
2989 governor: self.governor.clone(),
2990 economics: Some(self.economics.clone()),
2991 remote_publishers: Some(self.remote_publishers.clone()),
2992 stake_policy: self.stake_policy.clone(),
2993 };
2994 executor
2995 .try_replay_for_run(run_id, &input, &self.sandbox_policy, &self.validation_plan)
2996 .await
2997 .map_err(|err| EvoKernelError::Validation(err.to_string()))
2998 }
2999
3000 pub fn economics_signal(&self, node_id: &str) -> Option<EconomicsSignal> {
3001 self.economics.lock().ok()?.governor_signal(node_id)
3002 }
3003
3004 pub fn selector_reputation_bias(&self) -> BTreeMap<String, f32> {
3005 self.economics
3006 .lock()
3007 .ok()
3008 .map(|locked| locked.selector_reputation_bias())
3009 .unwrap_or_default()
3010 }
3011
3012 pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
3013 evolution_metrics_snapshot(self.store.as_ref())
3014 }
3015
3016 pub fn replay_roi_release_gate_summary(
3017 &self,
3018 window_seconds: u64,
3019 ) -> Result<ReplayRoiWindowSummary, EvoKernelError> {
3020 replay_roi_release_gate_summary(self.store.as_ref(), window_seconds)
3021 }
3022
3023 pub fn render_replay_roi_release_gate_summary_json(
3024 &self,
3025 window_seconds: u64,
3026 ) -> Result<String, EvoKernelError> {
3027 let summary = self.replay_roi_release_gate_summary(window_seconds)?;
3028 serde_json::to_string_pretty(&summary)
3029 .map_err(|err| EvoKernelError::Validation(err.to_string()))
3030 }
3031
3032 pub fn replay_roi_release_gate_contract(
3033 &self,
3034 window_seconds: u64,
3035 thresholds: ReplayRoiReleaseGateThresholds,
3036 ) -> Result<ReplayRoiReleaseGateContract, EvoKernelError> {
3037 let summary = self.replay_roi_release_gate_summary(window_seconds)?;
3038 Ok(replay_roi_release_gate_contract(&summary, thresholds))
3039 }
3040
3041 pub fn render_replay_roi_release_gate_contract_json(
3042 &self,
3043 window_seconds: u64,
3044 thresholds: ReplayRoiReleaseGateThresholds,
3045 ) -> Result<String, EvoKernelError> {
3046 let contract = self.replay_roi_release_gate_contract(window_seconds, thresholds)?;
3047 serde_json::to_string_pretty(&contract)
3048 .map_err(|err| EvoKernelError::Validation(err.to_string()))
3049 }
3050
3051 pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
3052 self.metrics_snapshot().map(|snapshot| {
3053 let health = evolution_health_snapshot(&snapshot);
3054 render_evolution_metrics_prometheus(&snapshot, &health)
3055 })
3056 }
3057
3058 pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
3059 self.metrics_snapshot()
3060 .map(|snapshot| evolution_health_snapshot(&snapshot))
3061 }
3062}
3063
3064pub fn prepare_mutation(
3065 intent: MutationIntent,
3066 diff_payload: String,
3067 base_revision: Option<String>,
3068) -> PreparedMutation {
3069 PreparedMutation {
3070 intent,
3071 artifact: MutationArtifact {
3072 encoding: ArtifactEncoding::UnifiedDiff,
3073 content_hash: compute_artifact_hash(&diff_payload),
3074 payload: diff_payload,
3075 base_revision,
3076 },
3077 }
3078}
3079
3080pub fn prepare_mutation_from_spec(
3081 plan: CompiledMutationPlan,
3082 diff_payload: String,
3083 base_revision: Option<String>,
3084) -> PreparedMutation {
3085 prepare_mutation(plan.mutation_intent, diff_payload, base_revision)
3086}
3087
3088pub fn default_evolution_store() -> Arc<dyn EvolutionStore> {
3089 Arc::new(oris_evolution::JsonlEvolutionStore::new(
3090 default_store_root(),
3091 ))
3092}
3093
/// The built-in bootstrap seed templates used when the store is empty.
/// Each template carries a literal unified diff; the string bytes are part
/// of the seed identity (they are hashed downstream), so the payloads must
/// not be reformatted.
fn built_in_seed_templates() -> Vec<SeedTemplate> {
    vec![
        // Seed 1: create a minimal README.
        SeedTemplate {
            id: "bootstrap-readme".into(),
            intent: "Seed a baseline README recovery pattern".into(),
            signals: vec!["bootstrap readme".into(), "missing readme".into()],
            diff_payload: "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1,3 @@
+# Oris
+Bootstrap documentation seed
+"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Seed 2: add a deterministic test-stabilization helper.
        SeedTemplate {
            id: "bootstrap-test-fix".into(),
            intent: "Seed a deterministic test stabilization pattern".into(),
            signals: vec!["bootstrap test fix".into(), "failing tests".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 1111111..2222222 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,2 @@
 pub fn demo() -> usize { 1 }
+pub fn normalize_test_output() -> bool { true }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Seed 3: a low-risk refactor pattern.
        SeedTemplate {
            id: "bootstrap-refactor".into(),
            intent: "Seed a low-risk refactor capsule".into(),
            signals: vec!["bootstrap refactor".into(), "small refactor".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 2222222..3333333 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,3 @@
 pub fn demo() -> usize { 1 }
+
+fn extract_strategy_key(input: &str) -> &str { input }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Seed 4: a baseline structured-logging pattern.
        SeedTemplate {
            id: "bootstrap-logging".into(),
            intent: "Seed a baseline structured logging mutation".into(),
            signals: vec!["bootstrap logging".into(), "structured logs".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 3333333..4444444 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,3 @@
 pub fn demo() -> usize { 1 }
+
+fn emit_bootstrap_log() { println!(\"bootstrap-log\"); }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
    ]
}
3165
3166fn build_seed_mutation(template: &SeedTemplate) -> PreparedMutation {
3167 let changed_files = seed_changed_files(&template.diff_payload);
3168 let target = if changed_files.is_empty() {
3169 MutationTarget::WorkspaceRoot
3170 } else {
3171 MutationTarget::Paths {
3172 allow: changed_files,
3173 }
3174 };
3175 prepare_mutation(
3176 MutationIntent {
3177 id: stable_hash_json(&("bootstrap-mutation", &template.id))
3178 .unwrap_or_else(|_| format!("bootstrap-mutation-{}", template.id)),
3179 intent: template.intent.clone(),
3180 target,
3181 expected_effect: format!("seed {}", template.id),
3182 risk: RiskLevel::Low,
3183 signals: template.signals.clone(),
3184 spec_id: None,
3185 },
3186 template.diff_payload.clone(),
3187 None,
3188 )
3189}
3190
3191fn extract_seed_signals(template: &SeedTemplate) -> SignalExtractionOutput {
3192 let mut signals = BTreeSet::new();
3193 for declared in &template.signals {
3194 if let Some(phrase) = normalize_signal_phrase(declared) {
3195 signals.insert(phrase);
3196 }
3197 extend_signal_tokens(&mut signals, declared);
3198 }
3199 extend_signal_tokens(&mut signals, &template.intent);
3200 extend_signal_tokens(&mut signals, &template.diff_payload);
3201 for changed_file in seed_changed_files(&template.diff_payload) {
3202 extend_signal_tokens(&mut signals, &changed_file);
3203 }
3204 let values = signals.into_iter().take(32).collect::<Vec<_>>();
3205 let hash =
3206 stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
3207 SignalExtractionOutput { values, hash }
3208}
3209
/// Extracts the sorted, de-duplicated destination paths (`+++ b/...` lines)
/// from a unified diff payload; empty paths are skipped.
fn seed_changed_files(diff_payload: &str) -> Vec<String> {
    let paths: BTreeSet<String> = diff_payload
        .lines()
        .filter_map(|line| line.strip_prefix("+++ b/"))
        .map(str::trim)
        .filter(|path| !path.is_empty())
        .map(ToOwned::to_owned)
        .collect();
    paths.into_iter().collect()
}
3222
3223fn build_bootstrap_gene(
3224 template: &SeedTemplate,
3225 extracted: &SignalExtractionOutput,
3226) -> Result<Gene, EvolutionError> {
3227 let strategy = vec![template.id.clone(), "bootstrap".into()];
3228 let id = stable_hash_json(&(
3229 "bootstrap-gene",
3230 &template.id,
3231 &extracted.values,
3232 &template.validation_profile,
3233 ))?;
3234 Ok(Gene {
3235 id,
3236 signals: extracted.values.clone(),
3237 strategy,
3238 validation: vec![template.validation_profile.clone()],
3239 state: AssetState::Quarantined,
3240 })
3241}
3242
/// Builds the quarantined bootstrap `Capsule` pairing a seed mutation with
/// its gene. Confidence starts at 0.0 and the outcome is recorded as
/// unsuccessful and not replay-verified. Both the validator hash and the
/// capsule id are stable hashes, keeping repeated seeding deterministic.
fn build_bootstrap_capsule(
    run_id: &RunId,
    template: &SeedTemplate,
    mutation: &PreparedMutation,
    gene: &Gene,
) -> Result<Capsule, EvolutionError> {
    // Fall back to "." when the current directory cannot be read.
    let cwd = std::env::current_dir().unwrap_or_else(|_| Path::new(".").to_path_buf());
    let env = current_env_fingerprint(&cwd);
    let diff_hash = mutation.artifact.content_hash.clone();
    let changed_files = seed_changed_files(&template.diff_payload);
    let validator_hash = stable_hash_json(&(
        "bootstrap-validator",
        &template.id,
        &template.validation_profile,
        &diff_hash,
    ))?;
    // Including the environment fingerprint scopes the capsule id to this host.
    let id = stable_hash_json(&(
        "bootstrap-capsule",
        &template.id,
        run_id,
        &gene.id,
        &diff_hash,
        &env,
    ))?;
    Ok(Capsule {
        id,
        gene_id: gene.id.clone(),
        mutation_id: mutation.intent.id.clone(),
        run_id: run_id.clone(),
        diff_hash,
        confidence: 0.0,
        env,
        outcome: Outcome {
            success: false,
            validation_profile: template.validation_profile.clone(),
            validation_duration_ms: 0,
            changed_files,
            validator_hash,
            lines_changed: compute_blast_radius(&template.diff_payload).lines_changed,
            replay_verified: false,
        },
        state: AssetState::Quarantined,
    })
}
3287
3288fn derive_gene(
3289 mutation: &PreparedMutation,
3290 receipt: &SandboxReceipt,
3291 validation_profile: &str,
3292 extracted_signals: &[String],
3293) -> Gene {
3294 let mut strategy = BTreeSet::new();
3295 for file in &receipt.changed_files {
3296 if let Some(component) = file.components().next() {
3297 strategy.insert(component.as_os_str().to_string_lossy().to_string());
3298 }
3299 }
3300 for token in mutation
3301 .artifact
3302 .payload
3303 .split(|ch: char| !ch.is_ascii_alphanumeric())
3304 {
3305 if token.len() == 5
3306 && token.starts_with('E')
3307 && token[1..].chars().all(|ch| ch.is_ascii_digit())
3308 {
3309 strategy.insert(token.to_string());
3310 }
3311 }
3312 for token in mutation.intent.intent.split_whitespace().take(8) {
3313 strategy.insert(token.to_ascii_lowercase());
3314 }
3315 let strategy = strategy.into_iter().collect::<Vec<_>>();
3316 let id = stable_hash_json(&(extracted_signals, &strategy, validation_profile))
3317 .unwrap_or_else(|_| next_id("gene"));
3318 Gene {
3319 id,
3320 signals: extracted_signals.to_vec(),
3321 strategy,
3322 validation: vec![validation_profile.to_string()],
3323 state: AssetState::Promoted,
3324 }
3325}
3326
/// Builds a promoted `Capsule` for a successfully validated mutation with a
/// fixed initial confidence of 0.7. The validator hash commits to the full
/// validation report; the capsule id hashes run, gene, diff, and mutation.
fn build_capsule(
    run_id: &RunId,
    mutation: &PreparedMutation,
    receipt: &SandboxReceipt,
    report: &ValidationReport,
    validation_profile: &str,
    gene: &Gene,
    blast_radius: &BlastRadius,
) -> Result<Capsule, EvolutionError> {
    // Fingerprint the sandbox workdir the mutation was validated in.
    let env = current_env_fingerprint(&receipt.workdir);
    let validator_hash = stable_hash_json(report)?;
    let diff_hash = mutation.artifact.content_hash.clone();
    let id = stable_hash_json(&(run_id, &gene.id, &diff_hash, &mutation.intent.id))?;
    Ok(Capsule {
        id,
        gene_id: gene.id.clone(),
        mutation_id: mutation.intent.id.clone(),
        run_id: run_id.clone(),
        diff_hash,
        confidence: 0.7,
        env,
        outcome: oris_evolution::Outcome {
            success: true,
            validation_profile: validation_profile.to_string(),
            validation_duration_ms: report.duration_ms,
            changed_files: receipt
                .changed_files
                .iter()
                .map(|path| path.to_string_lossy().to_string())
                .collect(),
            validator_hash,
            lines_changed: blast_radius.lines_changed,
            replay_verified: false,
        },
        state: AssetState::Promoted,
    })
}
3364
3365fn current_env_fingerprint(workdir: &Path) -> EnvFingerprint {
3366 let rustc_version = Command::new("rustc")
3367 .arg("--version")
3368 .output()
3369 .ok()
3370 .filter(|output| output.status.success())
3371 .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
3372 .unwrap_or_else(|| "rustc unknown".into());
3373 let cargo_lock_hash = fs::read(workdir.join("Cargo.lock"))
3374 .ok()
3375 .map(|bytes| {
3376 let value = String::from_utf8_lossy(&bytes);
3377 compute_artifact_hash(&value)
3378 })
3379 .unwrap_or_else(|| "missing-cargo-lock".into());
3380 let target_triple = format!(
3381 "{}-unknown-{}",
3382 std::env::consts::ARCH,
3383 std::env::consts::OS
3384 );
3385 EnvFingerprint {
3386 rustc_version,
3387 cargo_lock_hash,
3388 target_triple,
3389 os: std::env::consts::OS.to_string(),
3390 }
3391}
3392
3393fn extend_signal_tokens(out: &mut BTreeSet<String>, input: &str) {
3394 for raw in input.split(|ch: char| !ch.is_ascii_alphanumeric()) {
3395 let trimmed = raw.trim();
3396 if trimmed.is_empty() {
3397 continue;
3398 }
3399 let normalized = if is_rust_error_code(trimmed) {
3400 let mut chars = trimmed.chars();
3401 let prefix = chars
3402 .next()
3403 .map(|ch| ch.to_ascii_uppercase())
3404 .unwrap_or('E');
3405 format!("{prefix}{}", chars.as_str())
3406 } else {
3407 trimmed.to_ascii_lowercase()
3408 };
3409 if normalized.len() < 3 {
3410 continue;
3411 }
3412 out.insert(normalized);
3413 }
3414}
3415
3416fn normalize_signal_phrase(input: &str) -> Option<String> {
3417 let normalized = input
3418 .split(|ch: char| !ch.is_ascii_alphanumeric())
3419 .filter_map(|raw| {
3420 let trimmed = raw.trim();
3421 if trimmed.is_empty() {
3422 return None;
3423 }
3424 let normalized = if is_rust_error_code(trimmed) {
3425 let mut chars = trimmed.chars();
3426 let prefix = chars
3427 .next()
3428 .map(|ch| ch.to_ascii_uppercase())
3429 .unwrap_or('E');
3430 format!("{prefix}{}", chars.as_str())
3431 } else {
3432 trimmed.to_ascii_lowercase()
3433 };
3434 if normalized.len() < 3 {
3435 None
3436 } else {
3437 Some(normalized)
3438 }
3439 })
3440 .collect::<Vec<_>>()
3441 .join(" ");
3442 if normalized.is_empty() {
3443 None
3444 } else {
3445 Some(normalized)
3446 }
3447}
3448
3449fn replay_task_descriptor(signals: &[String]) -> (String, String) {
3450 let normalized = signals
3451 .iter()
3452 .filter_map(|signal| normalize_signal_phrase(signal))
3453 .collect::<BTreeSet<_>>()
3454 .into_iter()
3455 .collect::<Vec<_>>();
3456 if normalized.is_empty() {
3457 return ("unknown".into(), "unknown".into());
3458 }
3459 let task_label = normalized
3460 .iter()
3461 .find(|value| {
3462 value.as_str() != "validation passed" && value.as_str() != "validation failed"
3463 })
3464 .cloned()
3465 .unwrap_or_else(|| normalized[0].clone());
3466 let task_class_id = stable_hash_json(&normalized)
3467 .unwrap_or_else(|_| compute_artifact_hash(&normalized.join("\n")));
3468 (task_class_id, task_label)
3469}
3470
3471fn normalized_signal_values(signals: &[String]) -> Vec<String> {
3472 signals
3473 .iter()
3474 .filter_map(|signal| normalize_signal_phrase(signal))
3475 .collect::<BTreeSet<_>>()
3476 .into_iter()
3477 .collect::<Vec<_>>()
3478}
3479
3480fn matched_replay_signals(input_signals: &[String], candidate_signals: &[String]) -> Vec<String> {
3481 let normalized_input = normalized_signal_values(input_signals);
3482 if normalized_input.is_empty() {
3483 return Vec::new();
3484 }
3485 let normalized_candidate = normalized_signal_values(candidate_signals);
3486 if normalized_candidate.is_empty() {
3487 return normalized_input;
3488 }
3489 let matched = normalized_input
3490 .iter()
3491 .filter(|signal| {
3492 normalized_candidate
3493 .iter()
3494 .any(|candidate| candidate.contains(signal.as_str()) || signal.contains(candidate))
3495 })
3496 .cloned()
3497 .collect::<Vec<_>>();
3498 if matched.is_empty() {
3499 normalized_input
3500 } else {
3501 matched
3502 }
3503}
3504
3505fn replay_detect_evidence_from_input(input: &SelectorInput) -> ReplayDetectEvidence {
3506 let (task_class_id, task_label) = replay_task_descriptor(&input.signals);
3507 ReplayDetectEvidence {
3508 task_class_id,
3509 task_label,
3510 matched_signals: normalized_signal_values(&input.signals),
3511 mismatch_reasons: Vec::new(),
3512 }
3513}
3514
3515fn replay_descriptor_from_candidate_or_input(
3516 candidate: Option<&GeneCandidate>,
3517 input: &SelectorInput,
3518) -> (String, String) {
3519 if let Some(candidate) = candidate {
3520 let task_class_id = strategy_metadata_value(&candidate.gene.strategy, "task_class");
3521 let task_label = strategy_metadata_value(&candidate.gene.strategy, "task_label");
3522 if let Some(task_class_id) = task_class_id {
3523 return (
3524 task_class_id.clone(),
3525 task_label.unwrap_or_else(|| task_class_id.clone()),
3526 );
3527 }
3528 return replay_task_descriptor(&candidate.gene.signals);
3529 }
3530 replay_task_descriptor(&input.signals)
3531}
3532
3533fn estimated_reasoning_tokens(signals: &[String]) -> u64 {
3534 let normalized = signals
3535 .iter()
3536 .filter_map(|signal| normalize_signal_phrase(signal))
3537 .collect::<BTreeSet<_>>();
3538 let signal_count = normalized.len() as u64;
3539 REPLAY_REASONING_TOKEN_FLOOR + REPLAY_REASONING_TOKEN_SIGNAL_WEIGHT * signal_count.max(1)
3540}
3541
/// Computes replay ROI in `[-1.0, 1.0]` as the normalized difference between
/// reasoning tokens avoided and the replay fallback cost.
///
/// Returns `0.0` when both inputs are zero. The sum saturates instead of
/// overflowing `u64`: the plain `+` would panic in debug builds and silently
/// wrap in release builds (potentially even hitting the `total == 0` branch)
/// for extreme token counts.
fn compute_replay_roi(reasoning_avoided_tokens: u64, replay_fallback_cost: u64) -> f64 {
    let total = reasoning_avoided_tokens.saturating_add(replay_fallback_cost);
    if total == 0 {
        return 0.0;
    }
    (reasoning_avoided_tokens as f64 - replay_fallback_cost as f64) / total as f64
}
3549
/// Returns true for rustc-style error codes: exactly five bytes, an ASCII
/// `e`/`E`, then four ASCII digits (e.g. "E0308").
fn is_rust_error_code(value: &str) -> bool {
    let bytes = value.as_bytes();
    bytes.len() == 5
        && (bytes[0] == b'e' || bytes[0] == b'E')
        && bytes[1..].iter().all(|byte| byte.is_ascii_digit())
}
3555
3556fn validation_plan_timeout_budget_ms(plan: &ValidationPlan) -> u64 {
3557 plan.stages.iter().fold(0_u64, |acc, stage| match stage {
3558 ValidationStage::Command { timeout_ms, .. } => acc.saturating_add(*timeout_ms),
3559 })
3560}
3561
3562fn mutation_needed_reason_code_key(reason_code: MutationNeededFailureReasonCode) -> &'static str {
3563 match reason_code {
3564 MutationNeededFailureReasonCode::PolicyDenied => "policy_denied",
3565 MutationNeededFailureReasonCode::ValidationFailed => "validation_failed",
3566 MutationNeededFailureReasonCode::UnsafePatch => "unsafe_patch",
3567 MutationNeededFailureReasonCode::Timeout => "timeout",
3568 MutationNeededFailureReasonCode::MutationPayloadMissing => "mutation_payload_missing",
3569 MutationNeededFailureReasonCode::UnknownFailClosed => "unknown_fail_closed",
3570 }
3571}
3572
3573fn mutation_needed_status_from_reason_code(
3574 reason_code: MutationNeededFailureReasonCode,
3575) -> SupervisedDevloopStatus {
3576 if matches!(reason_code, MutationNeededFailureReasonCode::PolicyDenied) {
3577 SupervisedDevloopStatus::RejectedByPolicy
3578 } else {
3579 SupervisedDevloopStatus::FailedClosed
3580 }
3581}
3582
3583fn mutation_needed_contract_for_validation_failure(
3584 profile: &str,
3585 report: &ValidationReport,
3586) -> MutationNeededFailureContract {
3587 let lower_logs = report.logs.to_ascii_lowercase();
3588 if lower_logs.contains("timed out") {
3589 normalize_mutation_needed_failure_contract(
3590 Some(&format!(
3591 "mutation-needed validation command timed out under profile '{profile}'"
3592 )),
3593 Some(MutationNeededFailureReasonCode::Timeout),
3594 )
3595 } else {
3596 normalize_mutation_needed_failure_contract(
3597 Some(&format!(
3598 "mutation-needed validation failed under profile '{profile}'"
3599 )),
3600 Some(MutationNeededFailureReasonCode::ValidationFailed),
3601 )
3602 }
3603}
3604
3605fn mutation_needed_contract_for_error_message(message: &str) -> MutationNeededFailureContract {
3606 let reason_code = infer_mutation_needed_failure_reason_code(message);
3607 normalize_mutation_needed_failure_contract(Some(message), reason_code)
3608}
3609
3610fn mutation_needed_audit_mutation_id(request: &SupervisedDevloopRequest) -> String {
3611 stable_hash_json(&(
3612 "mutation-needed-audit",
3613 &request.task.id,
3614 &request.proposal.intent,
3615 &request.proposal.files,
3616 ))
3617 .map(|hash| format!("mutation-needed-{hash}"))
3618 .unwrap_or_else(|_| format!("mutation-needed-{}", request.task.id))
3619}
3620
3621fn classify_supervised_devloop_request(
3622 request: &SupervisedDevloopRequest,
3623) -> Option<BoundedTaskClass> {
3624 let file_count = normalized_supervised_devloop_docs_files(&request.proposal.files)?.len();
3625 match file_count {
3626 1 => Some(BoundedTaskClass::DocsSingleFile),
3627 2..=SUPERVISED_DEVLOOP_MAX_DOC_FILES => Some(BoundedTaskClass::DocsMultiFile),
3628 _ => None,
3629 }
3630}
3631
3632fn normalized_supervised_devloop_docs_files(files: &[String]) -> Option<Vec<String>> {
3633 if files.is_empty() || files.len() > SUPERVISED_DEVLOOP_MAX_DOC_FILES {
3634 return None;
3635 }
3636
3637 let mut normalized_files = Vec::with_capacity(files.len());
3638 let mut seen = BTreeSet::new();
3639
3640 for path in files {
3641 let normalized = path.trim().replace('\\', "/");
3642 if normalized.is_empty()
3643 || !normalized.starts_with("docs/")
3644 || !normalized.ends_with(".md")
3645 || !seen.insert(normalized.clone())
3646 {
3647 return None;
3648 }
3649 normalized_files.push(normalized);
3650 }
3651
3652 Some(normalized_files)
3653}
3654
3655fn classify_self_evolution_candidate_request(
3656 request: &SelfEvolutionCandidateIntakeRequest,
3657) -> Option<BoundedTaskClass> {
3658 let file_count = normalized_supervised_devloop_docs_files(&request.candidate_hint_paths)?.len();
3659 match file_count {
3660 1 => Some(BoundedTaskClass::DocsSingleFile),
3661 2..=SUPERVISED_DEVLOOP_MAX_DOC_FILES => Some(BoundedTaskClass::DocsMultiFile),
3662 _ => None,
3663 }
3664}
3665
/// Lowercases and trims selection labels, dropping empties; the `BTreeSet`
/// de-duplicates and sorts the result.
fn normalized_selection_labels(labels: &[String]) -> BTreeSet<String> {
    let mut normalized = BTreeSet::new();
    for label in labels {
        let cleaned = label.trim().to_ascii_lowercase();
        if !cleaned.is_empty() {
            normalized.insert(cleaned);
        }
    }
    normalized
}
3673
3674fn find_declared_mutation(
3675 store: &dyn EvolutionStore,
3676 mutation_id: &MutationId,
3677) -> Result<Option<PreparedMutation>, EvolutionError> {
3678 for stored in store.scan(1)? {
3679 if let EvolutionEvent::MutationDeclared { mutation } = stored.event {
3680 if &mutation.intent.id == mutation_id {
3681 return Ok(Some(mutation));
3682 }
3683 }
3684 }
3685 Ok(None)
3686}
3687
3688fn exact_match_candidates(store: &dyn EvolutionStore, input: &SelectorInput) -> Vec<GeneCandidate> {
3689 let Ok(projection) = projection_snapshot(store) else {
3690 return Vec::new();
3691 };
3692 let capsules = projection.capsules.clone();
3693 let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
3694 let requested_spec_id = input
3695 .spec_id
3696 .as_deref()
3697 .map(str::trim)
3698 .filter(|value| !value.is_empty());
3699 let signal_set = input
3700 .signals
3701 .iter()
3702 .map(|signal| signal.to_ascii_lowercase())
3703 .collect::<BTreeSet<_>>();
3704 let mut candidates = projection
3705 .genes
3706 .into_iter()
3707 .filter_map(|gene| {
3708 if gene.state != AssetState::Promoted {
3709 return None;
3710 }
3711 if let Some(spec_id) = requested_spec_id {
3712 let matches_spec = spec_ids_by_gene
3713 .get(&gene.id)
3714 .map(|values| {
3715 values
3716 .iter()
3717 .any(|value| value.eq_ignore_ascii_case(spec_id))
3718 })
3719 .unwrap_or(false);
3720 if !matches_spec {
3721 return None;
3722 }
3723 }
3724 let gene_signals = gene
3725 .signals
3726 .iter()
3727 .map(|signal| signal.to_ascii_lowercase())
3728 .collect::<BTreeSet<_>>();
3729 if gene_signals == signal_set {
3730 let mut matched_capsules = capsules
3731 .iter()
3732 .filter(|capsule| {
3733 capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
3734 })
3735 .cloned()
3736 .collect::<Vec<_>>();
3737 matched_capsules.sort_by(|left, right| {
3738 replay_environment_match_factor(&input.env, &right.env)
3739 .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
3740 .unwrap_or(std::cmp::Ordering::Equal)
3741 .then_with(|| {
3742 right
3743 .confidence
3744 .partial_cmp(&left.confidence)
3745 .unwrap_or(std::cmp::Ordering::Equal)
3746 })
3747 .then_with(|| left.id.cmp(&right.id))
3748 });
3749 if matched_capsules.is_empty() {
3750 None
3751 } else {
3752 let score = matched_capsules
3753 .first()
3754 .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
3755 .unwrap_or(0.0);
3756 Some(GeneCandidate {
3757 gene,
3758 score,
3759 capsules: matched_capsules,
3760 })
3761 }
3762 } else {
3763 None
3764 }
3765 })
3766 .collect::<Vec<_>>();
3767 candidates.sort_by(|left, right| {
3768 right
3769 .score
3770 .partial_cmp(&left.score)
3771 .unwrap_or(std::cmp::Ordering::Equal)
3772 .then_with(|| left.gene.id.cmp(&right.gene.id))
3773 });
3774 candidates
3775}
3776
3777fn quarantined_remote_exact_match_candidates(
3778 store: &dyn EvolutionStore,
3779 input: &SelectorInput,
3780) -> Vec<GeneCandidate> {
3781 let remote_asset_ids = store
3782 .scan(1)
3783 .ok()
3784 .map(|events| {
3785 events
3786 .into_iter()
3787 .filter_map(|stored| match stored.event {
3788 EvolutionEvent::RemoteAssetImported {
3789 source: CandidateSource::Remote,
3790 asset_ids,
3791 ..
3792 } => Some(asset_ids),
3793 _ => None,
3794 })
3795 .flatten()
3796 .collect::<BTreeSet<_>>()
3797 })
3798 .unwrap_or_default();
3799 if remote_asset_ids.is_empty() {
3800 return Vec::new();
3801 }
3802
3803 let Ok(projection) = projection_snapshot(store) else {
3804 return Vec::new();
3805 };
3806 let capsules = projection.capsules.clone();
3807 let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
3808 let requested_spec_id = input
3809 .spec_id
3810 .as_deref()
3811 .map(str::trim)
3812 .filter(|value| !value.is_empty());
3813 let normalized_signals = input
3814 .signals
3815 .iter()
3816 .filter_map(|signal| normalize_signal_phrase(signal))
3817 .collect::<BTreeSet<_>>()
3818 .into_iter()
3819 .collect::<Vec<_>>();
3820 if normalized_signals.is_empty() {
3821 return Vec::new();
3822 }
3823 let mut candidates = projection
3824 .genes
3825 .into_iter()
3826 .filter_map(|gene| {
3827 if !matches!(
3828 gene.state,
3829 AssetState::Promoted | AssetState::Quarantined | AssetState::ShadowValidated
3830 ) {
3831 return None;
3832 }
3833 if let Some(spec_id) = requested_spec_id {
3834 let matches_spec = spec_ids_by_gene
3835 .get(&gene.id)
3836 .map(|values| {
3837 values
3838 .iter()
3839 .any(|value| value.eq_ignore_ascii_case(spec_id))
3840 })
3841 .unwrap_or(false);
3842 if !matches_spec {
3843 return None;
3844 }
3845 }
3846 let normalized_gene_signals = gene
3847 .signals
3848 .iter()
3849 .filter_map(|candidate| normalize_signal_phrase(candidate))
3850 .collect::<Vec<_>>();
3851 let matched_query_count = normalized_signals
3852 .iter()
3853 .filter(|signal| {
3854 normalized_gene_signals.iter().any(|candidate| {
3855 candidate.contains(signal.as_str()) || signal.contains(candidate)
3856 })
3857 })
3858 .count();
3859 if matched_query_count == 0 {
3860 return None;
3861 }
3862
3863 let mut matched_capsules = capsules
3864 .iter()
3865 .filter(|capsule| {
3866 capsule.gene_id == gene.id
3867 && matches!(
3868 capsule.state,
3869 AssetState::Quarantined | AssetState::ShadowValidated
3870 )
3871 && remote_asset_ids.contains(&capsule.id)
3872 })
3873 .cloned()
3874 .collect::<Vec<_>>();
3875 matched_capsules.sort_by(|left, right| {
3876 replay_environment_match_factor(&input.env, &right.env)
3877 .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
3878 .unwrap_or(std::cmp::Ordering::Equal)
3879 .then_with(|| {
3880 right
3881 .confidence
3882 .partial_cmp(&left.confidence)
3883 .unwrap_or(std::cmp::Ordering::Equal)
3884 })
3885 .then_with(|| left.id.cmp(&right.id))
3886 });
3887 if matched_capsules.is_empty() {
3888 None
3889 } else {
3890 let overlap = matched_query_count as f32 / normalized_signals.len() as f32;
3891 let env_score = matched_capsules
3892 .first()
3893 .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
3894 .unwrap_or(0.0);
3895 Some(GeneCandidate {
3896 gene,
3897 score: overlap.max(env_score),
3898 capsules: matched_capsules,
3899 })
3900 }
3901 })
3902 .collect::<Vec<_>>();
3903 candidates.sort_by(|left, right| {
3904 right
3905 .score
3906 .partial_cmp(&left.score)
3907 .unwrap_or(std::cmp::Ordering::Equal)
3908 .then_with(|| left.gene.id.cmp(&right.gene.id))
3909 });
3910 candidates
3911}
3912
3913fn replay_environment_match_factor(input: &EnvFingerprint, candidate: &EnvFingerprint) -> f32 {
3914 let fields = [
3915 input
3916 .rustc_version
3917 .eq_ignore_ascii_case(&candidate.rustc_version),
3918 input
3919 .cargo_lock_hash
3920 .eq_ignore_ascii_case(&candidate.cargo_lock_hash),
3921 input
3922 .target_triple
3923 .eq_ignore_ascii_case(&candidate.target_triple),
3924 input.os.eq_ignore_ascii_case(&candidate.os),
3925 ];
3926 let matched_fields = fields.into_iter().filter(|matched| *matched).count() as f32;
3927 0.5 + ((matched_fields / 4.0) * 0.5)
3928}
3929
3930fn effective_candidate_score(
3931 candidate: &GeneCandidate,
3932 publishers_by_asset: &BTreeMap<String, String>,
3933 reputation_bias: &BTreeMap<String, f32>,
3934) -> f32 {
3935 let bias = candidate
3936 .capsules
3937 .first()
3938 .and_then(|capsule| publishers_by_asset.get(&capsule.id))
3939 .and_then(|publisher| reputation_bias.get(publisher))
3940 .copied()
3941 .unwrap_or(0.0)
3942 .clamp(0.0, 1.0);
3943 candidate.score * (1.0 + (bias * 0.1))
3944}
3945
3946fn export_promoted_assets_from_store(
3947 store: &dyn EvolutionStore,
3948 sender_id: impl Into<String>,
3949) -> Result<EvolutionEnvelope, EvoKernelError> {
3950 let (events, projection) = scan_projection(store)?;
3951 let genes = projection
3952 .genes
3953 .into_iter()
3954 .filter(|gene| gene.state == AssetState::Promoted)
3955 .collect::<Vec<_>>();
3956 let capsules = projection
3957 .capsules
3958 .into_iter()
3959 .filter(|capsule| capsule.state == AssetState::Promoted)
3960 .collect::<Vec<_>>();
3961 let assets = replay_export_assets(&events, genes, capsules);
3962 Ok(EvolutionEnvelope::publish(sender_id, assets))
3963}
3964
3965fn scan_projection(
3966 store: &dyn EvolutionStore,
3967) -> Result<(Vec<StoredEvolutionEvent>, EvolutionProjection), EvoKernelError> {
3968 store.scan_projection().map_err(store_err)
3969}
3970
3971fn projection_snapshot(store: &dyn EvolutionStore) -> Result<EvolutionProjection, EvoKernelError> {
3972 scan_projection(store).map(|(_, projection)| projection)
3973}
3974
3975fn replay_export_assets(
3976 events: &[StoredEvolutionEvent],
3977 genes: Vec<Gene>,
3978 capsules: Vec<Capsule>,
3979) -> Vec<NetworkAsset> {
3980 let mutation_ids = capsules
3981 .iter()
3982 .map(|capsule| capsule.mutation_id.clone())
3983 .collect::<BTreeSet<_>>();
3984 let mut assets = replay_export_events_for_mutations(events, &mutation_ids);
3985 for gene in genes {
3986 assets.push(NetworkAsset::Gene { gene });
3987 }
3988 for capsule in capsules {
3989 assets.push(NetworkAsset::Capsule { capsule });
3990 }
3991 assets
3992}
3993
3994fn replay_export_events_for_mutations(
3995 events: &[StoredEvolutionEvent],
3996 mutation_ids: &BTreeSet<String>,
3997) -> Vec<NetworkAsset> {
3998 if mutation_ids.is_empty() {
3999 return Vec::new();
4000 }
4001
4002 let mut assets = Vec::new();
4003 let mut seen_mutations = BTreeSet::new();
4004 let mut seen_spec_links = BTreeSet::new();
4005 for stored in events {
4006 match &stored.event {
4007 EvolutionEvent::MutationDeclared { mutation }
4008 if mutation_ids.contains(mutation.intent.id.as_str())
4009 && seen_mutations.insert(mutation.intent.id.clone()) =>
4010 {
4011 assets.push(NetworkAsset::EvolutionEvent {
4012 event: EvolutionEvent::MutationDeclared {
4013 mutation: mutation.clone(),
4014 },
4015 });
4016 }
4017 EvolutionEvent::SpecLinked {
4018 mutation_id,
4019 spec_id,
4020 } if mutation_ids.contains(mutation_id.as_str())
4021 && seen_spec_links.insert((mutation_id.clone(), spec_id.clone())) =>
4022 {
4023 assets.push(NetworkAsset::EvolutionEvent {
4024 event: EvolutionEvent::SpecLinked {
4025 mutation_id: mutation_id.clone(),
4026 spec_id: spec_id.clone(),
4027 },
4028 });
4029 }
4030 _ => {}
4031 }
4032 }
4033
4034 assets
4035}
4036
// Prefix for sequence-based sync cursors, e.g. "seq:42".
const SYNC_CURSOR_PREFIX: &str = "seq:";
// Versioned prefix for resume tokens: "gep-rt1|<sender>|<cursor>".
const SYNC_RESUME_TOKEN_PREFIX: &str = "gep-rt1|";
4039
/// Ids touched by events after a given sequence number; produced by
/// `delta_window` to drive incremental sync.
#[derive(Clone, Debug)]
struct DeltaWindow {
    // Genes projected, promoted, revoked, evaluated, or referenced by capsules.
    changed_gene_ids: BTreeSet<String>,
    // Capsules committed, released, or quarantined.
    changed_capsule_ids: BTreeSet<String>,
    // Mutations declared, spec-linked, or referenced by committed capsules.
    changed_mutation_ids: BTreeSet<String>,
}
4046
/// Trims an optional sync parameter and treats whitespace-only values as
/// absent.
fn normalize_sync_value(value: Option<&str>) -> Option<String> {
    let raw = value?;
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_owned())
    }
}
4053
4054fn parse_sync_cursor_seq(cursor: &str) -> Option<u64> {
4055 let trimmed = cursor.trim();
4056 if trimmed.is_empty() {
4057 return None;
4058 }
4059 let raw = trimmed.strip_prefix(SYNC_CURSOR_PREFIX).unwrap_or(trimmed);
4060 raw.parse::<u64>().ok()
4061}
4062
4063fn format_sync_cursor(seq: u64) -> String {
4064 format!("{SYNC_CURSOR_PREFIX}{seq}")
4065}
4066
4067fn encode_resume_token(sender_id: &str, cursor: &str) -> String {
4068 format!("{SYNC_RESUME_TOKEN_PREFIX}{sender_id}|{cursor}")
4069}
4070
4071fn decode_resume_token(sender_id: &str, token: &str) -> Result<String, EvoKernelError> {
4072 let token = token.trim();
4073 let Some(encoded) = token.strip_prefix(SYNC_RESUME_TOKEN_PREFIX) else {
4074 return Ok(token.to_string());
4075 };
4076 let (token_sender, cursor) = encoded.split_once('|').ok_or_else(|| {
4077 EvoKernelError::Validation(
4078 "invalid resume_token format; expected gep-rt1|<sender>|<seq>".into(),
4079 )
4080 })?;
4081 if token_sender != sender_id.trim() {
4082 return Err(EvoKernelError::Validation(
4083 "resume_token sender mismatch".into(),
4084 ));
4085 }
4086 Ok(cursor.to_string())
4087}
4088
4089fn resolve_requested_cursor(
4090 sender_id: &str,
4091 since_cursor: Option<&str>,
4092 resume_token: Option<&str>,
4093) -> Result<Option<String>, EvoKernelError> {
4094 let cursor = if let Some(token) = normalize_sync_value(resume_token) {
4095 Some(decode_resume_token(sender_id, &token)?)
4096 } else {
4097 normalize_sync_value(since_cursor)
4098 };
4099
4100 let Some(cursor) = cursor else {
4101 return Ok(None);
4102 };
4103 let seq = parse_sync_cursor_seq(&cursor).ok_or_else(|| {
4104 EvoKernelError::Validation("invalid since_cursor/resume_token cursor format".into())
4105 })?;
4106 Ok(Some(format_sync_cursor(seq)))
4107}
4108
4109fn latest_store_cursor(store: &dyn EvolutionStore) -> Result<Option<String>, EvoKernelError> {
4110 let events = store.scan(1).map_err(store_err)?;
4111 Ok(events.last().map(|stored| format_sync_cursor(stored.seq)))
4112}
4113
/// Scans events with `seq > since_seq` and records which gene, capsule, and
/// mutation ids they touched; the result drives incremental sync deltas.
fn delta_window(events: &[StoredEvolutionEvent], since_seq: u64) -> DeltaWindow {
    let mut changed_gene_ids = BTreeSet::new();
    let mut changed_capsule_ids = BTreeSet::new();
    let mut changed_mutation_ids = BTreeSet::new();

    for stored in events {
        // Only events strictly after the cursor count toward the delta.
        if stored.seq <= since_seq {
            continue;
        }
        match &stored.event {
            EvolutionEvent::MutationDeclared { mutation } => {
                changed_mutation_ids.insert(mutation.intent.id.clone());
            }
            EvolutionEvent::SpecLinked { mutation_id, .. } => {
                changed_mutation_ids.insert(mutation_id.clone());
            }
            EvolutionEvent::GeneProjected { gene } => {
                changed_gene_ids.insert(gene.id.clone());
            }
            EvolutionEvent::GenePromoted { gene_id }
            | EvolutionEvent::GeneRevoked { gene_id, .. }
            | EvolutionEvent::PromotionEvaluated { gene_id, .. } => {
                changed_gene_ids.insert(gene_id.clone());
            }
            // A committed capsule also implicates its gene and mutation.
            EvolutionEvent::CapsuleCommitted { capsule } => {
                changed_capsule_ids.insert(capsule.id.clone());
                changed_gene_ids.insert(capsule.gene_id.clone());
                changed_mutation_ids.insert(capsule.mutation_id.clone());
            }
            EvolutionEvent::CapsuleReleased { capsule_id, .. }
            | EvolutionEvent::CapsuleQuarantined { capsule_id } => {
                changed_capsule_ids.insert(capsule_id.clone());
            }
            // Remote imports do not distinguish asset kinds, so each id is
            // recorded in both the gene and capsule sets.
            EvolutionEvent::RemoteAssetImported { asset_ids, .. } => {
                for asset_id in asset_ids {
                    changed_gene_ids.insert(asset_id.clone());
                    changed_capsule_ids.insert(asset_id.clone());
                }
            }
            _ => {}
        }
    }

    DeltaWindow {
        changed_gene_ids,
        changed_capsule_ids,
        changed_mutation_ids,
    }
}
4163
4164fn import_remote_envelope_into_store(
4165 store: &dyn EvolutionStore,
4166 envelope: &EvolutionEnvelope,
4167 remote_publishers: Option<&Mutex<BTreeMap<String, String>>>,
4168 requested_cursor: Option<String>,
4169) -> Result<ImportOutcome, EvoKernelError> {
4170 if !envelope.verify_content_hash() {
4171 record_manifest_validation(store, envelope, false, "invalid evolution envelope hash")?;
4172 return Err(EvoKernelError::Validation(
4173 "invalid evolution envelope hash".into(),
4174 ));
4175 }
4176 if let Err(reason) = envelope.verify_manifest() {
4177 record_manifest_validation(
4178 store,
4179 envelope,
4180 false,
4181 format!("manifest validation failed: {reason}"),
4182 )?;
4183 return Err(EvoKernelError::Validation(format!(
4184 "invalid evolution envelope manifest: {reason}"
4185 )));
4186 }
4187 record_manifest_validation(store, envelope, true, "manifest validated")?;
4188
4189 let sender_id = normalized_sender_id(&envelope.sender_id);
4190 let (events, projection) = scan_projection(store)?;
4191 let mut known_gene_ids = projection
4192 .genes
4193 .into_iter()
4194 .map(|gene| gene.id)
4195 .collect::<BTreeSet<_>>();
4196 let mut known_capsule_ids = projection
4197 .capsules
4198 .into_iter()
4199 .map(|capsule| capsule.id)
4200 .collect::<BTreeSet<_>>();
4201 let mut known_mutation_ids = BTreeSet::new();
4202 let mut known_spec_links = BTreeSet::new();
4203 for stored in &events {
4204 match &stored.event {
4205 EvolutionEvent::MutationDeclared { mutation } => {
4206 known_mutation_ids.insert(mutation.intent.id.clone());
4207 }
4208 EvolutionEvent::SpecLinked {
4209 mutation_id,
4210 spec_id,
4211 } => {
4212 known_spec_links.insert((mutation_id.clone(), spec_id.clone()));
4213 }
4214 _ => {}
4215 }
4216 }
4217 let mut imported_asset_ids = Vec::new();
4218 let mut applied_count = 0usize;
4219 let mut skipped_count = 0usize;
4220 for asset in &envelope.assets {
4221 match asset {
4222 NetworkAsset::Gene { gene } => {
4223 if !known_gene_ids.insert(gene.id.clone()) {
4224 skipped_count += 1;
4225 continue;
4226 }
4227 imported_asset_ids.push(gene.id.clone());
4228 applied_count += 1;
4229 let mut quarantined_gene = gene.clone();
4230 quarantined_gene.state = AssetState::Quarantined;
4231 store
4232 .append_event(EvolutionEvent::RemoteAssetImported {
4233 source: CandidateSource::Remote,
4234 asset_ids: vec![gene.id.clone()],
4235 sender_id: sender_id.clone(),
4236 })
4237 .map_err(store_err)?;
4238 store
4239 .append_event(EvolutionEvent::GeneProjected {
4240 gene: quarantined_gene.clone(),
4241 })
4242 .map_err(store_err)?;
4243 record_remote_publisher_for_asset(remote_publishers, &envelope.sender_id, asset);
4244 store
4245 .append_event(EvolutionEvent::PromotionEvaluated {
4246 gene_id: quarantined_gene.id,
4247 state: AssetState::Quarantined,
4248 reason: "remote asset requires local validation before promotion".into(),
4249 reason_code: TransitionReasonCode::DowngradeRemoteRequiresLocalValidation,
4250 evidence: Some(TransitionEvidence {
4251 replay_attempts: None,
4252 replay_successes: None,
4253 replay_success_rate: None,
4254 environment_match_factor: None,
4255 decayed_confidence: None,
4256 confidence_decay_ratio: None,
4257 summary: Some("phase=remote_import; source=remote; action=quarantine_before_shadow_validation".into()),
4258 }),
4259 })
4260 .map_err(store_err)?;
4261 }
4262 NetworkAsset::Capsule { capsule } => {
4263 if !known_capsule_ids.insert(capsule.id.clone()) {
4264 skipped_count += 1;
4265 continue;
4266 }
4267 imported_asset_ids.push(capsule.id.clone());
4268 applied_count += 1;
4269 store
4270 .append_event(EvolutionEvent::RemoteAssetImported {
4271 source: CandidateSource::Remote,
4272 asset_ids: vec![capsule.id.clone()],
4273 sender_id: sender_id.clone(),
4274 })
4275 .map_err(store_err)?;
4276 let mut quarantined = capsule.clone();
4277 quarantined.state = AssetState::Quarantined;
4278 store
4279 .append_event(EvolutionEvent::CapsuleCommitted {
4280 capsule: quarantined.clone(),
4281 })
4282 .map_err(store_err)?;
4283 record_remote_publisher_for_asset(remote_publishers, &envelope.sender_id, asset);
4284 store
4285 .append_event(EvolutionEvent::CapsuleQuarantined {
4286 capsule_id: quarantined.id,
4287 })
4288 .map_err(store_err)?;
4289 }
4290 NetworkAsset::EvolutionEvent { event } => {
4291 let should_append = match event {
4292 EvolutionEvent::MutationDeclared { mutation } => {
4293 known_mutation_ids.insert(mutation.intent.id.clone())
4294 }
4295 EvolutionEvent::SpecLinked {
4296 mutation_id,
4297 spec_id,
4298 } => known_spec_links.insert((mutation_id.clone(), spec_id.clone())),
4299 _ if should_import_remote_event(event) => true,
4300 _ => false,
4301 };
4302 if should_append {
4303 store.append_event(event.clone()).map_err(store_err)?;
4304 applied_count += 1;
4305 } else {
4306 skipped_count += 1;
4307 }
4308 }
4309 }
4310 }
4311 let next_cursor = latest_store_cursor(store)?;
4312 let resume_token = next_cursor.as_ref().and_then(|cursor| {
4313 normalized_sender_id(&envelope.sender_id).map(|sender| encode_resume_token(&sender, cursor))
4314 });
4315
4316 Ok(ImportOutcome {
4317 imported_asset_ids,
4318 accepted: true,
4319 next_cursor: next_cursor.clone(),
4320 resume_token,
4321 sync_audit: SyncAudit {
4322 batch_id: next_id("sync-import"),
4323 requested_cursor,
4324 scanned_count: envelope.assets.len(),
4325 applied_count,
4326 skipped_count,
4327 failed_count: 0,
4328 failure_reasons: Vec::new(),
4329 },
4330 })
4331}
4332
// Crate-manifest-relative directory holding the seeded EvoMap snapshot files.
const EVOMAP_SNAPSHOT_ROOT: &str = "assets/gep/evomap_snapshot";
// Snapshot file containing the seed gene documents.
const EVOMAP_SNAPSHOT_GENES_FILE: &str = "genes.json";
// Snapshot file containing the seed capsule documents.
const EVOMAP_SNAPSHOT_CAPSULES_FILE: &str = "capsules.json";
// Synthetic run id attached to capsules seeded from the built-in snapshot.
const EVOMAP_BUILTIN_RUN_ID: &str = "builtin-evomap-seed";
4337
/// Top-level shape of the EvoMap `genes.json` snapshot file.
#[derive(Debug, Deserialize)]
struct EvoMapGeneDocument {
    // Gene entries; a missing `genes` key deserializes to an empty list.
    #[serde(default)]
    genes: Vec<EvoMapGeneAsset>,
}
4343
/// One gene entry from the EvoMap snapshot; all fields except `id` are
/// optional and defaulted so partially-populated snapshots still load.
#[derive(Debug, Deserialize)]
struct EvoMapGeneAsset {
    // Gene identifier; the loader rejects entries whose trimmed id is empty.
    id: String,
    // Recorded into strategy metadata as `evomap_category`.
    #[serde(default)]
    category: Option<String>,
    // Raw matching signals: may be strings or arbitrary JSON values,
    // normalized via `value_as_signal_string`.
    #[serde(default)]
    signals_match: Vec<Value>,
    // Free-form strategy entries; the loader appends `key=value` metadata.
    #[serde(default)]
    strategy: Vec<String>,
    // Free-form validation markers; defaulted to a seed marker when empty.
    #[serde(default)]
    validation: Vec<String>,
    // Optional file-count/path constraints, flattened into strategy metadata.
    #[serde(default)]
    constraints: Option<EvoMapConstraintAsset>,
    // Recorded as `evomap_model_name` metadata.
    #[serde(default)]
    model_name: Option<String>,
    // Recorded as `evomap_schema_version` metadata (defaults to "1.5.0").
    #[serde(default)]
    schema_version: Option<String>,
    // Either a bare state string or an object with a `state` key; drives the
    // imported `AssetState` via `map_evomap_state`.
    #[serde(default)]
    compatibility: Option<Value>,
}
4364
/// Optional constraints attached to a snapshot gene; flattened into
/// `evomap_constraints_*` strategy metadata by the loader.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapConstraintAsset {
    // Maximum number of files a mutation may touch (0 when absent).
    #[serde(default)]
    max_files: Option<usize>,
    // Paths the gene must not modify; joined with `|` in metadata.
    #[serde(default)]
    forbidden_paths: Vec<String>,
}
4372
/// Top-level shape of the EvoMap `capsules.json` snapshot file.
#[derive(Debug, Deserialize)]
struct EvoMapCapsuleDocument {
    // Capsule entries; a missing `capsules` key deserializes to an empty list.
    #[serde(default)]
    capsules: Vec<EvoMapCapsuleAsset>,
}
4378
/// One capsule entry from the EvoMap snapshot, converted by the loader into
/// a `Capsule` plus a synthetic `PreparedMutation`.
#[derive(Debug, Deserialize)]
struct EvoMapCapsuleAsset {
    // Capsule identifier.
    id: String,
    // Id of the gene this capsule belongs to; must exist in the snapshot.
    gene: String,
    // Trigger signals; lowercased into the mutation intent's signals.
    #[serde(default)]
    trigger: Vec<String>,
    // Human-readable summary; used as the mutation intent when non-empty.
    #[serde(default)]
    summary: String,
    // Optional unified diff (possibly wrapped in a ``` code fence); a
    // synthetic diff is generated when absent or blank.
    #[serde(default)]
    diff: Option<String>,
    // Capsule confidence; falls back to `outcome.score`, then 0.6,
    // clamped to [0, 1].
    #[serde(default)]
    confidence: Option<f32>,
    // Reported validation outcome (status/score).
    #[serde(default)]
    outcome: Option<EvoMapOutcomeAsset>,
    // Reported change size (lines).
    #[serde(default)]
    blast_radius: Option<EvoMapBlastRadiusAsset>,
    // Optional explicit changed-file list; otherwise derived from the diff.
    #[serde(default)]
    content: Option<EvoMapCapsuleContentAsset>,
    // Loose JSON fingerprint mapped via `map_evomap_env_fingerprint`.
    #[serde(default)]
    env_fingerprint: Option<Value>,
    // Accepted for schema compatibility; ignored by the capsule loader.
    #[serde(default)]
    model_name: Option<String>,
    // Accepted for schema compatibility; ignored by the capsule loader.
    #[serde(default)]
    schema_version: Option<String>,
    // Drives the imported `AssetState` via `map_evomap_state`.
    #[serde(default)]
    compatibility: Option<Value>,
}
4406
/// Reported validation outcome for a snapshot capsule.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapOutcomeAsset {
    // Outcome label; the loader treats "success" (case-insensitive) as
    // success and a missing status as success by default.
    #[serde(default)]
    status: Option<String>,
    // Optional score, used as a confidence fallback.
    #[serde(default)]
    score: Option<f32>,
}
4414
/// Reported change size for a snapshot capsule.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapBlastRadiusAsset {
    // Number of changed lines; copied into the capsule outcome.
    #[serde(default)]
    lines: usize,
}
4420
/// Explicit changed-file list for a snapshot capsule; takes precedence over
/// files derived from the diff payload.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapCapsuleContentAsset {
    #[serde(default)]
    changed_files: Vec<String>,
}
4426
/// A seeded capsule paired with the synthetic mutation that produced it, so
/// both can be appended to the store together.
#[derive(Debug)]
struct BuiltinCapsuleSeed {
    capsule: Capsule,
    mutation: PreparedMutation,
}
4432
/// All built-in assets (hard-coded genes plus optional EvoMap snapshot
/// genes/capsules) staged for seeding into the store.
#[derive(Debug)]
struct BuiltinAssetBundle {
    genes: Vec<Gene>,
    capsules: Vec<BuiltinCapsuleSeed>,
}
4438
4439fn built_in_experience_genes() -> Vec<Gene> {
4440 vec![
4441 Gene {
4442 id: "builtin-experience-docs-rewrite-v1".into(),
4443 signals: vec!["docs.rewrite".into(), "docs".into(), "rewrite".into()],
4444 strategy: vec![
4445 "asset_origin=builtin".into(),
4446 "task_class=docs.rewrite".into(),
4447 "task_label=Docs rewrite".into(),
4448 "template_id=builtin-docs-rewrite-v1".into(),
4449 "summary=baseline docs rewrite experience".into(),
4450 ],
4451 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4452 state: AssetState::Promoted,
4453 },
4454 Gene {
4455 id: "builtin-experience-ci-fix-v1".into(),
4456 signals: vec![
4457 "ci.fix".into(),
4458 "ci".into(),
4459 "test".into(),
4460 "failure".into(),
4461 ],
4462 strategy: vec![
4463 "asset_origin=builtin".into(),
4464 "task_class=ci.fix".into(),
4465 "task_label=CI fix".into(),
4466 "template_id=builtin-ci-fix-v1".into(),
4467 "summary=baseline ci stabilization experience".into(),
4468 ],
4469 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4470 state: AssetState::Promoted,
4471 },
4472 Gene {
4473 id: "builtin-experience-task-decomposition-v1".into(),
4474 signals: vec![
4475 "task.decomposition".into(),
4476 "task".into(),
4477 "decomposition".into(),
4478 "planning".into(),
4479 ],
4480 strategy: vec![
4481 "asset_origin=builtin".into(),
4482 "task_class=task.decomposition".into(),
4483 "task_label=Task decomposition".into(),
4484 "template_id=builtin-task-decomposition-v1".into(),
4485 "summary=baseline task decomposition and routing experience".into(),
4486 ],
4487 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4488 state: AssetState::Promoted,
4489 },
4490 Gene {
4491 id: "builtin-experience-project-workflow-v1".into(),
4492 signals: vec![
4493 "project.workflow".into(),
4494 "project".into(),
4495 "workflow".into(),
4496 "milestone".into(),
4497 ],
4498 strategy: vec![
4499 "asset_origin=builtin".into(),
4500 "task_class=project.workflow".into(),
4501 "task_label=Project workflow".into(),
4502 "template_id=builtin-project-workflow-v1".into(),
4503 "summary=baseline project proposal and merge workflow experience".into(),
4504 ],
4505 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4506 state: AssetState::Promoted,
4507 },
4508 Gene {
4509 id: "builtin-experience-service-bid-v1".into(),
4510 signals: vec![
4511 "service.bid".into(),
4512 "service".into(),
4513 "bid".into(),
4514 "economics".into(),
4515 ],
4516 strategy: vec![
4517 "asset_origin=builtin".into(),
4518 "task_class=service.bid".into(),
4519 "task_label=Service bid".into(),
4520 "template_id=builtin-service-bid-v1".into(),
4521 "summary=baseline service bidding and settlement experience".into(),
4522 ],
4523 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4524 state: AssetState::Promoted,
4525 },
4526 ]
4527}
4528
4529fn evomap_snapshot_path(file_name: &str) -> PathBuf {
4530 PathBuf::from(env!("CARGO_MANIFEST_DIR"))
4531 .join(EVOMAP_SNAPSHOT_ROOT)
4532 .join(file_name)
4533}
4534
4535fn read_evomap_snapshot(file_name: &str) -> Result<Option<String>, EvoKernelError> {
4536 let path = evomap_snapshot_path(file_name);
4537 if !path.exists() {
4538 return Ok(None);
4539 }
4540 fs::read_to_string(&path).map(Some).map_err(|err| {
4541 EvoKernelError::Validation(format!(
4542 "failed to read EvoMap snapshot {}: {err}",
4543 path.display()
4544 ))
4545 })
4546}
4547
4548fn compatibility_state_from_value(value: Option<&Value>) -> Option<String> {
4549 let value = value?;
4550 if let Some(state) = value.as_str() {
4551 let normalized = state.trim().to_ascii_lowercase();
4552 if normalized.is_empty() {
4553 return None;
4554 }
4555 return Some(normalized);
4556 }
4557 value
4558 .get("state")
4559 .and_then(Value::as_str)
4560 .map(str::trim)
4561 .filter(|state| !state.is_empty())
4562 .map(|state| state.to_ascii_lowercase())
4563}
4564
4565fn map_evomap_state(value: Option<&Value>) -> AssetState {
4566 match compatibility_state_from_value(value).as_deref() {
4567 Some("promoted") => AssetState::Promoted,
4568 Some("candidate") => AssetState::Candidate,
4569 Some("quarantined") => AssetState::Quarantined,
4570 Some("shadow_validated") => AssetState::ShadowValidated,
4571 Some("revoked") => AssetState::Revoked,
4572 Some("rejected") => AssetState::Archived,
4573 Some("archived") => AssetState::Archived,
4574 _ => AssetState::Candidate,
4575 }
4576}
4577
4578fn value_as_signal_string(value: &Value) -> Option<String> {
4579 match value {
4580 Value::String(raw) => {
4581 let normalized = raw.trim();
4582 if normalized.is_empty() {
4583 None
4584 } else {
4585 Some(normalized.to_string())
4586 }
4587 }
4588 Value::Object(_) => {
4589 let serialized = serde_json::to_string(value).ok()?;
4590 let normalized = serialized.trim();
4591 if normalized.is_empty() {
4592 None
4593 } else {
4594 Some(normalized.to_string())
4595 }
4596 }
4597 Value::Null => None,
4598 other => {
4599 let rendered = other.to_string();
4600 let normalized = rendered.trim();
4601 if normalized.is_empty() {
4602 None
4603 } else {
4604 Some(normalized.to_string())
4605 }
4606 }
4607 }
4608}
4609
/// Extracts the changed file paths from a unified diff, using both
/// `+++ b/` target lines and `diff --git a/... b/...` headers.
///
/// Results are deduplicated and returned in sorted order; `/dev/null`
/// targets (deleted files) are excluded.
fn parse_diff_changed_files(payload: &str) -> Vec<String> {
    let mut changed = BTreeSet::new();
    for raw_line in payload.lines() {
        let trimmed = raw_line.trim();
        if let Some(target) = trimmed.strip_prefix("+++ b/") {
            let target = target.trim();
            if !target.is_empty() && target != "/dev/null" {
                changed.insert(target.to_string());
            }
        } else if let Some(rest) = trimmed.strip_prefix("diff --git a/") {
            if let Some((_, right)) = rest.split_once(" b/") {
                let right = right.trim();
                if !right.is_empty() {
                    changed.insert(right.to_string());
                }
            }
        }
    }
    changed.into_iter().collect()
}
4632
/// Removes a surrounding Markdown code fence (```…```) from a diff payload,
/// returning the trimmed inner text; unfenced payloads are only trimmed.
fn strip_diff_code_fence(payload: &str) -> String {
    let trimmed = payload.trim();
    if !trimmed.starts_with("```") {
        return trimmed.to_string();
    }
    // Drop the opening fence line (which may carry a language tag), then the
    // closing fence line if present.
    let mut body: Vec<&str> = trimmed.lines().skip(1).collect();
    if matches!(body.last(), Some(line) if line.trim() == "```") {
        body.pop();
    }
    body.join("\n").trim().to_string()
}
4652
4653fn synthetic_diff_for_capsule(capsule: &EvoMapCapsuleAsset) -> String {
4654 let file_path = format!("docs/evomap_builtin_capsules/{}.md", capsule.id);
4655 let mut content = Vec::new();
4656 content.push(format!("# EvoMap Builtin Capsule {}", capsule.id));
4657 if capsule.summary.trim().is_empty() {
4658 content.push("summary: missing".to_string());
4659 } else {
4660 content.push(format!("summary: {}", capsule.summary.trim()));
4661 }
4662 if !capsule.trigger.is_empty() {
4663 content.push(format!("trigger: {}", capsule.trigger.join(", ")));
4664 }
4665 content.push(format!("gene: {}", capsule.gene));
4666 let added = content
4667 .into_iter()
4668 .map(|line| format!("+{}", line.replace('\r', "")))
4669 .collect::<Vec<_>>()
4670 .join("\n");
4671 format!(
4672 "diff --git a/{file_path} b/{file_path}\nnew file mode 100644\nindex 0000000..1111111\n--- /dev/null\n+++ b/{file_path}\n@@ -0,0 +1,{line_count} @@\n{added}\n",
4673 line_count = added.lines().count()
4674 )
4675}
4676
4677fn normalized_diff_payload(capsule: &EvoMapCapsuleAsset) -> String {
4678 if let Some(raw) = capsule.diff.as_deref() {
4679 let normalized = strip_diff_code_fence(raw);
4680 if !normalized.trim().is_empty() {
4681 return normalized;
4682 }
4683 }
4684 synthetic_diff_for_capsule(capsule)
4685}
4686
4687fn env_field(value: Option<&Value>, keys: &[&str]) -> Option<String> {
4688 let object = value?.as_object()?;
4689 keys.iter().find_map(|key| {
4690 object
4691 .get(*key)
4692 .and_then(Value::as_str)
4693 .map(str::trim)
4694 .filter(|value| !value.is_empty())
4695 .map(|value| value.to_string())
4696 })
4697}
4698
4699fn map_evomap_env_fingerprint(value: Option<&Value>) -> EnvFingerprint {
4700 let os =
4701 env_field(value, &["os", "platform", "os_release"]).unwrap_or_else(|| "unknown".into());
4702 let target_triple = env_field(value, &["target_triple"]).unwrap_or_else(|| {
4703 let arch = env_field(value, &["arch"]).unwrap_or_else(|| "unknown".into());
4704 format!("{arch}-unknown-{os}")
4705 });
4706 EnvFingerprint {
4707 rustc_version: env_field(value, &["runtime", "rustc_version", "node_version"])
4708 .unwrap_or_else(|| "unknown".into()),
4709 cargo_lock_hash: env_field(value, &["cargo_lock_hash"]).unwrap_or_else(|| "unknown".into()),
4710 target_triple,
4711 os,
4712 }
4713}
4714
/// Loads the optional EvoMap snapshot files and converts them into seed
/// genes and capsules.
///
/// Returns `Ok(None)` when either snapshot file is missing. Returns
/// `EvoKernelError::Validation` for unparseable snapshots, empty gene ids,
/// capsules referencing genes absent from the snapshot, or capsules whose
/// normalized diff payload is empty.
fn load_evomap_builtin_assets() -> Result<Option<BuiltinAssetBundle>, EvoKernelError> {
    let genes_raw = read_evomap_snapshot(EVOMAP_SNAPSHOT_GENES_FILE)?;
    let capsules_raw = read_evomap_snapshot(EVOMAP_SNAPSHOT_CAPSULES_FILE)?;
    // Both files must be present for the snapshot to be usable.
    let (Some(genes_raw), Some(capsules_raw)) = (genes_raw, capsules_raw) else {
        return Ok(None);
    };

    let genes_doc: EvoMapGeneDocument = serde_json::from_str(&genes_raw).map_err(|err| {
        EvoKernelError::Validation(format!("failed to parse EvoMap genes snapshot: {err}"))
    })?;
    let capsules_doc: EvoMapCapsuleDocument =
        serde_json::from_str(&capsules_raw).map_err(|err| {
            EvoKernelError::Validation(format!("failed to parse EvoMap capsules snapshot: {err}"))
        })?;

    let mut genes = Vec::new();
    let mut known_gene_ids = BTreeSet::new();
    for source in genes_doc.genes {
        let EvoMapGeneAsset {
            id,
            category,
            signals_match,
            strategy,
            validation,
            constraints,
            model_name,
            schema_version,
            compatibility,
        } = source;
        let gene_id = id.trim();
        if gene_id.is_empty() {
            return Err(EvoKernelError::Validation(
                "EvoMap snapshot gene id must not be empty".into(),
            ));
        }
        // First occurrence of an id wins; later duplicates are dropped.
        if !known_gene_ids.insert(gene_id.to_string()) {
            continue;
        }

        // Normalize signals, preserving first-seen order while deduplicating.
        let mut seen_signals = BTreeSet::new();
        let mut signals = Vec::new();
        for signal in signals_match {
            let Some(normalized) = value_as_signal_string(&signal) else {
                continue;
            };
            if seen_signals.insert(normalized.clone()) {
                signals.push(normalized);
            }
        }
        // Every gene needs at least one signal for matching to work.
        if signals.is_empty() {
            signals.push(format!("gene:{}", gene_id.to_ascii_lowercase()));
        }

        let mut strategy = strategy
            .into_iter()
            .map(|item| item.trim().to_string())
            .filter(|item| !item.is_empty())
            .collect::<Vec<_>>();
        if strategy.is_empty() {
            strategy.push("evomap strategy missing in snapshot".into());
        }
        let constraint = constraints.unwrap_or_default();
        let compat_state = compatibility_state_from_value(compatibility.as_ref())
            .unwrap_or_else(|| "candidate".to_string());
        // Flatten snapshot metadata into `key=value` strategy entries so it
        // survives the conversion to a plain `Gene`. `ensure_strategy_metadata`
        // never overwrites a key the snapshot already provided.
        ensure_strategy_metadata(&mut strategy, "asset_origin", "builtin_evomap");
        ensure_strategy_metadata(
            &mut strategy,
            "evomap_category",
            category.as_deref().unwrap_or("unknown"),
        );
        ensure_strategy_metadata(
            &mut strategy,
            "evomap_constraints_max_files",
            &constraint.max_files.unwrap_or_default().to_string(),
        );
        ensure_strategy_metadata(
            &mut strategy,
            "evomap_constraints_forbidden_paths",
            &constraint.forbidden_paths.join("|"),
        );
        ensure_strategy_metadata(
            &mut strategy,
            "evomap_model_name",
            model_name.as_deref().unwrap_or("unknown"),
        );
        ensure_strategy_metadata(
            &mut strategy,
            "evomap_schema_version",
            schema_version.as_deref().unwrap_or("1.5.0"),
        );
        ensure_strategy_metadata(&mut strategy, "evomap_compatibility_state", &compat_state);

        let mut validation = validation
            .into_iter()
            .map(|item| item.trim().to_string())
            .filter(|item| !item.is_empty())
            .collect::<Vec<_>>();
        if validation.is_empty() {
            validation.push("evomap-builtin-seed".into());
        }

        genes.push(Gene {
            id: gene_id.to_string(),
            signals,
            strategy,
            validation,
            state: map_evomap_state(compatibility.as_ref()),
        });
    }

    let mut capsules = Vec::new();
    // Shadowing: the id set now reflects only the genes that were accepted.
    let known_gene_ids = genes
        .iter()
        .map(|gene| gene.id.clone())
        .collect::<BTreeSet<_>>();
    for source in capsules_doc.capsules {
        let EvoMapCapsuleAsset {
            id,
            gene,
            trigger,
            summary,
            diff,
            confidence,
            outcome,
            blast_radius,
            content,
            env_fingerprint,
            model_name: _model_name,
            schema_version: _schema_version,
            compatibility,
        } = source;
        // NOTE(review): the asset is rebuilt here only so that
        // `normalized_diff_payload` can borrow the already-moved `diff`;
        // only `diff` is read by that helper, so the None fields are inert.
        // Computing the normalized diff before destructuring would avoid
        // this clone-rebuild.
        let source_for_diff = EvoMapCapsuleAsset {
            id: id.clone(),
            gene: gene.clone(),
            trigger: trigger.clone(),
            summary: summary.clone(),
            diff,
            confidence,
            outcome: outcome.clone(),
            blast_radius: blast_radius.clone(),
            content: content.clone(),
            env_fingerprint: env_fingerprint.clone(),
            model_name: None,
            schema_version: None,
            compatibility: compatibility.clone(),
        };
        if !known_gene_ids.contains(gene.as_str()) {
            return Err(EvoKernelError::Validation(format!(
                "EvoMap capsule {} references unknown gene {}",
                id, gene
            )));
        }
        let normalized_diff = normalized_diff_payload(&source_for_diff);
        if normalized_diff.trim().is_empty() {
            return Err(EvoKernelError::Validation(format!(
                "EvoMap capsule {} has empty normalized diff payload",
                id
            )));
        }
        // Changed files: explicit list, else derived from the diff, else a
        // synthetic docs path so the list is never empty.
        let mut changed_files = content
            .as_ref()
            .map(|content| {
                content
                    .changed_files
                    .iter()
                    .map(|item| item.trim().to_string())
                    .filter(|item| !item.is_empty())
                    .collect::<Vec<_>>()
            })
            .unwrap_or_default();
        if changed_files.is_empty() {
            changed_files = parse_diff_changed_files(&normalized_diff);
        }
        if changed_files.is_empty() {
            changed_files.push(format!("docs/evomap_builtin_capsules/{}.md", id));
        }

        // Confidence: explicit value, else outcome score, else 0.6 default.
        let confidence = confidence
            .or_else(|| outcome.as_ref().and_then(|outcome| outcome.score))
            .unwrap_or(0.6)
            .clamp(0.0, 1.0);
        // A missing outcome status is treated as success.
        let status_success = outcome
            .as_ref()
            .and_then(|outcome| outcome.status.as_deref())
            .map(|status| status.eq_ignore_ascii_case("success"))
            .unwrap_or(true);
        let blast_radius = blast_radius.unwrap_or_default();
        let mutation_id = format!("builtin-evomap-mutation-{}", id);
        let intent = MutationIntent {
            id: mutation_id.clone(),
            intent: if summary.trim().is_empty() {
                format!("apply EvoMap capsule {}", id)
            } else {
                summary.trim().to_string()
            },
            target: MutationTarget::Paths {
                allow: changed_files.clone(),
            },
            expected_effect: format!("seed replay candidate from EvoMap capsule {}", id),
            risk: RiskLevel::Low,
            signals: if trigger.is_empty() {
                vec![format!("capsule:{}", id.to_ascii_lowercase())]
            } else {
                trigger
                    .iter()
                    .map(|signal| signal.trim().to_ascii_lowercase())
                    .filter(|signal| !signal.is_empty())
                    .collect::<Vec<_>>()
            },
            spec_id: None,
        };
        let mutation = PreparedMutation {
            intent,
            artifact: oris_evolution::MutationArtifact {
                encoding: ArtifactEncoding::UnifiedDiff,
                payload: normalized_diff.clone(),
                base_revision: None,
                content_hash: compute_artifact_hash(&normalized_diff),
            },
        };
        let capsule = Capsule {
            id: id.clone(),
            gene_id: gene.clone(),
            mutation_id,
            run_id: EVOMAP_BUILTIN_RUN_ID.to_string(),
            diff_hash: compute_artifact_hash(&normalized_diff),
            confidence,
            env: map_evomap_env_fingerprint(env_fingerprint.as_ref()),
            outcome: Outcome {
                success: status_success,
                validation_profile: "evomap-builtin-seed".into(),
                validation_duration_ms: 0,
                changed_files,
                validator_hash: "builtin-evomap".into(),
                lines_changed: blast_radius.lines,
                replay_verified: false,
            },
            state: map_evomap_state(compatibility.as_ref()),
        };
        capsules.push(BuiltinCapsuleSeed { capsule, mutation });
    }

    Ok(Some(BuiltinAssetBundle { genes, capsules }))
}
4959
/// Seeds the store with the hard-coded experience genes plus any EvoMap
/// snapshot assets, skipping anything whose id is already in the store.
///
/// Per-state handling: `Promoted`/`Candidate` genes are promoted
/// immediately (cold-start compatibility) and their capsules released;
/// `Quarantined`/`ShadowValidated` assets are quarantined pending local
/// validation; `Revoked`/`Archived` assets are recorded without a
/// state-transition event.
///
/// # Errors
/// Returns `EvoKernelError::Validation` when a seed capsule references a
/// gene id that is neither seeded nor already known; store append failures
/// are propagated via `store_err`.
fn ensure_builtin_experience_assets_in_store(
    store: &dyn EvolutionStore,
    sender_id: String,
) -> Result<ImportOutcome, EvoKernelError> {
    // Snapshot what the store already knows so seeding is idempotent.
    let (events, projection) = scan_projection(store)?;
    let mut known_gene_ids = projection
        .genes
        .into_iter()
        .map(|gene| gene.id)
        .collect::<BTreeSet<_>>();
    let mut known_capsule_ids = projection
        .capsules
        .into_iter()
        .map(|capsule| capsule.id)
        .collect::<BTreeSet<_>>();
    let mut known_mutation_ids = BTreeSet::new();
    for stored in &events {
        if let EvolutionEvent::MutationDeclared { mutation } = &stored.event {
            known_mutation_ids.insert(mutation.intent.id.clone());
        }
    }
    let normalized_sender = normalized_sender_id(&sender_id);
    let mut imported_asset_ids = Vec::new();
    // Start from the hard-coded genes, then extend with snapshot assets.
    let mut bundle = BuiltinAssetBundle {
        genes: built_in_experience_genes(),
        capsules: Vec::new(),
    };
    if let Some(snapshot_bundle) = load_evomap_builtin_assets()? {
        bundle.genes.extend(snapshot_bundle.genes);
        bundle.capsules.extend(snapshot_bundle.capsules);
    }
    let scanned_count = bundle.genes.len() + bundle.capsules.len();

    for gene in bundle.genes {
        // `insert` returning false means the gene already exists: skip it.
        if !known_gene_ids.insert(gene.id.clone()) {
            continue;
        }

        store
            .append_event(EvolutionEvent::RemoteAssetImported {
                source: CandidateSource::Local,
                asset_ids: vec![gene.id.clone()],
                sender_id: normalized_sender.clone(),
            })
            .map_err(store_err)?;
        store
            .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
            .map_err(store_err)?;
        match gene.state {
            AssetState::Revoked | AssetState::Archived => {}
            AssetState::Quarantined | AssetState::ShadowValidated => {
                store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: gene.id.clone(),
                        state: AssetState::Quarantined,
                        reason:
                            "built-in EvoMap asset requires additional validation before promotion"
                                .into(),
                        reason_code: TransitionReasonCode::DowngradeBuiltinRequiresValidation,
                        evidence: None,
                    })
                    .map_err(store_err)?;
            }
            AssetState::Promoted | AssetState::Candidate => {
                store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: gene.id.clone(),
                        state: AssetState::Promoted,
                        reason: "built-in experience asset promoted for cold-start compatibility"
                            .into(),
                        reason_code: TransitionReasonCode::PromotionBuiltinColdStartCompatibility,
                        evidence: None,
                    })
                    .map_err(store_err)?;
                store
                    .append_event(EvolutionEvent::GenePromoted {
                        gene_id: gene.id.clone(),
                    })
                    .map_err(store_err)?;
            }
        }
        imported_asset_ids.push(gene.id.clone());
    }

    for seed in bundle.capsules {
        // Capsules must reference a gene that is seeded or already known.
        if !known_gene_ids.contains(seed.capsule.gene_id.as_str()) {
            return Err(EvoKernelError::Validation(format!(
                "built-in capsule {} references unknown gene {}",
                seed.capsule.id, seed.capsule.gene_id
            )));
        }
        // Declare the backing mutation once, even if the capsule is skipped.
        if known_mutation_ids.insert(seed.mutation.intent.id.clone()) {
            store
                .append_event(EvolutionEvent::MutationDeclared {
                    mutation: seed.mutation.clone(),
                })
                .map_err(store_err)?;
        }
        if !known_capsule_ids.insert(seed.capsule.id.clone()) {
            continue;
        }
        store
            .append_event(EvolutionEvent::RemoteAssetImported {
                source: CandidateSource::Local,
                asset_ids: vec![seed.capsule.id.clone()],
                sender_id: normalized_sender.clone(),
            })
            .map_err(store_err)?;
        store
            .append_event(EvolutionEvent::CapsuleCommitted {
                capsule: seed.capsule.clone(),
            })
            .map_err(store_err)?;
        match seed.capsule.state {
            AssetState::Revoked | AssetState::Archived => {}
            AssetState::Quarantined | AssetState::ShadowValidated => {
                store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: seed.capsule.id.clone(),
                    })
                    .map_err(store_err)?;
            }
            AssetState::Promoted | AssetState::Candidate => {
                store
                    .append_event(EvolutionEvent::CapsuleReleased {
                        capsule_id: seed.capsule.id.clone(),
                        state: AssetState::Promoted,
                    })
                    .map_err(store_err)?;
            }
        }
        imported_asset_ids.push(seed.capsule.id.clone());
    }

    let next_cursor = latest_store_cursor(store)?;
    let resume_token = next_cursor.as_ref().and_then(|cursor| {
        normalized_sender
            .as_deref()
            .map(|sender| encode_resume_token(sender, cursor))
    });
    let applied_count = imported_asset_ids.len();
    let skipped_count = scanned_count.saturating_sub(applied_count);

    Ok(ImportOutcome {
        imported_asset_ids,
        accepted: true,
        // NOTE(review): `next_cursor` is no longer borrowed at this point,
        // so the clone below could be a plain move.
        next_cursor: next_cursor.clone(),
        resume_token,
        sync_audit: SyncAudit {
            batch_id: next_id("sync-import"),
            requested_cursor: None,
            scanned_count,
            applied_count,
            skipped_count,
            failed_count: 0,
            failure_reasons: Vec::new(),
        },
    })
}
5121
/// Looks up a `key=value` entry in a gene strategy list.
///
/// Keys compare case-insensitively after trimming; entries without `=` or
/// with a blank value are skipped. Returns the first non-blank value,
/// trimmed.
fn strategy_metadata_value(strategy: &[String], key: &str) -> Option<String> {
    for entry in strategy {
        let Some((entry_key, entry_value)) = entry.split_once('=') else {
            continue;
        };
        if !entry_key.trim().eq_ignore_ascii_case(key) {
            continue;
        }
        let trimmed = entry_value.trim();
        // A matching key with a blank value does not satisfy the lookup.
        if !trimmed.is_empty() {
            return Some(trimmed.to_string());
        }
    }
    None
}
5137
5138fn ensure_strategy_metadata(strategy: &mut Vec<String>, key: &str, value: &str) {
5139 let normalized = value.trim();
5140 if normalized.is_empty() || strategy_metadata_value(strategy, key).is_some() {
5141 return;
5142 }
5143 strategy.push(format!("{key}={normalized}"));
5144}
5145
/// Enforces a retention cap on promoted reported-experience genes for one
/// task class: only the `keep_latest` most recently updated genes are kept,
/// the rest are revoked and their capsules quarantined.
///
/// A blank `task_class` or `keep_latest == 0` disables enforcement.
/// Store append failures are propagated via `store_err`.
fn enforce_reported_experience_retention(
    store: &dyn EvolutionStore,
    task_class: &str,
    keep_latest: usize,
) -> Result<(), EvoKernelError> {
    let task_class = task_class.trim();
    if task_class.is_empty() || keep_latest == 0 {
        return Ok(());
    }

    let (_, projection) = scan_projection(store)?;
    // Candidates: promoted genes whose strategy metadata marks them as
    // reported experience for this task class, paired with their
    // last-updated timestamp (default when the projection has none).
    let mut candidates = projection
        .genes
        .iter()
        .filter(|gene| gene.state == AssetState::Promoted)
        .filter_map(|gene| {
            let origin = strategy_metadata_value(&gene.strategy, "asset_origin")?;
            if !origin.eq_ignore_ascii_case("reported_experience") {
                return None;
            }
            let gene_task_class = strategy_metadata_value(&gene.strategy, "task_class")?;
            if !gene_task_class.eq_ignore_ascii_case(task_class) {
                return None;
            }
            let updated_at = projection
                .last_updated_at
                .get(&gene.id)
                .cloned()
                .unwrap_or_default();
            Some((gene.id.clone(), updated_at))
        })
        .collect::<Vec<_>>();
    if candidates.len() <= keep_latest {
        return Ok(());
    }

    // Newest first by last-updated timestamp, ties broken by descending
    // gene id; everything past `keep_latest` is stale.
    candidates.sort_by(|left, right| right.1.cmp(&left.1).then_with(|| right.0.cmp(&left.0)));
    let stale_gene_ids = candidates
        .into_iter()
        .skip(keep_latest)
        .map(|(gene_id, _)| gene_id)
        .collect::<BTreeSet<_>>();
    if stale_gene_ids.is_empty() {
        return Ok(());
    }

    let reason =
        format!("reported experience retention limit exceeded for task_class={task_class}");
    for gene_id in &stale_gene_ids {
        store
            .append_event(EvolutionEvent::GeneRevoked {
                gene_id: gene_id.clone(),
                reason: reason.clone(),
            })
            .map_err(store_err)?;
    }

    // Capsules belonging to revoked genes are quarantined, not deleted.
    let stale_capsule_ids = projection
        .capsules
        .iter()
        .filter(|capsule| stale_gene_ids.contains(&capsule.gene_id))
        .map(|capsule| capsule.id.clone())
        .collect::<BTreeSet<_>>();
    for capsule_id in stale_capsule_ids {
        store
            .append_event(EvolutionEvent::CapsuleQuarantined { capsule_id })
            .map_err(store_err)?;
    }
    Ok(())
}
5216
5217fn record_reported_experience_in_store(
5218 store: &dyn EvolutionStore,
5219 sender_id: String,
5220 gene_id: String,
5221 signals: Vec<String>,
5222 strategy: Vec<String>,
5223 validation: Vec<String>,
5224) -> Result<ImportOutcome, EvoKernelError> {
5225 let gene_id = gene_id.trim();
5226 if gene_id.is_empty() {
5227 return Err(EvoKernelError::Validation(
5228 "reported experience gene_id must not be empty".into(),
5229 ));
5230 }
5231
5232 let mut unique_signals = BTreeSet::new();
5233 let mut normalized_signals = Vec::new();
5234 for signal in signals {
5235 let normalized = signal.trim().to_ascii_lowercase();
5236 if normalized.is_empty() {
5237 continue;
5238 }
5239 if unique_signals.insert(normalized.clone()) {
5240 normalized_signals.push(normalized);
5241 }
5242 }
5243 if normalized_signals.is_empty() {
5244 return Err(EvoKernelError::Validation(
5245 "reported experience signals must not be empty".into(),
5246 ));
5247 }
5248
5249 let mut unique_strategy = BTreeSet::new();
5250 let mut normalized_strategy = Vec::new();
5251 for entry in strategy {
5252 let normalized = entry.trim().to_string();
5253 if normalized.is_empty() {
5254 continue;
5255 }
5256 if unique_strategy.insert(normalized.clone()) {
5257 normalized_strategy.push(normalized);
5258 }
5259 }
5260 if normalized_strategy.is_empty() {
5261 normalized_strategy.push("reported local replay experience".into());
5262 }
5263 let task_class_id = strategy_metadata_value(&normalized_strategy, "task_class")
5264 .or_else(|| normalized_signals.first().cloned())
5265 .unwrap_or_else(|| "reported-experience".into());
5266 let task_label = strategy_metadata_value(&normalized_strategy, "task_label")
5267 .or_else(|| normalized_signals.first().cloned())
5268 .unwrap_or_else(|| task_class_id.clone());
5269 ensure_strategy_metadata(
5270 &mut normalized_strategy,
5271 "asset_origin",
5272 "reported_experience",
5273 );
5274 ensure_strategy_metadata(&mut normalized_strategy, "task_class", &task_class_id);
5275 ensure_strategy_metadata(&mut normalized_strategy, "task_label", &task_label);
5276
5277 let mut unique_validation = BTreeSet::new();
5278 let mut normalized_validation = Vec::new();
5279 for entry in validation {
5280 let normalized = entry.trim().to_string();
5281 if normalized.is_empty() {
5282 continue;
5283 }
5284 if unique_validation.insert(normalized.clone()) {
5285 normalized_validation.push(normalized);
5286 }
5287 }
5288 if normalized_validation.is_empty() {
5289 normalized_validation.push("a2a.tasks.report".into());
5290 }
5291
5292 let gene = Gene {
5293 id: gene_id.to_string(),
5294 signals: normalized_signals,
5295 strategy: normalized_strategy,
5296 validation: normalized_validation,
5297 state: AssetState::Promoted,
5298 };
5299 let normalized_sender = normalized_sender_id(&sender_id);
5300
5301 store
5302 .append_event(EvolutionEvent::RemoteAssetImported {
5303 source: CandidateSource::Local,
5304 asset_ids: vec![gene.id.clone()],
5305 sender_id: normalized_sender.clone(),
5306 })
5307 .map_err(store_err)?;
5308 store
5309 .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
5310 .map_err(store_err)?;
5311 store
5312 .append_event(EvolutionEvent::PromotionEvaluated {
5313 gene_id: gene.id.clone(),
5314 state: AssetState::Promoted,
5315 reason: "trusted local report promoted reusable experience".into(),
5316 reason_code: TransitionReasonCode::PromotionTrustedLocalReport,
5317 evidence: None,
5318 })
5319 .map_err(store_err)?;
5320 store
5321 .append_event(EvolutionEvent::GenePromoted {
5322 gene_id: gene.id.clone(),
5323 })
5324 .map_err(store_err)?;
5325 enforce_reported_experience_retention(
5326 store,
5327 &task_class_id,
5328 REPORTED_EXPERIENCE_RETENTION_LIMIT,
5329 )?;
5330
5331 let imported_asset_ids = vec![gene.id];
5332 let next_cursor = latest_store_cursor(store)?;
5333 let resume_token = next_cursor.as_ref().and_then(|cursor| {
5334 normalized_sender
5335 .as_deref()
5336 .map(|sender| encode_resume_token(sender, cursor))
5337 });
5338 Ok(ImportOutcome {
5339 imported_asset_ids,
5340 accepted: true,
5341 next_cursor,
5342 resume_token,
5343 sync_audit: SyncAudit {
5344 batch_id: next_id("sync-import"),
5345 requested_cursor: None,
5346 scanned_count: 1,
5347 applied_count: 1,
5348 skipped_count: 0,
5349 failed_count: 0,
5350 failure_reasons: Vec::new(),
5351 },
5352 })
5353}
5354
/// Normalizes a sender id: whitespace-only ids are treated as absent,
/// everything else is returned trimmed.
fn normalized_sender_id(sender_id: &str) -> Option<String> {
    let sender = sender_id.trim();
    (!sender.is_empty()).then(|| sender.to_string())
}
5363
/// Collects trimmed, non-empty asset ids; the set both deduplicates and
/// sorts them.
fn normalized_asset_ids(asset_ids: &[String]) -> BTreeSet<String> {
    let mut normalized = BTreeSet::new();
    for asset_id in asset_ids {
        let trimmed = asset_id.trim();
        if !trimmed.is_empty() {
            normalized.insert(trimmed.to_string());
        }
    }
    normalized
}
5371
5372fn validate_remote_revoke_notice_assets(
5373 store: &dyn EvolutionStore,
5374 notice: &RevokeNotice,
5375) -> Result<(String, BTreeSet<String>), EvoKernelError> {
5376 let sender_id = normalized_sender_id(¬ice.sender_id).ok_or_else(|| {
5377 EvoKernelError::Validation("revoke notice sender_id must not be empty".into())
5378 })?;
5379 let requested = normalized_asset_ids(¬ice.asset_ids);
5380 if requested.is_empty() {
5381 return Ok((sender_id, requested));
5382 }
5383
5384 let remote_publishers = remote_publishers_by_asset_from_store(store);
5385 let has_remote_assets = requested
5386 .iter()
5387 .any(|asset_id| remote_publishers.contains_key(asset_id));
5388 if !has_remote_assets {
5389 return Ok((sender_id, requested));
5390 }
5391
5392 let unauthorized = requested
5393 .iter()
5394 .filter(|asset_id| {
5395 remote_publishers.get(*asset_id).map(String::as_str) != Some(sender_id.as_str())
5396 })
5397 .cloned()
5398 .collect::<Vec<_>>();
5399 if !unauthorized.is_empty() {
5400 return Err(EvoKernelError::Validation(format!(
5401 "remote revoke notice contains assets not owned by sender {sender_id}: {}",
5402 unauthorized.join(", ")
5403 )));
5404 }
5405
5406 Ok((sender_id, requested))
5407}
5408
/// Renders a single-line audit summary for a replay-failure revocation.
/// An unknown source sender is labeled "unavailable".
fn replay_failure_revocation_summary(
    replay_failures: u64,
    current_confidence: f32,
    historical_peak_confidence: f32,
    source_sender_id: Option<&str>,
) -> String {
    let sender = source_sender_id.unwrap_or("unavailable");
    format!(
        "phase=replay_failure_revocation; source_sender_id={sender}; replay_failures={replay_failures}; current_confidence={current_confidence:.3}; historical_peak_confidence={historical_peak_confidence:.3}"
    )
}
5420
5421fn record_manifest_validation(
5422 store: &dyn EvolutionStore,
5423 envelope: &EvolutionEnvelope,
5424 accepted: bool,
5425 reason: impl Into<String>,
5426) -> Result<(), EvoKernelError> {
5427 let manifest = envelope.manifest.as_ref();
5428 let sender_id = manifest
5429 .and_then(|value| normalized_sender_id(&value.sender_id))
5430 .or_else(|| normalized_sender_id(&envelope.sender_id));
5431 let publisher = manifest.and_then(|value| normalized_sender_id(&value.publisher));
5432 let asset_ids = manifest
5433 .map(|value| value.asset_ids.clone())
5434 .unwrap_or_else(|| EvolutionEnvelope::manifest_asset_ids(&envelope.assets));
5435
5436 store
5437 .append_event(EvolutionEvent::ManifestValidated {
5438 accepted,
5439 reason: reason.into(),
5440 sender_id,
5441 publisher,
5442 asset_ids,
5443 })
5444 .map_err(store_err)?;
5445 Ok(())
5446}
5447
5448fn record_remote_publisher_for_asset(
5449 remote_publishers: Option<&Mutex<BTreeMap<String, String>>>,
5450 sender_id: &str,
5451 asset: &NetworkAsset,
5452) {
5453 let Some(remote_publishers) = remote_publishers else {
5454 return;
5455 };
5456 let sender_id = sender_id.trim();
5457 if sender_id.is_empty() {
5458 return;
5459 }
5460 let Ok(mut publishers) = remote_publishers.lock() else {
5461 return;
5462 };
5463 match asset {
5464 NetworkAsset::Gene { gene } => {
5465 publishers.insert(gene.id.clone(), sender_id.to_string());
5466 }
5467 NetworkAsset::Capsule { capsule } => {
5468 publishers.insert(capsule.id.clone(), sender_id.to_string());
5469 }
5470 NetworkAsset::EvolutionEvent { .. } => {}
5471 }
5472}
5473
5474fn remote_publishers_by_asset_from_store(store: &dyn EvolutionStore) -> BTreeMap<String, String> {
5475 let Ok(events) = store.scan(1) else {
5476 return BTreeMap::new();
5477 };
5478 remote_publishers_by_asset_from_events(&events)
5479}
5480
5481fn remote_publishers_by_asset_from_events(
5482 events: &[StoredEvolutionEvent],
5483) -> BTreeMap<String, String> {
5484 let mut imported_asset_publishers = BTreeMap::<String, String>::new();
5485 let mut known_gene_ids = BTreeSet::<String>::new();
5486 let mut known_capsule_ids = BTreeSet::<String>::new();
5487 let mut publishers_by_asset = BTreeMap::<String, String>::new();
5488
5489 for stored in events {
5490 match &stored.event {
5491 EvolutionEvent::RemoteAssetImported {
5492 source: CandidateSource::Remote,
5493 asset_ids,
5494 sender_id,
5495 } => {
5496 let Some(sender_id) = sender_id.as_deref().and_then(normalized_sender_id) else {
5497 continue;
5498 };
5499 for asset_id in asset_ids {
5500 imported_asset_publishers.insert(asset_id.clone(), sender_id.clone());
5501 if known_gene_ids.contains(asset_id) || known_capsule_ids.contains(asset_id) {
5502 publishers_by_asset.insert(asset_id.clone(), sender_id.clone());
5503 }
5504 }
5505 }
5506 EvolutionEvent::GeneProjected { gene } => {
5507 known_gene_ids.insert(gene.id.clone());
5508 if let Some(sender_id) = imported_asset_publishers.get(&gene.id) {
5509 publishers_by_asset.insert(gene.id.clone(), sender_id.clone());
5510 }
5511 }
5512 EvolutionEvent::CapsuleCommitted { capsule } => {
5513 known_capsule_ids.insert(capsule.id.clone());
5514 if let Some(sender_id) = imported_asset_publishers.get(&capsule.id) {
5515 publishers_by_asset.insert(capsule.id.clone(), sender_id.clone());
5516 }
5517 }
5518 _ => {}
5519 }
5520 }
5521
5522 publishers_by_asset
5523}
5524
5525fn should_import_remote_event(event: &EvolutionEvent) -> bool {
5526 matches!(
5527 event,
5528 EvolutionEvent::MutationDeclared { .. } | EvolutionEvent::SpecLinked { .. }
5529 )
5530}
5531
/// Serves a replay-asset fetch against the local store.
///
/// Resolves the caller's cursor/resume token into one effective cursor,
/// filters promoted genes by the query signals (case-insensitive substring
/// match in either direction) plus their promoted capsules, and — when a
/// cursor was supplied — narrows the result to assets touched since that
/// cursor. The sync audit compares the narrowed set against the full match
/// set for its scanned/skipped counts.
fn fetch_assets_from_store(
    store: &dyn EvolutionStore,
    responder_id: impl Into<String>,
    query: &FetchQuery,
) -> Result<FetchResponse, EvoKernelError> {
    let (events, projection) = scan_projection(store)?;
    // since_cursor and resume_token are reconciled into a single cursor.
    let requested_cursor = resolve_requested_cursor(
        &query.sender_id,
        query.since_cursor.as_deref(),
        query.resume_token.as_deref(),
    )?;
    // Absent or unparsable cursor means "from the beginning" (seq 0).
    let since_seq = requested_cursor
        .as_deref()
        .and_then(parse_sync_cursor_seq)
        .unwrap_or(0);
    let normalized_signals: Vec<String> = query
        .signals
        .iter()
        .map(|signal| signal.trim().to_ascii_lowercase())
        .filter(|signal| !signal.is_empty())
        .collect();
    // No query signals means "match everything"; otherwise a candidate signal
    // matches when it contains, or is contained by, any query signal.
    let matches_any_signal = |candidate: &str| {
        if normalized_signals.is_empty() {
            return true;
        }
        let candidate = candidate.to_ascii_lowercase();
        normalized_signals
            .iter()
            .any(|signal| candidate.contains(signal) || signal.contains(&candidate))
    };

    // Only promoted assets are eligible for export; capsules must belong to a
    // matched gene.
    let matched_genes: Vec<Gene> = projection
        .genes
        .into_iter()
        .filter(|gene| gene.state == AssetState::Promoted)
        .filter(|gene| gene.signals.iter().any(|signal| matches_any_signal(signal)))
        .collect();
    let matched_gene_ids: BTreeSet<String> =
        matched_genes.iter().map(|gene| gene.id.clone()).collect();
    let matched_capsules: Vec<Capsule> = projection
        .capsules
        .into_iter()
        .filter(|capsule| capsule.state == AssetState::Promoted)
        .filter(|capsule| matched_gene_ids.contains(&capsule.gene_id))
        .collect();
    // Full (non-delta) export set; used below for the audit counters.
    let all_assets = replay_export_assets(&events, matched_genes.clone(), matched_capsules.clone());
    let (selected_genes, selected_capsules) = if requested_cursor.is_some() {
        // Delta mode: keep only capsules changed since the cursor (by capsule
        // id or mutation id), plus genes that changed or back those capsules.
        let delta = delta_window(&events, since_seq);
        let selected_capsules = matched_capsules
            .into_iter()
            .filter(|capsule| {
                delta.changed_capsule_ids.contains(&capsule.id)
                    || delta.changed_mutation_ids.contains(&capsule.mutation_id)
            })
            .collect::<Vec<_>>();
        let selected_gene_ids = selected_capsules
            .iter()
            .map(|capsule| capsule.gene_id.clone())
            .collect::<BTreeSet<_>>();
        let selected_genes = matched_genes
            .into_iter()
            .filter(|gene| {
                delta.changed_gene_ids.contains(&gene.id) || selected_gene_ids.contains(&gene.id)
            })
            .collect::<Vec<_>>();
        (selected_genes, selected_capsules)
    } else {
        (matched_genes, matched_capsules)
    };
    let assets = replay_export_assets(&events, selected_genes, selected_capsules);
    // The next cursor always points at the latest event, regardless of filtering.
    let next_cursor = events.last().map(|stored| format_sync_cursor(stored.seq));
    let resume_token = next_cursor
        .as_ref()
        .map(|cursor| encode_resume_token(&query.sender_id, cursor));
    let applied_count = assets.len();
    let skipped_count = all_assets.len().saturating_sub(applied_count);

    Ok(FetchResponse {
        sender_id: responder_id.into(),
        assets,
        next_cursor: next_cursor.clone(),
        resume_token,
        sync_audit: SyncAudit {
            batch_id: next_id("sync-fetch"),
            requested_cursor,
            scanned_count: all_assets.len(),
            applied_count,
            skipped_count,
            failed_count: 0,
            failure_reasons: Vec::new(),
        },
    })
}
5625
5626fn revoke_assets_in_store(
5627 store: &dyn EvolutionStore,
5628 notice: &RevokeNotice,
5629) -> Result<RevokeNotice, EvoKernelError> {
5630 let projection = projection_snapshot(store)?;
5631 let (sender_id, requested) = validate_remote_revoke_notice_assets(store, notice)?;
5632 let mut revoked_gene_ids = BTreeSet::new();
5633 let mut quarantined_capsule_ids = BTreeSet::new();
5634
5635 for gene in &projection.genes {
5636 if requested.contains(&gene.id) {
5637 revoked_gene_ids.insert(gene.id.clone());
5638 }
5639 }
5640 for capsule in &projection.capsules {
5641 if requested.contains(&capsule.id) {
5642 quarantined_capsule_ids.insert(capsule.id.clone());
5643 revoked_gene_ids.insert(capsule.gene_id.clone());
5644 }
5645 }
5646 for capsule in &projection.capsules {
5647 if revoked_gene_ids.contains(&capsule.gene_id) {
5648 quarantined_capsule_ids.insert(capsule.id.clone());
5649 }
5650 }
5651
5652 for gene_id in &revoked_gene_ids {
5653 store
5654 .append_event(EvolutionEvent::GeneRevoked {
5655 gene_id: gene_id.clone(),
5656 reason: notice.reason.clone(),
5657 })
5658 .map_err(store_err)?;
5659 }
5660 for capsule_id in &quarantined_capsule_ids {
5661 store
5662 .append_event(EvolutionEvent::CapsuleQuarantined {
5663 capsule_id: capsule_id.clone(),
5664 })
5665 .map_err(store_err)?;
5666 }
5667
5668 let mut affected_ids: Vec<String> = revoked_gene_ids.into_iter().collect();
5669 affected_ids.extend(quarantined_capsule_ids);
5670 affected_ids.sort();
5671 affected_ids.dedup();
5672
5673 Ok(RevokeNotice {
5674 sender_id,
5675 asset_ids: affected_ids,
5676 reason: notice.reason.clone(),
5677 })
5678}
5679
5680fn evolution_metrics_snapshot(
5681 store: &dyn EvolutionStore,
5682) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
5683 let (events, projection) = scan_projection(store)?;
5684 let replay = collect_replay_roi_aggregate(&events, &projection, None);
5685 let replay_reasoning_avoided_total = replay.replay_success_total;
5686 let confidence_revalidations_total = events
5687 .iter()
5688 .filter(|stored| is_confidence_revalidation_event(&stored.event))
5689 .count() as u64;
5690 let mutation_declared_total = events
5691 .iter()
5692 .filter(|stored| matches!(stored.event, EvolutionEvent::MutationDeclared { .. }))
5693 .count() as u64;
5694 let promoted_mutations_total = events
5695 .iter()
5696 .filter(|stored| matches!(stored.event, EvolutionEvent::GenePromoted { .. }))
5697 .count() as u64;
5698 let gene_revocations_total = events
5699 .iter()
5700 .filter(|stored| matches!(stored.event, EvolutionEvent::GeneRevoked { .. }))
5701 .count() as u64;
5702 let cutoff = Utc::now() - Duration::hours(1);
5703 let mutation_velocity_last_hour = count_recent_events(&events, cutoff, |event| {
5704 matches!(event, EvolutionEvent::MutationDeclared { .. })
5705 });
5706 let revoke_frequency_last_hour = count_recent_events(&events, cutoff, |event| {
5707 matches!(event, EvolutionEvent::GeneRevoked { .. })
5708 });
5709 let promoted_genes = projection
5710 .genes
5711 .iter()
5712 .filter(|gene| gene.state == AssetState::Promoted)
5713 .count() as u64;
5714 let promoted_capsules = projection
5715 .capsules
5716 .iter()
5717 .filter(|capsule| capsule.state == AssetState::Promoted)
5718 .count() as u64;
5719
5720 Ok(EvolutionMetricsSnapshot {
5721 replay_attempts_total: replay.replay_attempts_total,
5722 replay_success_total: replay.replay_success_total,
5723 replay_success_rate: safe_ratio(replay.replay_success_total, replay.replay_attempts_total),
5724 confidence_revalidations_total,
5725 replay_reasoning_avoided_total,
5726 reasoning_avoided_tokens_total: replay.reasoning_avoided_tokens_total,
5727 replay_fallback_cost_total: replay.replay_fallback_cost_total,
5728 replay_roi: compute_replay_roi(
5729 replay.reasoning_avoided_tokens_total,
5730 replay.replay_fallback_cost_total,
5731 ),
5732 replay_task_classes: replay.replay_task_classes,
5733 replay_sources: replay.replay_sources,
5734 mutation_declared_total,
5735 promoted_mutations_total,
5736 promotion_ratio: safe_ratio(promoted_mutations_total, mutation_declared_total),
5737 gene_revocations_total,
5738 mutation_velocity_last_hour,
5739 revoke_frequency_last_hour,
5740 promoted_genes,
5741 promoted_capsules,
5742 last_event_seq: events.last().map(|stored| stored.seq).unwrap_or(0),
5743 })
5744}
5745
/// Replay return-on-investment totals folded out of the event log by
/// `collect_replay_roi_aggregate`.
struct ReplayRoiAggregate {
    // Always the sum of the success and failure totals below.
    replay_attempts_total: u64,
    replay_success_total: u64,
    replay_failure_total: u64,
    // Tokens reported (or estimated via the token floor) as avoided by replays.
    reasoning_avoided_tokens_total: u64,
    // Cost attributed to replays that fell back / failed validation.
    replay_fallback_cost_total: u64,
    // Per-task-class breakdown of the same totals.
    replay_task_classes: Vec<ReplayTaskClassMetrics>,
    // Per-source-sender breakdown; only populated from explicit evidence.
    replay_sources: Vec<ReplaySourceRoiMetrics>,
}
5755
/// Folds replay economics out of the event log into a `ReplayRoiAggregate`.
///
/// When `ReplayEconomicsRecorded` evidence exists inside the optional
/// `cutoff` window, that evidence is the authoritative source for all totals
/// and for the per-task-class / per-source breakdowns. Otherwise the function
/// falls back to counting raw `CapsuleReused` / replay-validation-failure
/// events, pricing each at `REPLAY_REASONING_TOKEN_FLOOR` tokens. The
/// per-source breakdown is only populated on the evidence path.
fn collect_replay_roi_aggregate(
    events: &[StoredEvolutionEvent],
    projection: &EvolutionProjection,
    cutoff: Option<DateTime<Utc>>,
) -> ReplayRoiAggregate {
    let replay_evidences = events
        .iter()
        .filter(|stored| replay_event_in_scope(stored, cutoff))
        .filter_map(|stored| match &stored.event {
            EvolutionEvent::ReplayEconomicsRecorded { evidence, .. } => Some(evidence.clone()),
            _ => None,
        })
        .collect::<Vec<_>>();

    // Tuple layout in both maps:
    // (successes, failures, avoided_tokens, fallback_cost).
    let mut task_totals = BTreeMap::<(String, String), (u64, u64, u64, u64)>::new();
    let mut source_totals = BTreeMap::<String, (u64, u64, u64, u64)>::new();

    let (
        replay_success_total,
        replay_failure_total,
        reasoning_avoided_tokens_total,
        replay_fallback_cost_total,
    ) = if replay_evidences.is_empty() {
        // Fallback path: no explicit economics evidence; derive totals from
        // reuse/failure events and attribute task classes via gene signals.
        let gene_task_classes = projection
            .genes
            .iter()
            .map(|gene| (gene.id.clone(), replay_task_descriptor(&gene.signals)))
            .collect::<BTreeMap<_, _>>();
        let mut replay_success_total = 0_u64;
        let mut replay_failure_total = 0_u64;

        for stored in events
            .iter()
            .filter(|stored| replay_event_in_scope(stored, cutoff))
        {
            match &stored.event {
                EvolutionEvent::CapsuleReused { gene_id, .. } => {
                    replay_success_total += 1;
                    // Only genes with a known task descriptor contribute to the
                    // per-task-class breakdown.
                    if let Some((task_class_id, task_label)) = gene_task_classes.get(gene_id) {
                        let entry = task_totals
                            .entry((task_class_id.clone(), task_label.clone()))
                            .or_insert((0, 0, 0, 0));
                        entry.0 += 1;
                        entry.2 += REPLAY_REASONING_TOKEN_FLOOR;
                    }
                }
                event if is_replay_validation_failure(event) => {
                    replay_failure_total += 1;
                }
                _ => {}
            }
        }

        // Each success/failure is priced at the flat token floor.
        (
            replay_success_total,
            replay_failure_total,
            replay_success_total * REPLAY_REASONING_TOKEN_FLOOR,
            replay_failure_total * REPLAY_REASONING_TOKEN_FLOOR,
        )
    } else {
        // Evidence path: sum the recorded economics directly.
        let mut replay_success_total = 0_u64;
        let mut replay_failure_total = 0_u64;
        let mut reasoning_avoided_tokens_total = 0_u64;
        let mut replay_fallback_cost_total = 0_u64;

        for evidence in &replay_evidences {
            if evidence.success {
                replay_success_total += 1;
            } else {
                replay_failure_total += 1;
            }
            reasoning_avoided_tokens_total += evidence.reasoning_avoided_tokens;
            replay_fallback_cost_total += evidence.replay_fallback_cost;

            let entry = task_totals
                .entry((evidence.task_class_id.clone(), evidence.task_label.clone()))
                .or_insert((0, 0, 0, 0));
            if evidence.success {
                entry.0 += 1;
            } else {
                entry.1 += 1;
            }
            entry.2 += evidence.reasoning_avoided_tokens;
            entry.3 += evidence.replay_fallback_cost;

            // Source attribution only exists when the evidence names a sender.
            if let Some(source_sender_id) = evidence.source_sender_id.as_deref() {
                let source_entry = source_totals
                    .entry(source_sender_id.to_string())
                    .or_insert((0, 0, 0, 0));
                if evidence.success {
                    source_entry.0 += 1;
                } else {
                    source_entry.1 += 1;
                }
                source_entry.2 += evidence.reasoning_avoided_tokens;
                source_entry.3 += evidence.replay_fallback_cost;
            }
        }

        (
            replay_success_total,
            replay_failure_total,
            reasoning_avoided_tokens_total,
            replay_fallback_cost_total,
        )
    };

    // Materialize the per-task-class and per-source metric rows, computing a
    // ROI figure for each from its own token/cost totals.
    let replay_task_classes = task_totals
        .into_iter()
        .map(
            |(
                (task_class_id, task_label),
                (
                    replay_success_total,
                    replay_failure_total,
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            )| ReplayTaskClassMetrics {
                task_class_id,
                task_label,
                replay_success_total,
                replay_failure_total,
                // One avoided reasoning step per successful replay.
                reasoning_steps_avoided_total: replay_success_total,
                reasoning_avoided_tokens_total,
                replay_fallback_cost_total,
                replay_roi: compute_replay_roi(
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            },
        )
        .collect::<Vec<_>>();
    let replay_sources = source_totals
        .into_iter()
        .map(
            |(
                source_sender_id,
                (
                    replay_success_total,
                    replay_failure_total,
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            )| ReplaySourceRoiMetrics {
                source_sender_id,
                replay_success_total,
                replay_failure_total,
                reasoning_avoided_tokens_total,
                replay_fallback_cost_total,
                replay_roi: compute_replay_roi(
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            },
        )
        .collect::<Vec<_>>();

    ReplayRoiAggregate {
        replay_attempts_total: replay_success_total + replay_failure_total,
        replay_success_total,
        replay_failure_total,
        reasoning_avoided_tokens_total,
        replay_fallback_cost_total,
        replay_task_classes,
        replay_sources,
    }
}
5924
5925fn replay_event_in_scope(stored: &StoredEvolutionEvent, cutoff: Option<DateTime<Utc>>) -> bool {
5926 match cutoff {
5927 Some(cutoff) => parse_event_timestamp(&stored.timestamp)
5928 .map(|timestamp| timestamp >= cutoff)
5929 .unwrap_or(false),
5930 None => true,
5931 }
5932}
5933
5934fn replay_roi_release_gate_summary(
5935 store: &dyn EvolutionStore,
5936 window_seconds: u64,
5937) -> Result<ReplayRoiWindowSummary, EvoKernelError> {
5938 let (events, projection) = scan_projection(store)?;
5939 let now = Utc::now();
5940 let cutoff = if window_seconds == 0 {
5941 None
5942 } else {
5943 let seconds = i64::try_from(window_seconds).unwrap_or(i64::MAX);
5944 Some(now - Duration::seconds(seconds))
5945 };
5946 let replay = collect_replay_roi_aggregate(&events, &projection, cutoff);
5947
5948 Ok(ReplayRoiWindowSummary {
5949 generated_at: now.to_rfc3339(),
5950 window_seconds,
5951 replay_attempts_total: replay.replay_attempts_total,
5952 replay_success_total: replay.replay_success_total,
5953 replay_failure_total: replay.replay_failure_total,
5954 reasoning_avoided_tokens_total: replay.reasoning_avoided_tokens_total,
5955 replay_fallback_cost_total: replay.replay_fallback_cost_total,
5956 replay_roi: compute_replay_roi(
5957 replay.reasoning_avoided_tokens_total,
5958 replay.replay_fallback_cost_total,
5959 ),
5960 replay_task_classes: replay.replay_task_classes,
5961 replay_sources: replay.replay_sources,
5962 })
5963}
5964
5965fn replay_roi_release_gate_contract(
5966 summary: &ReplayRoiWindowSummary,
5967 thresholds: ReplayRoiReleaseGateThresholds,
5968) -> ReplayRoiReleaseGateContract {
5969 let input = replay_roi_release_gate_input_contract(summary, thresholds);
5970 let output = evaluate_replay_roi_release_gate_contract_input(&input);
5971 ReplayRoiReleaseGateContract { input, output }
5972}
5973
5974fn replay_roi_release_gate_input_contract(
5975 summary: &ReplayRoiWindowSummary,
5976 thresholds: ReplayRoiReleaseGateThresholds,
5977) -> ReplayRoiReleaseGateInputContract {
5978 let replay_safety_signal = replay_roi_release_gate_safety_signal(summary);
5979 let replay_safety = replay_safety_signal.fail_closed_default
5980 && replay_safety_signal.rollback_ready
5981 && replay_safety_signal.audit_trail_complete
5982 && replay_safety_signal.has_replay_activity;
5983 ReplayRoiReleaseGateInputContract {
5984 generated_at: summary.generated_at.clone(),
5985 window_seconds: summary.window_seconds,
5986 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
5987 .iter()
5988 .map(|dimension| (*dimension).to_string())
5989 .collect(),
5990 replay_attempts_total: summary.replay_attempts_total,
5991 replay_success_total: summary.replay_success_total,
5992 replay_failure_total: summary.replay_failure_total,
5993 replay_hit_rate: safe_ratio(summary.replay_success_total, summary.replay_attempts_total),
5994 false_replay_rate: safe_ratio(summary.replay_failure_total, summary.replay_attempts_total),
5995 reasoning_avoided_tokens: summary.reasoning_avoided_tokens_total,
5996 replay_fallback_cost_total: summary.replay_fallback_cost_total,
5997 replay_roi: summary.replay_roi,
5998 replay_safety,
5999 replay_safety_signal,
6000 thresholds,
6001 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
6002 }
6003}
6004
6005fn replay_roi_release_gate_safety_signal(
6006 summary: &ReplayRoiWindowSummary,
6007) -> ReplayRoiReleaseGateSafetySignal {
6008 ReplayRoiReleaseGateSafetySignal {
6009 fail_closed_default: true,
6010 rollback_ready: summary.replay_failure_total == 0 || summary.replay_fallback_cost_total > 0,
6011 audit_trail_complete: summary.replay_attempts_total
6012 == summary.replay_success_total + summary.replay_failure_total,
6013 has_replay_activity: summary.replay_attempts_total > 0,
6014 }
6015}
6016
6017pub fn evaluate_replay_roi_release_gate_contract_input(
6018 input: &ReplayRoiReleaseGateInputContract,
6019) -> ReplayRoiReleaseGateOutputContract {
6020 let mut failed_checks = Vec::new();
6021 let mut evidence_refs = Vec::new();
6022 let mut indeterminate = false;
6023
6024 replay_release_gate_push_unique(&mut evidence_refs, "replay_roi_release_gate_summary");
6025 replay_release_gate_push_unique(
6026 &mut evidence_refs,
6027 format!("window_seconds:{}", input.window_seconds),
6028 );
6029 if input.generated_at.trim().is_empty() {
6030 replay_release_gate_record_failed_check(
6031 &mut failed_checks,
6032 &mut evidence_refs,
6033 "missing_generated_at",
6034 &["field:generated_at"],
6035 );
6036 indeterminate = true;
6037 } else {
6038 replay_release_gate_push_unique(
6039 &mut evidence_refs,
6040 format!("generated_at:{}", input.generated_at),
6041 );
6042 }
6043
6044 let expected_attempts_total = input.replay_success_total + input.replay_failure_total;
6045 if input.replay_attempts_total != expected_attempts_total {
6046 replay_release_gate_record_failed_check(
6047 &mut failed_checks,
6048 &mut evidence_refs,
6049 "invalid_attempt_accounting",
6050 &[
6051 "metric:replay_attempts_total",
6052 "metric:replay_success_total",
6053 "metric:replay_failure_total",
6054 ],
6055 );
6056 indeterminate = true;
6057 }
6058
6059 if input.replay_attempts_total == 0 {
6060 replay_release_gate_record_failed_check(
6061 &mut failed_checks,
6062 &mut evidence_refs,
6063 "missing_replay_attempts",
6064 &["metric:replay_attempts_total"],
6065 );
6066 indeterminate = true;
6067 }
6068
6069 if !replay_release_gate_rate_valid(input.replay_hit_rate) {
6070 replay_release_gate_record_failed_check(
6071 &mut failed_checks,
6072 &mut evidence_refs,
6073 "invalid_replay_hit_rate",
6074 &["metric:replay_hit_rate"],
6075 );
6076 indeterminate = true;
6077 }
6078 if !replay_release_gate_rate_valid(input.false_replay_rate) {
6079 replay_release_gate_record_failed_check(
6080 &mut failed_checks,
6081 &mut evidence_refs,
6082 "invalid_false_replay_rate",
6083 &["metric:false_replay_rate"],
6084 );
6085 indeterminate = true;
6086 }
6087
6088 if !input.replay_roi.is_finite() {
6089 replay_release_gate_record_failed_check(
6090 &mut failed_checks,
6091 &mut evidence_refs,
6092 "invalid_replay_roi",
6093 &["metric:replay_roi"],
6094 );
6095 indeterminate = true;
6096 }
6097
6098 let expected_hit_rate = safe_ratio(input.replay_success_total, input.replay_attempts_total);
6099 let expected_false_rate = safe_ratio(input.replay_failure_total, input.replay_attempts_total);
6100 if input.replay_attempts_total > 0
6101 && !replay_release_gate_float_eq(input.replay_hit_rate, expected_hit_rate)
6102 {
6103 replay_release_gate_record_failed_check(
6104 &mut failed_checks,
6105 &mut evidence_refs,
6106 "invalid_replay_hit_rate_consistency",
6107 &["metric:replay_hit_rate", "metric:replay_success_total"],
6108 );
6109 indeterminate = true;
6110 }
6111 if input.replay_attempts_total > 0
6112 && !replay_release_gate_float_eq(input.false_replay_rate, expected_false_rate)
6113 {
6114 replay_release_gate_record_failed_check(
6115 &mut failed_checks,
6116 &mut evidence_refs,
6117 "invalid_false_replay_rate_consistency",
6118 &["metric:false_replay_rate", "metric:replay_failure_total"],
6119 );
6120 indeterminate = true;
6121 }
6122
6123 if !(0.0..=1.0).contains(&input.thresholds.min_replay_hit_rate) {
6124 replay_release_gate_record_failed_check(
6125 &mut failed_checks,
6126 &mut evidence_refs,
6127 "invalid_threshold_min_replay_hit_rate",
6128 &["threshold:min_replay_hit_rate"],
6129 );
6130 indeterminate = true;
6131 }
6132 if !(0.0..=1.0).contains(&input.thresholds.max_false_replay_rate) {
6133 replay_release_gate_record_failed_check(
6134 &mut failed_checks,
6135 &mut evidence_refs,
6136 "invalid_threshold_max_false_replay_rate",
6137 &["threshold:max_false_replay_rate"],
6138 );
6139 indeterminate = true;
6140 }
6141 if !input.thresholds.min_replay_roi.is_finite() {
6142 replay_release_gate_record_failed_check(
6143 &mut failed_checks,
6144 &mut evidence_refs,
6145 "invalid_threshold_min_replay_roi",
6146 &["threshold:min_replay_roi"],
6147 );
6148 indeterminate = true;
6149 }
6150
6151 if input.replay_attempts_total < input.thresholds.min_replay_attempts {
6152 replay_release_gate_record_failed_check(
6153 &mut failed_checks,
6154 &mut evidence_refs,
6155 "min_replay_attempts_below_threshold",
6156 &[
6157 "threshold:min_replay_attempts",
6158 "metric:replay_attempts_total",
6159 ],
6160 );
6161 }
6162 if input.replay_attempts_total > 0
6163 && input.replay_hit_rate < input.thresholds.min_replay_hit_rate
6164 {
6165 replay_release_gate_record_failed_check(
6166 &mut failed_checks,
6167 &mut evidence_refs,
6168 "replay_hit_rate_below_threshold",
6169 &["threshold:min_replay_hit_rate", "metric:replay_hit_rate"],
6170 );
6171 }
6172 if input.replay_attempts_total > 0
6173 && input.false_replay_rate > input.thresholds.max_false_replay_rate
6174 {
6175 replay_release_gate_record_failed_check(
6176 &mut failed_checks,
6177 &mut evidence_refs,
6178 "false_replay_rate_above_threshold",
6179 &[
6180 "threshold:max_false_replay_rate",
6181 "metric:false_replay_rate",
6182 ],
6183 );
6184 }
6185 if input.reasoning_avoided_tokens < input.thresholds.min_reasoning_avoided_tokens {
6186 replay_release_gate_record_failed_check(
6187 &mut failed_checks,
6188 &mut evidence_refs,
6189 "reasoning_avoided_tokens_below_threshold",
6190 &[
6191 "threshold:min_reasoning_avoided_tokens",
6192 "metric:reasoning_avoided_tokens",
6193 ],
6194 );
6195 }
6196 if input.replay_roi < input.thresholds.min_replay_roi {
6197 replay_release_gate_record_failed_check(
6198 &mut failed_checks,
6199 &mut evidence_refs,
6200 "replay_roi_below_threshold",
6201 &["threshold:min_replay_roi", "metric:replay_roi"],
6202 );
6203 }
6204 if input.thresholds.require_replay_safety && !input.replay_safety {
6205 replay_release_gate_record_failed_check(
6206 &mut failed_checks,
6207 &mut evidence_refs,
6208 "replay_safety_required",
6209 &["metric:replay_safety", "threshold:require_replay_safety"],
6210 );
6211 }
6212
6213 failed_checks.sort();
6214 evidence_refs.sort();
6215
6216 let status = if failed_checks.is_empty() {
6217 ReplayRoiReleaseGateStatus::Pass
6218 } else if indeterminate {
6219 ReplayRoiReleaseGateStatus::Indeterminate
6220 } else {
6221 ReplayRoiReleaseGateStatus::FailClosed
6222 };
6223 let joined_checks = if failed_checks.is_empty() {
6224 "none".to_string()
6225 } else {
6226 failed_checks.join(",")
6227 };
6228 let summary = match status {
6229 ReplayRoiReleaseGateStatus::Pass => format!(
6230 "release gate pass: attempts={} hit_rate={:.3} false_replay_rate={:.3} reasoning_avoided_tokens={} replay_roi={:.3} replay_safety={}",
6231 input.replay_attempts_total,
6232 input.replay_hit_rate,
6233 input.false_replay_rate,
6234 input.reasoning_avoided_tokens,
6235 input.replay_roi,
6236 input.replay_safety
6237 ),
6238 ReplayRoiReleaseGateStatus::FailClosed => format!(
6239 "release gate fail_closed: failed_checks=[{}] attempts={} hit_rate={:.3} false_replay_rate={:.3} reasoning_avoided_tokens={} replay_roi={:.3} replay_safety={}",
6240 joined_checks,
6241 input.replay_attempts_total,
6242 input.replay_hit_rate,
6243 input.false_replay_rate,
6244 input.reasoning_avoided_tokens,
6245 input.replay_roi,
6246 input.replay_safety
6247 ),
6248 ReplayRoiReleaseGateStatus::Indeterminate => format!(
6249 "release gate indeterminate (fail-closed): failed_checks=[{}] attempts={} hit_rate={:.3} false_replay_rate={:.3} reasoning_avoided_tokens={} replay_roi={:.3} replay_safety={}",
6250 joined_checks,
6251 input.replay_attempts_total,
6252 input.replay_hit_rate,
6253 input.false_replay_rate,
6254 input.reasoning_avoided_tokens,
6255 input.replay_roi,
6256 input.replay_safety
6257 ),
6258 };
6259
6260 ReplayRoiReleaseGateOutputContract {
6261 status,
6262 failed_checks,
6263 evidence_refs,
6264 summary,
6265 }
6266}
6267
/// Record one failed release-gate check together with its evidence
/// references, deduplicating both lists while preserving insertion order.
fn replay_release_gate_record_failed_check(
    failed_checks: &mut Vec<String>,
    evidence_refs: &mut Vec<String>,
    check: &str,
    refs: &[&str],
) {
    if !failed_checks.iter().any(|existing| existing == check) {
        failed_checks.push(check.to_string());
    }
    for reference in refs {
        if !evidence_refs.iter().any(|existing| existing == reference) {
            evidence_refs.push((*reference).to_string());
        }
    }
}
6279
/// Append `entry` to `values` only if it is not already present
/// (order-preserving dedup for small lists).
fn replay_release_gate_push_unique(values: &mut Vec<String>, entry: impl Into<String>) {
    let entry = entry.into();
    // `Vec::contains` replaces the hand-rolled `iter().any(..)` scan.
    if !values.contains(&entry) {
        values.push(entry);
    }
}
6286
/// A rate is valid when it is a finite number inside the closed unit
/// interval [0.0, 1.0]; NaN and infinities are rejected.
fn replay_release_gate_rate_valid(value: f64) -> bool {
    value.is_finite() && value >= 0.0 && value <= 1.0
}
6290
/// Approximate float equality with an absolute tolerance of 1e-9.
/// NaN never compares equal (the difference is NaN, which fails `<=`).
fn replay_release_gate_float_eq(left: f64, right: f64) -> bool {
    const TOLERANCE: f64 = 1e-9;
    let difference = left - right;
    difference.abs() <= TOLERANCE
}
6294
6295fn evolution_health_snapshot(snapshot: &EvolutionMetricsSnapshot) -> EvolutionHealthSnapshot {
6296 EvolutionHealthSnapshot {
6297 status: "ok".into(),
6298 last_event_seq: snapshot.last_event_seq,
6299 promoted_genes: snapshot.promoted_genes,
6300 promoted_capsules: snapshot.promoted_capsules,
6301 }
6302}
6303
/// Render the evolution metrics snapshot plus store health in the Prometheus
/// text exposition format: `# HELP` / `# TYPE` headers followed by one sample
/// line per metric. Label values are escaped via `prometheus_label_value`.
fn render_evolution_metrics_prometheus(
    snapshot: &EvolutionMetricsSnapshot,
    health: &EvolutionHealthSnapshot,
) -> String {
    let mut out = String::new();
    // Global replay counters and the aggregate ROI gauge.
    out.push_str(
        "# HELP oris_evolution_replay_attempts_total Total replay attempts that reached validation.\n",
    );
    out.push_str("# TYPE oris_evolution_replay_attempts_total counter\n");
    out.push_str(&format!(
        "oris_evolution_replay_attempts_total {}\n",
        snapshot.replay_attempts_total
    ));
    out.push_str("# HELP oris_evolution_replay_success_total Total replay attempts that reused a capsule successfully.\n");
    out.push_str("# TYPE oris_evolution_replay_success_total counter\n");
    out.push_str(&format!(
        "oris_evolution_replay_success_total {}\n",
        snapshot.replay_success_total
    ));
    out.push_str("# HELP oris_evolution_replay_reasoning_avoided_total Total planner steps avoided by successful replay.\n");
    out.push_str("# TYPE oris_evolution_replay_reasoning_avoided_total counter\n");
    out.push_str(&format!(
        "oris_evolution_replay_reasoning_avoided_total {}\n",
        snapshot.replay_reasoning_avoided_total
    ));
    out.push_str("# HELP oris_evolution_reasoning_avoided_tokens_total Estimated reasoning tokens avoided by replay hits.\n");
    out.push_str("# TYPE oris_evolution_reasoning_avoided_tokens_total counter\n");
    out.push_str(&format!(
        "oris_evolution_reasoning_avoided_tokens_total {}\n",
        snapshot.reasoning_avoided_tokens_total
    ));
    out.push_str("# HELP oris_evolution_replay_fallback_cost_total Estimated reasoning token cost spent on replay fallbacks.\n");
    out.push_str("# TYPE oris_evolution_replay_fallback_cost_total counter\n");
    out.push_str(&format!(
        "oris_evolution_replay_fallback_cost_total {}\n",
        snapshot.replay_fallback_cost_total
    ));
    out.push_str("# HELP oris_evolution_replay_roi Net replay ROI in token space ((avoided - fallback_cost) / total).\n");
    out.push_str("# TYPE oris_evolution_replay_roi gauge\n");
    out.push_str(&format!(
        "oris_evolution_replay_roi {:.6}\n",
        snapshot.replay_roi
    ));
    // Per-task-class breakdowns: one sample per deterministic task class,
    // labelled with the (escaped) class id and human-readable label.
    out.push_str("# HELP oris_evolution_replay_utilization_by_task_class_total Successful replay reuse counts grouped by deterministic task class.\n");
    out.push_str("# TYPE oris_evolution_replay_utilization_by_task_class_total counter\n");
    for task_class in &snapshot.replay_task_classes {
        out.push_str(&format!(
            "oris_evolution_replay_utilization_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
            prometheus_label_value(&task_class.task_class_id),
            prometheus_label_value(&task_class.task_label),
            task_class.replay_success_total
        ));
    }
    out.push_str("# HELP oris_evolution_replay_reasoning_avoided_by_task_class_total Planner steps avoided by successful replay grouped by deterministic task class.\n");
    out.push_str("# TYPE oris_evolution_replay_reasoning_avoided_by_task_class_total counter\n");
    for task_class in &snapshot.replay_task_classes {
        out.push_str(&format!(
            "oris_evolution_replay_reasoning_avoided_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
            prometheus_label_value(&task_class.task_class_id),
            prometheus_label_value(&task_class.task_label),
            task_class.reasoning_steps_avoided_total
        ));
    }
    out.push_str("# HELP oris_evolution_reasoning_avoided_tokens_by_task_class_total Estimated reasoning tokens avoided by replay hits grouped by deterministic task class.\n");
    out.push_str("# TYPE oris_evolution_reasoning_avoided_tokens_by_task_class_total counter\n");
    for task_class in &snapshot.replay_task_classes {
        out.push_str(&format!(
            "oris_evolution_reasoning_avoided_tokens_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
            prometheus_label_value(&task_class.task_class_id),
            prometheus_label_value(&task_class.task_label),
            task_class.reasoning_avoided_tokens_total
        ));
    }
    out.push_str("# HELP oris_evolution_replay_fallback_cost_by_task_class_total Estimated fallback token cost grouped by deterministic task class.\n");
    out.push_str("# TYPE oris_evolution_replay_fallback_cost_by_task_class_total counter\n");
    for task_class in &snapshot.replay_task_classes {
        out.push_str(&format!(
            "oris_evolution_replay_fallback_cost_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
            prometheus_label_value(&task_class.task_class_id),
            prometheus_label_value(&task_class.task_label),
            task_class.replay_fallback_cost_total
        ));
    }
    out.push_str("# HELP oris_evolution_replay_roi_by_task_class Replay ROI in token space grouped by deterministic task class.\n");
    out.push_str("# TYPE oris_evolution_replay_roi_by_task_class gauge\n");
    for task_class in &snapshot.replay_task_classes {
        out.push_str(&format!(
            "oris_evolution_replay_roi_by_task_class{{task_class_id=\"{}\",task_label=\"{}\"}} {:.6}\n",
            prometheus_label_value(&task_class.task_class_id),
            prometheus_label_value(&task_class.task_label),
            task_class.replay_roi
        ));
    }
    // Per-source breakdowns keyed by remote sender id, used for cross-node
    // reconciliation of replay economics.
    out.push_str("# HELP oris_evolution_replay_roi_by_source Replay ROI in token space grouped by remote sender id for cross-node reconciliation.\n");
    out.push_str("# TYPE oris_evolution_replay_roi_by_source gauge\n");
    for source in &snapshot.replay_sources {
        out.push_str(&format!(
            "oris_evolution_replay_roi_by_source{{source_sender_id=\"{}\"}} {:.6}\n",
            prometheus_label_value(&source.source_sender_id),
            source.replay_roi
        ));
    }
    out.push_str("# HELP oris_evolution_reasoning_avoided_tokens_by_source_total Estimated reasoning tokens avoided grouped by remote sender id.\n");
    out.push_str("# TYPE oris_evolution_reasoning_avoided_tokens_by_source_total counter\n");
    for source in &snapshot.replay_sources {
        out.push_str(&format!(
            "oris_evolution_reasoning_avoided_tokens_by_source_total{{source_sender_id=\"{}\"}} {}\n",
            prometheus_label_value(&source.source_sender_id),
            source.reasoning_avoided_tokens_total
        ));
    }
    out.push_str("# HELP oris_evolution_replay_fallback_cost_by_source_total Estimated replay fallback token cost grouped by remote sender id.\n");
    out.push_str("# TYPE oris_evolution_replay_fallback_cost_by_source_total counter\n");
    for source in &snapshot.replay_sources {
        out.push_str(&format!(
            "oris_evolution_replay_fallback_cost_by_source_total{{source_sender_id=\"{}\"}} {}\n",
            prometheus_label_value(&source.source_sender_id),
            source.replay_fallback_cost_total
        ));
    }
    // Aggregate rates plus promotion / revocation counters.
    out.push_str("# HELP oris_evolution_replay_success_rate Successful replay attempts divided by replay attempts that reached validation.\n");
    out.push_str("# TYPE oris_evolution_replay_success_rate gauge\n");
    out.push_str(&format!(
        "oris_evolution_replay_success_rate {:.6}\n",
        snapshot.replay_success_rate
    ));
    out.push_str("# HELP oris_evolution_confidence_revalidations_total Total confidence-driven demotions that require revalidation before replay.\n");
    out.push_str("# TYPE oris_evolution_confidence_revalidations_total counter\n");
    out.push_str(&format!(
        "oris_evolution_confidence_revalidations_total {}\n",
        snapshot.confidence_revalidations_total
    ));
    out.push_str(
        "# HELP oris_evolution_mutation_declared_total Total declared mutations recorded in the evolution log.\n",
    );
    out.push_str("# TYPE oris_evolution_mutation_declared_total counter\n");
    out.push_str(&format!(
        "oris_evolution_mutation_declared_total {}\n",
        snapshot.mutation_declared_total
    ));
    out.push_str("# HELP oris_evolution_promoted_mutations_total Total mutations promoted by the governor.\n");
    out.push_str("# TYPE oris_evolution_promoted_mutations_total counter\n");
    out.push_str(&format!(
        "oris_evolution_promoted_mutations_total {}\n",
        snapshot.promoted_mutations_total
    ));
    out.push_str(
        "# HELP oris_evolution_promotion_ratio Promoted mutations divided by declared mutations.\n",
    );
    out.push_str("# TYPE oris_evolution_promotion_ratio gauge\n");
    out.push_str(&format!(
        "oris_evolution_promotion_ratio {:.6}\n",
        snapshot.promotion_ratio
    ));
    out.push_str("# HELP oris_evolution_gene_revocations_total Total gene revocations recorded in the evolution log.\n");
    out.push_str("# TYPE oris_evolution_gene_revocations_total counter\n");
    out.push_str(&format!(
        "oris_evolution_gene_revocations_total {}\n",
        snapshot.gene_revocations_total
    ));
    out.push_str("# HELP oris_evolution_mutation_velocity_last_hour Declared mutations observed in the last hour.\n");
    out.push_str("# TYPE oris_evolution_mutation_velocity_last_hour gauge\n");
    out.push_str(&format!(
        "oris_evolution_mutation_velocity_last_hour {}\n",
        snapshot.mutation_velocity_last_hour
    ));
    out.push_str("# HELP oris_evolution_revoke_frequency_last_hour Gene revocations observed in the last hour.\n");
    out.push_str("# TYPE oris_evolution_revoke_frequency_last_hour gauge\n");
    out.push_str(&format!(
        "oris_evolution_revoke_frequency_last_hour {}\n",
        snapshot.revoke_frequency_last_hour
    ));
    // Projection gauges and the current store position.
    out.push_str("# HELP oris_evolution_promoted_genes Current promoted genes in the evolution projection.\n");
    out.push_str("# TYPE oris_evolution_promoted_genes gauge\n");
    out.push_str(&format!(
        "oris_evolution_promoted_genes {}\n",
        snapshot.promoted_genes
    ));
    out.push_str("# HELP oris_evolution_promoted_capsules Current promoted capsules in the evolution projection.\n");
    out.push_str("# TYPE oris_evolution_promoted_capsules gauge\n");
    out.push_str(&format!(
        "oris_evolution_promoted_capsules {}\n",
        snapshot.promoted_capsules
    ));
    out.push_str("# HELP oris_evolution_store_last_event_seq Last visible append-only evolution event sequence.\n");
    out.push_str("# TYPE oris_evolution_store_last_event_seq gauge\n");
    out.push_str(&format!(
        "oris_evolution_store_last_event_seq {}\n",
        snapshot.last_event_seq
    ));
    // Health gauge: 1 when the store reports "ok", otherwise 0.
    out.push_str(
        "# HELP oris_evolution_health Evolution observability store health (1 = healthy).\n",
    );
    out.push_str("# TYPE oris_evolution_health gauge\n");
    out.push_str(&format!(
        "oris_evolution_health {}\n",
        u8::from(health.status == "ok")
    ));
    out
}
6504
6505fn count_recent_events(
6506 events: &[StoredEvolutionEvent],
6507 cutoff: DateTime<Utc>,
6508 predicate: impl Fn(&EvolutionEvent) -> bool,
6509) -> u64 {
6510 events
6511 .iter()
6512 .filter(|stored| {
6513 predicate(&stored.event)
6514 && parse_event_timestamp(&stored.timestamp)
6515 .map(|timestamp| timestamp >= cutoff)
6516 .unwrap_or(false)
6517 })
6518 .count() as u64
6519}
6520
6521fn prometheus_label_value(input: &str) -> String {
6522 input
6523 .replace('\\', "\\\\")
6524 .replace('\n', "\\n")
6525 .replace('"', "\\\"")
6526}
6527
6528fn parse_event_timestamp(raw: &str) -> Option<DateTime<Utc>> {
6529 DateTime::parse_from_rfc3339(raw)
6530 .ok()
6531 .map(|parsed| parsed.with_timezone(&Utc))
6532}
6533
6534fn is_replay_validation_failure(event: &EvolutionEvent) -> bool {
6535 matches!(
6536 event,
6537 EvolutionEvent::ValidationFailed {
6538 gene_id: Some(_),
6539 ..
6540 }
6541 )
6542}
6543
6544fn is_confidence_revalidation_event(event: &EvolutionEvent) -> bool {
6545 matches!(
6546 event,
6547 EvolutionEvent::PromotionEvaluated {
6548 state,
6549 reason,
6550 reason_code,
6551 ..
6552 }
6553 if *state == AssetState::Quarantined
6554 && (reason_code == &TransitionReasonCode::RevalidationConfidenceDecay
6555 || (reason_code == &TransitionReasonCode::Unspecified
6556 && reason.contains("confidence decayed")))
6557 )
6558}
6559
/// Divide `numerator` by `denominator` as floats, yielding 0.0 instead of
/// NaN/inf when the denominator is zero.
fn safe_ratio(numerator: u64, denominator: u64) -> f64 {
    match denominator {
        0 => 0.0,
        nonzero => numerator as f64 / nonzero as f64,
    }
}
6567
6568fn store_err(err: EvolutionError) -> EvoKernelError {
6569 EvoKernelError::Store(err.to_string())
6570}
6571
6572#[cfg(test)]
6573mod tests {
6574 use super::*;
6575 use oris_agent_contract::{
6576 AgentRole, CoordinationPlan, CoordinationPrimitive, CoordinationTask,
6577 };
6578 use oris_kernel::{
6579 AllowAllPolicy, InMemoryEventStore, KernelMode, KernelState, NoopActionExecutor,
6580 NoopStepFn, StateUpdatedOnlyReducer,
6581 };
6582 use serde::{Deserialize, Serialize};
6583
    // Minimal, serializable state type used only to satisfy the
    // `KernelState` bound in these tests; it carries no data.
    #[derive(Clone, Debug, Default, Serialize, Deserialize)]
    struct TestState;
6586
    impl KernelState for TestState {
        // Fixed schema version; tests never migrate state.
        fn version(&self) -> u32 {
            1
        }
    }
6592
    // The repair quality gate should accept a plan whose four required
    // sections (root cause / fix / verification / rollback) use semantically
    // equivalent Chinese labels, not just one canonical phrasing.
    #[test]
    fn repair_quality_gate_accepts_semantic_variants() {
        let plan = r#"
根本原因:脚本中拼写错误导致 unknown command 'process'。
修复建议:将 `proccess` 更正为 `process`,并统一命令入口。
验证方式:执行 `cargo check -p oris-runtime` 与回归测试。
恢复方案:若新入口异常,立即回滚到旧命令映射。
"#;
        let report = evaluate_repair_quality_gate(plan);
        assert!(report.passes());
        assert!(report.failed_checks().is_empty());
    }
6605
    // A plan with all four sections but no anchor naming the incident (the
    // "unknown command" text) must be rejected, and the failed check should
    // mention the missing anchor.
    #[test]
    fn repair_quality_gate_rejects_missing_incident_anchor() {
        let plan = r#"
原因分析:逻辑分支覆盖不足。
修复方案:补充分支与日志。
验证命令:cargo check -p oris-runtime
回滚方案:git revert HEAD
"#;
        let report = evaluate_repair_quality_gate(plan);
        assert!(!report.passes());
        assert!(report
            .failed_checks()
            .iter()
            .any(|check| check.contains("unknown command")));
    }
6621
6622 fn temp_workspace(name: &str) -> std::path::PathBuf {
6623 let root =
6624 std::env::temp_dir().join(format!("oris-evokernel-{name}-{}", std::process::id()));
6625 if root.exists() {
6626 fs::remove_dir_all(&root).unwrap();
6627 }
6628 fs::create_dir_all(root.join("src")).unwrap();
6629 fs::write(
6630 root.join("Cargo.toml"),
6631 "[package]\nname = \"sample\"\nversion = \"0.1.0\"\nedition = \"2021\"\n",
6632 )
6633 .unwrap();
6634 fs::write(root.join("Cargo.lock"), "# lock\n").unwrap();
6635 fs::write(root.join("src/lib.rs"), "pub fn demo() -> usize { 1 }\n").unwrap();
6636 root
6637 }
6638
    // Build an in-memory kernel with no-op executor/step/policy components so
    // EvoKernel tests run without real event storage, effects, or snapshots.
    fn test_kernel() -> Arc<Kernel<TestState>> {
        Arc::new(Kernel::<TestState> {
            events: Box::new(InMemoryEventStore::new()),
            snaps: None,
            reducer: Box::new(StateUpdatedOnlyReducer),
            exec: Box::new(NoopActionExecutor),
            step: Box::new(NoopStepFn),
            policy: Box::new(AllowAllPolicy),
            effect_sink: None,
            mode: KernelMode::Normal,
        })
    }
6651
6652 fn lightweight_plan() -> ValidationPlan {
6653 ValidationPlan {
6654 profile: "test".into(),
6655 stages: vec![ValidationStage::Command {
6656 program: "git".into(),
6657 args: vec!["--version".into()],
6658 timeout_ms: 5_000,
6659 }],
6660 }
6661 }
6662
    // Low-risk prepared-mutation fixture: a diff that only adds README.md,
    // with the mutation target restricted to that single path. The diff text
    // must stay byte-exact — its content feeds the artifact hash.
    fn sample_mutation() -> PreparedMutation {
        prepare_mutation(
            MutationIntent {
                id: "mutation-1".into(),
                intent: "add README".into(),
                target: MutationTarget::Paths {
                    allow: vec!["README.md".into()],
                },
                expected_effect: "repo still builds".into(),
                risk: RiskLevel::Low,
                signals: vec!["missing readme".into()],
                spec_id: None,
            },
            "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1 @@
+# sample
"
            .into(),
            Some("HEAD".into()),
        )
    }
6689
6690 fn base_sandbox_policy() -> SandboxPolicy {
6691 SandboxPolicy {
6692 allowed_programs: vec!["git".into()],
6693 max_duration_ms: 60_000,
6694 max_output_bytes: 1024 * 1024,
6695 denied_env_prefixes: Vec::new(),
6696 }
6697 }
6698
6699 fn command_validator() -> Arc<dyn Validator> {
6700 Arc::new(CommandValidator::new(base_sandbox_policy()))
6701 }
6702
6703 fn replay_input(signal: &str) -> SelectorInput {
6704 let rustc_version = std::process::Command::new("rustc")
6705 .arg("--version")
6706 .output()
6707 .ok()
6708 .filter(|output| output.status.success())
6709 .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
6710 .unwrap_or_else(|| "rustc unknown".into());
6711 SelectorInput {
6712 signals: vec![signal.into()],
6713 env: EnvFingerprint {
6714 rustc_version,
6715 cargo_lock_hash: compute_artifact_hash("# lock\n"),
6716 target_triple: format!(
6717 "{}-unknown-{}",
6718 std::env::consts::ARCH,
6719 std::env::consts::OS
6720 ),
6721 os: std::env::consts::OS.into(),
6722 },
6723 spec_id: None,
6724 limit: 1,
6725 }
6726 }
6727
    // Assemble an EvoKernel over a fresh temp workspace and a local-process
    // sandbox, configured with a permissive governor (promote after a single
    // success), the lightweight validation plan, and the base sandbox policy.
    fn build_test_evo_with_store(
        name: &str,
        run_id: &str,
        validator: Arc<dyn Validator>,
        store: Arc<dyn EvolutionStore>,
    ) -> EvoKernel<TestState> {
        let workspace = temp_workspace(name);
        let sandbox: Arc<dyn Sandbox> = Arc::new(oris_sandbox::LocalProcessSandbox::new(
            run_id,
            &workspace,
            std::env::temp_dir(),
        ));
        EvoKernel::new(test_kernel(), sandbox, validator, store)
            .with_governor(Arc::new(DefaultGovernor::new(
                oris_governor::GovernorConfig {
                    promote_after_successes: 1,
                    ..Default::default()
                },
            )))
            .with_validation_plan(lightweight_plan())
            .with_sandbox_policy(base_sandbox_policy())
    }
6750
    // Like `build_test_evo_with_store`, but also creates a process-scoped
    // JSONL store directory (wiped if it already exists) and returns the
    // store handle alongside the kernel so tests can inspect appended events.
    fn build_test_evo(
        name: &str,
        run_id: &str,
        validator: Arc<dyn Validator>,
    ) -> (EvoKernel<TestState>, Arc<dyn EvolutionStore>) {
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-{name}-store-{}",
            std::process::id()
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let evo = build_test_evo_with_store(name, run_id, validator, store.clone());
        (evo, store)
    }
6768
    // Convenience wrapper around `remote_publish_envelope_with_env` that uses
    // the environment fingerprint derived from the local `replay_input` for
    // the same signal.
    fn remote_publish_envelope(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        signal: &str,
        file_name: &str,
        line: &str,
    ) -> EvolutionEnvelope {
        remote_publish_envelope_with_env(
            sender_id,
            run_id,
            gene_id,
            capsule_id,
            mutation_id,
            signal,
            file_name,
            line,
            replay_input(signal).env,
        )
    }
6791
    // Build a publish envelope from `sender_id` carrying the asset set a peer
    // broadcasts after a successful run: a declared mutation, a promoted
    // gene, a promoted capsule (diff hash taken from the mutation artifact,
    // fixed 0.9 confidence), and the capsule-released event.
    fn remote_publish_envelope_with_env(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        signal: &str,
        file_name: &str,
        line: &str,
        env: EnvFingerprint,
    ) -> EvolutionEnvelope {
        // Single-file diff that adds `file_name` containing `line`.
        let mutation = prepare_mutation(
            MutationIntent {
                id: mutation_id.into(),
                intent: format!("add {file_name}"),
                target: MutationTarget::Paths {
                    allow: vec![file_name.into()],
                },
                expected_effect: "replay should still validate".into(),
                risk: RiskLevel::Low,
                signals: vec![signal.into()],
                spec_id: None,
            },
            format!(
                "\
diff --git a/{file_name} b/{file_name}
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/{file_name}
@@ -0,0 +1 @@
+{line}
"
            ),
            Some("HEAD".into()),
        );
        // Gene and capsule are both published already in the Promoted state.
        let gene = Gene {
            id: gene_id.into(),
            signals: vec![signal.into()],
            strategy: vec![file_name.into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        };
        let capsule = Capsule {
            id: capsule_id.into(),
            gene_id: gene_id.into(),
            mutation_id: mutation_id.into(),
            run_id: run_id.into(),
            diff_hash: mutation.artifact.content_hash.clone(),
            confidence: 0.9,
            env,
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec![file_name.into()],
                validator_hash: "validator-hash".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        };
        EvolutionEnvelope::publish(
            sender_id,
            vec![
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared { mutation },
                },
                NetworkAsset::Gene { gene: gene.clone() },
                NetworkAsset::Capsule {
                    capsule: capsule.clone(),
                },
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::CapsuleReleased {
                        capsule_id: capsule.id.clone(),
                        state: AssetState::Promoted,
                    },
                },
            ],
        )
    }
6873
    // Variant of `remote_publish_envelope_with_env` that lets the mutation
    // and the gene carry independent signal lists, for tests exercising
    // signal-mismatch paths.
    fn remote_publish_envelope_with_signals(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        mutation_signals: Vec<String>,
        gene_signals: Vec<String>,
        file_name: &str,
        line: &str,
        env: EnvFingerprint,
    ) -> EvolutionEnvelope {
        // Single-file diff that adds `file_name` containing `line`.
        let mutation = prepare_mutation(
            MutationIntent {
                id: mutation_id.into(),
                intent: format!("add {file_name}"),
                target: MutationTarget::Paths {
                    allow: vec![file_name.into()],
                },
                expected_effect: "replay should still validate".into(),
                risk: RiskLevel::Low,
                signals: mutation_signals,
                spec_id: None,
            },
            format!(
                "\
diff --git a/{file_name} b/{file_name}
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/{file_name}
@@ -0,0 +1 @@
+{line}
"
            ),
            Some("HEAD".into()),
        );
        // Gene and capsule are both published already in the Promoted state.
        let gene = Gene {
            id: gene_id.into(),
            signals: gene_signals,
            strategy: vec![file_name.into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        };
        let capsule = Capsule {
            id: capsule_id.into(),
            gene_id: gene_id.into(),
            mutation_id: mutation_id.into(),
            run_id: run_id.into(),
            diff_hash: mutation.artifact.content_hash.clone(),
            confidence: 0.9,
            env,
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec![file_name.into()],
                validator_hash: "validator-hash".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        };
        EvolutionEnvelope::publish(
            sender_id,
            vec![
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared { mutation },
                },
                NetworkAsset::Gene { gene: gene.clone() },
                NetworkAsset::Capsule {
                    capsule: capsule.clone(),
                },
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::CapsuleReleased {
                        capsule_id: capsule.id.clone(),
                        state: AssetState::Promoted,
                    },
                },
            ],
        )
    }
6956
    // Validator test double that always reports the configured outcome.
    struct FixedValidator {
        success: bool,
    }
6960
    #[async_trait]
    impl Validator for FixedValidator {
        // Ignores the sandbox receipt and returns a canned report: the fixed
        // success flag, a one-millisecond duration, no stages, and a log line
        // tagged with the plan profile.
        async fn run(
            &self,
            _receipt: &SandboxReceipt,
            plan: &ValidationPlan,
        ) -> Result<ValidationReport, ValidationError> {
            Ok(ValidationReport {
                success: self.success,
                duration_ms: 1,
                stages: Vec::new(),
                logs: if self.success {
                    format!("{} ok", plan.profile)
                } else {
                    format!("{} failed", plan.profile)
                },
            })
        }
    }
6980
    // Evolution store wrapper that delegates to a JSONL store but fails one
    // specific `append_event` call, for exercising append-failure paths.
    struct FailOnAppendStore {
        inner: JsonlEvolutionStore,
        // 1-based index of the append call that should fail.
        fail_on_call: usize,
        // Number of `append_event` calls observed so far.
        call_count: Mutex<usize>,
    }
6986
6987 impl FailOnAppendStore {
6988 fn new(root_dir: std::path::PathBuf, fail_on_call: usize) -> Self {
6989 Self {
6990 inner: JsonlEvolutionStore::new(root_dir),
6991 fail_on_call,
6992 call_count: Mutex::new(0),
6993 }
6994 }
6995 }
6996
    impl EvolutionStore for FailOnAppendStore {
        // Counts every append (1-based) and fails exactly on the configured
        // call number; all other appends and all reads delegate to the inner
        // store. A poisoned counter lock is surfaced as an I/O error.
        fn append_event(&self, event: EvolutionEvent) -> Result<u64, EvolutionError> {
            let mut call_count = self
                .call_count
                .lock()
                .map_err(|_| EvolutionError::Io("test store lock poisoned".into()))?;
            *call_count += 1;
            if *call_count == self.fail_on_call {
                return Err(EvolutionError::Io("injected append failure".into()));
            }
            self.inner.append_event(event)
        }

        fn scan(&self, from_seq: u64) -> Result<Vec<StoredEvolutionEvent>, EvolutionError> {
            self.inner.scan(from_seq)
        }

        fn rebuild_projection(&self) -> Result<EvolutionProjection, EvolutionError> {
            self.inner.rebuild_projection()
        }
    }
7018
    // Sequential planner -> coder plan: both tasks complete in dependency
    // order and a planner-to-coder handoff message is recorded for the coder
    // task.
    #[test]
    fn coordination_planner_to_coder_handoff_is_deterministic() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "ship feature".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "planner".into(),
                    role: AgentRole::Planner,
                    description: "split the work".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "implement the patch".into(),
                    depends_on: vec!["planner".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["planner", "coder"]);
        assert!(result.failed_tasks.is_empty());
        assert!(result.messages.iter().any(|message| {
            message.from_role == AgentRole::Planner
                && message.to_role == AgentRole::Coder
                && message.task_id == "coder"
        }));
    }
7050
    // The coder task fails (note the "force-fail" description, which these
    // tests rely on to trigger failure); the dependent repair task then runs
    // and completes, producing a coder-to-repair handoff message.
    #[test]
    fn coordination_repair_runs_only_after_coder_failure() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "fix broken implementation".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "force-fail initial implementation".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "repair".into(),
                    role: AgentRole::Repair,
                    description: "patch the failed implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["repair"]);
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert!(result.messages.iter().any(|message| {
            message.from_role == AgentRole::Coder
                && message.to_role == AgentRole::Repair
                && message.task_id == "repair"
        }));
    }
7082
    // After a successful coder task, the dependent optimizer task runs and
    // both complete with no failures.
    #[test]
    fn coordination_optimizer_runs_after_successful_implementation_step() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "ship optimized patch".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "implement a working patch".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "optimizer".into(),
                    role: AgentRole::Optimizer,
                    description: "tighten the implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["coder", "optimizer"]);
        assert!(result.failed_tasks.is_empty());
    }
7109
    // Parallel mode: independent tasks in the same wave are merged in sorted
    // id order ("a-task" before "z-task"); the task depending on both runs in
    // the following wave.
    #[test]
    fn coordination_parallel_waves_preserve_sorted_merge_order() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "parallelize safe tasks".into(),
            primitive: CoordinationPrimitive::Parallel,
            tasks: vec![
                CoordinationTask {
                    id: "z-task".into(),
                    role: AgentRole::Planner,
                    description: "analyze z".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "a-task".into(),
                    role: AgentRole::Coder,
                    description: "implement a".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "mid-task".into(),
                    role: AgentRole::Optimizer,
                    description: "polish after both".into(),
                    depends_on: vec!["z-task".into(), "a-task".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["a-task", "z-task", "mid-task"]);
        assert!(result.failed_tasks.is_empty());
    }
7142
    // With max_retries = 1 a persistently failing task is attempted twice —
    // two failure messages are recorded — and then reported as failed.
    #[test]
    fn coordination_retries_stop_at_max_retries() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "retry then stop".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![CoordinationTask {
                id: "coder".into(),
                role: AgentRole::Coder,
                description: "force-fail this task".into(),
                depends_on: Vec::new(),
            }],
            timeout_ms: 5_000,
            max_retries: 1,
        });

        assert!(result.completed_tasks.is_empty());
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert_eq!(
            result
                .messages
                .iter()
                .filter(|message| message.task_id == "coder" && message.content.contains("failed"))
                .count(),
            2
        );
    }
7169
#[test]
fn coordination_conditional_mode_skips_downstream_tasks_on_failure() {
    // In Conditional mode a failed task's dependents are skipped, not failed:
    // the optimizer gets a skip message but must not land in failed_tasks.
    let plan = CoordinationPlan {
        root_goal: "skip blocked follow-up work".into(),
        primitive: CoordinationPrimitive::Conditional,
        tasks: vec![
            CoordinationTask {
                id: "coder".into(),
                role: AgentRole::Coder,
                description: "force-fail the implementation".into(),
                depends_on: Vec::new(),
            },
            CoordinationTask {
                id: "optimizer".into(),
                role: AgentRole::Optimizer,
                description: "only optimize a successful implementation".into(),
                depends_on: vec!["coder".into()],
            },
        ],
        timeout_ms: 5_000,
        max_retries: 0,
    };

    let outcome = MultiAgentCoordinator::new().coordinate(plan);

    assert!(outcome.completed_tasks.is_empty());
    assert_eq!(outcome.failed_tasks, vec!["coder"]);
    let optimizer_skipped = outcome.messages.iter().any(|msg| {
        msg.task_id == "optimizer"
            && msg
                .content
                .contains("skipped due to failed dependency chain")
    });
    assert!(optimizer_skipped);
    // Skipped is not the same as failed.
    assert!(!outcome
        .failed_tasks
        .iter()
        .any(|task_id| task_id == "optimizer"));
}
7206
#[tokio::test]
async fn command_validator_aggregates_stage_reports() {
    // Runs a single command stage (`git --version`) through the validator and
    // checks the report contains one stage entry per configured stage.
    let workspace = temp_workspace("validator");
    // Minimal receipt: the validator only needs a workdir and log paths here.
    let receipt = SandboxReceipt {
        mutation_id: "m".into(),
        workdir: workspace,
        applied: true,
        changed_files: Vec::new(),
        patch_hash: "hash".into(),
        stdout_log: std::env::temp_dir().join("stdout.log"),
        stderr_log: std::env::temp_dir().join("stderr.log"),
    };
    // Policy allows only `git`, so the stage below is the sole runnable command.
    let validator = CommandValidator::new(SandboxPolicy {
        allowed_programs: vec!["git".into()],
        max_duration_ms: 1_000,
        max_output_bytes: 1024,
        denied_env_prefixes: Vec::new(),
    });
    let report = validator
        .run(
            &receipt,
            &ValidationPlan {
                profile: "test".into(),
                stages: vec![ValidationStage::Command {
                    program: "git".into(),
                    args: vec!["--version".into()],
                    timeout_ms: 1_000,
                }],
            },
        )
        .await
        .unwrap();
    // One configured stage -> one aggregated stage report.
    assert_eq!(report.stages.len(), 1);
}
7241
7242 #[tokio::test]
7243 async fn capture_successful_mutation_appends_capsule() {
7244 let (evo, store) = build_test_evo("capture", "run-1", command_validator());
7245 let capsule = evo
7246 .capture_successful_mutation(&"run-1".into(), sample_mutation())
7247 .await
7248 .unwrap();
7249 let events = store.scan(1).unwrap();
7250 assert!(events
7251 .iter()
7252 .any(|stored| matches!(stored.event, EvolutionEvent::CapsuleCommitted { .. })));
7253 assert!(!capsule.id.is_empty());
7254 }
7255
#[tokio::test]
async fn replay_hit_records_capsule_reused() {
    // Captures a capsule under run "run-2", replays under a different run id,
    // and verifies the hit is fully evidenced and journaled as CapsuleReused.
    let (evo, store) = build_test_evo("replay", "run-2", command_validator());
    let capsule = evo
        .capture_successful_mutation(&"run-2".into(), sample_mutation())
        .await
        .unwrap();
    let replay_run_id = "run-replay".to_string();
    let decision = evo
        .replay_or_fallback_for_run(&replay_run_id, replay_input("missing readme"))
        .await
        .unwrap();
    assert!(decision.used_capsule);
    assert_eq!(decision.capsule_id, Some(capsule.id));
    // Detect evidence: a task class was matched on signals with no mismatches.
    assert!(!decision.detect_evidence.task_class_id.is_empty());
    assert!(!decision.detect_evidence.matched_signals.is_empty());
    assert!(decision.detect_evidence.mismatch_reasons.is_empty());
    // Select evidence: candidates were scored (not the exact-match shortcut)
    // and the selected capsule matches the one the decision reports.
    assert!(!decision.select_evidence.candidates.is_empty());
    assert!(!decision.select_evidence.exact_match_lookup);
    assert_eq!(
        decision.select_evidence.selected_capsule_id.as_deref(),
        decision.capsule_id.as_deref()
    );
    // The reuse event keeps the original capture run id ("run-2") and records
    // the replaying run separately in replay_run_id.
    assert!(store.scan(1).unwrap().iter().any(|stored| matches!(
        &stored.event,
        EvolutionEvent::CapsuleReused {
            run_id,
            replay_run_id: Some(current_replay_run_id),
            ..
        } if run_id == "run-2" && current_replay_run_id == &replay_run_id
    )));
}
7288
#[tokio::test]
async fn legacy_replay_executor_api_preserves_original_capsule_run_id() {
    // Drives the older StoreReplayExecutor API directly (no replay run id) and
    // verifies the reuse event keeps the capture run id with replay_run_id: None.
    let capture_run_id = "run-legacy-capture".to_string();
    let (evo, store) = build_test_evo("replay-legacy", &capture_run_id, command_validator());
    let capsule = evo
        .capture_successful_mutation(&capture_run_id, sample_mutation())
        .await
        .unwrap();
    // Assemble the executor from the same components the facade uses, so the
    // legacy path exercises identical store/selector/governor wiring.
    let executor = StoreReplayExecutor {
        sandbox: evo.sandbox.clone(),
        validator: evo.validator.clone(),
        store: evo.store.clone(),
        selector: evo.selector.clone(),
        governor: evo.governor.clone(),
        economics: Some(evo.economics.clone()),
        remote_publishers: Some(evo.remote_publishers.clone()),
        stake_policy: evo.stake_policy.clone(),
    };

    let decision = executor
        .try_replay(
            &replay_input("missing readme"),
            &evo.sandbox_policy,
            &evo.validation_plan,
        )
        .await
        .unwrap();

    assert!(decision.used_capsule);
    assert_eq!(decision.capsule_id, Some(capsule.id));
    // Legacy API has no replay run id, so the event records None while still
    // preserving the original capture run id.
    assert!(store.scan(1).unwrap().iter().any(|stored| matches!(
        &stored.event,
        EvolutionEvent::CapsuleReused {
            run_id,
            replay_run_id: None,
            ..
        } if run_id == &capture_run_id
    )));
}
7328
7329 #[tokio::test]
7330 async fn metrics_snapshot_tracks_replay_promotion_and_revocation_signals() {
7331 let (evo, _) = build_test_evo("metrics", "run-metrics", command_validator());
7332 let capsule = evo
7333 .capture_successful_mutation(&"run-metrics".into(), sample_mutation())
7334 .await
7335 .unwrap();
7336 let decision = evo
7337 .replay_or_fallback(replay_input("missing readme"))
7338 .await
7339 .unwrap();
7340 assert!(decision.used_capsule);
7341
7342 evo.revoke_assets(&RevokeNotice {
7343 sender_id: "node-metrics".into(),
7344 asset_ids: vec![capsule.id.clone()],
7345 reason: "manual test revoke".into(),
7346 })
7347 .unwrap();
7348
7349 let snapshot = evo.metrics_snapshot().unwrap();
7350 assert_eq!(snapshot.replay_attempts_total, 1);
7351 assert_eq!(snapshot.replay_success_total, 1);
7352 assert_eq!(snapshot.replay_success_rate, 1.0);
7353 assert_eq!(snapshot.confidence_revalidations_total, 0);
7354 assert_eq!(snapshot.replay_reasoning_avoided_total, 1);
7355 assert_eq!(
7356 snapshot.reasoning_avoided_tokens_total,
7357 decision.economics_evidence.reasoning_avoided_tokens
7358 );
7359 assert_eq!(snapshot.replay_fallback_cost_total, 0);
7360 assert_eq!(snapshot.replay_roi, 1.0);
7361 assert_eq!(snapshot.replay_task_classes.len(), 1);
7362 assert_eq!(snapshot.replay_task_classes[0].replay_success_total, 1);
7363 assert_eq!(snapshot.replay_task_classes[0].replay_failure_total, 0);
7364 assert_eq!(
7365 snapshot.replay_task_classes[0].reasoning_steps_avoided_total,
7366 1
7367 );
7368 assert_eq!(
7369 snapshot.replay_task_classes[0].replay_fallback_cost_total,
7370 0
7371 );
7372 assert_eq!(snapshot.replay_task_classes[0].replay_roi, 1.0);
7373 assert!(snapshot.replay_sources.is_empty());
7374 assert_eq!(snapshot.confidence_revalidations_total, 0);
7375 assert_eq!(snapshot.mutation_declared_total, 1);
7376 assert_eq!(snapshot.promoted_mutations_total, 1);
7377 assert_eq!(snapshot.promotion_ratio, 1.0);
7378 assert_eq!(snapshot.gene_revocations_total, 1);
7379 assert_eq!(snapshot.mutation_velocity_last_hour, 1);
7380 assert_eq!(snapshot.revoke_frequency_last_hour, 1);
7381 assert_eq!(snapshot.promoted_genes, 0);
7382 assert_eq!(snapshot.promoted_capsules, 0);
7383
7384 let rendered = evo.render_metrics_prometheus().unwrap();
7385 assert!(rendered.contains("oris_evolution_replay_reasoning_avoided_total 1"));
7386 assert!(rendered.contains("oris_evolution_reasoning_avoided_tokens_total"));
7387 assert!(rendered.contains("oris_evolution_replay_fallback_cost_total"));
7388 assert!(rendered.contains("oris_evolution_replay_roi 1.000000"));
7389 assert!(rendered.contains("oris_evolution_replay_utilization_by_task_class_total"));
7390 assert!(rendered.contains("oris_evolution_replay_reasoning_avoided_by_task_class_total"));
7391 assert!(rendered.contains("oris_evolution_replay_success_rate 1.000000"));
7392 assert!(rendered.contains("oris_evolution_confidence_revalidations_total 0"));
7393 assert!(rendered.contains("oris_evolution_promotion_ratio 1.000000"));
7394 assert!(rendered.contains("oris_evolution_revoke_frequency_last_hour 1"));
7395 assert!(rendered.contains("oris_evolution_mutation_velocity_last_hour 1"));
7396 assert!(rendered.contains("oris_evolution_health 1"));
7397 }
7398
#[tokio::test]
async fn replay_roi_release_gate_summary_matches_metrics_snapshot_for_legacy_replay_history() {
    // Seeds the store with hand-written legacy events (a direct CapsuleReused
    // append plus a gene-linked ValidationFailed) and verifies the release-gate
    // summary agrees with the metrics snapshot on every shared counter,
    // globally and per task class.
    let (evo, _) = build_test_evo("roi-legacy", "run-roi-legacy", command_validator());
    let capsule = evo
        .capture_successful_mutation(&"run-roi-legacy".into(), sample_mutation())
        .await
        .unwrap();

    // Legacy-style reuse: appended directly rather than via replay_or_fallback.
    evo.store
        .append_event(EvolutionEvent::CapsuleReused {
            capsule_id: capsule.id.clone(),
            gene_id: capsule.gene_id.clone(),
            run_id: capsule.run_id.clone(),
            replay_run_id: Some("run-roi-legacy-replay".into()),
        })
        .unwrap();
    // Legacy-style replay failure tied to the same gene.
    evo.store
        .append_event(EvolutionEvent::ValidationFailed {
            mutation_id: "legacy-replay-failure".into(),
            report: ValidationSnapshot {
                success: false,
                profile: "test".into(),
                duration_ms: 1,
                summary: "legacy replay validation failed".into(),
            },
            gene_id: Some(capsule.gene_id.clone()),
        })
        .unwrap();

    let metrics = evo.metrics_snapshot().unwrap();
    // Window 0: the summary covers the full history (no time filtering).
    let summary = evo.replay_roi_release_gate_summary(0).unwrap();
    let task_class = &metrics.replay_task_classes[0];

    // One reuse + one failure = two attempts, one success.
    assert_eq!(metrics.replay_attempts_total, 2);
    assert_eq!(metrics.replay_success_total, 1);
    assert_eq!(summary.replay_attempts_total, metrics.replay_attempts_total);
    assert_eq!(summary.replay_success_total, metrics.replay_success_total);
    assert_eq!(
        summary.replay_failure_total,
        metrics.replay_attempts_total - metrics.replay_success_total
    );
    assert_eq!(
        summary.reasoning_avoided_tokens_total,
        metrics.reasoning_avoided_tokens_total
    );
    assert_eq!(
        summary.replay_fallback_cost_total,
        metrics.replay_fallback_cost_total
    );
    assert_eq!(summary.replay_roi, metrics.replay_roi);
    // The single task class is reported identically by both views.
    assert_eq!(summary.replay_task_classes.len(), 1);
    assert_eq!(
        summary.replay_task_classes[0].task_class_id,
        task_class.task_class_id
    );
    assert_eq!(
        summary.replay_task_classes[0].replay_success_total,
        task_class.replay_success_total
    );
    assert_eq!(
        summary.replay_task_classes[0].replay_failure_total,
        task_class.replay_failure_total
    );
    assert_eq!(
        summary.replay_task_classes[0].reasoning_avoided_tokens_total,
        task_class.reasoning_avoided_tokens_total
    );
    assert_eq!(
        summary.replay_task_classes[0].replay_fallback_cost_total,
        task_class.replay_fallback_cost_total
    );
}
7471
#[tokio::test]
async fn replay_roi_release_gate_summary_aggregates_task_class_and_remote_source() {
    // Imports a remote capsule, produces one miss and one hit against it, and
    // verifies the release-gate summary aggregates by task class and by the
    // remote sender that published the reused capsule.
    let (evo, _) = build_test_evo("roi-summary", "run-roi-summary", command_validator());
    let envelope = remote_publish_envelope(
        "node-roi",
        "run-remote-roi",
        "gene-roi",
        "capsule-roi",
        "mutation-roi",
        "roi-signal",
        "ROI.md",
        "# roi",
    );
    evo.import_remote_envelope(&envelope).unwrap();

    // A signal with no overlap with any stored gene must fall back to planning.
    let miss = evo
        .replay_or_fallback(replay_input("entropy-hash-12345-no-overlap"))
        .await
        .unwrap();
    assert!(!miss.used_capsule);
    assert!(miss.fallback_to_planner);
    assert!(miss.select_evidence.candidates.is_empty());
    assert!(miss
        .detect_evidence
        .mismatch_reasons
        .iter()
        .any(|reason| reason == "no_candidate_after_select"));

    // The exact published signal hits the imported remote capsule.
    let hit = evo
        .replay_or_fallback(replay_input("roi-signal"))
        .await
        .unwrap();
    assert!(hit.used_capsule);
    assert!(!hit.select_evidence.candidates.is_empty());
    assert_eq!(
        hit.select_evidence.selected_capsule_id.as_deref(),
        hit.capsule_id.as_deref()
    );

    // A one-hour window comfortably covers both attempts made just above.
    let summary = evo.replay_roi_release_gate_summary(60 * 60).unwrap();
    assert_eq!(summary.replay_attempts_total, 2);
    assert_eq!(summary.replay_success_total, 1);
    assert_eq!(summary.replay_failure_total, 1);
    assert!(summary.reasoning_avoided_tokens_total > 0);
    assert!(summary.replay_fallback_cost_total > 0);
    assert!(summary
        .replay_task_classes
        .iter()
        .any(|entry| { entry.replay_success_total == 1 && entry.replay_failure_total == 0 }));
    // The successful replay is attributed to the remote publisher.
    assert!(summary.replay_sources.iter().any(|source| {
        source.source_sender_id == "node-roi" && source.replay_success_total == 1
    }));

    // JSON rendering carries the same totals and source attribution.
    let rendered = evo
        .render_replay_roi_release_gate_summary_json(60 * 60)
        .unwrap();
    assert!(rendered.contains("\"replay_attempts_total\": 2"));
    assert!(rendered.contains("\"source_sender_id\": \"node-roi\""));
}
7531
#[tokio::test]
async fn replay_roi_release_gate_summary_contract_exposes_core_metrics_and_fail_closed_defaults(
) {
    // With one miss and one hit (a 50% hit rate), the default thresholds must
    // not be met and the gate contract should fail closed, while its input
    // section faithfully mirrors the summary's core metrics.
    let (evo, _) = build_test_evo("roi-contract", "run-roi-contract", command_validator());
    let envelope = remote_publish_envelope(
        "node-contract",
        "run-remote-contract",
        "gene-contract",
        "capsule-contract",
        "mutation-contract",
        "contract-signal",
        "CONTRACT.md",
        "# contract",
    );
    evo.import_remote_envelope(&envelope).unwrap();

    // One planner fallback (miss) ...
    let miss = evo
        .replay_or_fallback(replay_input("entropy-hash-contract-no-overlap"))
        .await
        .unwrap();
    assert!(!miss.used_capsule);
    assert!(miss.fallback_to_planner);

    // ... and one replay hit against the imported capsule.
    let hit = evo
        .replay_or_fallback(replay_input("contract-signal"))
        .await
        .unwrap();
    assert!(hit.used_capsule);

    let summary = evo.replay_roi_release_gate_summary(60 * 60).unwrap();
    let contract = evo
        .replay_roi_release_gate_contract(60 * 60, ReplayRoiReleaseGateThresholds::default())
        .unwrap();

    // Input section mirrors the summary totals.
    assert_eq!(contract.input.replay_attempts_total, 2);
    assert_eq!(contract.input.replay_success_total, 1);
    assert_eq!(contract.input.replay_failure_total, 1);
    assert_eq!(
        contract.input.reasoning_avoided_tokens,
        summary.reasoning_avoided_tokens_total
    );
    assert_eq!(
        contract.input.replay_fallback_cost_total,
        summary.replay_fallback_cost_total
    );
    // 1 hit out of 2 attempts -> both rates are exactly 0.5.
    assert!((contract.input.replay_hit_rate - 0.5).abs() < f64::EPSILON);
    assert!((contract.input.false_replay_rate - 0.5).abs() < f64::EPSILON);
    assert!((contract.input.replay_roi - summary.replay_roi).abs() < f64::EPSILON);
    assert!(contract.input.replay_safety);
    // The contract declares exactly the canonical aggregation dimensions.
    assert_eq!(
        contract.input.aggregation_dimensions,
        REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
            .iter()
            .map(|dimension| (*dimension).to_string())
            .collect::<Vec<_>>()
    );
    assert_eq!(
        contract.input.thresholds,
        ReplayRoiReleaseGateThresholds::default()
    );
    assert_eq!(
        contract.input.fail_closed_policy,
        ReplayRoiReleaseGateFailClosedPolicy::default()
    );
    // Default thresholds are not met, so the gate fails closed and names
    // each violated check plus the summary it used as evidence.
    assert_eq!(
        contract.output.status,
        ReplayRoiReleaseGateStatus::FailClosed
    );
    assert!(contract
        .output
        .failed_checks
        .iter()
        .any(|check| check == "min_replay_attempts_below_threshold"));
    assert!(contract
        .output
        .failed_checks
        .iter()
        .any(|check| check == "replay_hit_rate_below_threshold"));
    assert!(contract
        .output
        .failed_checks
        .iter()
        .any(|check| check == "false_replay_rate_above_threshold"));
    assert!(contract
        .output
        .evidence_refs
        .iter()
        .any(|evidence| evidence == "replay_roi_release_gate_summary"));
    assert!(contract.output.summary.contains("release gate fail_closed"));
}
7622
7623 #[tokio::test]
7624 async fn replay_roi_release_gate_summary_contract_accepts_custom_thresholds_and_json() {
7625 let (evo, _) = build_test_evo(
7626 "roi-contract-thresholds",
7627 "run-roi-contract-thresholds",
7628 command_validator(),
7629 );
7630 let thresholds = ReplayRoiReleaseGateThresholds {
7631 min_replay_attempts: 8,
7632 min_replay_hit_rate: 0.75,
7633 max_false_replay_rate: 0.10,
7634 min_reasoning_avoided_tokens: 600,
7635 min_replay_roi: 0.30,
7636 require_replay_safety: true,
7637 };
7638 let contract = evo
7639 .replay_roi_release_gate_contract(60 * 60, thresholds.clone())
7640 .unwrap();
7641 assert_eq!(contract.input.thresholds, thresholds.clone());
7642 assert_eq!(contract.input.replay_attempts_total, 0);
7643 assert_eq!(contract.input.replay_hit_rate, 0.0);
7644 assert_eq!(contract.input.false_replay_rate, 0.0);
7645 assert!(!contract.input.replay_safety_signal.has_replay_activity);
7646 assert!(!contract.input.replay_safety);
7647 assert_eq!(
7648 contract.output.status,
7649 ReplayRoiReleaseGateStatus::Indeterminate
7650 );
7651 assert!(contract
7652 .output
7653 .failed_checks
7654 .iter()
7655 .any(|check| check == "missing_replay_attempts"));
7656 assert!(contract
7657 .output
7658 .summary
7659 .contains("indeterminate (fail-closed)"));
7660
7661 let rendered = evo
7662 .render_replay_roi_release_gate_contract_json(60 * 60, thresholds)
7663 .unwrap();
7664 assert!(rendered.contains("\"min_replay_attempts\": 8"));
7665 assert!(rendered.contains("\"min_replay_hit_rate\": 0.75"));
7666 assert!(rendered.contains("\"status\": \"indeterminate\""));
7667 }
7668
7669 #[tokio::test]
7670 async fn replay_roi_release_gate_summary_window_boundary_filters_old_events() {
7671 let (evo, _) = build_test_evo("roi-window", "run-roi-window", command_validator());
7672 let envelope = remote_publish_envelope(
7673 "node-window",
7674 "run-remote-window",
7675 "gene-window",
7676 "capsule-window",
7677 "mutation-window",
7678 "window-signal",
7679 "WINDOW.md",
7680 "# window",
7681 );
7682 evo.import_remote_envelope(&envelope).unwrap();
7683
7684 let miss = evo
7685 .replay_or_fallback(replay_input("window-no-match-signal"))
7686 .await
7687 .unwrap();
7688 assert!(!miss.used_capsule);
7689 assert!(miss.fallback_to_planner);
7690
7691 let first_hit = evo
7692 .replay_or_fallback(replay_input("window-signal"))
7693 .await
7694 .unwrap();
7695 assert!(first_hit.used_capsule);
7696
7697 std::thread::sleep(std::time::Duration::from_secs(2));
7698
7699 let second_hit = evo
7700 .replay_or_fallback(replay_input("window-signal"))
7701 .await
7702 .unwrap();
7703 assert!(second_hit.used_capsule);
7704
7705 let narrow = evo.replay_roi_release_gate_summary(1).unwrap();
7706 assert_eq!(narrow.replay_attempts_total, 1);
7707 assert_eq!(narrow.replay_success_total, 1);
7708 assert_eq!(narrow.replay_failure_total, 0);
7709
7710 let all = evo.replay_roi_release_gate_summary(0).unwrap();
7711 assert_eq!(all.replay_attempts_total, 3);
7712 assert_eq!(all.replay_success_total, 2);
7713 assert_eq!(all.replay_failure_total, 1);
7714 }
7715
/// Fixed-input fixture whose metrics satisfy the default thresholds, so the
/// release-gate evaluator is expected to return `Pass` for it.
fn fixed_release_gate_pass_fixture() -> ReplayRoiReleaseGateInputContract {
    ReplayRoiReleaseGateInputContract {
        generated_at: "2026-03-13T00:00:00Z".to_string(),
        window_seconds: 86_400,
        aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
            .iter()
            .map(|dimension| (*dimension).to_string())
            .collect(),
        // 3 of 4 attempts succeeded: hit rate 0.75, false-replay rate 0.25.
        replay_attempts_total: 4,
        replay_success_total: 3,
        replay_failure_total: 1,
        replay_hit_rate: 0.75,
        false_replay_rate: 0.25,
        reasoning_avoided_tokens: 480,
        replay_fallback_cost_total: 64,
        // Keep ROI consistent with the token/cost figures above.
        replay_roi: compute_replay_roi(480, 64),
        replay_safety: true,
        replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
            fail_closed_default: true,
            rollback_ready: true,
            audit_trail_complete: true,
            has_replay_activity: true,
        },
        thresholds: ReplayRoiReleaseGateThresholds::default(),
        fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
    }
}
7743
/// Fixed-input fixture that violates multiple default thresholds (low hit
/// rate, high false-replay rate, low avoided tokens, replay_safety false), so
/// the evaluator is expected to return `FailClosed` for it.
fn fixed_release_gate_fail_fixture() -> ReplayRoiReleaseGateInputContract {
    ReplayRoiReleaseGateInputContract {
        generated_at: "2026-03-13T00:00:00Z".to_string(),
        window_seconds: 86_400,
        aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
            .iter()
            .map(|dimension| (*dimension).to_string())
            .collect(),
        // Only 4 of 10 attempts succeeded: hit rate 0.4, false-replay rate 0.6.
        replay_attempts_total: 10,
        replay_success_total: 4,
        replay_failure_total: 6,
        replay_hit_rate: 0.4,
        false_replay_rate: 0.6,
        // Fallback cost dwarfs avoided tokens, driving ROI down.
        reasoning_avoided_tokens: 80,
        replay_fallback_cost_total: 400,
        replay_roi: compute_replay_roi(80, 400),
        // Safety flag false even though the underlying signal looks healthy.
        replay_safety: false,
        replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
            fail_closed_default: true,
            rollback_ready: true,
            audit_trail_complete: true,
            has_replay_activity: true,
        },
        thresholds: ReplayRoiReleaseGateThresholds::default(),
        fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
    }
}
7771
/// Fixed-input fixture where every metric sits exactly on its (custom)
/// threshold — attempts, hit rate, false-replay rate, avoided tokens, and ROI
/// all equal their limits. Used to pin down that "exactly at threshold" is a
/// pass, not a failure.
fn fixed_release_gate_borderline_fixture() -> ReplayRoiReleaseGateInputContract {
    ReplayRoiReleaseGateInputContract {
        generated_at: "2026-03-13T00:00:00Z".to_string(),
        window_seconds: 3_600,
        aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
            .iter()
            .map(|dimension| (*dimension).to_string())
            .collect(),
        replay_attempts_total: 4,
        replay_success_total: 3,
        replay_failure_total: 1,
        replay_hit_rate: 0.75,
        false_replay_rate: 0.25,
        reasoning_avoided_tokens: 192,
        replay_fallback_cost_total: 173,
        replay_roi: 0.05,
        replay_safety: true,
        replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
            fail_closed_default: true,
            rollback_ready: true,
            audit_trail_complete: true,
            has_replay_activity: true,
        },
        // Thresholds deliberately chosen to equal the metric values above.
        thresholds: ReplayRoiReleaseGateThresholds {
            min_replay_attempts: 4,
            min_replay_hit_rate: 0.75,
            max_false_replay_rate: 0.25,
            min_reasoning_avoided_tokens: 192,
            min_replay_roi: 0.05,
            require_replay_safety: true,
        },
        fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
    }
}
7806
#[test]
fn replay_roi_release_gate_summary_fixed_fixtures_cover_pass_fail_and_borderline() {
    // Table-driven: (fixture, expected status, whether failed_checks is empty).
    // The borderline fixture sits exactly on every threshold and must pass.
    let cases = [
        (
            fixed_release_gate_pass_fixture(),
            ReplayRoiReleaseGateStatus::Pass,
            true,
        ),
        (
            fixed_release_gate_fail_fixture(),
            ReplayRoiReleaseGateStatus::FailClosed,
            false,
        ),
        (
            fixed_release_gate_borderline_fixture(),
            ReplayRoiReleaseGateStatus::Pass,
            true,
        ),
    ];

    for (fixture, expected_status, expect_no_failed_checks) in cases {
        let output = evaluate_replay_roi_release_gate_contract_input(&fixture);
        assert_eq!(output.status, expected_status);
        assert_eq!(output.failed_checks.is_empty(), expect_no_failed_checks);
    }
}
7824
#[test]
fn replay_roi_release_gate_summary_machine_readable_output_is_stable_and_sorted() {
    // Pins the machine-readable shape of a fail-closed evaluation: failed
    // checks and evidence refs are lexicographically sorted, and JSON
    // serialization is deterministic with `status` emitted first.
    let output =
        evaluate_replay_roi_release_gate_contract_input(&fixed_release_gate_fail_fixture());

    // Failed checks in sorted order — one per violated threshold.
    assert_eq!(
        output.failed_checks,
        vec![
            "false_replay_rate_above_threshold".to_string(),
            "reasoning_avoided_tokens_below_threshold".to_string(),
            "replay_hit_rate_below_threshold".to_string(),
            "replay_roi_below_threshold".to_string(),
            "replay_safety_required".to_string(),
        ]
    );
    // Evidence refs in sorted order: a metric and a threshold ref per failed
    // check, plus the summary source, timestamp, and window provenance.
    assert_eq!(
        output.evidence_refs,
        vec![
            "generated_at:2026-03-13T00:00:00Z".to_string(),
            "metric:false_replay_rate".to_string(),
            "metric:reasoning_avoided_tokens".to_string(),
            "metric:replay_hit_rate".to_string(),
            "metric:replay_roi".to_string(),
            "metric:replay_safety".to_string(),
            "replay_roi_release_gate_summary".to_string(),
            "threshold:max_false_replay_rate".to_string(),
            "threshold:min_reasoning_avoided_tokens".to_string(),
            "threshold:min_replay_hit_rate".to_string(),
            "threshold:min_replay_roi".to_string(),
            "threshold:require_replay_safety".to_string(),
            "window_seconds:86400".to_string(),
        ]
    );

    // Serialization is stable: `status` leads, and repeated calls are identical.
    let rendered = serde_json::to_string(&output).unwrap();
    assert!(rendered.starts_with("{\"status\":\"fail_closed\",\"failed_checks\":"));
    assert_eq!(rendered, serde_json::to_string(&output).unwrap());
}
7863
#[test]
fn replay_roi_release_gate_summary_evaluator_passes_with_threshold_compliance() {
    // An input comfortably above every default threshold must evaluate to
    // Pass with no failed checks and a "release gate pass" summary line.
    let input = ReplayRoiReleaseGateInputContract {
        generated_at: Utc::now().to_rfc3339(),
        window_seconds: 86_400,
        aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
            .iter()
            .map(|dimension| (*dimension).to_string())
            .collect(),
        // 9 of 10 attempts succeeded: hit rate 0.9, false-replay rate 0.1.
        replay_attempts_total: 10,
        replay_success_total: 9,
        replay_failure_total: 1,
        replay_hit_rate: 0.9,
        false_replay_rate: 0.1,
        reasoning_avoided_tokens: 960,
        replay_fallback_cost_total: 64,
        replay_roi: compute_replay_roi(960, 64),
        replay_safety: true,
        replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
            fail_closed_default: true,
            rollback_ready: true,
            audit_trail_complete: true,
            has_replay_activity: true,
        },
        thresholds: ReplayRoiReleaseGateThresholds::default(),
        fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
    };

    let output = evaluate_replay_roi_release_gate_contract_input(&input);
    assert_eq!(output.status, ReplayRoiReleaseGateStatus::Pass);
    assert!(output.failed_checks.is_empty());
    assert!(output.summary.contains("release gate pass"));
}
7897
#[test]
fn replay_roi_release_gate_summary_evaluator_fail_closed_on_threshold_violations() {
    // An input that violates the default hit-rate, false-replay-rate and ROI
    // thresholds must fail closed and name each violated check.
    let input = ReplayRoiReleaseGateInputContract {
        generated_at: Utc::now().to_rfc3339(),
        window_seconds: 86_400,
        aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
            .iter()
            .map(|dimension| (*dimension).to_string())
            .collect(),
        // Only 4 of 10 attempts succeeded: hit rate 0.4, false-replay rate 0.6.
        replay_attempts_total: 10,
        replay_success_total: 4,
        replay_failure_total: 6,
        replay_hit_rate: 0.4,
        false_replay_rate: 0.6,
        // Fallback cost dwarfs avoided tokens, driving ROI below threshold.
        reasoning_avoided_tokens: 80,
        replay_fallback_cost_total: 400,
        replay_roi: compute_replay_roi(80, 400),
        replay_safety: false,
        replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
            fail_closed_default: true,
            rollback_ready: true,
            audit_trail_complete: true,
            has_replay_activity: true,
        },
        thresholds: ReplayRoiReleaseGateThresholds::default(),
        fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
    };

    let output = evaluate_replay_roi_release_gate_contract_input(&input);
    assert_eq!(output.status, ReplayRoiReleaseGateStatus::FailClosed);
    assert!(output
        .failed_checks
        .iter()
        .any(|check| check == "replay_hit_rate_below_threshold"));
    assert!(output
        .failed_checks
        .iter()
        .any(|check| check == "false_replay_rate_above_threshold"));
    assert!(output
        .failed_checks
        .iter()
        .any(|check| check == "replay_roi_below_threshold"));
    assert!(output.summary.contains("release gate fail_closed"));
}
7942
#[test]
fn replay_roi_release_gate_summary_evaluator_marks_missing_data_indeterminate() {
    // Missing data (empty generated_at, zero attempts, no replay activity)
    // must yield Indeterminate — the fail-closed answer for "can't tell" —
    // with explicit missing-data checks rather than threshold violations.
    let input = ReplayRoiReleaseGateInputContract {
        generated_at: String::new(),
        window_seconds: 86_400,
        aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
            .iter()
            .map(|dimension| (*dimension).to_string())
            .collect(),
        replay_attempts_total: 0,
        replay_success_total: 0,
        replay_failure_total: 0,
        replay_hit_rate: 0.0,
        false_replay_rate: 0.0,
        reasoning_avoided_tokens: 0,
        replay_fallback_cost_total: 0,
        replay_roi: 0.0,
        replay_safety: false,
        replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
            fail_closed_default: true,
            rollback_ready: true,
            audit_trail_complete: true,
            // No replay activity is the key missing-data condition here.
            has_replay_activity: false,
        },
        thresholds: ReplayRoiReleaseGateThresholds::default(),
        fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
    };

    let output = evaluate_replay_roi_release_gate_contract_input(&input);
    assert_eq!(output.status, ReplayRoiReleaseGateStatus::Indeterminate);
    assert!(output
        .failed_checks
        .iter()
        .any(|check| check == "missing_generated_at"));
    assert!(output
        .failed_checks
        .iter()
        .any(|check| check == "missing_replay_attempts"));
    assert!(output
        .summary
        .contains("release gate indeterminate (fail-closed)"));
}
7985
#[test]
fn stale_replay_targets_require_confidence_revalidation() {
    // A promoted gene/capsule pair last updated 48 hours ago should be flagged
    // for confidence revalidation once time decay pushes its confidence below
    // MIN_REPLAY_CONFIDENCE.
    let now = Utc::now();
    let projection = EvolutionProjection {
        genes: vec![Gene {
            id: "gene-stale".into(),
            signals: vec!["missing readme".into()],
            strategy: vec!["README.md".into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        }],
        capsules: vec![Capsule {
            id: "capsule-stale".into(),
            gene_id: "gene-stale".into(),
            mutation_id: "mutation-stale".into(),
            run_id: "run-stale".into(),
            diff_hash: "hash".into(),
            // Starting confidence 0.8 — high enough to replay when fresh.
            confidence: 0.8,
            env: replay_input("missing readme").env,
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec!["README.md".into()],
                validator_hash: "validator".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        }],
        reuse_counts: BTreeMap::from([("gene-stale".into(), 1)]),
        attempt_counts: BTreeMap::from([("gene-stale".into(), 1)]),
        // The staleness trigger: last update 48 hours before `now`.
        last_updated_at: BTreeMap::from([(
            "gene-stale".into(),
            (now - Duration::hours(48)).to_rfc3339(),
        )]),
        spec_ids_by_gene: BTreeMap::new(),
    };

    let targets = stale_replay_revalidation_targets(&projection, now);

    // Exactly one target: the stale gene with its associated capsule, whose
    // decayed confidence has dropped below the replay floor.
    assert_eq!(targets.len(), 1);
    assert_eq!(targets[0].gene_id, "gene-stale");
    assert_eq!(targets[0].capsule_ids, vec!["capsule-stale".to_string()]);
    assert!(targets[0].decayed_confidence < MIN_REPLAY_CONFIDENCE);
}
8032
#[tokio::test]
async fn remote_replay_prefers_closest_environment_match() {
    // Two remote capsules carry the same signal; only one matches the local
    // environment fingerprint. Replay must pick the env-matching capsule.
    let (evo, _) = build_test_evo("remote-env", "run-remote-env", command_validator());
    let input = replay_input("env-signal");

    // Candidate A: identical env fingerprint to the replay input.
    let envelope_a = remote_publish_envelope_with_env(
        "node-a",
        "run-remote-a",
        "gene-a",
        "capsule-a",
        "mutation-a",
        "env-signal",
        "A.md",
        "# from a",
        input.env.clone(),
    );
    // Candidate B: same signal but a deliberately mismatched environment
    // (different rustc, lockfile hash, target triple, and OS).
    let envelope_b = remote_publish_envelope_with_env(
        "node-b",
        "run-remote-b",
        "gene-b",
        "capsule-b",
        "mutation-b",
        "env-signal",
        "B.md",
        "# from b",
        EnvFingerprint {
            rustc_version: "old-rustc".into(),
            cargo_lock_hash: "other-lock".into(),
            target_triple: "aarch64-apple-darwin".into(),
            os: "linux".into(),
        },
    );

    evo.import_remote_envelope(&envelope_a).unwrap();
    evo.import_remote_envelope(&envelope_b).unwrap();

    let decision = evo.replay_or_fallback(input).await.unwrap();

    // The env-matching capsule wins; no planner fallback occurs.
    assert!(decision.used_capsule);
    assert_eq!(decision.capsule_id, Some("capsule-a".into()));
    assert!(!decision.fallback_to_planner);
}
8075
#[test]
fn remote_cold_start_scoring_caps_distinct_query_coverage() {
    // Cold-start scoring of quarantined remote candidates must cap at 1.0:
    // an exact-phrase match and a multi-token overlap match ("missing" +
    // "readme" together covering "missing readme") score the same maximum.
    let (evo, _) = build_test_evo("remote-score", "run-remote-score", command_validator());
    let input = replay_input("missing readme");

    // Candidate whose stored signal equals the query phrase exactly.
    let exact = remote_publish_envelope_with_signals(
        "node-exact",
        "run-remote-exact",
        "gene-exact",
        "capsule-exact",
        "mutation-exact",
        vec!["missing readme".into()],
        vec!["missing readme".into()],
        "EXACT.md",
        "# exact",
        input.env.clone(),
    );
    // Candidate whose signals cover the query via two separate tokens.
    let overlapping = remote_publish_envelope_with_signals(
        "node-overlap",
        "run-remote-overlap",
        "gene-overlap",
        "capsule-overlap",
        "mutation-overlap",
        vec!["missing readme".into()],
        vec!["missing".into(), "readme".into()],
        "OVERLAP.md",
        "# overlap",
        input.env.clone(),
    );

    evo.import_remote_envelope(&exact).unwrap();
    evo.import_remote_envelope(&overlapping).unwrap();

    let candidates = quarantined_remote_exact_match_candidates(evo.store.as_ref(), &input);
    let exact_candidate = candidates
        .iter()
        .find(|candidate| candidate.gene.id == "gene-exact")
        .unwrap();
    let overlap_candidate = candidates
        .iter()
        .find(|candidate| candidate.gene.id == "gene-overlap")
        .unwrap();

    // Both strategies saturate at the cap; nothing may exceed 1.0.
    assert_eq!(exact_candidate.score, 1.0);
    assert_eq!(overlap_candidate.score, 1.0);
    assert!(candidates.iter().all(|candidate| candidate.score <= 1.0));
}
8123
/// Replay candidate filtering must honor SpecLinked events: when the replay
/// input demands a spec_id, a gene whose mutation was linked to that spec by a
/// later SpecLinked event (rather than by the mutation intent itself) still
/// qualifies as an exact-match candidate.
#[test]
fn exact_match_candidates_respect_spec_linked_events() {
    let (evo, _) = build_test_evo(
        "spec-linked-filter",
        "run-spec-linked-filter",
        command_validator(),
    );
    let mut input = replay_input("missing readme");
    // The input requires this spec; linkage is established only via the event
    // appended at the end of the setup below.
    input.spec_id = Some("spec-readme".into());

    let mut mutation = sample_mutation();
    mutation.intent.id = "mutation-spec-linked".into();
    // Deliberately unset: the SpecLinked event, not the intent, supplies it.
    mutation.intent.spec_id = None;
    let gene = Gene {
        id: "gene-spec-linked".into(),
        signals: vec!["missing readme".into()],
        strategy: vec!["README.md".into()],
        validation: vec!["test".into()],
        state: AssetState::Promoted,
    };
    let capsule = Capsule {
        id: "capsule-spec-linked".into(),
        gene_id: gene.id.clone(),
        mutation_id: mutation.intent.id.clone(),
        run_id: "run-spec-linked".into(),
        diff_hash: mutation.artifact.content_hash.clone(),
        confidence: 0.9,
        env: input.env.clone(),
        outcome: Outcome {
            success: true,
            validation_profile: "test".into(),
            validation_duration_ms: 1,
            changed_files: vec!["README.md".into()],
            validator_hash: "validator-hash".into(),
            lines_changed: 1,
            replay_verified: false,
        },
        state: AssetState::Promoted,
    };

    // Seed mutation/gene/capsule, then link the mutation to the spec after
    // the fact — the order mirrors how linkage arrives in production.
    evo.store
        .append_event(EvolutionEvent::MutationDeclared { mutation })
        .unwrap();
    evo.store
        .append_event(EvolutionEvent::GeneProjected { gene })
        .unwrap();
    evo.store
        .append_event(EvolutionEvent::CapsuleCommitted { capsule })
        .unwrap();
    evo.store
        .append_event(EvolutionEvent::SpecLinked {
            mutation_id: "mutation-spec-linked".into(),
            spec_id: "spec-readme".into(),
        })
        .unwrap();

    // The spec-linked gene is the sole exact-match candidate.
    let candidates = exact_match_candidates(evo.store.as_ref(), &input);
    assert_eq!(candidates.len(), 1);
    assert_eq!(candidates[0].gene.id, "gene-spec-linked");
}
8184
/// Walks a remote capsule through the trust ladder: imported assets start
/// Quarantined (and are excluded from export), a first successful replay
/// moves them to ShadowValidated (still not exported), and a second
/// successful replay promotes them, at which point they become exportable.
#[tokio::test]
async fn remote_capsule_advances_from_quarantine_to_shadow_then_promoted() {
    let (evo, store) = build_test_evo(
        "remote-quarantine",
        "run-remote-quarantine",
        command_validator(),
    );
    let envelope = remote_publish_envelope(
        "node-remote",
        "run-remote-quarantine",
        "gene-remote",
        "capsule-remote",
        "mutation-remote",
        "remote-signal",
        "REMOTE.md",
        "# from remote",
    );

    evo.import_remote_envelope(&envelope).unwrap();

    // Stage 1: freshly imported assets are quarantined and never exported.
    let before_replay = store.rebuild_projection().unwrap();
    let imported_gene = before_replay
        .genes
        .iter()
        .find(|gene| gene.id == "gene-remote")
        .unwrap();
    let imported_capsule = before_replay
        .capsules
        .iter()
        .find(|capsule| capsule.id == "capsule-remote")
        .unwrap();
    assert_eq!(imported_gene.state, AssetState::Quarantined);
    assert_eq!(imported_capsule.state, AssetState::Quarantined);
    let exported_before_replay =
        export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
    assert!(exported_before_replay.assets.is_empty());

    // Stage 2: the first successful replay shadow-validates gene and capsule.
    let first_decision = evo
        .replay_or_fallback(replay_input("remote-signal"))
        .await
        .unwrap();

    assert!(first_decision.used_capsule);
    assert_eq!(first_decision.capsule_id, Some("capsule-remote".into()));

    let after_first_replay = store.rebuild_projection().unwrap();
    let shadow_gene = after_first_replay
        .genes
        .iter()
        .find(|gene| gene.id == "gene-remote")
        .unwrap();
    let shadow_capsule = after_first_replay
        .capsules
        .iter()
        .find(|capsule| capsule.id == "capsule-remote")
        .unwrap();
    assert_eq!(shadow_gene.state, AssetState::ShadowValidated);
    assert_eq!(shadow_capsule.state, AssetState::ShadowValidated);
    // Shadow-validated assets are still withheld from export.
    let exported_after_first_replay =
        export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
    assert!(exported_after_first_replay.assets.is_empty());

    // Stage 3: the second successful replay promotes gene and capsule.
    let second_decision = evo
        .replay_or_fallback(replay_input("remote-signal"))
        .await
        .unwrap();
    assert!(second_decision.used_capsule);
    assert_eq!(second_decision.capsule_id, Some("capsule-remote".into()));

    let after_second_replay = store.rebuild_projection().unwrap();
    let promoted_gene = after_second_replay
        .genes
        .iter()
        .find(|gene| gene.id == "gene-remote")
        .unwrap();
    let promoted_capsule = after_second_replay
        .capsules
        .iter()
        .find(|capsule| capsule.id == "capsule-remote")
        .unwrap();
    assert_eq!(promoted_gene.state, AssetState::Promoted);
    assert_eq!(promoted_capsule.state, AssetState::Promoted);
    // Promotion makes exactly three assets exportable, including the original
    // mutation event (needed for remote replay).
    let exported_after_second_replay =
        export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
    assert_eq!(exported_after_second_replay.assets.len(), 3);
    assert!(exported_after_second_replay
        .assets
        .iter()
        .any(|asset| matches!(
            asset,
            NetworkAsset::EvolutionEvent {
                event: EvolutionEvent::MutationDeclared { .. }
            }
        )));
}
8280
8281 #[tokio::test]
8282 async fn publish_local_assets_include_mutation_payload_for_remote_replay() {
8283 let (source, source_store) = build_test_evo(
8284 "remote-publish-export",
8285 "run-remote-publish-export",
8286 command_validator(),
8287 );
8288 source
8289 .capture_successful_mutation(&"run-remote-publish-export".into(), sample_mutation())
8290 .await
8291 .unwrap();
8292 let envelope = EvolutionNetworkNode::new(source_store.clone())
8293 .publish_local_assets("node-source")
8294 .unwrap();
8295 assert!(envelope.assets.iter().any(|asset| matches!(
8296 asset,
8297 NetworkAsset::EvolutionEvent {
8298 event: EvolutionEvent::MutationDeclared { mutation }
8299 } if mutation.intent.id == "mutation-1"
8300 )));
8301
8302 let (remote, _) = build_test_evo(
8303 "remote-publish-import",
8304 "run-remote-publish-import",
8305 command_validator(),
8306 );
8307 remote.import_remote_envelope(&envelope).unwrap();
8308
8309 let decision = remote
8310 .replay_or_fallback(replay_input("missing readme"))
8311 .await
8312 .unwrap();
8313
8314 assert!(decision.used_capsule);
8315 assert!(!decision.fallback_to_planner);
8316 }
8317
/// A successful remote import must append an accepted ManifestValidated audit
/// event recording the sender id, the publisher, and a non-empty list of
/// validated asset ids.
#[tokio::test]
async fn import_remote_envelope_records_manifest_validation_event() {
    // Source node captures a real mutation and publishes it as an envelope.
    let (source, source_store) = build_test_evo(
        "remote-manifest-success-source",
        "run-remote-manifest-success-source",
        command_validator(),
    );
    source
        .capture_successful_mutation(
            &"run-remote-manifest-success-source".into(),
            sample_mutation(),
        )
        .await
        .unwrap();
    let envelope = EvolutionNetworkNode::new(source_store.clone())
        .publish_local_assets("node-source")
        .unwrap();

    // A separate node imports the envelope.
    let (remote, remote_store) = build_test_evo(
        "remote-manifest-success-remote",
        "run-remote-manifest-success-remote",
        command_validator(),
    );
    remote.import_remote_envelope(&envelope).unwrap();

    // The importer's event log contains the accepted audit record with full
    // provenance (sender == publisher == the publishing node).
    let events = remote_store.scan(1).unwrap();
    assert!(events.iter().any(|stored| matches!(
        &stored.event,
        EvolutionEvent::ManifestValidated {
            accepted: true,
            reason,
            sender_id: Some(sender_id),
            publisher: Some(publisher),
            asset_ids,
        } if reason == "manifest validated"
            && sender_id == "node-source"
            && publisher == "node-source"
            && !asset_ids.is_empty()
    )));
}
8358
/// A tampered manifest must cause import to fail with a manifest error and
/// leave a rejected ManifestValidated audit event in the importer's store.
#[test]
fn import_remote_envelope_rejects_invalid_manifest_and_records_audit_event() {
    let (remote, remote_store) = build_test_evo(
        "remote-manifest-invalid",
        "run-remote-manifest-invalid",
        command_validator(),
    );
    let mut envelope = remote_publish_envelope(
        "node-remote",
        "run-remote-manifest-invalid",
        "gene-remote",
        "capsule-remote",
        "mutation-remote",
        "manifest-signal",
        "MANIFEST.md",
        "# drift",
    );
    // Corrupt the manifest's asset hash, then recompute the envelope content
    // hash so only the manifest check (not the outer hash check) can reject.
    if let Some(manifest) = envelope.manifest.as_mut() {
        manifest.asset_hash = "tampered-hash".to_string();
    }
    envelope.content_hash = envelope.compute_content_hash();

    let error = remote.import_remote_envelope(&envelope).unwrap_err();
    assert!(error.to_string().contains("manifest"));

    // The rejection is audited with full provenance of the offending sender.
    let events = remote_store.scan(1).unwrap();
    let rejection_audited = events.iter().any(|stored| {
        matches!(
            &stored.event,
            EvolutionEvent::ManifestValidated {
                accepted: false,
                reason,
                sender_id: Some(sender_id),
                publisher: Some(publisher),
                asset_ids,
            } if reason.contains("manifest asset_hash mismatch")
                && sender_id == "node-remote"
                && publisher == "node-remote"
                && !asset_ids.is_empty()
        )
    });
    assert!(rejection_audited);
}
8399
8400 #[tokio::test]
8401 async fn fetch_assets_include_mutation_payload_for_remote_replay() {
8402 let (evo, store) = build_test_evo(
8403 "remote-fetch-export",
8404 "run-remote-fetch",
8405 command_validator(),
8406 );
8407 evo.capture_successful_mutation(&"run-remote-fetch".into(), sample_mutation())
8408 .await
8409 .unwrap();
8410
8411 let response = EvolutionNetworkNode::new(store.clone())
8412 .fetch_assets(
8413 "node-source",
8414 &FetchQuery {
8415 sender_id: "node-client".into(),
8416 signals: vec!["missing readme".into()],
8417 since_cursor: None,
8418 resume_token: None,
8419 },
8420 )
8421 .unwrap();
8422
8423 assert!(response.assets.iter().any(|asset| matches!(
8424 asset,
8425 NetworkAsset::EvolutionEvent {
8426 event: EvolutionEvent::MutationDeclared { mutation }
8427 } if mutation.intent.id == "mutation-1"
8428 )));
8429 assert!(response
8430 .assets
8431 .iter()
8432 .any(|asset| matches!(asset, NetworkAsset::Gene { .. })));
8433 assert!(response
8434 .assets
8435 .iter()
8436 .any(|asset| matches!(asset, NetworkAsset::Capsule { .. })));
8437 }
8438
/// Delta sync must work through both mechanisms: a `resume_token` returned by a
/// previous fetch, and an explicit `since_cursor`. Either one should yield only
/// assets appended after the first fetch, even across a node restart.
#[test]
fn fetch_assets_delta_sync_supports_since_cursor_and_resume_token() {
    // Isolated on-disk store so cursors reflect real append order.
    let store_root =
        std::env::temp_dir().join(format!("oris-evokernel-fetch-delta-store-{}", next_id("t")));
    if store_root.exists() {
        fs::remove_dir_all(&store_root).unwrap();
    }
    let store: Arc<dyn EvolutionStore> =
        Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
    let node = EvolutionNetworkNode::new(store.clone());
    node.record_reported_experience(
        "delta-agent",
        "gene-delta-a",
        vec!["delta.signal".into()],
        vec![
            "task_class=delta.signal".into(),
            "task_label=delta replay".into(),
        ],
        vec!["a2a.tasks.report".into()],
    )
    .unwrap();

    // Initial full fetch: sees gene-delta-a and yields cursor + resume token.
    let first = node
        .fetch_assets(
            "execution-api",
            &FetchQuery {
                sender_id: "delta-agent".into(),
                signals: vec!["delta.signal".into()],
                since_cursor: None,
                resume_token: None,
            },
        )
        .unwrap();
    let first_cursor = first.next_cursor.clone().expect("first next_cursor");
    let first_token = first.resume_token.clone().expect("first resume_token");
    assert!(first.assets.iter().any(
        |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-a")
    ));

    // Simulate a restart on the same store, then append a second experience.
    let restarted = EvolutionNetworkNode::new(store.clone());
    restarted
        .record_reported_experience(
            "delta-agent",
            "gene-delta-b",
            vec!["delta.signal".into()],
            vec![
                "task_class=delta.signal".into(),
                "task_label=delta replay".into(),
            ],
            vec!["a2a.tasks.report".into()],
        )
        .unwrap();

    // Resume via token: only the post-restart asset comes back.
    let from_token = restarted
        .fetch_assets(
            "execution-api",
            &FetchQuery {
                sender_id: "delta-agent".into(),
                signals: vec!["delta.signal".into()],
                since_cursor: None,
                resume_token: Some(first_token),
            },
        )
        .unwrap();
    assert!(from_token.assets.iter().any(
        |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-b")
    ));
    assert!(!from_token.assets.iter().any(
        |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-a")
    ));
    // The audit trail exposes the cursor the token resolved to.
    assert_eq!(
        from_token.sync_audit.requested_cursor,
        Some(first_cursor.clone())
    );
    assert!(from_token.sync_audit.applied_count >= 1);

    // Resume via explicit cursor: same delta behavior as the token path.
    let from_cursor = restarted
        .fetch_assets(
            "execution-api",
            &FetchQuery {
                sender_id: "delta-agent".into(),
                signals: vec!["delta.signal".into()],
                since_cursor: Some(first_cursor),
                resume_token: None,
            },
        )
        .unwrap();
    assert!(from_cursor.assets.iter().any(
        |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-b")
    ));
}
8530
/// A mid-envelope store failure must still retain the gene that was appended
/// before the failure, along with its remote publisher attribution, while the
/// capsule (which never made it into the store) is absent.
#[test]
fn partial_remote_import_keeps_publisher_for_already_imported_assets() {
    // Use next_id for the temp-store suffix, matching every sibling test in
    // this module (previously std::process::id(), which is not unique across
    // concurrently running test processes with a recycled pid).
    let store_root = std::env::temp_dir().join(format!(
        "oris-evokernel-remote-partial-store-{}",
        next_id("t")
    ));
    if store_root.exists() {
        fs::remove_dir_all(&store_root).unwrap();
    }
    // FailOnAppendStore errors on the 5th append — observed effect below: the
    // gene projection lands, the capsule commit does not.
    let store: Arc<dyn EvolutionStore> = Arc::new(FailOnAppendStore::new(store_root, 5));
    let evo = build_test_evo_with_store(
        "remote-partial",
        "run-remote-partial",
        command_validator(),
        store.clone(),
    );
    let envelope = remote_publish_envelope(
        "node-partial",
        "run-remote-partial",
        "gene-partial",
        "capsule-partial",
        "mutation-partial",
        "partial-signal",
        "PARTIAL.md",
        "# partial",
    );

    let result = evo.import_remote_envelope(&envelope);

    // The import surfaces the store error...
    assert!(matches!(result, Err(EvoKernelError::Store(_))));
    // ...but the already-appended gene survives while the capsule does not.
    let projection = store.rebuild_projection().unwrap();
    assert!(projection
        .genes
        .iter()
        .any(|gene| gene.id == "gene-partial"));
    assert!(projection.capsules.is_empty());
    // Publisher attribution for the partially imported gene is retained so a
    // later retry can still settle rewards to the right node.
    let publishers = evo.remote_publishers.lock().unwrap();
    assert_eq!(
        publishers.get("gene-partial").map(String::as_str),
        Some("node-partial")
    );
}
8573
/// Retrying an import after a partial failure must be incremental: only the
/// asset that failed to land the first time (the capsule) is imported on
/// retry, and no event is duplicated in the store.
#[test]
fn retry_remote_import_after_partial_failure_only_imports_missing_assets() {
    let store_root = std::env::temp_dir().join(format!(
        "oris-evokernel-remote-partial-retry-store-{}",
        next_id("t")
    ));
    if store_root.exists() {
        fs::remove_dir_all(&store_root).unwrap();
    }
    // Fails once on the 5th append, producing the partial first import.
    let store: Arc<dyn EvolutionStore> = Arc::new(FailOnAppendStore::new(store_root, 5));
    let evo = build_test_evo_with_store(
        "remote-partial-retry",
        "run-remote-partial-retry",
        command_validator(),
        store.clone(),
    );
    let envelope = remote_publish_envelope(
        "node-partial",
        "run-remote-partial-retry",
        "gene-partial-retry",
        "capsule-partial-retry",
        "mutation-partial-retry",
        "partial-retry-signal",
        "PARTIAL_RETRY.md",
        "# partial retry",
    );

    // First attempt fails partway through.
    let first = evo.import_remote_envelope(&envelope);
    assert!(matches!(first, Err(EvoKernelError::Store(_))));

    // Retry succeeds and imports only the asset missing from the first pass.
    let retry = evo.import_remote_envelope(&envelope).unwrap();

    assert_eq!(retry.imported_asset_ids, vec!["capsule-partial-retry"]);
    // Both assets end up quarantined, and the retry did not double-count the
    // gene's attempt.
    let projection = store.rebuild_projection().unwrap();
    let gene = projection
        .genes
        .iter()
        .find(|gene| gene.id == "gene-partial-retry")
        .unwrap();
    assert_eq!(gene.state, AssetState::Quarantined);
    let capsule = projection
        .capsules
        .iter()
        .find(|capsule| capsule.id == "capsule-partial-retry")
        .unwrap();
    assert_eq!(capsule.state, AssetState::Quarantined);
    assert_eq!(projection.attempt_counts["gene-partial-retry"], 1);

    // Each of the three envelope events appears exactly once in the log.
    let events = store.scan(1).unwrap();
    assert_eq!(
        events
            .iter()
            .filter(|stored| {
                matches!(
                    &stored.event,
                    EvolutionEvent::MutationDeclared { mutation }
                        if mutation.intent.id == "mutation-partial-retry"
                )
            })
            .count(),
        1
    );
    assert_eq!(
        events
            .iter()
            .filter(|stored| {
                matches!(
                    &stored.event,
                    EvolutionEvent::GeneProjected { gene } if gene.id == "gene-partial-retry"
                )
            })
            .count(),
        1
    );
    assert_eq!(
        events
            .iter()
            .filter(|stored| {
                matches!(
                    &stored.event,
                    EvolutionEvent::CapsuleCommitted { capsule }
                        if capsule.id == "capsule-partial-retry"
                )
            })
            .count(),
        1
    );
}
8662
/// Re-importing an envelope whose assets have already been locally validated
/// must be a no-op: it may not reset ShadowValidated assets back to
/// Quarantined, may not bump attempt counts, and may not duplicate events —
/// and the promotion ladder must continue working afterwards.
#[tokio::test]
async fn duplicate_remote_import_does_not_requarantine_locally_validated_assets() {
    let (evo, store) = build_test_evo(
        "remote-idempotent",
        "run-remote-idempotent",
        command_validator(),
    );
    let envelope = remote_publish_envelope(
        "node-idempotent",
        "run-remote-idempotent",
        "gene-idempotent",
        "capsule-idempotent",
        "mutation-idempotent",
        "idempotent-signal",
        "IDEMPOTENT.md",
        "# idempotent",
    );

    // First import applies both assets.
    let first = evo.import_remote_envelope(&envelope).unwrap();
    assert_eq!(
        first.imported_asset_ids,
        vec!["gene-idempotent", "capsule-idempotent"]
    );

    // One successful replay advances the assets to ShadowValidated.
    let decision = evo
        .replay_or_fallback(replay_input("idempotent-signal"))
        .await
        .unwrap();
    assert!(decision.used_capsule);
    assert_eq!(decision.capsule_id, Some("capsule-idempotent".into()));

    let projection_before = store.rebuild_projection().unwrap();
    let attempts_before = projection_before.attempt_counts["gene-idempotent"];
    let gene_before = projection_before
        .genes
        .iter()
        .find(|gene| gene.id == "gene-idempotent")
        .unwrap();
    assert_eq!(gene_before.state, AssetState::ShadowValidated);
    let capsule_before = projection_before
        .capsules
        .iter()
        .find(|capsule| capsule.id == "capsule-idempotent")
        .unwrap();
    assert_eq!(capsule_before.state, AssetState::ShadowValidated);

    // Duplicate import: nothing is re-applied.
    let second = evo.import_remote_envelope(&envelope).unwrap();
    assert!(second.imported_asset_ids.is_empty());

    // State and attempt counts are untouched by the duplicate import.
    let projection_after = store.rebuild_projection().unwrap();
    assert_eq!(
        projection_after.attempt_counts["gene-idempotent"],
        attempts_before
    );
    let gene_after = projection_after
        .genes
        .iter()
        .find(|gene| gene.id == "gene-idempotent")
        .unwrap();
    assert_eq!(gene_after.state, AssetState::ShadowValidated);
    let capsule_after = projection_after
        .capsules
        .iter()
        .find(|capsule| capsule.id == "capsule-idempotent")
        .unwrap();
    assert_eq!(capsule_after.state, AssetState::ShadowValidated);

    // The trust ladder still works: a second successful replay promotes.
    let third_decision = evo
        .replay_or_fallback(replay_input("idempotent-signal"))
        .await
        .unwrap();
    assert!(third_decision.used_capsule);
    assert_eq!(third_decision.capsule_id, Some("capsule-idempotent".into()));

    let projection_promoted = store.rebuild_projection().unwrap();
    let promoted_gene = projection_promoted
        .genes
        .iter()
        .find(|gene| gene.id == "gene-idempotent")
        .unwrap();
    let promoted_capsule = projection_promoted
        .capsules
        .iter()
        .find(|capsule| capsule.id == "capsule-idempotent")
        .unwrap();
    assert_eq!(promoted_gene.state, AssetState::Promoted);
    assert_eq!(promoted_capsule.state, AssetState::Promoted);

    // Each envelope event appears exactly once despite the double import.
    let events = store.scan(1).unwrap();
    assert_eq!(
        events
            .iter()
            .filter(|stored| {
                matches!(
                    &stored.event,
                    EvolutionEvent::MutationDeclared { mutation }
                        if mutation.intent.id == "mutation-idempotent"
                )
            })
            .count(),
        1
    );
    assert_eq!(
        events
            .iter()
            .filter(|stored| {
                matches!(
                    &stored.event,
                    EvolutionEvent::GeneProjected { gene } if gene.id == "gene-idempotent"
                )
            })
            .count(),
        1
    );
    assert_eq!(
        events
            .iter()
            .filter(|stored| {
                matches!(
                    &stored.event,
                    EvolutionEvent::CapsuleCommitted { capsule }
                        if capsule.id == "capsule-idempotent"
                )
            })
            .count(),
        1
    );

    // Sync audits reflect a full first pass and an all-skipped second pass.
    assert_eq!(first.sync_audit.scanned_count, envelope.assets.len());
    assert_eq!(first.sync_audit.failed_count, 0);
    assert_eq!(second.sync_audit.applied_count, 0);
    assert_eq!(second.sync_audit.skipped_count, envelope.assets.len());
    assert!(second.resume_token.is_some());
}
8797
8798 #[tokio::test]
8799 async fn insufficient_evu_blocks_publish_but_not_local_replay() {
8800 let (evo, _) = build_test_evo("stake-gate", "run-stake", command_validator());
8801 let capsule = evo
8802 .capture_successful_mutation(&"run-stake".into(), sample_mutation())
8803 .await
8804 .unwrap();
8805 let publish = evo.export_promoted_assets("node-local");
8806 assert!(matches!(publish, Err(EvoKernelError::Validation(_))));
8807
8808 let decision = evo
8809 .replay_or_fallback(replay_input("missing readme"))
8810 .await
8811 .unwrap();
8812 assert!(decision.used_capsule);
8813 assert_eq!(decision.capsule_id, Some(capsule.id));
8814 }
8815
/// Two consecutive replay validation failures must fall back to the planner
/// both times, record exactly one ValidationFailed event for the gene, and —
/// rather than revoking — leave the gene Promoted while decaying its replay
/// confidence below the threshold so later replays also fall back.
///
/// NOTE(review): the test name says "revokes gene immediately", but the body
/// asserts the gene stays Promoted and that NO GeneRevoked event exists. The
/// name looks stale relative to the asserted behavior — confirm and rename.
#[tokio::test]
async fn second_replay_validation_failure_revokes_gene_immediately() {
    // Capture a good capsule with a passing validator first.
    let (capturer, store) = build_test_evo("revoke-replay", "run-capture", command_validator());
    let capsule = capturer
        .capture_successful_mutation(&"run-capture".into(), sample_mutation())
        .await
        .unwrap();

    // Rebuild the kernel on the same store, but with a validator that always
    // fails, so every replay attempt fails validation.
    let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
    let failing_replay = build_test_evo_with_store(
        "revoke-replay",
        "run-replay-fail",
        failing_validator,
        store.clone(),
    );

    let first = failing_replay
        .replay_or_fallback(replay_input("missing readme"))
        .await
        .unwrap();
    let second = failing_replay
        .replay_or_fallback(replay_input("missing readme"))
        .await
        .unwrap();

    // Both failed replays fall back to the planner.
    assert!(!first.used_capsule);
    assert!(first.fallback_to_planner);
    assert!(!second.used_capsule);
    assert!(second.fallback_to_planner);

    // The gene and capsule remain Promoted — failures do not demote them.
    let projection = store.rebuild_projection().unwrap();
    let gene = projection
        .genes
        .iter()
        .find(|gene| gene.id == capsule.gene_id)
        .unwrap();
    assert_eq!(gene.state, AssetState::Promoted);
    let committed_capsule = projection
        .capsules
        .iter()
        .find(|current| current.id == capsule.id)
        .unwrap();
    assert_eq!(committed_capsule.state, AssetState::Promoted);

    // Exactly one ValidationFailed event for the gene, and no revocation.
    let events = store.scan(1).unwrap();
    assert_eq!(
        events
            .iter()
            .filter(|stored| {
                matches!(
                    &stored.event,
                    EvolutionEvent::ValidationFailed {
                        gene_id: Some(gene_id),
                        ..
                    } if gene_id == &capsule.gene_id
                )
            })
            .count(),
        1
    );
    assert!(!events.iter().any(|stored| {
        matches!(
            &stored.event,
            EvolutionEvent::GeneRevoked { gene_id, .. } if gene_id == &capsule.gene_id
        )
    }));

    // Even with a passing validator, the decayed confidence keeps the capsule
    // below the replay threshold, forcing planner fallback.
    let recovered = build_test_evo_with_store(
        "revoke-replay",
        "run-replay-check",
        command_validator(),
        store.clone(),
    );
    let after_revoke = recovered
        .replay_or_fallback(replay_input("missing readme"))
        .await
        .unwrap();
    assert!(!after_revoke.used_capsule);
    assert!(after_revoke.fallback_to_planner);
    assert!(after_revoke.reason.contains("below replay threshold"));
}
8897
/// A successful replay of a remote capsule must reward the winning publisher
/// with the reuse reward, and reputation records must bias selection toward
/// the higher-reputation node when two remote candidates share a signal.
#[tokio::test]
async fn remote_reuse_success_rewards_publisher_and_biases_selection() {
    // node-b has much stronger reputation metrics than node-a.
    let ledger = Arc::new(Mutex::new(EvuLedger {
        accounts: vec![],
        reputations: vec![
            oris_economics::ReputationRecord {
                node_id: "node-a".into(),
                publish_success_rate: 0.4,
                validator_accuracy: 0.4,
                reuse_impact: 0,
            },
            oris_economics::ReputationRecord {
                node_id: "node-b".into(),
                publish_success_rate: 0.95,
                validator_accuracy: 0.95,
                reuse_impact: 8,
            },
        ],
    }));
    let (evo, _) = build_test_evo("remote-success", "run-remote", command_validator());
    let evo = evo.with_economics(ledger.clone());

    // Both envelopes answer the same signal, so selection must discriminate
    // purely on publisher reputation.
    let envelope_a = remote_publish_envelope(
        "node-a",
        "run-remote-a",
        "gene-a",
        "capsule-a",
        "mutation-a",
        "shared-signal",
        "A.md",
        "# from a",
    );
    let envelope_b = remote_publish_envelope(
        "node-b",
        "run-remote-b",
        "gene-b",
        "capsule-b",
        "mutation-b",
        "shared-signal",
        "B.md",
        "# from b",
    );

    evo.import_remote_envelope(&envelope_a).unwrap();
    evo.import_remote_envelope(&envelope_b).unwrap();

    let decision = evo
        .replay_or_fallback(replay_input("shared-signal"))
        .await
        .unwrap();

    // The high-reputation publisher's capsule wins the replay.
    assert!(decision.used_capsule);
    assert_eq!(decision.capsule_id, Some("capsule-b".into()));
    // node-b is credited the reuse reward and keeps the larger selection bias.
    let locked = ledger.lock().unwrap();
    let rewarded = locked
        .accounts
        .iter()
        .find(|item| item.node_id == "node-b")
        .unwrap();
    assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
    assert!(
        locked.selector_reputation_bias()["node-b"]
            > locked.selector_reputation_bias()["node-a"]
    );
}
8963
/// When two publishers contribute capsules for the SAME gene, the reuse reward
/// must settle to the publisher of the capsule that was actually selected
/// (the one with the matching environment), not to the other contributor.
#[tokio::test]
async fn remote_reuse_settlement_tracks_selected_capsule_publisher_for_shared_gene() {
    let ledger = Arc::new(Mutex::new(EvuLedger::default()));
    let (evo, _) = build_test_evo(
        "remote-shared-publisher",
        "run-remote-shared-publisher",
        command_validator(),
    );
    let evo = evo.with_economics(ledger.clone());
    let input = replay_input("shared-signal");
    // node-a's capsule matches the local environment exactly.
    let preferred = remote_publish_envelope_with_env(
        "node-a",
        "run-remote-a",
        "gene-shared",
        "capsule-preferred",
        "mutation-preferred",
        "shared-signal",
        "A.md",
        "# from a",
        input.env.clone(),
    );
    // node-b's capsule targets the same gene but a mismatched environment.
    let fallback = remote_publish_envelope_with_env(
        "node-b",
        "run-remote-b",
        "gene-shared",
        "capsule-fallback",
        "mutation-fallback",
        "shared-signal",
        "B.md",
        "# from b",
        EnvFingerprint {
            rustc_version: "old-rustc".into(),
            cargo_lock_hash: "other-lock".into(),
            target_triple: "aarch64-apple-darwin".into(),
            os: "linux".into(),
        },
    );

    evo.import_remote_envelope(&preferred).unwrap();
    evo.import_remote_envelope(&fallback).unwrap();

    let decision = evo.replay_or_fallback(input).await.unwrap();

    // The environment-matched capsule wins...
    assert!(decision.used_capsule);
    assert_eq!(decision.capsule_id, Some("capsule-preferred".into()));
    // ...and only its publisher (node-a) receives the reuse reward.
    let locked = ledger.lock().unwrap();
    let rewarded = locked
        .accounts
        .iter()
        .find(|item| item.node_id == "node-a")
        .unwrap();
    assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
    assert!(locked.accounts.iter().all(|item| item.node_id != "node-b"));
}
9018
/// When two remote cold-start candidates answer the same signal, candidate
/// selection must surface only the top-ranked one — the candidate published
/// by the node with the stronger reputation record.
#[test]
fn select_candidates_surfaces_ranked_remote_cold_start_candidates() {
    // node-a: weak reputation; node-b: strong reputation.
    let low_trust = oris_economics::ReputationRecord {
        node_id: "node-a".into(),
        publish_success_rate: 0.4,
        validator_accuracy: 0.4,
        reuse_impact: 0,
    };
    let high_trust = oris_economics::ReputationRecord {
        node_id: "node-b".into(),
        publish_success_rate: 0.95,
        validator_accuracy: 0.95,
        reuse_impact: 8,
    };
    let ledger = Arc::new(Mutex::new(EvuLedger {
        accounts: vec![],
        reputations: vec![low_trust, high_trust],
    }));
    let (evo, _) = build_test_evo("remote-select", "run-remote-select", command_validator());
    let evo = evo.with_economics(ledger);

    // Both candidates answer the same signal; only reputation differs.
    for envelope in [
        remote_publish_envelope(
            "node-a",
            "run-remote-a",
            "gene-a",
            "capsule-a",
            "mutation-a",
            "shared-signal",
            "A.md",
            "# from a",
        ),
        remote_publish_envelope(
            "node-b",
            "run-remote-b",
            "gene-b",
            "capsule-b",
            "mutation-b",
            "shared-signal",
            "B.md",
            "# from b",
        ),
    ] {
        evo.import_remote_envelope(&envelope).unwrap();
    }

    let candidates = evo.select_candidates(&replay_input("shared-signal"));

    // Only the high-reputation publisher's candidate is surfaced.
    assert_eq!(candidates.len(), 1);
    assert_eq!(candidates[0].gene.id, "gene-b");
    assert_eq!(candidates[0].capsules[0].id, "capsule-b");
}
9071
/// Publisher attribution and reputation bias must be durable: after importing
/// two competing envelopes and rebuilding the kernel on the same on-disk
/// store, replay must still pick the high-reputation publisher's capsule and
/// settle the reuse reward to it.
#[tokio::test]
async fn remote_reuse_publisher_bias_survives_restart() {
    let ledger = Arc::new(Mutex::new(EvuLedger {
        accounts: vec![],
        reputations: vec![
            oris_economics::ReputationRecord {
                node_id: "node-a".into(),
                publish_success_rate: 0.4,
                validator_accuracy: 0.4,
                reuse_impact: 0,
            },
            oris_economics::ReputationRecord {
                node_id: "node-b".into(),
                publish_success_rate: 0.95,
                validator_accuracy: 0.95,
                reuse_impact: 8,
            },
        ],
    }));
    // Durable on-disk store so a second kernel can rebuild from it.
    let store_root = std::env::temp_dir().join(format!(
        "oris-evokernel-remote-restart-store-{}",
        next_id("t")
    ));
    if store_root.exists() {
        fs::remove_dir_all(&store_root).unwrap();
    }
    let store: Arc<dyn EvolutionStore> =
        Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
    let evo = build_test_evo_with_store(
        "remote-success-restart-source",
        "run-remote-restart-source",
        command_validator(),
        store.clone(),
    )
    .with_economics(ledger.clone());

    let envelope_a = remote_publish_envelope(
        "node-a",
        "run-remote-a",
        "gene-a",
        "capsule-a",
        "mutation-a",
        "shared-signal",
        "A.md",
        "# from a",
    );
    let envelope_b = remote_publish_envelope(
        "node-b",
        "run-remote-b",
        "gene-b",
        "capsule-b",
        "mutation-b",
        "shared-signal",
        "B.md",
        "# from b",
    );

    evo.import_remote_envelope(&envelope_a).unwrap();
    evo.import_remote_envelope(&envelope_b).unwrap();

    // Simulated restart: a fresh kernel over the same store and ledger.
    let recovered = build_test_evo_with_store(
        "remote-success-restart-recovered",
        "run-remote-restart-recovered",
        command_validator(),
        store.clone(),
    )
    .with_economics(ledger.clone());

    let decision = recovered
        .replay_or_fallback(replay_input("shared-signal"))
        .await
        .unwrap();

    // Bias and attribution survived the restart.
    assert!(decision.used_capsule);
    assert_eq!(decision.capsule_id, Some("capsule-b".into()));
    let locked = ledger.lock().unwrap();
    let rewarded = locked
        .accounts
        .iter()
        .find(|item| item.node_id == "node-b")
        .unwrap();
    assert_eq!(rewarded.balance, recovered.stake_policy.reuse_reward);
}
9155
9156 #[tokio::test]
9157 async fn remote_reuse_failure_penalizes_remote_reputation() {
9158 let ledger = Arc::new(Mutex::new(EvuLedger::default()));
9159 let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
9160 let (evo, _) = build_test_evo("remote-failure", "run-failure", failing_validator);
9161 let evo = evo.with_economics(ledger.clone());
9162
9163 let envelope = remote_publish_envelope(
9164 "node-remote",
9165 "run-remote-failed",
9166 "gene-remote",
9167 "capsule-remote",
9168 "mutation-remote",
9169 "failure-signal",
9170 "FAILED.md",
9171 "# from remote",
9172 );
9173 evo.import_remote_envelope(&envelope).unwrap();
9174
9175 let decision = evo
9176 .replay_or_fallback(replay_input("failure-signal"))
9177 .await
9178 .unwrap();
9179
9180 assert!(!decision.used_capsule);
9181 assert!(decision.fallback_to_planner);
9182
9183 let signal = evo.economics_signal("node-remote").unwrap();
9184 assert_eq!(signal.available_evu, 0);
9185 assert!(signal.publish_success_rate < 0.5);
9186 assert!(signal.validator_accuracy < 0.5);
9187 }
9188
9189 #[test]
9190 fn ensure_builtin_experience_assets_is_idempotent_and_fetchable() {
9191 let store_root = std::env::temp_dir().join(format!(
9192 "oris-evokernel-builtin-experience-store-{}",
9193 next_id("t")
9194 ));
9195 if store_root.exists() {
9196 fs::remove_dir_all(&store_root).unwrap();
9197 }
9198 let store: Arc<dyn EvolutionStore> =
9199 Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
9200 let node = EvolutionNetworkNode::new(store.clone());
9201
9202 let first = node
9203 .ensure_builtin_experience_assets("runtime-bootstrap")
9204 .unwrap();
9205 assert!(!first.imported_asset_ids.is_empty());
9206
9207 let second = node
9208 .ensure_builtin_experience_assets("runtime-bootstrap")
9209 .unwrap();
9210 assert!(second.imported_asset_ids.is_empty());
9211
9212 let fetch = node
9213 .fetch_assets(
9214 "execution-api",
9215 &FetchQuery {
9216 sender_id: "compat-agent".into(),
9217 signals: vec!["error".into()],
9218 since_cursor: None,
9219 resume_token: None,
9220 },
9221 )
9222 .unwrap();
9223
9224 let mut has_builtin_evomap = false;
9225 for asset in fetch.assets {
9226 if let NetworkAsset::Gene { gene } = asset {
9227 if strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
9228 == Some("builtin_evomap")
9229 && gene.state == AssetState::Promoted
9230 {
9231 has_builtin_evomap = true;
9232 break;
9233 }
9234 }
9235 }
9236 assert!(has_builtin_evomap);
9237 }
9238
9239 #[test]
9240 fn reported_experience_retention_keeps_latest_three_and_preserves_builtin_assets() {
9241 let store_root = std::env::temp_dir().join(format!(
9242 "oris-evokernel-reported-retention-store-{}",
9243 next_id("t")
9244 ));
9245 if store_root.exists() {
9246 fs::remove_dir_all(&store_root).unwrap();
9247 }
9248 let store: Arc<dyn EvolutionStore> =
9249 Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
9250 let node = EvolutionNetworkNode::new(store.clone());
9251
9252 node.ensure_builtin_experience_assets("runtime-bootstrap")
9253 .unwrap();
9254
9255 for idx in 0..4 {
9256 node.record_reported_experience(
9257 "reporter-a",
9258 format!("reported-docs-rewrite-v{}", idx + 1),
9259 vec!["docs.rewrite".into(), format!("task-{}", idx + 1)],
9260 vec![
9261 "task_class=docs.rewrite".into(),
9262 format!("task_label=Docs rewrite v{}", idx + 1),
9263 format!("summary=reported replay {}", idx + 1),
9264 ],
9265 vec!["a2a.tasks.report".into()],
9266 )
9267 .unwrap();
9268 }
9269
9270 let (_, projection) = store.scan_projection().unwrap();
9271 let reported_promoted = projection
9272 .genes
9273 .iter()
9274 .filter(|gene| {
9275 gene.state == AssetState::Promoted
9276 && strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
9277 == Some("reported_experience")
9278 && strategy_metadata_value(&gene.strategy, "task_class").as_deref()
9279 == Some("docs.rewrite")
9280 })
9281 .count();
9282 let reported_revoked = projection
9283 .genes
9284 .iter()
9285 .filter(|gene| {
9286 gene.state == AssetState::Revoked
9287 && strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
9288 == Some("reported_experience")
9289 && strategy_metadata_value(&gene.strategy, "task_class").as_deref()
9290 == Some("docs.rewrite")
9291 })
9292 .count();
9293 let builtin_promoted = projection
9294 .genes
9295 .iter()
9296 .filter(|gene| {
9297 gene.state == AssetState::Promoted
9298 && matches!(
9299 strategy_metadata_value(&gene.strategy, "asset_origin").as_deref(),
9300 Some("builtin") | Some("builtin_evomap")
9301 )
9302 })
9303 .count();
9304
9305 assert_eq!(reported_promoted, 3);
9306 assert_eq!(reported_revoked, 1);
9307 assert!(builtin_promoted >= 1);
9308
9309 let fetch = node
9310 .fetch_assets(
9311 "execution-api",
9312 &FetchQuery {
9313 sender_id: "consumer-b".into(),
9314 signals: vec!["docs.rewrite".into()],
9315 since_cursor: None,
9316 resume_token: None,
9317 },
9318 )
9319 .unwrap();
9320 let docs_genes = fetch
9321 .assets
9322 .into_iter()
9323 .filter_map(|asset| match asset {
9324 NetworkAsset::Gene { gene } => Some(gene),
9325 _ => None,
9326 })
9327 .filter(|gene| {
9328 strategy_metadata_value(&gene.strategy, "task_class").as_deref()
9329 == Some("docs.rewrite")
9330 })
9331 .collect::<Vec<_>>();
9332 assert!(docs_genes.len() >= 3);
9333 }
9334}