1use std::collections::{BTreeMap, BTreeSet};
4use std::fs;
5use std::path::{Path, PathBuf};
6use std::process::Command;
7use std::sync::{Arc, Mutex};
8
9use async_trait::async_trait;
10use chrono::{DateTime, Duration, Utc};
11use oris_agent_contract::{
12 infer_mutation_needed_failure_reason_code, infer_replay_fallback_reason_code,
13 normalize_mutation_needed_failure_contract, normalize_replay_fallback_contract, AgentRole,
14 BoundedTaskClass, CoordinationMessage, CoordinationPlan, CoordinationPrimitive,
15 CoordinationResult, CoordinationTask, ExecutionFeedback, MutationNeededFailureContract,
16 MutationNeededFailureReasonCode, MutationProposal as AgentMutationProposal, ReplayFeedback,
17 ReplayPlannerDirective, SupervisedDevloopOutcome, SupervisedDevloopRequest,
18 SupervisedDevloopStatus,
19};
20use oris_economics::{EconomicsSignal, EvuLedger, StakePolicy};
21use oris_evolution::{
22 compute_artifact_hash, decayed_replay_confidence, next_id, stable_hash_json, AssetState,
23 BlastRadius, CandidateSource, Capsule, CapsuleId, EnvFingerprint, EvolutionError,
24 EvolutionEvent, EvolutionProjection, EvolutionStore, Gene, GeneCandidate, MutationId,
25 PreparedMutation, ReplayRoiEvidence, ReplayRoiReasonCode, Selector, SelectorInput,
26 StoreBackedSelector, StoredEvolutionEvent, TransitionEvidence, TransitionReasonCode,
27 ValidationSnapshot, MIN_REPLAY_CONFIDENCE,
28};
29use oris_evolution_network::{EvolutionEnvelope, NetworkAsset, SyncAudit};
30use oris_governor::{DefaultGovernor, Governor, GovernorDecision, GovernorInput};
31use oris_kernel::{Kernel, KernelState, RunId};
32use oris_sandbox::{
33 compute_blast_radius, execute_allowed_command, Sandbox, SandboxPolicy, SandboxReceipt,
34};
35use oris_spec::CompiledMutationPlan;
36use serde::{Deserialize, Serialize};
37use serde_json::Value;
38use thiserror::Error;
39
40pub use oris_evolution::{
41 default_store_root, ArtifactEncoding, AssetState as EvoAssetState,
42 BlastRadius as EvoBlastRadius, CandidateSource as EvoCandidateSource,
43 EnvFingerprint as EvoEnvFingerprint, EvolutionStore as EvoEvolutionStore, JsonlEvolutionStore,
44 MutationArtifact, MutationIntent, MutationTarget, Outcome, RiskLevel,
45 SelectorInput as EvoSelectorInput, TransitionReasonCode as EvoTransitionReasonCode,
46};
47pub use oris_evolution_network::{
48 FetchQuery, FetchResponse, MessageType, PublishRequest, RevokeNotice,
49};
50pub use oris_governor::{CoolingWindow, GovernorConfig, RevocationReason};
51pub use oris_sandbox::{LocalProcessSandbox, SandboxPolicy as EvoSandboxPolicy};
52pub use oris_spec::{SpecCompileError, SpecCompiler, SpecDocument};
53
/// An ordered set of validation stages grouped under a named profile.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationPlan {
    /// Profile identifier (e.g. "oris-default") recorded into snapshots.
    pub profile: String,
    /// Stages executed in order; `CommandValidator` stops at the first failure.
    pub stages: Vec<ValidationStage>,
}
59
60impl ValidationPlan {
61 pub fn oris_default() -> Self {
62 Self {
63 profile: "oris-default".into(),
64 stages: vec![
65 ValidationStage::Command {
66 program: "cargo".into(),
67 args: vec!["fmt".into(), "--all".into(), "--check".into()],
68 timeout_ms: 60_000,
69 },
70 ValidationStage::Command {
71 program: "cargo".into(),
72 args: vec!["check".into(), "--workspace".into()],
73 timeout_ms: 180_000,
74 },
75 ValidationStage::Command {
76 program: "cargo".into(),
77 args: vec![
78 "test".into(),
79 "-p".into(),
80 "oris-kernel".into(),
81 "-p".into(),
82 "oris-evolution".into(),
83 "-p".into(),
84 "oris-sandbox".into(),
85 "-p".into(),
86 "oris-evokernel".into(),
87 "--lib".into(),
88 ],
89 timeout_ms: 300_000,
90 },
91 ValidationStage::Command {
92 program: "cargo".into(),
93 args: vec![
94 "test".into(),
95 "-p".into(),
96 "oris-runtime".into(),
97 "--lib".into(),
98 ],
99 timeout_ms: 300_000,
100 },
101 ],
102 }
103 }
104}
105
/// A single validation step. Currently the only stage kind is running an
/// external command.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum ValidationStage {
    /// Run `program` with `args`, bounded by `timeout_ms`.
    Command {
        program: String,
        args: Vec<String>,
        timeout_ms: u64,
    },
}
114
/// Outcome of one executed validation stage.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationStageReport {
    /// Human-readable stage label (program plus its arguments).
    pub stage: String,
    pub success: bool,
    /// Process exit code; `None` when the command could not be launched.
    pub exit_code: Option<i32>,
    pub duration_ms: u64,
    pub stdout: String,
    pub stderr: String,
}
124
/// Aggregate result of running a [`ValidationPlan`].
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationReport {
    /// True only if every executed stage succeeded.
    pub success: bool,
    /// Wall-clock duration of the whole run.
    pub duration_ms: u64,
    pub stages: Vec<ValidationStageReport>,
    /// Concatenated stdout/stderr of the executed stages, newline separated.
    pub logs: String,
}
132
/// Inputs from which deterministic replay signals are extracted.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionInput {
    pub patch_diff: String,
    pub intent: String,
    pub expected_effect: String,
    /// Signals explicitly declared by the caller; normalized and tokenized.
    pub declared_signals: Vec<String>,
    pub changed_files: Vec<String>,
    /// Overall validation verdict, folded into the signal set.
    pub validation_success: bool,
    pub validation_logs: String,
    pub stage_outputs: Vec<String>,
}
144
/// Deterministic signal set plus a stable hash over it.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionOutput {
    /// Sorted, deduplicated signal phrases (capped at 32 entries).
    pub values: Vec<String>,
    /// Stable hash of `values`, usable as an identity/cache key.
    pub hash: String,
}
150
/// Template for seeding the evolution store with an initial gene/capsule
/// (presumably consumed by the bootstrap path that produces `BootstrapReport`
/// — confirm against callers).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SeedTemplate {
    pub id: String,
    pub intent: String,
    pub signals: Vec<String>,
    pub diff_payload: String,
    pub validation_profile: String,
}
159
/// Summary of a bootstrap/seeding pass over the evolution store.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct BootstrapReport {
    /// Whether any seeding actually happened.
    pub seeded: bool,
    pub genes_added: usize,
    pub capsules_added: usize,
}
166
// Heuristic tuning constants. Most usage sites are elsewhere in this crate;
// comments restate intent from the names — verify against callers.
const REPORTED_EXPERIENCE_RETENTION_LIMIT: usize = 3;
// Shadow -> promoted gate: minimum observed replay attempts plus quality floors.
const SHADOW_PROMOTION_MIN_REPLAY_ATTEMPTS: u64 = 2;
const SHADOW_PROMOTION_MIN_SUCCESS_RATE: f32 = 0.70;
const SHADOW_PROMOTION_MIN_ENV_MATCH: f32 = 0.75;
// Promotion never accepts confidence below the crate-wide replay floor.
const SHADOW_PROMOTION_MIN_DECAYED_CONFIDENCE: f32 = MIN_REPLAY_CONFIDENCE;
// Reasoning-token accounting for replay ROI: base floor plus per-signal weight.
const REPLAY_REASONING_TOKEN_FLOOR: u64 = 192;
const REPLAY_REASONING_TOKEN_SIGNAL_WEIGHT: u64 = 24;
// Score penalty applied to candidates found via exact-match (cold-start)
// lookup; see `StoreReplayExecutor::build_select_evidence`.
const COLD_START_LOOKUP_PENALTY: f32 = 0.05;
// Hard ceilings for accepting a "mutation needed" failure contract.
const MUTATION_NEEDED_MAX_DIFF_BYTES: usize = 128 * 1024;
const MUTATION_NEEDED_MAX_CHANGED_LINES: usize = 600;
const MUTATION_NEEDED_MAX_SANDBOX_DURATION_MS: u64 = 120_000;
const MUTATION_NEEDED_MAX_VALIDATION_BUDGET_MS: u64 = 900_000;
/// Dimensions along which replay ROI metrics are aggregated for release gating.
pub const REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS: [&str; 2] =
    ["task_class", "source_sender_id"];
181
/// Result of scoring a repair plan against the repair quality gate
/// (see [`evaluate_repair_quality_gate`]).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct RepairQualityGateReport {
    /// Plan contains a root-cause analysis section.
    pub root_cause: bool,
    /// Plan contains concrete fix/remediation steps.
    pub fix: bool,
    /// Plan contains verification steps or commands.
    pub verification: bool,
    /// Plan contains a rollback/recovery section.
    pub rollback: bool,
    /// Plan references the known incident context (e.g. "unknown command").
    pub incident_anchor: bool,
    /// How many of root_cause/fix/verification/rollback were found (0..=4).
    pub structure_score: usize,
    /// Plan mentions a runnable tool command (cargo/git/python/...).
    pub has_actionable_command: bool,
}
192
193impl RepairQualityGateReport {
194 pub fn passes(&self) -> bool {
195 self.incident_anchor
196 && self.structure_score >= 3
197 && (self.has_actionable_command || self.verification)
198 }
199
200 pub fn failed_checks(&self) -> Vec<String> {
201 let mut failed = Vec::new();
202 if !self.incident_anchor {
203 failed.push("包含unknown command故障上下文".to_string());
204 }
205 if self.structure_score < 3 {
206 failed.push("结构化修复信息至少满足3项(根因/修复/验证/回滚)".to_string());
207 }
208 if !(self.has_actionable_command || self.verification) {
209 failed.push("包含可执行验证命令或验证计划".to_string());
210 }
211 failed
212 }
213}
214
215pub fn evaluate_repair_quality_gate(plan: &str) -> RepairQualityGateReport {
216 fn contains_any(haystack: &str, needles: &[&str]) -> bool {
217 needles.iter().any(|needle| haystack.contains(needle))
218 }
219
220 let lower = plan.to_ascii_lowercase();
221 let root_cause = contains_any(
222 plan,
223 &["根因", "原因分析", "问题定位", "原因定位", "根本原因"],
224 ) || contains_any(
225 &lower,
226 &[
227 "root cause",
228 "cause analysis",
229 "problem diagnosis",
230 "diagnosis",
231 ],
232 );
233 let fix = contains_any(
234 plan,
235 &["修复步骤", "修复方案", "处理步骤", "修复建议", "整改方案"],
236 ) || contains_any(
237 &lower,
238 &[
239 "fix",
240 "remediation",
241 "mitigation",
242 "resolution",
243 "repair steps",
244 ],
245 );
246 let verification = contains_any(
247 plan,
248 &["验证命令", "验证步骤", "回归测试", "验证方式", "验收步骤"],
249 ) || contains_any(
250 &lower,
251 &[
252 "verification",
253 "validate",
254 "regression test",
255 "smoke test",
256 "test command",
257 ],
258 );
259 let rollback = contains_any(plan, &["回滚方案", "回滚步骤", "恢复方案", "撤销方案"])
260 || contains_any(&lower, &["rollback", "revert", "fallback plan", "undo"]);
261 let incident_anchor = contains_any(
262 &lower,
263 &[
264 "unknown command",
265 "process",
266 "proccess",
267 "command not found",
268 ],
269 ) || contains_any(plan, &["命令不存在", "命令未找到", "未知命令"]);
270 let structure_score = [root_cause, fix, verification, rollback]
271 .into_iter()
272 .filter(|ok| *ok)
273 .count();
274 let has_actionable_command = contains_any(
275 &lower,
276 &[
277 "cargo ", "git ", "python ", "pip ", "npm ", "pnpm ", "yarn ", "bash ", "make ",
278 ],
279 );
280
281 RepairQualityGateReport {
282 root_cause,
283 fix,
284 verification,
285 rollback,
286 incident_anchor,
287 structure_score,
288 has_actionable_command,
289 }
290}
291
292impl ValidationReport {
293 pub fn to_snapshot(&self, profile: &str) -> ValidationSnapshot {
294 ValidationSnapshot {
295 success: self.success,
296 profile: profile.to_string(),
297 duration_ms: self.duration_ms,
298 summary: if self.success {
299 "validation passed".into()
300 } else {
301 "validation failed".into()
302 },
303 }
304 }
305}
306
307pub fn extract_deterministic_signals(input: &SignalExtractionInput) -> SignalExtractionOutput {
308 let mut signals = BTreeSet::new();
309
310 for declared in &input.declared_signals {
311 if let Some(phrase) = normalize_signal_phrase(declared) {
312 signals.insert(phrase);
313 }
314 extend_signal_tokens(&mut signals, declared);
315 }
316
317 for text in [
318 input.patch_diff.as_str(),
319 input.intent.as_str(),
320 input.expected_effect.as_str(),
321 input.validation_logs.as_str(),
322 ] {
323 extend_signal_tokens(&mut signals, text);
324 }
325
326 for changed_file in &input.changed_files {
327 extend_signal_tokens(&mut signals, changed_file);
328 }
329
330 for stage_output in &input.stage_outputs {
331 extend_signal_tokens(&mut signals, stage_output);
332 }
333
334 signals.insert(if input.validation_success {
335 "validation passed".into()
336 } else {
337 "validation failed".into()
338 });
339
340 let values = signals.into_iter().take(32).collect::<Vec<_>>();
341 let hash =
342 stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
343 SignalExtractionOutput { values, hash }
344}
345
/// Errors surfaced while executing a validation plan.
#[derive(Debug, Error)]
pub enum ValidationError {
    /// The validation run itself could not be executed.
    #[error("validation execution failed: {0}")]
    Execution(String),
}
351
/// Executes a [`ValidationPlan`] against a sandboxed working tree.
#[async_trait]
pub trait Validator: Send + Sync {
    /// Runs `plan` in the workdir recorded by `receipt`.
    ///
    /// Individual stage failures are reported inside the `ValidationReport`;
    /// an `Err` indicates the run itself could not be carried out.
    async fn run(
        &self,
        receipt: &SandboxReceipt,
        plan: &ValidationPlan,
    ) -> Result<ValidationReport, ValidationError>;
}
360
/// [`Validator`] implementation that runs stages through the sandbox
/// policy's allowed-command execution path.
pub struct CommandValidator {
    // Sandbox policy governing which commands may be executed.
    policy: SandboxPolicy,
}
364
impl CommandValidator {
    /// Creates a validator that executes stage commands under `policy`.
    pub fn new(policy: SandboxPolicy) -> Self {
        Self { policy }
    }
}
370
371#[async_trait]
372impl Validator for CommandValidator {
373 async fn run(
374 &self,
375 receipt: &SandboxReceipt,
376 plan: &ValidationPlan,
377 ) -> Result<ValidationReport, ValidationError> {
378 let started = std::time::Instant::now();
379 let mut stages = Vec::new();
380 let mut success = true;
381 let mut logs = String::new();
382
383 for stage in &plan.stages {
384 match stage {
385 ValidationStage::Command {
386 program,
387 args,
388 timeout_ms,
389 } => {
390 let result = execute_allowed_command(
391 &self.policy,
392 &receipt.workdir,
393 program,
394 args,
395 *timeout_ms,
396 )
397 .await;
398 let report = match result {
399 Ok(output) => ValidationStageReport {
400 stage: format!("{program} {}", args.join(" ")),
401 success: output.success,
402 exit_code: output.exit_code,
403 duration_ms: output.duration_ms,
404 stdout: output.stdout,
405 stderr: output.stderr,
406 },
407 Err(err) => ValidationStageReport {
408 stage: format!("{program} {}", args.join(" ")),
409 success: false,
410 exit_code: None,
411 duration_ms: 0,
412 stdout: String::new(),
413 stderr: err.to_string(),
414 },
415 };
416 if !report.success {
417 success = false;
418 }
419 if !report.stdout.is_empty() {
420 logs.push_str(&report.stdout);
421 logs.push('\n');
422 }
423 if !report.stderr.is_empty() {
424 logs.push_str(&report.stderr);
425 logs.push('\n');
426 }
427 stages.push(report);
428 if !success {
429 break;
430 }
431 }
432 }
433 }
434
435 Ok(ValidationReport {
436 success,
437 duration_ms: started.elapsed().as_millis() as u64,
438 stages,
439 logs,
440 })
441 }
442}
443
/// Evidence from the replay "detect" phase: which task class matched and why.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct ReplayDetectEvidence {
    pub task_class_id: String,
    pub task_label: String,
    /// Signals that matched the task class.
    pub matched_signals: Vec<String>,
    /// Reasons the class was considered an imperfect match.
    pub mismatch_reasons: Vec<String>,
}
451
/// Per-candidate scoring row recorded in replay selection evidence.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayCandidateEvidence {
    /// 1-based rank within the candidate list.
    pub rank: usize,
    pub gene_id: String,
    /// Id of the candidate gene's top capsule, if it has one.
    pub capsule_id: Option<String>,
    /// Raw selector match score, before any penalty.
    pub match_quality: f32,
    pub confidence: Option<f32>,
    pub environment_match_factor: Option<f32>,
    /// Penalty applied when the candidate came from exact-match lookup.
    pub cold_start_penalty: f32,
    /// `match_quality * (1 - cold_start_penalty)`.
    pub final_score: f32,
}
463
/// Evidence from the replay "select" phase.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplaySelectEvidence {
    /// True when candidates came from exact-match (cold-start) lookup rather
    /// than the ranked selector.
    pub exact_match_lookup: bool,
    pub selected_gene_id: Option<String>,
    pub selected_capsule_id: Option<String>,
    pub candidates: Vec<ReplayCandidateEvidence>,
}
471
/// Final outcome of a replay attempt, with full audit evidence.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayDecision {
    /// Whether a stored capsule was actually applied.
    pub used_capsule: bool,
    pub capsule_id: Option<CapsuleId>,
    /// True when replay was not viable and the planner must handle the task.
    pub fallback_to_planner: bool,
    pub reason: String,
    pub detect_evidence: ReplayDetectEvidence,
    pub select_evidence: ReplaySelectEvidence,
    pub economics_evidence: ReplayRoiEvidence,
}
482
/// Replay ROI counters aggregated per task class.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayTaskClassMetrics {
    pub task_class_id: String,
    pub task_label: String,
    pub replay_success_total: u64,
    pub replay_failure_total: u64,
    pub reasoning_steps_avoided_total: u64,
    pub reasoning_avoided_tokens_total: u64,
    pub replay_fallback_cost_total: u64,
    /// Net return-on-investment for replaying this task class.
    pub replay_roi: f64,
}
494
/// Replay ROI counters aggregated per originating sender.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplaySourceRoiMetrics {
    pub source_sender_id: String,
    pub replay_success_total: u64,
    pub replay_failure_total: u64,
    pub reasoning_avoided_tokens_total: u64,
    pub replay_fallback_cost_total: u64,
    pub replay_roi: f64,
}
504
/// Windowed replay ROI summary, with per-task-class and per-source breakdowns
/// (matching `REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiWindowSummary {
    /// Timestamp at which the summary was generated.
    pub generated_at: String,
    /// Size of the observation window, in seconds.
    pub window_seconds: u64,
    pub replay_attempts_total: u64,
    pub replay_success_total: u64,
    pub replay_failure_total: u64,
    pub reasoning_avoided_tokens_total: u64,
    pub replay_fallback_cost_total: u64,
    pub replay_roi: f64,
    pub replay_task_classes: Vec<ReplayTaskClassMetrics>,
    pub replay_sources: Vec<ReplaySourceRoiMetrics>,
}
518
/// Minimum-quality thresholds a replay ROI window must satisfy to pass the
/// release gate.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiReleaseGateThresholds {
    pub min_replay_attempts: u64,
    pub min_replay_hit_rate: f64,
    pub max_false_replay_rate: f64,
    pub min_reasoning_avoided_tokens: u64,
    pub min_replay_roi: f64,
    /// When true, the replay safety signal must also hold.
    pub require_replay_safety: bool,
}
528
impl Default for ReplayRoiReleaseGateThresholds {
    /// Conservative defaults: at least 3 attempts, 60% hit rate, at most 25%
    /// false replays, token floor tied to `REPLAY_REASONING_TOKEN_FLOOR`, a
    /// small positive ROI, and mandatory safety signals.
    fn default() -> Self {
        Self {
            min_replay_attempts: 3,
            min_replay_hit_rate: 0.60,
            max_false_replay_rate: 0.25,
            min_reasoning_avoided_tokens: REPLAY_REASONING_TOKEN_FLOOR,
            min_replay_roi: 0.05,
            require_replay_safety: true,
        }
    }
}
541
/// Action the release gate takes on a policy trigger. Only blocking is
/// currently supported (fail-closed by construction).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum ReplayRoiReleaseGateAction {
    BlockRelease,
}
547
/// Per-condition actions making the release gate fail-closed: each failure
/// mode maps to an explicit action.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct ReplayRoiReleaseGateFailClosedPolicy {
    pub on_threshold_violation: ReplayRoiReleaseGateAction,
    pub on_missing_metrics: ReplayRoiReleaseGateAction,
    pub on_invalid_metrics: ReplayRoiReleaseGateAction,
}
554
impl Default for ReplayRoiReleaseGateFailClosedPolicy {
    /// Every failure mode blocks the release by default.
    fn default() -> Self {
        Self {
            on_threshold_violation: ReplayRoiReleaseGateAction::BlockRelease,
            on_missing_metrics: ReplayRoiReleaseGateAction::BlockRelease,
            on_invalid_metrics: ReplayRoiReleaseGateAction::BlockRelease,
        }
    }
}
564
/// Boolean safety attestations accompanying a release-gate evaluation.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct ReplayRoiReleaseGateSafetySignal {
    /// The deployed configuration defaults to fail-closed behavior.
    pub fail_closed_default: bool,
    /// A rollback path is prepared for the release.
    pub rollback_ready: bool,
    /// The replay audit trail is complete for the window.
    pub audit_trail_complete: bool,
    /// The window actually contains replay activity.
    pub has_replay_activity: bool,
}
572
/// Full input payload to the replay ROI release gate: windowed metrics, the
/// safety attestation, the thresholds to apply, and the fail-closed policy.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiReleaseGateInputContract {
    pub generated_at: String,
    pub window_seconds: u64,
    /// Expected to mirror `REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS`.
    pub aggregation_dimensions: Vec<String>,
    pub replay_attempts_total: u64,
    pub replay_success_total: u64,
    pub replay_failure_total: u64,
    pub replay_hit_rate: f64,
    pub false_replay_rate: f64,
    pub reasoning_avoided_tokens: u64,
    pub replay_fallback_cost_total: u64,
    pub replay_roi: f64,
    /// Overall safety verdict; the detailed signal follows.
    pub replay_safety: bool,
    pub replay_safety_signal: ReplayRoiReleaseGateSafetySignal,
    pub thresholds: ReplayRoiReleaseGateThresholds,
    pub fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy,
}
591
/// Verdict of a release-gate evaluation.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum ReplayRoiReleaseGateStatus {
    /// All thresholds and safety checks satisfied.
    Pass,
    /// A check failed; the fail-closed policy blocks the release.
    FailClosed,
    /// The gate could not reach a verdict (e.g. unusable metrics).
    Indeterminate,
}
599
/// Output of a release-gate evaluation: the verdict plus its justification.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiReleaseGateOutputContract {
    pub status: ReplayRoiReleaseGateStatus,
    /// Names of the individual checks that failed.
    pub failed_checks: Vec<String>,
    /// References to supporting evidence records.
    pub evidence_refs: Vec<String>,
    pub summary: String,
}
607
/// Complete, auditable record of one release-gate run: what went in and what
/// came out.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct ReplayRoiReleaseGateContract {
    pub input: ReplayRoiReleaseGateInputContract,
    pub output: ReplayRoiReleaseGateOutputContract,
}
613
/// Internal scheduling state of a coordination task, as computed by
/// `MultiAgentCoordinator::classify_task`.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum CoordinationTaskState {
    /// Prerequisites satisfied; the task may run now.
    Ready,
    /// Has dependencies that are not yet complete (but not failed).
    Waiting,
    /// A dependency failed or was skipped, so this task cannot proceed.
    BlockedByFailure,
    /// Prerequisites can never be satisfied (e.g. unknown dependency id or
    /// role-specific preconditions unmet).
    PermanentlyBlocked,
}
621
/// Stateless orchestrator that executes a `CoordinationPlan` synchronously.
#[derive(Clone, Debug, Default)]
pub struct MultiAgentCoordinator;
624
impl MultiAgentCoordinator {
    /// Creates a coordinator; the type carries no state.
    pub fn new() -> Self {
        Self
    }

    /// Drives `plan` to quiescence and returns which tasks completed, which
    /// failed, and the coordination messages produced along the way.
    ///
    /// Scheduling loops until no task is ready. Conditional plans first skip
    /// tasks blocked by failed dependencies; sequential plans run at most one
    /// ready task per round. Task failures are simulated from description
    /// markers (see `simulate_task_failure`) and retried up to `max_retries`.
    /// Tasks still pending when the loop exits are classified and marked
    /// failed.
    pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
        let primitive = plan.primitive.clone();
        let root_goal = plan.root_goal.clone();
        let timeout_ms = plan.timeout_ms;
        let max_retries = plan.max_retries;
        let mut tasks = BTreeMap::new();
        for task in plan.tasks {
            // First occurrence of a task id wins; later duplicates are dropped.
            tasks.entry(task.id.clone()).or_insert(task);
        }

        let mut pending = tasks.keys().cloned().collect::<BTreeSet<_>>();
        let mut completed = BTreeSet::new();
        let mut failed = BTreeSet::new();
        // *_order vectors preserve completion/failure order for the result;
        // the sets provide fast membership checks.
        let mut completed_order = Vec::new();
        let mut failed_order = Vec::new();
        let mut skipped = BTreeSet::new();
        // task id -> number of failed attempts so far.
        let mut attempts = BTreeMap::new();
        let mut messages = Vec::new();

        loop {
            if matches!(primitive, CoordinationPrimitive::Conditional) {
                self.apply_conditional_skips(
                    &tasks,
                    &mut pending,
                    &completed,
                    &failed,
                    &mut skipped,
                    &mut messages,
                );
            }

            let mut ready = self.ready_task_ids(&tasks, &pending, &completed, &failed, &skipped);
            if ready.is_empty() {
                break;
            }
            if matches!(primitive, CoordinationPrimitive::Sequential) {
                // Sequential coordination runs at most one task per round.
                ready.truncate(1);
            }

            for task_id in ready {
                let Some(task) = tasks.get(&task_id) else {
                    continue;
                };
                if !pending.contains(&task_id) {
                    continue;
                }
                self.record_handoff_messages(task, &tasks, &completed, &failed, &mut messages);

                let prior_failures = attempts.get(&task_id).copied().unwrap_or(0);
                if Self::simulate_task_failure(task, prior_failures) {
                    let failure_count = prior_failures + 1;
                    attempts.insert(task_id.clone(), failure_count);
                    let will_retry = failure_count <= max_retries;
                    messages.push(CoordinationMessage {
                        from_role: task.role.clone(),
                        to_role: task.role.clone(),
                        task_id: task_id.clone(),
                        content: if will_retry {
                            format!("task {task_id} failed on attempt {failure_count} and will retry")
                        } else {
                            format!(
                                "task {task_id} failed on attempt {failure_count} and exhausted retries"
                            )
                        },
                    });
                    if !will_retry {
                        // Retries exhausted: move the task to the failed set.
                        pending.remove(&task_id);
                        if failed.insert(task_id.clone()) {
                            failed_order.push(task_id);
                        }
                    }
                    continue;
                }

                pending.remove(&task_id);
                if completed.insert(task_id.clone()) {
                    completed_order.push(task_id);
                }
            }
        }

        // Anything still pending can never run; report why and mark it failed.
        let blocked_ids = pending.into_iter().collect::<Vec<_>>();
        for task_id in blocked_ids {
            let Some(task) = tasks.get(&task_id) else {
                continue;
            };
            let state = self.classify_task(task, &tasks, &completed, &failed, &skipped);
            let content = match state {
                CoordinationTaskState::BlockedByFailure => {
                    format!("task {task_id} blocked by failed dependencies")
                }
                CoordinationTaskState::PermanentlyBlocked => {
                    format!("task {task_id} has invalid coordination prerequisites")
                }
                CoordinationTaskState::Waiting => {
                    format!("task {task_id} has unresolved dependencies")
                }
                CoordinationTaskState::Ready => {
                    format!("task {task_id} was left pending unexpectedly")
                }
            };
            messages.push(CoordinationMessage {
                from_role: task.role.clone(),
                to_role: task.role.clone(),
                task_id: task_id.clone(),
                content,
            });
            if failed.insert(task_id.clone()) {
                failed_order.push(task_id);
            }
        }

        CoordinationResult {
            completed_tasks: completed_order,
            failed_tasks: failed_order,
            messages,
            summary: format!(
                "goal '{}' completed {} tasks, failed {}, skipped {} using {:?} coordination (timeout={}ms, max_retries={})",
                root_goal,
                completed.len(),
                failed.len(),
                skipped.len(),
                primitive,
                timeout_ms,
                max_retries
            ),
        }
    }

    /// Returns the ids of all pending tasks currently classified as `Ready`.
    fn ready_task_ids(
        &self,
        tasks: &BTreeMap<String, CoordinationTask>,
        pending: &BTreeSet<String>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &BTreeSet<String>,
    ) -> Vec<String> {
        pending
            .iter()
            .filter_map(|task_id| {
                let task = tasks.get(task_id)?;
                (self.classify_task(task, tasks, completed, failed, skipped)
                    == CoordinationTaskState::Ready)
                    .then(|| task_id.clone())
            })
            .collect()
    }

    /// For conditional plans: removes tasks blocked by a failed dependency
    /// chain from `pending`, marks them skipped, and records a message.
    fn apply_conditional_skips(
        &self,
        tasks: &BTreeMap<String, CoordinationTask>,
        pending: &mut BTreeSet<String>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &mut BTreeSet<String>,
        messages: &mut Vec<CoordinationMessage>,
    ) {
        // Collect first to avoid mutating `pending` while iterating it.
        let skip_ids = pending
            .iter()
            .filter_map(|task_id| {
                let task = tasks.get(task_id)?;
                (self.classify_task(task, tasks, completed, failed, skipped)
                    == CoordinationTaskState::BlockedByFailure)
                    .then(|| task_id.clone())
            })
            .collect::<Vec<_>>();

        for task_id in skip_ids {
            let Some(task) = tasks.get(&task_id) else {
                continue;
            };
            pending.remove(&task_id);
            skipped.insert(task_id.clone());
            messages.push(CoordinationMessage {
                from_role: task.role.clone(),
                to_role: task.role.clone(),
                task_id: task_id.clone(),
                content: format!("task {task_id} skipped due to failed dependency chain"),
            });
        }
    }

    /// Determines whether `task` can run, must wait, or is blocked, using
    /// role-specific dependency rules.
    fn classify_task(
        &self,
        task: &CoordinationTask,
        tasks: &BTreeMap<String, CoordinationTask>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &BTreeSet<String>,
    ) -> CoordinationTaskState {
        match task.role {
            // Planner/Coder: plain gating — every dependency must exist and
            // complete; any failed or skipped dependency blocks the task.
            AgentRole::Planner | AgentRole::Coder => {
                let mut waiting = false;
                for dependency_id in &task.depends_on {
                    if !tasks.contains_key(dependency_id) {
                        return CoordinationTaskState::PermanentlyBlocked;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if waiting {
                    CoordinationTaskState::Waiting
                } else {
                    CoordinationTaskState::Ready
                }
            }
            // Repair: only runs in reaction to a FAILED Coder dependency.
            // A repair task with no Coder dependency, or whose Coder
            // dependencies all succeeded, can never run.
            AgentRole::Repair => {
                let mut waiting = false;
                let mut has_coder_dependency = false;
                let mut has_failed_coder = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_coder = matches!(dependency.role, AgentRole::Coder);
                    if is_coder {
                        has_coder_dependency = true;
                    }
                    if skipped.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if failed.contains(dependency_id) {
                        if is_coder {
                            // A failed coder is the trigger condition, not a blocker.
                            has_failed_coder = true;
                        } else {
                            return CoordinationTaskState::BlockedByFailure;
                        }
                        continue;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if !has_coder_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_failed_coder {
                    CoordinationTaskState::Ready
                } else {
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
            // Optimizer: requires at least one implementation dependency
            // (Coder or Repair) to have COMPLETED; failed impl deps are
            // tolerated as long as one succeeded.
            AgentRole::Optimizer => {
                let mut waiting = false;
                let mut has_impl_dependency = false;
                let mut has_completed_impl = false;
                let mut has_failed_impl = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_impl = matches!(dependency.role, AgentRole::Coder | AgentRole::Repair);
                    if is_impl {
                        has_impl_dependency = true;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        if is_impl {
                            has_failed_impl = true;
                            continue;
                        }
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if completed.contains(dependency_id) {
                        if is_impl {
                            has_completed_impl = true;
                        }
                        continue;
                    }
                    waiting = true;
                }
                if !has_impl_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_completed_impl {
                    CoordinationTaskState::Ready
                } else if has_failed_impl {
                    CoordinationTaskState::BlockedByFailure
                } else {
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
        }
    }

    /// Records a handoff message for each resolved (completed or failed)
    /// dependency of `task`, deduplicating dependency ids first.
    fn record_handoff_messages(
        &self,
        task: &CoordinationTask,
        tasks: &BTreeMap<String, CoordinationTask>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        messages: &mut Vec<CoordinationMessage>,
    ) {
        let mut dependency_ids = task.depends_on.clone();
        dependency_ids.sort();
        dependency_ids.dedup();

        for dependency_id in dependency_ids {
            let Some(dependency) = tasks.get(&dependency_id) else {
                continue;
            };
            if completed.contains(&dependency_id) {
                messages.push(CoordinationMessage {
                    from_role: dependency.role.clone(),
                    to_role: task.role.clone(),
                    task_id: task.id.clone(),
                    content: format!("handoff from {dependency_id} to {}", task.id),
                });
            } else if failed.contains(&dependency_id) {
                messages.push(CoordinationMessage {
                    from_role: dependency.role.clone(),
                    to_role: task.role.clone(),
                    task_id: task.id.clone(),
                    content: format!("failed dependency {dependency_id} routed to {}", task.id),
                });
            }
        }
    }

    /// Deterministic failure simulation driven by description markers:
    /// "force-fail" always fails; "fail-once" fails only the first attempt.
    fn simulate_task_failure(task: &CoordinationTask, prior_failures: u32) -> bool {
        let normalized = task.description.to_ascii_lowercase();
        normalized.contains("force-fail")
            || (normalized.contains("fail-once") && prior_failures == 0)
    }
}
960
/// Errors raised during capsule replay.
#[derive(Debug, Error)]
pub enum ReplayError {
    /// Evolution store access failed.
    #[error("store error: {0}")]
    Store(String),
    /// Sandbox execution failed.
    #[error("sandbox error: {0}")]
    Sandbox(String),
    /// The validation pipeline could not be run.
    #[error("validation error: {0}")]
    Validation(String),
}
970
/// Attempts to satisfy a task by replaying a stored capsule instead of
/// invoking the planner.
#[async_trait]
pub trait ReplayExecutor: Send + Sync {
    /// Tries to replay a matching capsule under `policy`, validating with
    /// `validation`. The returned decision records whether a capsule was
    /// used or the planner must take over, plus audit evidence.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError>;

    /// Run-scoped variant; the default implementation ignores `run_id` and
    /// delegates to [`ReplayExecutor::try_replay`].
    async fn try_replay_for_run(
        &self,
        run_id: &RunId,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        let _ = run_id;
        self.try_replay(input, policy, validation).await
    }
}
991
/// [`ReplayExecutor`] wired to the evolution store, selector, governor,
/// sandbox, and validator.
pub struct StoreReplayExecutor {
    pub sandbox: Arc<dyn Sandbox>,
    pub validator: Arc<dyn Validator>,
    pub store: Arc<dyn EvolutionStore>,
    pub selector: Arc<dyn Selector>,
    pub governor: Arc<dyn Governor>,
    /// Optional EVU ledger; together with `remote_publishers` it widens the
    /// candidate pool during selection.
    pub economics: Option<Arc<Mutex<EvuLedger>>>,
    /// Optional remote-publisher registry (NOTE(review): key/value semantics
    /// inferred from the name — confirm against construction sites).
    pub remote_publishers: Option<Arc<Mutex<BTreeMap<String, String>>>>,
    pub stake_policy: StakePolicy,
}
1002
/// Candidate set produced by `collect_replay_candidates`, flagged when it
/// came from exact-match (cold-start) lookup rather than the ranked selector.
struct ReplayCandidates {
    candidates: Vec<GeneCandidate>,
    exact_match: bool,
}
1007
#[async_trait]
impl ReplayExecutor for StoreReplayExecutor {
    /// Delegates to the shared replay pipeline without a run id.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        self.try_replay_inner(None, input, policy, validation).await
    }

    /// Run-aware variant; threads `run_id` into the shared pipeline.
    async fn try_replay_for_run(
        &self,
        run_id: &RunId,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        self.try_replay_inner(Some(run_id), input, policy, validation)
            .await
    }
}
1030
1031impl StoreReplayExecutor {
    /// Gathers replay candidates for `input`, trying progressively broader
    /// fallbacks: the configured selector first, then exact-match store
    /// lookup, then quarantined remote exact matches. Each pool is re-ranked
    /// with reputation bias; the result is clamped to the caller's limit.
    fn collect_replay_candidates(&self, input: &SelectorInput) -> ReplayCandidates {
        // Quarantine confidence-decayed assets before selecting.
        self.apply_confidence_revalidation();
        let mut selector_input = input.clone();
        if self.economics.is_some() && self.remote_publishers.is_some() {
            // With economics and remote publishers enabled, widen the pool so
            // reputation re-ranking has more candidates to work with.
            selector_input.limit = selector_input.limit.max(4);
        }
        let mut candidates = self.selector.select(&selector_input);
        self.rerank_with_reputation_bias(&mut candidates);
        let mut exact_match = false;
        if candidates.is_empty() {
            let mut exact_candidates = exact_match_candidates(self.store.as_ref(), input);
            self.rerank_with_reputation_bias(&mut exact_candidates);
            if !exact_candidates.is_empty() {
                candidates = exact_candidates;
                exact_match = true;
            }
        }
        if candidates.is_empty() {
            let mut remote_candidates =
                quarantined_remote_exact_match_candidates(self.store.as_ref(), input);
            self.rerank_with_reputation_bias(&mut remote_candidates);
            if !remote_candidates.is_empty() {
                candidates = remote_candidates;
                exact_match = true;
            }
        }
        // Clamp to the requested limit, keeping at least one slot.
        candidates.truncate(input.limit.max(1));
        ReplayCandidates {
            candidates,
            exact_match,
        }
    }
1064
    /// Builds audit evidence describing which candidates were considered and
    /// which gene/capsule was selected. Exact-match (cold-start) lookups
    /// carry `COLD_START_LOOKUP_PENALTY` so their final scores are discounted.
    fn build_select_evidence(
        &self,
        input: &SelectorInput,
        candidates: &[GeneCandidate],
        exact_match: bool,
    ) -> ReplaySelectEvidence {
        let cold_start_penalty = if exact_match {
            COLD_START_LOOKUP_PENALTY
        } else {
            0.0
        };
        let candidate_rows = candidates
            .iter()
            .enumerate()
            .map(|(idx, candidate)| {
                // Per-candidate evidence is derived from its top-ranked capsule.
                let top_capsule = candidate.capsules.first();
                let environment_match_factor = top_capsule
                    .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env));
                let final_score = candidate.score * (1.0 - cold_start_penalty);
                ReplayCandidateEvidence {
                    rank: idx + 1,
                    gene_id: candidate.gene.id.clone(),
                    capsule_id: top_capsule.map(|capsule| capsule.id.clone()),
                    match_quality: candidate.score,
                    confidence: top_capsule.map(|capsule| capsule.confidence),
                    environment_match_factor,
                    cold_start_penalty,
                    final_score,
                }
            })
            .collect::<Vec<_>>();

        ReplaySelectEvidence {
            exact_match_lookup: exact_match,
            // The first row is the selection (candidates arrive pre-ranked).
            selected_gene_id: candidate_rows
                .first()
                .map(|candidate| candidate.gene_id.clone()),
            selected_capsule_id: candidate_rows
                .first()
                .and_then(|candidate| candidate.capsule_id.clone()),
            candidates: candidate_rows,
        }
    }
1108
    /// Sweeps the projection for promoted genes whose replay confidence has
    /// decayed below the minimum and quarantines them (plus their promoted
    /// capsules) so they must be re-validated before further replay.
    ///
    /// Best-effort by design: a failed projection read skips the whole sweep,
    /// a failed gene-level append skips that gene, and a failed capsule
    /// append stops quarantining the remaining capsules of that gene.
    fn apply_confidence_revalidation(&self) {
        // If the projection cannot be built, silently skip this sweep.
        let Ok(projection) = projection_snapshot(self.store.as_ref()) else {
            return;
        };
        for target in stale_replay_revalidation_targets(&projection, Utc::now()) {
            let reason = format!(
                "confidence decayed to {:.3}; revalidation required before replay",
                target.decayed_confidence
            );
            // Ratio of decayed to peak confidence, clamped into [0, 1];
            // zero peak would divide by zero, so report 0.0 instead.
            let confidence_decay_ratio = if target.peak_confidence > 0.0 {
                (target.decayed_confidence / target.peak_confidence).clamp(0.0, 1.0)
            } else {
                0.0
            };
            // Record the gene-level quarantine first; if that append fails,
            // leave this gene's capsules untouched to keep the log coherent.
            if self
                .store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: target.gene_id.clone(),
                    state: AssetState::Quarantined,
                    reason: reason.clone(),
                    reason_code: TransitionReasonCode::RevalidationConfidenceDecay,
                    evidence: Some(TransitionEvidence {
                        replay_attempts: None,
                        replay_successes: None,
                        replay_success_rate: None,
                        environment_match_factor: None,
                        decayed_confidence: Some(target.decayed_confidence),
                        confidence_decay_ratio: Some(confidence_decay_ratio),
                        summary: Some(format!(
                            "phase=confidence_revalidation; decayed_confidence={:.3}; confidence_decay_ratio={:.3}",
                            target.decayed_confidence, confidence_decay_ratio
                        )),
                    }),
                })
                .is_err()
            {
                continue;
            }
            // Quarantine each promoted capsule; stop on the first failure.
            for capsule_id in target.capsule_ids {
                if self
                    .store
                    .append_event(EvolutionEvent::CapsuleQuarantined { capsule_id })
                    .is_err()
                {
                    break;
                }
            }
        }
    }
1158
1159 fn build_replay_economics_evidence(
1160 &self,
1161 input: &SelectorInput,
1162 candidate: Option<&GeneCandidate>,
1163 source_sender_id: Option<&str>,
1164 success: bool,
1165 reason_code: ReplayRoiReasonCode,
1166 reason: &str,
1167 ) -> ReplayRoiEvidence {
1168 let (task_class_id, task_label) =
1169 replay_descriptor_from_candidate_or_input(candidate, input);
1170 let signal_source = candidate
1171 .map(|best| best.gene.signals.as_slice())
1172 .unwrap_or(input.signals.as_slice());
1173 let baseline_tokens = estimated_reasoning_tokens(signal_source);
1174 let reasoning_avoided_tokens = if success { baseline_tokens } else { 0 };
1175 let replay_fallback_cost = if success { 0 } else { baseline_tokens };
1176 let asset_origin =
1177 candidate.and_then(|best| strategy_metadata_value(&best.gene.strategy, "asset_origin"));
1178 let mut context_dimensions = vec![
1179 format!(
1180 "outcome={}",
1181 if success {
1182 "replay_hit"
1183 } else {
1184 "planner_fallback"
1185 }
1186 ),
1187 format!("reason={reason}"),
1188 format!("task_class_id={task_class_id}"),
1189 format!("task_label={task_label}"),
1190 ];
1191 if let Some(asset_origin) = asset_origin.as_deref() {
1192 context_dimensions.push(format!("asset_origin={asset_origin}"));
1193 }
1194 if let Some(source_sender_id) = source_sender_id {
1195 context_dimensions.push(format!("source_sender_id={source_sender_id}"));
1196 }
1197 ReplayRoiEvidence {
1198 success,
1199 reason_code,
1200 task_class_id,
1201 task_label,
1202 reasoning_avoided_tokens,
1203 replay_fallback_cost,
1204 replay_roi: compute_replay_roi(reasoning_avoided_tokens, replay_fallback_cost),
1205 asset_origin,
1206 source_sender_id: source_sender_id.map(ToOwned::to_owned),
1207 context_dimensions,
1208 }
1209 }
1210
1211 fn record_replay_economics(
1212 &self,
1213 replay_run_id: Option<&RunId>,
1214 candidate: Option<&GeneCandidate>,
1215 capsule_id: Option<&str>,
1216 evidence: ReplayRoiEvidence,
1217 ) -> Result<(), ReplayError> {
1218 self.store
1219 .append_event(EvolutionEvent::ReplayEconomicsRecorded {
1220 gene_id: candidate.map(|best| best.gene.id.clone()),
1221 capsule_id: capsule_id.map(ToOwned::to_owned),
1222 replay_run_id: replay_run_id.cloned(),
1223 evidence,
1224 })
1225 .map_err(|err| ReplayError::Store(err.to_string()))?;
1226 Ok(())
1227 }
1228
    /// Core replay pipeline: select a candidate gene, apply its top capsule's
    /// mutation in the sandbox, validate the result, and record economics and
    /// evidence for every outcome (hit or miss). Returns a `ReplayDecision`
    /// telling the caller whether a capsule was reused or it must fall back
    /// to the planner.
    ///
    /// Every miss path records economics first, then returns a fallback
    /// decision with a matching mismatch reason appended to the detect
    /// evidence; the ordering of store appends and settlements is deliberate.
    async fn try_replay_inner(
        &self,
        replay_run_id: Option<&RunId>,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        // Phase 1: candidate collection and selection evidence.
        let ReplayCandidates {
            candidates,
            exact_match,
        } = self.collect_replay_candidates(input);
        let mut detect_evidence = replay_detect_evidence_from_input(input);
        let select_evidence = self.build_select_evidence(input, &candidates, exact_match);
        // Miss: no candidate at all.
        let Some(best) = candidates.into_iter().next() else {
            detect_evidence
                .mismatch_reasons
                .push("no_candidate_after_select".to_string());
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                None,
                None,
                false,
                ReplayRoiReasonCode::ReplayMissNoMatchingGene,
                "no matching gene",
            );
            self.record_replay_economics(replay_run_id, None, None, economics_evidence.clone())?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "no matching gene",
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        };
        // Phase 2: enrich detect evidence from the winning candidate.
        let (detected_task_class_id, detected_task_label) =
            replay_descriptor_from_candidate_or_input(Some(&best), input);
        detect_evidence.task_class_id = detected_task_class_id;
        detect_evidence.task_label = detected_task_label;
        detect_evidence.matched_signals =
            matched_replay_signals(&input.signals, &best.gene.signals);
        // Miss: selector score below the replay threshold (exact-match
        // lookups skip this gate).
        if !exact_match && best.score < 0.82 {
            detect_evidence
                .mismatch_reasons
                .push("score_below_threshold".to_string());
            let reason = format!("best gene score {:.3} below replay threshold", best.score);
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                None,
                false,
                ReplayRoiReasonCode::ReplayMissScoreBelowThreshold,
                &reason,
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                None,
                economics_evidence.clone(),
            )?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason,
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        }

        // Miss: candidate gene has no capsule to replay.
        let Some(capsule) = best.capsules.first().cloned() else {
            detect_evidence
                .mismatch_reasons
                .push("candidate_has_no_capsule".to_string());
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                None,
                false,
                ReplayRoiReasonCode::ReplayMissCandidateHasNoCapsule,
                "candidate gene has no capsule",
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                None,
                economics_evidence.clone(),
            )?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "candidate gene has no capsule".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        };
        // Remote provenance (if any) drives settlement and shadow
        // progression below.
        let remote_publisher = self.publisher_for_capsule(&capsule.id);

        // Miss: the declared mutation payload cannot be found in the store.
        let Some(mutation) = find_declared_mutation(self.store.as_ref(), &capsule.mutation_id)
            .map_err(|err| ReplayError::Store(err.to_string()))?
        else {
            detect_evidence
                .mismatch_reasons
                .push("mutation_payload_missing".to_string());
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                remote_publisher.as_deref(),
                false,
                ReplayRoiReasonCode::ReplayMissMutationPayloadMissing,
                "mutation payload missing from store",
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                Some(&capsule.id),
                economics_evidence.clone(),
            )?;
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "mutation payload missing from store".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        };

        // Phase 3: apply the mutation in the sandbox. Apply failures settle
        // economics against the remote publisher (unsuccessful reuse).
        let receipt = match self.sandbox.apply(&mutation, policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                self.record_reuse_settlement(remote_publisher.as_deref(), false);
                let reason = format!("replay patch apply failed: {err}");
                let economics_evidence = self.build_replay_economics_evidence(
                    input,
                    Some(&best),
                    remote_publisher.as_deref(),
                    false,
                    ReplayRoiReasonCode::ReplayMissPatchApplyFailed,
                    &reason,
                );
                self.record_replay_economics(
                    replay_run_id,
                    Some(&best),
                    Some(&capsule.id),
                    economics_evidence.clone(),
                )?;
                detect_evidence
                    .mismatch_reasons
                    .push("patch_apply_failed".to_string());
                return Ok(ReplayDecision {
                    used_capsule: false,
                    capsule_id: Some(capsule.id.clone()),
                    fallback_to_planner: true,
                    reason,
                    detect_evidence,
                    select_evidence,
                    economics_evidence,
                });
            }
        };

        // Phase 4: validate the applied mutation. A failed validation also
        // triggers failure bookkeeping (possible revocation) and settlement.
        let report = self
            .validator
            .run(&receipt, validation)
            .await
            .map_err(|err| ReplayError::Validation(err.to_string()))?;
        if !report.success {
            self.record_replay_validation_failure(&best, &capsule, validation, &report)?;
            self.record_reuse_settlement(remote_publisher.as_deref(), false);
            let economics_evidence = self.build_replay_economics_evidence(
                input,
                Some(&best),
                remote_publisher.as_deref(),
                false,
                ReplayRoiReasonCode::ReplayMissValidationFailed,
                "replay validation failed",
            );
            self.record_replay_economics(
                replay_run_id,
                Some(&best),
                Some(&capsule.id),
                economics_evidence.clone(),
            )?;
            detect_evidence
                .mismatch_reasons
                .push("validation_failed".to_string());
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: Some(capsule.id.clone()),
                fallback_to_planner: true,
                reason: "replay validation failed".into(),
                detect_evidence,
                select_evidence,
                economics_evidence,
            });
        }

        // Phase 5: remote assets in quarantine/shadow advance through the
        // shadow-validation state machine on each successful local replay.
        let requires_shadow_progression = remote_publisher.is_some()
            && matches!(
                capsule.state,
                AssetState::Quarantined | AssetState::ShadowValidated
            );
        if requires_shadow_progression {
            self.store
                .append_event(EvolutionEvent::ValidationPassed {
                    mutation_id: capsule.mutation_id.clone(),
                    report: report.to_snapshot(&validation.profile),
                    gene_id: Some(best.gene.id.clone()),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            let evidence = self.shadow_transition_evidence(&best.gene.id, &capsule, &input.env)?;
            // Quarantined -> ShadowValidated on first pass; ShadowValidated
            // -> Promoted once the gate thresholds are met; otherwise hold.
            let (target_state, reason_code, reason, promote_now, phase) =
                if matches!(best.gene.state, AssetState::Quarantined) {
                    (
                        AssetState::ShadowValidated,
                        TransitionReasonCode::PromotionShadowValidationPassed,
                        "remote asset passed first local replay and entered shadow validation"
                            .into(),
                        false,
                        "quarantine_to_shadow",
                    )
                } else if shadow_promotion_gate_passed(&evidence) {
                    (
                        AssetState::Promoted,
                        TransitionReasonCode::PromotionRemoteReplayValidated,
                        "shadow validation thresholds satisfied; remote asset promoted".into(),
                        true,
                        "shadow_to_promoted",
                    )
                } else {
                    (
                        AssetState::ShadowValidated,
                        TransitionReasonCode::ShadowCollectingReplayEvidence,
                        "shadow validation collecting additional replay evidence".into(),
                        false,
                        "shadow_hold",
                    )
                };
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: best.gene.id.clone(),
                    state: target_state.clone(),
                    reason,
                    reason_code,
                    evidence: Some(evidence.to_transition_evidence(shadow_evidence_summary(
                        &evidence,
                        promote_now,
                        phase,
                    ))),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            if promote_now {
                self.store
                    .append_event(EvolutionEvent::GenePromoted {
                        gene_id: best.gene.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
            self.store
                .append_event(EvolutionEvent::CapsuleReleased {
                    capsule_id: capsule.id.clone(),
                    state: target_state,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
        }

        // Phase 6: successful reuse — record the event, settle economics in
        // the publisher's favor, and record hit ROI.
        self.store
            .append_event(EvolutionEvent::CapsuleReused {
                capsule_id: capsule.id.clone(),
                gene_id: capsule.gene_id.clone(),
                run_id: capsule.run_id.clone(),
                replay_run_id: replay_run_id.cloned(),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        self.record_reuse_settlement(remote_publisher.as_deref(), true);
        let reason = if exact_match {
            "replayed via cold-start lookup".to_string()
        } else {
            "replayed via selector".to_string()
        };
        let economics_evidence = self.build_replay_economics_evidence(
            input,
            Some(&best),
            remote_publisher.as_deref(),
            true,
            ReplayRoiReasonCode::ReplayHit,
            &reason,
        );
        self.record_replay_economics(
            replay_run_id,
            Some(&best),
            Some(&capsule.id),
            economics_evidence.clone(),
        )?;

        Ok(ReplayDecision {
            used_capsule: true,
            capsule_id: Some(capsule.id),
            fallback_to_planner: false,
            reason,
            detect_evidence,
            select_evidence,
            economics_evidence,
        })
    }
1540
1541 fn rerank_with_reputation_bias(&self, candidates: &mut [GeneCandidate]) {
1542 let Some(ledger) = self.economics.as_ref() else {
1543 return;
1544 };
1545 let reputation_bias = ledger
1546 .lock()
1547 .ok()
1548 .map(|locked| locked.selector_reputation_bias())
1549 .unwrap_or_default();
1550 if reputation_bias.is_empty() {
1551 return;
1552 }
1553 let required_assets = candidates
1554 .iter()
1555 .filter_map(|candidate| {
1556 candidate
1557 .capsules
1558 .first()
1559 .map(|capsule| capsule.id.as_str())
1560 })
1561 .collect::<Vec<_>>();
1562 let publisher_map = self.remote_publishers_snapshot(&required_assets);
1563 if publisher_map.is_empty() {
1564 return;
1565 }
1566 candidates.sort_by(|left, right| {
1567 effective_candidate_score(right, &publisher_map, &reputation_bias)
1568 .partial_cmp(&effective_candidate_score(
1569 left,
1570 &publisher_map,
1571 &reputation_bias,
1572 ))
1573 .unwrap_or(std::cmp::Ordering::Equal)
1574 .then_with(|| left.gene.id.cmp(&right.gene.id))
1575 });
1576 }
1577
1578 fn publisher_for_capsule(&self, capsule_id: &str) -> Option<String> {
1579 self.remote_publishers_snapshot(&[capsule_id])
1580 .get(capsule_id)
1581 .cloned()
1582 }
1583
1584 fn remote_publishers_snapshot(&self, required_assets: &[&str]) -> BTreeMap<String, String> {
1585 let cached = self
1586 .remote_publishers
1587 .as_ref()
1588 .and_then(|remote_publishers| {
1589 remote_publishers.lock().ok().map(|locked| locked.clone())
1590 })
1591 .unwrap_or_default();
1592 if !cached.is_empty()
1593 && required_assets
1594 .iter()
1595 .all(|asset_id| cached.contains_key(*asset_id))
1596 {
1597 return cached;
1598 }
1599
1600 let persisted = remote_publishers_by_asset_from_store(self.store.as_ref());
1601 if persisted.is_empty() {
1602 return cached;
1603 }
1604
1605 let mut merged = cached;
1606 for (asset_id, sender_id) in persisted {
1607 merged.entry(asset_id).or_insert(sender_id);
1608 }
1609
1610 if let Some(remote_publishers) = self.remote_publishers.as_ref() {
1611 if let Ok(mut locked) = remote_publishers.lock() {
1612 for (asset_id, sender_id) in &merged {
1613 locked.entry(asset_id.clone()).or_insert(sender_id.clone());
1614 }
1615 }
1616 }
1617
1618 merged
1619 }
1620
1621 fn record_reuse_settlement(&self, publisher_id: Option<&str>, success: bool) {
1622 let Some(publisher_id) = publisher_id else {
1623 return;
1624 };
1625 let Some(ledger) = self.economics.as_ref() else {
1626 return;
1627 };
1628 if let Ok(mut locked) = ledger.lock() {
1629 locked.settle_remote_reuse(publisher_id, success, &self.stake_policy);
1630 }
1631 }
1632
    /// Records a failed replay validation: appends a `ValidationFailed` event,
    /// asks the governor whether the gene should be revoked given its failure
    /// history and confidence trajectory, and — on revocation — emits the
    /// revocation events and quarantines every capsule of the gene.
    fn record_replay_validation_failure(
        &self,
        best: &GeneCandidate,
        capsule: &Capsule,
        validation: &ValidationPlan,
        report: &ValidationReport,
    ) -> Result<(), ReplayError> {
        let projection = projection_snapshot(self.store.as_ref())
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            Self::confidence_context(&projection, &best.gene.id);

        self.store
            .append_event(EvolutionEvent::ValidationFailed {
                mutation_id: capsule.mutation_id.clone(),
                report: report.to_snapshot(&validation.profile),
                gene_id: Some(best.gene.id.clone()),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;

        // Let the governor weigh failure count, blast radius, provenance, and
        // confidence to decide whether this gene must be revoked.
        let replay_failures = self.replay_failure_count(&best.gene.id)?;
        let governor_decision = self.governor.evaluate(GovernorInput {
            candidate_source: if self.publisher_for_capsule(&capsule.id).is_some() {
                CandidateSource::Remote
            } else {
                CandidateSource::Local
            },
            success_count: 0,
            blast_radius: BlastRadius {
                files_changed: capsule.outcome.changed_files.len(),
                lines_changed: capsule.outcome.lines_changed,
            },
            replay_failures,
            recent_mutation_ages_secs: Vec::new(),
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        if matches!(governor_decision.target_state, AssetState::Revoked) {
            // Revocation: audit record first, then the revocation itself,
            // then quarantine every capsule belonging to the gene.
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: best.gene.id.clone(),
                    state: AssetState::Revoked,
                    reason: governor_decision.reason.clone(),
                    reason_code: governor_decision.reason_code.clone(),
                    evidence: Some(TransitionEvidence {
                        replay_attempts: Some(replay_failures),
                        replay_successes: None,
                        replay_success_rate: None,
                        environment_match_factor: None,
                        decayed_confidence: Some(current_confidence),
                        confidence_decay_ratio: if historical_peak_confidence > 0.0 {
                            Some((current_confidence / historical_peak_confidence).clamp(0.0, 1.0))
                        } else {
                            None
                        },
                        summary: Some(format!(
                            "phase=replay_failure_revocation; replay_failures={replay_failures}; current_confidence={:.3}; historical_peak_confidence={:.3}",
                            current_confidence, historical_peak_confidence
                        )),
                    }),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: best.gene.id.clone(),
                    reason: governor_decision.reason,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            for related in &best.capsules {
                self.store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: related.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
        }

        Ok(())
    }
1714
1715 fn confidence_context(
1716 projection: &EvolutionProjection,
1717 gene_id: &str,
1718 ) -> (f32, f32, Option<u64>) {
1719 let peak_confidence = projection
1720 .capsules
1721 .iter()
1722 .filter(|capsule| capsule.gene_id == gene_id)
1723 .map(|capsule| capsule.confidence)
1724 .fold(0.0_f32, f32::max);
1725 let age_secs = projection
1726 .last_updated_at
1727 .get(gene_id)
1728 .and_then(|timestamp| Self::seconds_since_timestamp(timestamp, Utc::now()));
1729 (peak_confidence, peak_confidence, age_secs)
1730 }
1731
1732 fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
1733 let parsed = DateTime::parse_from_rfc3339(timestamp)
1734 .ok()?
1735 .with_timezone(&Utc);
1736 let elapsed = now.signed_duration_since(parsed);
1737 if elapsed < Duration::zero() {
1738 Some(0)
1739 } else {
1740 u64::try_from(elapsed.num_seconds()).ok()
1741 }
1742 }
1743
1744 fn replay_failure_count(&self, gene_id: &str) -> Result<u64, ReplayError> {
1745 Ok(self
1746 .store
1747 .scan(1)
1748 .map_err(|err| ReplayError::Store(err.to_string()))?
1749 .into_iter()
1750 .filter(|stored| {
1751 matches!(
1752 &stored.event,
1753 EvolutionEvent::ValidationFailed {
1754 gene_id: Some(current_gene_id),
1755 ..
1756 } if current_gene_id == gene_id
1757 )
1758 })
1759 .count() as u64)
1760 }
1761
1762 fn shadow_transition_evidence(
1763 &self,
1764 gene_id: &str,
1765 capsule: &Capsule,
1766 input_env: &EnvFingerprint,
1767 ) -> Result<ShadowTransitionEvidence, ReplayError> {
1768 let events = self
1769 .store
1770 .scan(1)
1771 .map_err(|err| ReplayError::Store(err.to_string()))?;
1772 let (replay_attempts, replay_successes) = events.iter().fold(
1773 (0_u64, 0_u64),
1774 |(attempts, successes), stored| match &stored.event {
1775 EvolutionEvent::ValidationPassed {
1776 gene_id: Some(current_gene_id),
1777 ..
1778 } if current_gene_id == gene_id => (attempts + 1, successes + 1),
1779 EvolutionEvent::ValidationFailed {
1780 gene_id: Some(current_gene_id),
1781 ..
1782 } if current_gene_id == gene_id => (attempts + 1, successes),
1783 _ => (attempts, successes),
1784 },
1785 );
1786 let replay_success_rate = safe_ratio(replay_successes, replay_attempts) as f32;
1787 let environment_match_factor = replay_environment_match_factor(input_env, &capsule.env);
1788 let projection = projection_snapshot(self.store.as_ref())
1789 .map_err(|err| ReplayError::Store(err.to_string()))?;
1790 let age_secs = projection
1791 .last_updated_at
1792 .get(gene_id)
1793 .and_then(|timestamp| Self::seconds_since_timestamp(timestamp, Utc::now()));
1794 let decayed_confidence = decayed_replay_confidence(capsule.confidence, age_secs);
1795 let confidence_decay_ratio = if capsule.confidence > 0.0 {
1796 (decayed_confidence / capsule.confidence).clamp(0.0, 1.0)
1797 } else {
1798 0.0
1799 };
1800
1801 Ok(ShadowTransitionEvidence {
1802 replay_attempts,
1803 replay_successes,
1804 replay_success_rate,
1805 environment_match_factor,
1806 decayed_confidence,
1807 confidence_decay_ratio,
1808 })
1809 }
1810}
1811
/// Replay evidence gathered while a remote asset moves through shadow
/// validation; feeds the promotion gate and transition audit records.
#[derive(Clone, Debug)]
struct ShadowTransitionEvidence {
    // Total local validation attempts (passed + failed) seen for the gene.
    replay_attempts: u64,
    // Subset of attempts that passed validation.
    replay_successes: u64,
    // replay_successes / replay_attempts, via safe_ratio.
    replay_success_rate: f32,
    // Match factor between the selector input env and the capsule's env.
    environment_match_factor: f32,
    // Capsule confidence after time-based decay.
    decayed_confidence: f32,
    // decayed_confidence / raw capsule confidence, clamped into [0, 1].
    confidence_decay_ratio: f32,
}
1821
impl ShadowTransitionEvidence {
    /// Converts this evidence into the store-level `TransitionEvidence`
    /// shape, attaching a pre-rendered human-readable summary line.
    fn to_transition_evidence(&self, summary: String) -> TransitionEvidence {
        TransitionEvidence {
            replay_attempts: Some(self.replay_attempts),
            replay_successes: Some(self.replay_successes),
            replay_success_rate: Some(self.replay_success_rate),
            environment_match_factor: Some(self.environment_match_factor),
            decayed_confidence: Some(self.decayed_confidence),
            confidence_decay_ratio: Some(self.confidence_decay_ratio),
            summary: Some(summary),
        }
    }
}
1835
1836fn shadow_promotion_gate_passed(evidence: &ShadowTransitionEvidence) -> bool {
1837 evidence.replay_attempts >= SHADOW_PROMOTION_MIN_REPLAY_ATTEMPTS
1838 && evidence.replay_success_rate >= SHADOW_PROMOTION_MIN_SUCCESS_RATE
1839 && evidence.environment_match_factor >= SHADOW_PROMOTION_MIN_ENV_MATCH
1840 && evidence.decayed_confidence >= SHADOW_PROMOTION_MIN_DECAYED_CONFIDENCE
1841}
1842
/// Renders a single-line `key=value; …` summary of shadow-transition
/// evidence for embedding into `PromotionEvaluated` audit records.
fn shadow_evidence_summary(
    evidence: &ShadowTransitionEvidence,
    promoted: bool,
    phase: &str,
) -> String {
    format!(
        "phase={phase}; replay_attempts={}; replay_successes={}; replay_success_rate={:.3}; environment_match_factor={:.3}; decayed_confidence={:.3}; confidence_decay_ratio={:.3}; promote={promoted}",
        evidence.replay_attempts,
        evidence.replay_successes,
        evidence.replay_success_rate,
        evidence.environment_match_factor,
        evidence.decayed_confidence,
        evidence.confidence_decay_ratio,
    )
}
1858
/// A promoted gene whose replay confidence has decayed below the minimum and
/// must be quarantined for revalidation, together with its promoted capsules.
#[derive(Clone, Debug, PartialEq)]
struct ConfidenceRevalidationTarget {
    gene_id: String,
    // Promoted capsules of the gene that should be quarantined alongside it.
    capsule_ids: Vec<String>,
    // Highest raw confidence among the promoted capsules.
    peak_confidence: f32,
    // Highest time-decayed confidence among the promoted capsules.
    decayed_confidence: f32,
}
1866
1867fn stale_replay_revalidation_targets(
1868 projection: &EvolutionProjection,
1869 now: DateTime<Utc>,
1870) -> Vec<ConfidenceRevalidationTarget> {
1871 projection
1872 .genes
1873 .iter()
1874 .filter(|gene| gene.state == AssetState::Promoted)
1875 .filter_map(|gene| {
1876 let promoted_capsules = projection
1877 .capsules
1878 .iter()
1879 .filter(|capsule| {
1880 capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
1881 })
1882 .collect::<Vec<_>>();
1883 if promoted_capsules.is_empty() {
1884 return None;
1885 }
1886 let age_secs = projection
1887 .last_updated_at
1888 .get(&gene.id)
1889 .and_then(|timestamp| seconds_since_timestamp_for_confidence(timestamp, now));
1890 let decayed_confidence = promoted_capsules
1891 .iter()
1892 .map(|capsule| decayed_replay_confidence(capsule.confidence, age_secs))
1893 .fold(0.0_f32, f32::max);
1894 if decayed_confidence >= MIN_REPLAY_CONFIDENCE {
1895 return None;
1896 }
1897 let peak_confidence = promoted_capsules
1898 .iter()
1899 .map(|capsule| capsule.confidence)
1900 .fold(0.0_f32, f32::max);
1901 Some(ConfidenceRevalidationTarget {
1902 gene_id: gene.id.clone(),
1903 capsule_ids: promoted_capsules
1904 .into_iter()
1905 .map(|capsule| capsule.id.clone())
1906 .collect(),
1907 peak_confidence,
1908 decayed_confidence,
1909 })
1910 })
1911 .collect()
1912}
1913
1914fn seconds_since_timestamp_for_confidence(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
1915 let parsed = DateTime::parse_from_rfc3339(timestamp)
1916 .ok()?
1917 .with_timezone(&Utc);
1918 let elapsed = now.signed_duration_since(parsed);
1919 if elapsed < Duration::zero() {
1920 Some(0)
1921 } else {
1922 u64::try_from(elapsed.num_seconds()).ok()
1923 }
1924}
1925
/// Errors surfaced by the evolution kernel and network-node operations.
#[derive(Debug, Error)]
pub enum EvoKernelError {
    /// Sandbox execution failed; carries the underlying error message.
    #[error("sandbox error: {0}")]
    Sandbox(String),
    /// The validation machinery itself failed to run.
    #[error("validation error: {0}")]
    Validation(String),
    /// Validation ran to completion but did not pass; carries the report.
    #[error("validation failed")]
    ValidationFailed(ValidationReport),
    /// Event-store read/append failed; carries the underlying error message.
    #[error("store error: {0}")]
    Store(String),
}
1937
/// Result of capturing a mutation as a reusable asset: the stored capsule,
/// its owning gene, and the governor's decision about it.
#[derive(Clone, Debug)]
pub struct CaptureOutcome {
    pub capsule: Capsule,
    pub gene: Gene,
    pub governor_decision: GovernorDecision,
}
1944
/// Outcome of importing remote evolution assets into the local store.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ImportOutcome {
    /// Ids of the assets accepted into the store by this import.
    pub imported_asset_ids: Vec<String>,
    /// Whether the import was accepted overall.
    pub accepted: bool,
    /// Cursor for the next incremental sync page, when one exists.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub next_cursor: Option<String>,
    /// Token allowing an interrupted sync to resume, when one exists.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resume_token: Option<String>,
    /// Audit record of the sync operation.
    #[serde(default)]
    pub sync_audit: SyncAudit,
}
1956
/// Point-in-time metrics aggregated from the evolution event store, covering
/// replay activity, replay economics (token accounting / ROI), the
/// mutation-promotion lifecycle, and current store state.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct EvolutionMetricsSnapshot {
    // Replay activity.
    pub replay_attempts_total: u64,
    pub replay_success_total: u64,
    pub replay_success_rate: f64,
    pub confidence_revalidations_total: u64,
    // Replay economics: avoided reasoning vs. fallback cost, and ROI.
    pub replay_reasoning_avoided_total: u64,
    pub reasoning_avoided_tokens_total: u64,
    pub replay_fallback_cost_total: u64,
    pub replay_roi: f64,
    pub replay_task_classes: Vec<ReplayTaskClassMetrics>,
    pub replay_sources: Vec<ReplaySourceRoiMetrics>,
    // Mutation / promotion lifecycle counters.
    pub mutation_declared_total: u64,
    pub promoted_mutations_total: u64,
    pub promotion_ratio: f64,
    pub gene_revocations_total: u64,
    pub mutation_velocity_last_hour: u64,
    pub revoke_frequency_last_hour: u64,
    // Current store state.
    pub promoted_genes: u64,
    pub promoted_capsules: u64,
    pub last_event_seq: u64,
}
1979
/// Minimal health view derived from an `EvolutionMetricsSnapshot`.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct EvolutionHealthSnapshot {
    /// Overall status label.
    pub status: String,
    pub last_event_seq: u64,
    pub promoted_genes: u64,
    pub promoted_capsules: u64,
}
1987
/// Store-backed facade exposing the network-facing evolution operations
/// (publish / fetch / revoke, metrics, and release-gate summaries).
#[derive(Clone)]
pub struct EvolutionNetworkNode {
    /// Shared evolution event store backing every operation.
    pub store: Arc<dyn EvolutionStore>,
}
1992
impl EvolutionNetworkNode {
    /// Wraps an existing evolution store.
    pub fn new(store: Arc<dyn EvolutionStore>) -> Self {
        Self { store }
    }

    /// Builds a node over a JSONL store rooted at the default store path.
    pub fn with_default_store() -> Self {
        Self {
            store: Arc::new(JsonlEvolutionStore::new(default_store_root())),
        }
    }

    /// Imports the assets from a publish request, honoring the sender's
    /// cursor / resume token for incremental sync.
    pub fn accept_publish_request(
        &self,
        request: &PublishRequest,
    ) -> Result<ImportOutcome, EvoKernelError> {
        let requested_cursor = resolve_requested_cursor(
            &request.sender_id,
            request.since_cursor.as_deref(),
            request.resume_token.as_deref(),
        )?;
        import_remote_envelope_into_store(
            self.store.as_ref(),
            &EvolutionEnvelope::publish(request.sender_id.clone(), request.assets.clone()),
            None,
            requested_cursor,
        )
    }

    /// Seeds the store with the built-in experience assets attributed to
    /// `sender_id`.
    pub fn ensure_builtin_experience_assets(
        &self,
        sender_id: impl Into<String>,
    ) -> Result<ImportOutcome, EvoKernelError> {
        ensure_builtin_experience_assets_in_store(self.store.as_ref(), sender_id.into())
    }

    /// Records an externally reported experience (a gene with its signal,
    /// strategy, and validation descriptors) into the store.
    pub fn record_reported_experience(
        &self,
        sender_id: impl Into<String>,
        gene_id: impl Into<String>,
        signals: Vec<String>,
        strategy: Vec<String>,
        validation: Vec<String>,
    ) -> Result<ImportOutcome, EvoKernelError> {
        record_reported_experience_in_store(
            self.store.as_ref(),
            sender_id.into(),
            gene_id.into(),
            signals,
            strategy,
            validation,
        )
    }

    /// Exports locally promoted assets as a publishable envelope.
    pub fn publish_local_assets(
        &self,
        sender_id: impl Into<String>,
    ) -> Result<EvolutionEnvelope, EvoKernelError> {
        export_promoted_assets_from_store(self.store.as_ref(), sender_id)
    }

    /// Answers a fetch query against the local store.
    pub fn fetch_assets(
        &self,
        responder_id: impl Into<String>,
        query: &FetchQuery,
    ) -> Result<FetchResponse, EvoKernelError> {
        fetch_assets_from_store(self.store.as_ref(), responder_id, query)
    }

    /// Applies a revocation notice to the local store.
    pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
        revoke_assets_in_store(self.store.as_ref(), notice)
    }

    /// Aggregates current evolution metrics from the store.
    pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
        evolution_metrics_snapshot(self.store.as_ref())
    }

    /// Summarizes replay ROI over the trailing window for release gating.
    pub fn replay_roi_release_gate_summary(
        &self,
        window_seconds: u64,
    ) -> Result<ReplayRoiWindowSummary, EvoKernelError> {
        replay_roi_release_gate_summary(self.store.as_ref(), window_seconds)
    }

    /// Same summary as [`Self::replay_roi_release_gate_summary`], rendered as
    /// pretty-printed JSON.
    pub fn render_replay_roi_release_gate_summary_json(
        &self,
        window_seconds: u64,
    ) -> Result<String, EvoKernelError> {
        let summary = self.replay_roi_release_gate_summary(window_seconds)?;
        serde_json::to_string_pretty(&summary)
            .map_err(|err| EvoKernelError::Validation(err.to_string()))
    }

    /// Evaluates the windowed ROI summary against the given thresholds,
    /// producing a release-gate contract.
    pub fn replay_roi_release_gate_contract(
        &self,
        window_seconds: u64,
        thresholds: ReplayRoiReleaseGateThresholds,
    ) -> Result<ReplayRoiReleaseGateContract, EvoKernelError> {
        let summary = self.replay_roi_release_gate_summary(window_seconds)?;
        Ok(replay_roi_release_gate_contract(&summary, thresholds))
    }

    /// Same contract as [`Self::replay_roi_release_gate_contract`], rendered
    /// as pretty-printed JSON.
    pub fn render_replay_roi_release_gate_contract_json(
        &self,
        window_seconds: u64,
        thresholds: ReplayRoiReleaseGateThresholds,
    ) -> Result<String, EvoKernelError> {
        let contract = self.replay_roi_release_gate_contract(window_seconds, thresholds)?;
        serde_json::to_string_pretty(&contract)
            .map_err(|err| EvoKernelError::Validation(err.to_string()))
    }

    /// Renders metrics plus the derived health view in Prometheus format.
    pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
        self.metrics_snapshot().map(|snapshot| {
            let health = evolution_health_snapshot(&snapshot);
            render_evolution_metrics_prometheus(&snapshot, &health)
        })
    }

    /// Derives the health snapshot from current metrics.
    pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
        self.metrics_snapshot()
            .map(|snapshot| evolution_health_snapshot(&snapshot))
    }
}
2116
/// Evolution-capable kernel: bundles the base kernel with the sandbox,
/// validator, event store, candidate selector, governor, and economics
/// machinery used by the mutate/validate/replay pipeline.
pub struct EvoKernel<S: KernelState> {
    pub kernel: Arc<Kernel<S>>,
    pub sandbox: Arc<dyn Sandbox>,
    pub validator: Arc<dyn Validator>,
    pub store: Arc<dyn EvolutionStore>,
    pub selector: Arc<dyn Selector>,
    pub governor: Arc<dyn Governor>,
    pub economics: Arc<Mutex<EvuLedger>>,
    // Shared cache, presumably asset/capsule id -> publishing sender id
    // (mirrors the replay engine's publisher cache) — TODO confirm.
    pub remote_publishers: Arc<Mutex<BTreeMap<String, String>>>,
    pub stake_policy: StakePolicy,
    pub sandbox_policy: SandboxPolicy,
    pub validation_plan: ValidationPlan,
}
2130
2131impl<S: KernelState> EvoKernel<S> {
2132 fn recent_prior_mutation_ages_secs(
2133 &self,
2134 exclude_mutation_id: Option<&str>,
2135 ) -> Result<Vec<u64>, EvolutionError> {
2136 let now = Utc::now();
2137 let mut ages = self
2138 .store
2139 .scan(1)?
2140 .into_iter()
2141 .filter_map(|stored| match stored.event {
2142 EvolutionEvent::MutationDeclared { mutation }
2143 if exclude_mutation_id != Some(mutation.intent.id.as_str()) =>
2144 {
2145 Self::seconds_since_timestamp(&stored.timestamp, now)
2146 }
2147 _ => None,
2148 })
2149 .collect::<Vec<_>>();
2150 ages.sort_unstable();
2151 Ok(ages)
2152 }
2153
2154 fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
2155 let parsed = DateTime::parse_from_rfc3339(timestamp)
2156 .ok()?
2157 .with_timezone(&Utc);
2158 let elapsed = now.signed_duration_since(parsed);
2159 if elapsed < Duration::zero() {
2160 Some(0)
2161 } else {
2162 u64::try_from(elapsed.num_seconds()).ok()
2163 }
2164 }
2165
2166 pub fn new(
2167 kernel: Arc<Kernel<S>>,
2168 sandbox: Arc<dyn Sandbox>,
2169 validator: Arc<dyn Validator>,
2170 store: Arc<dyn EvolutionStore>,
2171 ) -> Self {
2172 let selector: Arc<dyn Selector> = Arc::new(StoreBackedSelector::new(store.clone()));
2173 Self {
2174 kernel,
2175 sandbox,
2176 validator,
2177 store,
2178 selector,
2179 governor: Arc::new(DefaultGovernor::default()),
2180 economics: Arc::new(Mutex::new(EvuLedger::default())),
2181 remote_publishers: Arc::new(Mutex::new(BTreeMap::new())),
2182 stake_policy: StakePolicy::default(),
2183 sandbox_policy: SandboxPolicy::oris_default(),
2184 validation_plan: ValidationPlan::oris_default(),
2185 }
2186 }
2187
2188 pub fn with_selector(mut self, selector: Arc<dyn Selector>) -> Self {
2189 self.selector = selector;
2190 self
2191 }
2192
2193 pub fn with_sandbox_policy(mut self, policy: SandboxPolicy) -> Self {
2194 self.sandbox_policy = policy;
2195 self
2196 }
2197
2198 pub fn with_governor(mut self, governor: Arc<dyn Governor>) -> Self {
2199 self.governor = governor;
2200 self
2201 }
2202
2203 pub fn with_economics(mut self, economics: Arc<Mutex<EvuLedger>>) -> Self {
2204 self.economics = economics;
2205 self
2206 }
2207
2208 pub fn with_stake_policy(mut self, policy: StakePolicy) -> Self {
2209 self.stake_policy = policy;
2210 self
2211 }
2212
2213 pub fn with_validation_plan(mut self, plan: ValidationPlan) -> Self {
2214 self.validation_plan = plan;
2215 self
2216 }
2217
2218 pub fn select_candidates(&self, input: &SelectorInput) -> Vec<GeneCandidate> {
2219 let executor = StoreReplayExecutor {
2220 sandbox: self.sandbox.clone(),
2221 validator: self.validator.clone(),
2222 store: self.store.clone(),
2223 selector: self.selector.clone(),
2224 governor: self.governor.clone(),
2225 economics: Some(self.economics.clone()),
2226 remote_publishers: Some(self.remote_publishers.clone()),
2227 stake_policy: self.stake_policy.clone(),
2228 };
2229 executor.collect_replay_candidates(input).candidates
2230 }
2231
    /// Seeds the evolution store with the built-in templates when no genes exist yet.
    ///
    /// Each seed is written as the same ordered event sequence a real capture would
    /// produce (declare → signals → gene → promotion-eval → capsule → quarantine), so
    /// downstream projections treat seeds identically to organic mutations. Seeds are
    /// always quarantined: they must pass local validation before replay.
    ///
    /// Returns a default (not-seeded) report when genes already exist.
    pub fn bootstrap_if_empty(&self, run_id: &RunId) -> Result<BootstrapReport, EvoKernelError> {
        let projection = projection_snapshot(self.store.as_ref())?;
        if !projection.genes.is_empty() {
            // Store already has genes — nothing to do.
            return Ok(BootstrapReport::default());
        }

        let templates = built_in_seed_templates();
        for template in &templates {
            // Build all artifacts up front so a failure appends no partial sequence.
            let mutation = build_seed_mutation(template);
            let extracted = extract_seed_signals(template);
            let gene = build_bootstrap_gene(template, &extracted)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
            let capsule = build_bootstrap_capsule(run_id, template, &mutation, &gene)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;

            self.store
                .append_event(EvolutionEvent::MutationDeclared {
                    mutation: mutation.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::SignalsExtracted {
                    mutation_id: mutation.intent.id.clone(),
                    hash: extracted.hash.clone(),
                    signals: extracted.values.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
                .map_err(store_err)?;
            // Seeds are explicitly downgraded; the reason code records why.
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: gene.id.clone(),
                    state: AssetState::Quarantined,
                    reason: "bootstrap seeds require local validation before replay".into(),
                    reason_code: TransitionReasonCode::DowngradeBootstrapRequiresLocalValidation,
                    evidence: None,
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleCommitted {
                    capsule: capsule.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id,
                })
                .map_err(store_err)?;
        }

        Ok(BootstrapReport {
            seeded: true,
            genes_added: templates.len(),
            capsules_added: templates.len(),
        })
    }
2289
2290 pub async fn capture_successful_mutation(
2291 &self,
2292 run_id: &RunId,
2293 mutation: PreparedMutation,
2294 ) -> Result<Capsule, EvoKernelError> {
2295 Ok(self
2296 .capture_mutation_with_governor(run_id, mutation)
2297 .await?
2298 .capsule)
2299 }
2300
    /// Full capture pipeline for a locally prepared mutation: declare → sandbox
    /// apply → validate → extract signals → project gene → governor evaluation →
    /// commit capsule.
    ///
    /// Every stage appends an audit event before the next stage runs, so any
    /// failure leaves a complete trail. Sandbox and validation failures also emit
    /// a `MutationRejected` event carrying a normalized failure contract (reason
    /// code + recovery hint) before the error is returned.
    pub async fn capture_mutation_with_governor(
        &self,
        run_id: &RunId,
        mutation: PreparedMutation,
    ) -> Result<CaptureOutcome, EvoKernelError> {
        // Audit the declaration before any side effects occur.
        self.store
            .append_event(EvolutionEvent::MutationDeclared {
                mutation: mutation.clone(),
            })
            .map_err(store_err)?;

        let receipt = match self.sandbox.apply(&mutation, &self.sandbox_policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                // Sandbox refusal: record a contract-backed rejection, then
                // surface the raw sandbox error.
                let message = err.to_string();
                let contract = mutation_needed_contract_for_error_message(&message);
                self.store
                    .append_event(EvolutionEvent::MutationRejected {
                        mutation_id: mutation.intent.id.clone(),
                        reason: contract.failure_reason,
                        reason_code: Some(
                            mutation_needed_reason_code_key(contract.reason_code).to_string(),
                        ),
                        recovery_hint: Some(contract.recovery_hint),
                        fail_closed: contract.fail_closed,
                    })
                    .map_err(store_err)?;
                return Err(EvoKernelError::Sandbox(message));
            }
        };

        self.store
            .append_event(EvolutionEvent::MutationApplied {
                mutation_id: mutation.intent.id.clone(),
                patch_hash: receipt.patch_hash.clone(),
                changed_files: receipt
                    .changed_files
                    .iter()
                    .map(|path| path.to_string_lossy().to_string())
                    .collect(),
            })
            .map_err(store_err)?;

        let report = match self.validator.run(&receipt, &self.validation_plan).await {
            Ok(report) => report,
            // Validator could not run at all (distinct from a failing report below).
            Err(err) => {
                let message = format!("mutation-needed validation execution error: {err}");
                let contract = mutation_needed_contract_for_error_message(&message);
                self.store
                    .append_event(EvolutionEvent::MutationRejected {
                        mutation_id: mutation.intent.id.clone(),
                        reason: contract.failure_reason,
                        reason_code: Some(
                            mutation_needed_reason_code_key(contract.reason_code).to_string(),
                        ),
                        recovery_hint: Some(contract.recovery_hint),
                        fail_closed: contract.fail_closed,
                    })
                    .map_err(store_err)?;
                return Err(EvoKernelError::Validation(message));
            }
        };
        if !report.success {
            // Validation ran but failed: record the snapshot, then the rejection.
            self.store
                .append_event(EvolutionEvent::ValidationFailed {
                    mutation_id: mutation.intent.id.clone(),
                    report: report.to_snapshot(&self.validation_plan.profile),
                    gene_id: None,
                })
                .map_err(store_err)?;
            let contract = mutation_needed_contract_for_validation_failure(
                &self.validation_plan.profile,
                &report,
            );
            self.store
                .append_event(EvolutionEvent::MutationRejected {
                    mutation_id: mutation.intent.id.clone(),
                    reason: contract.failure_reason,
                    reason_code: Some(
                        mutation_needed_reason_code_key(contract.reason_code).to_string(),
                    ),
                    recovery_hint: Some(contract.recovery_hint),
                    fail_closed: contract.fail_closed,
                })
                .map_err(store_err)?;
            return Err(EvoKernelError::ValidationFailed(report));
        }

        self.store
            .append_event(EvolutionEvent::ValidationPassed {
                mutation_id: mutation.intent.id.clone(),
                report: report.to_snapshot(&self.validation_plan.profile),
                gene_id: None,
            })
            .map_err(store_err)?;

        // Deterministic signals feed gene derivation and later replay matching.
        let extracted_signals = extract_deterministic_signals(&SignalExtractionInput {
            patch_diff: mutation.artifact.payload.clone(),
            intent: mutation.intent.intent.clone(),
            expected_effect: mutation.intent.expected_effect.clone(),
            declared_signals: mutation.intent.signals.clone(),
            changed_files: receipt
                .changed_files
                .iter()
                .map(|path| path.to_string_lossy().to_string())
                .collect(),
            validation_success: report.success,
            validation_logs: report.logs.clone(),
            stage_outputs: report
                .stages
                .iter()
                .flat_map(|stage| [stage.stdout.clone(), stage.stderr.clone()])
                .filter(|value| !value.is_empty())
                .collect(),
        });
        self.store
            .append_event(EvolutionEvent::SignalsExtracted {
                mutation_id: mutation.intent.id.clone(),
                hash: extracted_signals.hash.clone(),
                signals: extracted_signals.values.clone(),
            })
            .map_err(store_err)?;

        // Gather the inputs the governor weighs: projection history, blast
        // radius of this diff, and ages of prior declared mutations.
        let projection = projection_snapshot(self.store.as_ref())?;
        let blast_radius = compute_blast_radius(&mutation.artifact.payload);
        let recent_mutation_ages_secs = self
            .recent_prior_mutation_ages_secs(Some(mutation.intent.id.as_str()))
            .map_err(store_err)?;
        let mut gene = derive_gene(
            &mutation,
            &receipt,
            &self.validation_plan.profile,
            &extracted_signals.values,
        );
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            StoreReplayExecutor::confidence_context(&projection, &gene.id);
        // Successes = capsules already committed for this gene, plus this one.
        let success_count = projection
            .genes
            .iter()
            .find(|existing| existing.id == gene.id)
            .map(|existing| {
                projection
                    .capsules
                    .iter()
                    .filter(|capsule| capsule.gene_id == existing.id)
                    .count() as u64
            })
            .unwrap_or(0)
            + 1;
        let governor_decision = self.governor.evaluate(GovernorInput {
            candidate_source: CandidateSource::Local,
            success_count,
            blast_radius: blast_radius.clone(),
            replay_failures: 0,
            recent_mutation_ages_secs,
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        // The governor's target state drives the gene, its audit events, and the capsule.
        gene.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
            .map_err(store_err)?;
        self.store
            .append_event(EvolutionEvent::PromotionEvaluated {
                gene_id: gene.id.clone(),
                state: governor_decision.target_state.clone(),
                reason: governor_decision.reason.clone(),
                reason_code: governor_decision.reason_code.clone(),
                evidence: None,
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Promoted) {
            self.store
                .append_event(EvolutionEvent::GenePromoted {
                    gene_id: gene.id.clone(),
                })
                .map_err(store_err)?;
        }
        if matches!(governor_decision.target_state, AssetState::Revoked) {
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: gene.id.clone(),
                    reason: governor_decision.reason.clone(),
                })
                .map_err(store_err)?;
        }
        if let Some(spec_id) = &mutation.intent.spec_id {
            // Cross-link to the originating spec when the intent declared one.
            self.store
                .append_event(EvolutionEvent::SpecLinked {
                    mutation_id: mutation.intent.id.clone(),
                    spec_id: spec_id.clone(),
                })
                .map_err(store_err)?;
        }

        let mut capsule = build_capsule(
            run_id,
            &mutation,
            &receipt,
            &report,
            &self.validation_plan.profile,
            &gene,
            &blast_radius,
        )
        .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
        capsule.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::CapsuleCommitted {
                capsule: capsule.clone(),
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Quarantined) {
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id.clone(),
                })
                .map_err(store_err)?;
        }

        Ok(CaptureOutcome {
            capsule,
            gene,
            governor_decision,
        })
    }
2528
2529 pub async fn capture_from_proposal(
2530 &self,
2531 run_id: &RunId,
2532 proposal: &AgentMutationProposal,
2533 diff_payload: String,
2534 base_revision: Option<String>,
2535 ) -> Result<CaptureOutcome, EvoKernelError> {
2536 let intent = MutationIntent {
2537 id: next_id("proposal"),
2538 intent: proposal.intent.clone(),
2539 target: MutationTarget::Paths {
2540 allow: proposal.files.clone(),
2541 },
2542 expected_effect: proposal.expected_effect.clone(),
2543 risk: RiskLevel::Low,
2544 signals: proposal.files.clone(),
2545 spec_id: None,
2546 };
2547 self.capture_mutation_with_governor(
2548 run_id,
2549 prepare_mutation(intent, diff_payload, base_revision),
2550 )
2551 .await
2552 }
2553
2554 pub fn feedback_for_agent(outcome: &CaptureOutcome) -> ExecutionFeedback {
2555 ExecutionFeedback {
2556 accepted: !matches!(outcome.governor_decision.target_state, AssetState::Revoked),
2557 asset_state: Some(format!("{:?}", outcome.governor_decision.target_state)),
2558 summary: outcome.governor_decision.reason.clone(),
2559 }
2560 }
2561
2562 pub fn replay_feedback_for_agent(
2563 signals: &[String],
2564 decision: &ReplayDecision,
2565 ) -> ReplayFeedback {
2566 let (fallback_task_class_id, fallback_task_label) = replay_task_descriptor(signals);
2567 let task_class_id = if decision.detect_evidence.task_class_id.is_empty() {
2568 fallback_task_class_id
2569 } else {
2570 decision.detect_evidence.task_class_id.clone()
2571 };
2572 let task_label = if decision.detect_evidence.task_label.is_empty() {
2573 fallback_task_label
2574 } else {
2575 decision.detect_evidence.task_label.clone()
2576 };
2577 let planner_directive = if decision.used_capsule {
2578 ReplayPlannerDirective::SkipPlanner
2579 } else {
2580 ReplayPlannerDirective::PlanFallback
2581 };
2582 let reasoning_steps_avoided = u64::from(decision.used_capsule);
2583 let reason_code_hint = decision
2584 .detect_evidence
2585 .mismatch_reasons
2586 .first()
2587 .and_then(|reason| infer_replay_fallback_reason_code(reason));
2588 let fallback_contract = normalize_replay_fallback_contract(
2589 &planner_directive,
2590 decision
2591 .fallback_to_planner
2592 .then_some(decision.reason.as_str()),
2593 reason_code_hint,
2594 None,
2595 None,
2596 None,
2597 );
2598 let summary = if decision.used_capsule {
2599 format!("reused prior capsule for task class '{task_label}'; skip planner")
2600 } else {
2601 format!(
2602 "planner fallback required for task class '{task_label}': {}",
2603 decision.reason
2604 )
2605 };
2606
2607 ReplayFeedback {
2608 used_capsule: decision.used_capsule,
2609 capsule_id: decision.capsule_id.clone(),
2610 planner_directive,
2611 reasoning_steps_avoided,
2612 fallback_reason: fallback_contract
2613 .as_ref()
2614 .map(|contract| contract.fallback_reason.clone()),
2615 reason_code: fallback_contract
2616 .as_ref()
2617 .map(|contract| contract.reason_code),
2618 repair_hint: fallback_contract
2619 .as_ref()
2620 .map(|contract| contract.repair_hint.clone()),
2621 next_action: fallback_contract
2622 .as_ref()
2623 .map(|contract| contract.next_action),
2624 confidence: fallback_contract
2625 .as_ref()
2626 .map(|contract| contract.confidence),
2627 task_class_id,
2628 task_label,
2629 summary,
2630 }
2631 }
2632
2633 fn mutation_needed_failure_outcome(
2634 &self,
2635 request: &SupervisedDevloopRequest,
2636 task_class: Option<BoundedTaskClass>,
2637 status: SupervisedDevloopStatus,
2638 contract: MutationNeededFailureContract,
2639 mutation_id_for_audit: Option<String>,
2640 ) -> Result<SupervisedDevloopOutcome, EvoKernelError> {
2641 if let Some(mutation_id) = mutation_id_for_audit {
2642 self.store
2643 .append_event(EvolutionEvent::MutationRejected {
2644 mutation_id,
2645 reason: contract.failure_reason.clone(),
2646 reason_code: Some(
2647 mutation_needed_reason_code_key(contract.reason_code).to_string(),
2648 ),
2649 recovery_hint: Some(contract.recovery_hint.clone()),
2650 fail_closed: contract.fail_closed,
2651 })
2652 .map_err(store_err)?;
2653 }
2654 let status_label = match status {
2655 SupervisedDevloopStatus::AwaitingApproval => "awaiting_approval",
2656 SupervisedDevloopStatus::RejectedByPolicy => "rejected_by_policy",
2657 SupervisedDevloopStatus::FailedClosed => "failed_closed",
2658 SupervisedDevloopStatus::Executed => "executed",
2659 };
2660 let reason_code_key = mutation_needed_reason_code_key(contract.reason_code);
2661 Ok(SupervisedDevloopOutcome {
2662 task_id: request.task.id.clone(),
2663 task_class,
2664 status,
2665 execution_feedback: None,
2666 failure_contract: Some(contract.clone()),
2667 summary: format!(
2668 "supervised devloop {status_label} task '{}' [{reason_code_key}]: {}",
2669 request.task.id, contract.failure_reason
2670 ),
2671 })
2672 }
2673
    /// Executes a human-supervised devloop request end to end, fail-closed.
    ///
    /// Gate order: (1) the task must classify into a bounded task class; (2) the
    /// request must carry explicit human approval; (3) the diff byte size,
    /// changed-line blast radius, sandbox duration budget, and validation timeout
    /// budget must each stay within the bounded `MUTATION_NEEDED_*` limits. Only
    /// then does the mutation run through the normal capture pipeline. Every
    /// refusal is returned as a normalized failure contract; pre-capture refusals
    /// also append a `MutationRejected` audit event, while capture-stage failures
    /// do not (the capture pipeline has already recorded its own rejection).
    pub async fn run_supervised_devloop(
        &self,
        run_id: &RunId,
        request: &SupervisedDevloopRequest,
        diff_payload: String,
        base_revision: Option<String>,
    ) -> Result<SupervisedDevloopOutcome, EvoKernelError> {
        let audit_mutation_id = mutation_needed_audit_mutation_id(request);
        let task_class = classify_supervised_devloop_request(request);
        // Gate 1: unsupported tasks are rejected by policy.
        let Some(task_class) = task_class else {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "supervised devloop rejected task '{}' because it is an unsupported task outside the bounded scope",
                    request.task.id
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                None,
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        };

        // Gate 2: pause (not fail) until a human explicitly approves.
        if !request.approval.approved {
            return Ok(SupervisedDevloopOutcome {
                task_id: request.task.id.clone(),
                task_class: Some(task_class),
                status: SupervisedDevloopStatus::AwaitingApproval,
                execution_feedback: None,
                failure_contract: None,
                summary: format!(
                    "supervised devloop paused task '{}' until explicit human approval is granted",
                    request.task.id
                ),
            });
        }

        // Gate 3a: bounded diff size.
        if diff_payload.len() > MUTATION_NEEDED_MAX_DIFF_BYTES {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed diff payload exceeds bounded byte budget (size={}, max={})",
                    diff_payload.len(),
                    MUTATION_NEEDED_MAX_DIFF_BYTES
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        }

        // Gate 3b: bounded blast radius — note this one fails closed, not just
        // rejected-by-policy, because an oversized patch is treated as unsafe.
        let blast_radius = compute_blast_radius(&diff_payload);
        if blast_radius.lines_changed > MUTATION_NEEDED_MAX_CHANGED_LINES {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed patch exceeds bounded changed-line budget (lines_changed={}, max={})",
                    blast_radius.lines_changed,
                    MUTATION_NEEDED_MAX_CHANGED_LINES
                )),
                Some(MutationNeededFailureReasonCode::UnsafePatch),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::FailedClosed,
                contract,
                Some(audit_mutation_id),
            );
        }

        // Gate 3c: the configured sandbox budget itself must be bounded.
        if self.sandbox_policy.max_duration_ms > MUTATION_NEEDED_MAX_SANDBOX_DURATION_MS {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed sandbox duration budget exceeds bounded policy (configured={}ms, max={}ms)",
                    self.sandbox_policy.max_duration_ms,
                    MUTATION_NEEDED_MAX_SANDBOX_DURATION_MS
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        }

        // Gate 3d: likewise for the validation timeout budget.
        let validation_budget_ms = validation_plan_timeout_budget_ms(&self.validation_plan);
        if validation_budget_ms > MUTATION_NEEDED_MAX_VALIDATION_BUDGET_MS {
            let contract = normalize_mutation_needed_failure_contract(
                Some(&format!(
                    "mutation-needed validation timeout budget exceeds bounded policy (configured={}ms, max={}ms)",
                    validation_budget_ms,
                    MUTATION_NEEDED_MAX_VALIDATION_BUDGET_MS
                )),
                Some(MutationNeededFailureReasonCode::PolicyDenied),
            );
            return self.mutation_needed_failure_outcome(
                request,
                Some(task_class),
                SupervisedDevloopStatus::RejectedByPolicy,
                contract,
                Some(audit_mutation_id),
            );
        }

        // All gates passed: run the real capture. Capture-stage failures carry
        // `None` for the audit id — the pipeline already recorded its rejection.
        let capture = match self
            .capture_from_proposal(run_id, &request.proposal, diff_payload, base_revision)
            .await
        {
            Ok(capture) => capture,
            Err(EvoKernelError::Sandbox(message)) => {
                let contract = mutation_needed_contract_for_error_message(&message);
                let status = mutation_needed_status_from_reason_code(contract.reason_code);
                return self.mutation_needed_failure_outcome(
                    request,
                    Some(task_class),
                    status,
                    contract,
                    None,
                );
            }
            Err(EvoKernelError::ValidationFailed(report)) => {
                let contract = mutation_needed_contract_for_validation_failure(
                    &self.validation_plan.profile,
                    &report,
                );
                let status = mutation_needed_status_from_reason_code(contract.reason_code);
                return self.mutation_needed_failure_outcome(
                    request,
                    Some(task_class),
                    status,
                    contract,
                    None,
                );
            }
            Err(EvoKernelError::Validation(message)) => {
                let contract = mutation_needed_contract_for_error_message(&message);
                let status = mutation_needed_status_from_reason_code(contract.reason_code);
                return self.mutation_needed_failure_outcome(
                    request,
                    Some(task_class),
                    status,
                    contract,
                    None,
                );
            }
            Err(err) => return Err(err),
        };
        let approver = request
            .approval
            .approver
            .as_deref()
            .unwrap_or("unknown approver");

        Ok(SupervisedDevloopOutcome {
            task_id: request.task.id.clone(),
            task_class: Some(task_class),
            status: SupervisedDevloopStatus::Executed,
            execution_feedback: Some(Self::feedback_for_agent(&capture)),
            failure_contract: None,
            summary: format!(
                "supervised devloop executed task '{}' with explicit approval from {approver}",
                request.task.id
            ),
        })
    }
2849 pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
2850 MultiAgentCoordinator::new().coordinate(plan)
2851 }
2852
2853 pub fn export_promoted_assets(
2854 &self,
2855 sender_id: impl Into<String>,
2856 ) -> Result<EvolutionEnvelope, EvoKernelError> {
2857 let sender_id = sender_id.into();
2858 let envelope = export_promoted_assets_from_store(self.store.as_ref(), sender_id.clone())?;
2859 if !envelope.assets.is_empty() {
2860 let mut ledger = self
2861 .economics
2862 .lock()
2863 .map_err(|_| EvoKernelError::Validation("economics ledger lock poisoned".into()))?;
2864 if ledger
2865 .reserve_publish_stake(&sender_id, &self.stake_policy)
2866 .is_none()
2867 {
2868 return Err(EvoKernelError::Validation(
2869 "insufficient EVU for remote publish".into(),
2870 ));
2871 }
2872 }
2873 Ok(envelope)
2874 }
2875
2876 pub fn import_remote_envelope(
2877 &self,
2878 envelope: &EvolutionEnvelope,
2879 ) -> Result<ImportOutcome, EvoKernelError> {
2880 import_remote_envelope_into_store(
2881 self.store.as_ref(),
2882 envelope,
2883 Some(self.remote_publishers.as_ref()),
2884 None,
2885 )
2886 }
2887
2888 pub fn fetch_assets(
2889 &self,
2890 responder_id: impl Into<String>,
2891 query: &FetchQuery,
2892 ) -> Result<FetchResponse, EvoKernelError> {
2893 fetch_assets_from_store(self.store.as_ref(), responder_id, query)
2894 }
2895
2896 pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
2897 revoke_assets_in_store(self.store.as_ref(), notice)
2898 }
2899
2900 pub async fn replay_or_fallback(
2901 &self,
2902 input: SelectorInput,
2903 ) -> Result<ReplayDecision, EvoKernelError> {
2904 let replay_run_id = next_id("replay");
2905 self.replay_or_fallback_for_run(&replay_run_id, input).await
2906 }
2907
2908 pub async fn replay_or_fallback_for_run(
2909 &self,
2910 run_id: &RunId,
2911 input: SelectorInput,
2912 ) -> Result<ReplayDecision, EvoKernelError> {
2913 let executor = StoreReplayExecutor {
2914 sandbox: self.sandbox.clone(),
2915 validator: self.validator.clone(),
2916 store: self.store.clone(),
2917 selector: self.selector.clone(),
2918 governor: self.governor.clone(),
2919 economics: Some(self.economics.clone()),
2920 remote_publishers: Some(self.remote_publishers.clone()),
2921 stake_policy: self.stake_policy.clone(),
2922 };
2923 executor
2924 .try_replay_for_run(run_id, &input, &self.sandbox_policy, &self.validation_plan)
2925 .await
2926 .map_err(|err| EvoKernelError::Validation(err.to_string()))
2927 }
2928
2929 pub fn economics_signal(&self, node_id: &str) -> Option<EconomicsSignal> {
2930 self.economics.lock().ok()?.governor_signal(node_id)
2931 }
2932
2933 pub fn selector_reputation_bias(&self) -> BTreeMap<String, f32> {
2934 self.economics
2935 .lock()
2936 .ok()
2937 .map(|locked| locked.selector_reputation_bias())
2938 .unwrap_or_default()
2939 }
2940
2941 pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
2942 evolution_metrics_snapshot(self.store.as_ref())
2943 }
2944
2945 pub fn replay_roi_release_gate_summary(
2946 &self,
2947 window_seconds: u64,
2948 ) -> Result<ReplayRoiWindowSummary, EvoKernelError> {
2949 replay_roi_release_gate_summary(self.store.as_ref(), window_seconds)
2950 }
2951
2952 pub fn render_replay_roi_release_gate_summary_json(
2953 &self,
2954 window_seconds: u64,
2955 ) -> Result<String, EvoKernelError> {
2956 let summary = self.replay_roi_release_gate_summary(window_seconds)?;
2957 serde_json::to_string_pretty(&summary)
2958 .map_err(|err| EvoKernelError::Validation(err.to_string()))
2959 }
2960
2961 pub fn replay_roi_release_gate_contract(
2962 &self,
2963 window_seconds: u64,
2964 thresholds: ReplayRoiReleaseGateThresholds,
2965 ) -> Result<ReplayRoiReleaseGateContract, EvoKernelError> {
2966 let summary = self.replay_roi_release_gate_summary(window_seconds)?;
2967 Ok(replay_roi_release_gate_contract(&summary, thresholds))
2968 }
2969
2970 pub fn render_replay_roi_release_gate_contract_json(
2971 &self,
2972 window_seconds: u64,
2973 thresholds: ReplayRoiReleaseGateThresholds,
2974 ) -> Result<String, EvoKernelError> {
2975 let contract = self.replay_roi_release_gate_contract(window_seconds, thresholds)?;
2976 serde_json::to_string_pretty(&contract)
2977 .map_err(|err| EvoKernelError::Validation(err.to_string()))
2978 }
2979
2980 pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
2981 self.metrics_snapshot().map(|snapshot| {
2982 let health = evolution_health_snapshot(&snapshot);
2983 render_evolution_metrics_prometheus(&snapshot, &health)
2984 })
2985 }
2986
2987 pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
2988 self.metrics_snapshot()
2989 .map(|snapshot| evolution_health_snapshot(&snapshot))
2990 }
2991}
2992
2993pub fn prepare_mutation(
2994 intent: MutationIntent,
2995 diff_payload: String,
2996 base_revision: Option<String>,
2997) -> PreparedMutation {
2998 PreparedMutation {
2999 intent,
3000 artifact: MutationArtifact {
3001 encoding: ArtifactEncoding::UnifiedDiff,
3002 content_hash: compute_artifact_hash(&diff_payload),
3003 payload: diff_payload,
3004 base_revision,
3005 },
3006 }
3007}
3008
3009pub fn prepare_mutation_from_spec(
3010 plan: CompiledMutationPlan,
3011 diff_payload: String,
3012 base_revision: Option<String>,
3013) -> PreparedMutation {
3014 prepare_mutation(plan.mutation_intent, diff_payload, base_revision)
3015}
3016
3017pub fn default_evolution_store() -> Arc<dyn EvolutionStore> {
3018 Arc::new(oris_evolution::JsonlEvolutionStore::new(
3019 default_store_root(),
3020 ))
3021}
3022
/// The four built-in bootstrap seed templates used by `bootstrap_if_empty`.
///
/// Each template carries a literal unified-diff payload; the payload bytes are
/// hashed into stable mutation/gene ids, so the embedded strings must not be
/// reformatted. All seeds share the "bootstrap-seed" validation profile.
fn built_in_seed_templates() -> Vec<SeedTemplate> {
    vec![
        // Recovery pattern: create a minimal README from scratch.
        SeedTemplate {
            id: "bootstrap-readme".into(),
            intent: "Seed a baseline README recovery pattern".into(),
            signals: vec!["bootstrap readme".into(), "missing readme".into()],
            diff_payload: "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1,3 @@
+# Oris
+Bootstrap documentation seed
+"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Pattern: deterministic test stabilization via an added helper.
        SeedTemplate {
            id: "bootstrap-test-fix".into(),
            intent: "Seed a deterministic test stabilization pattern".into(),
            signals: vec!["bootstrap test fix".into(), "failing tests".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 1111111..2222222 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,2 @@
 pub fn demo() -> usize { 1 }
+pub fn normalize_test_output() -> bool { true }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Pattern: low-risk internal refactor (private helper extraction).
        SeedTemplate {
            id: "bootstrap-refactor".into(),
            intent: "Seed a low-risk refactor capsule".into(),
            signals: vec!["bootstrap refactor".into(), "small refactor".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 2222222..3333333 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,3 @@
 pub fn demo() -> usize { 1 }
+
+fn extract_strategy_key(input: &str) -> &str { input }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Pattern: baseline structured-logging addition.
        SeedTemplate {
            id: "bootstrap-logging".into(),
            intent: "Seed a baseline structured logging mutation".into(),
            signals: vec!["bootstrap logging".into(), "structured logs".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 3333333..4444444 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,3 @@
 pub fn demo() -> usize { 1 }
+
+fn emit_bootstrap_log() { println!(\"bootstrap-log\"); }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
    ]
}
3094
3095fn build_seed_mutation(template: &SeedTemplate) -> PreparedMutation {
3096 let changed_files = seed_changed_files(&template.diff_payload);
3097 let target = if changed_files.is_empty() {
3098 MutationTarget::WorkspaceRoot
3099 } else {
3100 MutationTarget::Paths {
3101 allow: changed_files,
3102 }
3103 };
3104 prepare_mutation(
3105 MutationIntent {
3106 id: stable_hash_json(&("bootstrap-mutation", &template.id))
3107 .unwrap_or_else(|_| format!("bootstrap-mutation-{}", template.id)),
3108 intent: template.intent.clone(),
3109 target,
3110 expected_effect: format!("seed {}", template.id),
3111 risk: RiskLevel::Low,
3112 signals: template.signals.clone(),
3113 spec_id: None,
3114 },
3115 template.diff_payload.clone(),
3116 None,
3117 )
3118}
3119
3120fn extract_seed_signals(template: &SeedTemplate) -> SignalExtractionOutput {
3121 let mut signals = BTreeSet::new();
3122 for declared in &template.signals {
3123 if let Some(phrase) = normalize_signal_phrase(declared) {
3124 signals.insert(phrase);
3125 }
3126 extend_signal_tokens(&mut signals, declared);
3127 }
3128 extend_signal_tokens(&mut signals, &template.intent);
3129 extend_signal_tokens(&mut signals, &template.diff_payload);
3130 for changed_file in seed_changed_files(&template.diff_payload) {
3131 extend_signal_tokens(&mut signals, &changed_file);
3132 }
3133 let values = signals.into_iter().take(32).collect::<Vec<_>>();
3134 let hash =
3135 stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
3136 SignalExtractionOutput { values, hash }
3137}
3138
/// Extract the set of files touched by a unified diff payload.
///
/// Scans for `+++ b/<path>` headers and returns the unique, non-empty paths
/// in sorted order (deduplicated through a `BTreeSet`).
fn seed_changed_files(diff_payload: &str) -> Vec<String> {
    diff_payload
        .lines()
        .filter_map(|line| line.strip_prefix("+++ b/"))
        .map(str::trim)
        .filter(|path| !path.is_empty())
        .map(str::to_string)
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect()
}
3151
3152fn build_bootstrap_gene(
3153 template: &SeedTemplate,
3154 extracted: &SignalExtractionOutput,
3155) -> Result<Gene, EvolutionError> {
3156 let mut strategy = vec![template.id.clone(), "bootstrap".into()];
3157 let (task_class_id, task_label) = replay_task_descriptor(&extracted.values);
3158 ensure_strategy_metadata(&mut strategy, "task_class", &task_class_id);
3159 ensure_strategy_metadata(&mut strategy, "task_label", &task_label);
3160 let id = stable_hash_json(&(
3161 "bootstrap-gene",
3162 &template.id,
3163 &extracted.values,
3164 &template.validation_profile,
3165 ))?;
3166 Ok(Gene {
3167 id,
3168 signals: extracted.values.clone(),
3169 strategy,
3170 validation: vec![template.validation_profile.clone()],
3171 state: AssetState::Quarantined,
3172 })
3173}
3174
3175fn build_bootstrap_capsule(
3176 run_id: &RunId,
3177 template: &SeedTemplate,
3178 mutation: &PreparedMutation,
3179 gene: &Gene,
3180) -> Result<Capsule, EvolutionError> {
3181 let cwd = std::env::current_dir().unwrap_or_else(|_| Path::new(".").to_path_buf());
3182 let env = current_env_fingerprint(&cwd);
3183 let diff_hash = mutation.artifact.content_hash.clone();
3184 let changed_files = seed_changed_files(&template.diff_payload);
3185 let validator_hash = stable_hash_json(&(
3186 "bootstrap-validator",
3187 &template.id,
3188 &template.validation_profile,
3189 &diff_hash,
3190 ))?;
3191 let id = stable_hash_json(&(
3192 "bootstrap-capsule",
3193 &template.id,
3194 run_id,
3195 &gene.id,
3196 &diff_hash,
3197 &env,
3198 ))?;
3199 Ok(Capsule {
3200 id,
3201 gene_id: gene.id.clone(),
3202 mutation_id: mutation.intent.id.clone(),
3203 run_id: run_id.clone(),
3204 diff_hash,
3205 confidence: 0.0,
3206 env,
3207 outcome: Outcome {
3208 success: false,
3209 validation_profile: template.validation_profile.clone(),
3210 validation_duration_ms: 0,
3211 changed_files,
3212 validator_hash,
3213 lines_changed: compute_blast_radius(&template.diff_payload).lines_changed,
3214 replay_verified: false,
3215 },
3216 state: AssetState::Quarantined,
3217 })
3218}
3219
/// Derive a promoted `Gene` from a validated mutation and its sandbox receipt.
///
/// Strategy tokens are harvested from three places:
/// - the first path component of each changed file,
/// - rustc-style error codes in the mutation payload (exactly `E` + 4 digits;
///   note this check is uppercase-only, unlike `is_rust_error_code`),
/// - up to the first 8 whitespace-separated words of the intent, lowercased.
///
/// The replay task descriptor is computed from the intent's own signals when
/// any of them normalizes to a usable phrase, otherwise from the extracted
/// signals. The gene id is a stable hash of signals/strategy/profile with a
/// random fallback id when hashing fails.
fn derive_gene(
    mutation: &PreparedMutation,
    receipt: &SandboxReceipt,
    validation_profile: &str,
    extracted_signals: &[String],
) -> Gene {
    let mut strategy = BTreeSet::new();
    // Top-level directory (or file) of each change hints at the area touched.
    for file in &receipt.changed_files {
        if let Some(component) = file.components().next() {
            strategy.insert(component.as_os_str().to_string_lossy().to_string());
        }
    }
    // Pick out rustc diagnostic codes ("E0308"-style) from the diff payload.
    for token in mutation
        .artifact
        .payload
        .split(|ch: char| !ch.is_ascii_alphanumeric())
    {
        if token.len() == 5
            && token.starts_with('E')
            && token[1..].chars().all(|ch| ch.is_ascii_digit())
        {
            strategy.insert(token.to_string());
        }
    }
    for token in mutation.intent.intent.split_whitespace().take(8) {
        strategy.insert(token.to_ascii_lowercase());
    }
    // BTreeSet -> Vec keeps the strategy sorted and deduplicated.
    let mut strategy = strategy.into_iter().collect::<Vec<_>>();
    let descriptor_signals = if mutation
        .intent
        .signals
        .iter()
        .any(|signal| normalize_signal_phrase(signal).is_some())
    {
        mutation.intent.signals.as_slice()
    } else {
        extracted_signals
    };
    let (task_class_id, task_label) = replay_task_descriptor(descriptor_signals);
    ensure_strategy_metadata(&mut strategy, "task_class", &task_class_id);
    ensure_strategy_metadata(&mut strategy, "task_label", &task_label);
    let id = stable_hash_json(&(extracted_signals, &strategy, validation_profile))
        .unwrap_or_else(|_| next_id("gene"));
    Gene {
        id,
        signals: extracted_signals.to_vec(),
        strategy,
        validation: vec![validation_profile.to_string()],
        state: AssetState::Promoted,
    }
}
3271
3272fn build_capsule(
3273 run_id: &RunId,
3274 mutation: &PreparedMutation,
3275 receipt: &SandboxReceipt,
3276 report: &ValidationReport,
3277 validation_profile: &str,
3278 gene: &Gene,
3279 blast_radius: &BlastRadius,
3280) -> Result<Capsule, EvolutionError> {
3281 let env = current_env_fingerprint(&receipt.workdir);
3282 let validator_hash = stable_hash_json(report)?;
3283 let diff_hash = mutation.artifact.content_hash.clone();
3284 let id = stable_hash_json(&(run_id, &gene.id, &diff_hash, &mutation.intent.id))?;
3285 Ok(Capsule {
3286 id,
3287 gene_id: gene.id.clone(),
3288 mutation_id: mutation.intent.id.clone(),
3289 run_id: run_id.clone(),
3290 diff_hash,
3291 confidence: 0.7,
3292 env,
3293 outcome: oris_evolution::Outcome {
3294 success: true,
3295 validation_profile: validation_profile.to_string(),
3296 validation_duration_ms: report.duration_ms,
3297 changed_files: receipt
3298 .changed_files
3299 .iter()
3300 .map(|path| path.to_string_lossy().to_string())
3301 .collect(),
3302 validator_hash,
3303 lines_changed: blast_radius.lines_changed,
3304 replay_verified: false,
3305 },
3306 state: AssetState::Promoted,
3307 })
3308}
3309
3310fn current_env_fingerprint(workdir: &Path) -> EnvFingerprint {
3311 let rustc_version = Command::new("rustc")
3312 .arg("--version")
3313 .output()
3314 .ok()
3315 .filter(|output| output.status.success())
3316 .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
3317 .unwrap_or_else(|| "rustc unknown".into());
3318 let cargo_lock_hash = fs::read(workdir.join("Cargo.lock"))
3319 .ok()
3320 .map(|bytes| {
3321 let value = String::from_utf8_lossy(&bytes);
3322 compute_artifact_hash(&value)
3323 })
3324 .unwrap_or_else(|| "missing-cargo-lock".into());
3325 let target_triple = format!(
3326 "{}-unknown-{}",
3327 std::env::consts::ARCH,
3328 std::env::consts::OS
3329 );
3330 EnvFingerprint {
3331 rustc_version,
3332 cargo_lock_hash,
3333 target_triple,
3334 os: std::env::consts::OS.to_string(),
3335 }
3336}
3337
3338fn extend_signal_tokens(out: &mut BTreeSet<String>, input: &str) {
3339 for raw in input.split(|ch: char| !ch.is_ascii_alphanumeric()) {
3340 let trimmed = raw.trim();
3341 if trimmed.is_empty() {
3342 continue;
3343 }
3344 let normalized = if is_rust_error_code(trimmed) {
3345 let mut chars = trimmed.chars();
3346 let prefix = chars
3347 .next()
3348 .map(|ch| ch.to_ascii_uppercase())
3349 .unwrap_or('E');
3350 format!("{prefix}{}", chars.as_str())
3351 } else {
3352 trimmed.to_ascii_lowercase()
3353 };
3354 if normalized.len() < 3 {
3355 continue;
3356 }
3357 out.insert(normalized);
3358 }
3359}
3360
3361fn normalize_signal_phrase(input: &str) -> Option<String> {
3362 let mut seen = BTreeSet::new();
3363 let mut normalized_tokens = Vec::new();
3364 for raw in input.split(|ch: char| !ch.is_ascii_alphanumeric()) {
3365 let Some(token) = canonical_replay_signal_token(raw) else {
3366 continue;
3367 };
3368 if seen.insert(token.clone()) {
3369 normalized_tokens.push(token);
3370 }
3371 }
3372 let normalized = normalized_tokens.join(" ");
3373 if normalized.is_empty() {
3374 None
3375 } else {
3376 Some(normalized)
3377 }
3378}
3379
3380fn canonical_replay_signal_token(raw: &str) -> Option<String> {
3381 let trimmed = raw.trim();
3382 if trimmed.is_empty() {
3383 return None;
3384 }
3385 let normalized = if is_rust_error_code(trimmed) {
3386 let mut chars = trimmed.chars();
3387 let prefix = chars
3388 .next()
3389 .map(|ch| ch.to_ascii_uppercase())
3390 .unwrap_or('E');
3391 format!("{prefix}{}", chars.as_str())
3392 } else {
3393 trimmed.to_ascii_lowercase()
3394 };
3395 if normalized.len() < 3 {
3396 return None;
3397 }
3398 if normalized.chars().all(|ch| ch.is_ascii_digit()) {
3399 return None;
3400 }
3401 match normalized.as_str() {
3402 "absent" | "unavailable" | "vanished" => Some("missing".into()),
3403 "file" | "files" | "error" | "errors" => None,
3404 _ => Some(normalized),
3405 }
3406}
3407
/// Derive a `(task_class_id, task_label)` descriptor from raw signals.
///
/// The class id is a stable hash of the sorted, deduplicated, normalized
/// signal phrases (content-addressed: identical signal sets share a class).
/// The label is the most descriptive phrase: validation status summaries are
/// excluded, then the maximum by (has-letters, is-multi-word, token count,
/// length) wins; on full ties `max_by_key` returns the later phrase in
/// sorted order. Empty/unusable input yields ("unknown", "unknown").
fn replay_task_descriptor(signals: &[String]) -> (String, String) {
    let normalized = signals
        .iter()
        .filter_map(|signal| normalize_signal_phrase(signal))
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect::<Vec<_>>();
    if normalized.is_empty() {
        return ("unknown".into(), "unknown".into());
    }
    let task_label = normalized
        .iter()
        // "validation passed/failed" summaries say nothing about the task.
        .filter(|value| !is_validation_summary_phrase(value))
        .max_by_key(|value| {
            let token_count = value.split_whitespace().count();
            // Lexicographic tuple: prefer alphabetic, then multi-word,
            // then more tokens, then longer text.
            (
                value.chars().any(|ch| ch.is_ascii_alphabetic()),
                token_count >= 2,
                token_count,
                value.len(),
            )
        })
        .cloned()
        // Everything was a summary phrase: fall back to the first phrase.
        .unwrap_or_else(|| normalized[0].clone());
    let task_class_id = stable_hash_json(&normalized)
        .unwrap_or_else(|_| compute_artifact_hash(&normalized.join("\n")));
    (task_class_id, task_label)
}
3436
/// True when the phrase is only a validation status summary — i.e. its set
/// of unique whitespace tokens is exactly {"validation", "passed"} or
/// {"validation", "failed"}, in any order.
fn is_validation_summary_phrase(value: &str) -> bool {
    let tokens: BTreeSet<_> = value.split_whitespace().collect();
    ["passed", "failed"]
        .iter()
        .any(|status| tokens == BTreeSet::from(["validation", *status]))
}
3442
3443fn normalized_signal_values(signals: &[String]) -> Vec<String> {
3444 signals
3445 .iter()
3446 .filter_map(|signal| normalize_signal_phrase(signal))
3447 .collect::<BTreeSet<_>>()
3448 .into_iter()
3449 .collect::<Vec<_>>()
3450}
3451
3452fn matched_replay_signals(input_signals: &[String], candidate_signals: &[String]) -> Vec<String> {
3453 let normalized_input = normalized_signal_values(input_signals);
3454 if normalized_input.is_empty() {
3455 return Vec::new();
3456 }
3457 let normalized_candidate = normalized_signal_values(candidate_signals);
3458 if normalized_candidate.is_empty() {
3459 return normalized_input;
3460 }
3461 let matched = normalized_input
3462 .iter()
3463 .filter(|signal| {
3464 normalized_candidate
3465 .iter()
3466 .any(|candidate| candidate.contains(signal.as_str()) || signal.contains(candidate))
3467 })
3468 .cloned()
3469 .collect::<Vec<_>>();
3470 if matched.is_empty() {
3471 normalized_input
3472 } else {
3473 matched
3474 }
3475}
3476
3477fn replay_detect_evidence_from_input(input: &SelectorInput) -> ReplayDetectEvidence {
3478 let (task_class_id, task_label) = replay_task_descriptor(&input.signals);
3479 ReplayDetectEvidence {
3480 task_class_id,
3481 task_label,
3482 matched_signals: normalized_signal_values(&input.signals),
3483 mismatch_reasons: Vec::new(),
3484 }
3485}
3486
3487fn replay_descriptor_from_candidate_or_input(
3488 candidate: Option<&GeneCandidate>,
3489 input: &SelectorInput,
3490) -> (String, String) {
3491 if let Some(candidate) = candidate {
3492 let task_class_id = strategy_metadata_value(&candidate.gene.strategy, "task_class");
3493 let task_label = strategy_metadata_value(&candidate.gene.strategy, "task_label");
3494 if let Some(task_class_id) = task_class_id {
3495 return (
3496 task_class_id.clone(),
3497 task_label.unwrap_or_else(|| task_class_id.clone()),
3498 );
3499 }
3500 return replay_task_descriptor(&candidate.gene.signals);
3501 }
3502 replay_task_descriptor(&input.signals)
3503}
3504
3505fn estimated_reasoning_tokens(signals: &[String]) -> u64 {
3506 let normalized = signals
3507 .iter()
3508 .filter_map(|signal| normalize_signal_phrase(signal))
3509 .collect::<BTreeSet<_>>();
3510 let signal_count = normalized.len() as u64;
3511 REPLAY_REASONING_TOKEN_FLOOR + REPLAY_REASONING_TOKEN_SIGNAL_WEIGHT * signal_count.max(1)
3512}
3513
/// Compute replay ROI in [-1.0, 1.0] as a normalized difference:
/// `(avoided - fallback) / (avoided + fallback)`.
///
/// Returns 0.0 when both inputs are zero (no evidence either way); 1.0 means
/// pure savings, -1.0 means pure fallback cost.
///
/// The sum uses `saturating_add` so extreme token counts cannot overflow
/// `u64` (which would panic in debug builds / wrap in release); the ratio
/// simply degrades toward 0 instead.
fn compute_replay_roi(reasoning_avoided_tokens: u64, replay_fallback_cost: u64) -> f64 {
    let total = reasoning_avoided_tokens.saturating_add(replay_fallback_cost);
    if total == 0 {
        return 0.0;
    }
    (reasoning_avoided_tokens as f64 - replay_fallback_cost as f64) / total as f64
}
3521
/// True for rustc diagnostic codes: a leading 'E' or 'e' followed by exactly
/// four ASCII digits (e.g. "E0308").
fn is_rust_error_code(value: &str) -> bool {
    value
        .strip_prefix('E')
        .or_else(|| value.strip_prefix('e'))
        .map_or(false, |digits| {
            digits.len() == 4 && digits.bytes().all(|b| b.is_ascii_digit())
        })
}
3527
3528fn validation_plan_timeout_budget_ms(plan: &ValidationPlan) -> u64 {
3529 plan.stages.iter().fold(0_u64, |acc, stage| match stage {
3530 ValidationStage::Command { timeout_ms, .. } => acc.saturating_add(*timeout_ms),
3531 })
3532}
3533
/// Map a `MutationNeededFailureReasonCode` variant to its snake_case string
/// key. Exhaustive on purpose: adding a variant must force a new key here.
fn mutation_needed_reason_code_key(reason_code: MutationNeededFailureReasonCode) -> &'static str {
    match reason_code {
        MutationNeededFailureReasonCode::PolicyDenied => "policy_denied",
        MutationNeededFailureReasonCode::ValidationFailed => "validation_failed",
        MutationNeededFailureReasonCode::UnsafePatch => "unsafe_patch",
        MutationNeededFailureReasonCode::Timeout => "timeout",
        MutationNeededFailureReasonCode::MutationPayloadMissing => "mutation_payload_missing",
        MutationNeededFailureReasonCode::UnknownFailClosed => "unknown_fail_closed",
    }
}
3544
3545fn mutation_needed_status_from_reason_code(
3546 reason_code: MutationNeededFailureReasonCode,
3547) -> SupervisedDevloopStatus {
3548 if matches!(reason_code, MutationNeededFailureReasonCode::PolicyDenied) {
3549 SupervisedDevloopStatus::RejectedByPolicy
3550 } else {
3551 SupervisedDevloopStatus::FailedClosed
3552 }
3553}
3554
3555fn mutation_needed_contract_for_validation_failure(
3556 profile: &str,
3557 report: &ValidationReport,
3558) -> MutationNeededFailureContract {
3559 let lower_logs = report.logs.to_ascii_lowercase();
3560 if lower_logs.contains("timed out") {
3561 normalize_mutation_needed_failure_contract(
3562 Some(&format!(
3563 "mutation-needed validation command timed out under profile '{profile}'"
3564 )),
3565 Some(MutationNeededFailureReasonCode::Timeout),
3566 )
3567 } else {
3568 normalize_mutation_needed_failure_contract(
3569 Some(&format!(
3570 "mutation-needed validation failed under profile '{profile}'"
3571 )),
3572 Some(MutationNeededFailureReasonCode::ValidationFailed),
3573 )
3574 }
3575}
3576
3577fn mutation_needed_contract_for_error_message(message: &str) -> MutationNeededFailureContract {
3578 let reason_code = infer_mutation_needed_failure_reason_code(message);
3579 normalize_mutation_needed_failure_contract(Some(message), reason_code)
3580}
3581
3582fn mutation_needed_audit_mutation_id(request: &SupervisedDevloopRequest) -> String {
3583 stable_hash_json(&(
3584 "mutation-needed-audit",
3585 &request.task.id,
3586 &request.proposal.intent,
3587 &request.proposal.files,
3588 ))
3589 .map(|hash| format!("mutation-needed-{hash}"))
3590 .unwrap_or_else(|_| format!("mutation-needed-{}", request.task.id))
3591}
3592
3593fn classify_supervised_devloop_request(
3594 request: &SupervisedDevloopRequest,
3595) -> Option<BoundedTaskClass> {
3596 let path = request.proposal.files.first()?.trim();
3597 if request.proposal.files.len() != 1 || path.is_empty() {
3598 return None;
3599 }
3600 let normalized = path.replace('\\', "/");
3601 if normalized.starts_with("docs/") && normalized.ends_with(".md") {
3602 Some(BoundedTaskClass::DocsSingleFile)
3603 } else {
3604 None
3605 }
3606}
3607
3608fn find_declared_mutation(
3609 store: &dyn EvolutionStore,
3610 mutation_id: &MutationId,
3611) -> Result<Option<PreparedMutation>, EvolutionError> {
3612 for stored in store.scan(1)? {
3613 if let EvolutionEvent::MutationDeclared { mutation } = stored.event {
3614 if &mutation.intent.id == mutation_id {
3615 return Ok(Some(mutation));
3616 }
3617 }
3618 }
3619 Ok(None)
3620}
3621
/// Find promoted genes whose signal set exactly matches the selector input.
///
/// A gene qualifies when it is `Promoted`; when the input carries a spec id,
/// the gene is linked to that spec id (case-insensitive); its lowercased
/// signal set equals the input's lowercased signal set; and it has at least
/// one promoted capsule.
///
/// Each candidate's capsules are ordered by environment match (desc), then
/// confidence (desc), then capsule id (asc); the candidate score is the best
/// capsule's environment-match factor. Candidates come back sorted by score
/// (desc) with gene id as a deterministic tiebreak. Store read errors
/// degrade to an empty result rather than propagating.
fn exact_match_candidates(store: &dyn EvolutionStore, input: &SelectorInput) -> Vec<GeneCandidate> {
    let Ok(projection) = projection_snapshot(store) else {
        return Vec::new();
    };
    let capsules = projection.capsules.clone();
    let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
    // Blank/whitespace-only spec ids count as "no spec filter".
    let requested_spec_id = input
        .spec_id
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty());
    let signal_set = input
        .signals
        .iter()
        .map(|signal| signal.to_ascii_lowercase())
        .collect::<BTreeSet<_>>();
    let mut candidates = projection
        .genes
        .into_iter()
        .filter_map(|gene| {
            if gene.state != AssetState::Promoted {
                return None;
            }
            if let Some(spec_id) = requested_spec_id {
                let matches_spec = spec_ids_by_gene
                    .get(&gene.id)
                    .map(|values| {
                        values
                            .iter()
                            .any(|value| value.eq_ignore_ascii_case(spec_id))
                    })
                    .unwrap_or(false);
                if !matches_spec {
                    return None;
                }
            }
            // Exact match means set equality on lowercased signals.
            let gene_signals = gene
                .signals
                .iter()
                .map(|signal| signal.to_ascii_lowercase())
                .collect::<BTreeSet<_>>();
            if gene_signals == signal_set {
                let mut matched_capsules = capsules
                    .iter()
                    .filter(|capsule| {
                        capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
                    })
                    .cloned()
                    .collect::<Vec<_>>();
                // Best capsule first: env match desc, confidence desc, id asc.
                // NaN comparisons fall back to Equal rather than panicking.
                matched_capsules.sort_by(|left, right| {
                    replay_environment_match_factor(&input.env, &right.env)
                        .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
                        .unwrap_or(std::cmp::Ordering::Equal)
                        .then_with(|| {
                            right
                                .confidence
                                .partial_cmp(&left.confidence)
                                .unwrap_or(std::cmp::Ordering::Equal)
                        })
                        .then_with(|| left.id.cmp(&right.id))
                });
                if matched_capsules.is_empty() {
                    None
                } else {
                    let score = matched_capsules
                        .first()
                        .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
                        .unwrap_or(0.0);
                    Some(GeneCandidate {
                        gene,
                        score,
                        capsules: matched_capsules,
                    })
                }
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    candidates.sort_by(|left, right| {
        right
            .score
            .partial_cmp(&left.score)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| left.gene.id.cmp(&right.gene.id))
    });
    candidates
}
3710
/// Find remotely-imported, not-yet-promoted candidates matching the input.
///
/// Unlike `exact_match_candidates`, this uses normalized signal phrases and
/// substring overlap (in either direction) instead of strict set equality,
/// and it only considers capsules that (a) were imported from a remote peer
/// (per `RemoteAssetImported` events) and (b) are still `Quarantined` or
/// `ShadowValidated`. The candidate score is the larger of the query-signal
/// overlap ratio and the best capsule's environment-match factor. Store read
/// errors and empty inputs degrade to an empty result.
fn quarantined_remote_exact_match_candidates(
    store: &dyn EvolutionStore,
    input: &SelectorInput,
) -> Vec<GeneCandidate> {
    // Collect the ids of every asset that arrived via remote import.
    let remote_asset_ids = store
        .scan(1)
        .ok()
        .map(|events| {
            events
                .into_iter()
                .filter_map(|stored| match stored.event {
                    EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids,
                        ..
                    } => Some(asset_ids),
                    _ => None,
                })
                .flatten()
                .collect::<BTreeSet<_>>()
        })
        .unwrap_or_default();
    if remote_asset_ids.is_empty() {
        return Vec::new();
    }

    let Ok(projection) = projection_snapshot(store) else {
        return Vec::new();
    };
    let capsules = projection.capsules.clone();
    let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
    let requested_spec_id = input
        .spec_id
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty());
    let normalized_signals = input
        .signals
        .iter()
        .filter_map(|signal| normalize_signal_phrase(signal))
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect::<Vec<_>>();
    if normalized_signals.is_empty() {
        return Vec::new();
    }
    let mut candidates = projection
        .genes
        .into_iter()
        .filter_map(|gene| {
            if !matches!(
                gene.state,
                AssetState::Promoted | AssetState::Quarantined | AssetState::ShadowValidated
            ) {
                return None;
            }
            if let Some(spec_id) = requested_spec_id {
                let matches_spec = spec_ids_by_gene
                    .get(&gene.id)
                    .map(|values| {
                        values
                            .iter()
                            .any(|value| value.eq_ignore_ascii_case(spec_id))
                    })
                    .unwrap_or(false);
                if !matches_spec {
                    return None;
                }
            }
            let normalized_gene_signals = gene
                .signals
                .iter()
                .filter_map(|candidate| normalize_signal_phrase(candidate))
                .collect::<Vec<_>>();
            // Count query phrases that overlap (substring, either direction)
            // with any of the gene's phrases.
            let matched_query_count = normalized_signals
                .iter()
                .filter(|signal| {
                    normalized_gene_signals.iter().any(|candidate| {
                        candidate.contains(signal.as_str()) || signal.contains(candidate)
                    })
                })
                .count();
            if matched_query_count == 0 {
                return None;
            }

            let mut matched_capsules = capsules
                .iter()
                .filter(|capsule| {
                    capsule.gene_id == gene.id
                        && matches!(
                            capsule.state,
                            AssetState::Quarantined | AssetState::ShadowValidated
                        )
                        && remote_asset_ids.contains(&capsule.id)
                })
                .cloned()
                .collect::<Vec<_>>();
            // Best capsule first: env match desc, confidence desc, id asc.
            matched_capsules.sort_by(|left, right| {
                replay_environment_match_factor(&input.env, &right.env)
                    .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
                    .unwrap_or(std::cmp::Ordering::Equal)
                    .then_with(|| {
                        right
                            .confidence
                            .partial_cmp(&left.confidence)
                            .unwrap_or(std::cmp::Ordering::Equal)
                    })
                    .then_with(|| left.id.cmp(&right.id))
            });
            if matched_capsules.is_empty() {
                None
            } else {
                let overlap = matched_query_count as f32 / normalized_signals.len() as f32;
                let env_score = matched_capsules
                    .first()
                    .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
                    .unwrap_or(0.0);
                Some(GeneCandidate {
                    gene,
                    score: overlap.max(env_score),
                    capsules: matched_capsules,
                })
            }
        })
        .collect::<Vec<_>>();
    candidates.sort_by(|left, right| {
        right
            .score
            .partial_cmp(&left.score)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| left.gene.id.cmp(&right.gene.id))
    });
    candidates
}
3846
3847fn replay_environment_match_factor(input: &EnvFingerprint, candidate: &EnvFingerprint) -> f32 {
3848 let fields = [
3849 input
3850 .rustc_version
3851 .eq_ignore_ascii_case(&candidate.rustc_version),
3852 input
3853 .cargo_lock_hash
3854 .eq_ignore_ascii_case(&candidate.cargo_lock_hash),
3855 input
3856 .target_triple
3857 .eq_ignore_ascii_case(&candidate.target_triple),
3858 input.os.eq_ignore_ascii_case(&candidate.os),
3859 ];
3860 let matched_fields = fields.into_iter().filter(|matched| *matched).count() as f32;
3861 0.5 + ((matched_fields / 4.0) * 0.5)
3862}
3863
3864fn effective_candidate_score(
3865 candidate: &GeneCandidate,
3866 publishers_by_asset: &BTreeMap<String, String>,
3867 reputation_bias: &BTreeMap<String, f32>,
3868) -> f32 {
3869 let bias = candidate
3870 .capsules
3871 .first()
3872 .and_then(|capsule| publishers_by_asset.get(&capsule.id))
3873 .and_then(|publisher| reputation_bias.get(publisher))
3874 .copied()
3875 .unwrap_or(0.0)
3876 .clamp(0.0, 1.0);
3877 candidate.score * (1.0 + (bias * 0.1))
3878}
3879
3880fn export_promoted_assets_from_store(
3881 store: &dyn EvolutionStore,
3882 sender_id: impl Into<String>,
3883) -> Result<EvolutionEnvelope, EvoKernelError> {
3884 let (events, projection) = scan_projection(store)?;
3885 let genes = projection
3886 .genes
3887 .into_iter()
3888 .filter(|gene| gene.state == AssetState::Promoted)
3889 .collect::<Vec<_>>();
3890 let capsules = projection
3891 .capsules
3892 .into_iter()
3893 .filter(|capsule| capsule.state == AssetState::Promoted)
3894 .collect::<Vec<_>>();
3895 let assets = replay_export_assets(&events, genes, capsules);
3896 Ok(EvolutionEnvelope::publish(sender_id, assets))
3897}
3898
/// Read the full event log together with its folded projection in one store
/// call, mapping store errors into `EvoKernelError`.
fn scan_projection(
    store: &dyn EvolutionStore,
) -> Result<(Vec<StoredEvolutionEvent>, EvolutionProjection), EvoKernelError> {
    store.scan_projection().map_err(store_err)
}
3904
3905fn projection_snapshot(store: &dyn EvolutionStore) -> Result<EvolutionProjection, EvoKernelError> {
3906 scan_projection(store).map(|(_, projection)| projection)
3907}
3908
3909fn replay_export_assets(
3910 events: &[StoredEvolutionEvent],
3911 genes: Vec<Gene>,
3912 capsules: Vec<Capsule>,
3913) -> Vec<NetworkAsset> {
3914 let mutation_ids = capsules
3915 .iter()
3916 .map(|capsule| capsule.mutation_id.clone())
3917 .collect::<BTreeSet<_>>();
3918 let mut assets = replay_export_events_for_mutations(events, &mutation_ids);
3919 for gene in genes {
3920 assets.push(NetworkAsset::Gene { gene });
3921 }
3922 for capsule in capsules {
3923 assets.push(NetworkAsset::Capsule { capsule });
3924 }
3925 assets
3926}
3927
/// Collect the `MutationDeclared` and `SpecLinked` events belonging to the
/// given mutation ids, preserving log order and exporting each mutation /
/// spec-link pair at most once.
///
/// Dedup is done via `insert` calls inside the match guards: `&&` is
/// short-circuiting, so the insert only runs when the id check passed, and
/// its `bool` result rejects repeats.
fn replay_export_events_for_mutations(
    events: &[StoredEvolutionEvent],
    mutation_ids: &BTreeSet<String>,
) -> Vec<NetworkAsset> {
    if mutation_ids.is_empty() {
        return Vec::new();
    }

    let mut assets = Vec::new();
    let mut seen_mutations = BTreeSet::new();
    let mut seen_spec_links = BTreeSet::new();
    for stored in events {
        match &stored.event {
            EvolutionEvent::MutationDeclared { mutation }
                if mutation_ids.contains(mutation.intent.id.as_str())
                    && seen_mutations.insert(mutation.intent.id.clone()) =>
            {
                assets.push(NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared {
                        mutation: mutation.clone(),
                    },
                });
            }
            EvolutionEvent::SpecLinked {
                mutation_id,
                spec_id,
            } if mutation_ids.contains(mutation_id.as_str())
                && seen_spec_links.insert((mutation_id.clone(), spec_id.clone())) =>
            {
                assets.push(NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::SpecLinked {
                        mutation_id: mutation_id.clone(),
                        spec_id: spec_id.clone(),
                    },
                });
            }
            _ => {}
        }
    }

    assets
}
3970
/// Prefix of sequence-based sync cursors: "seq:<u64>".
const SYNC_CURSOR_PREFIX: &str = "seq:";
/// Prefix of versioned resume tokens: "gep-rt1|<sender>|<cursor>".
const SYNC_RESUME_TOKEN_PREFIX: &str = "gep-rt1|";
3973
/// Ids touched by events after a given sequence number — used to scope an
/// incremental sync to the assets that actually changed (see `delta_window`).
#[derive(Clone, Debug)]
struct DeltaWindow {
    /// Genes projected, promoted, revoked, evaluated, or remote-imported.
    changed_gene_ids: BTreeSet<String>,
    /// Capsules committed, released, quarantined, or remote-imported.
    changed_capsule_ids: BTreeSet<String>,
    /// Mutations declared, spec-linked, or referenced by a committed capsule.
    changed_mutation_ids: BTreeSet<String>,
}
3980
/// Trim an optional sync parameter, dropping it entirely when absent or
/// blank after trimming.
fn normalize_sync_value(value: Option<&str>) -> Option<String> {
    match value.map(str::trim) {
        Some(trimmed) if !trimmed.is_empty() => Some(trimmed.to_owned()),
        _ => None,
    }
}
3987
3988fn parse_sync_cursor_seq(cursor: &str) -> Option<u64> {
3989 let trimmed = cursor.trim();
3990 if trimmed.is_empty() {
3991 return None;
3992 }
3993 let raw = trimmed.strip_prefix(SYNC_CURSOR_PREFIX).unwrap_or(trimmed);
3994 raw.parse::<u64>().ok()
3995}
3996
3997fn format_sync_cursor(seq: u64) -> String {
3998 format!("{SYNC_CURSOR_PREFIX}{seq}")
3999}
4000
4001fn encode_resume_token(sender_id: &str, cursor: &str) -> String {
4002 format!("{SYNC_RESUME_TOKEN_PREFIX}{sender_id}|{cursor}")
4003}
4004
4005fn decode_resume_token(sender_id: &str, token: &str) -> Result<String, EvoKernelError> {
4006 let token = token.trim();
4007 let Some(encoded) = token.strip_prefix(SYNC_RESUME_TOKEN_PREFIX) else {
4008 return Ok(token.to_string());
4009 };
4010 let (token_sender, cursor) = encoded.split_once('|').ok_or_else(|| {
4011 EvoKernelError::Validation(
4012 "invalid resume_token format; expected gep-rt1|<sender>|<seq>".into(),
4013 )
4014 })?;
4015 if token_sender != sender_id.trim() {
4016 return Err(EvoKernelError::Validation(
4017 "resume_token sender mismatch".into(),
4018 ));
4019 }
4020 Ok(cursor.to_string())
4021}
4022
4023fn resolve_requested_cursor(
4024 sender_id: &str,
4025 since_cursor: Option<&str>,
4026 resume_token: Option<&str>,
4027) -> Result<Option<String>, EvoKernelError> {
4028 let cursor = if let Some(token) = normalize_sync_value(resume_token) {
4029 Some(decode_resume_token(sender_id, &token)?)
4030 } else {
4031 normalize_sync_value(since_cursor)
4032 };
4033
4034 let Some(cursor) = cursor else {
4035 return Ok(None);
4036 };
4037 let seq = parse_sync_cursor_seq(&cursor).ok_or_else(|| {
4038 EvoKernelError::Validation("invalid since_cursor/resume_token cursor format".into())
4039 })?;
4040 Ok(Some(format_sync_cursor(seq)))
4041}
4042
4043fn latest_store_cursor(store: &dyn EvolutionStore) -> Result<Option<String>, EvoKernelError> {
4044 let events = store.scan(1).map_err(store_err)?;
4045 Ok(events.last().map(|stored| format_sync_cursor(stored.seq)))
4046}
4047
/// Fold all events strictly after `since_seq` into the sets of gene,
/// capsule, and mutation ids they touched.
///
/// A `CapsuleCommitted` event touches all three sets (the capsule plus its
/// parent gene and mutation). `RemoteAssetImported` carries opaque asset ids
/// that may be either genes or capsules, so each id is added to both sets.
/// Unhandled event kinds contribute nothing.
fn delta_window(events: &[StoredEvolutionEvent], since_seq: u64) -> DeltaWindow {
    let mut changed_gene_ids = BTreeSet::new();
    let mut changed_capsule_ids = BTreeSet::new();
    let mut changed_mutation_ids = BTreeSet::new();

    for stored in events {
        // Strictly-after semantics: the event at `since_seq` itself is
        // considered already synced.
        if stored.seq <= since_seq {
            continue;
        }
        match &stored.event {
            EvolutionEvent::MutationDeclared { mutation } => {
                changed_mutation_ids.insert(mutation.intent.id.clone());
            }
            EvolutionEvent::SpecLinked { mutation_id, .. } => {
                changed_mutation_ids.insert(mutation_id.clone());
            }
            EvolutionEvent::GeneProjected { gene } => {
                changed_gene_ids.insert(gene.id.clone());
            }
            EvolutionEvent::GenePromoted { gene_id }
            | EvolutionEvent::GeneRevoked { gene_id, .. }
            | EvolutionEvent::PromotionEvaluated { gene_id, .. } => {
                changed_gene_ids.insert(gene_id.clone());
            }
            EvolutionEvent::CapsuleCommitted { capsule } => {
                changed_capsule_ids.insert(capsule.id.clone());
                changed_gene_ids.insert(capsule.gene_id.clone());
                changed_mutation_ids.insert(capsule.mutation_id.clone());
            }
            EvolutionEvent::CapsuleReleased { capsule_id, .. }
            | EvolutionEvent::CapsuleQuarantined { capsule_id } => {
                changed_capsule_ids.insert(capsule_id.clone());
            }
            EvolutionEvent::RemoteAssetImported { asset_ids, .. } => {
                // Asset kind is unknown here; record the id in both sets.
                for asset_id in asset_ids {
                    changed_gene_ids.insert(asset_id.clone());
                    changed_capsule_ids.insert(asset_id.clone());
                }
            }
            _ => {}
        }
    }

    DeltaWindow {
        changed_gene_ids,
        changed_capsule_ids,
        changed_mutation_ids,
    }
}
4097
/// Validates a remote [`EvolutionEnvelope`] and imports its assets into the local store.
///
/// Flow:
/// 1. Reject the envelope when its content hash or manifest fails verification,
///    recording the validation outcome via `record_manifest_validation` either way.
/// 2. Snapshot the currently-known gene/capsule/mutation/spec-link ids so the
///    import is idempotent — duplicates are counted as skipped, not re-appended.
/// 3. Remote genes and capsules are re-projected in the `Quarantined` state:
///    remote assets must pass local validation before promotion.
/// 4. Loose evolution events are appended only when they are first-seen
///    `MutationDeclared`/`SpecLinked` entries or pass `should_import_remote_event`.
///
/// Returns an [`ImportOutcome`] carrying the sync audit (scanned/applied/skipped
/// counts), the latest store cursor, and an optional sender-scoped resume token.
fn import_remote_envelope_into_store(
    store: &dyn EvolutionStore,
    envelope: &EvolutionEnvelope,
    remote_publishers: Option<&Mutex<BTreeMap<String, String>>>,
    requested_cursor: Option<String>,
) -> Result<ImportOutcome, EvoKernelError> {
    // Tamper check: the envelope body must hash to its declared content hash.
    if !envelope.verify_content_hash() {
        record_manifest_validation(store, envelope, false, "invalid evolution envelope hash")?;
        return Err(EvoKernelError::Validation(
            "invalid evolution envelope hash".into(),
        ));
    }
    if let Err(reason) = envelope.verify_manifest() {
        record_manifest_validation(
            store,
            envelope,
            false,
            format!("manifest validation failed: {reason}"),
        )?;
        return Err(EvoKernelError::Validation(format!(
            "invalid evolution envelope manifest: {reason}"
        )));
    }
    // Persist the positive validation result before importing anything.
    record_manifest_validation(store, envelope, true, "manifest validated")?;

    let sender_id = normalized_sender_id(&envelope.sender_id);
    // Replay the store once to learn which assets/events already exist.
    let (events, projection) = scan_projection(store)?;
    let mut known_gene_ids = projection
        .genes
        .into_iter()
        .map(|gene| gene.id)
        .collect::<BTreeSet<_>>();
    let mut known_capsule_ids = projection
        .capsules
        .into_iter()
        .map(|capsule| capsule.id)
        .collect::<BTreeSet<_>>();
    let mut known_mutation_ids = BTreeSet::new();
    let mut known_spec_links = BTreeSet::new();
    for stored in &events {
        match &stored.event {
            EvolutionEvent::MutationDeclared { mutation } => {
                known_mutation_ids.insert(mutation.intent.id.clone());
            }
            EvolutionEvent::SpecLinked {
                mutation_id,
                spec_id,
            } => {
                known_spec_links.insert((mutation_id.clone(), spec_id.clone()));
            }
            _ => {}
        }
    }
    let mut imported_asset_ids = Vec::new();
    let mut applied_count = 0usize;
    let mut skipped_count = 0usize;
    for asset in &envelope.assets {
        match asset {
            NetworkAsset::Gene { gene } => {
                // `insert` returning false means this gene is already known: skip it.
                if !known_gene_ids.insert(gene.id.clone()) {
                    skipped_count += 1;
                    continue;
                }
                imported_asset_ids.push(gene.id.clone());
                applied_count += 1;
                // Remote genes enter quarantined regardless of their advertised state.
                let mut quarantined_gene = gene.clone();
                quarantined_gene.state = AssetState::Quarantined;
                store
                    .append_event(EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids: vec![gene.id.clone()],
                        sender_id: sender_id.clone(),
                    })
                    .map_err(store_err)?;
                store
                    .append_event(EvolutionEvent::GeneProjected {
                        gene: quarantined_gene.clone(),
                    })
                    .map_err(store_err)?;
                // Track who published this asset (in-memory map, best-effort).
                record_remote_publisher_for_asset(remote_publishers, &envelope.sender_id, asset);
                store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: quarantined_gene.id,
                        state: AssetState::Quarantined,
                        reason: "remote asset requires local validation before promotion".into(),
                        reason_code: TransitionReasonCode::DowngradeRemoteRequiresLocalValidation,
                        evidence: Some(TransitionEvidence {
                            replay_attempts: None,
                            replay_successes: None,
                            replay_success_rate: None,
                            environment_match_factor: None,
                            decayed_confidence: None,
                            confidence_decay_ratio: None,
                            summary: Some("phase=remote_import; source=remote; action=quarantine_before_shadow_validation".into()),
                        }),
                    })
                    .map_err(store_err)?;
            }
            NetworkAsset::Capsule { capsule } => {
                // Same dedup-by-id guard as for genes.
                if !known_capsule_ids.insert(capsule.id.clone()) {
                    skipped_count += 1;
                    continue;
                }
                imported_asset_ids.push(capsule.id.clone());
                applied_count += 1;
                store
                    .append_event(EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids: vec![capsule.id.clone()],
                        sender_id: sender_id.clone(),
                    })
                    .map_err(store_err)?;
                // Remote capsules are committed in the quarantined state as well.
                let mut quarantined = capsule.clone();
                quarantined.state = AssetState::Quarantined;
                store
                    .append_event(EvolutionEvent::CapsuleCommitted {
                        capsule: quarantined.clone(),
                    })
                    .map_err(store_err)?;
                record_remote_publisher_for_asset(remote_publishers, &envelope.sender_id, asset);
                store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: quarantined.id,
                    })
                    .map_err(store_err)?;
            }
            NetworkAsset::EvolutionEvent { event } => {
                // Declarations/links dedup via set insertion; everything else is
                // gated by the `should_import_remote_event` allow-list.
                let should_append = match event {
                    EvolutionEvent::MutationDeclared { mutation } => {
                        known_mutation_ids.insert(mutation.intent.id.clone())
                    }
                    EvolutionEvent::SpecLinked {
                        mutation_id,
                        spec_id,
                    } => known_spec_links.insert((mutation_id.clone(), spec_id.clone())),
                    _ if should_import_remote_event(event) => true,
                    _ => false,
                };
                if should_append {
                    store.append_event(event.clone()).map_err(store_err)?;
                    applied_count += 1;
                } else {
                    skipped_count += 1;
                }
            }
        }
    }
    // Cursor after all appends; the resume token binds it to the sender, when known.
    let next_cursor = latest_store_cursor(store)?;
    let resume_token = next_cursor.as_ref().and_then(|cursor| {
        normalized_sender_id(&envelope.sender_id).map(|sender| encode_resume_token(&sender, cursor))
    });

    Ok(ImportOutcome {
        imported_asset_ids,
        accepted: true,
        next_cursor: next_cursor.clone(),
        resume_token,
        sync_audit: SyncAudit {
            batch_id: next_id("sync-import"),
            requested_cursor,
            scanned_count: envelope.assets.len(),
            applied_count,
            skipped_count,
            failed_count: 0,
            failure_reasons: Vec::new(),
        },
    })
}
4266
/// Directory (relative to this crate's manifest dir) holding the bundled EvoMap snapshot.
const EVOMAP_SNAPSHOT_ROOT: &str = "assets/gep/evomap_snapshot";
/// Snapshot file listing seed genes.
const EVOMAP_SNAPSHOT_GENES_FILE: &str = "genes.json";
/// Snapshot file listing seed capsules.
const EVOMAP_SNAPSHOT_CAPSULES_FILE: &str = "capsules.json";
/// Synthetic run id attached to capsules seeded from the built-in EvoMap snapshot.
const EVOMAP_BUILTIN_RUN_ID: &str = "builtin-evomap-seed";
4271
/// Top-level shape of the EvoMap `genes.json` snapshot file.
#[derive(Debug, Deserialize)]
struct EvoMapGeneDocument {
    // An absent array deserializes as empty rather than failing.
    #[serde(default)]
    genes: Vec<EvoMapGeneAsset>,
}
4277
/// One gene entry in the EvoMap snapshot; all fields except `id` are optional
/// and get normalized/defaulted when converted into a local [`Gene`].
#[derive(Debug, Deserialize)]
struct EvoMapGeneAsset {
    id: String,
    #[serde(default)]
    category: Option<String>,
    /// Raw matching signals; heterogeneous JSON values, normalized later.
    #[serde(default)]
    signals_match: Vec<Value>,
    #[serde(default)]
    strategy: Vec<String>,
    #[serde(default)]
    validation: Vec<String>,
    #[serde(default)]
    constraints: Option<EvoMapConstraintAsset>,
    #[serde(default)]
    model_name: Option<String>,
    #[serde(default)]
    schema_version: Option<String>,
    /// Either a bare state string or an object with a `state` key; see
    /// `compatibility_state_from_value`.
    #[serde(default)]
    compatibility: Option<Value>,
}
4298
/// Optional edit constraints attached to an EvoMap gene; surfaced as strategy
/// metadata (`evomap_constraints_*`) on the imported gene.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapConstraintAsset {
    #[serde(default)]
    max_files: Option<usize>,
    #[serde(default)]
    forbidden_paths: Vec<String>,
}
4306
/// Top-level shape of the EvoMap `capsules.json` snapshot file.
#[derive(Debug, Deserialize)]
struct EvoMapCapsuleDocument {
    // An absent array deserializes as empty rather than failing.
    #[serde(default)]
    capsules: Vec<EvoMapCapsuleAsset>,
}
4312
/// One capsule entry in the EvoMap snapshot. Converted into a local
/// [`Capsule`] plus a [`PreparedMutation`] by `load_evomap_builtin_assets`.
#[derive(Debug, Deserialize)]
struct EvoMapCapsuleAsset {
    id: String,
    /// Id of the owning gene; must match a gene parsed from the same snapshot.
    gene: String,
    /// Trigger signals; lowercased and reused as mutation-intent signals.
    #[serde(default)]
    trigger: Vec<String>,
    #[serde(default)]
    summary: String,
    /// Optional unified diff (possibly wrapped in a ``` code fence); a synthetic
    /// diff is generated when absent or effectively empty.
    #[serde(default)]
    diff: Option<String>,
    /// Falls back to `outcome.score`, then 0.6, clamped to [0, 1].
    #[serde(default)]
    confidence: Option<f32>,
    #[serde(default)]
    outcome: Option<EvoMapOutcomeAsset>,
    #[serde(default)]
    blast_radius: Option<EvoMapBlastRadiusAsset>,
    #[serde(default)]
    content: Option<EvoMapCapsuleContentAsset>,
    /// Free-form environment object; mapped via `map_evomap_env_fingerprint`.
    #[serde(default)]
    env_fingerprint: Option<Value>,
    #[serde(default)]
    model_name: Option<String>,
    #[serde(default)]
    schema_version: Option<String>,
    #[serde(default)]
    compatibility: Option<Value>,
}
4340
/// Recorded outcome of an EvoMap capsule run; `status` drives the imported
/// capsule's success flag, `score` is a confidence fallback.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapOutcomeAsset {
    #[serde(default)]
    status: Option<String>,
    #[serde(default)]
    score: Option<f32>,
}
4348
/// Size of an EvoMap capsule's change; only the line count is consumed
/// (copied into the imported capsule's `lines_changed`).
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapBlastRadiusAsset {
    #[serde(default)]
    lines: usize,
}
4354
/// Explicit changed-file list for an EvoMap capsule; takes precedence over
/// paths parsed from the diff payload.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapCapsuleContentAsset {
    #[serde(default)]
    changed_files: Vec<String>,
}
4360
/// A capsule plus the prepared mutation that produced it, ready to be seeded
/// into the store together (mutation declared before the capsule is committed).
#[derive(Debug)]
struct BuiltinCapsuleSeed {
    capsule: Capsule,
    mutation: PreparedMutation,
}
4366
/// Collected built-in seed assets: hard-coded experience genes plus any
/// genes/capsules loaded from the bundled EvoMap snapshot.
#[derive(Debug)]
struct BuiltinAssetBundle {
    genes: Vec<Gene>,
    capsules: Vec<BuiltinCapsuleSeed>,
}
4372
4373fn built_in_experience_genes() -> Vec<Gene> {
4374 vec![
4375 Gene {
4376 id: "builtin-experience-docs-rewrite-v1".into(),
4377 signals: vec!["docs.rewrite".into(), "docs".into(), "rewrite".into()],
4378 strategy: vec![
4379 "asset_origin=builtin".into(),
4380 "task_class=docs.rewrite".into(),
4381 "task_label=Docs rewrite".into(),
4382 "template_id=builtin-docs-rewrite-v1".into(),
4383 "summary=baseline docs rewrite experience".into(),
4384 ],
4385 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4386 state: AssetState::Promoted,
4387 },
4388 Gene {
4389 id: "builtin-experience-ci-fix-v1".into(),
4390 signals: vec![
4391 "ci.fix".into(),
4392 "ci".into(),
4393 "test".into(),
4394 "failure".into(),
4395 ],
4396 strategy: vec![
4397 "asset_origin=builtin".into(),
4398 "task_class=ci.fix".into(),
4399 "task_label=CI fix".into(),
4400 "template_id=builtin-ci-fix-v1".into(),
4401 "summary=baseline ci stabilization experience".into(),
4402 ],
4403 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4404 state: AssetState::Promoted,
4405 },
4406 Gene {
4407 id: "builtin-experience-task-decomposition-v1".into(),
4408 signals: vec![
4409 "task.decomposition".into(),
4410 "task".into(),
4411 "decomposition".into(),
4412 "planning".into(),
4413 ],
4414 strategy: vec![
4415 "asset_origin=builtin".into(),
4416 "task_class=task.decomposition".into(),
4417 "task_label=Task decomposition".into(),
4418 "template_id=builtin-task-decomposition-v1".into(),
4419 "summary=baseline task decomposition and routing experience".into(),
4420 ],
4421 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4422 state: AssetState::Promoted,
4423 },
4424 Gene {
4425 id: "builtin-experience-project-workflow-v1".into(),
4426 signals: vec![
4427 "project.workflow".into(),
4428 "project".into(),
4429 "workflow".into(),
4430 "milestone".into(),
4431 ],
4432 strategy: vec![
4433 "asset_origin=builtin".into(),
4434 "task_class=project.workflow".into(),
4435 "task_label=Project workflow".into(),
4436 "template_id=builtin-project-workflow-v1".into(),
4437 "summary=baseline project proposal and merge workflow experience".into(),
4438 ],
4439 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4440 state: AssetState::Promoted,
4441 },
4442 Gene {
4443 id: "builtin-experience-service-bid-v1".into(),
4444 signals: vec![
4445 "service.bid".into(),
4446 "service".into(),
4447 "bid".into(),
4448 "economics".into(),
4449 ],
4450 strategy: vec![
4451 "asset_origin=builtin".into(),
4452 "task_class=service.bid".into(),
4453 "task_label=Service bid".into(),
4454 "template_id=builtin-service-bid-v1".into(),
4455 "summary=baseline service bidding and settlement experience".into(),
4456 ],
4457 validation: vec!["builtin-template".into(), "origin=builtin".into()],
4458 state: AssetState::Promoted,
4459 },
4460 ]
4461}
4462
4463fn evomap_snapshot_path(file_name: &str) -> PathBuf {
4464 PathBuf::from(env!("CARGO_MANIFEST_DIR"))
4465 .join(EVOMAP_SNAPSHOT_ROOT)
4466 .join(file_name)
4467}
4468
4469fn read_evomap_snapshot(file_name: &str) -> Result<Option<String>, EvoKernelError> {
4470 let path = evomap_snapshot_path(file_name);
4471 if !path.exists() {
4472 return Ok(None);
4473 }
4474 fs::read_to_string(&path).map(Some).map_err(|err| {
4475 EvoKernelError::Validation(format!(
4476 "failed to read EvoMap snapshot {}: {err}",
4477 path.display()
4478 ))
4479 })
4480}
4481
4482fn compatibility_state_from_value(value: Option<&Value>) -> Option<String> {
4483 let value = value?;
4484 if let Some(state) = value.as_str() {
4485 let normalized = state.trim().to_ascii_lowercase();
4486 if normalized.is_empty() {
4487 return None;
4488 }
4489 return Some(normalized);
4490 }
4491 value
4492 .get("state")
4493 .and_then(Value::as_str)
4494 .map(str::trim)
4495 .filter(|state| !state.is_empty())
4496 .map(|state| state.to_ascii_lowercase())
4497}
4498
4499fn map_evomap_state(value: Option<&Value>) -> AssetState {
4500 match compatibility_state_from_value(value).as_deref() {
4501 Some("promoted") => AssetState::Promoted,
4502 Some("candidate") => AssetState::Candidate,
4503 Some("quarantined") => AssetState::Quarantined,
4504 Some("shadow_validated") => AssetState::ShadowValidated,
4505 Some("revoked") => AssetState::Revoked,
4506 Some("rejected") => AssetState::Archived,
4507 Some("archived") => AssetState::Archived,
4508 _ => AssetState::Candidate,
4509 }
4510}
4511
4512fn value_as_signal_string(value: &Value) -> Option<String> {
4513 match value {
4514 Value::String(raw) => {
4515 let normalized = raw.trim();
4516 if normalized.is_empty() {
4517 None
4518 } else {
4519 Some(normalized.to_string())
4520 }
4521 }
4522 Value::Object(_) => {
4523 let serialized = serde_json::to_string(value).ok()?;
4524 let normalized = serialized.trim();
4525 if normalized.is_empty() {
4526 None
4527 } else {
4528 Some(normalized.to_string())
4529 }
4530 }
4531 Value::Null => None,
4532 other => {
4533 let rendered = other.to_string();
4534 let normalized = rendered.trim();
4535 if normalized.is_empty() {
4536 None
4537 } else {
4538 Some(normalized.to_string())
4539 }
4540 }
4541 }
4542}
4543
/// Collects the set of files touched by a unified diff.
///
/// Recognizes `+++ b/<path>` target headers (ignoring `/dev/null`) and
/// `diff --git a/<left> b/<right>` headers (taking the right-hand path).
/// Paths are deduplicated and returned in sorted order.
fn parse_diff_changed_files(payload: &str) -> Vec<String> {
    let mut collected = BTreeSet::new();
    for raw_line in payload.lines() {
        let trimmed = raw_line.trim();
        if let Some(target) = trimmed.strip_prefix("+++ b/") {
            let target = target.trim();
            // `/dev/null` marks a deletion target, not a real file.
            if !target.is_empty() && target != "/dev/null" {
                collected.insert(target.to_owned());
            }
        } else if let Some(rest) = trimmed.strip_prefix("diff --git a/") {
            if let Some((_, right_path)) = rest.split_once(" b/") {
                let right_path = right_path.trim();
                if !right_path.is_empty() {
                    collected.insert(right_path.to_owned());
                }
            }
        }
    }
    collected.into_iter().collect()
}
4566
/// Removes a surrounding Markdown ``` code fence from a diff payload.
///
/// Payloads not starting with ``` are returned trimmed and otherwise
/// untouched. For fenced payloads, the opening fence line (including any
/// language tag) is dropped, a trailing ``` line is dropped if present,
/// and the remaining body is re-joined and trimmed.
fn strip_diff_code_fence(payload: &str) -> String {
    let trimmed = payload.trim();
    if !trimmed.starts_with("```") {
        return trimmed.to_string();
    }
    // Drop the opening fence line; keep everything after it.
    let mut body: Vec<&str> = trimmed.lines().skip(1).collect();
    if matches!(body.last(), Some(last) if last.trim() == "```") {
        body.pop();
    }
    body.join("\n").trim().to_string()
}
4586
4587fn synthetic_diff_for_capsule(capsule: &EvoMapCapsuleAsset) -> String {
4588 let file_path = format!("docs/evomap_builtin_capsules/{}.md", capsule.id);
4589 let mut content = Vec::new();
4590 content.push(format!("# EvoMap Builtin Capsule {}", capsule.id));
4591 if capsule.summary.trim().is_empty() {
4592 content.push("summary: missing".to_string());
4593 } else {
4594 content.push(format!("summary: {}", capsule.summary.trim()));
4595 }
4596 if !capsule.trigger.is_empty() {
4597 content.push(format!("trigger: {}", capsule.trigger.join(", ")));
4598 }
4599 content.push(format!("gene: {}", capsule.gene));
4600 let added = content
4601 .into_iter()
4602 .map(|line| format!("+{}", line.replace('\r', "")))
4603 .collect::<Vec<_>>()
4604 .join("\n");
4605 format!(
4606 "diff --git a/{file_path} b/{file_path}\nnew file mode 100644\nindex 0000000..1111111\n--- /dev/null\n+++ b/{file_path}\n@@ -0,0 +1,{line_count} @@\n{added}\n",
4607 line_count = added.lines().count()
4608 )
4609}
4610
4611fn normalized_diff_payload(capsule: &EvoMapCapsuleAsset) -> String {
4612 if let Some(raw) = capsule.diff.as_deref() {
4613 let normalized = strip_diff_code_fence(raw);
4614 if !normalized.trim().is_empty() {
4615 return normalized;
4616 }
4617 }
4618 synthetic_diff_for_capsule(capsule)
4619}
4620
4621fn env_field(value: Option<&Value>, keys: &[&str]) -> Option<String> {
4622 let object = value?.as_object()?;
4623 keys.iter().find_map(|key| {
4624 object
4625 .get(*key)
4626 .and_then(Value::as_str)
4627 .map(str::trim)
4628 .filter(|value| !value.is_empty())
4629 .map(|value| value.to_string())
4630 })
4631}
4632
4633fn map_evomap_env_fingerprint(value: Option<&Value>) -> EnvFingerprint {
4634 let os =
4635 env_field(value, &["os", "platform", "os_release"]).unwrap_or_else(|| "unknown".into());
4636 let target_triple = env_field(value, &["target_triple"]).unwrap_or_else(|| {
4637 let arch = env_field(value, &["arch"]).unwrap_or_else(|| "unknown".into());
4638 format!("{arch}-unknown-{os}")
4639 });
4640 EnvFingerprint {
4641 rustc_version: env_field(value, &["runtime", "rustc_version", "node_version"])
4642 .unwrap_or_else(|| "unknown".into()),
4643 cargo_lock_hash: env_field(value, &["cargo_lock_hash"]).unwrap_or_else(|| "unknown".into()),
4644 target_triple,
4645 os,
4646 }
4647}
4648
/// Loads and normalizes the bundled EvoMap snapshot into seed genes/capsules.
///
/// Returns `Ok(None)` unless BOTH snapshot files (genes and capsules) exist.
/// Genes are deduplicated by id, their signals/strategy/validation lists are
/// trimmed and defaulted, and EvoMap metadata (category, constraints, model,
/// schema version, compatibility state) is folded into `key=value` strategy
/// entries. Every capsule must reference a gene parsed from the same snapshot;
/// each one is turned into a [`Capsule`] plus a [`PreparedMutation`] whose
/// artifact is the (possibly synthetic) normalized unified diff.
///
/// # Errors
/// Validation errors for unreadable/unparsable snapshot files, empty gene ids,
/// capsules referencing unknown genes, or empty normalized diff payloads.
fn load_evomap_builtin_assets() -> Result<Option<BuiltinAssetBundle>, EvoKernelError> {
    let genes_raw = read_evomap_snapshot(EVOMAP_SNAPSHOT_GENES_FILE)?;
    let capsules_raw = read_evomap_snapshot(EVOMAP_SNAPSHOT_CAPSULES_FILE)?;
    // Both files must be present; a partial snapshot is treated as absent.
    let (Some(genes_raw), Some(capsules_raw)) = (genes_raw, capsules_raw) else {
        return Ok(None);
    };

    let genes_doc: EvoMapGeneDocument = serde_json::from_str(&genes_raw).map_err(|err| {
        EvoKernelError::Validation(format!("failed to parse EvoMap genes snapshot: {err}"))
    })?;
    let capsules_doc: EvoMapCapsuleDocument =
        serde_json::from_str(&capsules_raw).map_err(|err| {
            EvoKernelError::Validation(format!("failed to parse EvoMap capsules snapshot: {err}"))
        })?;

    let mut genes = Vec::new();
    let mut known_gene_ids = BTreeSet::new();
    for source in genes_doc.genes {
        let EvoMapGeneAsset {
            id,
            category,
            signals_match,
            strategy,
            validation,
            constraints,
            model_name,
            schema_version,
            compatibility,
        } = source;
        let gene_id = id.trim();
        if gene_id.is_empty() {
            return Err(EvoKernelError::Validation(
                "EvoMap snapshot gene id must not be empty".into(),
            ));
        }
        // First occurrence of an id wins; later duplicates are silently skipped.
        if !known_gene_ids.insert(gene_id.to_string()) {
            continue;
        }

        // Normalize signals: render each JSON value to text, dropping blanks
        // and duplicates while preserving first-seen order.
        let mut seen_signals = BTreeSet::new();
        let mut signals = Vec::new();
        for signal in signals_match {
            let Some(normalized) = value_as_signal_string(&signal) else {
                continue;
            };
            if seen_signals.insert(normalized.clone()) {
                signals.push(normalized);
            }
        }
        // A gene must always be matchable: fall back to a synthetic id-based signal.
        if signals.is_empty() {
            signals.push(format!("gene:{}", gene_id.to_ascii_lowercase()));
        }

        let mut strategy = strategy
            .into_iter()
            .map(|item| item.trim().to_string())
            .filter(|item| !item.is_empty())
            .collect::<Vec<_>>();
        if strategy.is_empty() {
            strategy.push("evomap strategy missing in snapshot".into());
        }
        let constraint = constraints.unwrap_or_default();
        let compat_state = compatibility_state_from_value(compatibility.as_ref())
            .unwrap_or_else(|| "candidate".to_string());
        // Fold EvoMap metadata into strategy entries without overwriting any
        // values the snapshot already declared (ensure_strategy_metadata is a
        // no-op when the key is present).
        ensure_strategy_metadata(&mut strategy, "asset_origin", "builtin_evomap");
        ensure_strategy_metadata(
            &mut strategy,
            "evomap_category",
            category.as_deref().unwrap_or("unknown"),
        );
        ensure_strategy_metadata(
            &mut strategy,
            "evomap_constraints_max_files",
            &constraint.max_files.unwrap_or_default().to_string(),
        );
        ensure_strategy_metadata(
            &mut strategy,
            "evomap_constraints_forbidden_paths",
            &constraint.forbidden_paths.join("|"),
        );
        ensure_strategy_metadata(
            &mut strategy,
            "evomap_model_name",
            model_name.as_deref().unwrap_or("unknown"),
        );
        ensure_strategy_metadata(
            &mut strategy,
            "evomap_schema_version",
            schema_version.as_deref().unwrap_or("1.5.0"),
        );
        ensure_strategy_metadata(&mut strategy, "evomap_compatibility_state", &compat_state);

        let mut validation = validation
            .into_iter()
            .map(|item| item.trim().to_string())
            .filter(|item| !item.is_empty())
            .collect::<Vec<_>>();
        if validation.is_empty() {
            validation.push("evomap-builtin-seed".into());
        }

        genes.push(Gene {
            id: gene_id.to_string(),
            signals,
            strategy,
            validation,
            state: map_evomap_state(compatibility.as_ref()),
        });
    }

    let mut capsules = Vec::new();
    // Rebuild the id set from the genes actually kept (trimmed, deduplicated)
    // so capsule references are checked against the final gene list.
    let known_gene_ids = genes
        .iter()
        .map(|gene| gene.id.clone())
        .collect::<BTreeSet<_>>();
    for source in capsules_doc.capsules {
        let EvoMapCapsuleAsset {
            id,
            gene,
            trigger,
            summary,
            diff,
            confidence,
            outcome,
            blast_radius,
            content,
            env_fingerprint,
            model_name: _model_name,
            schema_version: _schema_version,
            compatibility,
        } = source;
        // Re-assemble an asset view (with `diff` moved in and model/schema
        // cleared) solely so normalized_diff_payload can consume it.
        let source_for_diff = EvoMapCapsuleAsset {
            id: id.clone(),
            gene: gene.clone(),
            trigger: trigger.clone(),
            summary: summary.clone(),
            diff,
            confidence,
            outcome: outcome.clone(),
            blast_radius: blast_radius.clone(),
            content: content.clone(),
            env_fingerprint: env_fingerprint.clone(),
            model_name: None,
            schema_version: None,
            compatibility: compatibility.clone(),
        };
        if !known_gene_ids.contains(gene.as_str()) {
            return Err(EvoKernelError::Validation(format!(
                "EvoMap capsule {} references unknown gene {}",
                id, gene
            )));
        }
        let normalized_diff = normalized_diff_payload(&source_for_diff);
        if normalized_diff.trim().is_empty() {
            return Err(EvoKernelError::Validation(format!(
                "EvoMap capsule {} has empty normalized diff payload",
                id
            )));
        }
        // Changed-file precedence: explicit content list, then paths parsed
        // from the diff, then the synthetic doc path as a last resort.
        let mut changed_files = content
            .as_ref()
            .map(|content| {
                content
                    .changed_files
                    .iter()
                    .map(|item| item.trim().to_string())
                    .filter(|item| !item.is_empty())
                    .collect::<Vec<_>>()
            })
            .unwrap_or_default();
        if changed_files.is_empty() {
            changed_files = parse_diff_changed_files(&normalized_diff);
        }
        if changed_files.is_empty() {
            changed_files.push(format!("docs/evomap_builtin_capsules/{}.md", id));
        }

        // Confidence: explicit value, then outcome score, then 0.6 — clamped to [0, 1].
        let confidence = confidence
            .or_else(|| outcome.as_ref().and_then(|outcome| outcome.score))
            .unwrap_or(0.6)
            .clamp(0.0, 1.0);
        // A missing outcome/status is treated as success.
        let status_success = outcome
            .as_ref()
            .and_then(|outcome| outcome.status.as_deref())
            .map(|status| status.eq_ignore_ascii_case("success"))
            .unwrap_or(true);
        let blast_radius = blast_radius.unwrap_or_default();
        let mutation_id = format!("builtin-evomap-mutation-{}", id);
        let intent = MutationIntent {
            id: mutation_id.clone(),
            intent: if summary.trim().is_empty() {
                format!("apply EvoMap capsule {}", id)
            } else {
                summary.trim().to_string()
            },
            target: MutationTarget::Paths {
                allow: changed_files.clone(),
            },
            expected_effect: format!("seed replay candidate from EvoMap capsule {}", id),
            risk: RiskLevel::Low,
            // Signals: lowercased triggers, or a synthetic capsule-id signal.
            signals: if trigger.is_empty() {
                vec![format!("capsule:{}", id.to_ascii_lowercase())]
            } else {
                trigger
                    .iter()
                    .map(|signal| signal.trim().to_ascii_lowercase())
                    .filter(|signal| !signal.is_empty())
                    .collect::<Vec<_>>()
            },
            spec_id: None,
        };
        let mutation = PreparedMutation {
            intent,
            artifact: oris_evolution::MutationArtifact {
                encoding: ArtifactEncoding::UnifiedDiff,
                payload: normalized_diff.clone(),
                base_revision: None,
                content_hash: compute_artifact_hash(&normalized_diff),
            },
        };
        let capsule = Capsule {
            id: id.clone(),
            gene_id: gene.clone(),
            mutation_id,
            run_id: EVOMAP_BUILTIN_RUN_ID.to_string(),
            diff_hash: compute_artifact_hash(&normalized_diff),
            confidence,
            env: map_evomap_env_fingerprint(env_fingerprint.as_ref()),
            outcome: Outcome {
                success: status_success,
                validation_profile: "evomap-builtin-seed".into(),
                validation_duration_ms: 0,
                changed_files,
                validator_hash: "builtin-evomap".into(),
                lines_changed: blast_radius.lines,
                replay_verified: false,
            },
            state: map_evomap_state(compatibility.as_ref()),
        };
        capsules.push(BuiltinCapsuleSeed { capsule, mutation });
    }

    Ok(Some(BuiltinAssetBundle { genes, capsules }))
}
4893
/// Idempotently seeds built-in experience assets (hard-coded genes plus any
/// EvoMap snapshot bundle) into the store.
///
/// Already-known gene/capsule/mutation ids are skipped. Imported genes and
/// capsules are recorded as `CandidateSource::Local` imports; their follow-up
/// event depends on the seed's declared state:
/// - `Revoked`/`Archived`: projected/committed only, no transition event;
/// - `Quarantined`/`ShadowValidated`: quarantined pending further validation;
/// - `Promoted`/`Candidate`: promoted (genes) or released (capsules) for
///   cold-start compatibility.
///
/// Returns an [`ImportOutcome`] whose audit counts skipped = scanned − applied.
fn ensure_builtin_experience_assets_in_store(
    store: &dyn EvolutionStore,
    sender_id: String,
) -> Result<ImportOutcome, EvoKernelError> {
    // Replay the store to learn what is already present.
    let (events, projection) = scan_projection(store)?;
    let mut known_gene_ids = projection
        .genes
        .into_iter()
        .map(|gene| gene.id)
        .collect::<BTreeSet<_>>();
    let mut known_capsule_ids = projection
        .capsules
        .into_iter()
        .map(|capsule| capsule.id)
        .collect::<BTreeSet<_>>();
    let mut known_mutation_ids = BTreeSet::new();
    for stored in &events {
        if let EvolutionEvent::MutationDeclared { mutation } = &stored.event {
            known_mutation_ids.insert(mutation.intent.id.clone());
        }
    }
    let normalized_sender = normalized_sender_id(&sender_id);
    let mut imported_asset_ids = Vec::new();
    // Start from the hard-coded genes, then extend with the optional snapshot.
    let mut bundle = BuiltinAssetBundle {
        genes: built_in_experience_genes(),
        capsules: Vec::new(),
    };
    if let Some(snapshot_bundle) = load_evomap_builtin_assets()? {
        bundle.genes.extend(snapshot_bundle.genes);
        bundle.capsules.extend(snapshot_bundle.capsules);
    }
    let scanned_count = bundle.genes.len() + bundle.capsules.len();

    for gene in bundle.genes {
        // `insert` returning false means the gene already exists: skip it.
        if !known_gene_ids.insert(gene.id.clone()) {
            continue;
        }

        store
            .append_event(EvolutionEvent::RemoteAssetImported {
                source: CandidateSource::Local,
                asset_ids: vec![gene.id.clone()],
                sender_id: normalized_sender.clone(),
            })
            .map_err(store_err)?;
        store
            .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
            .map_err(store_err)?;
        // State-dependent follow-up transition (see doc comment above).
        match gene.state {
            AssetState::Revoked | AssetState::Archived => {}
            AssetState::Quarantined | AssetState::ShadowValidated => {
                store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: gene.id.clone(),
                        state: AssetState::Quarantined,
                        reason:
                            "built-in EvoMap asset requires additional validation before promotion"
                                .into(),
                        reason_code: TransitionReasonCode::DowngradeBuiltinRequiresValidation,
                        evidence: None,
                    })
                    .map_err(store_err)?;
            }
            AssetState::Promoted | AssetState::Candidate => {
                store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: gene.id.clone(),
                        state: AssetState::Promoted,
                        reason: "built-in experience asset promoted for cold-start compatibility"
                            .into(),
                        reason_code: TransitionReasonCode::PromotionBuiltinColdStartCompatibility,
                        evidence: None,
                    })
                    .map_err(store_err)?;
                store
                    .append_event(EvolutionEvent::GenePromoted {
                        gene_id: gene.id.clone(),
                    })
                    .map_err(store_err)?;
            }
        }
        imported_asset_ids.push(gene.id.clone());
    }

    for seed in bundle.capsules {
        // Every capsule must attach to a gene either pre-existing or just seeded.
        if !known_gene_ids.contains(seed.capsule.gene_id.as_str()) {
            return Err(EvoKernelError::Validation(format!(
                "built-in capsule {} references unknown gene {}",
                seed.capsule.id, seed.capsule.gene_id
            )));
        }
        // Declare the backing mutation once, even if the capsule is skipped below.
        if known_mutation_ids.insert(seed.mutation.intent.id.clone()) {
            store
                .append_event(EvolutionEvent::MutationDeclared {
                    mutation: seed.mutation.clone(),
                })
                .map_err(store_err)?;
        }
        if !known_capsule_ids.insert(seed.capsule.id.clone()) {
            continue;
        }
        store
            .append_event(EvolutionEvent::RemoteAssetImported {
                source: CandidateSource::Local,
                asset_ids: vec![seed.capsule.id.clone()],
                sender_id: normalized_sender.clone(),
            })
            .map_err(store_err)?;
        store
            .append_event(EvolutionEvent::CapsuleCommitted {
                capsule: seed.capsule.clone(),
            })
            .map_err(store_err)?;
        // State-dependent follow-up, mirroring the gene handling above.
        match seed.capsule.state {
            AssetState::Revoked | AssetState::Archived => {}
            AssetState::Quarantined | AssetState::ShadowValidated => {
                store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: seed.capsule.id.clone(),
                    })
                    .map_err(store_err)?;
            }
            AssetState::Promoted | AssetState::Candidate => {
                store
                    .append_event(EvolutionEvent::CapsuleReleased {
                        capsule_id: seed.capsule.id.clone(),
                        state: AssetState::Promoted,
                    })
                    .map_err(store_err)?;
            }
        }
        imported_asset_ids.push(seed.capsule.id.clone());
    }

    let next_cursor = latest_store_cursor(store)?;
    let resume_token = next_cursor.as_ref().and_then(|cursor| {
        normalized_sender
            .as_deref()
            .map(|sender| encode_resume_token(sender, cursor))
    });
    let applied_count = imported_asset_ids.len();
    let skipped_count = scanned_count.saturating_sub(applied_count);

    Ok(ImportOutcome {
        imported_asset_ids,
        accepted: true,
        next_cursor: next_cursor.clone(),
        resume_token,
        sync_audit: SyncAudit {
            batch_id: next_id("sync-import"),
            requested_cursor: None,
            scanned_count,
            applied_count,
            skipped_count,
            failed_count: 0,
            failure_reasons: Vec::new(),
        },
    })
}
5055
/// Finds the value of a `key=value` strategy metadata entry.
///
/// Keys are compared case-insensitively after trimming; entries without a
/// `=`, and entries whose value trims to empty, are passed over so a later
/// entry with the same key can still match. Returns the trimmed value.
fn strategy_metadata_value(strategy: &[String], key: &str) -> Option<String> {
    for entry in strategy {
        let Some((candidate_key, candidate_value)) = entry.split_once('=') else {
            continue;
        };
        if !candidate_key.trim().eq_ignore_ascii_case(key) {
            continue;
        }
        let trimmed = candidate_value.trim();
        if !trimmed.is_empty() {
            return Some(trimmed.to_string());
        }
    }
    None
}
5071
5072fn ensure_strategy_metadata(strategy: &mut Vec<String>, key: &str, value: &str) {
5073 let normalized = value.trim();
5074 if normalized.is_empty() || strategy_metadata_value(strategy, key).is_some() {
5075 return;
5076 }
5077 strategy.push(format!("{key}={normalized}"));
5078}
5079
/// Caps how many promoted reported-experience genes survive per task class.
///
/// Promoted genes whose strategy metadata carries
/// `asset_origin=reported_experience` and a (case-insensitively) matching
/// `task_class` are ranked newest-first by the projection's `last_updated_at`
/// timestamp, ties broken by descending gene id. Everything beyond the newest
/// `keep_latest` is revoked, and every capsule belonging to a revoked gene is
/// quarantined. A blank task class or `keep_latest == 0` is a no-op.
fn enforce_reported_experience_retention(
    store: &dyn EvolutionStore,
    task_class: &str,
    keep_latest: usize,
) -> Result<(), EvoKernelError> {
    let task_class = task_class.trim();
    if task_class.is_empty() || keep_latest == 0 {
        return Ok(());
    }

    let (_, projection) = scan_projection(store)?;
    // Collect (gene id, last-updated timestamp) for matching promoted genes.
    let mut candidates = projection
        .genes
        .iter()
        .filter(|gene| gene.state == AssetState::Promoted)
        .filter_map(|gene| {
            let origin = strategy_metadata_value(&gene.strategy, "asset_origin")?;
            if !origin.eq_ignore_ascii_case("reported_experience") {
                return None;
            }
            let gene_task_class = strategy_metadata_value(&gene.strategy, "task_class")?;
            if !gene_task_class.eq_ignore_ascii_case(task_class) {
                return None;
            }
            // Genes with no recorded update time sort with the default
            // (oldest) timestamp, making them first in line for revocation.
            let updated_at = projection
                .last_updated_at
                .get(&gene.id)
                .cloned()
                .unwrap_or_default();
            Some((gene.id.clone(), updated_at))
        })
        .collect::<Vec<_>>();
    if candidates.len() <= keep_latest {
        return Ok(());
    }

    // Newest first (descending timestamp, then descending id); keep the head.
    candidates.sort_by(|left, right| right.1.cmp(&left.1).then_with(|| right.0.cmp(&left.0)));
    let stale_gene_ids = candidates
        .into_iter()
        .skip(keep_latest)
        .map(|(gene_id, _)| gene_id)
        .collect::<BTreeSet<_>>();
    if stale_gene_ids.is_empty() {
        return Ok(());
    }

    let reason =
        format!("reported experience retention limit exceeded for task_class={task_class}");
    for gene_id in &stale_gene_ids {
        store
            .append_event(EvolutionEvent::GeneRevoked {
                gene_id: gene_id.clone(),
                reason: reason.clone(),
            })
            .map_err(store_err)?;
    }

    // Quarantine every capsule that belongs to a just-revoked gene.
    let stale_capsule_ids = projection
        .capsules
        .iter()
        .filter(|capsule| stale_gene_ids.contains(&capsule.gene_id))
        .map(|capsule| capsule.id.clone())
        .collect::<BTreeSet<_>>();
    for capsule_id in stale_capsule_ids {
        store
            .append_event(EvolutionEvent::CapsuleQuarantined { capsule_id })
            .map_err(store_err)?;
    }
    Ok(())
}
5150
5151fn record_reported_experience_in_store(
5152 store: &dyn EvolutionStore,
5153 sender_id: String,
5154 gene_id: String,
5155 signals: Vec<String>,
5156 strategy: Vec<String>,
5157 validation: Vec<String>,
5158) -> Result<ImportOutcome, EvoKernelError> {
5159 let gene_id = gene_id.trim();
5160 if gene_id.is_empty() {
5161 return Err(EvoKernelError::Validation(
5162 "reported experience gene_id must not be empty".into(),
5163 ));
5164 }
5165
5166 let mut unique_signals = BTreeSet::new();
5167 let mut normalized_signals = Vec::new();
5168 for signal in signals {
5169 let normalized = signal.trim().to_ascii_lowercase();
5170 if normalized.is_empty() {
5171 continue;
5172 }
5173 if unique_signals.insert(normalized.clone()) {
5174 normalized_signals.push(normalized);
5175 }
5176 }
5177 if normalized_signals.is_empty() {
5178 return Err(EvoKernelError::Validation(
5179 "reported experience signals must not be empty".into(),
5180 ));
5181 }
5182
5183 let mut unique_strategy = BTreeSet::new();
5184 let mut normalized_strategy = Vec::new();
5185 for entry in strategy {
5186 let normalized = entry.trim().to_string();
5187 if normalized.is_empty() {
5188 continue;
5189 }
5190 if unique_strategy.insert(normalized.clone()) {
5191 normalized_strategy.push(normalized);
5192 }
5193 }
5194 if normalized_strategy.is_empty() {
5195 normalized_strategy.push("reported local replay experience".into());
5196 }
5197 let task_class_id = strategy_metadata_value(&normalized_strategy, "task_class")
5198 .or_else(|| normalized_signals.first().cloned())
5199 .unwrap_or_else(|| "reported-experience".into());
5200 let task_label = strategy_metadata_value(&normalized_strategy, "task_label")
5201 .or_else(|| normalized_signals.first().cloned())
5202 .unwrap_or_else(|| task_class_id.clone());
5203 ensure_strategy_metadata(
5204 &mut normalized_strategy,
5205 "asset_origin",
5206 "reported_experience",
5207 );
5208 ensure_strategy_metadata(&mut normalized_strategy, "task_class", &task_class_id);
5209 ensure_strategy_metadata(&mut normalized_strategy, "task_label", &task_label);
5210
5211 let mut unique_validation = BTreeSet::new();
5212 let mut normalized_validation = Vec::new();
5213 for entry in validation {
5214 let normalized = entry.trim().to_string();
5215 if normalized.is_empty() {
5216 continue;
5217 }
5218 if unique_validation.insert(normalized.clone()) {
5219 normalized_validation.push(normalized);
5220 }
5221 }
5222 if normalized_validation.is_empty() {
5223 normalized_validation.push("a2a.tasks.report".into());
5224 }
5225
5226 let gene = Gene {
5227 id: gene_id.to_string(),
5228 signals: normalized_signals,
5229 strategy: normalized_strategy,
5230 validation: normalized_validation,
5231 state: AssetState::Promoted,
5232 };
5233 let normalized_sender = normalized_sender_id(&sender_id);
5234
5235 store
5236 .append_event(EvolutionEvent::RemoteAssetImported {
5237 source: CandidateSource::Local,
5238 asset_ids: vec![gene.id.clone()],
5239 sender_id: normalized_sender.clone(),
5240 })
5241 .map_err(store_err)?;
5242 store
5243 .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
5244 .map_err(store_err)?;
5245 store
5246 .append_event(EvolutionEvent::PromotionEvaluated {
5247 gene_id: gene.id.clone(),
5248 state: AssetState::Promoted,
5249 reason: "trusted local report promoted reusable experience".into(),
5250 reason_code: TransitionReasonCode::PromotionTrustedLocalReport,
5251 evidence: None,
5252 })
5253 .map_err(store_err)?;
5254 store
5255 .append_event(EvolutionEvent::GenePromoted {
5256 gene_id: gene.id.clone(),
5257 })
5258 .map_err(store_err)?;
5259 enforce_reported_experience_retention(
5260 store,
5261 &task_class_id,
5262 REPORTED_EXPERIENCE_RETENTION_LIMIT,
5263 )?;
5264
5265 let imported_asset_ids = vec![gene.id];
5266 let next_cursor = latest_store_cursor(store)?;
5267 let resume_token = next_cursor.as_ref().and_then(|cursor| {
5268 normalized_sender
5269 .as_deref()
5270 .map(|sender| encode_resume_token(sender, cursor))
5271 });
5272 Ok(ImportOutcome {
5273 imported_asset_ids,
5274 accepted: true,
5275 next_cursor,
5276 resume_token,
5277 sync_audit: SyncAudit {
5278 batch_id: next_id("sync-import"),
5279 requested_cursor: None,
5280 scanned_count: 1,
5281 applied_count: 1,
5282 skipped_count: 0,
5283 failed_count: 0,
5284 failure_reasons: Vec::new(),
5285 },
5286 })
5287}
5288
/// Trims a sender id and returns it as an owned string, or `None` when the
/// trimmed result is empty.
fn normalized_sender_id(sender_id: &str) -> Option<String> {
    let trimmed = sender_id.trim();
    (!trimmed.is_empty()).then(|| trimmed.to_string())
}
5297
5298fn record_manifest_validation(
5299 store: &dyn EvolutionStore,
5300 envelope: &EvolutionEnvelope,
5301 accepted: bool,
5302 reason: impl Into<String>,
5303) -> Result<(), EvoKernelError> {
5304 let manifest = envelope.manifest.as_ref();
5305 let sender_id = manifest
5306 .and_then(|value| normalized_sender_id(&value.sender_id))
5307 .or_else(|| normalized_sender_id(&envelope.sender_id));
5308 let publisher = manifest.and_then(|value| normalized_sender_id(&value.publisher));
5309 let asset_ids = manifest
5310 .map(|value| value.asset_ids.clone())
5311 .unwrap_or_else(|| EvolutionEnvelope::manifest_asset_ids(&envelope.assets));
5312
5313 store
5314 .append_event(EvolutionEvent::ManifestValidated {
5315 accepted,
5316 reason: reason.into(),
5317 sender_id,
5318 publisher,
5319 asset_ids,
5320 })
5321 .map_err(store_err)?;
5322 Ok(())
5323}
5324
5325fn record_remote_publisher_for_asset(
5326 remote_publishers: Option<&Mutex<BTreeMap<String, String>>>,
5327 sender_id: &str,
5328 asset: &NetworkAsset,
5329) {
5330 let Some(remote_publishers) = remote_publishers else {
5331 return;
5332 };
5333 let sender_id = sender_id.trim();
5334 if sender_id.is_empty() {
5335 return;
5336 }
5337 let Ok(mut publishers) = remote_publishers.lock() else {
5338 return;
5339 };
5340 match asset {
5341 NetworkAsset::Gene { gene } => {
5342 publishers.insert(gene.id.clone(), sender_id.to_string());
5343 }
5344 NetworkAsset::Capsule { capsule } => {
5345 publishers.insert(capsule.id.clone(), sender_id.to_string());
5346 }
5347 NetworkAsset::EvolutionEvent { .. } => {}
5348 }
5349}
5350
5351fn remote_publishers_by_asset_from_store(store: &dyn EvolutionStore) -> BTreeMap<String, String> {
5352 let Ok(events) = store.scan(1) else {
5353 return BTreeMap::new();
5354 };
5355 remote_publishers_by_asset_from_events(&events)
5356}
5357
/// Reconstructs, from the event log, which remote sender published each
/// locally known asset.
///
/// Only `RemoteAssetImported` events with `CandidateSource::Remote` and a
/// non-empty sender id are considered. An asset id appears in the result only
/// once the asset itself is known locally — either it was already
/// projected/committed when the import event arrived, or the import arrived
/// first and a matching `GeneProjected`/`CapsuleCommitted` event follows.
/// A later import for the same asset id overwrites the earlier publisher.
fn remote_publishers_by_asset_from_events(
    events: &[StoredEvolutionEvent],
) -> BTreeMap<String, String> {
    // Publisher for every imported asset id, known locally or not.
    let mut imported_asset_publishers = BTreeMap::<String, String>::new();
    // Asset ids seen via GeneProjected / CapsuleCommitted so far.
    let mut known_gene_ids = BTreeSet::<String>::new();
    let mut known_capsule_ids = BTreeSet::<String>::new();
    // Final result: publisher per asset id that is both imported and known.
    let mut publishers_by_asset = BTreeMap::<String, String>::new();

    for stored in events {
        match &stored.event {
            EvolutionEvent::RemoteAssetImported {
                source: CandidateSource::Remote,
                asset_ids,
                sender_id,
            } => {
                // Imports without a usable sender id are ignored entirely.
                let Some(sender_id) = sender_id.as_deref().and_then(normalized_sender_id) else {
                    continue;
                };
                for asset_id in asset_ids {
                    imported_asset_publishers.insert(asset_id.clone(), sender_id.clone());
                    // Asset already known locally: record the publisher now.
                    if known_gene_ids.contains(asset_id) || known_capsule_ids.contains(asset_id) {
                        publishers_by_asset.insert(asset_id.clone(), sender_id.clone());
                    }
                }
            }
            EvolutionEvent::GeneProjected { gene } => {
                known_gene_ids.insert(gene.id.clone());
                // Import may have arrived before the projection; backfill.
                if let Some(sender_id) = imported_asset_publishers.get(&gene.id) {
                    publishers_by_asset.insert(gene.id.clone(), sender_id.clone());
                }
            }
            EvolutionEvent::CapsuleCommitted { capsule } => {
                known_capsule_ids.insert(capsule.id.clone());
                // Same backfill for capsules committed after their import.
                if let Some(sender_id) = imported_asset_publishers.get(&capsule.id) {
                    publishers_by_asset.insert(capsule.id.clone(), sender_id.clone());
                }
            }
            _ => {}
        }
    }

    publishers_by_asset
}
5401
5402fn should_import_remote_event(event: &EvolutionEvent) -> bool {
5403 matches!(
5404 event,
5405 EvolutionEvent::MutationDeclared { .. } | EvolutionEvent::SpecLinked { .. }
5406 )
5407}
5408
/// Serves a peer's asset-fetch request from the local store.
///
/// Promoted genes whose signals match the query (case-insensitive substring
/// match in either direction; an empty signal list matches everything) are
/// selected together with their promoted capsules. When the request resolves
/// to a cursor/resume token, only assets changed since that sequence number
/// are exported (delta sync); otherwise the full matched set is exported.
/// The sync audit reports the full matched set as scanned and the delta as
/// applied.
fn fetch_assets_from_store(
    store: &dyn EvolutionStore,
    responder_id: impl Into<String>,
    query: &FetchQuery,
) -> Result<FetchResponse, EvoKernelError> {
    let (events, projection) = scan_projection(store)?;
    let requested_cursor = resolve_requested_cursor(
        &query.sender_id,
        query.since_cursor.as_deref(),
        query.resume_token.as_deref(),
    )?;
    // No parseable cursor means "from the beginning" (seq 0).
    let since_seq = requested_cursor
        .as_deref()
        .and_then(parse_sync_cursor_seq)
        .unwrap_or(0);
    let normalized_signals: Vec<String> = query
        .signals
        .iter()
        .map(|signal| signal.trim().to_ascii_lowercase())
        .filter(|signal| !signal.is_empty())
        .collect();
    // Bidirectional substring match so "build" matches "build-rust" and
    // vice versa; empty query matches any candidate.
    let matches_any_signal = |candidate: &str| {
        if normalized_signals.is_empty() {
            return true;
        }
        let candidate = candidate.to_ascii_lowercase();
        normalized_signals
            .iter()
            .any(|signal| candidate.contains(signal) || signal.contains(&candidate))
    };

    // Only promoted genes with at least one matching signal are exportable.
    let matched_genes: Vec<Gene> = projection
        .genes
        .into_iter()
        .filter(|gene| gene.state == AssetState::Promoted)
        .filter(|gene| gene.signals.iter().any(|signal| matches_any_signal(signal)))
        .collect();
    let matched_gene_ids: BTreeSet<String> =
        matched_genes.iter().map(|gene| gene.id.clone()).collect();
    // Promoted capsules ride along with their matched parent genes.
    let matched_capsules: Vec<Capsule> = projection
        .capsules
        .into_iter()
        .filter(|capsule| capsule.state == AssetState::Promoted)
        .filter(|capsule| matched_gene_ids.contains(&capsule.gene_id))
        .collect();
    // Full export set — used for audit accounting even in delta mode.
    let all_assets = replay_export_assets(&events, matched_genes.clone(), matched_capsules.clone());
    let (selected_genes, selected_capsules) = if requested_cursor.is_some() {
        // Delta mode: keep only capsules changed since the cursor, plus the
        // genes those capsules belong to and any directly changed genes.
        let delta = delta_window(&events, since_seq);
        let selected_capsules = matched_capsules
            .into_iter()
            .filter(|capsule| {
                delta.changed_capsule_ids.contains(&capsule.id)
                    || delta.changed_mutation_ids.contains(&capsule.mutation_id)
            })
            .collect::<Vec<_>>();
        let selected_gene_ids = selected_capsules
            .iter()
            .map(|capsule| capsule.gene_id.clone())
            .collect::<BTreeSet<_>>();
        let selected_genes = matched_genes
            .into_iter()
            .filter(|gene| {
                delta.changed_gene_ids.contains(&gene.id) || selected_gene_ids.contains(&gene.id)
            })
            .collect::<Vec<_>>();
        (selected_genes, selected_capsules)
    } else {
        // No cursor: full export of everything matched.
        (matched_genes, matched_capsules)
    };
    let assets = replay_export_assets(&events, selected_genes, selected_capsules);
    // Advance the cursor to the latest stored sequence number.
    let next_cursor = events.last().map(|stored| format_sync_cursor(stored.seq));
    let resume_token = next_cursor
        .as_ref()
        .map(|cursor| encode_resume_token(&query.sender_id, cursor));
    let applied_count = assets.len();
    let skipped_count = all_assets.len().saturating_sub(applied_count);

    Ok(FetchResponse {
        sender_id: responder_id.into(),
        assets,
        next_cursor: next_cursor.clone(),
        resume_token,
        sync_audit: SyncAudit {
            batch_id: next_id("sync-fetch"),
            requested_cursor,
            scanned_count: all_assets.len(),
            applied_count,
            skipped_count,
            failed_count: 0,
            failure_reasons: Vec::new(),
        },
    })
}
5502
5503fn revoke_assets_in_store(
5504 store: &dyn EvolutionStore,
5505 notice: &RevokeNotice,
5506) -> Result<RevokeNotice, EvoKernelError> {
5507 let projection = projection_snapshot(store)?;
5508 let requested: BTreeSet<String> = notice
5509 .asset_ids
5510 .iter()
5511 .map(|asset_id| asset_id.trim().to_string())
5512 .filter(|asset_id| !asset_id.is_empty())
5513 .collect();
5514 let mut revoked_gene_ids = BTreeSet::new();
5515 let mut quarantined_capsule_ids = BTreeSet::new();
5516
5517 for gene in &projection.genes {
5518 if requested.contains(&gene.id) {
5519 revoked_gene_ids.insert(gene.id.clone());
5520 }
5521 }
5522 for capsule in &projection.capsules {
5523 if requested.contains(&capsule.id) {
5524 quarantined_capsule_ids.insert(capsule.id.clone());
5525 revoked_gene_ids.insert(capsule.gene_id.clone());
5526 }
5527 }
5528 for capsule in &projection.capsules {
5529 if revoked_gene_ids.contains(&capsule.gene_id) {
5530 quarantined_capsule_ids.insert(capsule.id.clone());
5531 }
5532 }
5533
5534 for gene_id in &revoked_gene_ids {
5535 store
5536 .append_event(EvolutionEvent::GeneRevoked {
5537 gene_id: gene_id.clone(),
5538 reason: notice.reason.clone(),
5539 })
5540 .map_err(store_err)?;
5541 }
5542 for capsule_id in &quarantined_capsule_ids {
5543 store
5544 .append_event(EvolutionEvent::CapsuleQuarantined {
5545 capsule_id: capsule_id.clone(),
5546 })
5547 .map_err(store_err)?;
5548 }
5549
5550 let mut affected_ids: Vec<String> = revoked_gene_ids.into_iter().collect();
5551 affected_ids.extend(quarantined_capsule_ids);
5552 affected_ids.sort();
5553 affected_ids.dedup();
5554
5555 Ok(RevokeNotice {
5556 sender_id: notice.sender_id.clone(),
5557 asset_ids: affected_ids,
5558 reason: notice.reason.clone(),
5559 })
5560}
5561
/// Aggregates the full event log into a point-in-time metrics snapshot for
/// the evolution subsystem.
///
/// Replay totals prefer explicit `ReplayEconomicsRecorded` evidence. When the
/// log carries none (older logs), totals are estimated from `CapsuleReused`
/// and replay-validation-failure events using the fixed
/// `REPLAY_REASONING_TOKEN_FLOOR` per replay. Per-task-class and per-source
/// breakdowns, promotion ratios, and one-hour velocity counters are derived
/// from the same scan.
fn evolution_metrics_snapshot(
    store: &dyn EvolutionStore,
) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
    let (events, projection) = scan_projection(store)?;
    // Tuple accumulators: (success, failure, tokens avoided, fallback cost).
    let mut replay_task_class_totals = BTreeMap::<(String, String), (u64, u64, u64, u64)>::new();
    let mut replay_source_totals = BTreeMap::<String, (u64, u64, u64, u64)>::new();
    let replay_evidences = events
        .iter()
        .filter_map(|stored| match &stored.event {
            EvolutionEvent::ReplayEconomicsRecorded { evidence, .. } => Some(evidence.clone()),
            _ => None,
        })
        .collect::<Vec<_>>();
    let (
        replay_success_total,
        replay_failures_total,
        replay_reasoning_avoided_total,
        reasoning_avoided_tokens_total,
        replay_fallback_cost_total,
    ) = if replay_evidences.is_empty() {
        // Legacy path: no recorded economics evidence. Estimate from
        // CapsuleReused / validation-failure events, attributing a fixed
        // token floor to each replay.
        let gene_task_classes = projection
            .genes
            .iter()
            .map(|gene| (gene.id.clone(), replay_task_descriptor(&gene.signals)))
            .collect::<BTreeMap<_, _>>();
        let replay_success_total = events
            .iter()
            .filter(|stored| matches!(stored.event, EvolutionEvent::CapsuleReused { .. }))
            .count() as u64;
        for stored in &events {
            if let EvolutionEvent::CapsuleReused { gene_id, .. } = &stored.event {
                // Reuses of genes missing from the projection get no
                // task-class attribution.
                if let Some((task_class_id, task_label)) = gene_task_classes.get(gene_id) {
                    let entry = replay_task_class_totals
                        .entry((task_class_id.clone(), task_label.clone()))
                        .or_insert((0, 0, 0, 0));
                    entry.0 += 1;
                    entry.2 += REPLAY_REASONING_TOKEN_FLOOR;
                }
            }
        }
        let replay_failures_total = events
            .iter()
            .filter(|stored| is_replay_validation_failure(&stored.event))
            .count() as u64;
        (
            replay_success_total,
            replay_failures_total,
            // Each estimated success counts as one avoided reasoning step.
            replay_success_total,
            replay_success_total * REPLAY_REASONING_TOKEN_FLOOR,
            replay_failures_total * REPLAY_REASONING_TOKEN_FLOOR,
        )
    } else {
        // Evidence path: sum the recorded economics per evidence record.
        let mut replay_success_total = 0_u64;
        let mut replay_failures_total = 0_u64;
        let mut reasoning_avoided_tokens_total = 0_u64;
        let mut replay_fallback_cost_total = 0_u64;
        for evidence in &replay_evidences {
            if evidence.success {
                replay_success_total += 1;
            } else {
                replay_failures_total += 1;
            }
            reasoning_avoided_tokens_total += evidence.reasoning_avoided_tokens;
            replay_fallback_cost_total += evidence.replay_fallback_cost;
            let entry = replay_task_class_totals
                .entry((evidence.task_class_id.clone(), evidence.task_label.clone()))
                .or_insert((0, 0, 0, 0));
            if evidence.success {
                entry.0 += 1;
            } else {
                entry.1 += 1;
            }
            entry.2 += evidence.reasoning_avoided_tokens;
            entry.3 += evidence.replay_fallback_cost;
            // Per-source breakdown only when the evidence names its origin.
            if let Some(source_sender_id) = evidence.source_sender_id.as_deref() {
                let source_entry = replay_source_totals
                    .entry(source_sender_id.to_string())
                    .or_insert((0, 0, 0, 0));
                if evidence.success {
                    source_entry.0 += 1;
                } else {
                    source_entry.1 += 1;
                }
                source_entry.2 += evidence.reasoning_avoided_tokens;
                source_entry.3 += evidence.replay_fallback_cost;
            }
        }
        (
            replay_success_total,
            replay_failures_total,
            // One avoided reasoning step per successful replay.
            replay_success_total,
            reasoning_avoided_tokens_total,
            replay_fallback_cost_total,
        )
    };
    let replay_task_classes = replay_task_class_totals
        .into_iter()
        .map(
            |(
                (task_class_id, task_label),
                (
                    replay_success_total,
                    replay_failure_total,
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            )| ReplayTaskClassMetrics {
                task_class_id,
                task_label,
                replay_success_total,
                replay_failure_total,
                reasoning_steps_avoided_total: replay_success_total,
                reasoning_avoided_tokens_total,
                replay_fallback_cost_total,
                replay_roi: compute_replay_roi(
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            },
        )
        .collect::<Vec<_>>();
    let replay_sources = replay_source_totals
        .into_iter()
        .map(
            |(
                source_sender_id,
                (
                    replay_success_total,
                    replay_failure_total,
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            )| ReplaySourceRoiMetrics {
                source_sender_id,
                replay_success_total,
                replay_failure_total,
                reasoning_avoided_tokens_total,
                replay_fallback_cost_total,
                replay_roi: compute_replay_roi(
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            },
        )
        .collect::<Vec<_>>();
    let replay_attempts_total = replay_success_total + replay_failures_total;
    let confidence_revalidations_total = events
        .iter()
        .filter(|stored| is_confidence_revalidation_event(&stored.event))
        .count() as u64;
    let mutation_declared_total = events
        .iter()
        .filter(|stored| matches!(stored.event, EvolutionEvent::MutationDeclared { .. }))
        .count() as u64;
    let promoted_mutations_total = events
        .iter()
        .filter(|stored| matches!(stored.event, EvolutionEvent::GenePromoted { .. }))
        .count() as u64;
    let gene_revocations_total = events
        .iter()
        .filter(|stored| matches!(stored.event, EvolutionEvent::GeneRevoked { .. }))
        .count() as u64;
    // Velocity counters cover the trailing hour only.
    let cutoff = Utc::now() - Duration::hours(1);
    let mutation_velocity_last_hour = count_recent_events(&events, cutoff, |event| {
        matches!(event, EvolutionEvent::MutationDeclared { .. })
    });
    let revoke_frequency_last_hour = count_recent_events(&events, cutoff, |event| {
        matches!(event, EvolutionEvent::GeneRevoked { .. })
    });
    let promoted_genes = projection
        .genes
        .iter()
        .filter(|gene| gene.state == AssetState::Promoted)
        .count() as u64;
    let promoted_capsules = projection
        .capsules
        .iter()
        .filter(|capsule| capsule.state == AssetState::Promoted)
        .count() as u64;

    Ok(EvolutionMetricsSnapshot {
        replay_attempts_total,
        replay_success_total,
        replay_success_rate: safe_ratio(replay_success_total, replay_attempts_total),
        confidence_revalidations_total,
        replay_reasoning_avoided_total,
        reasoning_avoided_tokens_total,
        replay_fallback_cost_total,
        replay_roi: compute_replay_roi(reasoning_avoided_tokens_total, replay_fallback_cost_total),
        replay_task_classes,
        replay_sources,
        mutation_declared_total,
        promoted_mutations_total,
        promotion_ratio: safe_ratio(promoted_mutations_total, mutation_declared_total),
        gene_revocations_total,
        mutation_velocity_last_hour,
        revoke_frequency_last_hour,
        promoted_genes,
        promoted_capsules,
        last_event_seq: events.last().map(|stored| stored.seq).unwrap_or(0),
    })
}
5764
/// Summarizes replay-ROI evidence over a trailing time window for the
/// release gate.
///
/// Only `ReplayEconomicsRecorded` events are counted. `window_seconds == 0`
/// means no cutoff (all events); otherwise events older than the cutoff —
/// and events whose timestamp cannot be parsed — are skipped. Totals are
/// also broken down per (task class, label) and per source sender.
fn replay_roi_release_gate_summary(
    store: &dyn EvolutionStore,
    window_seconds: u64,
) -> Result<ReplayRoiWindowSummary, EvoKernelError> {
    let events = store.scan(1).map_err(store_err)?;
    let now = Utc::now();
    let cutoff = if window_seconds == 0 {
        None
    } else {
        // Clamp oversized windows instead of overflowing the i64 conversion.
        let seconds = i64::try_from(window_seconds).unwrap_or(i64::MAX);
        Some(now - Duration::seconds(seconds))
    };

    let mut replay_attempts_total = 0_u64;
    let mut replay_success_total = 0_u64;
    let mut replay_failure_total = 0_u64;
    let mut reasoning_avoided_tokens_total = 0_u64;
    let mut replay_fallback_cost_total = 0_u64;
    // Tuple accumulators: (success, failure, tokens avoided, fallback cost).
    let mut task_totals = BTreeMap::<(String, String), (u64, u64, u64, u64)>::new();
    let mut source_totals = BTreeMap::<String, (u64, u64, u64, u64)>::new();

    for stored in events {
        let EvolutionEvent::ReplayEconomicsRecorded { evidence, .. } = stored.event else {
            continue;
        };
        if let Some(cutoff) = cutoff {
            // Unparseable timestamps are excluded when a window is active.
            let Some(timestamp) = parse_event_timestamp(&stored.timestamp) else {
                continue;
            };
            if timestamp < cutoff {
                continue;
            }
        }
        replay_attempts_total += 1;
        if evidence.success {
            replay_success_total += 1;
        } else {
            replay_failure_total += 1;
        }
        reasoning_avoided_tokens_total += evidence.reasoning_avoided_tokens;
        replay_fallback_cost_total += evidence.replay_fallback_cost;
        let task_entry = task_totals
            .entry((evidence.task_class_id.clone(), evidence.task_label.clone()))
            .or_insert((0, 0, 0, 0));
        if evidence.success {
            task_entry.0 += 1;
        } else {
            task_entry.1 += 1;
        }
        task_entry.2 += evidence.reasoning_avoided_tokens;
        task_entry.3 += evidence.replay_fallback_cost;
        // Per-source breakdown only when the evidence names its origin.
        if let Some(source_sender_id) = evidence.source_sender_id.as_deref() {
            let source_entry = source_totals
                .entry(source_sender_id.to_string())
                .or_insert((0, 0, 0, 0));
            if evidence.success {
                source_entry.0 += 1;
            } else {
                source_entry.1 += 1;
            }
            source_entry.2 += evidence.reasoning_avoided_tokens;
            source_entry.3 += evidence.replay_fallback_cost;
        }
    }

    let replay_task_classes = task_totals
        .into_iter()
        .map(
            |(
                (task_class_id, task_label),
                (
                    replay_success_total,
                    replay_failure_total,
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            )| ReplayTaskClassMetrics {
                task_class_id,
                task_label,
                replay_success_total,
                replay_failure_total,
                reasoning_steps_avoided_total: replay_success_total,
                reasoning_avoided_tokens_total,
                replay_fallback_cost_total,
                replay_roi: compute_replay_roi(
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            },
        )
        .collect::<Vec<_>>();
    let replay_sources = source_totals
        .into_iter()
        .map(
            |(
                source_sender_id,
                (
                    replay_success_total,
                    replay_failure_total,
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            )| ReplaySourceRoiMetrics {
                source_sender_id,
                replay_success_total,
                replay_failure_total,
                reasoning_avoided_tokens_total,
                replay_fallback_cost_total,
                replay_roi: compute_replay_roi(
                    reasoning_avoided_tokens_total,
                    replay_fallback_cost_total,
                ),
            },
        )
        .collect::<Vec<_>>();

    Ok(ReplayRoiWindowSummary {
        generated_at: now.to_rfc3339(),
        window_seconds,
        replay_attempts_total,
        replay_success_total,
        replay_failure_total,
        reasoning_avoided_tokens_total,
        replay_fallback_cost_total,
        replay_roi: compute_replay_roi(reasoning_avoided_tokens_total, replay_fallback_cost_total),
        replay_task_classes,
        replay_sources,
    })
}
5894
5895fn replay_roi_release_gate_contract(
5896 summary: &ReplayRoiWindowSummary,
5897 thresholds: ReplayRoiReleaseGateThresholds,
5898) -> ReplayRoiReleaseGateContract {
5899 let input = replay_roi_release_gate_input_contract(summary, thresholds);
5900 let output = evaluate_replay_roi_release_gate_contract_input(&input);
5901 ReplayRoiReleaseGateContract { input, output }
5902}
5903
5904fn replay_roi_release_gate_input_contract(
5905 summary: &ReplayRoiWindowSummary,
5906 thresholds: ReplayRoiReleaseGateThresholds,
5907) -> ReplayRoiReleaseGateInputContract {
5908 let replay_safety_signal = replay_roi_release_gate_safety_signal(summary);
5909 let replay_safety = replay_safety_signal.fail_closed_default
5910 && replay_safety_signal.rollback_ready
5911 && replay_safety_signal.audit_trail_complete
5912 && replay_safety_signal.has_replay_activity;
5913 ReplayRoiReleaseGateInputContract {
5914 generated_at: summary.generated_at.clone(),
5915 window_seconds: summary.window_seconds,
5916 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
5917 .iter()
5918 .map(|dimension| (*dimension).to_string())
5919 .collect(),
5920 replay_attempts_total: summary.replay_attempts_total,
5921 replay_success_total: summary.replay_success_total,
5922 replay_failure_total: summary.replay_failure_total,
5923 replay_hit_rate: safe_ratio(summary.replay_success_total, summary.replay_attempts_total),
5924 false_replay_rate: safe_ratio(summary.replay_failure_total, summary.replay_attempts_total),
5925 reasoning_avoided_tokens: summary.reasoning_avoided_tokens_total,
5926 replay_fallback_cost_total: summary.replay_fallback_cost_total,
5927 replay_roi: summary.replay_roi,
5928 replay_safety,
5929 replay_safety_signal,
5930 thresholds,
5931 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
5932 }
5933}
5934
5935fn replay_roi_release_gate_safety_signal(
5936 summary: &ReplayRoiWindowSummary,
5937) -> ReplayRoiReleaseGateSafetySignal {
5938 ReplayRoiReleaseGateSafetySignal {
5939 fail_closed_default: true,
5940 rollback_ready: summary.replay_failure_total == 0 || summary.replay_fallback_cost_total > 0,
5941 audit_trail_complete: summary.replay_attempts_total
5942 == summary.replay_success_total + summary.replay_failure_total,
5943 has_replay_activity: summary.replay_attempts_total > 0,
5944 }
5945}
5946
/// Evaluates a replay-ROI release-gate input contract and returns the gate
/// decision with the failed checks and supporting evidence references.
///
/// Two failure classes are distinguished: malformed/inconsistent inputs
/// (missing timestamp, attempt-accounting mismatches, invalid rates/ROI or
/// thresholds) mark the result `Indeterminate`, while threshold violations
/// alone yield `FailClosed`. No failed checks means `Pass`. Both failure
/// statuses are treated as fail-closed by the gate.
pub fn evaluate_replay_roi_release_gate_contract_input(
    input: &ReplayRoiReleaseGateInputContract,
) -> ReplayRoiReleaseGateOutputContract {
    let mut failed_checks = Vec::new();
    let mut evidence_refs = Vec::new();
    // Set whenever the input itself is malformed (vs. merely failing a
    // threshold); decides Indeterminate over FailClosed below.
    let mut indeterminate = false;

    replay_release_gate_push_unique(&mut evidence_refs, "replay_roi_release_gate_summary");
    replay_release_gate_push_unique(
        &mut evidence_refs,
        format!("window_seconds:{}", input.window_seconds),
    );
    if input.generated_at.trim().is_empty() {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "missing_generated_at",
            &["field:generated_at"],
        );
        indeterminate = true;
    } else {
        replay_release_gate_push_unique(
            &mut evidence_refs,
            format!("generated_at:{}", input.generated_at),
        );
    }

    // Structural consistency: successes + failures must equal attempts.
    let expected_attempts_total = input.replay_success_total + input.replay_failure_total;
    if input.replay_attempts_total != expected_attempts_total {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_attempt_accounting",
            &[
                "metric:replay_attempts_total",
                "metric:replay_success_total",
                "metric:replay_failure_total",
            ],
        );
        indeterminate = true;
    }

    if input.replay_attempts_total == 0 {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "missing_replay_attempts",
            &["metric:replay_attempts_total"],
        );
        indeterminate = true;
    }

    // Rates must be finite fractions in [0, 1].
    if !replay_release_gate_rate_valid(input.replay_hit_rate) {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_replay_hit_rate",
            &["metric:replay_hit_rate"],
        );
        indeterminate = true;
    }
    if !replay_release_gate_rate_valid(input.false_replay_rate) {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_false_replay_rate",
            &["metric:false_replay_rate"],
        );
        indeterminate = true;
    }

    if !input.replay_roi.is_finite() {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_replay_roi",
            &["metric:replay_roi"],
        );
        indeterminate = true;
    }

    // Cross-check the reported rates against the raw counters (only
    // meaningful when there was at least one attempt).
    let expected_hit_rate = safe_ratio(input.replay_success_total, input.replay_attempts_total);
    let expected_false_rate = safe_ratio(input.replay_failure_total, input.replay_attempts_total);
    if input.replay_attempts_total > 0
        && !replay_release_gate_float_eq(input.replay_hit_rate, expected_hit_rate)
    {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_replay_hit_rate_consistency",
            &["metric:replay_hit_rate", "metric:replay_success_total"],
        );
        indeterminate = true;
    }
    if input.replay_attempts_total > 0
        && !replay_release_gate_float_eq(input.false_replay_rate, expected_false_rate)
    {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_false_replay_rate_consistency",
            &["metric:false_replay_rate", "metric:replay_failure_total"],
        );
        indeterminate = true;
    }

    // The thresholds themselves must be well-formed before use.
    if !(0.0..=1.0).contains(&input.thresholds.min_replay_hit_rate) {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_threshold_min_replay_hit_rate",
            &["threshold:min_replay_hit_rate"],
        );
        indeterminate = true;
    }
    if !(0.0..=1.0).contains(&input.thresholds.max_false_replay_rate) {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_threshold_max_false_replay_rate",
            &["threshold:max_false_replay_rate"],
        );
        indeterminate = true;
    }
    if !input.thresholds.min_replay_roi.is_finite() {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "invalid_threshold_min_replay_roi",
            &["threshold:min_replay_roi"],
        );
        indeterminate = true;
    }

    // Threshold checks proper: these record failures but never set the
    // indeterminate flag.
    if input.replay_attempts_total < input.thresholds.min_replay_attempts {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "min_replay_attempts_below_threshold",
            &[
                "threshold:min_replay_attempts",
                "metric:replay_attempts_total",
            ],
        );
    }
    if input.replay_attempts_total > 0
        && input.replay_hit_rate < input.thresholds.min_replay_hit_rate
    {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "replay_hit_rate_below_threshold",
            &["threshold:min_replay_hit_rate", "metric:replay_hit_rate"],
        );
    }
    if input.replay_attempts_total > 0
        && input.false_replay_rate > input.thresholds.max_false_replay_rate
    {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "false_replay_rate_above_threshold",
            &[
                "threshold:max_false_replay_rate",
                "metric:false_replay_rate",
            ],
        );
    }
    if input.reasoning_avoided_tokens < input.thresholds.min_reasoning_avoided_tokens {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "reasoning_avoided_tokens_below_threshold",
            &[
                "threshold:min_reasoning_avoided_tokens",
                "metric:reasoning_avoided_tokens",
            ],
        );
    }
    if input.replay_roi < input.thresholds.min_replay_roi {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "replay_roi_below_threshold",
            &["threshold:min_replay_roi", "metric:replay_roi"],
        );
    }
    if input.thresholds.require_replay_safety && !input.replay_safety {
        replay_release_gate_record_failed_check(
            &mut failed_checks,
            &mut evidence_refs,
            "replay_safety_required",
            &["metric:replay_safety", "threshold:require_replay_safety"],
        );
    }

    // Deterministic output ordering for stable contracts/diffs.
    failed_checks.sort();
    evidence_refs.sort();

    let status = if failed_checks.is_empty() {
        ReplayRoiReleaseGateStatus::Pass
    } else if indeterminate {
        ReplayRoiReleaseGateStatus::Indeterminate
    } else {
        ReplayRoiReleaseGateStatus::FailClosed
    };
    let joined_checks = if failed_checks.is_empty() {
        "none".to_string()
    } else {
        failed_checks.join(",")
    };
    let summary = match status {
        ReplayRoiReleaseGateStatus::Pass => format!(
            "release gate pass: attempts={} hit_rate={:.3} false_replay_rate={:.3} reasoning_avoided_tokens={} replay_roi={:.3} replay_safety={}",
            input.replay_attempts_total,
            input.replay_hit_rate,
            input.false_replay_rate,
            input.reasoning_avoided_tokens,
            input.replay_roi,
            input.replay_safety
        ),
        ReplayRoiReleaseGateStatus::FailClosed => format!(
            "release gate fail_closed: failed_checks=[{}] attempts={} hit_rate={:.3} false_replay_rate={:.3} reasoning_avoided_tokens={} replay_roi={:.3} replay_safety={}",
            joined_checks,
            input.replay_attempts_total,
            input.replay_hit_rate,
            input.false_replay_rate,
            input.reasoning_avoided_tokens,
            input.replay_roi,
            input.replay_safety
        ),
        ReplayRoiReleaseGateStatus::Indeterminate => format!(
            "release gate indeterminate (fail-closed): failed_checks=[{}] attempts={} hit_rate={:.3} false_replay_rate={:.3} reasoning_avoided_tokens={} replay_roi={:.3} replay_safety={}",
            joined_checks,
            input.replay_attempts_total,
            input.replay_hit_rate,
            input.false_replay_rate,
            input.reasoning_avoided_tokens,
            input.replay_roi,
            input.replay_safety
        ),
    };

    ReplayRoiReleaseGateOutputContract {
        status,
        failed_checks,
        evidence_refs,
        summary,
    }
}
6197
6198fn replay_release_gate_record_failed_check(
6199 failed_checks: &mut Vec<String>,
6200 evidence_refs: &mut Vec<String>,
6201 check: &str,
6202 refs: &[&str],
6203) {
6204 replay_release_gate_push_unique(failed_checks, check.to_string());
6205 for entry in refs {
6206 replay_release_gate_push_unique(evidence_refs, (*entry).to_string());
6207 }
6208}
6209
/// Push `entry` into `values` unless an equal string is already present.
///
/// Insertion order is preserved; duplicates are silently dropped. Uses the
/// standard `Vec::contains` instead of a hand-rolled `iter().any(..)` scan.
fn replay_release_gate_push_unique(values: &mut Vec<String>, entry: impl Into<String>) {
    let entry = entry.into();
    if !values.contains(&entry) {
        values.push(entry);
    }
}
6216
/// A rate is valid only when it is a finite number inside the inclusive
/// unit interval [0.0, 1.0]; NaN and infinities are rejected.
fn replay_release_gate_rate_valid(value: f64) -> bool {
    value.is_finite() && value >= 0.0 && value <= 1.0
}
6220
/// Tolerant float equality used by the release-gate threshold comparisons.
/// Two values are considered equal when they differ by at most 1e-9.
fn replay_release_gate_float_eq(left: f64, right: f64) -> bool {
    const TOLERANCE: f64 = 1e-9;
    (left - right).abs() <= TOLERANCE
}
6224
/// Build the health view exposed alongside metrics from a metrics snapshot.
///
/// This constructor always reports status "ok" and copies only the
/// projection counters through; degraded states are not produced here.
fn evolution_health_snapshot(snapshot: &EvolutionMetricsSnapshot) -> EvolutionHealthSnapshot {
    EvolutionHealthSnapshot {
        status: "ok".into(),
        last_event_seq: snapshot.last_event_seq,
        promoted_genes: snapshot.promoted_genes,
        promoted_capsules: snapshot.promoted_capsules,
    }
}
6233
/// Render the evolution metrics snapshot and health view in the Prometheus
/// text exposition format (paired `# HELP`/`# TYPE` headers followed by
/// samples).
///
/// Per-task-class and per-source families emit one sample per snapshot
/// entry; label values are escaped via `prometheus_label_value`. Output
/// text is load-bearing for scrapers, so every literal here is exact.
fn render_evolution_metrics_prometheus(
    snapshot: &EvolutionMetricsSnapshot,
    health: &EvolutionHealthSnapshot,
) -> String {
    let mut out = String::new();
    // Global replay counters and token-space ROI.
    out.push_str(
        "# HELP oris_evolution_replay_attempts_total Total replay attempts that reached validation.\n",
    );
    out.push_str("# TYPE oris_evolution_replay_attempts_total counter\n");
    out.push_str(&format!(
        "oris_evolution_replay_attempts_total {}\n",
        snapshot.replay_attempts_total
    ));
    out.push_str("# HELP oris_evolution_replay_success_total Total replay attempts that reused a capsule successfully.\n");
    out.push_str("# TYPE oris_evolution_replay_success_total counter\n");
    out.push_str(&format!(
        "oris_evolution_replay_success_total {}\n",
        snapshot.replay_success_total
    ));
    out.push_str("# HELP oris_evolution_replay_reasoning_avoided_total Total planner steps avoided by successful replay.\n");
    out.push_str("# TYPE oris_evolution_replay_reasoning_avoided_total counter\n");
    out.push_str(&format!(
        "oris_evolution_replay_reasoning_avoided_total {}\n",
        snapshot.replay_reasoning_avoided_total
    ));
    out.push_str("# HELP oris_evolution_reasoning_avoided_tokens_total Estimated reasoning tokens avoided by replay hits.\n");
    out.push_str("# TYPE oris_evolution_reasoning_avoided_tokens_total counter\n");
    out.push_str(&format!(
        "oris_evolution_reasoning_avoided_tokens_total {}\n",
        snapshot.reasoning_avoided_tokens_total
    ));
    out.push_str("# HELP oris_evolution_replay_fallback_cost_total Estimated reasoning token cost spent on replay fallbacks.\n");
    out.push_str("# TYPE oris_evolution_replay_fallback_cost_total counter\n");
    out.push_str(&format!(
        "oris_evolution_replay_fallback_cost_total {}\n",
        snapshot.replay_fallback_cost_total
    ));
    out.push_str("# HELP oris_evolution_replay_roi Net replay ROI in token space ((avoided - fallback_cost) / total).\n");
    out.push_str("# TYPE oris_evolution_replay_roi gauge\n");
    out.push_str(&format!(
        "oris_evolution_replay_roi {:.6}\n",
        snapshot.replay_roi
    ));
    // Per-task-class replay families: one labelled sample per class.
    out.push_str("# HELP oris_evolution_replay_utilization_by_task_class_total Successful replay reuse counts grouped by deterministic task class.\n");
    out.push_str("# TYPE oris_evolution_replay_utilization_by_task_class_total counter\n");
    for task_class in &snapshot.replay_task_classes {
        out.push_str(&format!(
            "oris_evolution_replay_utilization_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
            prometheus_label_value(&task_class.task_class_id),
            prometheus_label_value(&task_class.task_label),
            task_class.replay_success_total
        ));
    }
    out.push_str("# HELP oris_evolution_replay_reasoning_avoided_by_task_class_total Planner steps avoided by successful replay grouped by deterministic task class.\n");
    out.push_str("# TYPE oris_evolution_replay_reasoning_avoided_by_task_class_total counter\n");
    for task_class in &snapshot.replay_task_classes {
        out.push_str(&format!(
            "oris_evolution_replay_reasoning_avoided_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
            prometheus_label_value(&task_class.task_class_id),
            prometheus_label_value(&task_class.task_label),
            task_class.reasoning_steps_avoided_total
        ));
    }
    out.push_str("# HELP oris_evolution_reasoning_avoided_tokens_by_task_class_total Estimated reasoning tokens avoided by replay hits grouped by deterministic task class.\n");
    out.push_str("# TYPE oris_evolution_reasoning_avoided_tokens_by_task_class_total counter\n");
    for task_class in &snapshot.replay_task_classes {
        out.push_str(&format!(
            "oris_evolution_reasoning_avoided_tokens_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
            prometheus_label_value(&task_class.task_class_id),
            prometheus_label_value(&task_class.task_label),
            task_class.reasoning_avoided_tokens_total
        ));
    }
    out.push_str("# HELP oris_evolution_replay_fallback_cost_by_task_class_total Estimated fallback token cost grouped by deterministic task class.\n");
    out.push_str("# TYPE oris_evolution_replay_fallback_cost_by_task_class_total counter\n");
    for task_class in &snapshot.replay_task_classes {
        out.push_str(&format!(
            "oris_evolution_replay_fallback_cost_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
            prometheus_label_value(&task_class.task_class_id),
            prometheus_label_value(&task_class.task_label),
            task_class.replay_fallback_cost_total
        ));
    }
    out.push_str("# HELP oris_evolution_replay_roi_by_task_class Replay ROI in token space grouped by deterministic task class.\n");
    out.push_str("# TYPE oris_evolution_replay_roi_by_task_class gauge\n");
    for task_class in &snapshot.replay_task_classes {
        out.push_str(&format!(
            "oris_evolution_replay_roi_by_task_class{{task_class_id=\"{}\",task_label=\"{}\"}} {:.6}\n",
            prometheus_label_value(&task_class.task_class_id),
            prometheus_label_value(&task_class.task_label),
            task_class.replay_roi
        ));
    }
    // Per-source families keyed by remote sender id (cross-node reconciliation).
    out.push_str("# HELP oris_evolution_replay_roi_by_source Replay ROI in token space grouped by remote sender id for cross-node reconciliation.\n");
    out.push_str("# TYPE oris_evolution_replay_roi_by_source gauge\n");
    for source in &snapshot.replay_sources {
        out.push_str(&format!(
            "oris_evolution_replay_roi_by_source{{source_sender_id=\"{}\"}} {:.6}\n",
            prometheus_label_value(&source.source_sender_id),
            source.replay_roi
        ));
    }
    out.push_str("# HELP oris_evolution_reasoning_avoided_tokens_by_source_total Estimated reasoning tokens avoided grouped by remote sender id.\n");
    out.push_str("# TYPE oris_evolution_reasoning_avoided_tokens_by_source_total counter\n");
    for source in &snapshot.replay_sources {
        out.push_str(&format!(
            "oris_evolution_reasoning_avoided_tokens_by_source_total{{source_sender_id=\"{}\"}} {}\n",
            prometheus_label_value(&source.source_sender_id),
            source.reasoning_avoided_tokens_total
        ));
    }
    out.push_str("# HELP oris_evolution_replay_fallback_cost_by_source_total Estimated replay fallback token cost grouped by remote sender id.\n");
    out.push_str("# TYPE oris_evolution_replay_fallback_cost_by_source_total counter\n");
    for source in &snapshot.replay_sources {
        out.push_str(&format!(
            "oris_evolution_replay_fallback_cost_by_source_total{{source_sender_id=\"{}\"}} {}\n",
            prometheus_label_value(&source.source_sender_id),
            source.replay_fallback_cost_total
        ));
    }
    // Aggregate rates plus mutation / promotion / revocation lifecycle counters.
    out.push_str("# HELP oris_evolution_replay_success_rate Successful replay attempts divided by replay attempts that reached validation.\n");
    out.push_str("# TYPE oris_evolution_replay_success_rate gauge\n");
    out.push_str(&format!(
        "oris_evolution_replay_success_rate {:.6}\n",
        snapshot.replay_success_rate
    ));
    out.push_str("# HELP oris_evolution_confidence_revalidations_total Total confidence-driven demotions that require revalidation before replay.\n");
    out.push_str("# TYPE oris_evolution_confidence_revalidations_total counter\n");
    out.push_str(&format!(
        "oris_evolution_confidence_revalidations_total {}\n",
        snapshot.confidence_revalidations_total
    ));
    out.push_str(
        "# HELP oris_evolution_mutation_declared_total Total declared mutations recorded in the evolution log.\n",
    );
    out.push_str("# TYPE oris_evolution_mutation_declared_total counter\n");
    out.push_str(&format!(
        "oris_evolution_mutation_declared_total {}\n",
        snapshot.mutation_declared_total
    ));
    out.push_str("# HELP oris_evolution_promoted_mutations_total Total mutations promoted by the governor.\n");
    out.push_str("# TYPE oris_evolution_promoted_mutations_total counter\n");
    out.push_str(&format!(
        "oris_evolution_promoted_mutations_total {}\n",
        snapshot.promoted_mutations_total
    ));
    out.push_str(
        "# HELP oris_evolution_promotion_ratio Promoted mutations divided by declared mutations.\n",
    );
    out.push_str("# TYPE oris_evolution_promotion_ratio gauge\n");
    out.push_str(&format!(
        "oris_evolution_promotion_ratio {:.6}\n",
        snapshot.promotion_ratio
    ));
    out.push_str("# HELP oris_evolution_gene_revocations_total Total gene revocations recorded in the evolution log.\n");
    out.push_str("# TYPE oris_evolution_gene_revocations_total counter\n");
    out.push_str(&format!(
        "oris_evolution_gene_revocations_total {}\n",
        snapshot.gene_revocations_total
    ));
    out.push_str("# HELP oris_evolution_mutation_velocity_last_hour Declared mutations observed in the last hour.\n");
    out.push_str("# TYPE oris_evolution_mutation_velocity_last_hour gauge\n");
    out.push_str(&format!(
        "oris_evolution_mutation_velocity_last_hour {}\n",
        snapshot.mutation_velocity_last_hour
    ));
    out.push_str("# HELP oris_evolution_revoke_frequency_last_hour Gene revocations observed in the last hour.\n");
    out.push_str("# TYPE oris_evolution_revoke_frequency_last_hour gauge\n");
    out.push_str(&format!(
        "oris_evolution_revoke_frequency_last_hour {}\n",
        snapshot.revoke_frequency_last_hour
    ));
    // Projection gauges and store/health status.
    out.push_str("# HELP oris_evolution_promoted_genes Current promoted genes in the evolution projection.\n");
    out.push_str("# TYPE oris_evolution_promoted_genes gauge\n");
    out.push_str(&format!(
        "oris_evolution_promoted_genes {}\n",
        snapshot.promoted_genes
    ));
    out.push_str("# HELP oris_evolution_promoted_capsules Current promoted capsules in the evolution projection.\n");
    out.push_str("# TYPE oris_evolution_promoted_capsules gauge\n");
    out.push_str(&format!(
        "oris_evolution_promoted_capsules {}\n",
        snapshot.promoted_capsules
    ));
    out.push_str("# HELP oris_evolution_store_last_event_seq Last visible append-only evolution event sequence.\n");
    out.push_str("# TYPE oris_evolution_store_last_event_seq gauge\n");
    out.push_str(&format!(
        "oris_evolution_store_last_event_seq {}\n",
        snapshot.last_event_seq
    ));
    out.push_str(
        "# HELP oris_evolution_health Evolution observability store health (1 = healthy).\n",
    );
    out.push_str("# TYPE oris_evolution_health gauge\n");
    out.push_str(&format!(
        "oris_evolution_health {}\n",
        // 1 when the health view says "ok", 0 otherwise.
        u8::from(health.status == "ok")
    ));
    out
}
6434
6435fn count_recent_events(
6436 events: &[StoredEvolutionEvent],
6437 cutoff: DateTime<Utc>,
6438 predicate: impl Fn(&EvolutionEvent) -> bool,
6439) -> u64 {
6440 events
6441 .iter()
6442 .filter(|stored| {
6443 predicate(&stored.event)
6444 && parse_event_timestamp(&stored.timestamp)
6445 .map(|timestamp| timestamp >= cutoff)
6446 .unwrap_or(false)
6447 })
6448 .count() as u64
6449}
6450
/// Escape a string for use as a Prometheus text-format label value.
///
/// The exposition format requires escaping backslash, newline, and double
/// quote inside label values. This single pass replaces the previous chain
/// of three `str::replace` calls, avoiding two intermediate allocations
/// while producing byte-identical output.
fn prometheus_label_value(input: &str) -> String {
    let mut escaped = String::with_capacity(input.len());
    for ch in input.chars() {
        match ch {
            '\\' => escaped.push_str("\\\\"),
            '\n' => escaped.push_str("\\n"),
            '"' => escaped.push_str("\\\""),
            other => escaped.push(other),
        }
    }
    escaped
}
6457
6458fn parse_event_timestamp(raw: &str) -> Option<DateTime<Utc>> {
6459 DateTime::parse_from_rfc3339(raw)
6460 .ok()
6461 .map(|parsed| parsed.with_timezone(&Utc))
6462}
6463
6464fn is_replay_validation_failure(event: &EvolutionEvent) -> bool {
6465 matches!(
6466 event,
6467 EvolutionEvent::ValidationFailed {
6468 gene_id: Some(_),
6469 ..
6470 }
6471 )
6472}
6473
6474fn is_confidence_revalidation_event(event: &EvolutionEvent) -> bool {
6475 matches!(
6476 event,
6477 EvolutionEvent::PromotionEvaluated {
6478 state,
6479 reason,
6480 reason_code,
6481 ..
6482 }
6483 if *state == AssetState::Quarantined
6484 && (reason_code == &TransitionReasonCode::RevalidationConfidenceDecay
6485 || (reason_code == &TransitionReasonCode::Unspecified
6486 && reason.contains("confidence decayed")))
6487 )
6488}
6489
/// Divide two counters as floats, defining division by zero as `0.0` so
/// derived rates never surface NaN or infinity.
fn safe_ratio(numerator: u64, denominator: u64) -> f64 {
    match denominator {
        0 => 0.0,
        nonzero => numerator as f64 / nonzero as f64,
    }
}
6497
/// Map a store-layer error into the kernel error type, flattening the
/// source error to its display string.
fn store_err(err: EvolutionError) -> EvoKernelError {
    EvoKernelError::Store(err.to_string())
}
6501
6502#[cfg(test)]
6503mod tests {
6504 use super::*;
6505 use oris_agent_contract::{
6506 AgentRole, CoordinationPlan, CoordinationPrimitive, CoordinationTask,
6507 };
6508 use oris_kernel::{
6509 AllowAllPolicy, InMemoryEventStore, KernelMode, KernelState, NoopActionExecutor,
6510 NoopStepFn, StateUpdatedOnlyReducer,
6511 };
6512 use serde::{Deserialize, Serialize};
6513
    /// Zero-sized kernel state used as a stand-in for real state in tests.
    #[derive(Clone, Debug, Default, Serialize, Deserialize)]
    struct TestState;
6516
    impl KernelState for TestState {
        // Fixed schema version; tests never exercise state migration.
        fn version(&self) -> u32 {
            1
        }
    }
6522
    /// The repair quality gate accepts plans whose required sections use
    /// semantically equivalent (non-canonical) headings.
    #[test]
    fn repair_quality_gate_accepts_semantic_variants() {
        let plan = r#"
根本原因:脚本中拼写错误导致 unknown command 'process'。
修复建议:将 `proccess` 更正为 `process`,并统一命令入口。
验证方式:执行 `cargo check -p oris-runtime` 与回归测试。
恢复方案:若新入口异常,立即回滚到旧命令映射。
"#;
        let report = evaluate_repair_quality_gate(plan);
        assert!(report.passes());
        assert!(report.failed_checks().is_empty());
    }
6535
    /// Plans that never anchor to the concrete incident (no "unknown
    /// command" reference) are rejected by the quality gate.
    #[test]
    fn repair_quality_gate_rejects_missing_incident_anchor() {
        let plan = r#"
原因分析:逻辑分支覆盖不足。
修复方案:补充分支与日志。
验证命令:cargo check -p oris-runtime
回滚方案:git revert HEAD
"#;
        let report = evaluate_repair_quality_gate(plan);
        assert!(!report.passes());
        assert!(report
            .failed_checks()
            .iter()
            .any(|check| check.contains("unknown command")));
    }
6551
    /// Create a fresh throwaway cargo workspace under the OS temp dir,
    /// removing any leftover directory from a previous run of the same test
    /// (the path is keyed by test name and process id).
    fn temp_workspace(name: &str) -> std::path::PathBuf {
        let root =
            std::env::temp_dir().join(format!("oris-evokernel-{name}-{}", std::process::id()));
        if root.exists() {
            fs::remove_dir_all(&root).unwrap();
        }
        fs::create_dir_all(root.join("src")).unwrap();
        fs::write(
            root.join("Cargo.toml"),
            "[package]\nname = \"sample\"\nversion = \"0.1.0\"\nedition = \"2021\"\n",
        )
        .unwrap();
        fs::write(root.join("Cargo.lock"), "# lock\n").unwrap();
        fs::write(root.join("src/lib.rs"), "pub fn demo() -> usize { 1 }\n").unwrap();
        root
    }
6568
    /// Assemble a fully inert kernel: every collaborator is a no-op stub so
    /// tests exercise only the EvoKernel layer on top of it.
    fn test_kernel() -> Arc<Kernel<TestState>> {
        Arc::new(Kernel::<TestState> {
            events: Box::new(InMemoryEventStore::new()),
            snaps: None,
            reducer: Box::new(StateUpdatedOnlyReducer),
            exec: Box::new(NoopActionExecutor),
            step: Box::new(NoopStepFn),
            policy: Box::new(AllowAllPolicy),
            effect_sink: None,
            mode: KernelMode::Normal,
        })
    }
6581
    /// A minimal validation plan: a single fast `git --version` stage so
    /// tests stay well under the 5s stage timeout.
    fn lightweight_plan() -> ValidationPlan {
        ValidationPlan {
            profile: "test".into(),
            stages: vec![ValidationStage::Command {
                program: "git".into(),
                args: vec!["--version".into()],
                timeout_ms: 5_000,
            }],
        }
    }
6592
    /// A canonical low-risk prepared mutation: adds a one-line README,
    /// constrained to that single path, based on HEAD.
    fn sample_mutation() -> PreparedMutation {
        prepare_mutation(
            MutationIntent {
                id: "mutation-1".into(),
                intent: "add README".into(),
                target: MutationTarget::Paths {
                    allow: vec!["README.md".into()],
                },
                expected_effect: "repo still builds".into(),
                risk: RiskLevel::Low,
                signals: vec!["missing readme".into()],
                spec_id: None,
            },
            "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1 @@
+# sample
"
            .into(),
            Some("HEAD".into()),
        )
    }
6619
    /// Sandbox policy used across tests: only `git` may run, with generous
    /// time and output budgets and no env filtering.
    fn base_sandbox_policy() -> SandboxPolicy {
        SandboxPolicy {
            allowed_programs: vec!["git".into()],
            max_duration_ms: 60_000,
            max_output_bytes: 1024 * 1024,
            denied_env_prefixes: Vec::new(),
        }
    }
6628
    /// Shared command-executing validator built on the base sandbox policy.
    fn command_validator() -> Arc<dyn Validator> {
        Arc::new(CommandValidator::new(base_sandbox_policy()))
    }
6632
    /// Build a `SelectorInput` for `signal` whose environment fingerprint
    /// mirrors the machine running the tests (rustc version, lock hash,
    /// target triple, OS) so locally captured capsules are replay-eligible.
    fn replay_input(signal: &str) -> SelectorInput {
        // Best-effort toolchain probe; falls back to a sentinel string when
        // rustc is unavailable or exits non-zero.
        let rustc_version = std::process::Command::new("rustc")
            .arg("--version")
            .output()
            .ok()
            .filter(|output| output.status.success())
            .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
            .unwrap_or_else(|| "rustc unknown".into());
        SelectorInput {
            signals: vec![signal.into()],
            env: EnvFingerprint {
                rustc_version,
                // Matches the "# lock\n" contents written by temp_workspace.
                cargo_lock_hash: compute_artifact_hash("# lock\n"),
                target_triple: format!(
                    "{}-unknown-{}",
                    std::env::consts::ARCH,
                    std::env::consts::OS
                ),
                os: std::env::consts::OS.into(),
            },
            spec_id: None,
            limit: 1,
        }
    }
6657
    /// Construct an `EvoKernel` over a caller-supplied store: fresh temp
    /// workspace, local process sandbox, a governor that promotes after a
    /// single success, the lightweight validation plan, and the base
    /// sandbox policy.
    fn build_test_evo_with_store(
        name: &str,
        run_id: &str,
        validator: Arc<dyn Validator>,
        store: Arc<dyn EvolutionStore>,
    ) -> EvoKernel<TestState> {
        let workspace = temp_workspace(name);
        let sandbox: Arc<dyn Sandbox> = Arc::new(oris_sandbox::LocalProcessSandbox::new(
            run_id,
            &workspace,
            std::env::temp_dir(),
        ));
        EvoKernel::new(test_kernel(), sandbox, validator, store)
            .with_governor(Arc::new(DefaultGovernor::new(
                oris_governor::GovernorConfig {
                    promote_after_successes: 1,
                    ..Default::default()
                },
            )))
            .with_validation_plan(lightweight_plan())
            .with_sandbox_policy(base_sandbox_policy())
    }
6680
    /// Like `build_test_evo_with_store`, but also creates a fresh JSONL
    /// store under the temp dir (wiping any leftovers) and returns the
    /// store alongside the kernel so tests can scan its events.
    fn build_test_evo(
        name: &str,
        run_id: &str,
        validator: Arc<dyn Validator>,
    ) -> (EvoKernel<TestState>, Arc<dyn EvolutionStore>) {
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-{name}-store-{}",
            std::process::id()
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let evo = build_test_evo_with_store(name, run_id, validator, store.clone());
        (evo, store)
    }
6698
    /// Convenience wrapper: publish envelope whose env fingerprint matches
    /// the local replay input for `signal`, so replay matching succeeds.
    fn remote_publish_envelope(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        signal: &str,
        file_name: &str,
        line: &str,
    ) -> EvolutionEnvelope {
        remote_publish_envelope_with_env(
            sender_id,
            run_id,
            gene_id,
            capsule_id,
            mutation_id,
            signal,
            file_name,
            line,
            replay_input(signal).env,
        )
    }
6721
6722 fn remote_publish_envelope_with_env(
6723 sender_id: &str,
6724 run_id: &str,
6725 gene_id: &str,
6726 capsule_id: &str,
6727 mutation_id: &str,
6728 signal: &str,
6729 file_name: &str,
6730 line: &str,
6731 env: EnvFingerprint,
6732 ) -> EvolutionEnvelope {
6733 let mutation = prepare_mutation(
6734 MutationIntent {
6735 id: mutation_id.into(),
6736 intent: format!("add {file_name}"),
6737 target: MutationTarget::Paths {
6738 allow: vec![file_name.into()],
6739 },
6740 expected_effect: "replay should still validate".into(),
6741 risk: RiskLevel::Low,
6742 signals: vec![signal.into()],
6743 spec_id: None,
6744 },
6745 format!(
6746 "\
6747diff --git a/{file_name} b/{file_name}
6748new file mode 100644
6749index 0000000..1111111
6750--- /dev/null
6751+++ b/{file_name}
6752@@ -0,0 +1 @@
6753+{line}
6754"
6755 ),
6756 Some("HEAD".into()),
6757 );
6758 let gene = Gene {
6759 id: gene_id.into(),
6760 signals: vec![signal.into()],
6761 strategy: vec![file_name.into()],
6762 validation: vec!["test".into()],
6763 state: AssetState::Promoted,
6764 };
6765 let capsule = Capsule {
6766 id: capsule_id.into(),
6767 gene_id: gene_id.into(),
6768 mutation_id: mutation_id.into(),
6769 run_id: run_id.into(),
6770 diff_hash: mutation.artifact.content_hash.clone(),
6771 confidence: 0.9,
6772 env,
6773 outcome: Outcome {
6774 success: true,
6775 validation_profile: "test".into(),
6776 validation_duration_ms: 1,
6777 changed_files: vec![file_name.into()],
6778 validator_hash: "validator-hash".into(),
6779 lines_changed: 1,
6780 replay_verified: false,
6781 },
6782 state: AssetState::Promoted,
6783 };
6784 EvolutionEnvelope::publish(
6785 sender_id,
6786 vec![
6787 NetworkAsset::EvolutionEvent {
6788 event: EvolutionEvent::MutationDeclared { mutation },
6789 },
6790 NetworkAsset::Gene { gene: gene.clone() },
6791 NetworkAsset::Capsule {
6792 capsule: capsule.clone(),
6793 },
6794 NetworkAsset::EvolutionEvent {
6795 event: EvolutionEvent::CapsuleReleased {
6796 capsule_id: capsule.id.clone(),
6797 state: AssetState::Promoted,
6798 },
6799 },
6800 ],
6801 )
6802 }
6803
    /// Build a publish envelope for a remote node: a declared mutation that
    /// adds `file_name` with content `line`, a promoted gene, the promoted
    /// capsule linking them (confidence 0.9, explicit env fingerprint), and
    /// a capsule-released event — with independently chosen signal lists
    /// for the mutation and the gene.
    fn remote_publish_envelope_with_signals(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        mutation_signals: Vec<String>,
        gene_signals: Vec<String>,
        file_name: &str,
        line: &str,
        env: EnvFingerprint,
    ) -> EvolutionEnvelope {
        let mutation = prepare_mutation(
            MutationIntent {
                id: mutation_id.into(),
                intent: format!("add {file_name}"),
                target: MutationTarget::Paths {
                    allow: vec![file_name.into()],
                },
                expected_effect: "replay should still validate".into(),
                risk: RiskLevel::Low,
                signals: mutation_signals,
                spec_id: None,
            },
            format!(
                "\
diff --git a/{file_name} b/{file_name}
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/{file_name}
@@ -0,0 +1 @@
+{line}
"
            ),
            Some("HEAD".into()),
        );
        let gene = Gene {
            id: gene_id.into(),
            signals: gene_signals,
            strategy: vec![file_name.into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        };
        let capsule = Capsule {
            id: capsule_id.into(),
            gene_id: gene_id.into(),
            mutation_id: mutation_id.into(),
            run_id: run_id.into(),
            // Diff hash ties the capsule to the prepared artifact above.
            diff_hash: mutation.artifact.content_hash.clone(),
            confidence: 0.9,
            env,
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec![file_name.into()],
                validator_hash: "validator-hash".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        };
        EvolutionEnvelope::publish(
            sender_id,
            vec![
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared { mutation },
                },
                NetworkAsset::Gene { gene: gene.clone() },
                NetworkAsset::Capsule {
                    capsule: capsule.clone(),
                },
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::CapsuleReleased {
                        capsule_id: capsule.id.clone(),
                        state: AssetState::Promoted,
                    },
                },
            ],
        )
    }
6886
    /// Test validator that always reports the configured verdict.
    struct FixedValidator {
        // Verdict returned by every `run` call.
        success: bool,
    }
6890
    #[async_trait]
    impl Validator for FixedValidator {
        /// Return the fixed verdict; logs echo the plan profile so tests
        /// can distinguish which plan produced the report.
        async fn run(
            &self,
            _receipt: &SandboxReceipt,
            plan: &ValidationPlan,
        ) -> Result<ValidationReport, ValidationError> {
            Ok(ValidationReport {
                success: self.success,
                duration_ms: 1,
                stages: Vec::new(),
                logs: if self.success {
                    format!("{} ok", plan.profile)
                } else {
                    format!("{} failed", plan.profile)
                },
            })
        }
    }
6910
    /// Store wrapper that injects a failure on the N-th `append_event` call
    /// while delegating everything else to a real JSONL store.
    struct FailOnAppendStore {
        inner: JsonlEvolutionStore,
        // 1-based index of the append call that should fail.
        fail_on_call: usize,
        call_count: Mutex<usize>,
    }
6916
    impl FailOnAppendStore {
        /// Wrap a fresh JSONL store rooted at `root_dir`, failing the
        /// `fail_on_call`-th append.
        fn new(root_dir: std::path::PathBuf, fail_on_call: usize) -> Self {
            Self {
                inner: JsonlEvolutionStore::new(root_dir),
                fail_on_call,
                call_count: Mutex::new(0),
            }
        }
    }
6926
    impl EvolutionStore for FailOnAppendStore {
        // Count every append; fail exactly on the configured call number,
        // otherwise delegate to the inner store.
        fn append_event(&self, event: EvolutionEvent) -> Result<u64, EvolutionError> {
            let mut call_count = self
                .call_count
                .lock()
                .map_err(|_| EvolutionError::Io("test store lock poisoned".into()))?;
            *call_count += 1;
            if *call_count == self.fail_on_call {
                return Err(EvolutionError::Io("injected append failure".into()));
            }
            self.inner.append_event(event)
        }

        // Reads are passthrough: only appends are fault-injected.
        fn scan(&self, from_seq: u64) -> Result<Vec<StoredEvolutionEvent>, EvolutionError> {
            self.inner.scan(from_seq)
        }

        fn rebuild_projection(&self) -> Result<EvolutionProjection, EvolutionError> {
            self.inner.rebuild_projection()
        }
    }
6948
    /// Sequential plan: planner completes, coder follows, and a handoff
    /// message from Planner to Coder is recorded for the coder task.
    #[test]
    fn coordination_planner_to_coder_handoff_is_deterministic() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "ship feature".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "planner".into(),
                    role: AgentRole::Planner,
                    description: "split the work".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "implement the patch".into(),
                    depends_on: vec!["planner".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["planner", "coder"]);
        assert!(result.failed_tasks.is_empty());
        assert!(result.messages.iter().any(|message| {
            message.from_role == AgentRole::Planner
                && message.to_role == AgentRole::Coder
                && message.task_id == "coder"
        }));
    }
6980
    /// A coder task described as force-failing triggers the dependent
    /// repair task, which completes; a Coder-to-Repair message is recorded.
    #[test]
    fn coordination_repair_runs_only_after_coder_failure() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "fix broken implementation".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "force-fail initial implementation".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "repair".into(),
                    role: AgentRole::Repair,
                    description: "patch the failed implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["repair"]);
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert!(result.messages.iter().any(|message| {
            message.from_role == AgentRole::Coder
                && message.to_role == AgentRole::Repair
                && message.task_id == "repair"
        }));
    }
7012
    /// A successful coder step is followed by the dependent optimizer step;
    /// both complete in order with no failures.
    #[test]
    fn coordination_optimizer_runs_after_successful_implementation_step() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "ship optimized patch".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "implement a working patch".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "optimizer".into(),
                    role: AgentRole::Optimizer,
                    description: "tighten the implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["coder", "optimizer"]);
        assert!(result.failed_tasks.is_empty());
    }
7039
    /// Parallel mode: tasks in the same dependency wave are merged in
    /// sorted-id order ("a-task" before "z-task"), then dependents follow.
    #[test]
    fn coordination_parallel_waves_preserve_sorted_merge_order() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "parallelize safe tasks".into(),
            primitive: CoordinationPrimitive::Parallel,
            tasks: vec![
                CoordinationTask {
                    id: "z-task".into(),
                    role: AgentRole::Planner,
                    description: "analyze z".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "a-task".into(),
                    role: AgentRole::Coder,
                    description: "implement a".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "mid-task".into(),
                    role: AgentRole::Optimizer,
                    description: "polish after both".into(),
                    depends_on: vec!["z-task".into(), "a-task".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["a-task", "z-task", "mid-task"]);
        assert!(result.failed_tasks.is_empty());
    }
7072
    /// With max_retries = 1, a force-failing task produces exactly two
    /// failure messages (initial attempt + one retry) and then stops.
    #[test]
    fn coordination_retries_stop_at_max_retries() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "retry then stop".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![CoordinationTask {
                id: "coder".into(),
                role: AgentRole::Coder,
                description: "force-fail this task".into(),
                depends_on: Vec::new(),
            }],
            timeout_ms: 5_000,
            max_retries: 1,
        });

        assert!(result.completed_tasks.is_empty());
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert_eq!(
            result
                .messages
                .iter()
                .filter(|message| message.task_id == "coder" && message.content.contains("failed"))
                .count(),
            2
        );
    }
7099
    /// Conditional mode: when a dependency fails, downstream tasks are
    /// skipped (with an explanatory message) rather than marked failed.
    #[test]
    fn coordination_conditional_mode_skips_downstream_tasks_on_failure() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "skip blocked follow-up work".into(),
            primitive: CoordinationPrimitive::Conditional,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "force-fail the implementation".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "optimizer".into(),
                    role: AgentRole::Optimizer,
                    description: "only optimize a successful implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert!(result.completed_tasks.is_empty());
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert!(result.messages.iter().any(|message| {
            message.task_id == "optimizer"
                && message
                    .content
                    .contains("skipped due to failed dependency chain")
        }));
        assert!(!result
            .failed_tasks
            .iter()
            .any(|task_id| task_id == "optimizer"));
    }
7136
    /// The command validator runs each plan stage and reports one stage
    /// entry per configured command.
    #[tokio::test]
    async fn command_validator_aggregates_stage_reports() {
        let workspace = temp_workspace("validator");
        // Hand-built receipt: the validator only needs a workdir to run in.
        let receipt = SandboxReceipt {
            mutation_id: "m".into(),
            workdir: workspace,
            applied: true,
            changed_files: Vec::new(),
            patch_hash: "hash".into(),
            stdout_log: std::env::temp_dir().join("stdout.log"),
            stderr_log: std::env::temp_dir().join("stderr.log"),
        };
        let validator = CommandValidator::new(SandboxPolicy {
            allowed_programs: vec!["git".into()],
            max_duration_ms: 1_000,
            max_output_bytes: 1024,
            denied_env_prefixes: Vec::new(),
        });
        let report = validator
            .run(
                &receipt,
                &ValidationPlan {
                    profile: "test".into(),
                    stages: vec![ValidationStage::Command {
                        program: "git".into(),
                        args: vec!["--version".into()],
                        timeout_ms: 1_000,
                    }],
                },
            )
            .await
            .unwrap();
        assert_eq!(report.stages.len(), 1);
    }
7171
    /// Capturing a successful mutation must append a `CapsuleCommitted` event
    /// to the store and hand back a capsule with a non-empty id.
    #[tokio::test]
    async fn capture_successful_mutation_appends_capsule() {
        let (evo, store) = build_test_evo("capture", "run-1", command_validator());
        let capsule = evo
            .capture_successful_mutation(&"run-1".into(), sample_mutation())
            .await
            .unwrap();
        // Scan from the first sequence number and look for the commit event.
        let events = store.scan(1).unwrap();
        assert!(events
            .iter()
            .any(|stored| matches!(stored.event, EvolutionEvent::CapsuleCommitted { .. })));
        assert!(!capsule.id.is_empty());
    }
7185
    /// A replay hit must record a `CapsuleReused` event that keeps the
    /// original capture run id ("run-2") while also tagging the new replay
    /// run id, and must surface consistent detect/select evidence.
    #[tokio::test]
    async fn replay_hit_records_capsule_reused() {
        let (evo, store) = build_test_evo("replay", "run-2", command_validator());
        let capsule = evo
            .capture_successful_mutation(&"run-2".into(), sample_mutation())
            .await
            .unwrap();
        let replay_run_id = "run-replay".to_string();
        let decision = evo
            .replay_or_fallback_for_run(&replay_run_id, replay_input("missing readme"))
            .await
            .unwrap();
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some(capsule.id));
        // Detect evidence is populated and shows a clean match (no mismatches).
        assert!(!decision.detect_evidence.task_class_id.is_empty());
        assert!(!decision.detect_evidence.matched_signals.is_empty());
        assert!(decision.detect_evidence.mismatch_reasons.is_empty());
        // Select evidence carries scored candidates (not an exact-match
        // lookup) and agrees with the capsule that was actually chosen.
        assert!(!decision.select_evidence.candidates.is_empty());
        assert!(!decision.select_evidence.exact_match_lookup);
        assert_eq!(
            decision.select_evidence.selected_capsule_id.as_deref(),
            decision.capsule_id.as_deref()
        );
        // The reuse event pairs the capture run with the replay run id.
        assert!(store.scan(1).unwrap().iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::CapsuleReused {
                run_id,
                replay_run_id: Some(current_replay_run_id),
                ..
            } if run_id == "run-2" && current_replay_run_id == &replay_run_id
        )));
    }
7218
    /// The legacy `StoreReplayExecutor::try_replay` API (which takes no
    /// replay run id) must keep the original capture run id on the
    /// `CapsuleReused` event and leave `replay_run_id` unset.
    #[tokio::test]
    async fn legacy_replay_executor_api_preserves_original_capsule_run_id() {
        let capture_run_id = "run-legacy-capture".to_string();
        let (evo, store) = build_test_evo("replay-legacy", &capture_run_id, command_validator());
        let capsule = evo
            .capture_successful_mutation(&capture_run_id, sample_mutation())
            .await
            .unwrap();
        // Build the executor directly from the evo components, mirroring how
        // the legacy call path wires it up.
        let executor = StoreReplayExecutor {
            sandbox: evo.sandbox.clone(),
            validator: evo.validator.clone(),
            store: evo.store.clone(),
            selector: evo.selector.clone(),
            governor: evo.governor.clone(),
            economics: Some(evo.economics.clone()),
            remote_publishers: Some(evo.remote_publishers.clone()),
            stake_policy: evo.stake_policy.clone(),
        };

        let decision = executor
            .try_replay(
                &replay_input("missing readme"),
                &evo.sandbox_policy,
                &evo.validation_plan,
            )
            .await
            .unwrap();

        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some(capsule.id));
        // Legacy path: run_id stays the capture run, replay_run_id is None.
        assert!(store.scan(1).unwrap().iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::CapsuleReused {
                run_id,
                replay_run_id: None,
                ..
            } if run_id == &capture_run_id
        )));
    }
7258
7259 #[tokio::test]
7260 async fn metrics_snapshot_tracks_replay_promotion_and_revocation_signals() {
7261 let (evo, _) = build_test_evo("metrics", "run-metrics", command_validator());
7262 let capsule = evo
7263 .capture_successful_mutation(&"run-metrics".into(), sample_mutation())
7264 .await
7265 .unwrap();
7266 let decision = evo
7267 .replay_or_fallback(replay_input("missing readme"))
7268 .await
7269 .unwrap();
7270 assert!(decision.used_capsule);
7271
7272 evo.revoke_assets(&RevokeNotice {
7273 sender_id: "node-metrics".into(),
7274 asset_ids: vec![capsule.id.clone()],
7275 reason: "manual test revoke".into(),
7276 })
7277 .unwrap();
7278
7279 let snapshot = evo.metrics_snapshot().unwrap();
7280 assert_eq!(snapshot.replay_attempts_total, 1);
7281 assert_eq!(snapshot.replay_success_total, 1);
7282 assert_eq!(snapshot.replay_success_rate, 1.0);
7283 assert_eq!(snapshot.confidence_revalidations_total, 0);
7284 assert_eq!(snapshot.replay_reasoning_avoided_total, 1);
7285 assert_eq!(
7286 snapshot.reasoning_avoided_tokens_total,
7287 decision.economics_evidence.reasoning_avoided_tokens
7288 );
7289 assert_eq!(snapshot.replay_fallback_cost_total, 0);
7290 assert_eq!(snapshot.replay_roi, 1.0);
7291 assert_eq!(snapshot.replay_task_classes.len(), 1);
7292 assert_eq!(snapshot.replay_task_classes[0].replay_success_total, 1);
7293 assert_eq!(snapshot.replay_task_classes[0].replay_failure_total, 0);
7294 assert_eq!(
7295 snapshot.replay_task_classes[0].reasoning_steps_avoided_total,
7296 1
7297 );
7298 assert_eq!(
7299 snapshot.replay_task_classes[0].replay_fallback_cost_total,
7300 0
7301 );
7302 assert_eq!(snapshot.replay_task_classes[0].replay_roi, 1.0);
7303 assert!(snapshot.replay_sources.is_empty());
7304 assert_eq!(snapshot.confidence_revalidations_total, 0);
7305 assert_eq!(snapshot.mutation_declared_total, 1);
7306 assert_eq!(snapshot.promoted_mutations_total, 1);
7307 assert_eq!(snapshot.promotion_ratio, 1.0);
7308 assert_eq!(snapshot.gene_revocations_total, 1);
7309 assert_eq!(snapshot.mutation_velocity_last_hour, 1);
7310 assert_eq!(snapshot.revoke_frequency_last_hour, 1);
7311 assert_eq!(snapshot.promoted_genes, 0);
7312 assert_eq!(snapshot.promoted_capsules, 0);
7313
7314 let rendered = evo.render_metrics_prometheus().unwrap();
7315 assert!(rendered.contains("oris_evolution_replay_reasoning_avoided_total 1"));
7316 assert!(rendered.contains("oris_evolution_reasoning_avoided_tokens_total"));
7317 assert!(rendered.contains("oris_evolution_replay_fallback_cost_total"));
7318 assert!(rendered.contains("oris_evolution_replay_roi 1.000000"));
7319 assert!(rendered.contains("oris_evolution_replay_utilization_by_task_class_total"));
7320 assert!(rendered.contains("oris_evolution_replay_reasoning_avoided_by_task_class_total"));
7321 assert!(rendered.contains("oris_evolution_replay_success_rate 1.000000"));
7322 assert!(rendered.contains("oris_evolution_confidence_revalidations_total 0"));
7323 assert!(rendered.contains("oris_evolution_promotion_ratio 1.000000"));
7324 assert!(rendered.contains("oris_evolution_revoke_frequency_last_hour 1"));
7325 assert!(rendered.contains("oris_evolution_mutation_velocity_last_hour 1"));
7326 assert!(rendered.contains("oris_evolution_health 1"));
7327 }
7328
    /// The release-gate summary must aggregate one miss and one hit across
    /// both the task-class and remote-source dimensions, and the JSON
    /// rendering must expose the same totals.
    #[tokio::test]
    async fn replay_roi_release_gate_summary_aggregates_task_class_and_remote_source() {
        let (evo, _) = build_test_evo("roi-summary", "run-roi-summary", command_validator());
        let envelope = remote_publish_envelope(
            "node-roi",
            "run-remote-roi",
            "gene-roi",
            "capsule-roi",
            "mutation-roi",
            "roi-signal",
            "ROI.md",
            "# roi",
        );
        evo.import_remote_envelope(&envelope).unwrap();

        // A query with no signal overlap falls back to the planner.
        let miss = evo
            .replay_or_fallback(replay_input("entropy-hash-12345-no-overlap"))
            .await
            .unwrap();
        assert!(!miss.used_capsule);
        assert!(miss.fallback_to_planner);
        assert!(miss.select_evidence.candidates.is_empty());
        assert!(miss
            .detect_evidence
            .mismatch_reasons
            .iter()
            .any(|reason| reason == "no_candidate_after_select"));

        // The matching signal replays the imported remote capsule.
        let hit = evo
            .replay_or_fallback(replay_input("roi-signal"))
            .await
            .unwrap();
        assert!(hit.used_capsule);
        assert!(!hit.select_evidence.candidates.is_empty());
        assert_eq!(
            hit.select_evidence.selected_capsule_id.as_deref(),
            hit.capsule_id.as_deref()
        );

        // A one-hour window covers both attempts.
        let summary = evo.replay_roi_release_gate_summary(60 * 60).unwrap();
        assert_eq!(summary.replay_attempts_total, 2);
        assert_eq!(summary.replay_success_total, 1);
        assert_eq!(summary.replay_failure_total, 1);
        assert!(summary.reasoning_avoided_tokens_total > 0);
        assert!(summary.replay_fallback_cost_total > 0);
        assert!(summary
            .replay_task_classes
            .iter()
            .any(|entry| { entry.replay_success_total == 1 && entry.replay_failure_total == 0 }));
        // The hit is attributed to the remote sender that published the capsule.
        assert!(summary.replay_sources.iter().any(|source| {
            source.source_sender_id == "node-roi" && source.replay_success_total == 1
        }));

        let rendered = evo
            .render_replay_roi_release_gate_summary_json(60 * 60)
            .unwrap();
        assert!(rendered.contains("\"replay_attempts_total\": 2"));
        assert!(rendered.contains("\"source_sender_id\": \"node-roi\""));
    }
7388
    /// The release-gate contract must expose the same core metrics as the
    /// summary and, with default thresholds and too little replay activity,
    /// land on the fail-closed status naming each violated check.
    #[tokio::test]
    async fn replay_roi_release_gate_summary_contract_exposes_core_metrics_and_fail_closed_defaults(
    ) {
        let (evo, _) = build_test_evo("roi-contract", "run-roi-contract", command_validator());
        let envelope = remote_publish_envelope(
            "node-contract",
            "run-remote-contract",
            "gene-contract",
            "capsule-contract",
            "mutation-contract",
            "contract-signal",
            "CONTRACT.md",
            "# contract",
        );
        evo.import_remote_envelope(&envelope).unwrap();

        // Record one miss and one hit so hit/false-replay rates are 0.5 each.
        let miss = evo
            .replay_or_fallback(replay_input("entropy-hash-contract-no-overlap"))
            .await
            .unwrap();
        assert!(!miss.used_capsule);
        assert!(miss.fallback_to_planner);

        let hit = evo
            .replay_or_fallback(replay_input("contract-signal"))
            .await
            .unwrap();
        assert!(hit.used_capsule);

        let summary = evo.replay_roi_release_gate_summary(60 * 60).unwrap();
        let contract = evo
            .replay_roi_release_gate_contract(60 * 60, ReplayRoiReleaseGateThresholds::default())
            .unwrap();

        // The contract input mirrors the raw summary metrics.
        assert_eq!(contract.input.replay_attempts_total, 2);
        assert_eq!(contract.input.replay_success_total, 1);
        assert_eq!(contract.input.replay_failure_total, 1);
        assert_eq!(
            contract.input.reasoning_avoided_tokens,
            summary.reasoning_avoided_tokens_total
        );
        assert_eq!(
            contract.input.replay_fallback_cost_total,
            summary.replay_fallback_cost_total
        );
        assert!((contract.input.replay_hit_rate - 0.5).abs() < f64::EPSILON);
        assert!((contract.input.false_replay_rate - 0.5).abs() < f64::EPSILON);
        assert!((contract.input.replay_roi - summary.replay_roi).abs() < f64::EPSILON);
        assert!(contract.input.replay_safety);
        assert_eq!(
            contract.input.aggregation_dimensions,
            REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
                .iter()
                .map(|dimension| (*dimension).to_string())
                .collect::<Vec<_>>()
        );
        // Default thresholds and fail-closed policy are carried through.
        assert_eq!(
            contract.input.thresholds,
            ReplayRoiReleaseGateThresholds::default()
        );
        assert_eq!(
            contract.input.fail_closed_policy,
            ReplayRoiReleaseGateFailClosedPolicy::default()
        );
        // With only two attempts and a 0.5 hit rate, the gate fails closed
        // and names each violated check.
        assert_eq!(
            contract.output.status,
            ReplayRoiReleaseGateStatus::FailClosed
        );
        assert!(contract
            .output
            .failed_checks
            .iter()
            .any(|check| check == "min_replay_attempts_below_threshold"));
        assert!(contract
            .output
            .failed_checks
            .iter()
            .any(|check| check == "replay_hit_rate_below_threshold"));
        assert!(contract
            .output
            .failed_checks
            .iter()
            .any(|check| check == "false_replay_rate_above_threshold"));
        assert!(contract
            .output
            .evidence_refs
            .iter()
            .any(|evidence| evidence == "replay_roi_release_gate_summary"));
        assert!(contract.output.summary.contains("release gate fail_closed"));
    }
7479
    /// Custom thresholds are passed through to the contract, and with no
    /// replay activity at all the gate is "indeterminate (fail-closed)"
    /// rather than a pass or a fail. The JSON rendering reflects both.
    #[tokio::test]
    async fn replay_roi_release_gate_summary_contract_accepts_custom_thresholds_and_json() {
        let (evo, _) = build_test_evo(
            "roi-contract-thresholds",
            "run-roi-contract-thresholds",
            command_validator(),
        );
        let thresholds = ReplayRoiReleaseGateThresholds {
            min_replay_attempts: 8,
            min_replay_hit_rate: 0.75,
            max_false_replay_rate: 0.10,
            min_reasoning_avoided_tokens: 600,
            min_replay_roi: 0.30,
            require_replay_safety: true,
        };
        let contract = evo
            .replay_roi_release_gate_contract(60 * 60, thresholds.clone())
            .unwrap();
        assert_eq!(contract.input.thresholds, thresholds.clone());
        // No replays were run: all activity metrics are zero...
        assert_eq!(contract.input.replay_attempts_total, 0);
        assert_eq!(contract.input.replay_hit_rate, 0.0);
        assert_eq!(contract.input.false_replay_rate, 0.0);
        // ...so the safety signal reports no activity and safety cannot hold.
        assert!(!contract.input.replay_safety_signal.has_replay_activity);
        assert!(!contract.input.replay_safety);
        assert_eq!(
            contract.output.status,
            ReplayRoiReleaseGateStatus::Indeterminate
        );
        assert!(contract
            .output
            .failed_checks
            .iter()
            .any(|check| check == "missing_replay_attempts"));
        assert!(contract
            .output
            .summary
            .contains("indeterminate (fail-closed)"));

        let rendered = evo
            .render_replay_roi_release_gate_contract_json(60 * 60, thresholds)
            .unwrap();
        assert!(rendered.contains("\"min_replay_attempts\": 8"));
        assert!(rendered.contains("\"min_replay_hit_rate\": 0.75"));
        assert!(rendered.contains("\"status\": \"indeterminate\""));
    }
7525
7526 #[tokio::test]
7527 async fn replay_roi_release_gate_summary_window_boundary_filters_old_events() {
7528 let (evo, _) = build_test_evo("roi-window", "run-roi-window", command_validator());
7529 let envelope = remote_publish_envelope(
7530 "node-window",
7531 "run-remote-window",
7532 "gene-window",
7533 "capsule-window",
7534 "mutation-window",
7535 "window-signal",
7536 "WINDOW.md",
7537 "# window",
7538 );
7539 evo.import_remote_envelope(&envelope).unwrap();
7540
7541 let miss = evo
7542 .replay_or_fallback(replay_input("window-no-match-signal"))
7543 .await
7544 .unwrap();
7545 assert!(!miss.used_capsule);
7546 assert!(miss.fallback_to_planner);
7547
7548 let first_hit = evo
7549 .replay_or_fallback(replay_input("window-signal"))
7550 .await
7551 .unwrap();
7552 assert!(first_hit.used_capsule);
7553
7554 std::thread::sleep(std::time::Duration::from_secs(2));
7555
7556 let second_hit = evo
7557 .replay_or_fallback(replay_input("window-signal"))
7558 .await
7559 .unwrap();
7560 assert!(second_hit.used_capsule);
7561
7562 let narrow = evo.replay_roi_release_gate_summary(1).unwrap();
7563 assert_eq!(narrow.replay_attempts_total, 1);
7564 assert_eq!(narrow.replay_success_total, 1);
7565 assert_eq!(narrow.replay_failure_total, 0);
7566
7567 let all = evo.replay_roi_release_gate_summary(0).unwrap();
7568 assert_eq!(all.replay_attempts_total, 3);
7569 assert_eq!(all.replay_success_total, 2);
7570 assert_eq!(all.replay_failure_total, 1);
7571 }
7572
7573 fn fixed_release_gate_pass_fixture() -> ReplayRoiReleaseGateInputContract {
7574 ReplayRoiReleaseGateInputContract {
7575 generated_at: "2026-03-13T00:00:00Z".to_string(),
7576 window_seconds: 86_400,
7577 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7578 .iter()
7579 .map(|dimension| (*dimension).to_string())
7580 .collect(),
7581 replay_attempts_total: 4,
7582 replay_success_total: 3,
7583 replay_failure_total: 1,
7584 replay_hit_rate: 0.75,
7585 false_replay_rate: 0.25,
7586 reasoning_avoided_tokens: 480,
7587 replay_fallback_cost_total: 64,
7588 replay_roi: compute_replay_roi(480, 64),
7589 replay_safety: true,
7590 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7591 fail_closed_default: true,
7592 rollback_ready: true,
7593 audit_trail_complete: true,
7594 has_replay_activity: true,
7595 },
7596 thresholds: ReplayRoiReleaseGateThresholds::default(),
7597 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7598 }
7599 }
7600
7601 fn fixed_release_gate_fail_fixture() -> ReplayRoiReleaseGateInputContract {
7602 ReplayRoiReleaseGateInputContract {
7603 generated_at: "2026-03-13T00:00:00Z".to_string(),
7604 window_seconds: 86_400,
7605 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7606 .iter()
7607 .map(|dimension| (*dimension).to_string())
7608 .collect(),
7609 replay_attempts_total: 10,
7610 replay_success_total: 4,
7611 replay_failure_total: 6,
7612 replay_hit_rate: 0.4,
7613 false_replay_rate: 0.6,
7614 reasoning_avoided_tokens: 80,
7615 replay_fallback_cost_total: 400,
7616 replay_roi: compute_replay_roi(80, 400),
7617 replay_safety: false,
7618 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7619 fail_closed_default: true,
7620 rollback_ready: true,
7621 audit_trail_complete: true,
7622 has_replay_activity: true,
7623 },
7624 thresholds: ReplayRoiReleaseGateThresholds::default(),
7625 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7626 }
7627 }
7628
7629 fn fixed_release_gate_borderline_fixture() -> ReplayRoiReleaseGateInputContract {
7630 ReplayRoiReleaseGateInputContract {
7631 generated_at: "2026-03-13T00:00:00Z".to_string(),
7632 window_seconds: 3_600,
7633 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7634 .iter()
7635 .map(|dimension| (*dimension).to_string())
7636 .collect(),
7637 replay_attempts_total: 4,
7638 replay_success_total: 3,
7639 replay_failure_total: 1,
7640 replay_hit_rate: 0.75,
7641 false_replay_rate: 0.25,
7642 reasoning_avoided_tokens: 192,
7643 replay_fallback_cost_total: 173,
7644 replay_roi: 0.05,
7645 replay_safety: true,
7646 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7647 fail_closed_default: true,
7648 rollback_ready: true,
7649 audit_trail_complete: true,
7650 has_replay_activity: true,
7651 },
7652 thresholds: ReplayRoiReleaseGateThresholds {
7653 min_replay_attempts: 4,
7654 min_replay_hit_rate: 0.75,
7655 max_false_replay_rate: 0.25,
7656 min_reasoning_avoided_tokens: 192,
7657 min_replay_roi: 0.05,
7658 require_replay_safety: true,
7659 },
7660 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7661 }
7662 }
7663
7664 #[test]
7665 fn replay_roi_release_gate_summary_fixed_fixtures_cover_pass_fail_and_borderline() {
7666 let pass =
7667 evaluate_replay_roi_release_gate_contract_input(&fixed_release_gate_pass_fixture());
7668 let fail =
7669 evaluate_replay_roi_release_gate_contract_input(&fixed_release_gate_fail_fixture());
7670 let borderline = evaluate_replay_roi_release_gate_contract_input(
7671 &fixed_release_gate_borderline_fixture(),
7672 );
7673
7674 assert_eq!(pass.status, ReplayRoiReleaseGateStatus::Pass);
7675 assert!(pass.failed_checks.is_empty());
7676 assert_eq!(fail.status, ReplayRoiReleaseGateStatus::FailClosed);
7677 assert!(!fail.failed_checks.is_empty());
7678 assert_eq!(borderline.status, ReplayRoiReleaseGateStatus::Pass);
7679 assert!(borderline.failed_checks.is_empty());
7680 }
7681
    /// The machine-readable gate output must list failed checks and evidence
    /// refs in a stable, lexicographically sorted order, serialize with
    /// `status` first, and be byte-identical across repeated serializations.
    #[test]
    fn replay_roi_release_gate_summary_machine_readable_output_is_stable_and_sorted() {
        let output =
            evaluate_replay_roi_release_gate_contract_input(&fixed_release_gate_fail_fixture());

        // Failed checks come back sorted.
        assert_eq!(
            output.failed_checks,
            vec![
                "false_replay_rate_above_threshold".to_string(),
                "reasoning_avoided_tokens_below_threshold".to_string(),
                "replay_hit_rate_below_threshold".to_string(),
                "replay_roi_below_threshold".to_string(),
                "replay_safety_required".to_string(),
            ]
        );
        // Evidence refs pair each violated metric with its threshold, plus
        // the summary/provenance entries, all sorted.
        assert_eq!(
            output.evidence_refs,
            vec![
                "generated_at:2026-03-13T00:00:00Z".to_string(),
                "metric:false_replay_rate".to_string(),
                "metric:reasoning_avoided_tokens".to_string(),
                "metric:replay_hit_rate".to_string(),
                "metric:replay_roi".to_string(),
                "metric:replay_safety".to_string(),
                "replay_roi_release_gate_summary".to_string(),
                "threshold:max_false_replay_rate".to_string(),
                "threshold:min_reasoning_avoided_tokens".to_string(),
                "threshold:min_replay_hit_rate".to_string(),
                "threshold:min_replay_roi".to_string(),
                "threshold:require_replay_safety".to_string(),
                "window_seconds:86400".to_string(),
            ]
        );

        // Field order is fixed (`status` first) and serialization is stable.
        let rendered = serde_json::to_string(&output).unwrap();
        assert!(rendered.starts_with("{\"status\":\"fail_closed\",\"failed_checks\":"));
        assert_eq!(rendered, serde_json::to_string(&output).unwrap());
    }
7720
7721 #[test]
7722 fn replay_roi_release_gate_summary_evaluator_passes_with_threshold_compliance() {
7723 let input = ReplayRoiReleaseGateInputContract {
7724 generated_at: Utc::now().to_rfc3339(),
7725 window_seconds: 86_400,
7726 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7727 .iter()
7728 .map(|dimension| (*dimension).to_string())
7729 .collect(),
7730 replay_attempts_total: 10,
7731 replay_success_total: 9,
7732 replay_failure_total: 1,
7733 replay_hit_rate: 0.9,
7734 false_replay_rate: 0.1,
7735 reasoning_avoided_tokens: 960,
7736 replay_fallback_cost_total: 64,
7737 replay_roi: compute_replay_roi(960, 64),
7738 replay_safety: true,
7739 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7740 fail_closed_default: true,
7741 rollback_ready: true,
7742 audit_trail_complete: true,
7743 has_replay_activity: true,
7744 },
7745 thresholds: ReplayRoiReleaseGateThresholds::default(),
7746 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7747 };
7748
7749 let output = evaluate_replay_roi_release_gate_contract_input(&input);
7750 assert_eq!(output.status, ReplayRoiReleaseGateStatus::Pass);
7751 assert!(output.failed_checks.is_empty());
7752 assert!(output.summary.contains("release gate pass"));
7753 }
7754
7755 #[test]
7756 fn replay_roi_release_gate_summary_evaluator_fail_closed_on_threshold_violations() {
7757 let input = ReplayRoiReleaseGateInputContract {
7758 generated_at: Utc::now().to_rfc3339(),
7759 window_seconds: 86_400,
7760 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7761 .iter()
7762 .map(|dimension| (*dimension).to_string())
7763 .collect(),
7764 replay_attempts_total: 10,
7765 replay_success_total: 4,
7766 replay_failure_total: 6,
7767 replay_hit_rate: 0.4,
7768 false_replay_rate: 0.6,
7769 reasoning_avoided_tokens: 80,
7770 replay_fallback_cost_total: 400,
7771 replay_roi: compute_replay_roi(80, 400),
7772 replay_safety: false,
7773 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7774 fail_closed_default: true,
7775 rollback_ready: true,
7776 audit_trail_complete: true,
7777 has_replay_activity: true,
7778 },
7779 thresholds: ReplayRoiReleaseGateThresholds::default(),
7780 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7781 };
7782
7783 let output = evaluate_replay_roi_release_gate_contract_input(&input);
7784 assert_eq!(output.status, ReplayRoiReleaseGateStatus::FailClosed);
7785 assert!(output
7786 .failed_checks
7787 .iter()
7788 .any(|check| check == "replay_hit_rate_below_threshold"));
7789 assert!(output
7790 .failed_checks
7791 .iter()
7792 .any(|check| check == "false_replay_rate_above_threshold"));
7793 assert!(output
7794 .failed_checks
7795 .iter()
7796 .any(|check| check == "replay_roi_below_threshold"));
7797 assert!(output.summary.contains("release gate fail_closed"));
7798 }
7799
7800 #[test]
7801 fn replay_roi_release_gate_summary_evaluator_marks_missing_data_indeterminate() {
7802 let input = ReplayRoiReleaseGateInputContract {
7803 generated_at: String::new(),
7804 window_seconds: 86_400,
7805 aggregation_dimensions: REPLAY_RELEASE_GATE_AGGREGATION_DIMENSIONS
7806 .iter()
7807 .map(|dimension| (*dimension).to_string())
7808 .collect(),
7809 replay_attempts_total: 0,
7810 replay_success_total: 0,
7811 replay_failure_total: 0,
7812 replay_hit_rate: 0.0,
7813 false_replay_rate: 0.0,
7814 reasoning_avoided_tokens: 0,
7815 replay_fallback_cost_total: 0,
7816 replay_roi: 0.0,
7817 replay_safety: false,
7818 replay_safety_signal: ReplayRoiReleaseGateSafetySignal {
7819 fail_closed_default: true,
7820 rollback_ready: true,
7821 audit_trail_complete: true,
7822 has_replay_activity: false,
7823 },
7824 thresholds: ReplayRoiReleaseGateThresholds::default(),
7825 fail_closed_policy: ReplayRoiReleaseGateFailClosedPolicy::default(),
7826 };
7827
7828 let output = evaluate_replay_roi_release_gate_contract_input(&input);
7829 assert_eq!(output.status, ReplayRoiReleaseGateStatus::Indeterminate);
7830 assert!(output
7831 .failed_checks
7832 .iter()
7833 .any(|check| check == "missing_generated_at"));
7834 assert!(output
7835 .failed_checks
7836 .iter()
7837 .any(|check| check == "missing_replay_attempts"));
7838 assert!(output
7839 .summary
7840 .contains("release gate indeterminate (fail-closed)"));
7841 }
7842
    /// A promoted gene whose last update is 48 hours old should have its
    /// confidence decay below `MIN_REPLAY_CONFIDENCE` and therefore be
    /// flagged for revalidation together with its capsule.
    #[test]
    fn stale_replay_targets_require_confidence_revalidation() {
        let now = Utc::now();
        // Hand-built projection: one promoted gene + capsule pair whose
        // `last_updated_at` is 48h in the past.
        let projection = EvolutionProjection {
            genes: vec![Gene {
                id: "gene-stale".into(),
                signals: vec!["missing readme".into()],
                strategy: vec!["README.md".into()],
                validation: vec!["test".into()],
                state: AssetState::Promoted,
            }],
            capsules: vec![Capsule {
                id: "capsule-stale".into(),
                gene_id: "gene-stale".into(),
                mutation_id: "mutation-stale".into(),
                run_id: "run-stale".into(),
                diff_hash: "hash".into(),
                confidence: 0.8,
                env: replay_input("missing readme").env,
                outcome: Outcome {
                    success: true,
                    validation_profile: "test".into(),
                    validation_duration_ms: 1,
                    changed_files: vec!["README.md".into()],
                    validator_hash: "validator".into(),
                    lines_changed: 1,
                    replay_verified: false,
                },
                state: AssetState::Promoted,
            }],
            reuse_counts: BTreeMap::from([("gene-stale".into(), 1)]),
            attempt_counts: BTreeMap::from([("gene-stale".into(), 1)]),
            last_updated_at: BTreeMap::from([(
                "gene-stale".into(),
                (now - Duration::hours(48)).to_rfc3339(),
            )]),
            spec_ids_by_gene: BTreeMap::new(),
        };

        let targets = stale_replay_revalidation_targets(&projection, now);

        // Exactly the stale gene/capsule pair is flagged.
        assert_eq!(targets.len(), 1);
        assert_eq!(targets[0].gene_id, "gene-stale");
        assert_eq!(targets[0].capsule_ids, vec!["capsule-stale".to_string()]);
        // 48h of decay pushes the stored 0.8 confidence under the replay floor.
        assert!(targets[0].decayed_confidence < MIN_REPLAY_CONFIDENCE);
    }
7889
    /// When two remote capsules match the same signal, replay should pick
    /// the one whose environment fingerprint matches the local input.
    #[tokio::test]
    async fn remote_replay_prefers_closest_environment_match() {
        let (evo, _) = build_test_evo("remote-env", "run-remote-env", command_validator());
        let input = replay_input("env-signal");

        // Envelope A shares the input's env fingerprint exactly.
        let envelope_a = remote_publish_envelope_with_env(
            "node-a",
            "run-remote-a",
            "gene-a",
            "capsule-a",
            "mutation-a",
            "env-signal",
            "A.md",
            "# from a",
            input.env.clone(),
        );
        // Envelope B matches the signal but comes from a mismatched
        // rustc/lockfile/target/os environment.
        let envelope_b = remote_publish_envelope_with_env(
            "node-b",
            "run-remote-b",
            "gene-b",
            "capsule-b",
            "mutation-b",
            "env-signal",
            "B.md",
            "# from b",
            EnvFingerprint {
                rustc_version: "old-rustc".into(),
                cargo_lock_hash: "other-lock".into(),
                target_triple: "aarch64-apple-darwin".into(),
                os: "linux".into(),
            },
        );

        evo.import_remote_envelope(&envelope_a).unwrap();
        evo.import_remote_envelope(&envelope_b).unwrap();

        let decision = evo.replay_or_fallback(input).await.unwrap();

        // The environment-matching capsule wins; no planner fallback.
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-a".into()));
        assert!(!decision.fallback_to_planner);
    }
7932
    /// Cold-start scoring for quarantined remote candidates caps the
    /// query-coverage contribution: a candidate covering the query via two
    /// overlapping signals must not outscore an exact single-signal match —
    /// both max out at 1.0.
    #[test]
    fn remote_cold_start_scoring_caps_distinct_query_coverage() {
        let (evo, _) = build_test_evo("remote-score", "run-remote-score", command_validator());
        let input = replay_input("missing readme");

        // Candidate whose signal equals the query exactly.
        let exact = remote_publish_envelope_with_signals(
            "node-exact",
            "run-remote-exact",
            "gene-exact",
            "capsule-exact",
            "mutation-exact",
            vec!["missing readme".into()],
            vec!["missing readme".into()],
            "EXACT.md",
            "# exact",
            input.env.clone(),
        );
        // Candidate covering the query via two partial signals.
        let overlapping = remote_publish_envelope_with_signals(
            "node-overlap",
            "run-remote-overlap",
            "gene-overlap",
            "capsule-overlap",
            "mutation-overlap",
            vec!["missing readme".into()],
            vec!["missing".into(), "readme".into()],
            "OVERLAP.md",
            "# overlap",
            input.env.clone(),
        );

        evo.import_remote_envelope(&exact).unwrap();
        evo.import_remote_envelope(&overlapping).unwrap();

        let candidates = quarantined_remote_exact_match_candidates(evo.store.as_ref(), &input);
        let exact_candidate = candidates
            .iter()
            .find(|candidate| candidate.gene.id == "gene-exact")
            .unwrap();
        let overlap_candidate = candidates
            .iter()
            .find(|candidate| candidate.gene.id == "gene-overlap")
            .unwrap();

        // Both hit the cap; no candidate can ever exceed 1.0.
        assert_eq!(exact_candidate.score, 1.0);
        assert_eq!(overlap_candidate.score, 1.0);
        assert!(candidates.iter().all(|candidate| candidate.score <= 1.0));
    }
7980
    /// When the replay input carries a `spec_id`, exact-match candidate
    /// lookup must honor `SpecLinked` events: a mutation declared without a
    /// spec id still matches once a later event links it to the spec.
    #[test]
    fn exact_match_candidates_respect_spec_linked_events() {
        let (evo, _) = build_test_evo(
            "spec-linked-filter",
            "run-spec-linked-filter",
            command_validator(),
        );
        let mut input = replay_input("missing readme");
        input.spec_id = Some("spec-readme".into());

        // The mutation itself carries no spec id; the association is only
        // established by the SpecLinked event appended below.
        let mut mutation = sample_mutation();
        mutation.intent.id = "mutation-spec-linked".into();
        mutation.intent.spec_id = None;
        let gene = Gene {
            id: "gene-spec-linked".into(),
            signals: vec!["missing readme".into()],
            strategy: vec!["README.md".into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        };
        let capsule = Capsule {
            id: "capsule-spec-linked".into(),
            gene_id: gene.id.clone(),
            mutation_id: mutation.intent.id.clone(),
            run_id: "run-spec-linked".into(),
            diff_hash: mutation.artifact.content_hash.clone(),
            confidence: 0.9,
            env: input.env.clone(),
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec!["README.md".into()],
                validator_hash: "validator-hash".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        };

        // Seed the event log: declare, project, commit, then link the spec.
        evo.store
            .append_event(EvolutionEvent::MutationDeclared { mutation })
            .unwrap();
        evo.store
            .append_event(EvolutionEvent::GeneProjected { gene })
            .unwrap();
        evo.store
            .append_event(EvolutionEvent::CapsuleCommitted { capsule })
            .unwrap();
        evo.store
            .append_event(EvolutionEvent::SpecLinked {
                mutation_id: "mutation-spec-linked".into(),
                spec_id: "spec-readme".into(),
            })
            .unwrap();

        // The spec-filtered lookup still finds the linked gene.
        let candidates = exact_match_candidates(evo.store.as_ref(), &input);
        assert_eq!(candidates.len(), 1);
        assert_eq!(candidates[0].gene.id, "gene-spec-linked");
    }
8041
    /// Remote assets are imported in quarantine, move to shadow-validated
    /// after the first successful replay, and are promoted only after the
    /// second. Promoted-asset export must stay empty until promotion.
    #[tokio::test]
    async fn remote_capsule_advances_from_quarantine_to_shadow_then_promoted() {
        let (evo, store) = build_test_evo(
            "remote-quarantine",
            "run-remote-quarantine",
            command_validator(),
        );
        let envelope = remote_publish_envelope(
            "node-remote",
            "run-remote-quarantine",
            "gene-remote",
            "capsule-remote",
            "mutation-remote",
            "remote-signal",
            "REMOTE.md",
            "# from remote",
        );

        evo.import_remote_envelope(&envelope).unwrap();

        // Stage 1: freshly imported assets are quarantined and not exported.
        let before_replay = store.rebuild_projection().unwrap();
        let imported_gene = before_replay
            .genes
            .iter()
            .find(|gene| gene.id == "gene-remote")
            .unwrap();
        let imported_capsule = before_replay
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-remote")
            .unwrap();
        assert_eq!(imported_gene.state, AssetState::Quarantined);
        assert_eq!(imported_capsule.state, AssetState::Quarantined);
        let exported_before_replay =
            export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
        assert!(exported_before_replay.assets.is_empty());

        // Stage 2: first successful replay advances to shadow-validated.
        let first_decision = evo
            .replay_or_fallback(replay_input("remote-signal"))
            .await
            .unwrap();

        assert!(first_decision.used_capsule);
        assert_eq!(first_decision.capsule_id, Some("capsule-remote".into()));

        let after_first_replay = store.rebuild_projection().unwrap();
        let shadow_gene = after_first_replay
            .genes
            .iter()
            .find(|gene| gene.id == "gene-remote")
            .unwrap();
        let shadow_capsule = after_first_replay
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-remote")
            .unwrap();
        assert_eq!(shadow_gene.state, AssetState::ShadowValidated);
        assert_eq!(shadow_capsule.state, AssetState::ShadowValidated);
        // Still nothing exportable: shadow-validated is not promoted.
        let exported_after_first_replay =
            export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
        assert!(exported_after_first_replay.assets.is_empty());

        // Stage 3: second successful replay promotes gene and capsule.
        let second_decision = evo
            .replay_or_fallback(replay_input("remote-signal"))
            .await
            .unwrap();
        assert!(second_decision.used_capsule);
        assert_eq!(second_decision.capsule_id, Some("capsule-remote".into()));

        let after_second_replay = store.rebuild_projection().unwrap();
        let promoted_gene = after_second_replay
            .genes
            .iter()
            .find(|gene| gene.id == "gene-remote")
            .unwrap();
        let promoted_capsule = after_second_replay
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-remote")
            .unwrap();
        assert_eq!(promoted_gene.state, AssetState::Promoted);
        assert_eq!(promoted_capsule.state, AssetState::Promoted);
        // Now the export contains the promoted asset set, including the
        // original mutation payload needed for remote replay.
        let exported_after_second_replay =
            export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
        assert_eq!(exported_after_second_replay.assets.len(), 3);
        assert!(exported_after_second_replay
            .assets
            .iter()
            .any(|asset| matches!(
                asset,
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared { .. }
                }
            )));
    }
8137
8138 #[tokio::test]
8139 async fn publish_local_assets_include_mutation_payload_for_remote_replay() {
8140 let (source, source_store) = build_test_evo(
8141 "remote-publish-export",
8142 "run-remote-publish-export",
8143 command_validator(),
8144 );
8145 source
8146 .capture_successful_mutation(&"run-remote-publish-export".into(), sample_mutation())
8147 .await
8148 .unwrap();
8149 let envelope = EvolutionNetworkNode::new(source_store.clone())
8150 .publish_local_assets("node-source")
8151 .unwrap();
8152 assert!(envelope.assets.iter().any(|asset| matches!(
8153 asset,
8154 NetworkAsset::EvolutionEvent {
8155 event: EvolutionEvent::MutationDeclared { mutation }
8156 } if mutation.intent.id == "mutation-1"
8157 )));
8158
8159 let (remote, _) = build_test_evo(
8160 "remote-publish-import",
8161 "run-remote-publish-import",
8162 command_validator(),
8163 );
8164 remote.import_remote_envelope(&envelope).unwrap();
8165
8166 let decision = remote
8167 .replay_or_fallback(replay_input("missing readme"))
8168 .await
8169 .unwrap();
8170
8171 assert!(decision.used_capsule);
8172 assert!(!decision.fallback_to_planner);
8173 }
8174
    // A successful import of a well-formed envelope must leave an accepted
    // ManifestValidated audit event carrying the sender/publisher identity
    // and the validated asset ids.
    #[tokio::test]
    async fn import_remote_envelope_records_manifest_validation_event() {
        let (source, source_store) = build_test_evo(
            "remote-manifest-success-source",
            "run-remote-manifest-success-source",
            command_validator(),
        );
        source
            .capture_successful_mutation(
                &"run-remote-manifest-success-source".into(),
                sample_mutation(),
            )
            .await
            .unwrap();
        let envelope = EvolutionNetworkNode::new(source_store.clone())
            .publish_local_assets("node-source")
            .unwrap();

        let (remote, remote_store) = build_test_evo(
            "remote-manifest-success-remote",
            "run-remote-manifest-success-remote",
            command_validator(),
        );
        remote.import_remote_envelope(&envelope).unwrap();

        // Scan the event log from the start and expect an accepted
        // manifest-validation record attributed to "node-source".
        let events = remote_store.scan(1).unwrap();
        assert!(events.iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::ManifestValidated {
                accepted: true,
                reason,
                sender_id: Some(sender_id),
                publisher: Some(publisher),
                asset_ids,
            } if reason == "manifest validated"
                && sender_id == "node-source"
                && publisher == "node-source"
                && !asset_ids.is_empty()
        )));
    }
8215
    // Tampering with the manifest's asset hash must make the import fail,
    // and the rejection must still be auditable as a ManifestValidated event
    // with accepted == false.
    #[test]
    fn import_remote_envelope_rejects_invalid_manifest_and_records_audit_event() {
        let (remote, remote_store) = build_test_evo(
            "remote-manifest-invalid",
            "run-remote-manifest-invalid",
            command_validator(),
        );
        let mut envelope = remote_publish_envelope(
            "node-remote",
            "run-remote-manifest-invalid",
            "gene-remote",
            "capsule-remote",
            "mutation-remote",
            "manifest-signal",
            "MANIFEST.md",
            "# drift",
        );
        // Corrupt the manifest hash, then recompute the outer content hash
        // so the envelope itself still passes integrity checks and the
        // failure is attributable to the manifest specifically.
        if let Some(manifest) = envelope.manifest.as_mut() {
            manifest.asset_hash = "tampered-hash".to_string();
        }
        envelope.content_hash = envelope.compute_content_hash();

        let error = remote.import_remote_envelope(&envelope).unwrap_err();
        assert!(error.to_string().contains("manifest"));

        // The rejection is recorded for audit with the mismatch reason and
        // the claimed sender/publisher identity.
        let events = remote_store.scan(1).unwrap();
        assert!(events.iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::ManifestValidated {
                accepted: false,
                reason,
                sender_id: Some(sender_id),
                publisher: Some(publisher),
                asset_ids,
            } if reason.contains("manifest asset_hash mismatch")
                && sender_id == "node-remote"
                && publisher == "node-remote"
                && !asset_ids.is_empty()
        )));
    }
8256
8257 #[tokio::test]
8258 async fn fetch_assets_include_mutation_payload_for_remote_replay() {
8259 let (evo, store) = build_test_evo(
8260 "remote-fetch-export",
8261 "run-remote-fetch",
8262 command_validator(),
8263 );
8264 evo.capture_successful_mutation(&"run-remote-fetch".into(), sample_mutation())
8265 .await
8266 .unwrap();
8267
8268 let response = EvolutionNetworkNode::new(store.clone())
8269 .fetch_assets(
8270 "node-source",
8271 &FetchQuery {
8272 sender_id: "node-client".into(),
8273 signals: vec!["missing readme".into()],
8274 since_cursor: None,
8275 resume_token: None,
8276 },
8277 )
8278 .unwrap();
8279
8280 assert!(response.assets.iter().any(|asset| matches!(
8281 asset,
8282 NetworkAsset::EvolutionEvent {
8283 event: EvolutionEvent::MutationDeclared { mutation }
8284 } if mutation.intent.id == "mutation-1"
8285 )));
8286 assert!(response
8287 .assets
8288 .iter()
8289 .any(|asset| matches!(asset, NetworkAsset::Gene { .. })));
8290 assert!(response
8291 .assets
8292 .iter()
8293 .any(|asset| matches!(asset, NetworkAsset::Capsule { .. })));
8294 }
8295
    // Delta sync: the first fetch hands back a cursor and a resume token;
    // a later fetch presenting either must return only assets recorded
    // after that point (here: gene-delta-b but not gene-delta-a).
    #[test]
    fn fetch_assets_delta_sync_supports_since_cursor_and_resume_token() {
        let store_root =
            std::env::temp_dir().join(format!("oris-evokernel-fetch-delta-store-{}", next_id("t")));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let node = EvolutionNetworkNode::new(store.clone());
        node.record_reported_experience(
            "delta-agent",
            "gene-delta-a",
            vec!["delta.signal".into()],
            vec![
                "task_class=delta.signal".into(),
                "task_label=delta replay".into(),
            ],
            vec!["a2a.tasks.report".into()],
        )
        .unwrap();

        // Baseline fetch: captures the cursor and resume token that mark
        // "everything up to and including gene-delta-a".
        let first = node
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "delta-agent".into(),
                    signals: vec!["delta.signal".into()],
                    since_cursor: None,
                    resume_token: None,
                },
            )
            .unwrap();
        let first_cursor = first.next_cursor.clone().expect("first next_cursor");
        let first_token = first.resume_token.clone().expect("first resume_token");
        assert!(first.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-a")
        ));

        // Simulated restart: a fresh node over the same store records a
        // second experience.
        let restarted = EvolutionNetworkNode::new(store.clone());
        restarted
            .record_reported_experience(
                "delta-agent",
                "gene-delta-b",
                vec!["delta.signal".into()],
                vec![
                    "task_class=delta.signal".into(),
                    "task_label=delta replay".into(),
                ],
                vec!["a2a.tasks.report".into()],
            )
            .unwrap();

        // Resuming from the token must yield only the new gene, and the
        // sync audit must echo the cursor the token resolved to.
        let from_token = restarted
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "delta-agent".into(),
                    signals: vec!["delta.signal".into()],
                    since_cursor: None,
                    resume_token: Some(first_token),
                },
            )
            .unwrap();
        assert!(from_token.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-b")
        ));
        assert!(!from_token.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-a")
        ));
        assert_eq!(
            from_token.sync_audit.requested_cursor,
            Some(first_cursor.clone())
        );
        assert!(from_token.sync_audit.applied_count >= 1);

        // The explicit since_cursor path must behave the same way.
        let from_cursor = restarted
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "delta-agent".into(),
                    signals: vec!["delta.signal".into()],
                    since_cursor: Some(first_cursor),
                    resume_token: None,
                },
            )
            .unwrap();
        assert!(from_cursor.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-b")
        ));
    }
8387
8388 #[test]
8389 fn partial_remote_import_keeps_publisher_for_already_imported_assets() {
8390 let store_root = std::env::temp_dir().join(format!(
8391 "oris-evokernel-remote-partial-store-{}",
8392 std::process::id()
8393 ));
8394 if store_root.exists() {
8395 fs::remove_dir_all(&store_root).unwrap();
8396 }
8397 let store: Arc<dyn EvolutionStore> = Arc::new(FailOnAppendStore::new(store_root, 5));
8398 let evo = build_test_evo_with_store(
8399 "remote-partial",
8400 "run-remote-partial",
8401 command_validator(),
8402 store.clone(),
8403 );
8404 let envelope = remote_publish_envelope(
8405 "node-partial",
8406 "run-remote-partial",
8407 "gene-partial",
8408 "capsule-partial",
8409 "mutation-partial",
8410 "partial-signal",
8411 "PARTIAL.md",
8412 "# partial",
8413 );
8414
8415 let result = evo.import_remote_envelope(&envelope);
8416
8417 assert!(matches!(result, Err(EvoKernelError::Store(_))));
8418 let projection = store.rebuild_projection().unwrap();
8419 assert!(projection
8420 .genes
8421 .iter()
8422 .any(|gene| gene.id == "gene-partial"));
8423 assert!(projection.capsules.is_empty());
8424 let publishers = evo.remote_publishers.lock().unwrap();
8425 assert_eq!(
8426 publishers.get("gene-partial").map(String::as_str),
8427 Some("node-partial")
8428 );
8429 }
8430
    // After a partial import failure, retrying the same envelope must import
    // only the assets that are still missing, and no event may end up
    // duplicated in the log.
    #[test]
    fn retry_remote_import_after_partial_failure_only_imports_missing_assets() {
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-remote-partial-retry-store-{}",
            next_id("t")
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        // Store configured to fail the 5th append, so the first import stops
        // after the gene but before the capsule (asserted below).
        let store: Arc<dyn EvolutionStore> = Arc::new(FailOnAppendStore::new(store_root, 5));
        let evo = build_test_evo_with_store(
            "remote-partial-retry",
            "run-remote-partial-retry",
            command_validator(),
            store.clone(),
        );
        let envelope = remote_publish_envelope(
            "node-partial",
            "run-remote-partial-retry",
            "gene-partial-retry",
            "capsule-partial-retry",
            "mutation-partial-retry",
            "partial-retry-signal",
            "PARTIAL_RETRY.md",
            "# partial retry",
        );

        let first = evo.import_remote_envelope(&envelope);
        assert!(matches!(first, Err(EvoKernelError::Store(_))));

        // Retry with the identical envelope: only the capsule is left to
        // import.
        let retry = evo.import_remote_envelope(&envelope).unwrap();

        assert_eq!(retry.imported_asset_ids, vec!["capsule-partial-retry"]);
        let projection = store.rebuild_projection().unwrap();
        let gene = projection
            .genes
            .iter()
            .find(|gene| gene.id == "gene-partial-retry")
            .unwrap();
        assert_eq!(gene.state, AssetState::Quarantined);
        let capsule = projection
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-partial-retry")
            .unwrap();
        assert_eq!(capsule.state, AssetState::Quarantined);
        // The retry must not double-count the import attempt.
        assert_eq!(projection.attempt_counts["gene-partial-retry"], 1);

        // Each declaration/projection/commit event appears exactly once
        // across both import attempts.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::MutationDeclared { mutation }
                        if mutation.intent.id == "mutation-partial-retry"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::GeneProjected { gene } if gene.id == "gene-partial-retry"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::CapsuleCommitted { capsule }
                        if capsule.id == "capsule-partial-retry"
                    )
                })
                .count(),
            1
        );
    }
8519
    // Re-importing an already-imported envelope must be a pure no-op: it
    // must not reset locally earned ShadowValidated state, must not bump
    // attempt counts, must not duplicate events, and must not block the
    // eventual promotion on the next successful replay.
    #[tokio::test]
    async fn duplicate_remote_import_does_not_requarantine_locally_validated_assets() {
        let (evo, store) = build_test_evo(
            "remote-idempotent",
            "run-remote-idempotent",
            command_validator(),
        );
        let envelope = remote_publish_envelope(
            "node-idempotent",
            "run-remote-idempotent",
            "gene-idempotent",
            "capsule-idempotent",
            "mutation-idempotent",
            "idempotent-signal",
            "IDEMPOTENT.md",
            "# idempotent",
        );

        let first = evo.import_remote_envelope(&envelope).unwrap();
        assert_eq!(
            first.imported_asset_ids,
            vec!["gene-idempotent", "capsule-idempotent"]
        );

        // One successful replay lifts the imported assets to ShadowValidated.
        let decision = evo
            .replay_or_fallback(replay_input("idempotent-signal"))
            .await
            .unwrap();
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-idempotent".into()));

        let projection_before = store.rebuild_projection().unwrap();
        let attempts_before = projection_before.attempt_counts["gene-idempotent"];
        let gene_before = projection_before
            .genes
            .iter()
            .find(|gene| gene.id == "gene-idempotent")
            .unwrap();
        assert_eq!(gene_before.state, AssetState::ShadowValidated);
        let capsule_before = projection_before
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-idempotent")
            .unwrap();
        assert_eq!(capsule_before.state, AssetState::ShadowValidated);

        // The duplicate import reports nothing imported...
        let second = evo.import_remote_envelope(&envelope).unwrap();
        assert!(second.imported_asset_ids.is_empty());

        // ...and leaves state and attempt counts untouched.
        let projection_after = store.rebuild_projection().unwrap();
        assert_eq!(
            projection_after.attempt_counts["gene-idempotent"],
            attempts_before
        );
        let gene_after = projection_after
            .genes
            .iter()
            .find(|gene| gene.id == "gene-idempotent")
            .unwrap();
        assert_eq!(gene_after.state, AssetState::ShadowValidated);
        let capsule_after = projection_after
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-idempotent")
            .unwrap();
        assert_eq!(capsule_after.state, AssetState::ShadowValidated);

        // A second successful replay still promotes as usual.
        let third_decision = evo
            .replay_or_fallback(replay_input("idempotent-signal"))
            .await
            .unwrap();
        assert!(third_decision.used_capsule);
        assert_eq!(third_decision.capsule_id, Some("capsule-idempotent".into()));

        let projection_promoted = store.rebuild_projection().unwrap();
        let promoted_gene = projection_promoted
            .genes
            .iter()
            .find(|gene| gene.id == "gene-idempotent")
            .unwrap();
        let promoted_capsule = projection_promoted
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-idempotent")
            .unwrap();
        assert_eq!(promoted_gene.state, AssetState::Promoted);
        assert_eq!(promoted_capsule.state, AssetState::Promoted);

        // Each declaration/projection/commit event appears exactly once in
        // the log despite the duplicate import.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::MutationDeclared { mutation }
                        if mutation.intent.id == "mutation-idempotent"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::GeneProjected { gene } if gene.id == "gene-idempotent"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::CapsuleCommitted { capsule }
                        if capsule.id == "capsule-idempotent"
                    )
                })
                .count(),
            1
        );

        // Sync-audit bookkeeping: the duplicate import scans everything,
        // applies nothing, skips everything, and still hands back a resume
        // token.
        assert_eq!(first.sync_audit.scanned_count, envelope.assets.len());
        assert_eq!(first.sync_audit.failed_count, 0);
        assert_eq!(second.sync_audit.applied_count, 0);
        assert_eq!(second.sync_audit.skipped_count, envelope.assets.len());
        assert!(second.resume_token.is_some());
    }
8654
8655 #[tokio::test]
8656 async fn insufficient_evu_blocks_publish_but_not_local_replay() {
8657 let (evo, _) = build_test_evo("stake-gate", "run-stake", command_validator());
8658 let capsule = evo
8659 .capture_successful_mutation(&"run-stake".into(), sample_mutation())
8660 .await
8661 .unwrap();
8662 let publish = evo.export_promoted_assets("node-local");
8663 assert!(matches!(publish, Err(EvoKernelError::Validation(_))));
8664
8665 let decision = evo
8666 .replay_or_fallback(replay_input("missing readme"))
8667 .await
8668 .unwrap();
8669 assert!(decision.used_capsule);
8670 assert_eq!(decision.capsule_id, Some(capsule.id));
8671 }
8672
    // Replay validation failures do not revoke a promoted gene outright:
    // they are recorded and degrade the capsule's replay confidence, so a
    // later replay falls back to the planner once confidence drops below
    // the replay threshold.
    //
    // NOTE(review): the test name claims the gene is revoked immediately,
    // but the assertions expect the opposite (state stays Promoted, no
    // GeneRevoked event). The name looks stale relative to the asserted
    // confidence-decay behavior — consider renaming.
    #[tokio::test]
    async fn second_replay_validation_failure_revokes_gene_immediately() {
        // Capture a capsule with a passing validator first.
        let (capturer, store) = build_test_evo("revoke-replay", "run-capture", command_validator());
        let capsule = capturer
            .capture_successful_mutation(&"run-capture".into(), sample_mutation())
            .await
            .unwrap();

        // Replay through a kernel (same store) whose validator always fails.
        let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
        let failing_replay = build_test_evo_with_store(
            "revoke-replay",
            "run-replay-fail",
            failing_validator,
            store.clone(),
        );

        let first = failing_replay
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();
        let second = failing_replay
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();

        assert!(!first.used_capsule);
        assert!(first.fallback_to_planner);
        assert!(!second.used_capsule);
        assert!(second.fallback_to_planner);

        // The assets remain Promoted despite the failed validations.
        let projection = store.rebuild_projection().unwrap();
        let gene = projection
            .genes
            .iter()
            .find(|gene| gene.id == capsule.gene_id)
            .unwrap();
        assert_eq!(gene.state, AssetState::Promoted);
        let committed_capsule = projection
            .capsules
            .iter()
            .find(|current| current.id == capsule.id)
            .unwrap();
        assert_eq!(committed_capsule.state, AssetState::Promoted);

        // Exactly one ValidationFailed event is expected even after two
        // failed replays — presumably the second attempt is rejected before
        // validation runs (confidence already degraded); confirm against the
        // replay path. No GeneRevoked event may appear at all.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::ValidationFailed {
                            gene_id: Some(gene_id),
                            ..
                        } if gene_id == &capsule.gene_id
                    )
                })
                .count(),
            1
        );
        assert!(!events.iter().any(|stored| {
            matches!(
                &stored.event,
                EvolutionEvent::GeneRevoked { gene_id, .. } if gene_id == &capsule.gene_id
            )
        }));

        // Even a fresh kernel with a passing validator now refuses to
        // replay: decayed confidence is below the replay threshold.
        let recovered = build_test_evo_with_store(
            "revoke-replay",
            "run-replay-check",
            command_validator(),
            store.clone(),
        );
        let after_revoke = recovered
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();
        assert!(!after_revoke.used_capsule);
        assert!(after_revoke.fallback_to_planner);
        assert!(after_revoke.reason.contains("below replay threshold"));
    }
8754
    // When two remote publishers offer capsules for the same signal,
    // selection prefers the higher-reputation publisher (node-b), and a
    // successful reuse credits that publisher with the configured reuse
    // reward while keeping its selection bias above the weaker node.
    #[tokio::test]
    async fn remote_reuse_success_rewards_publisher_and_biases_selection() {
        // Seed the ledger with an obviously weaker node-a and stronger
        // node-b reputation.
        let ledger = Arc::new(Mutex::new(EvuLedger {
            accounts: vec![],
            reputations: vec![
                oris_economics::ReputationRecord {
                    node_id: "node-a".into(),
                    publish_success_rate: 0.4,
                    validator_accuracy: 0.4,
                    reuse_impact: 0,
                },
                oris_economics::ReputationRecord {
                    node_id: "node-b".into(),
                    publish_success_rate: 0.95,
                    validator_accuracy: 0.95,
                    reuse_impact: 8,
                },
            ],
        }));
        let (evo, _) = build_test_evo("remote-success", "run-remote", command_validator());
        let evo = evo.with_economics(ledger.clone());

        // Both publishers target the same signal with distinct assets.
        let envelope_a = remote_publish_envelope(
            "node-a",
            "run-remote-a",
            "gene-a",
            "capsule-a",
            "mutation-a",
            "shared-signal",
            "A.md",
            "# from a",
        );
        let envelope_b = remote_publish_envelope(
            "node-b",
            "run-remote-b",
            "gene-b",
            "capsule-b",
            "mutation-b",
            "shared-signal",
            "B.md",
            "# from b",
        );

        evo.import_remote_envelope(&envelope_a).unwrap();
        evo.import_remote_envelope(&envelope_b).unwrap();

        let decision = evo
            .replay_or_fallback(replay_input("shared-signal"))
            .await
            .unwrap();

        // The higher-reputation publisher's capsule wins.
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-b".into()));
        // node-b is credited the reuse reward and keeps the larger
        // selection bias.
        let locked = ledger.lock().unwrap();
        let rewarded = locked
            .accounts
            .iter()
            .find(|item| item.node_id == "node-b")
            .unwrap();
        assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
        assert!(
            locked.selector_reputation_bias()["node-b"]
                > locked.selector_reputation_bias()["node-a"]
        );
    }
8820
    // Two publishers share the same gene id but publish different capsules.
    // The capsule whose environment fingerprint matches the local replay
    // input must be selected, and only its publisher is rewarded on reuse
    // settlement.
    #[tokio::test]
    async fn remote_reuse_settlement_tracks_selected_capsule_publisher_for_shared_gene() {
        let ledger = Arc::new(Mutex::new(EvuLedger::default()));
        let (evo, _) = build_test_evo(
            "remote-shared-publisher",
            "run-remote-shared-publisher",
            command_validator(),
        );
        let evo = evo.with_economics(ledger.clone());
        let input = replay_input("shared-signal");
        // node-a's capsule matches the replay input's environment exactly.
        let preferred = remote_publish_envelope_with_env(
            "node-a",
            "run-remote-a",
            "gene-shared",
            "capsule-preferred",
            "mutation-preferred",
            "shared-signal",
            "A.md",
            "# from a",
            input.env.clone(),
        );
        // node-b's capsule carries a deliberately mismatched fingerprint.
        let fallback = remote_publish_envelope_with_env(
            "node-b",
            "run-remote-b",
            "gene-shared",
            "capsule-fallback",
            "mutation-fallback",
            "shared-signal",
            "B.md",
            "# from b",
            EnvFingerprint {
                rustc_version: "old-rustc".into(),
                cargo_lock_hash: "other-lock".into(),
                target_triple: "aarch64-apple-darwin".into(),
                os: "linux".into(),
            },
        );

        evo.import_remote_envelope(&preferred).unwrap();
        evo.import_remote_envelope(&fallback).unwrap();

        let decision = evo.replay_or_fallback(input).await.unwrap();

        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-preferred".into()));
        // Only node-a is credited; node-b never gains an account entry.
        let locked = ledger.lock().unwrap();
        let rewarded = locked
            .accounts
            .iter()
            .find(|item| item.node_id == "node-a")
            .unwrap();
        assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
        assert!(locked.accounts.iter().all(|item| item.node_id != "node-b"));
    }
8875
8876 #[test]
8877 fn select_candidates_surfaces_ranked_remote_cold_start_candidates() {
8878 let ledger = Arc::new(Mutex::new(EvuLedger {
8879 accounts: vec![],
8880 reputations: vec![
8881 oris_economics::ReputationRecord {
8882 node_id: "node-a".into(),
8883 publish_success_rate: 0.4,
8884 validator_accuracy: 0.4,
8885 reuse_impact: 0,
8886 },
8887 oris_economics::ReputationRecord {
8888 node_id: "node-b".into(),
8889 publish_success_rate: 0.95,
8890 validator_accuracy: 0.95,
8891 reuse_impact: 8,
8892 },
8893 ],
8894 }));
8895 let (evo, _) = build_test_evo("remote-select", "run-remote-select", command_validator());
8896 let evo = evo.with_economics(ledger);
8897
8898 let envelope_a = remote_publish_envelope(
8899 "node-a",
8900 "run-remote-a",
8901 "gene-a",
8902 "capsule-a",
8903 "mutation-a",
8904 "shared-signal",
8905 "A.md",
8906 "# from a",
8907 );
8908 let envelope_b = remote_publish_envelope(
8909 "node-b",
8910 "run-remote-b",
8911 "gene-b",
8912 "capsule-b",
8913 "mutation-b",
8914 "shared-signal",
8915 "B.md",
8916 "# from b",
8917 );
8918
8919 evo.import_remote_envelope(&envelope_a).unwrap();
8920 evo.import_remote_envelope(&envelope_b).unwrap();
8921
8922 let candidates = evo.select_candidates(&replay_input("shared-signal"));
8923
8924 assert_eq!(candidates.len(), 1);
8925 assert_eq!(candidates[0].gene.id, "gene-b");
8926 assert_eq!(candidates[0].capsules[0].id, "capsule-b");
8927 }
8928
    // Publisher reputation bias must survive a restart: envelopes imported
    // by one kernel instance are replayed by a freshly built instance over
    // the same persistent store, and the higher-reputation publisher's
    // capsule still wins and is rewarded.
    #[tokio::test]
    async fn remote_reuse_publisher_bias_survives_restart() {
        let ledger = Arc::new(Mutex::new(EvuLedger {
            accounts: vec![],
            reputations: vec![
                oris_economics::ReputationRecord {
                    node_id: "node-a".into(),
                    publish_success_rate: 0.4,
                    validator_accuracy: 0.4,
                    reuse_impact: 0,
                },
                oris_economics::ReputationRecord {
                    node_id: "node-b".into(),
                    publish_success_rate: 0.95,
                    validator_accuracy: 0.95,
                    reuse_impact: 8,
                },
            ],
        }));
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-remote-restart-store-{}",
            next_id("t")
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        // Durable JSONL store shared across both kernel instances.
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let evo = build_test_evo_with_store(
            "remote-success-restart-source",
            "run-remote-restart-source",
            command_validator(),
            store.clone(),
        )
        .with_economics(ledger.clone());

        let envelope_a = remote_publish_envelope(
            "node-a",
            "run-remote-a",
            "gene-a",
            "capsule-a",
            "mutation-a",
            "shared-signal",
            "A.md",
            "# from a",
        );
        let envelope_b = remote_publish_envelope(
            "node-b",
            "run-remote-b",
            "gene-b",
            "capsule-b",
            "mutation-b",
            "shared-signal",
            "B.md",
            "# from b",
        );

        evo.import_remote_envelope(&envelope_a).unwrap();
        evo.import_remote_envelope(&envelope_b).unwrap();

        // Simulated restart: a new kernel over the same store and ledger.
        let recovered = build_test_evo_with_store(
            "remote-success-restart-recovered",
            "run-remote-restart-recovered",
            command_validator(),
            store.clone(),
        )
        .with_economics(ledger.clone());

        let decision = recovered
            .replay_or_fallback(replay_input("shared-signal"))
            .await
            .unwrap();

        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-b".into()));
        // node-b is still rewarded on reuse after the restart.
        let locked = ledger.lock().unwrap();
        let rewarded = locked
            .accounts
            .iter()
            .find(|item| item.node_id == "node-b")
            .unwrap();
        assert_eq!(rewarded.balance, recovered.stake_policy.reuse_reward);
    }
9012
9013 #[tokio::test]
9014 async fn remote_reuse_failure_penalizes_remote_reputation() {
9015 let ledger = Arc::new(Mutex::new(EvuLedger::default()));
9016 let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
9017 let (evo, _) = build_test_evo("remote-failure", "run-failure", failing_validator);
9018 let evo = evo.with_economics(ledger.clone());
9019
9020 let envelope = remote_publish_envelope(
9021 "node-remote",
9022 "run-remote-failed",
9023 "gene-remote",
9024 "capsule-remote",
9025 "mutation-remote",
9026 "failure-signal",
9027 "FAILED.md",
9028 "# from remote",
9029 );
9030 evo.import_remote_envelope(&envelope).unwrap();
9031
9032 let decision = evo
9033 .replay_or_fallback(replay_input("failure-signal"))
9034 .await
9035 .unwrap();
9036
9037 assert!(!decision.used_capsule);
9038 assert!(decision.fallback_to_planner);
9039
9040 let signal = evo.economics_signal("node-remote").unwrap();
9041 assert_eq!(signal.available_evu, 0);
9042 assert!(signal.publish_success_rate < 0.5);
9043 assert!(signal.validator_accuracy < 0.5);
9044 }
9045
9046 #[test]
9047 fn ensure_builtin_experience_assets_is_idempotent_and_fetchable() {
9048 let store_root = std::env::temp_dir().join(format!(
9049 "oris-evokernel-builtin-experience-store-{}",
9050 next_id("t")
9051 ));
9052 if store_root.exists() {
9053 fs::remove_dir_all(&store_root).unwrap();
9054 }
9055 let store: Arc<dyn EvolutionStore> =
9056 Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
9057 let node = EvolutionNetworkNode::new(store.clone());
9058
9059 let first = node
9060 .ensure_builtin_experience_assets("runtime-bootstrap")
9061 .unwrap();
9062 assert!(!first.imported_asset_ids.is_empty());
9063
9064 let second = node
9065 .ensure_builtin_experience_assets("runtime-bootstrap")
9066 .unwrap();
9067 assert!(second.imported_asset_ids.is_empty());
9068
9069 let fetch = node
9070 .fetch_assets(
9071 "execution-api",
9072 &FetchQuery {
9073 sender_id: "compat-agent".into(),
9074 signals: vec!["error".into()],
9075 since_cursor: None,
9076 resume_token: None,
9077 },
9078 )
9079 .unwrap();
9080
9081 let mut has_builtin_evomap = false;
9082 for asset in fetch.assets {
9083 if let NetworkAsset::Gene { gene } = asset {
9084 if strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
9085 == Some("builtin_evomap")
9086 && gene.state == AssetState::Promoted
9087 {
9088 has_builtin_evomap = true;
9089 break;
9090 }
9091 }
9092 }
9093 assert!(has_builtin_evomap);
9094 }
9095
    // Reported experiences are retained at most three deep per task class:
    // recording four reports leaves the latest three Promoted and the
    // oldest Revoked, while builtin assets are never evicted. Fetch must
    // still surface the retained genes.
    #[test]
    fn reported_experience_retention_keeps_latest_three_and_preserves_builtin_assets() {
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-reported-retention-store-{}",
            next_id("t")
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let node = EvolutionNetworkNode::new(store.clone());

        node.ensure_builtin_experience_assets("runtime-bootstrap")
            .unwrap();

        // Four reports for the same task class; retention depth is three.
        for idx in 0..4 {
            node.record_reported_experience(
                "reporter-a",
                format!("reported-docs-rewrite-v{}", idx + 1),
                vec!["docs.rewrite".into(), format!("task-{}", idx + 1)],
                vec![
                    "task_class=docs.rewrite".into(),
                    format!("task_label=Docs rewrite v{}", idx + 1),
                    format!("summary=reported replay {}", idx + 1),
                ],
                vec!["a2a.tasks.report".into()],
            )
            .unwrap();
        }

        // Count reported docs.rewrite genes by state, and promoted builtin
        // genes separately.
        let (_, projection) = store.scan_projection().unwrap();
        let reported_promoted = projection
            .genes
            .iter()
            .filter(|gene| {
                gene.state == AssetState::Promoted
                    && strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
                        == Some("reported_experience")
                    && strategy_metadata_value(&gene.strategy, "task_class").as_deref()
                        == Some("docs.rewrite")
            })
            .count();
        let reported_revoked = projection
            .genes
            .iter()
            .filter(|gene| {
                gene.state == AssetState::Revoked
                    && strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
                        == Some("reported_experience")
                    && strategy_metadata_value(&gene.strategy, "task_class").as_deref()
                        == Some("docs.rewrite")
            })
            .count();
        let builtin_promoted = projection
            .genes
            .iter()
            .filter(|gene| {
                gene.state == AssetState::Promoted
                    && matches!(
                        strategy_metadata_value(&gene.strategy, "asset_origin").as_deref(),
                        Some("builtin") | Some("builtin_evomap")
                    )
            })
            .count();

        assert_eq!(reported_promoted, 3);
        assert_eq!(reported_revoked, 1);
        assert!(builtin_promoted >= 1);

        // Consumers still see the retained docs.rewrite genes via fetch.
        let fetch = node
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "consumer-b".into(),
                    signals: vec!["docs.rewrite".into()],
                    since_cursor: None,
                    resume_token: None,
                },
            )
            .unwrap();
        let docs_genes = fetch
            .assets
            .into_iter()
            .filter_map(|asset| match asset {
                NetworkAsset::Gene { gene } => Some(gene),
                _ => None,
            })
            .filter(|gene| {
                strategy_metadata_value(&gene.strategy, "task_class").as_deref()
                    == Some("docs.rewrite")
            })
            .collect::<Vec<_>>();
        assert!(docs_genes.len() >= 3);
    }
9191}