// oris_evokernel/core.rs

1//! EvoKernel orchestration: mutation capture, validation, capsule construction, and replay-first reuse.
2
3use std::collections::{BTreeMap, BTreeSet};
4use std::fs;
5use std::path::Path;
6use std::process::Command;
7use std::sync::{Arc, Mutex};
8
9use async_trait::async_trait;
10use chrono::{DateTime, Duration, Utc};
11use oris_agent_contract::{
12    AgentRole, CoordinationMessage, CoordinationPlan, CoordinationPrimitive, CoordinationResult,
13    CoordinationTask, ExecutionFeedback, MutationProposal as AgentMutationProposal,
14};
15use oris_economics::{EconomicsSignal, EvuLedger, StakePolicy};
16use oris_evolution::{
17    compute_artifact_hash, next_id, stable_hash_json, AssetState, BlastRadius, CandidateSource,
18    Capsule, CapsuleId, EnvFingerprint, EvolutionError, EvolutionEvent, EvolutionProjection,
19    EvolutionStore, Gene, GeneCandidate, MutationId, PreparedMutation, Selector, SelectorInput,
20    StoreBackedSelector, StoredEvolutionEvent, ValidationSnapshot,
21};
22use oris_evolution_network::{EvolutionEnvelope, NetworkAsset};
23use oris_governor::{DefaultGovernor, Governor, GovernorDecision, GovernorInput};
24use oris_kernel::{Kernel, KernelState, RunId};
25use oris_sandbox::{
26    compute_blast_radius, execute_allowed_command, Sandbox, SandboxPolicy, SandboxReceipt,
27};
28use oris_spec::CompiledMutationPlan;
29use serde::{Deserialize, Serialize};
30use thiserror::Error;
31
32pub use oris_evolution::{
33    default_store_root, ArtifactEncoding, AssetState as EvoAssetState,
34    BlastRadius as EvoBlastRadius, CandidateSource as EvoCandidateSource,
35    EnvFingerprint as EvoEnvFingerprint, EvolutionStore as EvoEvolutionStore, JsonlEvolutionStore,
36    MutationArtifact, MutationIntent, MutationTarget, Outcome, RiskLevel,
37    SelectorInput as EvoSelectorInput,
38};
39pub use oris_evolution_network::{
40    FetchQuery, FetchResponse, MessageType, PublishRequest, RevokeNotice,
41};
42pub use oris_governor::{CoolingWindow, GovernorConfig, RevocationReason};
43pub use oris_sandbox::{LocalProcessSandbox, SandboxPolicy as EvoSandboxPolicy};
44pub use oris_spec::{SpecCompileError, SpecCompiler, SpecDocument};
45
/// Ordered validation pipeline executed against a sandbox-applied mutation.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationPlan {
    /// Human-readable profile name (e.g. "oris-default") recorded in snapshots.
    pub profile: String,
    /// Stages run in order; execution stops at the first failing stage.
    pub stages: Vec<ValidationStage>,
}
51
52impl ValidationPlan {
53    pub fn oris_default() -> Self {
54        Self {
55            profile: "oris-default".into(),
56            stages: vec![
57                ValidationStage::Command {
58                    program: "cargo".into(),
59                    args: vec!["fmt".into(), "--all".into(), "--check".into()],
60                    timeout_ms: 60_000,
61                },
62                ValidationStage::Command {
63                    program: "cargo".into(),
64                    args: vec!["check".into(), "--workspace".into()],
65                    timeout_ms: 180_000,
66                },
67                ValidationStage::Command {
68                    program: "cargo".into(),
69                    args: vec![
70                        "test".into(),
71                        "-p".into(),
72                        "oris-kernel".into(),
73                        "-p".into(),
74                        "oris-evolution".into(),
75                        "-p".into(),
76                        "oris-sandbox".into(),
77                        "-p".into(),
78                        "oris-evokernel".into(),
79                        "--lib".into(),
80                    ],
81                    timeout_ms: 300_000,
82                },
83                ValidationStage::Command {
84                    program: "cargo".into(),
85                    args: vec![
86                        "test".into(),
87                        "-p".into(),
88                        "oris-runtime".into(),
89                        "--lib".into(),
90                    ],
91                    timeout_ms: 300_000,
92                },
93            ],
94        }
95    }
96}
97
/// A single validation step; currently only external command execution.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum ValidationStage {
    /// Run `program` with `args` via `execute_allowed_command` under the
    /// validator's sandbox policy.
    Command {
        program: String,
        args: Vec<String>,
        /// Per-stage wall-clock timeout in milliseconds.
        timeout_ms: u64,
    },
}
106
/// Result of executing a single validation stage.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationStageReport {
    /// Display label: the program followed by its arguments.
    pub stage: String,
    /// Whether the stage's command succeeded.
    pub success: bool,
    /// Process exit code, when the command ran to completion.
    pub exit_code: Option<i32>,
    /// Wall-clock duration of the stage in milliseconds.
    pub duration_ms: u64,
    /// Captured standard output.
    pub stdout: String,
    /// Captured standard error (or the launch-error text on spawn failure).
    pub stderr: String,
}
116
/// Aggregate result of running a [`ValidationPlan`].
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationReport {
    /// True only if every executed stage succeeded.
    pub success: bool,
    /// Total wall-clock duration across all executed stages, in milliseconds.
    pub duration_ms: u64,
    /// Per-stage reports, in execution order.
    pub stages: Vec<ValidationStageReport>,
    /// Concatenated stdout/stderr of all executed stages.
    pub logs: String,
}
124
/// Raw material fed into deterministic signal extraction
/// (see `extract_deterministic_signals`).
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionInput {
    /// Patch diff text of the proposed mutation.
    pub patch_diff: String,
    /// Stated intent of the mutation.
    pub intent: String,
    /// Effect the author expects the mutation to have.
    pub expected_effect: String,
    /// Signals explicitly declared by the proposer.
    pub declared_signals: Vec<String>,
    /// Paths of files touched by the mutation.
    pub changed_files: Vec<String>,
    /// Whether validation succeeded for this mutation.
    pub validation_success: bool,
    /// Combined validation log text.
    pub validation_logs: String,
    /// Captured output per validation stage.
    pub stage_outputs: Vec<String>,
}
136
/// Result of deterministic signal extraction.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionOutput {
    /// Sorted, deduplicated signal strings (at most 32 entries).
    pub values: Vec<String>,
    /// Stable hash over `values` for cheap equality comparison.
    pub hash: String,
}
142
/// Template describing a pre-baked seed mutation.
/// NOTE(review): presumably consumed during bootstrap seeding (see
/// `BootstrapReport`) — confirm against the caller.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SeedTemplate {
    /// Unique template identifier.
    pub id: String,
    /// Intent text describing what the seed mutation does.
    pub intent: String,
    /// Signals associated with the seed.
    pub signals: Vec<String>,
    /// Diff payload applied when the seed is used.
    pub diff_payload: String,
    /// Name of the validation profile to run for this seed.
    pub validation_profile: String,
}
151
/// Outcome of a bootstrap-seeding pass.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct BootstrapReport {
    /// Whether any seeding took place.
    pub seeded: bool,
    /// Number of genes added during seeding.
    pub genes_added: usize,
    /// Number of capsules added during seeding.
    pub capsules_added: usize,
}
158
159impl ValidationReport {
160    pub fn to_snapshot(&self, profile: &str) -> ValidationSnapshot {
161        ValidationSnapshot {
162            success: self.success,
163            profile: profile.to_string(),
164            duration_ms: self.duration_ms,
165            summary: if self.success {
166                "validation passed".into()
167            } else {
168                "validation failed".into()
169            },
170        }
171    }
172}
173
174pub fn extract_deterministic_signals(input: &SignalExtractionInput) -> SignalExtractionOutput {
175    let mut signals = BTreeSet::new();
176
177    for declared in &input.declared_signals {
178        if let Some(phrase) = normalize_signal_phrase(declared) {
179            signals.insert(phrase);
180        }
181        extend_signal_tokens(&mut signals, declared);
182    }
183
184    for text in [
185        input.patch_diff.as_str(),
186        input.intent.as_str(),
187        input.expected_effect.as_str(),
188        input.validation_logs.as_str(),
189    ] {
190        extend_signal_tokens(&mut signals, text);
191    }
192
193    for changed_file in &input.changed_files {
194        extend_signal_tokens(&mut signals, changed_file);
195    }
196
197    for stage_output in &input.stage_outputs {
198        extend_signal_tokens(&mut signals, stage_output);
199    }
200
201    signals.insert(if input.validation_success {
202        "validation passed".into()
203    } else {
204        "validation failed".into()
205    });
206
207    let values = signals.into_iter().take(32).collect::<Vec<_>>();
208    let hash =
209        stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
210    SignalExtractionOutput { values, hash }
211}
212
/// Errors surfaced by a [`Validator`].
#[derive(Debug, Error)]
pub enum ValidationError {
    /// The validation pipeline itself failed to execute. A stage merely
    /// reporting failure is not an error — that is expressed through
    /// `ValidationReport::success`.
    #[error("validation execution failed: {0}")]
    Execution(String),
}
218
/// Runs a validation plan against a sandbox-applied mutation.
#[async_trait]
pub trait Validator: Send + Sync {
    /// Executes `plan` in the context described by `receipt` and returns a
    /// stage-by-stage report. Stage failures are reported inside the
    /// `ValidationReport`; `Err` means validation could not run at all.
    async fn run(
        &self,
        receipt: &SandboxReceipt,
        plan: &ValidationPlan,
    ) -> Result<ValidationReport, ValidationError>;
}
227
/// [`Validator`] implementation that executes each stage as an external
/// command.
pub struct CommandValidator {
    // Sandbox policy forwarded to `execute_allowed_command` for every stage.
    policy: SandboxPolicy,
}
231
impl CommandValidator {
    /// Creates a validator that runs stage commands under `policy`.
    pub fn new(policy: SandboxPolicy) -> Self {
        Self { policy }
    }
}
237
238#[async_trait]
239impl Validator for CommandValidator {
240    async fn run(
241        &self,
242        receipt: &SandboxReceipt,
243        plan: &ValidationPlan,
244    ) -> Result<ValidationReport, ValidationError> {
245        let started = std::time::Instant::now();
246        let mut stages = Vec::new();
247        let mut success = true;
248        let mut logs = String::new();
249
250        for stage in &plan.stages {
251            match stage {
252                ValidationStage::Command {
253                    program,
254                    args,
255                    timeout_ms,
256                } => {
257                    let result = execute_allowed_command(
258                        &self.policy,
259                        &receipt.workdir,
260                        program,
261                        args,
262                        *timeout_ms,
263                    )
264                    .await;
265                    let report = match result {
266                        Ok(output) => ValidationStageReport {
267                            stage: format!("{program} {}", args.join(" ")),
268                            success: output.success,
269                            exit_code: output.exit_code,
270                            duration_ms: output.duration_ms,
271                            stdout: output.stdout,
272                            stderr: output.stderr,
273                        },
274                        Err(err) => ValidationStageReport {
275                            stage: format!("{program} {}", args.join(" ")),
276                            success: false,
277                            exit_code: None,
278                            duration_ms: 0,
279                            stdout: String::new(),
280                            stderr: err.to_string(),
281                        },
282                    };
283                    if !report.success {
284                        success = false;
285                    }
286                    if !report.stdout.is_empty() {
287                        logs.push_str(&report.stdout);
288                        logs.push('\n');
289                    }
290                    if !report.stderr.is_empty() {
291                        logs.push_str(&report.stderr);
292                        logs.push('\n');
293                    }
294                    stages.push(report);
295                    if !success {
296                        break;
297                    }
298                }
299            }
300        }
301
302        Ok(ValidationReport {
303            success,
304            duration_ms: started.elapsed().as_millis() as u64,
305            stages,
306            logs,
307        })
308    }
309}
310
/// Outcome of a replay attempt against the capsule store.
#[derive(Clone, Debug)]
pub struct ReplayDecision {
    /// True when a stored capsule was successfully re-applied and re-validated.
    pub used_capsule: bool,
    /// Capsule involved in the attempt, when one was selected.
    pub capsule_id: Option<CapsuleId>,
    /// True when the caller should fall back to planner-driven mutation.
    pub fallback_to_planner: bool,
    /// Human-readable explanation of the decision.
    pub reason: String,
}
318
/// Internal scheduling state for a coordination task.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum CoordinationTaskState {
    /// All prerequisites are satisfied; the task can run now.
    Ready,
    /// At least one dependency has not finished yet.
    Waiting,
    /// A dependency failed or was skipped, blocking this task.
    BlockedByFailure,
    /// Prerequisites can never be satisfied (e.g. unknown dependency id or
    /// a role-specific precondition that cannot hold).
    PermanentlyBlocked,
}
326
/// Stateless scheduler for multi-agent coordination plans; see
/// [`MultiAgentCoordinator::coordinate`].
#[derive(Clone, Debug, Default)]
pub struct MultiAgentCoordinator;
329
impl MultiAgentCoordinator {
    /// Creates a coordinator. The type holds no state, so this is free.
    pub fn new() -> Self {
        Self
    }

    /// Drives `plan` to completion with a deterministic round-based
    /// scheduler and returns completed/failed task ids plus the message log.
    ///
    /// Each round: under the `Conditional` primitive, tasks blocked by a
    /// failed dependency chain are skipped first; then every `Ready` task
    /// runs (only the first under `Sequential`). A task's outcome is
    /// simulated from its description (`simulate_task_failure`); a failing
    /// task stays pending and is retried until its failure count exceeds
    /// `max_retries`. The loop ends when no task is ready, after which each
    /// still-pending task is classified and reported as failed with an
    /// explanatory message.
    ///
    /// NOTE(review): `timeout_ms` is only echoed in the summary string — it
    /// is not enforced here; confirm whether enforcement happens upstream.
    pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
        let primitive = plan.primitive.clone();
        let root_goal = plan.root_goal.clone();
        let timeout_ms = plan.timeout_ms;
        let max_retries = plan.max_retries;
        let mut tasks = BTreeMap::new();
        for task in plan.tasks {
            // `or_insert` keeps the first task seen for a duplicated id.
            tasks.entry(task.id.clone()).or_insert(task);
        }

        // BTree collections keep iteration — and therefore scheduling and
        // message order — deterministic across runs.
        let mut pending = tasks.keys().cloned().collect::<BTreeSet<_>>();
        let mut completed = BTreeSet::new();
        let mut failed = BTreeSet::new();
        let mut completed_order = Vec::new();
        let mut failed_order = Vec::new();
        let mut skipped = BTreeSet::new();
        // Failure count per task id, used for retry accounting.
        let mut attempts = BTreeMap::new();
        let mut messages = Vec::new();

        loop {
            if matches!(primitive, CoordinationPrimitive::Conditional) {
                self.apply_conditional_skips(
                    &tasks,
                    &mut pending,
                    &completed,
                    &failed,
                    &mut skipped,
                    &mut messages,
                );
            }

            let mut ready = self.ready_task_ids(&tasks, &pending, &completed, &failed, &skipped);
            if ready.is_empty() {
                // No further progress possible; leftovers are reported below.
                break;
            }
            if matches!(primitive, CoordinationPrimitive::Sequential) {
                // Sequential coordination runs exactly one task per round.
                ready.truncate(1);
            }

            for task_id in ready {
                let Some(task) = tasks.get(&task_id) else {
                    continue;
                };
                // A task may have completed/failed earlier this round.
                if !pending.contains(&task_id) {
                    continue;
                }
                self.record_handoff_messages(task, &tasks, &completed, &failed, &mut messages);

                let prior_failures = attempts.get(&task_id).copied().unwrap_or(0);
                if Self::simulate_task_failure(task, prior_failures) {
                    let failure_count = prior_failures + 1;
                    attempts.insert(task_id.clone(), failure_count);
                    // A task is retried until its failure count exceeds
                    // max_retries; until then it stays in `pending`.
                    let will_retry = failure_count <= max_retries;
                    messages.push(CoordinationMessage {
                        from_role: task.role.clone(),
                        to_role: task.role.clone(),
                        task_id: task_id.clone(),
                        content: if will_retry {
                            format!("task {task_id} failed on attempt {failure_count} and will retry")
                        } else {
                            format!(
                                "task {task_id} failed on attempt {failure_count} and exhausted retries"
                            )
                        },
                    });
                    if !will_retry {
                        pending.remove(&task_id);
                        if failed.insert(task_id.clone()) {
                            failed_order.push(task_id);
                        }
                    }
                    continue;
                }

                pending.remove(&task_id);
                if completed.insert(task_id.clone()) {
                    completed_order.push(task_id);
                }
            }
        }

        // Anything still pending can never run; classify why and fail it
        // with a diagnostic message.
        let blocked_ids = pending.into_iter().collect::<Vec<_>>();
        for task_id in blocked_ids {
            let Some(task) = tasks.get(&task_id) else {
                continue;
            };
            let state = self.classify_task(task, &tasks, &completed, &failed, &skipped);
            let content = match state {
                CoordinationTaskState::BlockedByFailure => {
                    format!("task {task_id} blocked by failed dependencies")
                }
                CoordinationTaskState::PermanentlyBlocked => {
                    format!("task {task_id} has invalid coordination prerequisites")
                }
                CoordinationTaskState::Waiting => {
                    format!("task {task_id} has unresolved dependencies")
                }
                CoordinationTaskState::Ready => {
                    format!("task {task_id} was left pending unexpectedly")
                }
            };
            messages.push(CoordinationMessage {
                from_role: task.role.clone(),
                to_role: task.role.clone(),
                task_id: task_id.clone(),
                content,
            });
            if failed.insert(task_id.clone()) {
                failed_order.push(task_id);
            }
        }

        CoordinationResult {
            completed_tasks: completed_order,
            failed_tasks: failed_order,
            messages,
            summary: format!(
                "goal '{}' completed {} tasks, failed {}, skipped {} using {:?} coordination (timeout={}ms, max_retries={})",
                root_goal,
                completed.len(),
                failed.len(),
                skipped.len(),
                primitive,
                timeout_ms,
                max_retries
            ),
        }
    }

    /// Returns the ids of pending tasks whose prerequisites are currently
    /// satisfied, in deterministic (sorted) order.
    fn ready_task_ids(
        &self,
        tasks: &BTreeMap<String, CoordinationTask>,
        pending: &BTreeSet<String>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &BTreeSet<String>,
    ) -> Vec<String> {
        pending
            .iter()
            .filter_map(|task_id| {
                let task = tasks.get(task_id)?;
                (self.classify_task(task, tasks, completed, failed, skipped)
                    == CoordinationTaskState::Ready)
                    .then(|| task_id.clone())
            })
            .collect()
    }

    /// For `Conditional` coordination: removes from `pending` every task
    /// currently blocked by a failed/skipped dependency, marks it skipped,
    /// and records a message explaining the skip. Skipping cascades on the
    /// next round, since a skipped dependency blocks its dependents too.
    fn apply_conditional_skips(
        &self,
        tasks: &BTreeMap<String, CoordinationTask>,
        pending: &mut BTreeSet<String>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &mut BTreeSet<String>,
        messages: &mut Vec<CoordinationMessage>,
    ) {
        // Collect first so `pending` is not mutated while iterating it.
        let skip_ids = pending
            .iter()
            .filter_map(|task_id| {
                let task = tasks.get(task_id)?;
                (self.classify_task(task, tasks, completed, failed, skipped)
                    == CoordinationTaskState::BlockedByFailure)
                    .then(|| task_id.clone())
            })
            .collect::<Vec<_>>();

        for task_id in skip_ids {
            let Some(task) = tasks.get(&task_id) else {
                continue;
            };
            pending.remove(&task_id);
            skipped.insert(task_id.clone());
            messages.push(CoordinationMessage {
                from_role: task.role.clone(),
                to_role: task.role.clone(),
                task_id: task_id.clone(),
                content: format!("task {task_id} skipped due to failed dependency chain"),
            });
        }
    }

    /// Classifies a task's scheduling state from its role and dependencies.
    ///
    /// - Planner/Coder: ready once all dependencies completed; blocked if
    ///   any failed or was skipped; permanently blocked on an unknown id.
    /// - Repair: requires at least one Coder dependency and becomes ready
    ///   only when a Coder dependency has *failed* (there is nothing to
    ///   repair otherwise); non-Coder failures block it.
    /// - Optimizer: requires a Coder/Repair dependency and becomes ready
    ///   only when at least one such dependency completed; failed impl
    ///   dependencies are tolerated as long as one completed.
    fn classify_task(
        &self,
        task: &CoordinationTask,
        tasks: &BTreeMap<String, CoordinationTask>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &BTreeSet<String>,
    ) -> CoordinationTaskState {
        match task.role {
            AgentRole::Planner | AgentRole::Coder => {
                let mut waiting = false;
                for dependency_id in &task.depends_on {
                    if !tasks.contains_key(dependency_id) {
                        return CoordinationTaskState::PermanentlyBlocked;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if waiting {
                    CoordinationTaskState::Waiting
                } else {
                    CoordinationTaskState::Ready
                }
            }
            AgentRole::Repair => {
                let mut waiting = false;
                let mut has_coder_dependency = false;
                let mut has_failed_coder = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_coder = matches!(dependency.role, AgentRole::Coder);
                    if is_coder {
                        has_coder_dependency = true;
                    }
                    if skipped.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if failed.contains(dependency_id) {
                        // A failed Coder is the trigger for repair work;
                        // any other failed dependency blocks the task.
                        if is_coder {
                            has_failed_coder = true;
                        } else {
                            return CoordinationTaskState::BlockedByFailure;
                        }
                        continue;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if !has_coder_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_failed_coder {
                    CoordinationTaskState::Ready
                } else {
                    // All Coder dependencies succeeded: repair has no
                    // purpose, so it can never become ready.
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
            AgentRole::Optimizer => {
                let mut waiting = false;
                let mut has_impl_dependency = false;
                let mut has_completed_impl = false;
                let mut has_failed_impl = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_impl = matches!(dependency.role, AgentRole::Coder | AgentRole::Repair);
                    if is_impl {
                        has_impl_dependency = true;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        if is_impl {
                            has_failed_impl = true;
                            continue;
                        }
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if completed.contains(dependency_id) {
                        if is_impl {
                            has_completed_impl = true;
                        }
                        continue;
                    }
                    waiting = true;
                }
                if !has_impl_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_completed_impl {
                    CoordinationTaskState::Ready
                } else if has_failed_impl {
                    CoordinationTaskState::BlockedByFailure
                } else {
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
        }
    }

    /// Emits a handoff message from each resolved (completed or failed)
    /// dependency to `task`, deduplicating dependency ids first so each
    /// dependency contributes at most one message per invocation.
    fn record_handoff_messages(
        &self,
        task: &CoordinationTask,
        tasks: &BTreeMap<String, CoordinationTask>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        messages: &mut Vec<CoordinationMessage>,
    ) {
        let mut dependency_ids = task.depends_on.clone();
        dependency_ids.sort();
        dependency_ids.dedup();

        for dependency_id in dependency_ids {
            let Some(dependency) = tasks.get(&dependency_id) else {
                continue;
            };
            if completed.contains(&dependency_id) {
                messages.push(CoordinationMessage {
                    from_role: dependency.role.clone(),
                    to_role: task.role.clone(),
                    task_id: task.id.clone(),
                    content: format!("handoff from {dependency_id} to {}", task.id),
                });
            } else if failed.contains(&dependency_id) {
                messages.push(CoordinationMessage {
                    from_role: dependency.role.clone(),
                    to_role: task.role.clone(),
                    task_id: task.id.clone(),
                    content: format!("failed dependency {dependency_id} routed to {}", task.id),
                });
            }
        }
    }

    /// Simulated task outcome used by the scheduler: a description
    /// containing "force-fail" always fails; "fail-once" fails only on the
    /// first attempt. Matching is case-insensitive.
    fn simulate_task_failure(task: &CoordinationTask, prior_failures: u32) -> bool {
        let normalized = task.description.to_ascii_lowercase();
        normalized.contains("force-fail")
            || (normalized.contains("fail-once") && prior_failures == 0)
    }
}
665
/// Infrastructure failures during replay. Note that a replay merely falling
/// back to the planner is not an error — it is an `Ok(ReplayDecision)`.
#[derive(Debug, Error)]
pub enum ReplayError {
    /// Evolution-store read/append failed.
    #[error("store error: {0}")]
    Store(String),
    /// Sandbox operation failed.
    #[error("sandbox error: {0}")]
    Sandbox(String),
    /// Validator could not run.
    #[error("validation error: {0}")]
    Validation(String),
}
675
/// Attempts replay-first reuse of stored mutations before falling back to
/// planner-driven work.
#[async_trait]
pub trait ReplayExecutor: Send + Sync {
    /// Tries to satisfy `input` by replaying a stored capsule, applying it
    /// under `policy` and re-running `validation`. Returns a decision that
    /// tells the caller whether a capsule was reused or the planner should
    /// take over; `Err` signals infrastructure failure.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError>;
}
685
/// [`ReplayExecutor`] backed by the evolution store, a selector, a sandbox
/// and a validator, with optional economics-based reputation re-ranking.
pub struct StoreReplayExecutor {
    /// Sandbox used to re-apply stored mutations.
    pub sandbox: Arc<dyn Sandbox>,
    /// Validator re-run against every replayed mutation.
    pub validator: Arc<dyn Validator>,
    /// Event store for genes, capsules, and replay bookkeeping.
    pub store: Arc<dyn EvolutionStore>,
    /// Primary candidate selector.
    pub selector: Arc<dyn Selector>,
    /// Governor policy hook.
    /// NOTE(review): not referenced in the replay path visible here —
    /// confirm where it is consulted.
    pub governor: Arc<dyn Governor>,
    /// Optional EVU ledger; when present (together with
    /// `remote_publishers`) it enables reputation-biased candidate
    /// re-ranking and reuse settlements.
    pub economics: Option<Arc<Mutex<EvuLedger>>>,
    /// Optional gene-id -> remote-publisher-id map.
    pub remote_publishers: Option<Arc<Mutex<BTreeMap<String, String>>>>,
    /// Stake policy for economic settlement.
    /// NOTE(review): not read in the code visible here — confirm usage.
    pub stake_policy: StakePolicy,
}
696
#[async_trait]
impl ReplayExecutor for StoreReplayExecutor {
    /// Attempts to satisfy `input` by replaying a previously captured
    /// capsule instead of invoking the planner.
    ///
    /// Candidate lookup cascades: selector-scored genes, then exact-match
    /// local candidates, then quarantined remote exact matches. The best
    /// candidate's mutation is re-applied in the sandbox and re-validated;
    /// only a passing run counts as reuse. Every non-fatal problem returns
    /// `Ok` with `fallback_to_planner: true`; `Err` is reserved for
    /// store/validator infrastructure failures.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        let mut selector_input = input.clone();
        // With economics configured, widen the candidate pool so the
        // reputation re-ranking has something to reorder.
        if self.economics.is_some() && self.remote_publishers.is_some() {
            selector_input.limit = selector_input.limit.max(4);
        }
        let mut candidates = self.selector.select(&selector_input);
        self.rerank_with_reputation_bias(&mut candidates);
        let mut exact_match = false;
        // Cold-start fallback: exact-match lookup when the selector returns
        // nothing.
        if candidates.is_empty() {
            let mut exact_candidates = exact_match_candidates(self.store.as_ref(), input);
            self.rerank_with_reputation_bias(&mut exact_candidates);
            if !exact_candidates.is_empty() {
                candidates = exact_candidates;
                exact_match = true;
            }
        }
        // Last resort: quarantined remote assets that exactly match.
        if candidates.is_empty() {
            let mut remote_candidates =
                quarantined_remote_exact_match_candidates(self.store.as_ref(), input);
            self.rerank_with_reputation_bias(&mut remote_candidates);
            if !remote_candidates.is_empty() {
                candidates = remote_candidates;
                exact_match = true;
            }
        }
        candidates.truncate(input.limit.max(1));
        let Some(best) = candidates.into_iter().next() else {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "no matching gene".into(),
            });
        };
        let remote_publisher = self.publisher_for_gene(&best.gene.id);

        // The score gate applies only to fuzzy selector matches; exact
        // matches bypass it.
        if !exact_match && best.score < 0.82 {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: format!("best gene score {:.3} below replay threshold", best.score),
            });
        }

        let Some(capsule) = best.capsules.first().cloned() else {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "candidate gene has no capsule".into(),
            });
        };

        let Some(mutation) = find_declared_mutation(self.store.as_ref(), &capsule.mutation_id)
            .map_err(|err| ReplayError::Store(err.to_string()))?
        else {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "mutation payload missing from store".into(),
            });
        };

        // Re-apply the stored patch; an apply failure settles the reuse as
        // unsuccessful for the remote publisher (if any) and falls back.
        let receipt = match self.sandbox.apply(&mutation, policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                self.record_reuse_settlement(remote_publisher.as_deref(), false);
                return Ok(ReplayDecision {
                    used_capsule: false,
                    capsule_id: Some(capsule.id.clone()),
                    fallback_to_planner: true,
                    reason: format!("replay patch apply failed: {err}"),
                });
            }
        };

        let report = self
            .validator
            .run(&receipt, validation)
            .await
            .map_err(|err| ReplayError::Validation(err.to_string()))?;
        if !report.success {
            self.record_replay_validation_failure(&best, &capsule, validation, &report)?;
            self.record_reuse_settlement(remote_publisher.as_deref(), false);
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: Some(capsule.id.clone()),
                fallback_to_planner: true,
                reason: "replay validation failed".into(),
            });
        }

        // A quarantined capsule that re-validates cleanly is promoted:
        // record the passing validation, then release the capsule.
        if matches!(capsule.state, AssetState::Quarantined) {
            self.store
                .append_event(EvolutionEvent::ValidationPassed {
                    mutation_id: capsule.mutation_id.clone(),
                    report: report.to_snapshot(&validation.profile),
                    gene_id: Some(best.gene.id.clone()),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            self.store
                .append_event(EvolutionEvent::CapsuleReleased {
                    capsule_id: capsule.id.clone(),
                    state: AssetState::Promoted,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
        }

        self.store
            .append_event(EvolutionEvent::CapsuleReused {
                capsule_id: capsule.id.clone(),
                gene_id: capsule.gene_id.clone(),
                run_id: capsule.run_id.clone(),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        self.record_reuse_settlement(remote_publisher.as_deref(), true);

        Ok(ReplayDecision {
            used_capsule: true,
            capsule_id: Some(capsule.id),
            fallback_to_planner: false,
            reason: if exact_match {
                "replayed via exact-match cold-start lookup".into()
            } else {
                "replayed via selector".into()
            },
        })
    }
}
835
impl StoreReplayExecutor {
    /// Re-sorts `candidates` in descending order of reputation-biased score,
    /// with ties broken deterministically by gene id.
    ///
    /// No-op unless both an economics ledger and a remote-publisher map are
    /// configured, or when the computed reputation bias is empty. A poisoned
    /// lock degrades to a default (empty) bias/map rather than panicking.
    fn rerank_with_reputation_bias(&self, candidates: &mut [GeneCandidate]) {
        let Some(ledger) = self.economics.as_ref() else {
            return;
        };
        let Some(remote_publishers) = self.remote_publishers.as_ref() else {
            return;
        };
        let reputation_bias = ledger
            .lock()
            .ok()
            .map(|locked| locked.selector_reputation_bias())
            .unwrap_or_default();
        if reputation_bias.is_empty() {
            return;
        }
        // Snapshot the publisher map so scoring does not hold the lock.
        let publisher_map = remote_publishers
            .lock()
            .ok()
            .map(|locked| locked.clone())
            .unwrap_or_default();
        // `right` before `left` => descending by effective score.
        candidates.sort_by(|left, right| {
            effective_candidate_score(right, &publisher_map, &reputation_bias)
                .partial_cmp(&effective_candidate_score(
                    left,
                    &publisher_map,
                    &reputation_bias,
                ))
                .unwrap_or(std::cmp::Ordering::Equal)
                .then_with(|| left.gene.id.cmp(&right.gene.id))
        });
    }

    /// Remote publisher id recorded for `gene_id`, if any. `None` when no
    /// publisher map is configured, the lock is poisoned, or the gene is local.
    fn publisher_for_gene(&self, gene_id: &str) -> Option<String> {
        self.remote_publishers
            .as_ref()?
            .lock()
            .ok()?
            .get(gene_id)
            .cloned()
    }

    /// Settles the remote-reuse stake with `publisher_id` after a replay
    /// attempt. Silently does nothing for local genes (no publisher), when no
    /// ledger is configured, or when the ledger lock is poisoned.
    fn record_reuse_settlement(&self, publisher_id: Option<&str>, success: bool) {
        let Some(publisher_id) = publisher_id else {
            return;
        };
        let Some(ledger) = self.economics.as_ref() else {
            return;
        };
        if let Ok(mut locked) = ledger.lock() {
            locked.settle_remote_reuse(publisher_id, success, &self.stake_policy);
        }
    }

    /// Records a failed replay validation and lets the governor decide whether
    /// the gene should be revoked.
    ///
    /// Appends `ValidationFailed` first, then evaluates the governor with a
    /// fresh failure count; if the decision targets `AssetState::Revoked`, it
    /// appends `PromotionEvaluated` + `GeneRevoked` and quarantines every
    /// capsule attached to the candidate.
    fn record_replay_validation_failure(
        &self,
        best: &GeneCandidate,
        capsule: &Capsule,
        validation: &ValidationPlan,
        report: &ValidationReport,
    ) -> Result<(), ReplayError> {
        let projection = self
            .store
            .rebuild_projection()
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            Self::confidence_context(&projection, &best.gene.id);

        self.store
            .append_event(EvolutionEvent::ValidationFailed {
                mutation_id: capsule.mutation_id.clone(),
                report: report.to_snapshot(&validation.profile),
                gene_id: Some(best.gene.id.clone()),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;

        // Counted after the append above, so this failure is included.
        let replay_failures = self.replay_failure_count(&best.gene.id)?;
        let governor_decision = self.governor.evaluate(GovernorInput {
            candidate_source: if self.publisher_for_gene(&best.gene.id).is_some() {
                CandidateSource::Remote
            } else {
                CandidateSource::Local
            },
            success_count: 0,
            blast_radius: BlastRadius {
                files_changed: capsule.outcome.changed_files.len(),
                lines_changed: capsule.outcome.lines_changed,
            },
            replay_failures,
            recent_mutation_ages_secs: Vec::new(),
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        if matches!(governor_decision.target_state, AssetState::Revoked) {
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: best.gene.id.clone(),
                    state: AssetState::Revoked,
                    reason: governor_decision.reason.clone(),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: best.gene.id.clone(),
                    reason: governor_decision.reason,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            // A revoked gene takes all of its capsules out of circulation.
            for related in &best.capsules {
                self.store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: related.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
        }

        Ok(())
    }

    /// Confidence inputs for the governor, derived from the projection:
    /// `(current, historical_peak, seconds_since_last_update)`.
    ///
    /// NOTE: the current confidence is approximated by the historical peak —
    /// both tuple slots deliberately carry the same max-over-capsules value.
    fn confidence_context(
        projection: &EvolutionProjection,
        gene_id: &str,
    ) -> (f32, f32, Option<u64>) {
        let peak_confidence = projection
            .capsules
            .iter()
            .filter(|capsule| capsule.gene_id == gene_id)
            .map(|capsule| capsule.confidence)
            .fold(0.0_f32, f32::max);
        let age_secs = projection
            .last_updated_at
            .get(gene_id)
            .and_then(|timestamp| Self::seconds_since_timestamp(timestamp, Utc::now()));
        (peak_confidence, peak_confidence, age_secs)
    }

    /// Whole seconds elapsed from an RFC 3339 `timestamp` to `now`.
    /// Returns `None` on parse failure; timestamps in the future clamp to 0.
    fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
        let parsed = DateTime::parse_from_rfc3339(timestamp)
            .ok()?
            .with_timezone(&Utc);
        let elapsed = now.signed_duration_since(parsed);
        if elapsed < Duration::zero() {
            Some(0)
        } else {
            u64::try_from(elapsed.num_seconds()).ok()
        }
    }

    /// Total `ValidationFailed` events recorded for `gene_id`, scanning the
    /// event log from sequence 1 (i.e. the whole retained history).
    fn replay_failure_count(&self, gene_id: &str) -> Result<u64, ReplayError> {
        Ok(self
            .store
            .scan(1)
            .map_err(|err| ReplayError::Store(err.to_string()))?
            .into_iter()
            .filter(|stored| {
                matches!(
                    &stored.event,
                    EvolutionEvent::ValidationFailed {
                        gene_id: Some(current_gene_id),
                        ..
                    } if current_gene_id == gene_id
                )
            })
            .count() as u64)
    }
}
1004
/// Errors surfaced by EvoKernel orchestration and the evolution network node.
#[derive(Debug, Error)]
pub enum EvoKernelError {
    /// The sandbox failed to apply a mutation.
    #[error("sandbox error: {0}")]
    Sandbox(String),
    /// The validator could not run (infrastructure failure, not a verdict).
    #[error("validation error: {0}")]
    Validation(String),
    /// Validation ran to completion and rejected the mutation; the full
    /// report is carried so callers can inspect the failing stages.
    #[error("validation failed")]
    ValidationFailed(ValidationReport),
    /// The evolution store rejected a read or append.
    #[error("store error: {0}")]
    Store(String),
}
1016
/// Result of a successful mutation capture: the committed capsule, the gene it
/// was projected into, and the governor's promotion/quarantine decision.
#[derive(Clone, Debug)]
pub struct CaptureOutcome {
    /// Capsule committed to the store for this mutation.
    pub capsule: Capsule,
    /// Gene derived from the mutation's signals and receipt.
    pub gene: Gene,
    /// Target asset state and reasoning produced by the governor.
    pub governor_decision: GovernorDecision,
}
1023
/// Result of importing a remote evolution envelope into the local store.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ImportOutcome {
    /// Identifiers of the assets that were actually imported.
    pub imported_asset_ids: Vec<String>,
    /// Whether the envelope was accepted overall.
    pub accepted: bool,
}
1029
/// Point-in-time metrics derived from the evolution event log.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct EvolutionMetricsSnapshot {
    /// Total capsule replay attempts recorded.
    pub replay_attempts_total: u64,
    /// Replay attempts that validated successfully.
    pub replay_success_total: u64,
    /// `replay_success_total / replay_attempts_total` (implementation-defined
    /// when no attempts exist — computed by `evolution_metrics_snapshot`).
    pub replay_success_rate: f64,
    /// Total mutations declared.
    pub mutation_declared_total: u64,
    /// Mutations whose gene reached the promoted state.
    pub promoted_mutations_total: u64,
    /// Ratio of promoted mutations to declared mutations.
    pub promotion_ratio: f64,
    /// Total gene revocations recorded.
    pub gene_revocations_total: u64,
    /// Mutations declared within the trailing hour.
    pub mutation_velocity_last_hour: u64,
    /// Revocations recorded within the trailing hour.
    pub revoke_frequency_last_hour: u64,
    /// Genes currently in the promoted state.
    pub promoted_genes: u64,
    /// Capsules currently in the promoted state.
    pub promoted_capsules: u64,
    /// Sequence number of the most recent event folded into this snapshot.
    pub last_event_seq: u64,
}
1045
/// Coarse health summary derived from an [`EvolutionMetricsSnapshot`].
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct EvolutionHealthSnapshot {
    /// Overall health status string (derived by `evolution_health_snapshot`).
    pub status: String,
    /// Sequence number of the most recent event observed.
    pub last_event_seq: u64,
    /// Genes currently in the promoted state.
    pub promoted_genes: u64,
    /// Capsules currently in the promoted state.
    pub promoted_capsules: u64,
}
1053
/// Cheaply cloneable handle over an evolution store that services network
/// traffic: publish, fetch, revoke, and metrics/health queries.
#[derive(Clone)]
pub struct EvolutionNetworkNode {
    /// Shared backing event store.
    pub store: Arc<dyn EvolutionStore>,
}
1058
1059impl EvolutionNetworkNode {
1060    pub fn new(store: Arc<dyn EvolutionStore>) -> Self {
1061        Self { store }
1062    }
1063
1064    pub fn with_default_store() -> Self {
1065        Self {
1066            store: Arc::new(JsonlEvolutionStore::new(default_store_root())),
1067        }
1068    }
1069
1070    pub fn accept_publish_request(
1071        &self,
1072        request: &PublishRequest,
1073    ) -> Result<ImportOutcome, EvoKernelError> {
1074        import_remote_envelope_into_store(
1075            self.store.as_ref(),
1076            &EvolutionEnvelope::publish(request.sender_id.clone(), request.assets.clone()),
1077        )
1078    }
1079
1080    pub fn publish_local_assets(
1081        &self,
1082        sender_id: impl Into<String>,
1083    ) -> Result<EvolutionEnvelope, EvoKernelError> {
1084        export_promoted_assets_from_store(self.store.as_ref(), sender_id)
1085    }
1086
1087    pub fn fetch_assets(
1088        &self,
1089        responder_id: impl Into<String>,
1090        query: &FetchQuery,
1091    ) -> Result<FetchResponse, EvoKernelError> {
1092        fetch_assets_from_store(self.store.as_ref(), responder_id, query)
1093    }
1094
1095    pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
1096        revoke_assets_in_store(self.store.as_ref(), notice)
1097    }
1098
1099    pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
1100        evolution_metrics_snapshot(self.store.as_ref())
1101    }
1102
1103    pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
1104        self.metrics_snapshot().map(|snapshot| {
1105            let health = evolution_health_snapshot(&snapshot);
1106            render_evolution_metrics_prometheus(&snapshot, &health)
1107        })
1108    }
1109
1110    pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
1111        self.metrics_snapshot()
1112            .map(|snapshot| evolution_health_snapshot(&snapshot))
1113    }
1114}
1115
/// Orchestrates the evolution loop on top of a base kernel: sandboxed mutation
/// capture, validation, capsule construction, and replay-first reuse.
pub struct EvoKernel<S: KernelState> {
    /// Base execution kernel being evolved.
    pub kernel: Arc<Kernel<S>>,
    /// Applies mutations under policy and produces receipts.
    pub sandbox: Arc<dyn Sandbox>,
    /// Runs the validation plan against sandbox receipts.
    pub validator: Arc<dyn Validator>,
    /// Append-only evolution event log and projection source.
    pub store: Arc<dyn EvolutionStore>,
    /// Ranks gene candidates for replay.
    pub selector: Arc<dyn Selector>,
    /// Decides promotion / quarantine / revocation of assets.
    pub governor: Arc<dyn Governor>,
    /// EVU ledger used for stake reservation and reputation bias.
    pub economics: Arc<Mutex<EvuLedger>>,
    /// Maps gene id -> remote publisher id for imported assets.
    pub remote_publishers: Arc<Mutex<BTreeMap<String, String>>>,
    /// Stake rules applied to remote publish and reuse settlement.
    pub stake_policy: StakePolicy,
    /// Constraints applied when the sandbox executes mutations.
    pub sandbox_policy: SandboxPolicy,
    /// Validation stages/profile run after each mutation.
    pub validation_plan: ValidationPlan,
}
1129
impl<S: KernelState> EvoKernel<S> {
    /// Ages in seconds (sorted ascending) of previously declared mutations,
    /// scanning the event log from sequence 1. The mutation named by
    /// `exclude_mutation_id` is skipped so a capture does not count itself.
    fn recent_prior_mutation_ages_secs(
        &self,
        exclude_mutation_id: Option<&str>,
    ) -> Result<Vec<u64>, EvolutionError> {
        let now = Utc::now();
        let mut ages = self
            .store
            .scan(1)?
            .into_iter()
            .filter_map(|stored| match stored.event {
                EvolutionEvent::MutationDeclared { mutation }
                    if exclude_mutation_id != Some(mutation.intent.id.as_str()) =>
                {
                    Self::seconds_since_timestamp(&stored.timestamp, now)
                }
                _ => None,
            })
            .collect::<Vec<_>>();
        ages.sort_unstable();
        Ok(ages)
    }

    /// Whole seconds elapsed from an RFC 3339 `timestamp` to `now`.
    /// Returns `None` on parse failure; timestamps in the future clamp to 0.
    /// (Duplicated in `StoreReplayExecutor` — candidates for a shared helper.)
    fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
        let parsed = DateTime::parse_from_rfc3339(timestamp)
            .ok()?
            .with_timezone(&Utc);
        let elapsed = now.signed_duration_since(parsed);
        if elapsed < Duration::zero() {
            Some(0)
        } else {
            u64::try_from(elapsed.num_seconds()).ok()
        }
    }

    /// Builds an EvoKernel with default collaborators: a store-backed
    /// selector, the default governor, a fresh EVU ledger, and the Oris
    /// default stake/sandbox/validation policies. Override any of them via
    /// the `with_*` builder methods.
    pub fn new(
        kernel: Arc<Kernel<S>>,
        sandbox: Arc<dyn Sandbox>,
        validator: Arc<dyn Validator>,
        store: Arc<dyn EvolutionStore>,
    ) -> Self {
        let selector: Arc<dyn Selector> = Arc::new(StoreBackedSelector::new(store.clone()));
        Self {
            kernel,
            sandbox,
            validator,
            store,
            selector,
            governor: Arc::new(DefaultGovernor::default()),
            economics: Arc::new(Mutex::new(EvuLedger::default())),
            remote_publishers: Arc::new(Mutex::new(BTreeMap::new())),
            stake_policy: StakePolicy::default(),
            sandbox_policy: SandboxPolicy::oris_default(),
            validation_plan: ValidationPlan::oris_default(),
        }
    }

    /// Replaces the candidate selector.
    pub fn with_selector(mut self, selector: Arc<dyn Selector>) -> Self {
        self.selector = selector;
        self
    }

    /// Replaces the sandbox execution policy.
    pub fn with_sandbox_policy(mut self, policy: SandboxPolicy) -> Self {
        self.sandbox_policy = policy;
        self
    }

    /// Replaces the governor.
    pub fn with_governor(mut self, governor: Arc<dyn Governor>) -> Self {
        self.governor = governor;
        self
    }

    /// Replaces the shared economics ledger.
    pub fn with_economics(mut self, economics: Arc<Mutex<EvuLedger>>) -> Self {
        self.economics = economics;
        self
    }

    /// Replaces the stake policy.
    pub fn with_stake_policy(mut self, policy: StakePolicy) -> Self {
        self.stake_policy = policy;
        self
    }

    /// Replaces the validation plan.
    pub fn with_validation_plan(mut self, plan: ValidationPlan) -> Self {
        self.validation_plan = plan;
        self
    }

    /// Selects replay candidates: selector output merged with bootstrap seed
    /// candidates (deduplicated by gene id), sorted by descending score with a
    /// gene-id tie-break, then truncated to `input.limit` (minimum 1).
    pub fn select_candidates(&self, input: &SelectorInput) -> Vec<GeneCandidate> {
        let mut candidates = self.selector.select(input);
        let mut seen = candidates
            .iter()
            .map(|candidate| candidate.gene.id.clone())
            .collect::<BTreeSet<_>>();
        // Seeds only fill gaps; selector results win on duplicate gene ids.
        for candidate in bootstrap_seed_candidates(self.store.as_ref(), input) {
            if seen.insert(candidate.gene.id.clone()) {
                candidates.push(candidate);
            }
        }
        candidates.sort_by(|left, right| {
            right
                .score
                .partial_cmp(&left.score)
                .unwrap_or(std::cmp::Ordering::Equal)
                .then_with(|| left.gene.id.cmp(&right.gene.id))
        });
        candidates.truncate(input.limit.max(1));
        candidates
    }

    /// Seeds the store with built-in templates when the projection contains no
    /// genes at all; otherwise returns a default (no-op) report.
    ///
    /// Every seed is committed in the quarantined state — the
    /// `PromotionEvaluated` reason records that seeds must pass local
    /// validation before they are eligible for replay.
    pub fn bootstrap_if_empty(&self, run_id: &RunId) -> Result<BootstrapReport, EvoKernelError> {
        let projection = self.store.rebuild_projection().map_err(store_err)?;
        if !projection.genes.is_empty() {
            return Ok(BootstrapReport::default());
        }

        let templates = built_in_seed_templates();
        for template in &templates {
            let mutation = build_seed_mutation(template);
            let extracted = extract_seed_signals(template);
            let gene = build_bootstrap_gene(template, &extracted)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
            let capsule = build_bootstrap_capsule(run_id, template, &mutation, &gene)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;

            // Event order mirrors a real capture: declare, extract signals,
            // project the gene, record the promotion decision, commit and
            // quarantine the capsule.
            self.store
                .append_event(EvolutionEvent::MutationDeclared {
                    mutation: mutation.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::SignalsExtracted {
                    mutation_id: mutation.intent.id.clone(),
                    hash: extracted.hash.clone(),
                    signals: extracted.values.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: gene.id.clone(),
                    state: AssetState::Quarantined,
                    reason: "bootstrap seeds require local validation before replay".into(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleCommitted {
                    capsule: capsule.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id,
                })
                .map_err(store_err)?;
        }

        Ok(BootstrapReport {
            seeded: true,
            genes_added: templates.len(),
            capsules_added: templates.len(),
        })
    }

    /// Convenience wrapper around [`Self::capture_mutation_with_governor`]
    /// that keeps only the committed capsule.
    pub async fn capture_successful_mutation(
        &self,
        run_id: &RunId,
        mutation: PreparedMutation,
    ) -> Result<Capsule, EvoKernelError> {
        Ok(self
            .capture_mutation_with_governor(run_id, mutation)
            .await?
            .capsule)
    }

    /// Runs the full capture pipeline for a prepared mutation.
    ///
    /// Event sequence on success: `MutationDeclared` -> sandbox apply ->
    /// `MutationApplied` -> `ValidationPassed` -> `SignalsExtracted` ->
    /// `GeneProjected` + `PromotionEvaluated` (+ `GenePromoted`, `SpecLinked`
    /// when applicable) -> `CapsuleCommitted` (+ `CapsuleQuarantined`).
    ///
    /// Failure paths still leave a trail: a sandbox failure appends
    /// `MutationRejected`; a validation verdict failure appends
    /// `ValidationFailed` and returns [`EvoKernelError::ValidationFailed`].
    pub async fn capture_mutation_with_governor(
        &self,
        run_id: &RunId,
        mutation: PreparedMutation,
    ) -> Result<CaptureOutcome, EvoKernelError> {
        self.store
            .append_event(EvolutionEvent::MutationDeclared {
                mutation: mutation.clone(),
            })
            .map_err(store_err)?;

        let receipt = match self.sandbox.apply(&mutation, &self.sandbox_policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                self.store
                    .append_event(EvolutionEvent::MutationRejected {
                        mutation_id: mutation.intent.id.clone(),
                        reason: err.to_string(),
                    })
                    .map_err(store_err)?;
                return Err(EvoKernelError::Sandbox(err.to_string()));
            }
        };

        self.store
            .append_event(EvolutionEvent::MutationApplied {
                mutation_id: mutation.intent.id.clone(),
                patch_hash: receipt.patch_hash.clone(),
                changed_files: receipt
                    .changed_files
                    .iter()
                    .map(|path| path.to_string_lossy().to_string())
                    .collect(),
            })
            .map_err(store_err)?;

        let report = self
            .validator
            .run(&receipt, &self.validation_plan)
            .await
            .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
        if !report.success {
            self.store
                .append_event(EvolutionEvent::ValidationFailed {
                    mutation_id: mutation.intent.id.clone(),
                    report: report.to_snapshot(&self.validation_plan.profile),
                    gene_id: None,
                })
                .map_err(store_err)?;
            return Err(EvoKernelError::ValidationFailed(report));
        }

        self.store
            .append_event(EvolutionEvent::ValidationPassed {
                mutation_id: mutation.intent.id.clone(),
                report: report.to_snapshot(&self.validation_plan.profile),
                gene_id: None,
            })
            .map_err(store_err)?;

        // Deterministic signal extraction feeds gene derivation below.
        let extracted_signals = extract_deterministic_signals(&SignalExtractionInput {
            patch_diff: mutation.artifact.payload.clone(),
            intent: mutation.intent.intent.clone(),
            expected_effect: mutation.intent.expected_effect.clone(),
            declared_signals: mutation.intent.signals.clone(),
            changed_files: receipt
                .changed_files
                .iter()
                .map(|path| path.to_string_lossy().to_string())
                .collect(),
            validation_success: report.success,
            validation_logs: report.logs.clone(),
            stage_outputs: report
                .stages
                .iter()
                .flat_map(|stage| [stage.stdout.clone(), stage.stderr.clone()])
                .filter(|value| !value.is_empty())
                .collect(),
        });
        self.store
            .append_event(EvolutionEvent::SignalsExtracted {
                mutation_id: mutation.intent.id.clone(),
                hash: extracted_signals.hash.clone(),
                signals: extracted_signals.values.clone(),
            })
            .map_err(store_err)?;

        let projection = self.store.rebuild_projection().map_err(store_err)?;
        let blast_radius = compute_blast_radius(&mutation.artifact.payload);
        let recent_mutation_ages_secs = self
            .recent_prior_mutation_ages_secs(Some(mutation.intent.id.as_str()))
            .map_err(store_err)?;
        let mut gene = derive_gene(
            &mutation,
            &receipt,
            &self.validation_plan.profile,
            &extracted_signals.values,
        );
        // Prior capsule count for this gene, plus one for the capture in
        // flight (the projection does not yet contain it).
        let success_count = projection
            .genes
            .iter()
            .find(|existing| existing.id == gene.id)
            .map(|existing| {
                projection
                    .capsules
                    .iter()
                    .filter(|capsule| capsule.gene_id == existing.id)
                    .count() as u64
            })
            .unwrap_or(0)
            + 1;
        // 0.7 is a fixed baseline confidence for a freshly validated local
        // mutation — NOTE(review): confirm this matches governor thresholds.
        let governor_decision = self.governor.evaluate(GovernorInput {
            candidate_source: CandidateSource::Local,
            success_count,
            blast_radius: blast_radius.clone(),
            replay_failures: 0,
            recent_mutation_ages_secs,
            current_confidence: 0.7,
            historical_peak_confidence: 0.7,
            confidence_last_updated_secs: Some(0),
        });

        // The governor's target state is stamped onto both gene and capsule.
        gene.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
            .map_err(store_err)?;
        self.store
            .append_event(EvolutionEvent::PromotionEvaluated {
                gene_id: gene.id.clone(),
                state: governor_decision.target_state.clone(),
                reason: governor_decision.reason.clone(),
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Promoted) {
            self.store
                .append_event(EvolutionEvent::GenePromoted {
                    gene_id: gene.id.clone(),
                })
                .map_err(store_err)?;
        }
        if let Some(spec_id) = &mutation.intent.spec_id {
            self.store
                .append_event(EvolutionEvent::SpecLinked {
                    mutation_id: mutation.intent.id.clone(),
                    spec_id: spec_id.clone(),
                })
                .map_err(store_err)?;
        }

        let mut capsule = build_capsule(
            run_id,
            &mutation,
            &receipt,
            &report,
            &self.validation_plan.profile,
            &gene,
            &blast_radius,
        )
        .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
        capsule.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::CapsuleCommitted {
                capsule: capsule.clone(),
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Quarantined) {
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id.clone(),
                })
                .map_err(store_err)?;
        }

        Ok(CaptureOutcome {
            capsule,
            gene,
            governor_decision,
        })
    }

    /// Converts an agent's mutation proposal into a low-risk `MutationIntent`
    /// (path-scoped to the proposal's files) and runs the full capture
    /// pipeline on it.
    pub async fn capture_from_proposal(
        &self,
        run_id: &RunId,
        proposal: &AgentMutationProposal,
        diff_payload: String,
        base_revision: Option<String>,
    ) -> Result<CaptureOutcome, EvoKernelError> {
        let intent = MutationIntent {
            id: next_id("proposal"),
            intent: proposal.intent.clone(),
            target: MutationTarget::Paths {
                allow: proposal.files.clone(),
            },
            expected_effect: proposal.expected_effect.clone(),
            risk: RiskLevel::Low,
            // Proposal file paths double as selector signals.
            signals: proposal.files.clone(),
            spec_id: None,
        };
        self.capture_mutation_with_governor(
            run_id,
            prepare_mutation(intent, diff_payload, base_revision),
        )
        .await
    }

    /// Translates a capture outcome into agent-facing feedback: accepted
    /// unless the governor revoked the asset.
    pub fn feedback_for_agent(outcome: &CaptureOutcome) -> ExecutionFeedback {
        ExecutionFeedback {
            accepted: !matches!(outcome.governor_decision.target_state, AssetState::Revoked),
            asset_state: Some(format!("{:?}", outcome.governor_decision.target_state)),
            summary: outcome.governor_decision.reason.clone(),
        }
    }

    /// Runs a coordination plan through a fresh multi-agent coordinator.
    pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
        MultiAgentCoordinator::new().coordinate(plan)
    }

    /// Exports promoted assets for remote publication.
    ///
    /// When the envelope is non-empty, a publish stake is reserved against
    /// `sender_id` first; insufficient EVU (or a poisoned ledger lock) aborts
    /// the publish with a `Validation` error.
    pub fn export_promoted_assets(
        &self,
        sender_id: impl Into<String>,
    ) -> Result<EvolutionEnvelope, EvoKernelError> {
        let sender_id = sender_id.into();
        let envelope = export_promoted_assets_from_store(self.store.as_ref(), sender_id.clone())?;
        if !envelope.assets.is_empty() {
            let mut ledger = self
                .economics
                .lock()
                .map_err(|_| EvoKernelError::Validation("economics ledger lock poisoned".into()))?;
            if ledger
                .reserve_publish_stake(&sender_id, &self.stake_policy)
                .is_none()
            {
                return Err(EvoKernelError::Validation(
                    "insufficient EVU for remote publish".into(),
                ));
            }
        }
        Ok(envelope)
    }

    /// Imports a remote envelope into the store and records which publisher
    /// each imported gene came from (for later reputation/settlement).
    pub fn import_remote_envelope(
        &self,
        envelope: &EvolutionEnvelope,
    ) -> Result<ImportOutcome, EvoKernelError> {
        let outcome = import_remote_envelope_into_store(self.store.as_ref(), envelope)?;
        self.record_remote_publishers(envelope);
        Ok(outcome)
    }

    /// Answers a fetch query against the local store.
    pub fn fetch_assets(
        &self,
        responder_id: impl Into<String>,
        query: &FetchQuery,
    ) -> Result<FetchResponse, EvoKernelError> {
        fetch_assets_from_store(self.store.as_ref(), responder_id, query)
    }

    /// Applies a revocation notice to the local store.
    pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
        revoke_assets_in_store(self.store.as_ref(), notice)
    }

    /// Attempts to satisfy the request by replaying a stored capsule; the
    /// returned decision says whether a capsule was used or the caller should
    /// fall back to the planner.
    pub async fn replay_or_fallback(
        &self,
        input: SelectorInput,
    ) -> Result<ReplayDecision, EvoKernelError> {
        // Delegate to a throwaway executor sharing this kernel's collaborators.
        let executor = StoreReplayExecutor {
            sandbox: self.sandbox.clone(),
            validator: self.validator.clone(),
            store: self.store.clone(),
            selector: self.selector.clone(),
            governor: self.governor.clone(),
            economics: Some(self.economics.clone()),
            remote_publishers: Some(self.remote_publishers.clone()),
            stake_policy: self.stake_policy.clone(),
        };
        executor
            .try_replay(&input, &self.sandbox_policy, &self.validation_plan)
            .await
            .map_err(|err| EvoKernelError::Validation(err.to_string()))
    }

    /// Economics signal for `node_id`, or `None` when the ledger lock is
    /// poisoned or the ledger has no signal for the node.
    pub fn economics_signal(&self, node_id: &str) -> Option<EconomicsSignal> {
        self.economics.lock().ok()?.governor_signal(node_id)
    }

    /// Per-publisher reputation bias for candidate ranking; empty when the
    /// ledger lock is poisoned.
    pub fn selector_reputation_bias(&self) -> BTreeMap<String, f32> {
        self.economics
            .lock()
            .ok()
            .map(|locked| locked.selector_reputation_bias())
            .unwrap_or_default()
    }

    /// Current metrics derived from the store's event log.
    pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
        evolution_metrics_snapshot(self.store.as_ref())
    }

    /// Renders metrics (plus derived health) in Prometheus exposition format.
    pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
        self.metrics_snapshot().map(|snapshot| {
            let health = evolution_health_snapshot(&snapshot);
            render_evolution_metrics_prometheus(&snapshot, &health)
        })
    }

    /// Health status derived from the current metrics snapshot.
    pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
        self.metrics_snapshot()
            .map(|snapshot| evolution_health_snapshot(&snapshot))
    }

    /// Records `envelope.sender_id` as the publisher of every gene the
    /// envelope carries (directly, or via a capsule's `gene_id`). Skips
    /// envelopes with a blank sender; a poisoned map lock drops the update.
    fn record_remote_publishers(&self, envelope: &EvolutionEnvelope) {
        let sender_id = envelope.sender_id.trim();
        if sender_id.is_empty() {
            return;
        }
        let Ok(mut publishers) = self.remote_publishers.lock() else {
            return;
        };
        for asset in &envelope.assets {
            match asset {
                NetworkAsset::Gene { gene } => {
                    publishers.insert(gene.id.clone(), sender_id.to_string());
                }
                NetworkAsset::Capsule { capsule } => {
                    publishers.insert(capsule.gene_id.clone(), sender_id.to_string());
                }
                NetworkAsset::EvolutionEvent { .. } => {}
            }
        }
    }
}
1636
1637pub fn prepare_mutation(
1638    intent: MutationIntent,
1639    diff_payload: String,
1640    base_revision: Option<String>,
1641) -> PreparedMutation {
1642    PreparedMutation {
1643        intent,
1644        artifact: MutationArtifact {
1645            encoding: ArtifactEncoding::UnifiedDiff,
1646            content_hash: compute_artifact_hash(&diff_payload),
1647            payload: diff_payload,
1648            base_revision,
1649        },
1650    }
1651}
1652
1653pub fn prepare_mutation_from_spec(
1654    plan: CompiledMutationPlan,
1655    diff_payload: String,
1656    base_revision: Option<String>,
1657) -> PreparedMutation {
1658    prepare_mutation(plan.mutation_intent, diff_payload, base_revision)
1659}
1660
1661pub fn default_evolution_store() -> Arc<dyn EvolutionStore> {
1662    Arc::new(oris_evolution::JsonlEvolutionStore::new(
1663        default_store_root(),
1664    ))
1665}
1666
/// Built-in bootstrap seed templates used to pre-populate an empty evolution
/// store with low-risk, quarantined starter genes/capsules.
///
/// Each template carries an id, trigger signals, a small unified-diff
/// payload, and the shared "bootstrap-seed" validation profile.
fn built_in_seed_templates() -> Vec<SeedTemplate> {
    vec![
        // Creates a minimal README from scratch (new-file diff).
        SeedTemplate {
            id: "bootstrap-readme".into(),
            intent: "Seed a baseline README recovery pattern".into(),
            signals: vec!["bootstrap readme".into(), "missing readme".into()],
            diff_payload: "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1,3 @@
+# Oris
+Bootstrap documentation seed
+"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Adds a helper aimed at stabilizing flaky test output.
        SeedTemplate {
            id: "bootstrap-test-fix".into(),
            intent: "Seed a deterministic test stabilization pattern".into(),
            signals: vec!["bootstrap test fix".into(), "failing tests".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 1111111..2222222 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,2 @@
 pub fn demo() -> usize { 1 }
+pub fn normalize_test_output() -> bool { true }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Demonstrates a tiny, low-risk refactor (extracting a helper).
        SeedTemplate {
            id: "bootstrap-refactor".into(),
            intent: "Seed a low-risk refactor capsule".into(),
            signals: vec!["bootstrap refactor".into(), "small refactor".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 2222222..3333333 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,3 @@
 pub fn demo() -> usize { 1 }
+
+fn extract_strategy_key(input: &str) -> &str { input }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Introduces a baseline logging hook.
        SeedTemplate {
            id: "bootstrap-logging".into(),
            intent: "Seed a baseline structured logging mutation".into(),
            signals: vec!["bootstrap logging".into(), "structured logs".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 3333333..4444444 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,3 @@
 pub fn demo() -> usize { 1 }
+
+fn emit_bootstrap_log() { println!(\"bootstrap-log\"); }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
    ]
}
1738
1739fn build_seed_mutation(template: &SeedTemplate) -> PreparedMutation {
1740    let changed_files = seed_changed_files(&template.diff_payload);
1741    let target = if changed_files.is_empty() {
1742        MutationTarget::WorkspaceRoot
1743    } else {
1744        MutationTarget::Paths {
1745            allow: changed_files,
1746        }
1747    };
1748    prepare_mutation(
1749        MutationIntent {
1750            id: stable_hash_json(&("bootstrap-mutation", &template.id))
1751                .unwrap_or_else(|_| format!("bootstrap-mutation-{}", template.id)),
1752            intent: template.intent.clone(),
1753            target,
1754            expected_effect: format!("seed {}", template.id),
1755            risk: RiskLevel::Low,
1756            signals: template.signals.clone(),
1757            spec_id: None,
1758        },
1759        template.diff_payload.clone(),
1760        None,
1761    )
1762}
1763
1764fn extract_seed_signals(template: &SeedTemplate) -> SignalExtractionOutput {
1765    let mut signals = BTreeSet::new();
1766    for declared in &template.signals {
1767        if let Some(phrase) = normalize_signal_phrase(declared) {
1768            signals.insert(phrase);
1769        }
1770        extend_signal_tokens(&mut signals, declared);
1771    }
1772    extend_signal_tokens(&mut signals, &template.intent);
1773    extend_signal_tokens(&mut signals, &template.diff_payload);
1774    for changed_file in seed_changed_files(&template.diff_payload) {
1775        extend_signal_tokens(&mut signals, &changed_file);
1776    }
1777    let values = signals.into_iter().take(32).collect::<Vec<_>>();
1778    let hash =
1779        stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
1780    SignalExtractionOutput { values, hash }
1781}
1782
/// Extracts the sorted, deduplicated file paths a unified diff touches, based
/// on `+++ b/<path>` headers. Deletions (`+++ /dev/null`) carry no `b/`
/// prefix and are therefore skipped.
fn seed_changed_files(diff_payload: &str) -> Vec<String> {
    diff_payload
        .lines()
        .filter_map(|line| line.strip_prefix("+++ b/"))
        .map(str::trim)
        .filter(|path| !path.is_empty())
        .map(str::to_string)
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect()
}
1795
1796fn build_bootstrap_gene(
1797    template: &SeedTemplate,
1798    extracted: &SignalExtractionOutput,
1799) -> Result<Gene, EvolutionError> {
1800    let strategy = vec![template.id.clone(), "bootstrap".into()];
1801    let id = stable_hash_json(&(
1802        "bootstrap-gene",
1803        &template.id,
1804        &extracted.values,
1805        &template.validation_profile,
1806    ))?;
1807    Ok(Gene {
1808        id,
1809        signals: extracted.values.clone(),
1810        strategy,
1811        validation: vec![template.validation_profile.clone()],
1812        state: AssetState::Quarantined,
1813    })
1814}
1815
/// Builds the quarantined capsule paired with a bootstrap seed gene.
///
/// The capsule id is a stable hash over template, run, gene, diff hash, and
/// the current environment fingerprint; the validator hash fingerprints the
/// seed's validation identity. Seed capsules start with zero confidence,
/// `success: false`, and `replay_verified: false` — they must prove
/// themselves via replay before promotion.
fn build_bootstrap_capsule(
    run_id: &RunId,
    template: &SeedTemplate,
    mutation: &PreparedMutation,
    gene: &Gene,
) -> Result<Capsule, EvolutionError> {
    // Fingerprint from the current working directory; "." fallback keeps this
    // infallible.
    let cwd = std::env::current_dir().unwrap_or_else(|_| Path::new(".").to_path_buf());
    let env = current_env_fingerprint(&cwd);
    let diff_hash = mutation.artifact.content_hash.clone();
    let changed_files = seed_changed_files(&template.diff_payload);
    let validator_hash = stable_hash_json(&(
        "bootstrap-validator",
        &template.id,
        &template.validation_profile,
        &diff_hash,
    ))?;
    let id = stable_hash_json(&(
        "bootstrap-capsule",
        &template.id,
        run_id,
        &gene.id,
        &diff_hash,
        &env,
    ))?;
    Ok(Capsule {
        id,
        gene_id: gene.id.clone(),
        mutation_id: mutation.intent.id.clone(),
        run_id: run_id.clone(),
        diff_hash,
        // Seeds carry no earned confidence yet.
        confidence: 0.0,
        env,
        outcome: Outcome {
            // Not a validated success — this is a synthetic seed outcome.
            success: false,
            validation_profile: template.validation_profile.clone(),
            validation_duration_ms: 0,
            changed_files,
            validator_hash,
            lines_changed: compute_blast_radius(&template.diff_payload).lines_changed,
            replay_verified: false,
        },
        state: AssetState::Quarantined,
    })
}
1860
/// Selector fallback matching QUARANTINED bootstrap genes against the input
/// signals using loose (bidirectional substring, case-insensitive) matching.
///
/// Only capsules produced under the "bootstrap-seed" validation profile
/// count. A gene's score is the better of its signal-overlap ratio and the
/// best environment-similarity of its capsules; results are sorted best-first
/// with a stable gene-id tie-break. Empty input signals or a failed
/// projection rebuild yield no candidates.
fn bootstrap_seed_candidates(
    store: &dyn EvolutionStore,
    input: &SelectorInput,
) -> Vec<GeneCandidate> {
    let normalized_signals = input
        .signals
        .iter()
        .map(|signal| signal.trim().to_ascii_lowercase())
        .filter(|signal| !signal.is_empty())
        .collect::<Vec<_>>();
    if normalized_signals.is_empty() {
        return Vec::new();
    }

    let Ok(projection) = store.rebuild_projection() else {
        return Vec::new();
    };
    // NOTE(review): this clones all capsules so `projection.genes` can be
    // moved below; destructuring the projection would avoid the copy —
    // confirm EvolutionProjection has no Drop impl before changing.
    let capsules = projection.capsules.clone();
    let mut candidates = projection
        .genes
        .into_iter()
        .filter_map(|gene| {
            if gene.state != AssetState::Quarantined {
                return None;
            }

            // Loose match: either string may contain the other.
            let matched_signal_count = gene
                .signals
                .iter()
                .filter(|candidate| {
                    let candidate = candidate.to_ascii_lowercase();
                    normalized_signals.iter().any(|signal| {
                        candidate.contains(signal) || signal.contains(candidate.as_str())
                    })
                })
                .count();
            if matched_signal_count == 0 {
                return None;
            }

            let mut matched_capsules = capsules
                .iter()
                .filter(|capsule| {
                    capsule.gene_id == gene.id
                        && capsule.state == AssetState::Quarantined
                        && capsule.outcome.validation_profile == "bootstrap-seed"
                })
                .cloned()
                .collect::<Vec<_>>();
            if matched_capsules.is_empty() {
                return None;
            }

            // Best environment match first; capsule id breaks ties stably.
            matched_capsules.sort_by(|left, right| {
                replay_environment_match_factor(&input.env, &right.env)
                    .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
                    .unwrap_or(std::cmp::Ordering::Equal)
                    .then_with(|| left.id.cmp(&right.id))
            });

            let overlap = matched_signal_count as f32 / normalized_signals.len() as f32;
            let env_score = matched_capsules
                .first()
                .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
                .unwrap_or(0.0);
            Some(GeneCandidate {
                gene,
                score: overlap.max(env_score),
                capsules: matched_capsules,
            })
        })
        .collect::<Vec<_>>();
    candidates.sort_by(|left, right| {
        right
            .score
            .partial_cmp(&left.score)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| left.gene.id.cmp(&right.gene.id))
    });
    candidates
}
1942
1943fn derive_gene(
1944    mutation: &PreparedMutation,
1945    receipt: &SandboxReceipt,
1946    validation_profile: &str,
1947    extracted_signals: &[String],
1948) -> Gene {
1949    let mut strategy = BTreeSet::new();
1950    for file in &receipt.changed_files {
1951        if let Some(component) = file.components().next() {
1952            strategy.insert(component.as_os_str().to_string_lossy().to_string());
1953        }
1954    }
1955    for token in mutation
1956        .artifact
1957        .payload
1958        .split(|ch: char| !ch.is_ascii_alphanumeric())
1959    {
1960        if token.len() == 5
1961            && token.starts_with('E')
1962            && token[1..].chars().all(|ch| ch.is_ascii_digit())
1963        {
1964            strategy.insert(token.to_string());
1965        }
1966    }
1967    for token in mutation.intent.intent.split_whitespace().take(8) {
1968        strategy.insert(token.to_ascii_lowercase());
1969    }
1970    let strategy = strategy.into_iter().collect::<Vec<_>>();
1971    let id = stable_hash_json(&(extracted_signals, &strategy, validation_profile))
1972        .unwrap_or_else(|_| next_id("gene"));
1973    Gene {
1974        id,
1975        signals: extracted_signals.to_vec(),
1976        strategy,
1977        validation: vec![validation_profile.to_string()],
1978        state: AssetState::Promoted,
1979    }
1980}
1981
/// Builds a promoted capsule for a mutation that passed validation.
///
/// The capsule id is a stable hash of (run, gene, diff hash, mutation id);
/// the validator hash fingerprints the full validation report. Confidence
/// starts at a fixed 0.7 and `replay_verified` at false — replay verification
/// happens in a later step.
fn build_capsule(
    run_id: &RunId,
    mutation: &PreparedMutation,
    receipt: &SandboxReceipt,
    report: &ValidationReport,
    validation_profile: &str,
    gene: &Gene,
    blast_radius: &BlastRadius,
) -> Result<Capsule, EvolutionError> {
    // Environment is fingerprinted from the sandbox workdir the mutation
    // actually ran in.
    let env = current_env_fingerprint(&receipt.workdir);
    let validator_hash = stable_hash_json(report)?;
    let diff_hash = mutation.artifact.content_hash.clone();
    let id = stable_hash_json(&(run_id, &gene.id, &diff_hash, &mutation.intent.id))?;
    Ok(Capsule {
        id,
        gene_id: gene.id.clone(),
        mutation_id: mutation.intent.id.clone(),
        run_id: run_id.clone(),
        diff_hash,
        // Initial confidence for a freshly validated (not yet replayed) capsule.
        confidence: 0.7,
        env,
        outcome: oris_evolution::Outcome {
            success: true,
            validation_profile: validation_profile.to_string(),
            validation_duration_ms: report.duration_ms,
            changed_files: receipt
                .changed_files
                .iter()
                .map(|path| path.to_string_lossy().to_string())
                .collect(),
            validator_hash,
            lines_changed: blast_radius.lines_changed,
            replay_verified: false,
        },
        state: AssetState::Promoted,
    })
}
2019
2020fn current_env_fingerprint(workdir: &Path) -> EnvFingerprint {
2021    let rustc_version = Command::new("rustc")
2022        .arg("--version")
2023        .output()
2024        .ok()
2025        .filter(|output| output.status.success())
2026        .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
2027        .unwrap_or_else(|| "rustc unknown".into());
2028    let cargo_lock_hash = fs::read(workdir.join("Cargo.lock"))
2029        .ok()
2030        .map(|bytes| {
2031            let value = String::from_utf8_lossy(&bytes);
2032            compute_artifact_hash(&value)
2033        })
2034        .unwrap_or_else(|| "missing-cargo-lock".into());
2035    let target_triple = format!(
2036        "{}-unknown-{}",
2037        std::env::consts::ARCH,
2038        std::env::consts::OS
2039    );
2040    EnvFingerprint {
2041        rustc_version,
2042        cargo_lock_hash,
2043        target_triple,
2044        os: std::env::consts::OS.to_string(),
2045    }
2046}
2047
2048fn extend_signal_tokens(out: &mut BTreeSet<String>, input: &str) {
2049    for raw in input.split(|ch: char| !ch.is_ascii_alphanumeric()) {
2050        let trimmed = raw.trim();
2051        if trimmed.is_empty() {
2052            continue;
2053        }
2054        let normalized = if is_rust_error_code(trimmed) {
2055            let mut chars = trimmed.chars();
2056            let prefix = chars
2057                .next()
2058                .map(|ch| ch.to_ascii_uppercase())
2059                .unwrap_or('E');
2060            format!("{prefix}{}", chars.as_str())
2061        } else {
2062            trimmed.to_ascii_lowercase()
2063        };
2064        if normalized.len() < 3 {
2065            continue;
2066        }
2067        out.insert(normalized);
2068    }
2069}
2070
2071fn normalize_signal_phrase(input: &str) -> Option<String> {
2072    let normalized = input
2073        .split(|ch: char| !ch.is_ascii_alphanumeric())
2074        .filter_map(|raw| {
2075            let trimmed = raw.trim();
2076            if trimmed.is_empty() {
2077                return None;
2078            }
2079            let normalized = if is_rust_error_code(trimmed) {
2080                let mut chars = trimmed.chars();
2081                let prefix = chars
2082                    .next()
2083                    .map(|ch| ch.to_ascii_uppercase())
2084                    .unwrap_or('E');
2085                format!("{prefix}{}", chars.as_str())
2086            } else {
2087                trimmed.to_ascii_lowercase()
2088            };
2089            if normalized.len() < 3 {
2090                None
2091            } else {
2092                Some(normalized)
2093            }
2094        })
2095        .collect::<Vec<_>>()
2096        .join(" ");
2097    if normalized.is_empty() {
2098        None
2099    } else {
2100        Some(normalized)
2101    }
2102}
2103
/// Returns true for 5-character Rust error codes such as `E0308` or `e0308`:
/// one leading `E`/`e` followed by exactly four ASCII digits.
fn is_rust_error_code(value: &str) -> bool {
    let bytes = value.as_bytes();
    bytes.len() == 5 && matches!(bytes[0], b'E' | b'e') && bytes[1..].iter().all(u8::is_ascii_digit)
}
2109
2110fn find_declared_mutation(
2111    store: &dyn EvolutionStore,
2112    mutation_id: &MutationId,
2113) -> Result<Option<PreparedMutation>, EvolutionError> {
2114    for stored in store.scan(1)? {
2115        if let EvolutionEvent::MutationDeclared { mutation } = stored.event {
2116            if &mutation.intent.id == mutation_id {
2117                return Ok(Some(mutation));
2118            }
2119        }
2120    }
2121    Ok(None)
2122}
2123
/// Finds PROMOTED genes whose signal set exactly equals the input's signal
/// set (case-insensitive), each paired with its promoted capsules ordered by
/// environment similarity, then confidence, then id.
///
/// When the selector input carries a spec id, genes must additionally be
/// linked to that spec (case-insensitive, via the projection's
/// gene -> spec-ids index). Candidates are returned best-score first with a
/// stable gene-id tie-break; a failed projection rebuild degrades to "no
/// candidates".
fn exact_match_candidates(store: &dyn EvolutionStore, input: &SelectorInput) -> Vec<GeneCandidate> {
    let Ok(projection) = store.rebuild_projection() else {
        return Vec::new();
    };
    // NOTE(review): these clones copy whole projection maps so that
    // `projection.genes` can be moved below; destructuring would avoid them —
    // confirm EvolutionProjection has no Drop impl before changing.
    let capsules = projection.capsules.clone();
    let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
    let requested_spec_id = input
        .spec_id
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty());
    let signal_set = input
        .signals
        .iter()
        .map(|signal| signal.to_ascii_lowercase())
        .collect::<BTreeSet<_>>();
    let mut candidates = projection
        .genes
        .into_iter()
        .filter_map(|gene| {
            if gene.state != AssetState::Promoted {
                return None;
            }
            // Optional spec filter: the gene must be linked to the requested spec.
            if let Some(spec_id) = requested_spec_id {
                let matches_spec = spec_ids_by_gene
                    .get(&gene.id)
                    .map(|values| {
                        values
                            .iter()
                            .any(|value| value.eq_ignore_ascii_case(spec_id))
                    })
                    .unwrap_or(false);
                if !matches_spec {
                    return None;
                }
            }
            let gene_signals = gene
                .signals
                .iter()
                .map(|signal| signal.to_ascii_lowercase())
                .collect::<BTreeSet<_>>();
            // Exact set equality — this selector does not do fuzzy matching.
            if gene_signals == signal_set {
                let mut matched_capsules = capsules
                    .iter()
                    .filter(|capsule| {
                        capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
                    })
                    .cloned()
                    .collect::<Vec<_>>();
                // Order: best env match, then highest confidence, then id.
                matched_capsules.sort_by(|left, right| {
                    replay_environment_match_factor(&input.env, &right.env)
                        .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
                        .unwrap_or(std::cmp::Ordering::Equal)
                        .then_with(|| {
                            right
                                .confidence
                                .partial_cmp(&left.confidence)
                                .unwrap_or(std::cmp::Ordering::Equal)
                        })
                        .then_with(|| left.id.cmp(&right.id))
                });
                if matched_capsules.is_empty() {
                    None
                } else {
                    // Gene score = env similarity of its best capsule.
                    let score = matched_capsules
                        .first()
                        .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
                        .unwrap_or(0.0);
                    Some(GeneCandidate {
                        gene,
                        score,
                        capsules: matched_capsules,
                    })
                }
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    candidates.sort_by(|left, right| {
        right
            .score
            .partial_cmp(&left.score)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| left.gene.id.cmp(&right.gene.id))
    });
    candidates
}
2212
/// Variant of `exact_match_candidates` for remotely imported material: genes
/// must still be PROMOTED and exactly match the input signals, but only
/// QUARANTINED capsules whose ids arrived via `RemoteAssetImported` events
/// are eligible — i.e. remote capsules still awaiting local replay
/// verification.
fn quarantined_remote_exact_match_candidates(
    store: &dyn EvolutionStore,
    input: &SelectorInput,
) -> Vec<GeneCandidate> {
    // Collect every asset id ever imported from a remote peer; this is the
    // provenance filter applied to capsules below.
    let remote_asset_ids = store
        .scan(1)
        .ok()
        .map(|events| {
            events
                .into_iter()
                .filter_map(|stored| match stored.event {
                    EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids,
                    } => Some(asset_ids),
                    _ => None,
                })
                .flatten()
                .collect::<BTreeSet<_>>()
        })
        .unwrap_or_default();
    if remote_asset_ids.is_empty() {
        return Vec::new();
    }

    let Ok(projection) = store.rebuild_projection() else {
        return Vec::new();
    };
    // NOTE(review): same whole-map clones as `exact_match_candidates`;
    // destructuring the projection would avoid them.
    let capsules = projection.capsules.clone();
    let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
    let requested_spec_id = input
        .spec_id
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty());
    let signal_set = input
        .signals
        .iter()
        .map(|signal| signal.to_ascii_lowercase())
        .collect::<BTreeSet<_>>();
    let mut candidates = projection
        .genes
        .into_iter()
        .filter_map(|gene| {
            if gene.state != AssetState::Promoted {
                return None;
            }
            // Optional spec filter, identical to the local exact-match path.
            if let Some(spec_id) = requested_spec_id {
                let matches_spec = spec_ids_by_gene
                    .get(&gene.id)
                    .map(|values| {
                        values
                            .iter()
                            .any(|value| value.eq_ignore_ascii_case(spec_id))
                    })
                    .unwrap_or(false);
                if !matches_spec {
                    return None;
                }
            }
            let gene_signals = gene
                .signals
                .iter()
                .map(|signal| signal.to_ascii_lowercase())
                .collect::<BTreeSet<_>>();
            if gene_signals == signal_set {
                // Only quarantined capsules with remote provenance qualify.
                let mut matched_capsules = capsules
                    .iter()
                    .filter(|capsule| {
                        capsule.gene_id == gene.id
                            && capsule.state == AssetState::Quarantined
                            && remote_asset_ids.contains(&capsule.id)
                    })
                    .cloned()
                    .collect::<Vec<_>>();
                // Order: best env match, then highest confidence, then id.
                matched_capsules.sort_by(|left, right| {
                    replay_environment_match_factor(&input.env, &right.env)
                        .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
                        .unwrap_or(std::cmp::Ordering::Equal)
                        .then_with(|| {
                            right
                                .confidence
                                .partial_cmp(&left.confidence)
                                .unwrap_or(std::cmp::Ordering::Equal)
                        })
                        .then_with(|| left.id.cmp(&right.id))
                });
                if matched_capsules.is_empty() {
                    None
                } else {
                    let score = matched_capsules
                        .first()
                        .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
                        .unwrap_or(0.0);
                    Some(GeneCandidate {
                        gene,
                        score,
                        capsules: matched_capsules,
                    })
                }
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    candidates.sort_by(|left, right| {
        right
            .score
            .partial_cmp(&left.score)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| left.gene.id.cmp(&right.gene.id))
    });
    candidates
}
2327
2328fn replay_environment_match_factor(input: &EnvFingerprint, candidate: &EnvFingerprint) -> f32 {
2329    let fields = [
2330        input
2331            .rustc_version
2332            .eq_ignore_ascii_case(&candidate.rustc_version),
2333        input
2334            .cargo_lock_hash
2335            .eq_ignore_ascii_case(&candidate.cargo_lock_hash),
2336        input
2337            .target_triple
2338            .eq_ignore_ascii_case(&candidate.target_triple),
2339        input.os.eq_ignore_ascii_case(&candidate.os),
2340    ];
2341    let matched_fields = fields.into_iter().filter(|matched| *matched).count() as f32;
2342    0.5 + ((matched_fields / 4.0) * 0.5)
2343}
2344
2345fn effective_candidate_score(
2346    candidate: &GeneCandidate,
2347    publishers_by_gene: &BTreeMap<String, String>,
2348    reputation_bias: &BTreeMap<String, f32>,
2349) -> f32 {
2350    let bias = publishers_by_gene
2351        .get(&candidate.gene.id)
2352        .and_then(|publisher| reputation_bias.get(publisher))
2353        .copied()
2354        .unwrap_or(0.0)
2355        .clamp(0.0, 1.0);
2356    candidate.score * (1.0 + (bias * 0.1))
2357}
2358
2359fn export_promoted_assets_from_store(
2360    store: &dyn EvolutionStore,
2361    sender_id: impl Into<String>,
2362) -> Result<EvolutionEnvelope, EvoKernelError> {
2363    let projection = store.rebuild_projection().map_err(store_err)?;
2364    let mut assets = Vec::new();
2365    for gene in projection
2366        .genes
2367        .into_iter()
2368        .filter(|gene| gene.state == AssetState::Promoted)
2369    {
2370        assets.push(NetworkAsset::Gene { gene });
2371    }
2372    for capsule in projection
2373        .capsules
2374        .into_iter()
2375        .filter(|capsule| capsule.state == AssetState::Promoted)
2376    {
2377        assets.push(NetworkAsset::Capsule { capsule });
2378    }
2379    Ok(EvolutionEnvelope::publish(sender_id, assets))
2380}
2381
/// Imports a remote envelope into the local store after verifying its
/// content hash.
///
/// Genes are projected as-is (their remote state is preserved), while remote
/// capsules are force-quarantined on arrival — they must be re-verified by
/// local replay before promotion. Bare events are only appended when
/// `should_import_remote_event` allows them. Each imported asset gets a
/// `RemoteAssetImported` provenance event appended before its payload event;
/// the provenance-before-payload order is what the remote-candidate selector
/// relies on.
fn import_remote_envelope_into_store(
    store: &dyn EvolutionStore,
    envelope: &EvolutionEnvelope,
) -> Result<ImportOutcome, EvoKernelError> {
    // Reject tampered or corrupted envelopes outright.
    if !envelope.verify_content_hash() {
        return Err(EvoKernelError::Validation(
            "invalid evolution envelope hash".into(),
        ));
    }

    let mut imported_asset_ids = Vec::new();
    for asset in &envelope.assets {
        match asset {
            NetworkAsset::Gene { gene } => {
                imported_asset_ids.push(gene.id.clone());
                // Provenance event first, then the gene projection itself.
                store
                    .append_event(EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids: vec![gene.id.clone()],
                    })
                    .map_err(store_err)?;
                store
                    .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
                    .map_err(store_err)?;
            }
            NetworkAsset::Capsule { capsule } => {
                imported_asset_ids.push(capsule.id.clone());
                store
                    .append_event(EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids: vec![capsule.id.clone()],
                    })
                    .map_err(store_err)?;
                // Remote capsules never keep their remote state: commit a
                // quarantined copy, then record the quarantine explicitly.
                let mut quarantined = capsule.clone();
                quarantined.state = AssetState::Quarantined;
                store
                    .append_event(EvolutionEvent::CapsuleCommitted {
                        capsule: quarantined.clone(),
                    })
                    .map_err(store_err)?;
                store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: quarantined.id,
                    })
                    .map_err(store_err)?;
            }
            NetworkAsset::EvolutionEvent { event } => {
                // Only declarative context events are replayed locally.
                if should_import_remote_event(event) {
                    store.append_event(event.clone()).map_err(store_err)?;
                }
            }
        }
    }

    Ok(ImportOutcome {
        imported_asset_ids,
        accepted: true,
    })
}
2441
2442fn should_import_remote_event(event: &EvolutionEvent) -> bool {
2443    matches!(
2444        event,
2445        EvolutionEvent::MutationDeclared { .. } | EvolutionEvent::SpecLinked { .. }
2446    )
2447}
2448
2449fn fetch_assets_from_store(
2450    store: &dyn EvolutionStore,
2451    responder_id: impl Into<String>,
2452    query: &FetchQuery,
2453) -> Result<FetchResponse, EvoKernelError> {
2454    let projection = store.rebuild_projection().map_err(store_err)?;
2455    let normalized_signals: Vec<String> = query
2456        .signals
2457        .iter()
2458        .map(|signal| signal.trim().to_ascii_lowercase())
2459        .filter(|signal| !signal.is_empty())
2460        .collect();
2461    let matches_any_signal = |candidate: &str| {
2462        if normalized_signals.is_empty() {
2463            return true;
2464        }
2465        let candidate = candidate.to_ascii_lowercase();
2466        normalized_signals
2467            .iter()
2468            .any(|signal| candidate.contains(signal) || signal.contains(&candidate))
2469    };
2470
2471    let matched_genes: Vec<Gene> = projection
2472        .genes
2473        .into_iter()
2474        .filter(|gene| gene.state == AssetState::Promoted)
2475        .filter(|gene| gene.signals.iter().any(|signal| matches_any_signal(signal)))
2476        .collect();
2477    let matched_gene_ids: BTreeSet<String> =
2478        matched_genes.iter().map(|gene| gene.id.clone()).collect();
2479    let matched_capsules: Vec<Capsule> = projection
2480        .capsules
2481        .into_iter()
2482        .filter(|capsule| capsule.state == AssetState::Promoted)
2483        .filter(|capsule| matched_gene_ids.contains(&capsule.gene_id))
2484        .collect();
2485
2486    let mut assets = Vec::new();
2487    for gene in matched_genes {
2488        assets.push(NetworkAsset::Gene { gene });
2489    }
2490    for capsule in matched_capsules {
2491        assets.push(NetworkAsset::Capsule { capsule });
2492    }
2493
2494    Ok(FetchResponse {
2495        sender_id: responder_id.into(),
2496        assets,
2497    })
2498}
2499
/// Applies a revocation notice: revokes every requested gene, quarantines
/// every requested capsule, and cascades — a revoked capsule also revokes
/// its parent gene, and a revoked gene quarantines ALL of its capsules.
///
/// Returns a notice echoing the sender/reason with the full, sorted,
/// deduplicated set of affected asset ids (gene ids + capsule ids).
fn revoke_assets_in_store(
    store: &dyn EvolutionStore,
    notice: &RevokeNotice,
) -> Result<RevokeNotice, EvoKernelError> {
    let projection = store.rebuild_projection().map_err(store_err)?;
    let requested: BTreeSet<String> = notice
        .asset_ids
        .iter()
        .map(|asset_id| asset_id.trim().to_string())
        .filter(|asset_id| !asset_id.is_empty())
        .collect();
    let mut revoked_gene_ids = BTreeSet::new();
    let mut quarantined_capsule_ids = BTreeSet::new();

    // Pass 1: directly requested genes.
    for gene in &projection.genes {
        if requested.contains(&gene.id) {
            revoked_gene_ids.insert(gene.id.clone());
        }
    }
    // Pass 2: directly requested capsules also revoke their parent gene.
    for capsule in &projection.capsules {
        if requested.contains(&capsule.id) {
            quarantined_capsule_ids.insert(capsule.id.clone());
            revoked_gene_ids.insert(capsule.gene_id.clone());
        }
    }
    // Pass 3 (must run after pass 2 so capsule-triggered gene revocations
    // cascade too): quarantine every capsule of any revoked gene.
    for capsule in &projection.capsules {
        if revoked_gene_ids.contains(&capsule.gene_id) {
            quarantined_capsule_ids.insert(capsule.id.clone());
        }
    }

    for gene_id in &revoked_gene_ids {
        store
            .append_event(EvolutionEvent::GeneRevoked {
                gene_id: gene_id.clone(),
                reason: notice.reason.clone(),
            })
            .map_err(store_err)?;
    }
    for capsule_id in &quarantined_capsule_ids {
        store
            .append_event(EvolutionEvent::CapsuleQuarantined {
                capsule_id: capsule_id.clone(),
            })
            .map_err(store_err)?;
    }

    // Gene and capsule ids are disjoint in practice; sort + dedup is a
    // defensive normalization of the combined list.
    let mut affected_ids: Vec<String> = revoked_gene_ids.into_iter().collect();
    affected_ids.extend(quarantined_capsule_ids);
    affected_ids.sort();
    affected_ids.dedup();

    Ok(RevokeNotice {
        sender_id: notice.sender_id.clone(),
        asset_ids: affected_ids,
        reason: notice.reason.clone(),
    })
}
2558
2559fn evolution_metrics_snapshot(
2560    store: &dyn EvolutionStore,
2561) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
2562    let events = store.scan(1).map_err(store_err)?;
2563    let projection = store.rebuild_projection().map_err(store_err)?;
2564    let replay_success_total = events
2565        .iter()
2566        .filter(|stored| matches!(stored.event, EvolutionEvent::CapsuleReused { .. }))
2567        .count() as u64;
2568    let replay_failures_total = events
2569        .iter()
2570        .filter(|stored| is_replay_validation_failure(&stored.event))
2571        .count() as u64;
2572    let replay_attempts_total = replay_success_total + replay_failures_total;
2573    let mutation_declared_total = events
2574        .iter()
2575        .filter(|stored| matches!(stored.event, EvolutionEvent::MutationDeclared { .. }))
2576        .count() as u64;
2577    let promoted_mutations_total = events
2578        .iter()
2579        .filter(|stored| matches!(stored.event, EvolutionEvent::GenePromoted { .. }))
2580        .count() as u64;
2581    let gene_revocations_total = events
2582        .iter()
2583        .filter(|stored| matches!(stored.event, EvolutionEvent::GeneRevoked { .. }))
2584        .count() as u64;
2585    let cutoff = Utc::now() - Duration::hours(1);
2586    let mutation_velocity_last_hour = count_recent_events(&events, cutoff, |event| {
2587        matches!(event, EvolutionEvent::MutationDeclared { .. })
2588    });
2589    let revoke_frequency_last_hour = count_recent_events(&events, cutoff, |event| {
2590        matches!(event, EvolutionEvent::GeneRevoked { .. })
2591    });
2592    let promoted_genes = projection
2593        .genes
2594        .iter()
2595        .filter(|gene| gene.state == AssetState::Promoted)
2596        .count() as u64;
2597    let promoted_capsules = projection
2598        .capsules
2599        .iter()
2600        .filter(|capsule| capsule.state == AssetState::Promoted)
2601        .count() as u64;
2602
2603    Ok(EvolutionMetricsSnapshot {
2604        replay_attempts_total,
2605        replay_success_total,
2606        replay_success_rate: safe_ratio(replay_success_total, replay_attempts_total),
2607        mutation_declared_total,
2608        promoted_mutations_total,
2609        promotion_ratio: safe_ratio(promoted_mutations_total, mutation_declared_total),
2610        gene_revocations_total,
2611        mutation_velocity_last_hour,
2612        revoke_frequency_last_hour,
2613        promoted_genes,
2614        promoted_capsules,
2615        last_event_seq: events.last().map(|stored| stored.seq).unwrap_or(0),
2616    })
2617}
2618
2619fn evolution_health_snapshot(snapshot: &EvolutionMetricsSnapshot) -> EvolutionHealthSnapshot {
2620    EvolutionHealthSnapshot {
2621        status: "ok".into(),
2622        last_event_seq: snapshot.last_event_seq,
2623        promoted_genes: snapshot.promoted_genes,
2624        promoted_capsules: snapshot.promoted_capsules,
2625    }
2626}
2627
2628fn render_evolution_metrics_prometheus(
2629    snapshot: &EvolutionMetricsSnapshot,
2630    health: &EvolutionHealthSnapshot,
2631) -> String {
2632    let mut out = String::new();
2633    out.push_str(
2634        "# HELP oris_evolution_replay_attempts_total Total replay attempts that reached validation.\n",
2635    );
2636    out.push_str("# TYPE oris_evolution_replay_attempts_total counter\n");
2637    out.push_str(&format!(
2638        "oris_evolution_replay_attempts_total {}\n",
2639        snapshot.replay_attempts_total
2640    ));
2641    out.push_str("# HELP oris_evolution_replay_success_total Total replay attempts that reused a capsule successfully.\n");
2642    out.push_str("# TYPE oris_evolution_replay_success_total counter\n");
2643    out.push_str(&format!(
2644        "oris_evolution_replay_success_total {}\n",
2645        snapshot.replay_success_total
2646    ));
2647    out.push_str("# HELP oris_evolution_replay_success_rate Successful replay attempts divided by replay attempts that reached validation.\n");
2648    out.push_str("# TYPE oris_evolution_replay_success_rate gauge\n");
2649    out.push_str(&format!(
2650        "oris_evolution_replay_success_rate {:.6}\n",
2651        snapshot.replay_success_rate
2652    ));
2653    out.push_str(
2654        "# HELP oris_evolution_mutation_declared_total Total declared mutations recorded in the evolution log.\n",
2655    );
2656    out.push_str("# TYPE oris_evolution_mutation_declared_total counter\n");
2657    out.push_str(&format!(
2658        "oris_evolution_mutation_declared_total {}\n",
2659        snapshot.mutation_declared_total
2660    ));
2661    out.push_str("# HELP oris_evolution_promoted_mutations_total Total mutations promoted by the governor.\n");
2662    out.push_str("# TYPE oris_evolution_promoted_mutations_total counter\n");
2663    out.push_str(&format!(
2664        "oris_evolution_promoted_mutations_total {}\n",
2665        snapshot.promoted_mutations_total
2666    ));
2667    out.push_str(
2668        "# HELP oris_evolution_promotion_ratio Promoted mutations divided by declared mutations.\n",
2669    );
2670    out.push_str("# TYPE oris_evolution_promotion_ratio gauge\n");
2671    out.push_str(&format!(
2672        "oris_evolution_promotion_ratio {:.6}\n",
2673        snapshot.promotion_ratio
2674    ));
2675    out.push_str("# HELP oris_evolution_gene_revocations_total Total gene revocations recorded in the evolution log.\n");
2676    out.push_str("# TYPE oris_evolution_gene_revocations_total counter\n");
2677    out.push_str(&format!(
2678        "oris_evolution_gene_revocations_total {}\n",
2679        snapshot.gene_revocations_total
2680    ));
2681    out.push_str("# HELP oris_evolution_mutation_velocity_last_hour Declared mutations observed in the last hour.\n");
2682    out.push_str("# TYPE oris_evolution_mutation_velocity_last_hour gauge\n");
2683    out.push_str(&format!(
2684        "oris_evolution_mutation_velocity_last_hour {}\n",
2685        snapshot.mutation_velocity_last_hour
2686    ));
2687    out.push_str("# HELP oris_evolution_revoke_frequency_last_hour Gene revocations observed in the last hour.\n");
2688    out.push_str("# TYPE oris_evolution_revoke_frequency_last_hour gauge\n");
2689    out.push_str(&format!(
2690        "oris_evolution_revoke_frequency_last_hour {}\n",
2691        snapshot.revoke_frequency_last_hour
2692    ));
2693    out.push_str("# HELP oris_evolution_promoted_genes Current promoted genes in the evolution projection.\n");
2694    out.push_str("# TYPE oris_evolution_promoted_genes gauge\n");
2695    out.push_str(&format!(
2696        "oris_evolution_promoted_genes {}\n",
2697        snapshot.promoted_genes
2698    ));
2699    out.push_str("# HELP oris_evolution_promoted_capsules Current promoted capsules in the evolution projection.\n");
2700    out.push_str("# TYPE oris_evolution_promoted_capsules gauge\n");
2701    out.push_str(&format!(
2702        "oris_evolution_promoted_capsules {}\n",
2703        snapshot.promoted_capsules
2704    ));
2705    out.push_str("# HELP oris_evolution_store_last_event_seq Last visible append-only evolution event sequence.\n");
2706    out.push_str("# TYPE oris_evolution_store_last_event_seq gauge\n");
2707    out.push_str(&format!(
2708        "oris_evolution_store_last_event_seq {}\n",
2709        snapshot.last_event_seq
2710    ));
2711    out.push_str(
2712        "# HELP oris_evolution_health Evolution observability store health (1 = healthy).\n",
2713    );
2714    out.push_str("# TYPE oris_evolution_health gauge\n");
2715    out.push_str(&format!(
2716        "oris_evolution_health {}\n",
2717        u8::from(health.status == "ok")
2718    ));
2719    out
2720}
2721
2722fn count_recent_events(
2723    events: &[StoredEvolutionEvent],
2724    cutoff: DateTime<Utc>,
2725    predicate: impl Fn(&EvolutionEvent) -> bool,
2726) -> u64 {
2727    events
2728        .iter()
2729        .filter(|stored| {
2730            predicate(&stored.event)
2731                && parse_event_timestamp(&stored.timestamp)
2732                    .map(|timestamp| timestamp >= cutoff)
2733                    .unwrap_or(false)
2734        })
2735        .count() as u64
2736}
2737
2738fn parse_event_timestamp(raw: &str) -> Option<DateTime<Utc>> {
2739    DateTime::parse_from_rfc3339(raw)
2740        .ok()
2741        .map(|parsed| parsed.with_timezone(&Utc))
2742}
2743
2744fn is_replay_validation_failure(event: &EvolutionEvent) -> bool {
2745    matches!(
2746        event,
2747        EvolutionEvent::ValidationFailed {
2748            gene_id: Some(_),
2749            ..
2750        }
2751    )
2752}
2753
/// Divide `numerator` by `denominator`, returning 0.0 for a zero denominator
/// instead of NaN/infinity.
fn safe_ratio(numerator: u64, denominator: u64) -> f64 {
    match denominator {
        0 => 0.0,
        nonzero => numerator as f64 / nonzero as f64,
    }
}
2761
2762fn store_err(err: EvolutionError) -> EvoKernelError {
2763    EvoKernelError::Store(err.to_string())
2764}
2765
2766#[cfg(test)]
2767mod tests {
2768    use super::*;
2769    use oris_agent_contract::{
2770        AgentRole, CoordinationPlan, CoordinationPrimitive, CoordinationTask,
2771    };
2772    use oris_kernel::{
2773        AllowAllPolicy, InMemoryEventStore, KernelMode, KernelState, NoopActionExecutor,
2774        NoopStepFn, StateUpdatedOnlyReducer,
2775    };
2776    use serde::{Deserialize, Serialize};
2777
    // Minimal zero-sized kernel state; exists only to satisfy the
    // `KernelState` bound in these tests.
    #[derive(Clone, Debug, Default, Serialize, Deserialize)]
    struct TestState;
2780
    impl KernelState for TestState {
        // Constant schema version; the tests never migrate state.
        fn version(&self) -> u32 {
            1
        }
    }
2786
2787    fn temp_workspace(name: &str) -> std::path::PathBuf {
2788        let root =
2789            std::env::temp_dir().join(format!("oris-evokernel-{name}-{}", std::process::id()));
2790        if root.exists() {
2791            fs::remove_dir_all(&root).unwrap();
2792        }
2793        fs::create_dir_all(root.join("src")).unwrap();
2794        fs::write(
2795            root.join("Cargo.toml"),
2796            "[package]\nname = \"sample\"\nversion = \"0.1.0\"\nedition = \"2021\"\n",
2797        )
2798        .unwrap();
2799        fs::write(root.join("Cargo.lock"), "# lock\n").unwrap();
2800        fs::write(root.join("src/lib.rs"), "pub fn demo() -> usize { 1 }\n").unwrap();
2801        root
2802    }
2803
    /// Kernel wired entirely with no-op/in-memory stubs so these tests
    /// exercise only the evolution layer, never real storage or execution.
    fn test_kernel() -> Arc<Kernel<TestState>> {
        Arc::new(Kernel::<TestState> {
            events: Box::new(InMemoryEventStore::new()),
            snaps: None,
            reducer: Box::new(StateUpdatedOnlyReducer),
            exec: Box::new(NoopActionExecutor),
            step: Box::new(NoopStepFn),
            policy: Box::new(AllowAllPolicy),
            effect_sink: None,
            mode: KernelMode::Normal,
        })
    }
2816
2817    fn lightweight_plan() -> ValidationPlan {
2818        ValidationPlan {
2819            profile: "test".into(),
2820            stages: vec![ValidationStage::Command {
2821                program: "git".into(),
2822                args: vec!["--version".into()],
2823                timeout_ms: 5_000,
2824            }],
2825        }
2826    }
2827
    /// Canned low-risk mutation: a unified diff adding `README.md`, tagged
    /// with the "missing readme" signal and baselined at `HEAD`.
    fn sample_mutation() -> PreparedMutation {
        prepare_mutation(
            MutationIntent {
                id: "mutation-1".into(),
                intent: "add README".into(),
                target: MutationTarget::Paths {
                    allow: vec!["README.md".into()],
                },
                expected_effect: "repo still builds".into(),
                risk: RiskLevel::Low,
                signals: vec!["missing readme".into()],
                spec_id: None,
            },
            "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1 @@
+# sample
"
            .into(),
            Some("HEAD".into()),
        )
    }
2854
2855    fn base_sandbox_policy() -> SandboxPolicy {
2856        SandboxPolicy {
2857            allowed_programs: vec!["git".into()],
2858            max_duration_ms: 60_000,
2859            max_output_bytes: 1024 * 1024,
2860            denied_env_prefixes: Vec::new(),
2861        }
2862    }
2863
    /// Shared `CommandValidator` wired to the git-only base sandbox policy.
    fn command_validator() -> Arc<dyn Validator> {
        Arc::new(CommandValidator::new(base_sandbox_policy()))
    }
2867
2868    fn replay_input(signal: &str) -> SelectorInput {
2869        let rustc_version = std::process::Command::new("rustc")
2870            .arg("--version")
2871            .output()
2872            .ok()
2873            .filter(|output| output.status.success())
2874            .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
2875            .unwrap_or_else(|| "rustc unknown".into());
2876        SelectorInput {
2877            signals: vec![signal.into()],
2878            env: EnvFingerprint {
2879                rustc_version,
2880                cargo_lock_hash: compute_artifact_hash("# lock\n"),
2881                target_triple: format!(
2882                    "{}-unknown-{}",
2883                    std::env::consts::ARCH,
2884                    std::env::consts::OS
2885                ),
2886                os: std::env::consts::OS.into(),
2887            },
2888            spec_id: None,
2889            limit: 1,
2890        }
2891    }
2892
    /// Assemble an `EvoKernel` over a fresh temp workspace and the given
    /// store, configured with a single-success governor
    /// (`promote_after_successes: 1`), the lightweight validation plan, and
    /// the git-only sandbox policy.
    fn build_test_evo_with_store(
        name: &str,
        run_id: &str,
        validator: Arc<dyn Validator>,
        store: Arc<dyn EvolutionStore>,
    ) -> EvoKernel<TestState> {
        let workspace = temp_workspace(name);
        let sandbox: Arc<dyn Sandbox> = Arc::new(oris_sandbox::LocalProcessSandbox::new(
            run_id,
            &workspace,
            std::env::temp_dir(),
        ));
        EvoKernel::new(test_kernel(), sandbox, validator, store)
            .with_governor(Arc::new(DefaultGovernor::new(
                oris_governor::GovernorConfig {
                    promote_after_successes: 1,
                    ..Default::default()
                },
            )))
            .with_validation_plan(lightweight_plan())
            .with_sandbox_policy(base_sandbox_policy())
    }
2915
    /// Like `build_test_evo_with_store`, but also creates a JSONL store under
    /// a test-specific temp directory (wiping any leftover directory first)
    /// and returns the store handle alongside the kernel.
    fn build_test_evo(
        name: &str,
        run_id: &str,
        validator: Arc<dyn Validator>,
    ) -> (EvoKernel<TestState>, Arc<dyn EvolutionStore>) {
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-{name}-store-{}",
            std::process::id()
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let evo = build_test_evo_with_store(name, run_id, validator, store.clone());
        (evo, store)
    }
2933
    /// Publish-envelope helper that fingerprints the current host environment
    /// (via `replay_input`) before delegating to
    /// `remote_publish_envelope_with_env`.
    fn remote_publish_envelope(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        signal: &str,
        file_name: &str,
        line: &str,
    ) -> EvolutionEnvelope {
        remote_publish_envelope_with_env(
            sender_id,
            run_id,
            gene_id,
            capsule_id,
            mutation_id,
            signal,
            file_name,
            line,
            replay_input(signal).env,
        )
    }
2956
    /// Build a publish envelope simulating a remote node's assets: a declared
    /// mutation (one-line diff adding `file_name`), a promoted gene keyed to
    /// `signal`, a promoted capsule bound to the supplied `env` fingerprint,
    /// and the matching `CapsuleReleased` event.
    fn remote_publish_envelope_with_env(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        signal: &str,
        file_name: &str,
        line: &str,
        env: EnvFingerprint,
    ) -> EvolutionEnvelope {
        let mutation = prepare_mutation(
            MutationIntent {
                id: mutation_id.into(),
                intent: format!("add {file_name}"),
                target: MutationTarget::Paths {
                    allow: vec![file_name.into()],
                },
                expected_effect: "replay should still validate".into(),
                risk: RiskLevel::Low,
                signals: vec![signal.into()],
                spec_id: None,
            },
            format!(
                "\
diff --git a/{file_name} b/{file_name}
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/{file_name}
@@ -0,0 +1 @@
+{line}
"
            ),
            Some("HEAD".into()),
        );
        let gene = Gene {
            id: gene_id.into(),
            signals: vec![signal.into()],
            strategy: vec![file_name.into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        };
        // Capsule diff hash mirrors the prepared mutation's artifact hash so
        // replay verification lines up.
        let capsule = Capsule {
            id: capsule_id.into(),
            gene_id: gene_id.into(),
            mutation_id: mutation_id.into(),
            run_id: run_id.into(),
            diff_hash: mutation.artifact.content_hash.clone(),
            confidence: 0.9,
            env,
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec![file_name.into()],
                validator_hash: "validator-hash".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        };
        EvolutionEnvelope::publish(
            sender_id,
            vec![
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared { mutation },
                },
                NetworkAsset::Gene { gene: gene.clone() },
                NetworkAsset::Capsule {
                    capsule: capsule.clone(),
                },
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::CapsuleReleased {
                        capsule_id: capsule.id.clone(),
                        state: AssetState::Promoted,
                    },
                },
            ],
        )
    }
3038
    // Validator stub whose reports always carry the configured `success` flag.
    struct FixedValidator {
        success: bool,
    }
3042
3043    #[async_trait]
3044    impl Validator for FixedValidator {
3045        async fn run(
3046            &self,
3047            _receipt: &SandboxReceipt,
3048            plan: &ValidationPlan,
3049        ) -> Result<ValidationReport, ValidationError> {
3050            Ok(ValidationReport {
3051                success: self.success,
3052                duration_ms: 1,
3053                stages: Vec::new(),
3054                logs: if self.success {
3055                    format!("{} ok", plan.profile)
3056                } else {
3057                    format!("{} failed", plan.profile)
3058                },
3059            })
3060        }
3061    }
3062
    // Pins: a sequential planner -> coder plan completes both tasks in order
    // and emits a Planner -> Coder handoff message for the "coder" task.
    #[test]
    fn coordination_planner_to_coder_handoff_is_deterministic() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "ship feature".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "planner".into(),
                    role: AgentRole::Planner,
                    description: "split the work".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "implement the patch".into(),
                    depends_on: vec!["planner".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["planner", "coder"]);
        assert!(result.failed_tasks.is_empty());
        assert!(result.messages.iter().any(|message| {
            message.from_role == AgentRole::Planner
                && message.to_role == AgentRole::Coder
                && message.task_id == "coder"
        }));
    }
3094
    // Pins: when the coder task fails, only the repair task completes, the
    // coder task is reported failed, and a Coder -> Repair message targets
    // the "repair" task.
    #[test]
    fn coordination_repair_runs_only_after_coder_failure() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "fix broken implementation".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "force-fail initial implementation".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "repair".into(),
                    role: AgentRole::Repair,
                    description: "patch the failed implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["repair"]);
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert!(result.messages.iter().any(|message| {
            message.from_role == AgentRole::Coder
                && message.to_role == AgentRole::Repair
                && message.task_id == "repair"
        }));
    }
3126
    // Pins: the optimizer task completes after a successful coder step, with
    // no failed tasks.
    #[test]
    fn coordination_optimizer_runs_after_successful_implementation_step() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "ship optimized patch".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "implement a working patch".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "optimizer".into(),
                    role: AgentRole::Optimizer,
                    description: "tighten the implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["coder", "optimizer"]);
        assert!(result.failed_tasks.is_empty());
    }
3153
    // Pins: in parallel mode, tasks ready in the same wave complete in sorted
    // id order ("a-task" before "z-task"), with the dependent "mid-task" last.
    #[test]
    fn coordination_parallel_waves_preserve_sorted_merge_order() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "parallelize safe tasks".into(),
            primitive: CoordinationPrimitive::Parallel,
            tasks: vec![
                CoordinationTask {
                    id: "z-task".into(),
                    role: AgentRole::Planner,
                    description: "analyze z".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "a-task".into(),
                    role: AgentRole::Coder,
                    description: "implement a".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "mid-task".into(),
                    role: AgentRole::Optimizer,
                    description: "polish after both".into(),
                    depends_on: vec!["z-task".into(), "a-task".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["a-task", "z-task", "mid-task"]);
        assert!(result.failed_tasks.is_empty());
    }
3186
    // Pins: with max_retries = 1 a persistently failing task produces exactly
    // two failure messages (initial attempt plus one retry) and ends failed.
    #[test]
    fn coordination_retries_stop_at_max_retries() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "retry then stop".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![CoordinationTask {
                id: "coder".into(),
                role: AgentRole::Coder,
                description: "force-fail this task".into(),
                depends_on: Vec::new(),
            }],
            timeout_ms: 5_000,
            max_retries: 1,
        });

        assert!(result.completed_tasks.is_empty());
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert_eq!(
            result
                .messages
                .iter()
                .filter(|message| message.task_id == "coder" && message.content.contains("failed"))
                .count(),
            2
        );
    }
3213
    // Pins: conditional mode skips tasks downstream of a failure, emitting a
    // "skipped due to failed dependency chain" message for the skipped task
    // without marking it as failed.
    #[test]
    fn coordination_conditional_mode_skips_downstream_tasks_on_failure() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "skip blocked follow-up work".into(),
            primitive: CoordinationPrimitive::Conditional,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "force-fail the implementation".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "optimizer".into(),
                    role: AgentRole::Optimizer,
                    description: "only optimize a successful implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert!(result.completed_tasks.is_empty());
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert!(result.messages.iter().any(|message| {
            message.task_id == "optimizer"
                && message
                    .content
                    .contains("skipped due to failed dependency chain")
        }));
        assert!(!result
            .failed_tasks
            .iter()
            .any(|task_id| task_id == "optimizer"));
    }
3250
    // Pins: CommandValidator produces exactly one stage report for a
    // single-stage plan run against a synthetic sandbox receipt.
    #[tokio::test]
    async fn command_validator_aggregates_stage_reports() {
        let workspace = temp_workspace("validator");
        let receipt = SandboxReceipt {
            mutation_id: "m".into(),
            workdir: workspace,
            applied: true,
            changed_files: Vec::new(),
            patch_hash: "hash".into(),
            stdout_log: std::env::temp_dir().join("stdout.log"),
            stderr_log: std::env::temp_dir().join("stderr.log"),
        };
        let validator = CommandValidator::new(SandboxPolicy {
            allowed_programs: vec!["git".into()],
            max_duration_ms: 1_000,
            max_output_bytes: 1024,
            denied_env_prefixes: Vec::new(),
        });
        let report = validator
            .run(
                &receipt,
                &ValidationPlan {
                    profile: "test".into(),
                    stages: vec![ValidationStage::Command {
                        program: "git".into(),
                        args: vec!["--version".into()],
                        timeout_ms: 1_000,
                    }],
                },
            )
            .await
            .unwrap();
        assert_eq!(report.stages.len(), 1);
    }
3285
    // Pins: capturing a successful mutation appends a CapsuleCommitted event
    // to the store and returns a capsule with a non-empty id.
    #[tokio::test]
    async fn capture_successful_mutation_appends_capsule() {
        let (evo, store) = build_test_evo("capture", "run-1", command_validator());
        let capsule = evo
            .capture_successful_mutation(&"run-1".into(), sample_mutation())
            .await
            .unwrap();
        let events = store.scan(1).unwrap();
        assert!(events
            .iter()
            .any(|stored| matches!(stored.event, EvolutionEvent::CapsuleCommitted { .. })));
        assert!(!capsule.id.is_empty());
    }
3299
    // Pins: a replay hit on a previously captured capsule reuses that exact
    // capsule and records a CapsuleReused event in the store.
    #[tokio::test]
    async fn replay_hit_records_capsule_reused() {
        let (evo, store) = build_test_evo("replay", "run-2", command_validator());
        let capsule = evo
            .capture_successful_mutation(&"run-2".into(), sample_mutation())
            .await
            .unwrap();
        let decision = evo
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some(capsule.id));
        assert!(store
            .scan(1)
            .unwrap()
            .iter()
            .any(|stored| matches!(stored.event, EvolutionEvent::CapsuleReused { .. })));
    }
3319
    // Pins: after one capture, one successful replay, and one revoke, the
    // metrics snapshot reports 1/1 replay stats, 1/1 promotion stats, one
    // revocation, and zero remaining promoted assets; the Prometheus render
    // agrees with the snapshot.
    #[tokio::test]
    async fn metrics_snapshot_tracks_replay_promotion_and_revocation_signals() {
        let (evo, _) = build_test_evo("metrics", "run-metrics", command_validator());
        let capsule = evo
            .capture_successful_mutation(&"run-metrics".into(), sample_mutation())
            .await
            .unwrap();
        let decision = evo
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();
        assert!(decision.used_capsule);

        evo.revoke_assets(&RevokeNotice {
            sender_id: "node-metrics".into(),
            asset_ids: vec![capsule.id.clone()],
            reason: "manual test revoke".into(),
        })
        .unwrap();

        let snapshot = evo.metrics_snapshot().unwrap();
        assert_eq!(snapshot.replay_attempts_total, 1);
        assert_eq!(snapshot.replay_success_total, 1);
        assert_eq!(snapshot.replay_success_rate, 1.0);
        assert_eq!(snapshot.mutation_declared_total, 1);
        assert_eq!(snapshot.promoted_mutations_total, 1);
        assert_eq!(snapshot.promotion_ratio, 1.0);
        assert_eq!(snapshot.gene_revocations_total, 1);
        assert_eq!(snapshot.mutation_velocity_last_hour, 1);
        assert_eq!(snapshot.revoke_frequency_last_hour, 1);
        assert_eq!(snapshot.promoted_genes, 0);
        assert_eq!(snapshot.promoted_capsules, 0);

        let rendered = evo.render_metrics_prometheus().unwrap();
        assert!(rendered.contains("oris_evolution_replay_success_rate 1.000000"));
        assert!(rendered.contains("oris_evolution_promotion_ratio 1.000000"));
        assert!(rendered.contains("oris_evolution_revoke_frequency_last_hour 1"));
        assert!(rendered.contains("oris_evolution_mutation_velocity_last_hour 1"));
        assert!(rendered.contains("oris_evolution_health 1"));
    }
3360
3361    #[tokio::test]
3362    async fn remote_replay_prefers_closest_environment_match() {
3363        let (evo, _) = build_test_evo("remote-env", "run-remote-env", command_validator());
3364        let input = replay_input("env-signal");
3365
3366        let envelope_a = remote_publish_envelope_with_env(
3367            "node-a",
3368            "run-remote-a",
3369            "gene-a",
3370            "capsule-a",
3371            "mutation-a",
3372            "env-signal",
3373            "A.md",
3374            "# from a",
3375            input.env.clone(),
3376        );
3377        let envelope_b = remote_publish_envelope_with_env(
3378            "node-b",
3379            "run-remote-b",
3380            "gene-b",
3381            "capsule-b",
3382            "mutation-b",
3383            "env-signal",
3384            "B.md",
3385            "# from b",
3386            EnvFingerprint {
3387                rustc_version: "old-rustc".into(),
3388                cargo_lock_hash: "other-lock".into(),
3389                target_triple: "aarch64-apple-darwin".into(),
3390                os: "linux".into(),
3391            },
3392        );
3393
3394        evo.import_remote_envelope(&envelope_a).unwrap();
3395        evo.import_remote_envelope(&envelope_b).unwrap();
3396
3397        let decision = evo.replay_or_fallback(input).await.unwrap();
3398
3399        assert!(decision.used_capsule);
3400        assert_eq!(decision.capsule_id, Some("capsule-a".into()));
3401        assert!(!decision.fallback_to_planner);
3402    }
3403
3404    #[tokio::test]
3405    async fn remote_capsule_stays_quarantined_until_first_successful_replay() {
3406        let (evo, store) = build_test_evo(
3407            "remote-quarantine",
3408            "run-remote-quarantine",
3409            command_validator(),
3410        );
3411        let envelope = remote_publish_envelope(
3412            "node-remote",
3413            "run-remote-quarantine",
3414            "gene-remote",
3415            "capsule-remote",
3416            "mutation-remote",
3417            "remote-signal",
3418            "REMOTE.md",
3419            "# from remote",
3420        );
3421
3422        evo.import_remote_envelope(&envelope).unwrap();
3423
3424        let before_replay = store.rebuild_projection().unwrap();
3425        let imported_capsule = before_replay
3426            .capsules
3427            .iter()
3428            .find(|capsule| capsule.id == "capsule-remote")
3429            .unwrap();
3430        assert_eq!(imported_capsule.state, AssetState::Quarantined);
3431
3432        let decision = evo
3433            .replay_or_fallback(replay_input("remote-signal"))
3434            .await
3435            .unwrap();
3436
3437        assert!(decision.used_capsule);
3438        assert_eq!(decision.capsule_id, Some("capsule-remote".into()));
3439
3440        let after_replay = store.rebuild_projection().unwrap();
3441        let released_capsule = after_replay
3442            .capsules
3443            .iter()
3444            .find(|capsule| capsule.id == "capsule-remote")
3445            .unwrap();
3446        assert_eq!(released_capsule.state, AssetState::Promoted);
3447    }
3448
3449    #[tokio::test]
3450    async fn insufficient_evu_blocks_publish_but_not_local_replay() {
3451        let (evo, _) = build_test_evo("stake-gate", "run-stake", command_validator());
3452        let capsule = evo
3453            .capture_successful_mutation(&"run-stake".into(), sample_mutation())
3454            .await
3455            .unwrap();
3456        let publish = evo.export_promoted_assets("node-local");
3457        assert!(matches!(publish, Err(EvoKernelError::Validation(_))));
3458
3459        let decision = evo
3460            .replay_or_fallback(replay_input("missing readme"))
3461            .await
3462            .unwrap();
3463        assert!(decision.used_capsule);
3464        assert_eq!(decision.capsule_id, Some(capsule.id));
3465    }
3466
    #[tokio::test]
    async fn second_replay_validation_failure_revokes_gene_immediately() {
        // NOTE(review): despite the test name, the assertions below verify that
        // the gene is NOT revoked — there is no GeneRevoked event and both gene
        // and capsule remain Promoted. Repeated validation failures instead
        // drop the capsule's replay score "below replay threshold". Consider
        // renaming the test (or the expectation) to match — TODO confirm intent.
        // Capture a capsule with a passing validator first.
        let (capturer, store) = build_test_evo("revoke-replay", "run-capture", command_validator());
        let capsule = capturer
            .capture_successful_mutation(&"run-capture".into(), sample_mutation())
            .await
            .unwrap();

        // Re-open the same store with a validator that always fails, so replay
        // attempts record validation failures against the captured gene.
        let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
        let failing_replay = build_test_evo_with_store(
            "revoke-replay",
            "run-replay-fail",
            failing_validator,
            store.clone(),
        );

        let first = failing_replay
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();
        let second = failing_replay
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();

        // Both attempts must fall back to the planner without reusing the capsule.
        assert!(!first.used_capsule);
        assert!(first.fallback_to_planner);
        assert!(!second.used_capsule);
        assert!(second.fallback_to_planner);

        // Gene and capsule remain Promoted in the rebuilt projection...
        let projection = store.rebuild_projection().unwrap();
        let gene = projection
            .genes
            .iter()
            .find(|gene| gene.id == capsule.gene_id)
            .unwrap();
        assert_eq!(gene.state, AssetState::Promoted);
        let committed_capsule = projection
            .capsules
            .iter()
            .find(|current| current.id == capsule.id)
            .unwrap();
        assert_eq!(committed_capsule.state, AssetState::Promoted);

        // ...and the event log shows exactly one ValidationFailed for the gene
        // (presumably the second attempt never reaches validation once the
        // score is already below threshold — TODO confirm) and no GeneRevoked.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::ValidationFailed {
                            gene_id: Some(gene_id),
                            ..
                        } if gene_id == &capsule.gene_id
                    )
                })
                .count(),
            1
        );
        assert!(!events.iter().any(|stored| {
            matches!(
                &stored.event,
                EvolutionEvent::GeneRevoked { gene_id, .. } if gene_id == &capsule.gene_id
            )
        }));

        // A fresh instance with a passing validator still refuses to replay:
        // the capsule's score has fallen below the replay threshold.
        let recovered = build_test_evo_with_store(
            "revoke-replay",
            "run-replay-check",
            command_validator(),
            store.clone(),
        );
        let after_revoke = recovered
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();
        assert!(!after_revoke.used_capsule);
        assert!(after_revoke.fallback_to_planner);
        assert!(after_revoke.reason.contains("below replay threshold"));
    }
3548
3549    #[tokio::test]
3550    async fn remote_reuse_success_rewards_publisher_and_biases_selection() {
3551        let ledger = Arc::new(Mutex::new(EvuLedger {
3552            accounts: vec![],
3553            reputations: vec![
3554                oris_economics::ReputationRecord {
3555                    node_id: "node-a".into(),
3556                    publish_success_rate: 0.4,
3557                    validator_accuracy: 0.4,
3558                    reuse_impact: 0,
3559                },
3560                oris_economics::ReputationRecord {
3561                    node_id: "node-b".into(),
3562                    publish_success_rate: 0.95,
3563                    validator_accuracy: 0.95,
3564                    reuse_impact: 8,
3565                },
3566            ],
3567        }));
3568        let (evo, _) = build_test_evo("remote-success", "run-remote", command_validator());
3569        let evo = evo.with_economics(ledger.clone());
3570
3571        let envelope_a = remote_publish_envelope(
3572            "node-a",
3573            "run-remote-a",
3574            "gene-a",
3575            "capsule-a",
3576            "mutation-a",
3577            "shared-signal",
3578            "A.md",
3579            "# from a",
3580        );
3581        let envelope_b = remote_publish_envelope(
3582            "node-b",
3583            "run-remote-b",
3584            "gene-b",
3585            "capsule-b",
3586            "mutation-b",
3587            "shared-signal",
3588            "B.md",
3589            "# from b",
3590        );
3591
3592        evo.import_remote_envelope(&envelope_a).unwrap();
3593        evo.import_remote_envelope(&envelope_b).unwrap();
3594
3595        let decision = evo
3596            .replay_or_fallback(replay_input("shared-signal"))
3597            .await
3598            .unwrap();
3599
3600        assert!(decision.used_capsule);
3601        assert_eq!(decision.capsule_id, Some("capsule-b".into()));
3602        let locked = ledger.lock().unwrap();
3603        let rewarded = locked
3604            .accounts
3605            .iter()
3606            .find(|item| item.node_id == "node-b")
3607            .unwrap();
3608        assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
3609        assert!(
3610            locked.selector_reputation_bias()["node-b"]
3611                > locked.selector_reputation_bias()["node-a"]
3612        );
3613    }
3614
3615    #[tokio::test]
3616    async fn remote_reuse_failure_penalizes_remote_reputation() {
3617        let ledger = Arc::new(Mutex::new(EvuLedger::default()));
3618        let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
3619        let (evo, _) = build_test_evo("remote-failure", "run-failure", failing_validator);
3620        let evo = evo.with_economics(ledger.clone());
3621
3622        let envelope = remote_publish_envelope(
3623            "node-remote",
3624            "run-remote-failed",
3625            "gene-remote",
3626            "capsule-remote",
3627            "mutation-remote",
3628            "failure-signal",
3629            "FAILED.md",
3630            "# from remote",
3631        );
3632        evo.import_remote_envelope(&envelope).unwrap();
3633
3634        let decision = evo
3635            .replay_or_fallback(replay_input("failure-signal"))
3636            .await
3637            .unwrap();
3638
3639        assert!(!decision.used_capsule);
3640        assert!(decision.fallback_to_planner);
3641
3642        let signal = evo.economics_signal("node-remote").unwrap();
3643        assert_eq!(signal.available_evu, 0);
3644        assert!(signal.publish_success_rate < 0.5);
3645        assert!(signal.validator_accuracy < 0.5);
3646    }
3647}