// oris_evokernel/core.rs
1//! EvoKernel orchestration: mutation capture, validation, capsule construction, and replay-first reuse.
2
3use std::collections::{BTreeMap, BTreeSet};
4use std::fs;
5use std::path::Path;
6use std::process::Command;
7use std::sync::{Arc, Mutex};
8
9use async_trait::async_trait;
10use chrono::{DateTime, Duration, Utc};
11use oris_agent_contract::{
12    AgentRole, BoundedTaskClass, CoordinationMessage, CoordinationPlan, CoordinationPrimitive,
13    CoordinationResult, CoordinationTask, ExecutionFeedback,
14    MutationProposal as AgentMutationProposal, ReplayFeedback, ReplayPlannerDirective,
15    SupervisedDevloopOutcome, SupervisedDevloopRequest, SupervisedDevloopStatus,
16};
17use oris_economics::{EconomicsSignal, EvuLedger, StakePolicy};
18use oris_evolution::{
19    compute_artifact_hash, decayed_replay_confidence, next_id, stable_hash_json, AssetState,
20    BlastRadius, CandidateSource, Capsule, CapsuleId, EnvFingerprint, EvolutionError,
21    EvolutionEvent, EvolutionProjection, EvolutionStore, Gene, GeneCandidate, MutationId,
22    PreparedMutation, Selector, SelectorInput, StoreBackedSelector, StoredEvolutionEvent,
23    ValidationSnapshot, MIN_REPLAY_CONFIDENCE,
24};
25use oris_evolution_network::{EvolutionEnvelope, NetworkAsset};
26use oris_governor::{DefaultGovernor, Governor, GovernorDecision, GovernorInput};
27use oris_kernel::{Kernel, KernelState, RunId};
28use oris_sandbox::{
29    compute_blast_radius, execute_allowed_command, Sandbox, SandboxPolicy, SandboxReceipt,
30};
31use oris_spec::CompiledMutationPlan;
32use serde::{Deserialize, Serialize};
33use thiserror::Error;
34
35pub use oris_evolution::{
36    default_store_root, ArtifactEncoding, AssetState as EvoAssetState,
37    BlastRadius as EvoBlastRadius, CandidateSource as EvoCandidateSource,
38    EnvFingerprint as EvoEnvFingerprint, EvolutionStore as EvoEvolutionStore, JsonlEvolutionStore,
39    MutationArtifact, MutationIntent, MutationTarget, Outcome, RiskLevel,
40    SelectorInput as EvoSelectorInput,
41};
42pub use oris_evolution_network::{
43    FetchQuery, FetchResponse, MessageType, PublishRequest, RevokeNotice,
44};
45pub use oris_governor::{CoolingWindow, GovernorConfig, RevocationReason};
46pub use oris_sandbox::{LocalProcessSandbox, SandboxPolicy as EvoSandboxPolicy};
47pub use oris_spec::{SpecCompileError, SpecCompiler, SpecDocument};
48
/// An ordered validation pipeline identified by a named profile.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationPlan {
    /// Profile name recorded into validation snapshots (e.g. "oris-default").
    pub profile: String,
    /// Stages executed in order; `CommandValidator` stops at the first failure.
    pub stages: Vec<ValidationStage>,
}
54
55impl ValidationPlan {
56    pub fn oris_default() -> Self {
57        Self {
58            profile: "oris-default".into(),
59            stages: vec![
60                ValidationStage::Command {
61                    program: "cargo".into(),
62                    args: vec!["fmt".into(), "--all".into(), "--check".into()],
63                    timeout_ms: 60_000,
64                },
65                ValidationStage::Command {
66                    program: "cargo".into(),
67                    args: vec!["check".into(), "--workspace".into()],
68                    timeout_ms: 180_000,
69                },
70                ValidationStage::Command {
71                    program: "cargo".into(),
72                    args: vec![
73                        "test".into(),
74                        "-p".into(),
75                        "oris-kernel".into(),
76                        "-p".into(),
77                        "oris-evolution".into(),
78                        "-p".into(),
79                        "oris-sandbox".into(),
80                        "-p".into(),
81                        "oris-evokernel".into(),
82                        "--lib".into(),
83                    ],
84                    timeout_ms: 300_000,
85                },
86                ValidationStage::Command {
87                    program: "cargo".into(),
88                    args: vec![
89                        "test".into(),
90                        "-p".into(),
91                        "oris-runtime".into(),
92                        "--lib".into(),
93                    ],
94                    timeout_ms: 300_000,
95                },
96            ],
97        }
98    }
99}
100
/// A single validation step. Currently the only variant is running an
/// external command inside the sandbox workdir.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum ValidationStage {
    /// Run `program` with `args`, bounded by `timeout_ms` milliseconds.
    Command {
        program: String,
        args: Vec<String>,
        timeout_ms: u64,
    },
}
109
/// Outcome of one executed validation stage.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationStageReport {
    /// Human-readable label: the program plus its joined arguments.
    pub stage: String,
    /// Whether the stage succeeded.
    pub success: bool,
    /// Process exit code; `None` when execution itself failed (spawn/timeout).
    pub exit_code: Option<i32>,
    /// Wall-clock duration of the stage; 0 when execution itself failed.
    pub duration_ms: u64,
    /// Captured standard output.
    pub stdout: String,
    /// Captured standard error (or the execution error message).
    pub stderr: String,
}
119
/// Aggregated result of running a [`ValidationPlan`].
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationReport {
    /// True only when every executed stage succeeded.
    pub success: bool,
    /// Total wall-clock duration across all executed stages.
    pub duration_ms: u64,
    /// Per-stage reports, in execution order (stops after the first failure).
    pub stages: Vec<ValidationStageReport>,
    /// Concatenated stdout/stderr of all executed stages, newline-separated.
    pub logs: String,
}
127
/// Raw textual material fed into deterministic signal extraction.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionInput {
    // Patch text of the mutation; tokenized for signals.
    pub patch_diff: String,
    // Stated intent of the mutation; tokenized for signals.
    pub intent: String,
    // Expected effect description; tokenized for signals.
    pub expected_effect: String,
    // Explicitly declared signals; also normalized into whole phrases.
    pub declared_signals: Vec<String>,
    // Paths of files touched by the mutation; tokenized for signals.
    pub changed_files: Vec<String>,
    // Validation outcome; always contributes a "validation passed/failed" signal.
    pub validation_success: bool,
    // Full validation log text; tokenized for signals.
    pub validation_logs: String,
    // Per-stage output blobs; tokenized for signals.
    pub stage_outputs: Vec<String>,
}
139
/// Result of deterministic signal extraction.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionOutput {
    // Deduplicated signals in lexicographic order, capped at 32 entries.
    pub values: Vec<String>,
    // Stable hash over `values` (canonical JSON, or joined-text fallback).
    pub hash: String,
}
145
/// A pre-built mutation template used when bootstrapping the evolution store.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SeedTemplate {
    // Stable identifier for the seed.
    pub id: String,
    // Intent text associated with the seeded mutation.
    pub intent: String,
    // Signals the seed should match against.
    pub signals: Vec<String>,
    // Diff payload of the seeded mutation — presumably a unified diff applied
    // during bootstrap; confirm against the bootstrap routine (not visible here).
    pub diff_payload: String,
    // Name of the validation profile to run the seed under.
    pub validation_profile: String,
}
154
/// Summary of a bootstrap/seeding pass over the store.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct BootstrapReport {
    // Whether any seeding actually happened.
    pub seeded: bool,
    // Number of genes added during bootstrap.
    pub genes_added: usize,
    // Number of capsules added during bootstrap.
    pub capsules_added: usize,
}
161
162impl ValidationReport {
163    pub fn to_snapshot(&self, profile: &str) -> ValidationSnapshot {
164        ValidationSnapshot {
165            success: self.success,
166            profile: profile.to_string(),
167            duration_ms: self.duration_ms,
168            summary: if self.success {
169                "validation passed".into()
170            } else {
171                "validation failed".into()
172            },
173        }
174    }
175}
176
177pub fn extract_deterministic_signals(input: &SignalExtractionInput) -> SignalExtractionOutput {
178    let mut signals = BTreeSet::new();
179
180    for declared in &input.declared_signals {
181        if let Some(phrase) = normalize_signal_phrase(declared) {
182            signals.insert(phrase);
183        }
184        extend_signal_tokens(&mut signals, declared);
185    }
186
187    for text in [
188        input.patch_diff.as_str(),
189        input.intent.as_str(),
190        input.expected_effect.as_str(),
191        input.validation_logs.as_str(),
192    ] {
193        extend_signal_tokens(&mut signals, text);
194    }
195
196    for changed_file in &input.changed_files {
197        extend_signal_tokens(&mut signals, changed_file);
198    }
199
200    for stage_output in &input.stage_outputs {
201        extend_signal_tokens(&mut signals, stage_output);
202    }
203
204    signals.insert(if input.validation_success {
205        "validation passed".into()
206    } else {
207        "validation failed".into()
208    });
209
210    let values = signals.into_iter().take(32).collect::<Vec<_>>();
211    let hash =
212        stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
213    SignalExtractionOutput { values, hash }
214}
215
/// Errors surfaced by [`Validator`] implementations.
#[derive(Debug, Error)]
pub enum ValidationError {
    /// Validation could not be executed at all (as opposed to a stage failing).
    #[error("validation execution failed: {0}")]
    Execution(String),
}
221
/// Runs a validation plan against the sandbox state described by a receipt.
#[async_trait]
pub trait Validator: Send + Sync {
    /// Executes `plan` in the context of `receipt` and returns the aggregated
    /// report. Implementations decide which failures become `Err` versus a
    /// `ValidationReport` with `success == false`.
    async fn run(
        &self,
        receipt: &SandboxReceipt,
        plan: &ValidationPlan,
    ) -> Result<ValidationReport, ValidationError>;
}
230
/// [`Validator`] that executes each stage as an allow-listed external command
/// under the configured sandbox policy.
pub struct CommandValidator {
    // Sandbox policy gating which commands may run and how.
    policy: SandboxPolicy,
}

impl CommandValidator {
    /// Creates a validator bound to the given sandbox policy.
    pub fn new(policy: SandboxPolicy) -> Self {
        Self { policy }
    }
}
240
241#[async_trait]
242impl Validator for CommandValidator {
243    async fn run(
244        &self,
245        receipt: &SandboxReceipt,
246        plan: &ValidationPlan,
247    ) -> Result<ValidationReport, ValidationError> {
248        let started = std::time::Instant::now();
249        let mut stages = Vec::new();
250        let mut success = true;
251        let mut logs = String::new();
252
253        for stage in &plan.stages {
254            match stage {
255                ValidationStage::Command {
256                    program,
257                    args,
258                    timeout_ms,
259                } => {
260                    let result = execute_allowed_command(
261                        &self.policy,
262                        &receipt.workdir,
263                        program,
264                        args,
265                        *timeout_ms,
266                    )
267                    .await;
268                    let report = match result {
269                        Ok(output) => ValidationStageReport {
270                            stage: format!("{program} {}", args.join(" ")),
271                            success: output.success,
272                            exit_code: output.exit_code,
273                            duration_ms: output.duration_ms,
274                            stdout: output.stdout,
275                            stderr: output.stderr,
276                        },
277                        Err(err) => ValidationStageReport {
278                            stage: format!("{program} {}", args.join(" ")),
279                            success: false,
280                            exit_code: None,
281                            duration_ms: 0,
282                            stdout: String::new(),
283                            stderr: err.to_string(),
284                        },
285                    };
286                    if !report.success {
287                        success = false;
288                    }
289                    if !report.stdout.is_empty() {
290                        logs.push_str(&report.stdout);
291                        logs.push('\n');
292                    }
293                    if !report.stderr.is_empty() {
294                        logs.push_str(&report.stderr);
295                        logs.push('\n');
296                    }
297                    stages.push(report);
298                    if !success {
299                        break;
300                    }
301                }
302            }
303        }
304
305        Ok(ValidationReport {
306            success,
307            duration_ms: started.elapsed().as_millis() as u64,
308            stages,
309            logs,
310        })
311    }
312}
313
/// Outcome of a replay attempt: either a capsule was reused, or the caller
/// should fall back to the planner.
#[derive(Clone, Debug)]
pub struct ReplayDecision {
    // True when a stored capsule satisfied the task.
    pub used_capsule: bool,
    // The capsule considered, when one was selected (even if replay failed).
    pub capsule_id: Option<CapsuleId>,
    // True when the caller should invoke the planner instead.
    pub fallback_to_planner: bool,
    // Human-readable explanation of the decision.
    pub reason: String,
}
321
/// Accumulated replay metrics for one bounded task class.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct ReplayTaskClassMetrics {
    // Identifier of the bounded task class these counters belong to.
    pub task_class_id: String,
    // Display label for the task class.
    pub task_label: String,
    // Total number of successful replays for this class.
    pub replay_success_total: u64,
    // Total reasoning steps saved by replaying instead of re-planning —
    // presumably estimated by the caller; confirm at the aggregation site.
    pub reasoning_steps_avoided_total: u64,
}
329
/// Scheduling classification of a coordination task at a point in time.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum CoordinationTaskState {
    /// All prerequisites satisfied; the task may run now.
    Ready,
    /// Has dependencies that are neither completed nor failed yet.
    Waiting,
    /// A dependency failed or was skipped; may become eligible for skipping.
    BlockedByFailure,
    /// Prerequisites can never be satisfied (e.g. unknown dependency id).
    PermanentlyBlocked,
}
337
/// Stateless, deterministic scheduler for multi-agent coordination plans.
#[derive(Clone, Debug, Default)]
pub struct MultiAgentCoordinator;
340
impl MultiAgentCoordinator {
    /// Creates a coordinator; the type is stateless, so this is equivalent
    /// to `Default::default()`.
    pub fn new() -> Self {
        Self
    }

    /// Deterministically executes `plan`, honoring its coordination primitive
    /// (sequential runs one ready task per pass; conditional skips tasks whose
    /// dependency chain failed) and per-task retry budget (`max_retries`
    /// retries, i.e. `max_retries + 1` total attempts).
    ///
    /// Failures are simulated from task descriptions ("force-fail"/"fail-once",
    /// see `simulate_task_failure`). Any task still pending when no task is
    /// ready is classified and reported as failed. Returns completed/failed
    /// ids in completion order plus all generated coordination messages.
    pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
        let primitive = plan.primitive.clone();
        let root_goal = plan.root_goal.clone();
        let timeout_ms = plan.timeout_ms;
        let max_retries = plan.max_retries;
        // Index tasks by id; duplicates keep the first occurrence.
        let mut tasks = BTreeMap::new();
        for task in plan.tasks {
            tasks.entry(task.id.clone()).or_insert(task);
        }

        let mut pending = tasks.keys().cloned().collect::<BTreeSet<_>>();
        let mut completed = BTreeSet::new();
        let mut failed = BTreeSet::new();
        // *_order vectors preserve the order in which ids were resolved,
        // while the sets provide O(log n) membership checks.
        let mut completed_order = Vec::new();
        let mut failed_order = Vec::new();
        let mut skipped = BTreeSet::new();
        // Per-task failure counts, for retry budgeting.
        let mut attempts = BTreeMap::new();
        let mut messages = Vec::new();

        // Main scheduling loop: run batches of ready tasks until none remain.
        loop {
            if matches!(primitive, CoordinationPrimitive::Conditional) {
                self.apply_conditional_skips(
                    &tasks,
                    &mut pending,
                    &completed,
                    &failed,
                    &mut skipped,
                    &mut messages,
                );
            }

            let mut ready = self.ready_task_ids(&tasks, &pending, &completed, &failed, &skipped);
            if ready.is_empty() {
                break;
            }
            // Sequential coordination executes at most one task per pass.
            if matches!(primitive, CoordinationPrimitive::Sequential) {
                ready.truncate(1);
            }

            for task_id in ready {
                let Some(task) = tasks.get(&task_id) else {
                    continue;
                };
                if !pending.contains(&task_id) {
                    continue;
                }
                self.record_handoff_messages(task, &tasks, &completed, &failed, &mut messages);

                let prior_failures = attempts.get(&task_id).copied().unwrap_or(0);
                if Self::simulate_task_failure(task, prior_failures) {
                    let failure_count = prior_failures + 1;
                    attempts.insert(task_id.clone(), failure_count);
                    // Retry while the failure count stays within budget;
                    // a retried task remains pending for the next pass.
                    let will_retry = failure_count <= max_retries;
                    messages.push(CoordinationMessage {
                        from_role: task.role.clone(),
                        to_role: task.role.clone(),
                        task_id: task_id.clone(),
                        content: if will_retry {
                            format!("task {task_id} failed on attempt {failure_count} and will retry")
                        } else {
                            format!(
                                "task {task_id} failed on attempt {failure_count} and exhausted retries"
                            )
                        },
                    });
                    if !will_retry {
                        pending.remove(&task_id);
                        if failed.insert(task_id.clone()) {
                            failed_order.push(task_id);
                        }
                    }
                    continue;
                }

                pending.remove(&task_id);
                if completed.insert(task_id.clone()) {
                    completed_order.push(task_id);
                }
            }
        }

        // Whatever is still pending can never run; classify and fail it with
        // an explanatory message.
        let blocked_ids = pending.into_iter().collect::<Vec<_>>();
        for task_id in blocked_ids {
            let Some(task) = tasks.get(&task_id) else {
                continue;
            };
            let state = self.classify_task(task, &tasks, &completed, &failed, &skipped);
            let content = match state {
                CoordinationTaskState::BlockedByFailure => {
                    format!("task {task_id} blocked by failed dependencies")
                }
                CoordinationTaskState::PermanentlyBlocked => {
                    format!("task {task_id} has invalid coordination prerequisites")
                }
                CoordinationTaskState::Waiting => {
                    format!("task {task_id} has unresolved dependencies")
                }
                CoordinationTaskState::Ready => {
                    format!("task {task_id} was left pending unexpectedly")
                }
            };
            messages.push(CoordinationMessage {
                from_role: task.role.clone(),
                to_role: task.role.clone(),
                task_id: task_id.clone(),
                content,
            });
            if failed.insert(task_id.clone()) {
                failed_order.push(task_id);
            }
        }

        CoordinationResult {
            completed_tasks: completed_order,
            failed_tasks: failed_order,
            messages,
            summary: format!(
                "goal '{}' completed {} tasks, failed {}, skipped {} using {:?} coordination (timeout={}ms, max_retries={})",
                root_goal,
                completed.len(),
                failed.len(),
                skipped.len(),
                primitive,
                timeout_ms,
                max_retries
            ),
        }
    }

    /// Returns the ids of all pending tasks currently classified `Ready`,
    /// in the pending set's (lexicographic) iteration order.
    fn ready_task_ids(
        &self,
        tasks: &BTreeMap<String, CoordinationTask>,
        pending: &BTreeSet<String>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &BTreeSet<String>,
    ) -> Vec<String> {
        pending
            .iter()
            .filter_map(|task_id| {
                let task = tasks.get(task_id)?;
                (self.classify_task(task, tasks, completed, failed, skipped)
                    == CoordinationTaskState::Ready)
                    .then(|| task_id.clone())
            })
            .collect()
    }

    /// Conditional-primitive pass: moves every pending task whose dependency
    /// chain has failed from `pending` into `skipped`, emitting a message each.
    fn apply_conditional_skips(
        &self,
        tasks: &BTreeMap<String, CoordinationTask>,
        pending: &mut BTreeSet<String>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &mut BTreeSet<String>,
        messages: &mut Vec<CoordinationMessage>,
    ) {
        // Collect first, then mutate, to avoid aliasing `pending` while iterating.
        let skip_ids = pending
            .iter()
            .filter_map(|task_id| {
                let task = tasks.get(task_id)?;
                (self.classify_task(task, tasks, completed, failed, skipped)
                    == CoordinationTaskState::BlockedByFailure)
                    .then(|| task_id.clone())
            })
            .collect::<Vec<_>>();

        for task_id in skip_ids {
            let Some(task) = tasks.get(&task_id) else {
                continue;
            };
            pending.remove(&task_id);
            skipped.insert(task_id.clone());
            messages.push(CoordinationMessage {
                from_role: task.role.clone(),
                to_role: task.role.clone(),
                task_id: task_id.clone(),
                content: format!("task {task_id} skipped due to failed dependency chain"),
            });
        }
    }

    /// Classifies a task's schedulability according to role-specific rules:
    /// Planner/Coder need all dependencies completed; Repair needs at least
    /// one Coder dependency and only runs after a Coder dependency failed;
    /// Optimizer needs an implementation (Coder/Repair) dependency and runs
    /// once at least one such dependency completed.
    fn classify_task(
        &self,
        task: &CoordinationTask,
        tasks: &BTreeMap<String, CoordinationTask>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &BTreeSet<String>,
    ) -> CoordinationTaskState {
        match task.role {
            AgentRole::Planner | AgentRole::Coder => {
                let mut waiting = false;
                for dependency_id in &task.depends_on {
                    // Unknown dependency ids can never resolve.
                    if !tasks.contains_key(dependency_id) {
                        return CoordinationTaskState::PermanentlyBlocked;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if waiting {
                    CoordinationTaskState::Waiting
                } else {
                    CoordinationTaskState::Ready
                }
            }
            AgentRole::Repair => {
                let mut waiting = false;
                let mut has_coder_dependency = false;
                let mut has_failed_coder = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_coder = matches!(dependency.role, AgentRole::Coder);
                    if is_coder {
                        has_coder_dependency = true;
                    }
                    if skipped.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if failed.contains(dependency_id) {
                        // A failed Coder is exactly what Repair exists to fix;
                        // any other failed dependency blocks Repair.
                        if is_coder {
                            has_failed_coder = true;
                        } else {
                            return CoordinationTaskState::BlockedByFailure;
                        }
                        continue;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if !has_coder_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_failed_coder {
                    CoordinationTaskState::Ready
                } else {
                    // All Coder dependencies succeeded: Repair has nothing to do.
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
            AgentRole::Optimizer => {
                let mut waiting = false;
                let mut has_impl_dependency = false;
                let mut has_completed_impl = false;
                let mut has_failed_impl = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_impl = matches!(dependency.role, AgentRole::Coder | AgentRole::Repair);
                    if is_impl {
                        has_impl_dependency = true;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        // Failed implementation work is tolerated as long as
                        // some other implementation dependency completed.
                        if is_impl {
                            has_failed_impl = true;
                            continue;
                        }
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if completed.contains(dependency_id) {
                        if is_impl {
                            has_completed_impl = true;
                        }
                        continue;
                    }
                    waiting = true;
                }
                if !has_impl_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_completed_impl {
                    CoordinationTaskState::Ready
                } else if has_failed_impl {
                    CoordinationTaskState::BlockedByFailure
                } else {
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
        }
    }

    /// Emits one handoff message per resolved (completed or failed) dependency
    /// of `task`, deduplicated and in sorted dependency order. Called on every
    /// attempt, so retried tasks re-emit their handoffs.
    fn record_handoff_messages(
        &self,
        task: &CoordinationTask,
        tasks: &BTreeMap<String, CoordinationTask>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        messages: &mut Vec<CoordinationMessage>,
    ) {
        let mut dependency_ids = task.depends_on.clone();
        dependency_ids.sort();
        dependency_ids.dedup();

        for dependency_id in dependency_ids {
            let Some(dependency) = tasks.get(&dependency_id) else {
                continue;
            };
            if completed.contains(&dependency_id) {
                messages.push(CoordinationMessage {
                    from_role: dependency.role.clone(),
                    to_role: task.role.clone(),
                    task_id: task.id.clone(),
                    content: format!("handoff from {dependency_id} to {}", task.id),
                });
            } else if failed.contains(&dependency_id) {
                messages.push(CoordinationMessage {
                    from_role: dependency.role.clone(),
                    to_role: task.role.clone(),
                    task_id: task.id.clone(),
                    content: format!("failed dependency {dependency_id} routed to {}", task.id),
                });
            }
        }
    }

    /// Test/simulation hook: a task fails when its description contains
    /// "force-fail" (always) or "fail-once" (first attempt only).
    fn simulate_task_failure(task: &CoordinationTask, prior_failures: u32) -> bool {
        let normalized = task.description.to_ascii_lowercase();
        normalized.contains("force-fail")
            || (normalized.contains("fail-once") && prior_failures == 0)
    }
}
676
/// Errors surfaced while attempting a capsule replay.
#[derive(Debug, Error)]
pub enum ReplayError {
    /// The evolution store could not be read or written.
    #[error("store error: {0}")]
    Store(String),
    /// The sandbox failed outside the normal apply/validate flow.
    #[error("sandbox error: {0}")]
    Sandbox(String),
    /// The validator itself failed to execute (not a failed validation run).
    #[error("validation error: {0}")]
    Validation(String),
}
686
/// Attempts to satisfy a task by replaying a stored capsule before falling
/// back to the planner.
#[async_trait]
pub trait ReplayExecutor: Send + Sync {
    /// Tries a replay for `input`; the returned decision states whether a
    /// capsule was used and whether the caller should invoke the planner.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError>;

    /// Run-scoped variant; the default implementation ignores `run_id` and
    /// delegates to [`ReplayExecutor::try_replay`].
    async fn try_replay_for_run(
        &self,
        run_id: &RunId,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        let _ = run_id;
        self.try_replay(input, policy, validation).await
    }
}
707
/// [`ReplayExecutor`] backed by the evolution store: selects candidate genes,
/// applies their capsules in a sandbox, and re-validates before reuse.
pub struct StoreReplayExecutor {
    // Applies candidate mutations and produces receipts.
    pub sandbox: Arc<dyn Sandbox>,
    // Re-runs the validation plan against applied mutations.
    pub validator: Arc<dyn Validator>,
    // Evolution event store queried for genes/capsules and appended to.
    pub store: Arc<dyn EvolutionStore>,
    // Primary candidate selection strategy.
    pub selector: Arc<dyn Selector>,
    // Policy authority consulted during replay decisions.
    pub governor: Arc<dyn Governor>,
    // Optional EVU ledger for reuse settlements (economics disabled when None).
    pub economics: Option<Arc<Mutex<EvuLedger>>>,
    // Optional capsule-id -> publisher map for remote reuse attribution.
    pub remote_publishers: Option<Arc<Mutex<BTreeMap<String, String>>>>,
    // Staking rules applied to replay economics.
    pub stake_policy: StakePolicy,
}
718
/// Ranked replay candidates plus whether they came from an exact-match
/// fallback tier rather than the selector.
struct ReplayCandidates {
    // Candidates in rank order, truncated to the caller's limit.
    candidates: Vec<GeneCandidate>,
    // True when a fallback exact-match tier (local or remote) supplied them.
    exact_match: bool,
}
723
// Both trait entry points delegate to `try_replay_inner`, differing only in
// whether a run id is attached.
#[async_trait]
impl ReplayExecutor for StoreReplayExecutor {
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        self.try_replay_inner(None, input, policy, validation).await
    }

    async fn try_replay_for_run(
        &self,
        run_id: &RunId,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        self.try_replay_inner(Some(run_id), input, policy, validation)
            .await
    }
}
746
747impl StoreReplayExecutor {
    /// Gathers ranked replay candidates for `input`.
    ///
    /// Tiers, in order of preference: selector results, then local exact-match
    /// candidates, then quarantined remote exact matches. Each tier is
    /// re-ranked with the reputation bias; `exact_match` records whether a
    /// fallback tier (rather than the selector) supplied the result.
    fn collect_replay_candidates(&self, input: &SelectorInput) -> ReplayCandidates {
        // First quarantine genes whose decayed confidence requires revalidation.
        self.apply_confidence_revalidation();
        let mut selector_input = input.clone();
        // With economics + remote publishers active, widen the selector window
        // so reputation re-ranking has more candidates to choose from.
        if self.economics.is_some() && self.remote_publishers.is_some() {
            selector_input.limit = selector_input.limit.max(4);
        }
        let mut candidates = self.selector.select(&selector_input);
        self.rerank_with_reputation_bias(&mut candidates);
        let mut exact_match = false;
        if candidates.is_empty() {
            let mut exact_candidates = exact_match_candidates(self.store.as_ref(), input);
            self.rerank_with_reputation_bias(&mut exact_candidates);
            if !exact_candidates.is_empty() {
                candidates = exact_candidates;
                exact_match = true;
            }
        }
        if candidates.is_empty() {
            let mut remote_candidates =
                quarantined_remote_exact_match_candidates(self.store.as_ref(), input);
            self.rerank_with_reputation_bias(&mut remote_candidates);
            if !remote_candidates.is_empty() {
                candidates = remote_candidates;
                exact_match = true;
            }
        }
        // Respect the caller's limit, but always allow at least one candidate.
        candidates.truncate(input.limit.max(1));
        ReplayCandidates {
            candidates,
            exact_match,
        }
    }
780
    /// Quarantines genes (and their capsules) whose decayed replay confidence
    /// marks them stale, per `stale_replay_revalidation_targets`, by appending
    /// quarantine events to the store.
    ///
    /// Best-effort: a failed projection read aborts the pass; a failed gene
    /// event skips that gene; a failed capsule event stops quarantining the
    /// remaining capsules of that gene only.
    fn apply_confidence_revalidation(&self) {
        let Ok(projection) = projection_snapshot(self.store.as_ref()) else {
            return;
        };
        for target in stale_replay_revalidation_targets(&projection, Utc::now()) {
            let reason = format!(
                "confidence decayed to {:.3}; revalidation required before replay",
                target.decayed_confidence
            );
            // Quarantine the gene first; its capsules are only quarantined
            // once the gene event is durably recorded.
            if self
                .store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: target.gene_id.clone(),
                    state: AssetState::Quarantined,
                    reason: reason.clone(),
                })
                .is_err()
            {
                continue;
            }
            for capsule_id in target.capsule_ids {
                if self
                    .store
                    .append_event(EvolutionEvent::CapsuleQuarantined { capsule_id })
                    .is_err()
                {
                    break;
                }
            }
        }
    }
812
    /// Replay-first execution: try to satisfy `input` by re-applying a
    /// previously captured capsule instead of falling back to the planner.
    ///
    /// Soft failures (no candidate, low score, missing payload, patch-apply
    /// or validation failure) return a `ReplayDecision` with
    /// `fallback_to_planner: true`; only store/validator infrastructure
    /// errors surface as `ReplayError`.
    async fn try_replay_inner(
        &self,
        replay_run_id: Option<&RunId>,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        let ReplayCandidates {
            candidates,
            exact_match,
        } = self.collect_replay_candidates(input);
        // Candidates arrive ordered; only the top-ranked one is considered.
        let Some(best) = candidates.into_iter().next() else {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "no matching gene".into(),
            });
        };
        // Non-exact matches must clear a fixed similarity threshold (0.82).
        if !exact_match && best.score < 0.82 {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: format!("best gene score {:.3} below replay threshold", best.score),
            });
        }

        let Some(capsule) = best.capsules.first().cloned() else {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "candidate gene has no capsule".into(),
            });
        };
        // The remote publisher (if any) is debited/credited for reuse outcomes below.
        let remote_publisher = self.publisher_for_capsule(&capsule.id);

        let Some(mutation) = find_declared_mutation(self.store.as_ref(), &capsule.mutation_id)
            .map_err(|err| ReplayError::Store(err.to_string()))?
        else {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "mutation payload missing from store".into(),
            });
        };

        // Re-apply the stored patch in the sandbox; an apply failure settles
        // economics against the publisher and falls back to the planner.
        let receipt = match self.sandbox.apply(&mutation, policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                self.record_reuse_settlement(remote_publisher.as_deref(), false);
                return Ok(ReplayDecision {
                    used_capsule: false,
                    capsule_id: Some(capsule.id.clone()),
                    fallback_to_planner: true,
                    reason: format!("replay patch apply failed: {err}"),
                });
            }
        };

        let report = self
            .validator
            .run(&receipt, validation)
            .await
            .map_err(|err| ReplayError::Validation(err.to_string()))?;
        if !report.success {
            // Failure path logs the event and may revoke the gene via the governor.
            self.record_replay_validation_failure(&best, &capsule, validation, &report)?;
            self.record_reuse_settlement(remote_publisher.as_deref(), false);
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: Some(capsule.id.clone()),
                fallback_to_planner: true,
                reason: "replay validation failed".into(),
            });
        }

        // A quarantined asset (e.g. a remote import) that has now validated
        // locally is promoted and its capsule released for future replay.
        if matches!(capsule.state, AssetState::Quarantined) {
            self.store
                .append_event(EvolutionEvent::ValidationPassed {
                    mutation_id: capsule.mutation_id.clone(),
                    report: report.to_snapshot(&validation.profile),
                    gene_id: Some(best.gene.id.clone()),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            if matches!(best.gene.state, AssetState::Quarantined) {
                self.store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: best.gene.id.clone(),
                        state: AssetState::Promoted,
                        reason: "remote asset locally validated via replay".into(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
                self.store
                    .append_event(EvolutionEvent::GenePromoted {
                        gene_id: best.gene.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
            self.store
                .append_event(EvolutionEvent::CapsuleReleased {
                    capsule_id: capsule.id.clone(),
                    state: AssetState::Promoted,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
        }

        // Record the successful reuse and credit the remote publisher, if any.
        self.store
            .append_event(EvolutionEvent::CapsuleReused {
                capsule_id: capsule.id.clone(),
                gene_id: capsule.gene_id.clone(),
                run_id: capsule.run_id.clone(),
                replay_run_id: replay_run_id.cloned(),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        self.record_reuse_settlement(remote_publisher.as_deref(), true);

        Ok(ReplayDecision {
            used_capsule: true,
            capsule_id: Some(capsule.id),
            fallback_to_planner: false,
            reason: if exact_match {
                "replayed via cold-start lookup".into()
            } else {
                "replayed via selector".into()
            },
        })
    }
942
943    fn rerank_with_reputation_bias(&self, candidates: &mut [GeneCandidate]) {
944        let Some(ledger) = self.economics.as_ref() else {
945            return;
946        };
947        let reputation_bias = ledger
948            .lock()
949            .ok()
950            .map(|locked| locked.selector_reputation_bias())
951            .unwrap_or_default();
952        if reputation_bias.is_empty() {
953            return;
954        }
955        let required_assets = candidates
956            .iter()
957            .filter_map(|candidate| {
958                candidate
959                    .capsules
960                    .first()
961                    .map(|capsule| capsule.id.as_str())
962            })
963            .collect::<Vec<_>>();
964        let publisher_map = self.remote_publishers_snapshot(&required_assets);
965        if publisher_map.is_empty() {
966            return;
967        }
968        candidates.sort_by(|left, right| {
969            effective_candidate_score(right, &publisher_map, &reputation_bias)
970                .partial_cmp(&effective_candidate_score(
971                    left,
972                    &publisher_map,
973                    &reputation_bias,
974                ))
975                .unwrap_or(std::cmp::Ordering::Equal)
976                .then_with(|| left.gene.id.cmp(&right.gene.id))
977        });
978    }
979
980    fn publisher_for_capsule(&self, capsule_id: &str) -> Option<String> {
981        self.remote_publishers_snapshot(&[capsule_id])
982            .get(capsule_id)
983            .cloned()
984    }
985
    /// Returns a map from asset (capsule) id to the sender that published it.
    ///
    /// Serves directly from the in-memory cache when it already covers every
    /// id in `required_assets`; otherwise merges in the mapping persisted in
    /// the store (cache entries win on conflict) and writes the merged view
    /// back into the cache best-effort. An absent or poisoned cache lock is
    /// treated as an empty cache rather than an error.
    fn remote_publishers_snapshot(&self, required_assets: &[&str]) -> BTreeMap<String, String> {
        let cached = self
            .remote_publishers
            .as_ref()
            .and_then(|remote_publishers| {
                remote_publishers.lock().ok().map(|locked| locked.clone())
            })
            .unwrap_or_default();
        // Fast path: cache fully covers the requested assets.
        if !cached.is_empty()
            && required_assets
                .iter()
                .all(|asset_id| cached.contains_key(*asset_id))
        {
            return cached;
        }

        let persisted = remote_publishers_by_asset_from_store(self.store.as_ref());
        if persisted.is_empty() {
            return cached;
        }

        // Cache entries take precedence over persisted ones.
        let mut merged = cached;
        for (asset_id, sender_id) in persisted {
            merged.entry(asset_id).or_insert(sender_id);
        }

        // Best-effort write-back; a poisoned lock simply skips the refresh.
        if let Some(remote_publishers) = self.remote_publishers.as_ref() {
            if let Ok(mut locked) = remote_publishers.lock() {
                for (asset_id, sender_id) in &merged {
                    locked.entry(asset_id.clone()).or_insert(sender_id.clone());
                }
            }
        }

        merged
    }
1022
1023    fn record_reuse_settlement(&self, publisher_id: Option<&str>, success: bool) {
1024        let Some(publisher_id) = publisher_id else {
1025            return;
1026        };
1027        let Some(ledger) = self.economics.as_ref() else {
1028            return;
1029        };
1030        if let Ok(mut locked) = ledger.lock() {
1031            locked.settle_remote_reuse(publisher_id, success, &self.stake_policy);
1032        }
1033    }
1034
    /// Handles a failed replay validation: appends `ValidationFailed`, then
    /// asks the governor whether the accumulated failures warrant revoking
    /// the gene outright; on revocation, every capsule of the gene is
    /// quarantined as well.
    fn record_replay_validation_failure(
        &self,
        best: &GeneCandidate,
        capsule: &Capsule,
        validation: &ValidationPlan,
        report: &ValidationReport,
    ) -> Result<(), ReplayError> {
        let projection = projection_snapshot(self.store.as_ref())
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            Self::confidence_context(&projection, &best.gene.id);

        self.store
            .append_event(EvolutionEvent::ValidationFailed {
                mutation_id: capsule.mutation_id.clone(),
                report: report.to_snapshot(&validation.profile),
                gene_id: Some(best.gene.id.clone()),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;

        // Rescans the log, so the count includes the failure appended above.
        let replay_failures = self.replay_failure_count(&best.gene.id)?;
        let governor_decision = self.governor.evaluate(GovernorInput {
            // Assets with a known remote publisher are judged as remote.
            candidate_source: if self.publisher_for_capsule(&capsule.id).is_some() {
                CandidateSource::Remote
            } else {
                CandidateSource::Local
            },
            // No successes are credited on this failure path.
            success_count: 0,
            blast_radius: BlastRadius {
                files_changed: capsule.outcome.changed_files.len(),
                lines_changed: capsule.outcome.lines_changed,
            },
            replay_failures,
            recent_mutation_ages_secs: Vec::new(),
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        if matches!(governor_decision.target_state, AssetState::Revoked) {
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: best.gene.id.clone(),
                    state: AssetState::Revoked,
                    reason: governor_decision.reason.clone(),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: best.gene.id.clone(),
                    reason: governor_decision.reason,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            // Revoking the gene also quarantines every capsule derived from it.
            for related in &best.capsules {
                self.store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: related.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
        }

        Ok(())
    }
1099
1100    fn confidence_context(
1101        projection: &EvolutionProjection,
1102        gene_id: &str,
1103    ) -> (f32, f32, Option<u64>) {
1104        let peak_confidence = projection
1105            .capsules
1106            .iter()
1107            .filter(|capsule| capsule.gene_id == gene_id)
1108            .map(|capsule| capsule.confidence)
1109            .fold(0.0_f32, f32::max);
1110        let age_secs = projection
1111            .last_updated_at
1112            .get(gene_id)
1113            .and_then(|timestamp| Self::seconds_since_timestamp(timestamp, Utc::now()));
1114        (peak_confidence, peak_confidence, age_secs)
1115    }
1116
1117    fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
1118        let parsed = DateTime::parse_from_rfc3339(timestamp)
1119            .ok()?
1120            .with_timezone(&Utc);
1121        let elapsed = now.signed_duration_since(parsed);
1122        if elapsed < Duration::zero() {
1123            Some(0)
1124        } else {
1125            u64::try_from(elapsed.num_seconds()).ok()
1126        }
1127    }
1128
1129    fn replay_failure_count(&self, gene_id: &str) -> Result<u64, ReplayError> {
1130        Ok(self
1131            .store
1132            .scan(1)
1133            .map_err(|err| ReplayError::Store(err.to_string()))?
1134            .into_iter()
1135            .filter(|stored| {
1136                matches!(
1137                    &stored.event,
1138                    EvolutionEvent::ValidationFailed {
1139                        gene_id: Some(current_gene_id),
1140                        ..
1141                    } if current_gene_id == gene_id
1142                )
1143            })
1144            .count() as u64)
1145    }
1146}
1147
/// A promoted gene whose best capsule confidence has decayed below
/// `MIN_REPLAY_CONFIDENCE` and therefore needs re-validation.
#[derive(Clone, Debug, PartialEq)]
struct ConfidenceRevalidationTarget {
    // Gene whose promoted capsules have gone stale.
    gene_id: String,
    // Ids of the gene's promoted capsules.
    capsule_ids: Vec<String>,
    // Highest decayed confidence observed across those capsules.
    decayed_confidence: f32,
}
1154
1155fn stale_replay_revalidation_targets(
1156    projection: &EvolutionProjection,
1157    now: DateTime<Utc>,
1158) -> Vec<ConfidenceRevalidationTarget> {
1159    projection
1160        .genes
1161        .iter()
1162        .filter(|gene| gene.state == AssetState::Promoted)
1163        .filter_map(|gene| {
1164            let promoted_capsules = projection
1165                .capsules
1166                .iter()
1167                .filter(|capsule| {
1168                    capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
1169                })
1170                .collect::<Vec<_>>();
1171            if promoted_capsules.is_empty() {
1172                return None;
1173            }
1174            let age_secs = projection
1175                .last_updated_at
1176                .get(&gene.id)
1177                .and_then(|timestamp| seconds_since_timestamp_for_confidence(timestamp, now));
1178            let decayed_confidence = promoted_capsules
1179                .iter()
1180                .map(|capsule| decayed_replay_confidence(capsule.confidence, age_secs))
1181                .fold(0.0_f32, f32::max);
1182            if decayed_confidence >= MIN_REPLAY_CONFIDENCE {
1183                return None;
1184            }
1185            Some(ConfidenceRevalidationTarget {
1186                gene_id: gene.id.clone(),
1187                capsule_ids: promoted_capsules
1188                    .into_iter()
1189                    .map(|capsule| capsule.id.clone())
1190                    .collect(),
1191                decayed_confidence,
1192            })
1193        })
1194        .collect()
1195}
1196
1197fn seconds_since_timestamp_for_confidence(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
1198    let parsed = DateTime::parse_from_rfc3339(timestamp)
1199        .ok()?
1200        .with_timezone(&Utc);
1201    let elapsed = now.signed_duration_since(parsed);
1202    if elapsed < Duration::zero() {
1203        Some(0)
1204    } else {
1205        u64::try_from(elapsed.num_seconds()).ok()
1206    }
1207}
1208
/// Top-level error type for EvoKernel operations.
#[derive(Debug, Error)]
pub enum EvoKernelError {
    /// The sandbox failed to apply a mutation.
    #[error("sandbox error: {0}")]
    Sandbox(String),
    /// The validator failed to run (infrastructure error, not a failed check).
    #[error("validation error: {0}")]
    Validation(String),
    /// Validation ran but its checks did not pass; carries the full report.
    #[error("validation failed")]
    ValidationFailed(ValidationReport),
    /// An append or read against the evolution store failed.
    #[error("store error: {0}")]
    Store(String),
}
1220
/// Result of capturing a successful mutation: the committed capsule, the
/// derived gene, and the governor's promotion decision for both.
#[derive(Clone, Debug)]
pub struct CaptureOutcome {
    pub capsule: Capsule,
    pub gene: Gene,
    pub governor_decision: GovernorDecision,
}
1227
/// Outcome of importing remote assets into the local store.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ImportOutcome {
    // Ids of the assets that were imported.
    pub imported_asset_ids: Vec<String>,
    // Whether the import as a whole was accepted.
    pub accepted: bool,
}
1233
/// Point-in-time metrics derived from the evolution event log; serializable
/// for APIs and renderable to Prometheus text format (see
/// `render_metrics_prometheus` on `EvolutionNetworkNode`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct EvolutionMetricsSnapshot {
    // Replay counters and derived rate.
    pub replay_attempts_total: u64,
    pub replay_success_total: u64,
    pub replay_success_rate: f64,
    pub confidence_revalidations_total: u64,
    pub replay_reasoning_avoided_total: u64,
    // Per-task-class replay breakdown.
    pub replay_task_classes: Vec<ReplayTaskClassMetrics>,
    // Mutation lifecycle counters and derived ratio.
    pub mutation_declared_total: u64,
    pub promoted_mutations_total: u64,
    pub promotion_ratio: f64,
    pub gene_revocations_total: u64,
    // Rolling one-hour activity counters.
    pub mutation_velocity_last_hour: u64,
    pub revoke_frequency_last_hour: u64,
    // Current promoted-asset counts and log position.
    pub promoted_genes: u64,
    pub promoted_capsules: u64,
    pub last_event_seq: u64,
}
1252
/// Compact health view derived from an `EvolutionMetricsSnapshot`
/// (see `evolution_health_snapshot`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct EvolutionHealthSnapshot {
    // Overall status label.
    pub status: String,
    pub last_event_seq: u64,
    pub promoted_genes: u64,
    pub promoted_capsules: u64,
}
1260
/// Store-backed node exposing evolution-network operations:
/// publish, fetch, revoke, and metrics/health reporting.
#[derive(Clone)]
pub struct EvolutionNetworkNode {
    pub store: Arc<dyn EvolutionStore>,
}
1265
impl EvolutionNetworkNode {
    /// Wraps an existing evolution store in a network-facing node.
    pub fn new(store: Arc<dyn EvolutionStore>) -> Self {
        Self { store }
    }

    /// Convenience constructor backed by a JSONL store at the default root.
    pub fn with_default_store() -> Self {
        Self {
            store: Arc::new(JsonlEvolutionStore::new(default_store_root())),
        }
    }

    /// Imports the assets carried by a publish request into the local store.
    pub fn accept_publish_request(
        &self,
        request: &PublishRequest,
    ) -> Result<ImportOutcome, EvoKernelError> {
        import_remote_envelope_into_store(
            self.store.as_ref(),
            &EvolutionEnvelope::publish(request.sender_id.clone(), request.assets.clone()),
            None,
        )
    }

    /// Exports locally promoted assets as a publish envelope under `sender_id`.
    pub fn publish_local_assets(
        &self,
        sender_id: impl Into<String>,
    ) -> Result<EvolutionEnvelope, EvoKernelError> {
        export_promoted_assets_from_store(self.store.as_ref(), sender_id)
    }

    /// Answers a fetch query against the local store on behalf of `responder_id`.
    pub fn fetch_assets(
        &self,
        responder_id: impl Into<String>,
        query: &FetchQuery,
    ) -> Result<FetchResponse, EvoKernelError> {
        fetch_assets_from_store(self.store.as_ref(), responder_id, query)
    }

    /// Applies a revocation notice to the local store, returning the applied notice.
    pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
        revoke_assets_in_store(self.store.as_ref(), notice)
    }

    /// Computes the current metrics snapshot from the store's event log.
    pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
        evolution_metrics_snapshot(self.store.as_ref())
    }

    /// Renders metrics (plus the derived health view) in Prometheus text format.
    pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
        self.metrics_snapshot().map(|snapshot| {
            let health = evolution_health_snapshot(&snapshot);
            render_evolution_metrics_prometheus(&snapshot, &health)
        })
    }

    /// Derives the compact health snapshot from the metrics snapshot.
    pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
        self.metrics_snapshot()
            .map(|snapshot| evolution_health_snapshot(&snapshot))
    }
}
1323
/// Orchestrates mutation capture and replay on top of a base kernel:
/// sandbox application, validation, event-sourced storage, candidate
/// selection, governor promotion decisions, and economic settlement.
pub struct EvoKernel<S: KernelState> {
    pub kernel: Arc<Kernel<S>>,
    pub sandbox: Arc<dyn Sandbox>,
    pub validator: Arc<dyn Validator>,
    pub store: Arc<dyn EvolutionStore>,
    pub selector: Arc<dyn Selector>,
    pub governor: Arc<dyn Governor>,
    // Shared EVU ledger used for reuse settlement and reputation bias.
    pub economics: Arc<Mutex<EvuLedger>>,
    // Cache mapping remote asset ids to their publishing sender ids.
    pub remote_publishers: Arc<Mutex<BTreeMap<String, String>>>,
    pub stake_policy: StakePolicy,
    pub sandbox_policy: SandboxPolicy,
    pub validation_plan: ValidationPlan,
}
1337
1338impl<S: KernelState> EvoKernel<S> {
1339    fn recent_prior_mutation_ages_secs(
1340        &self,
1341        exclude_mutation_id: Option<&str>,
1342    ) -> Result<Vec<u64>, EvolutionError> {
1343        let now = Utc::now();
1344        let mut ages = self
1345            .store
1346            .scan(1)?
1347            .into_iter()
1348            .filter_map(|stored| match stored.event {
1349                EvolutionEvent::MutationDeclared { mutation }
1350                    if exclude_mutation_id != Some(mutation.intent.id.as_str()) =>
1351                {
1352                    Self::seconds_since_timestamp(&stored.timestamp, now)
1353                }
1354                _ => None,
1355            })
1356            .collect::<Vec<_>>();
1357        ages.sort_unstable();
1358        Ok(ages)
1359    }
1360
1361    fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
1362        let parsed = DateTime::parse_from_rfc3339(timestamp)
1363            .ok()?
1364            .with_timezone(&Utc);
1365        let elapsed = now.signed_duration_since(parsed);
1366        if elapsed < Duration::zero() {
1367            Some(0)
1368        } else {
1369            u64::try_from(elapsed.num_seconds()).ok()
1370        }
1371    }
1372
    /// Builds an `EvoKernel` with a store-backed selector and default
    /// governor, ledger, publisher cache, stake policy, sandbox policy, and
    /// validation plan; use the `with_*` builders to override any of them.
    pub fn new(
        kernel: Arc<Kernel<S>>,
        sandbox: Arc<dyn Sandbox>,
        validator: Arc<dyn Validator>,
        store: Arc<dyn EvolutionStore>,
    ) -> Self {
        let selector: Arc<dyn Selector> = Arc::new(StoreBackedSelector::new(store.clone()));
        Self {
            kernel,
            sandbox,
            validator,
            store,
            selector,
            governor: Arc::new(DefaultGovernor::default()),
            economics: Arc::new(Mutex::new(EvuLedger::default())),
            remote_publishers: Arc::new(Mutex::new(BTreeMap::new())),
            stake_policy: StakePolicy::default(),
            sandbox_policy: SandboxPolicy::oris_default(),
            validation_plan: ValidationPlan::oris_default(),
        }
    }
1394
    /// Replaces the candidate selector (builder-style).
    pub fn with_selector(mut self, selector: Arc<dyn Selector>) -> Self {
        self.selector = selector;
        self
    }
1399
    /// Replaces the sandbox policy (builder-style).
    pub fn with_sandbox_policy(mut self, policy: SandboxPolicy) -> Self {
        self.sandbox_policy = policy;
        self
    }
1404
    /// Replaces the governor (builder-style).
    pub fn with_governor(mut self, governor: Arc<dyn Governor>) -> Self {
        self.governor = governor;
        self
    }
1409
    /// Replaces the shared EVU ledger (builder-style).
    pub fn with_economics(mut self, economics: Arc<Mutex<EvuLedger>>) -> Self {
        self.economics = economics;
        self
    }
1414
    /// Replaces the stake policy (builder-style).
    pub fn with_stake_policy(mut self, policy: StakePolicy) -> Self {
        self.stake_policy = policy;
        self
    }
1419
    /// Replaces the validation plan (builder-style).
    pub fn with_validation_plan(mut self, plan: ValidationPlan) -> Self {
        self.validation_plan = plan;
        self
    }
1424
    /// Lists ranked replay candidates for `input` without executing any of
    /// them, via a transient `StoreReplayExecutor` that shares this kernel's
    /// components.
    pub fn select_candidates(&self, input: &SelectorInput) -> Vec<GeneCandidate> {
        let executor = StoreReplayExecutor {
            sandbox: self.sandbox.clone(),
            validator: self.validator.clone(),
            store: self.store.clone(),
            selector: self.selector.clone(),
            governor: self.governor.clone(),
            economics: Some(self.economics.clone()),
            remote_publishers: Some(self.remote_publishers.clone()),
            stake_policy: self.stake_policy.clone(),
        };
        executor.collect_replay_candidates(input).candidates
    }
1438
    /// Seeds the store with the built-in templates when no genes exist yet.
    ///
    /// Seeded genes and capsules are deliberately quarantined rather than
    /// promoted: they must pass local validation via replay before becoming
    /// eligible for reuse. Returns a default (not-seeded) report when the
    /// store already contains genes.
    pub fn bootstrap_if_empty(&self, run_id: &RunId) -> Result<BootstrapReport, EvoKernelError> {
        let projection = projection_snapshot(self.store.as_ref())?;
        if !projection.genes.is_empty() {
            return Ok(BootstrapReport::default());
        }

        let templates = built_in_seed_templates();
        for template in &templates {
            // Build the mutation, its signals, and the derived assets up front.
            let mutation = build_seed_mutation(template);
            let extracted = extract_seed_signals(template);
            let gene = build_bootstrap_gene(template, &extracted)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
            let capsule = build_bootstrap_capsule(run_id, template, &mutation, &gene)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;

            // Event order mirrors the live capture pipeline: declare, extract,
            // project, evaluate (quarantine), commit, quarantine capsule.
            self.store
                .append_event(EvolutionEvent::MutationDeclared {
                    mutation: mutation.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::SignalsExtracted {
                    mutation_id: mutation.intent.id.clone(),
                    hash: extracted.hash.clone(),
                    signals: extracted.values.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: gene.id.clone(),
                    state: AssetState::Quarantined,
                    reason: "bootstrap seeds require local validation before replay".into(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleCommitted {
                    capsule: capsule.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id,
                })
                .map_err(store_err)?;
        }

        Ok(BootstrapReport {
            seeded: true,
            genes_added: templates.len(),
            capsules_added: templates.len(),
        })
    }
1494
1495    pub async fn capture_successful_mutation(
1496        &self,
1497        run_id: &RunId,
1498        mutation: PreparedMutation,
1499    ) -> Result<Capsule, EvoKernelError> {
1500        Ok(self
1501            .capture_mutation_with_governor(run_id, mutation)
1502            .await?
1503            .capsule)
1504    }
1505
    /// Full capture pipeline for a prepared mutation: declare → sandbox apply
    /// → validate → extract signals → derive gene → governor decision →
    /// commit capsule, logging every step to the evolution store.
    ///
    /// # Errors
    /// `Sandbox` when the patch fails to apply (a `MutationRejected` event is
    /// recorded first), `ValidationFailed` when checks fail (with the full
    /// report), `Validation` for validator infrastructure errors, and `Store`
    /// when an event append fails.
    pub async fn capture_mutation_with_governor(
        &self,
        run_id: &RunId,
        mutation: PreparedMutation,
    ) -> Result<CaptureOutcome, EvoKernelError> {
        // Phase 1: declare the mutation before attempting anything.
        self.store
            .append_event(EvolutionEvent::MutationDeclared {
                mutation: mutation.clone(),
            })
            .map_err(store_err)?;

        // Phase 2: apply the patch in the sandbox; rejection is recorded.
        let receipt = match self.sandbox.apply(&mutation, &self.sandbox_policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                self.store
                    .append_event(EvolutionEvent::MutationRejected {
                        mutation_id: mutation.intent.id.clone(),
                        reason: err.to_string(),
                    })
                    .map_err(store_err)?;
                return Err(EvoKernelError::Sandbox(err.to_string()));
            }
        };

        self.store
            .append_event(EvolutionEvent::MutationApplied {
                mutation_id: mutation.intent.id.clone(),
                patch_hash: receipt.patch_hash.clone(),
                changed_files: receipt
                    .changed_files
                    .iter()
                    .map(|path| path.to_string_lossy().to_string())
                    .collect(),
            })
            .map_err(store_err)?;

        // Phase 3: run the validation plan against the applied patch.
        let report = self
            .validator
            .run(&receipt, &self.validation_plan)
            .await
            .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
        if !report.success {
            self.store
                .append_event(EvolutionEvent::ValidationFailed {
                    mutation_id: mutation.intent.id.clone(),
                    report: report.to_snapshot(&self.validation_plan.profile),
                    gene_id: None,
                })
                .map_err(store_err)?;
            return Err(EvoKernelError::ValidationFailed(report));
        }

        self.store
            .append_event(EvolutionEvent::ValidationPassed {
                mutation_id: mutation.intent.id.clone(),
                report: report.to_snapshot(&self.validation_plan.profile),
                gene_id: None,
            })
            .map_err(store_err)?;

        // Phase 4: deterministic signal extraction from the patch, intent,
        // changed files, and validation output.
        let extracted_signals = extract_deterministic_signals(&SignalExtractionInput {
            patch_diff: mutation.artifact.payload.clone(),
            intent: mutation.intent.intent.clone(),
            expected_effect: mutation.intent.expected_effect.clone(),
            declared_signals: mutation.intent.signals.clone(),
            changed_files: receipt
                .changed_files
                .iter()
                .map(|path| path.to_string_lossy().to_string())
                .collect(),
            validation_success: report.success,
            validation_logs: report.logs.clone(),
            stage_outputs: report
                .stages
                .iter()
                .flat_map(|stage| [stage.stdout.clone(), stage.stderr.clone()])
                .filter(|value| !value.is_empty())
                .collect(),
        });
        self.store
            .append_event(EvolutionEvent::SignalsExtracted {
                mutation_id: mutation.intent.id.clone(),
                hash: extracted_signals.hash.clone(),
                signals: extracted_signals.values.clone(),
            })
            .map_err(store_err)?;

        // Phase 5: derive the gene and gather inputs for the governor.
        let projection = projection_snapshot(self.store.as_ref())?;
        let blast_radius = compute_blast_radius(&mutation.artifact.payload);
        let recent_mutation_ages_secs = self
            .recent_prior_mutation_ages_secs(Some(mutation.intent.id.as_str()))
            .map_err(store_err)?;
        let mut gene = derive_gene(
            &mutation,
            &receipt,
            &self.validation_plan.profile,
            &extracted_signals.values,
        );
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            StoreReplayExecutor::confidence_context(&projection, &gene.id);
        // Existing capsule count for this gene, plus one for this success.
        let success_count = projection
            .genes
            .iter()
            .find(|existing| existing.id == gene.id)
            .map(|existing| {
                projection
                    .capsules
                    .iter()
                    .filter(|capsule| capsule.gene_id == existing.id)
                    .count() as u64
            })
            .unwrap_or(0)
            + 1;
        let governor_decision = self.governor.evaluate(GovernorInput {
            candidate_source: CandidateSource::Local,
            success_count,
            blast_radius: blast_radius.clone(),
            replay_failures: 0,
            recent_mutation_ages_secs,
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        // Phase 6: record the gene with the governor's target state and emit
        // the matching promotion/revocation events.
        gene.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
            .map_err(store_err)?;
        self.store
            .append_event(EvolutionEvent::PromotionEvaluated {
                gene_id: gene.id.clone(),
                state: governor_decision.target_state.clone(),
                reason: governor_decision.reason.clone(),
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Promoted) {
            self.store
                .append_event(EvolutionEvent::GenePromoted {
                    gene_id: gene.id.clone(),
                })
                .map_err(store_err)?;
        }
        if matches!(governor_decision.target_state, AssetState::Revoked) {
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: gene.id.clone(),
                    reason: governor_decision.reason.clone(),
                })
                .map_err(store_err)?;
        }
        // Link back to the originating spec, when one was declared.
        if let Some(spec_id) = &mutation.intent.spec_id {
            self.store
                .append_event(EvolutionEvent::SpecLinked {
                    mutation_id: mutation.intent.id.clone(),
                    spec_id: spec_id.clone(),
                })
                .map_err(store_err)?;
        }

        // Phase 7: build and commit the capsule under the same target state.
        let mut capsule = build_capsule(
            run_id,
            &mutation,
            &receipt,
            &report,
            &self.validation_plan.profile,
            &gene,
            &blast_radius,
        )
        .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
        capsule.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::CapsuleCommitted {
                capsule: capsule.clone(),
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Quarantined) {
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id.clone(),
                })
                .map_err(store_err)?;
        }

        Ok(CaptureOutcome {
            capsule,
            gene,
            governor_decision,
        })
    }
1695
1696    pub async fn capture_from_proposal(
1697        &self,
1698        run_id: &RunId,
1699        proposal: &AgentMutationProposal,
1700        diff_payload: String,
1701        base_revision: Option<String>,
1702    ) -> Result<CaptureOutcome, EvoKernelError> {
1703        let intent = MutationIntent {
1704            id: next_id("proposal"),
1705            intent: proposal.intent.clone(),
1706            target: MutationTarget::Paths {
1707                allow: proposal.files.clone(),
1708            },
1709            expected_effect: proposal.expected_effect.clone(),
1710            risk: RiskLevel::Low,
1711            signals: proposal.files.clone(),
1712            spec_id: None,
1713        };
1714        self.capture_mutation_with_governor(
1715            run_id,
1716            prepare_mutation(intent, diff_payload, base_revision),
1717        )
1718        .await
1719    }
1720
1721    pub fn feedback_for_agent(outcome: &CaptureOutcome) -> ExecutionFeedback {
1722        ExecutionFeedback {
1723            accepted: !matches!(outcome.governor_decision.target_state, AssetState::Revoked),
1724            asset_state: Some(format!("{:?}", outcome.governor_decision.target_state)),
1725            summary: outcome.governor_decision.reason.clone(),
1726        }
1727    }
1728
1729    pub fn replay_feedback_for_agent(
1730        signals: &[String],
1731        decision: &ReplayDecision,
1732    ) -> ReplayFeedback {
1733        let (task_class_id, task_label) = replay_task_descriptor(signals);
1734        let planner_directive = if decision.used_capsule {
1735            ReplayPlannerDirective::SkipPlanner
1736        } else {
1737            ReplayPlannerDirective::PlanFallback
1738        };
1739        let reasoning_steps_avoided = u64::from(decision.used_capsule);
1740        let fallback_reason = if decision.fallback_to_planner {
1741            Some(decision.reason.clone())
1742        } else {
1743            None
1744        };
1745        let summary = if decision.used_capsule {
1746            format!("reused prior capsule for task class '{task_label}'; skip planner")
1747        } else {
1748            format!(
1749                "planner fallback required for task class '{task_label}': {}",
1750                decision.reason
1751            )
1752        };
1753
1754        ReplayFeedback {
1755            used_capsule: decision.used_capsule,
1756            capsule_id: decision.capsule_id.clone(),
1757            planner_directive,
1758            reasoning_steps_avoided,
1759            fallback_reason,
1760            task_class_id,
1761            task_label,
1762            summary,
1763        }
1764    }
1765    pub async fn run_supervised_devloop(
1766        &self,
1767        run_id: &RunId,
1768        request: &SupervisedDevloopRequest,
1769        diff_payload: String,
1770        base_revision: Option<String>,
1771    ) -> Result<SupervisedDevloopOutcome, EvoKernelError> {
1772        let task_class = classify_supervised_devloop_request(request);
1773        let Some(task_class) = task_class else {
1774            return Ok(SupervisedDevloopOutcome {
1775                task_id: request.task.id.clone(),
1776                task_class: None,
1777                status: SupervisedDevloopStatus::RejectedByPolicy,
1778                execution_feedback: None,
1779                summary: format!(
1780                    "supervised devloop rejected task '{}' because it is an unsupported task outside the bounded scope",
1781                    request.task.id
1782                ),
1783            });
1784        };
1785
1786        if !request.approval.approved {
1787            return Ok(SupervisedDevloopOutcome {
1788                task_id: request.task.id.clone(),
1789                task_class: Some(task_class),
1790                status: SupervisedDevloopStatus::AwaitingApproval,
1791                execution_feedback: None,
1792                summary: format!(
1793                    "supervised devloop paused task '{}' until explicit human approval is granted",
1794                    request.task.id
1795                ),
1796            });
1797        }
1798
1799        let capture = self
1800            .capture_from_proposal(run_id, &request.proposal, diff_payload, base_revision)
1801            .await?;
1802        let approver = request
1803            .approval
1804            .approver
1805            .as_deref()
1806            .unwrap_or("unknown approver");
1807
1808        Ok(SupervisedDevloopOutcome {
1809            task_id: request.task.id.clone(),
1810            task_class: Some(task_class),
1811            status: SupervisedDevloopStatus::Executed,
1812            execution_feedback: Some(Self::feedback_for_agent(&capture)),
1813            summary: format!(
1814                "supervised devloop executed task '{}' with explicit approval from {approver}",
1815                request.task.id
1816            ),
1817        })
1818    }
1819    pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
1820        MultiAgentCoordinator::new().coordinate(plan)
1821    }
1822
1823    pub fn export_promoted_assets(
1824        &self,
1825        sender_id: impl Into<String>,
1826    ) -> Result<EvolutionEnvelope, EvoKernelError> {
1827        let sender_id = sender_id.into();
1828        let envelope = export_promoted_assets_from_store(self.store.as_ref(), sender_id.clone())?;
1829        if !envelope.assets.is_empty() {
1830            let mut ledger = self
1831                .economics
1832                .lock()
1833                .map_err(|_| EvoKernelError::Validation("economics ledger lock poisoned".into()))?;
1834            if ledger
1835                .reserve_publish_stake(&sender_id, &self.stake_policy)
1836                .is_none()
1837            {
1838                return Err(EvoKernelError::Validation(
1839                    "insufficient EVU for remote publish".into(),
1840                ));
1841            }
1842        }
1843        Ok(envelope)
1844    }
1845
1846    pub fn import_remote_envelope(
1847        &self,
1848        envelope: &EvolutionEnvelope,
1849    ) -> Result<ImportOutcome, EvoKernelError> {
1850        import_remote_envelope_into_store(
1851            self.store.as_ref(),
1852            envelope,
1853            Some(self.remote_publishers.as_ref()),
1854        )
1855    }
1856
1857    pub fn fetch_assets(
1858        &self,
1859        responder_id: impl Into<String>,
1860        query: &FetchQuery,
1861    ) -> Result<FetchResponse, EvoKernelError> {
1862        fetch_assets_from_store(self.store.as_ref(), responder_id, query)
1863    }
1864
1865    pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
1866        revoke_assets_in_store(self.store.as_ref(), notice)
1867    }
1868
1869    pub async fn replay_or_fallback(
1870        &self,
1871        input: SelectorInput,
1872    ) -> Result<ReplayDecision, EvoKernelError> {
1873        let replay_run_id = next_id("replay");
1874        self.replay_or_fallback_for_run(&replay_run_id, input).await
1875    }
1876
1877    pub async fn replay_or_fallback_for_run(
1878        &self,
1879        run_id: &RunId,
1880        input: SelectorInput,
1881    ) -> Result<ReplayDecision, EvoKernelError> {
1882        let executor = StoreReplayExecutor {
1883            sandbox: self.sandbox.clone(),
1884            validator: self.validator.clone(),
1885            store: self.store.clone(),
1886            selector: self.selector.clone(),
1887            governor: self.governor.clone(),
1888            economics: Some(self.economics.clone()),
1889            remote_publishers: Some(self.remote_publishers.clone()),
1890            stake_policy: self.stake_policy.clone(),
1891        };
1892        executor
1893            .try_replay_for_run(run_id, &input, &self.sandbox_policy, &self.validation_plan)
1894            .await
1895            .map_err(|err| EvoKernelError::Validation(err.to_string()))
1896    }
1897
1898    pub fn economics_signal(&self, node_id: &str) -> Option<EconomicsSignal> {
1899        self.economics.lock().ok()?.governor_signal(node_id)
1900    }
1901
1902    pub fn selector_reputation_bias(&self) -> BTreeMap<String, f32> {
1903        self.economics
1904            .lock()
1905            .ok()
1906            .map(|locked| locked.selector_reputation_bias())
1907            .unwrap_or_default()
1908    }
1909
1910    pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
1911        evolution_metrics_snapshot(self.store.as_ref())
1912    }
1913
1914    pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
1915        self.metrics_snapshot().map(|snapshot| {
1916            let health = evolution_health_snapshot(&snapshot);
1917            render_evolution_metrics_prometheus(&snapshot, &health)
1918        })
1919    }
1920
1921    pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
1922        self.metrics_snapshot()
1923            .map(|snapshot| evolution_health_snapshot(&snapshot))
1924    }
1925}
1926
1927pub fn prepare_mutation(
1928    intent: MutationIntent,
1929    diff_payload: String,
1930    base_revision: Option<String>,
1931) -> PreparedMutation {
1932    PreparedMutation {
1933        intent,
1934        artifact: MutationArtifact {
1935            encoding: ArtifactEncoding::UnifiedDiff,
1936            content_hash: compute_artifact_hash(&diff_payload),
1937            payload: diff_payload,
1938            base_revision,
1939        },
1940    }
1941}
1942
1943pub fn prepare_mutation_from_spec(
1944    plan: CompiledMutationPlan,
1945    diff_payload: String,
1946    base_revision: Option<String>,
1947) -> PreparedMutation {
1948    prepare_mutation(plan.mutation_intent, diff_payload, base_revision)
1949}
1950
1951pub fn default_evolution_store() -> Arc<dyn EvolutionStore> {
1952    Arc::new(oris_evolution::JsonlEvolutionStore::new(
1953        default_store_root(),
1954    ))
1955}
1956
/// Built-in seed templates used to bootstrap an empty evolution store with a
/// few deterministic, low-risk mutation patterns. Each template carries a
/// unified-diff payload plus the signals that should route future tasks to
/// it; all four share the "bootstrap-seed" validation profile.
fn built_in_seed_templates() -> Vec<SeedTemplate> {
    vec![
        // Creates a minimal README so "missing readme" tasks have a replayable seed.
        SeedTemplate {
            id: "bootstrap-readme".into(),
            intent: "Seed a baseline README recovery pattern".into(),
            signals: vec!["bootstrap readme".into(), "missing readme".into()],
            // The `"\` continuation strips the first newline; the diff body
            // below is part of the string literal and must stay at column 0.
            diff_payload: "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1,3 @@
+# Oris
+Bootstrap documentation seed
+"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Adds a helper to src/lib.rs as a stand-in for a test stabilization.
        SeedTemplate {
            id: "bootstrap-test-fix".into(),
            intent: "Seed a deterministic test stabilization pattern".into(),
            signals: vec!["bootstrap test fix".into(), "failing tests".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 1111111..2222222 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,2 @@
 pub fn demo() -> usize { 1 }
+pub fn normalize_test_output() -> bool { true }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Introduces a trivial private helper as a low-risk refactor seed.
        SeedTemplate {
            id: "bootstrap-refactor".into(),
            intent: "Seed a low-risk refactor capsule".into(),
            signals: vec!["bootstrap refactor".into(), "small refactor".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 2222222..3333333 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,3 @@
 pub fn demo() -> usize { 1 }
+
+fn extract_strategy_key(input: &str) -> &str { input }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Adds a logging helper as the structured-logging seed pattern.
        SeedTemplate {
            id: "bootstrap-logging".into(),
            intent: "Seed a baseline structured logging mutation".into(),
            signals: vec!["bootstrap logging".into(), "structured logs".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 3333333..4444444 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,3 @@
 pub fn demo() -> usize { 1 }
+
+fn emit_bootstrap_log() { println!(\"bootstrap-log\"); }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
    ]
}
2028
2029fn build_seed_mutation(template: &SeedTemplate) -> PreparedMutation {
2030    let changed_files = seed_changed_files(&template.diff_payload);
2031    let target = if changed_files.is_empty() {
2032        MutationTarget::WorkspaceRoot
2033    } else {
2034        MutationTarget::Paths {
2035            allow: changed_files,
2036        }
2037    };
2038    prepare_mutation(
2039        MutationIntent {
2040            id: stable_hash_json(&("bootstrap-mutation", &template.id))
2041                .unwrap_or_else(|_| format!("bootstrap-mutation-{}", template.id)),
2042            intent: template.intent.clone(),
2043            target,
2044            expected_effect: format!("seed {}", template.id),
2045            risk: RiskLevel::Low,
2046            signals: template.signals.clone(),
2047            spec_id: None,
2048        },
2049        template.diff_payload.clone(),
2050        None,
2051    )
2052}
2053
2054fn extract_seed_signals(template: &SeedTemplate) -> SignalExtractionOutput {
2055    let mut signals = BTreeSet::new();
2056    for declared in &template.signals {
2057        if let Some(phrase) = normalize_signal_phrase(declared) {
2058            signals.insert(phrase);
2059        }
2060        extend_signal_tokens(&mut signals, declared);
2061    }
2062    extend_signal_tokens(&mut signals, &template.intent);
2063    extend_signal_tokens(&mut signals, &template.diff_payload);
2064    for changed_file in seed_changed_files(&template.diff_payload) {
2065        extend_signal_tokens(&mut signals, &changed_file);
2066    }
2067    let values = signals.into_iter().take(32).collect::<Vec<_>>();
2068    let hash =
2069        stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
2070    SignalExtractionOutput { values, hash }
2071}
2072
/// List the files a unified diff creates or modifies, taken from its
/// `+++ b/<path>` headers, deduplicated and sorted.
fn seed_changed_files(diff_payload: &str) -> Vec<String> {
    diff_payload
        .lines()
        .filter_map(|line| line.strip_prefix("+++ b/"))
        .map(str::trim)
        .filter(|path| !path.is_empty())
        .map(str::to_string)
        // BTreeSet gives dedup + deterministic ordering in one step.
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect()
}
2085
2086fn build_bootstrap_gene(
2087    template: &SeedTemplate,
2088    extracted: &SignalExtractionOutput,
2089) -> Result<Gene, EvolutionError> {
2090    let strategy = vec![template.id.clone(), "bootstrap".into()];
2091    let id = stable_hash_json(&(
2092        "bootstrap-gene",
2093        &template.id,
2094        &extracted.values,
2095        &template.validation_profile,
2096    ))?;
2097    Ok(Gene {
2098        id,
2099        signals: extracted.values.clone(),
2100        strategy,
2101        validation: vec![template.validation_profile.clone()],
2102        state: AssetState::Quarantined,
2103    })
2104}
2105
2106fn build_bootstrap_capsule(
2107    run_id: &RunId,
2108    template: &SeedTemplate,
2109    mutation: &PreparedMutation,
2110    gene: &Gene,
2111) -> Result<Capsule, EvolutionError> {
2112    let cwd = std::env::current_dir().unwrap_or_else(|_| Path::new(".").to_path_buf());
2113    let env = current_env_fingerprint(&cwd);
2114    let diff_hash = mutation.artifact.content_hash.clone();
2115    let changed_files = seed_changed_files(&template.diff_payload);
2116    let validator_hash = stable_hash_json(&(
2117        "bootstrap-validator",
2118        &template.id,
2119        &template.validation_profile,
2120        &diff_hash,
2121    ))?;
2122    let id = stable_hash_json(&(
2123        "bootstrap-capsule",
2124        &template.id,
2125        run_id,
2126        &gene.id,
2127        &diff_hash,
2128        &env,
2129    ))?;
2130    Ok(Capsule {
2131        id,
2132        gene_id: gene.id.clone(),
2133        mutation_id: mutation.intent.id.clone(),
2134        run_id: run_id.clone(),
2135        diff_hash,
2136        confidence: 0.0,
2137        env,
2138        outcome: Outcome {
2139            success: false,
2140            validation_profile: template.validation_profile.clone(),
2141            validation_duration_ms: 0,
2142            changed_files,
2143            validator_hash,
2144            lines_changed: compute_blast_radius(&template.diff_payload).lines_changed,
2145            replay_verified: false,
2146        },
2147        state: AssetState::Quarantined,
2148    })
2149}
2150
2151fn derive_gene(
2152    mutation: &PreparedMutation,
2153    receipt: &SandboxReceipt,
2154    validation_profile: &str,
2155    extracted_signals: &[String],
2156) -> Gene {
2157    let mut strategy = BTreeSet::new();
2158    for file in &receipt.changed_files {
2159        if let Some(component) = file.components().next() {
2160            strategy.insert(component.as_os_str().to_string_lossy().to_string());
2161        }
2162    }
2163    for token in mutation
2164        .artifact
2165        .payload
2166        .split(|ch: char| !ch.is_ascii_alphanumeric())
2167    {
2168        if token.len() == 5
2169            && token.starts_with('E')
2170            && token[1..].chars().all(|ch| ch.is_ascii_digit())
2171        {
2172            strategy.insert(token.to_string());
2173        }
2174    }
2175    for token in mutation.intent.intent.split_whitespace().take(8) {
2176        strategy.insert(token.to_ascii_lowercase());
2177    }
2178    let strategy = strategy.into_iter().collect::<Vec<_>>();
2179    let id = stable_hash_json(&(extracted_signals, &strategy, validation_profile))
2180        .unwrap_or_else(|_| next_id("gene"));
2181    Gene {
2182        id,
2183        signals: extracted_signals.to_vec(),
2184        strategy,
2185        validation: vec![validation_profile.to_string()],
2186        state: AssetState::Promoted,
2187    }
2188}
2189
2190fn build_capsule(
2191    run_id: &RunId,
2192    mutation: &PreparedMutation,
2193    receipt: &SandboxReceipt,
2194    report: &ValidationReport,
2195    validation_profile: &str,
2196    gene: &Gene,
2197    blast_radius: &BlastRadius,
2198) -> Result<Capsule, EvolutionError> {
2199    let env = current_env_fingerprint(&receipt.workdir);
2200    let validator_hash = stable_hash_json(report)?;
2201    let diff_hash = mutation.artifact.content_hash.clone();
2202    let id = stable_hash_json(&(run_id, &gene.id, &diff_hash, &mutation.intent.id))?;
2203    Ok(Capsule {
2204        id,
2205        gene_id: gene.id.clone(),
2206        mutation_id: mutation.intent.id.clone(),
2207        run_id: run_id.clone(),
2208        diff_hash,
2209        confidence: 0.7,
2210        env,
2211        outcome: oris_evolution::Outcome {
2212            success: true,
2213            validation_profile: validation_profile.to_string(),
2214            validation_duration_ms: report.duration_ms,
2215            changed_files: receipt
2216                .changed_files
2217                .iter()
2218                .map(|path| path.to_string_lossy().to_string())
2219                .collect(),
2220            validator_hash,
2221            lines_changed: blast_radius.lines_changed,
2222            replay_verified: false,
2223        },
2224        state: AssetState::Promoted,
2225    })
2226}
2227
2228fn current_env_fingerprint(workdir: &Path) -> EnvFingerprint {
2229    let rustc_version = Command::new("rustc")
2230        .arg("--version")
2231        .output()
2232        .ok()
2233        .filter(|output| output.status.success())
2234        .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
2235        .unwrap_or_else(|| "rustc unknown".into());
2236    let cargo_lock_hash = fs::read(workdir.join("Cargo.lock"))
2237        .ok()
2238        .map(|bytes| {
2239            let value = String::from_utf8_lossy(&bytes);
2240            compute_artifact_hash(&value)
2241        })
2242        .unwrap_or_else(|| "missing-cargo-lock".into());
2243    let target_triple = format!(
2244        "{}-unknown-{}",
2245        std::env::consts::ARCH,
2246        std::env::consts::OS
2247    );
2248    EnvFingerprint {
2249        rustc_version,
2250        cargo_lock_hash,
2251        target_triple,
2252        os: std::env::consts::OS.to_string(),
2253    }
2254}
2255
2256fn extend_signal_tokens(out: &mut BTreeSet<String>, input: &str) {
2257    for raw in input.split(|ch: char| !ch.is_ascii_alphanumeric()) {
2258        let trimmed = raw.trim();
2259        if trimmed.is_empty() {
2260            continue;
2261        }
2262        let normalized = if is_rust_error_code(trimmed) {
2263            let mut chars = trimmed.chars();
2264            let prefix = chars
2265                .next()
2266                .map(|ch| ch.to_ascii_uppercase())
2267                .unwrap_or('E');
2268            format!("{prefix}{}", chars.as_str())
2269        } else {
2270            trimmed.to_ascii_lowercase()
2271        };
2272        if normalized.len() < 3 {
2273            continue;
2274        }
2275        out.insert(normalized);
2276    }
2277}
2278
2279fn normalize_signal_phrase(input: &str) -> Option<String> {
2280    let normalized = input
2281        .split(|ch: char| !ch.is_ascii_alphanumeric())
2282        .filter_map(|raw| {
2283            let trimmed = raw.trim();
2284            if trimmed.is_empty() {
2285                return None;
2286            }
2287            let normalized = if is_rust_error_code(trimmed) {
2288                let mut chars = trimmed.chars();
2289                let prefix = chars
2290                    .next()
2291                    .map(|ch| ch.to_ascii_uppercase())
2292                    .unwrap_or('E');
2293                format!("{prefix}{}", chars.as_str())
2294            } else {
2295                trimmed.to_ascii_lowercase()
2296            };
2297            if normalized.len() < 3 {
2298                None
2299            } else {
2300                Some(normalized)
2301            }
2302        })
2303        .collect::<Vec<_>>()
2304        .join(" ");
2305    if normalized.is_empty() {
2306        None
2307    } else {
2308        Some(normalized)
2309    }
2310}
2311
2312fn replay_task_descriptor(signals: &[String]) -> (String, String) {
2313    let normalized = signals
2314        .iter()
2315        .filter_map(|signal| normalize_signal_phrase(signal))
2316        .collect::<BTreeSet<_>>()
2317        .into_iter()
2318        .collect::<Vec<_>>();
2319    if normalized.is_empty() {
2320        return ("unknown".into(), "unknown".into());
2321    }
2322    let task_label = normalized
2323        .iter()
2324        .find(|value| {
2325            value.as_str() != "validation passed" && value.as_str() != "validation failed"
2326        })
2327        .cloned()
2328        .unwrap_or_else(|| normalized[0].clone());
2329    let task_class_id = stable_hash_json(&normalized)
2330        .unwrap_or_else(|_| compute_artifact_hash(&normalized.join("\n")));
2331    (task_class_id, task_label)
2332}
2333
/// True when `value` looks like a Rust compiler error code: exactly five
/// bytes, a leading `e`/`E`, then four ASCII digits.
fn is_rust_error_code(value: &str) -> bool {
    let bytes = value.as_bytes();
    bytes.len() == 5
        && bytes[0].eq_ignore_ascii_case(&b'E')
        && bytes[1..].iter().all(u8::is_ascii_digit)
}
2339
2340fn classify_supervised_devloop_request(
2341    request: &SupervisedDevloopRequest,
2342) -> Option<BoundedTaskClass> {
2343    let path = request.proposal.files.first()?.trim();
2344    if request.proposal.files.len() != 1 || path.is_empty() {
2345        return None;
2346    }
2347    let normalized = path.replace('\\', "/");
2348    if normalized.starts_with("docs/") && normalized.ends_with(".md") {
2349        Some(BoundedTaskClass::DocsSingleFile)
2350    } else {
2351        None
2352    }
2353}
2354
2355fn find_declared_mutation(
2356    store: &dyn EvolutionStore,
2357    mutation_id: &MutationId,
2358) -> Result<Option<PreparedMutation>, EvolutionError> {
2359    for stored in store.scan(1)? {
2360        if let EvolutionEvent::MutationDeclared { mutation } = stored.event {
2361            if &mutation.intent.id == mutation_id {
2362                return Ok(Some(mutation));
2363            }
2364        }
2365    }
2366    Ok(None)
2367}
2368
2369fn exact_match_candidates(store: &dyn EvolutionStore, input: &SelectorInput) -> Vec<GeneCandidate> {
2370    let Ok(projection) = projection_snapshot(store) else {
2371        return Vec::new();
2372    };
2373    let capsules = projection.capsules.clone();
2374    let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
2375    let requested_spec_id = input
2376        .spec_id
2377        .as_deref()
2378        .map(str::trim)
2379        .filter(|value| !value.is_empty());
2380    let signal_set = input
2381        .signals
2382        .iter()
2383        .map(|signal| signal.to_ascii_lowercase())
2384        .collect::<BTreeSet<_>>();
2385    let mut candidates = projection
2386        .genes
2387        .into_iter()
2388        .filter_map(|gene| {
2389            if gene.state != AssetState::Promoted {
2390                return None;
2391            }
2392            if let Some(spec_id) = requested_spec_id {
2393                let matches_spec = spec_ids_by_gene
2394                    .get(&gene.id)
2395                    .map(|values| {
2396                        values
2397                            .iter()
2398                            .any(|value| value.eq_ignore_ascii_case(spec_id))
2399                    })
2400                    .unwrap_or(false);
2401                if !matches_spec {
2402                    return None;
2403                }
2404            }
2405            let gene_signals = gene
2406                .signals
2407                .iter()
2408                .map(|signal| signal.to_ascii_lowercase())
2409                .collect::<BTreeSet<_>>();
2410            if gene_signals == signal_set {
2411                let mut matched_capsules = capsules
2412                    .iter()
2413                    .filter(|capsule| {
2414                        capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
2415                    })
2416                    .cloned()
2417                    .collect::<Vec<_>>();
2418                matched_capsules.sort_by(|left, right| {
2419                    replay_environment_match_factor(&input.env, &right.env)
2420                        .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
2421                        .unwrap_or(std::cmp::Ordering::Equal)
2422                        .then_with(|| {
2423                            right
2424                                .confidence
2425                                .partial_cmp(&left.confidence)
2426                                .unwrap_or(std::cmp::Ordering::Equal)
2427                        })
2428                        .then_with(|| left.id.cmp(&right.id))
2429                });
2430                if matched_capsules.is_empty() {
2431                    None
2432                } else {
2433                    let score = matched_capsules
2434                        .first()
2435                        .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
2436                        .unwrap_or(0.0);
2437                    Some(GeneCandidate {
2438                        gene,
2439                        score,
2440                        capsules: matched_capsules,
2441                    })
2442                }
2443            } else {
2444                None
2445            }
2446        })
2447        .collect::<Vec<_>>();
2448    candidates.sort_by(|left, right| {
2449        right
2450            .score
2451            .partial_cmp(&left.score)
2452            .unwrap_or(std::cmp::Ordering::Equal)
2453            .then_with(|| left.gene.id.cmp(&right.gene.id))
2454    });
2455    candidates
2456}
2457
2458fn quarantined_remote_exact_match_candidates(
2459    store: &dyn EvolutionStore,
2460    input: &SelectorInput,
2461) -> Vec<GeneCandidate> {
2462    let remote_asset_ids = store
2463        .scan(1)
2464        .ok()
2465        .map(|events| {
2466            events
2467                .into_iter()
2468                .filter_map(|stored| match stored.event {
2469                    EvolutionEvent::RemoteAssetImported {
2470                        source: CandidateSource::Remote,
2471                        asset_ids,
2472                        ..
2473                    } => Some(asset_ids),
2474                    _ => None,
2475                })
2476                .flatten()
2477                .collect::<BTreeSet<_>>()
2478        })
2479        .unwrap_or_default();
2480    if remote_asset_ids.is_empty() {
2481        return Vec::new();
2482    }
2483
2484    let Ok(projection) = projection_snapshot(store) else {
2485        return Vec::new();
2486    };
2487    let capsules = projection.capsules.clone();
2488    let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
2489    let requested_spec_id = input
2490        .spec_id
2491        .as_deref()
2492        .map(str::trim)
2493        .filter(|value| !value.is_empty());
2494    let normalized_signals = input
2495        .signals
2496        .iter()
2497        .filter_map(|signal| normalize_signal_phrase(signal))
2498        .collect::<BTreeSet<_>>()
2499        .into_iter()
2500        .collect::<Vec<_>>();
2501    if normalized_signals.is_empty() {
2502        return Vec::new();
2503    }
2504    let mut candidates = projection
2505        .genes
2506        .into_iter()
2507        .filter_map(|gene| {
2508            if !matches!(gene.state, AssetState::Promoted | AssetState::Quarantined) {
2509                return None;
2510            }
2511            if let Some(spec_id) = requested_spec_id {
2512                let matches_spec = spec_ids_by_gene
2513                    .get(&gene.id)
2514                    .map(|values| {
2515                        values
2516                            .iter()
2517                            .any(|value| value.eq_ignore_ascii_case(spec_id))
2518                    })
2519                    .unwrap_or(false);
2520                if !matches_spec {
2521                    return None;
2522                }
2523            }
2524            let normalized_gene_signals = gene
2525                .signals
2526                .iter()
2527                .filter_map(|candidate| normalize_signal_phrase(candidate))
2528                .collect::<Vec<_>>();
2529            let matched_query_count = normalized_signals
2530                .iter()
2531                .filter(|signal| {
2532                    normalized_gene_signals.iter().any(|candidate| {
2533                        candidate.contains(signal.as_str()) || signal.contains(candidate)
2534                    })
2535                })
2536                .count();
2537            if matched_query_count == 0 {
2538                return None;
2539            }
2540
2541            let mut matched_capsules = capsules
2542                .iter()
2543                .filter(|capsule| {
2544                    capsule.gene_id == gene.id
2545                        && capsule.state == AssetState::Quarantined
2546                        && remote_asset_ids.contains(&capsule.id)
2547                })
2548                .cloned()
2549                .collect::<Vec<_>>();
2550            matched_capsules.sort_by(|left, right| {
2551                replay_environment_match_factor(&input.env, &right.env)
2552                    .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
2553                    .unwrap_or(std::cmp::Ordering::Equal)
2554                    .then_with(|| {
2555                        right
2556                            .confidence
2557                            .partial_cmp(&left.confidence)
2558                            .unwrap_or(std::cmp::Ordering::Equal)
2559                    })
2560                    .then_with(|| left.id.cmp(&right.id))
2561            });
2562            if matched_capsules.is_empty() {
2563                None
2564            } else {
2565                let overlap = matched_query_count as f32 / normalized_signals.len() as f32;
2566                let env_score = matched_capsules
2567                    .first()
2568                    .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
2569                    .unwrap_or(0.0);
2570                Some(GeneCandidate {
2571                    gene,
2572                    score: overlap.max(env_score),
2573                    capsules: matched_capsules,
2574                })
2575            }
2576        })
2577        .collect::<Vec<_>>();
2578    candidates.sort_by(|left, right| {
2579        right
2580            .score
2581            .partial_cmp(&left.score)
2582            .unwrap_or(std::cmp::Ordering::Equal)
2583            .then_with(|| left.gene.id.cmp(&right.gene.id))
2584    });
2585    candidates
2586}
2587
2588fn replay_environment_match_factor(input: &EnvFingerprint, candidate: &EnvFingerprint) -> f32 {
2589    let fields = [
2590        input
2591            .rustc_version
2592            .eq_ignore_ascii_case(&candidate.rustc_version),
2593        input
2594            .cargo_lock_hash
2595            .eq_ignore_ascii_case(&candidate.cargo_lock_hash),
2596        input
2597            .target_triple
2598            .eq_ignore_ascii_case(&candidate.target_triple),
2599        input.os.eq_ignore_ascii_case(&candidate.os),
2600    ];
2601    let matched_fields = fields.into_iter().filter(|matched| *matched).count() as f32;
2602    0.5 + ((matched_fields / 4.0) * 0.5)
2603}
2604
2605fn effective_candidate_score(
2606    candidate: &GeneCandidate,
2607    publishers_by_asset: &BTreeMap<String, String>,
2608    reputation_bias: &BTreeMap<String, f32>,
2609) -> f32 {
2610    let bias = candidate
2611        .capsules
2612        .first()
2613        .and_then(|capsule| publishers_by_asset.get(&capsule.id))
2614        .and_then(|publisher| reputation_bias.get(publisher))
2615        .copied()
2616        .unwrap_or(0.0)
2617        .clamp(0.0, 1.0);
2618    candidate.score * (1.0 + (bias * 0.1))
2619}
2620
2621fn export_promoted_assets_from_store(
2622    store: &dyn EvolutionStore,
2623    sender_id: impl Into<String>,
2624) -> Result<EvolutionEnvelope, EvoKernelError> {
2625    let (events, projection) = scan_projection(store)?;
2626    let genes = projection
2627        .genes
2628        .into_iter()
2629        .filter(|gene| gene.state == AssetState::Promoted)
2630        .collect::<Vec<_>>();
2631    let capsules = projection
2632        .capsules
2633        .into_iter()
2634        .filter(|capsule| capsule.state == AssetState::Promoted)
2635        .collect::<Vec<_>>();
2636    let assets = replay_export_assets(&events, genes, capsules);
2637    Ok(EvolutionEnvelope::publish(sender_id, assets))
2638}
2639
2640fn scan_projection(
2641    store: &dyn EvolutionStore,
2642) -> Result<(Vec<StoredEvolutionEvent>, EvolutionProjection), EvoKernelError> {
2643    store.scan_projection().map_err(store_err)
2644}
2645
2646fn projection_snapshot(store: &dyn EvolutionStore) -> Result<EvolutionProjection, EvoKernelError> {
2647    scan_projection(store).map(|(_, projection)| projection)
2648}
2649
2650fn replay_export_assets(
2651    events: &[StoredEvolutionEvent],
2652    genes: Vec<Gene>,
2653    capsules: Vec<Capsule>,
2654) -> Vec<NetworkAsset> {
2655    let mutation_ids = capsules
2656        .iter()
2657        .map(|capsule| capsule.mutation_id.clone())
2658        .collect::<BTreeSet<_>>();
2659    let mut assets = replay_export_events_for_mutations(events, &mutation_ids);
2660    for gene in genes {
2661        assets.push(NetworkAsset::Gene { gene });
2662    }
2663    for capsule in capsules {
2664        assets.push(NetworkAsset::Capsule { capsule });
2665    }
2666    assets
2667}
2668
2669fn replay_export_events_for_mutations(
2670    events: &[StoredEvolutionEvent],
2671    mutation_ids: &BTreeSet<String>,
2672) -> Vec<NetworkAsset> {
2673    if mutation_ids.is_empty() {
2674        return Vec::new();
2675    }
2676
2677    let mut assets = Vec::new();
2678    let mut seen_mutations = BTreeSet::new();
2679    let mut seen_spec_links = BTreeSet::new();
2680    for stored in events {
2681        match &stored.event {
2682            EvolutionEvent::MutationDeclared { mutation }
2683                if mutation_ids.contains(mutation.intent.id.as_str())
2684                    && seen_mutations.insert(mutation.intent.id.clone()) =>
2685            {
2686                assets.push(NetworkAsset::EvolutionEvent {
2687                    event: EvolutionEvent::MutationDeclared {
2688                        mutation: mutation.clone(),
2689                    },
2690                });
2691            }
2692            EvolutionEvent::SpecLinked {
2693                mutation_id,
2694                spec_id,
2695            } if mutation_ids.contains(mutation_id.as_str())
2696                && seen_spec_links.insert((mutation_id.clone(), spec_id.clone())) =>
2697            {
2698                assets.push(NetworkAsset::EvolutionEvent {
2699                    event: EvolutionEvent::SpecLinked {
2700                        mutation_id: mutation_id.clone(),
2701                        spec_id: spec_id.clone(),
2702                    },
2703                });
2704            }
2705            _ => {}
2706        }
2707    }
2708
2709    assets
2710}
2711
/// Imports a remote evolution envelope into the local store.
///
/// The envelope's content hash is verified first; a mismatch is rejected as a
/// validation error before anything is written. Genes and capsules whose ids
/// are already known are skipped, so re-importing the same envelope is
/// idempotent. Every newly imported gene/capsule is forced into
/// `AssetState::Quarantined` — remote assets must pass local validation
/// before promotion.
///
/// Returns an [`ImportOutcome`] listing the ids of the newly imported assets.
fn import_remote_envelope_into_store(
    store: &dyn EvolutionStore,
    envelope: &EvolutionEnvelope,
    remote_publishers: Option<&Mutex<BTreeMap<String, String>>>,
) -> Result<ImportOutcome, EvoKernelError> {
    // Reject tampered or corrupted envelopes before touching the store.
    if !envelope.verify_content_hash() {
        return Err(EvoKernelError::Validation(
            "invalid evolution envelope hash".into(),
        ));
    }

    let sender_id = normalized_sender_id(&envelope.sender_id);
    // Snapshot everything the store already knows so duplicate assets and
    // duplicate events from the envelope can be skipped.
    let (events, projection) = scan_projection(store)?;
    let mut known_gene_ids = projection
        .genes
        .into_iter()
        .map(|gene| gene.id)
        .collect::<BTreeSet<_>>();
    let mut known_capsule_ids = projection
        .capsules
        .into_iter()
        .map(|capsule| capsule.id)
        .collect::<BTreeSet<_>>();
    let mut known_mutation_ids = BTreeSet::new();
    let mut known_spec_links = BTreeSet::new();
    for stored in &events {
        match &stored.event {
            EvolutionEvent::MutationDeclared { mutation } => {
                known_mutation_ids.insert(mutation.intent.id.clone());
            }
            EvolutionEvent::SpecLinked {
                mutation_id,
                spec_id,
            } => {
                known_spec_links.insert((mutation_id.clone(), spec_id.clone()));
            }
            _ => {}
        }
    }
    let mut imported_asset_ids = Vec::new();
    for asset in &envelope.assets {
        match asset {
            NetworkAsset::Gene { gene } => {
                // `insert` returning false means the id was already known.
                if !known_gene_ids.insert(gene.id.clone()) {
                    continue;
                }
                imported_asset_ids.push(gene.id.clone());
                // Force quarantine regardless of the published state.
                let mut quarantined_gene = gene.clone();
                quarantined_gene.state = AssetState::Quarantined;
                store
                    .append_event(EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids: vec![gene.id.clone()],
                        sender_id: sender_id.clone(),
                    })
                    .map_err(store_err)?;
                store
                    .append_event(EvolutionEvent::GeneProjected {
                        gene: quarantined_gene.clone(),
                    })
                    .map_err(store_err)?;
                record_remote_publisher_for_asset(remote_publishers, &envelope.sender_id, asset);
                store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: quarantined_gene.id,
                        state: AssetState::Quarantined,
                        reason: "remote asset requires local validation before promotion".into(),
                    })
                    .map_err(store_err)?;
            }
            NetworkAsset::Capsule { capsule } => {
                if !known_capsule_ids.insert(capsule.id.clone()) {
                    continue;
                }
                imported_asset_ids.push(capsule.id.clone());
                store
                    .append_event(EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids: vec![capsule.id.clone()],
                        sender_id: sender_id.clone(),
                    })
                    .map_err(store_err)?;
                // Capsules are likewise committed in quarantined state.
                let mut quarantined = capsule.clone();
                quarantined.state = AssetState::Quarantined;
                store
                    .append_event(EvolutionEvent::CapsuleCommitted {
                        capsule: quarantined.clone(),
                    })
                    .map_err(store_err)?;
                record_remote_publisher_for_asset(remote_publishers, &envelope.sender_id, asset);
                store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: quarantined.id,
                    })
                    .map_err(store_err)?;
            }
            NetworkAsset::EvolutionEvent { event } => {
                // Append pass-through events only once; mutation declarations
                // dedupe by id, spec links by (mutation, spec) pair.
                let should_append = match event {
                    EvolutionEvent::MutationDeclared { mutation } => {
                        known_mutation_ids.insert(mutation.intent.id.clone())
                    }
                    EvolutionEvent::SpecLinked {
                        mutation_id,
                        spec_id,
                    } => known_spec_links.insert((mutation_id.clone(), spec_id.clone())),
                    // NOTE(review): this guard arm looks unreachable —
                    // `should_import_remote_event` accepts only the two
                    // variants already matched above. Confirm before relying
                    // on it to admit any other event kind.
                    _ if should_import_remote_event(event) => true,
                    _ => false,
                };
                if should_append {
                    store.append_event(event.clone()).map_err(store_err)?;
                }
            }
        }
    }

    Ok(ImportOutcome {
        imported_asset_ids,
        accepted: true,
    })
}
2832
/// Trims the sender id and returns it as an owned string, or `None` when the
/// trimmed value is empty.
fn normalized_sender_id(sender_id: &str) -> Option<String> {
    let trimmed = sender_id.trim();
    (!trimmed.is_empty()).then(|| trimmed.to_string())
}
2841
2842fn record_remote_publisher_for_asset(
2843    remote_publishers: Option<&Mutex<BTreeMap<String, String>>>,
2844    sender_id: &str,
2845    asset: &NetworkAsset,
2846) {
2847    let Some(remote_publishers) = remote_publishers else {
2848        return;
2849    };
2850    let sender_id = sender_id.trim();
2851    if sender_id.is_empty() {
2852        return;
2853    }
2854    let Ok(mut publishers) = remote_publishers.lock() else {
2855        return;
2856    };
2857    match asset {
2858        NetworkAsset::Gene { gene } => {
2859            publishers.insert(gene.id.clone(), sender_id.to_string());
2860        }
2861        NetworkAsset::Capsule { capsule } => {
2862            publishers.insert(capsule.id.clone(), sender_id.to_string());
2863        }
2864        NetworkAsset::EvolutionEvent { .. } => {}
2865    }
2866}
2867
2868fn remote_publishers_by_asset_from_store(store: &dyn EvolutionStore) -> BTreeMap<String, String> {
2869    let Ok(events) = store.scan(1) else {
2870        return BTreeMap::new();
2871    };
2872    remote_publishers_by_asset_from_events(&events)
2873}
2874
2875fn remote_publishers_by_asset_from_events(
2876    events: &[StoredEvolutionEvent],
2877) -> BTreeMap<String, String> {
2878    let mut imported_asset_publishers = BTreeMap::<String, String>::new();
2879    let mut known_gene_ids = BTreeSet::<String>::new();
2880    let mut known_capsule_ids = BTreeSet::<String>::new();
2881    let mut publishers_by_asset = BTreeMap::<String, String>::new();
2882
2883    for stored in events {
2884        match &stored.event {
2885            EvolutionEvent::RemoteAssetImported {
2886                source: CandidateSource::Remote,
2887                asset_ids,
2888                sender_id,
2889            } => {
2890                let Some(sender_id) = sender_id.as_deref().and_then(normalized_sender_id) else {
2891                    continue;
2892                };
2893                for asset_id in asset_ids {
2894                    imported_asset_publishers.insert(asset_id.clone(), sender_id.clone());
2895                    if known_gene_ids.contains(asset_id) || known_capsule_ids.contains(asset_id) {
2896                        publishers_by_asset.insert(asset_id.clone(), sender_id.clone());
2897                    }
2898                }
2899            }
2900            EvolutionEvent::GeneProjected { gene } => {
2901                known_gene_ids.insert(gene.id.clone());
2902                if let Some(sender_id) = imported_asset_publishers.get(&gene.id) {
2903                    publishers_by_asset.insert(gene.id.clone(), sender_id.clone());
2904                }
2905            }
2906            EvolutionEvent::CapsuleCommitted { capsule } => {
2907                known_capsule_ids.insert(capsule.id.clone());
2908                if let Some(sender_id) = imported_asset_publishers.get(&capsule.id) {
2909                    publishers_by_asset.insert(capsule.id.clone(), sender_id.clone());
2910                }
2911            }
2912            _ => {}
2913        }
2914    }
2915
2916    publishers_by_asset
2917}
2918
2919fn should_import_remote_event(event: &EvolutionEvent) -> bool {
2920    matches!(
2921        event,
2922        EvolutionEvent::MutationDeclared { .. } | EvolutionEvent::SpecLinked { .. }
2923    )
2924}
2925
2926fn fetch_assets_from_store(
2927    store: &dyn EvolutionStore,
2928    responder_id: impl Into<String>,
2929    query: &FetchQuery,
2930) -> Result<FetchResponse, EvoKernelError> {
2931    let (events, projection) = scan_projection(store)?;
2932    let normalized_signals: Vec<String> = query
2933        .signals
2934        .iter()
2935        .map(|signal| signal.trim().to_ascii_lowercase())
2936        .filter(|signal| !signal.is_empty())
2937        .collect();
2938    let matches_any_signal = |candidate: &str| {
2939        if normalized_signals.is_empty() {
2940            return true;
2941        }
2942        let candidate = candidate.to_ascii_lowercase();
2943        normalized_signals
2944            .iter()
2945            .any(|signal| candidate.contains(signal) || signal.contains(&candidate))
2946    };
2947
2948    let matched_genes: Vec<Gene> = projection
2949        .genes
2950        .into_iter()
2951        .filter(|gene| gene.state == AssetState::Promoted)
2952        .filter(|gene| gene.signals.iter().any(|signal| matches_any_signal(signal)))
2953        .collect();
2954    let matched_gene_ids: BTreeSet<String> =
2955        matched_genes.iter().map(|gene| gene.id.clone()).collect();
2956    let matched_capsules: Vec<Capsule> = projection
2957        .capsules
2958        .into_iter()
2959        .filter(|capsule| capsule.state == AssetState::Promoted)
2960        .filter(|capsule| matched_gene_ids.contains(&capsule.gene_id))
2961        .collect();
2962
2963    let assets = replay_export_assets(&events, matched_genes, matched_capsules);
2964
2965    Ok(FetchResponse {
2966        sender_id: responder_id.into(),
2967        assets,
2968    })
2969}
2970
2971fn revoke_assets_in_store(
2972    store: &dyn EvolutionStore,
2973    notice: &RevokeNotice,
2974) -> Result<RevokeNotice, EvoKernelError> {
2975    let projection = projection_snapshot(store)?;
2976    let requested: BTreeSet<String> = notice
2977        .asset_ids
2978        .iter()
2979        .map(|asset_id| asset_id.trim().to_string())
2980        .filter(|asset_id| !asset_id.is_empty())
2981        .collect();
2982    let mut revoked_gene_ids = BTreeSet::new();
2983    let mut quarantined_capsule_ids = BTreeSet::new();
2984
2985    for gene in &projection.genes {
2986        if requested.contains(&gene.id) {
2987            revoked_gene_ids.insert(gene.id.clone());
2988        }
2989    }
2990    for capsule in &projection.capsules {
2991        if requested.contains(&capsule.id) {
2992            quarantined_capsule_ids.insert(capsule.id.clone());
2993            revoked_gene_ids.insert(capsule.gene_id.clone());
2994        }
2995    }
2996    for capsule in &projection.capsules {
2997        if revoked_gene_ids.contains(&capsule.gene_id) {
2998            quarantined_capsule_ids.insert(capsule.id.clone());
2999        }
3000    }
3001
3002    for gene_id in &revoked_gene_ids {
3003        store
3004            .append_event(EvolutionEvent::GeneRevoked {
3005                gene_id: gene_id.clone(),
3006                reason: notice.reason.clone(),
3007            })
3008            .map_err(store_err)?;
3009    }
3010    for capsule_id in &quarantined_capsule_ids {
3011        store
3012            .append_event(EvolutionEvent::CapsuleQuarantined {
3013                capsule_id: capsule_id.clone(),
3014            })
3015            .map_err(store_err)?;
3016    }
3017
3018    let mut affected_ids: Vec<String> = revoked_gene_ids.into_iter().collect();
3019    affected_ids.extend(quarantined_capsule_ids);
3020    affected_ids.sort();
3021    affected_ids.dedup();
3022
3023    Ok(RevokeNotice {
3024        sender_id: notice.sender_id.clone(),
3025        asset_ids: affected_ids,
3026        reason: notice.reason.clone(),
3027    })
3028}
3029
/// Builds an aggregate metrics snapshot from the full evolution event log and
/// its projection.
///
/// Counting rules as implemented below:
/// - replay success = `CapsuleReused` events
/// - replay failure = `ValidationFailed` events carrying a `gene_id`
/// - attempts = successes + failures
/// - one "reasoning step avoided" is credited per successful replay
fn evolution_metrics_snapshot(
    store: &dyn EvolutionStore,
) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
    let (events, projection) = scan_projection(store)?;
    // Gene id -> (task_class_id, task_label), derived from the gene's
    // signals; used to bucket replay reuse counts per task class.
    let gene_task_classes = projection
        .genes
        .iter()
        .map(|gene| (gene.id.clone(), replay_task_descriptor(&gene.signals)))
        .collect::<BTreeMap<_, _>>();
    let replay_success_total = events
        .iter()
        .filter(|stored| matches!(stored.event, EvolutionEvent::CapsuleReused { .. }))
        .count() as u64;
    // Reused capsules whose gene is missing from the projection count toward
    // the global total above but land in no task-class bucket.
    let mut replay_task_class_totals = BTreeMap::<(String, String), u64>::new();
    for stored in &events {
        if let EvolutionEvent::CapsuleReused { gene_id, .. } = &stored.event {
            if let Some((task_class_id, task_label)) = gene_task_classes.get(gene_id) {
                *replay_task_class_totals
                    .entry((task_class_id.clone(), task_label.clone()))
                    .or_insert(0) += 1;
            }
        }
    }
    // One avoided reasoning step is credited per successful replay.
    let replay_task_classes = replay_task_class_totals
        .into_iter()
        .map(
            |((task_class_id, task_label), replay_success_total)| ReplayTaskClassMetrics {
                task_class_id,
                task_label,
                replay_success_total,
                reasoning_steps_avoided_total: replay_success_total,
            },
        )
        .collect::<Vec<_>>();
    let replay_reasoning_avoided_total = replay_task_classes
        .iter()
        .map(|entry| entry.reasoning_steps_avoided_total)
        .sum();
    let replay_failures_total = events
        .iter()
        .filter(|stored| is_replay_validation_failure(&stored.event))
        .count() as u64;
    let replay_attempts_total = replay_success_total + replay_failures_total;
    let confidence_revalidations_total = events
        .iter()
        .filter(|stored| is_confidence_revalidation_event(&stored.event))
        .count() as u64;
    let mutation_declared_total = events
        .iter()
        .filter(|stored| matches!(stored.event, EvolutionEvent::MutationDeclared { .. }))
        .count() as u64;
    let promoted_mutations_total = events
        .iter()
        .filter(|stored| matches!(stored.event, EvolutionEvent::GenePromoted { .. }))
        .count() as u64;
    let gene_revocations_total = events
        .iter()
        .filter(|stored| matches!(stored.event, EvolutionEvent::GeneRevoked { .. }))
        .count() as u64;
    // Rolling one-hour velocity window based on event timestamps; events with
    // unparseable timestamps are excluded by count_recent_events.
    let cutoff = Utc::now() - Duration::hours(1);
    let mutation_velocity_last_hour = count_recent_events(&events, cutoff, |event| {
        matches!(event, EvolutionEvent::MutationDeclared { .. })
    });
    let revoke_frequency_last_hour = count_recent_events(&events, cutoff, |event| {
        matches!(event, EvolutionEvent::GeneRevoked { .. })
    });
    let promoted_genes = projection
        .genes
        .iter()
        .filter(|gene| gene.state == AssetState::Promoted)
        .count() as u64;
    let promoted_capsules = projection
        .capsules
        .iter()
        .filter(|capsule| capsule.state == AssetState::Promoted)
        .count() as u64;

    Ok(EvolutionMetricsSnapshot {
        replay_attempts_total,
        replay_success_total,
        replay_success_rate: safe_ratio(replay_success_total, replay_attempts_total),
        confidence_revalidations_total,
        replay_reasoning_avoided_total,
        replay_task_classes,
        mutation_declared_total,
        promoted_mutations_total,
        promotion_ratio: safe_ratio(promoted_mutations_total, mutation_declared_total),
        gene_revocations_total,
        mutation_velocity_last_hour,
        revoke_frequency_last_hour,
        promoted_genes,
        promoted_capsules,
        last_event_seq: events.last().map(|stored| stored.seq).unwrap_or(0),
    })
}
3125
3126fn evolution_health_snapshot(snapshot: &EvolutionMetricsSnapshot) -> EvolutionHealthSnapshot {
3127    EvolutionHealthSnapshot {
3128        status: "ok".into(),
3129        last_event_seq: snapshot.last_event_seq,
3130        promoted_genes: snapshot.promoted_genes,
3131        promoted_capsules: snapshot.promoted_capsules,
3132    }
3133}
3134
/// Renders the metrics and health snapshots in the Prometheus text exposition
/// format: a `# HELP` / `# TYPE` pair followed by the sample line(s) for each
/// metric. Per-task-class metrics emit one labeled sample per task class,
/// with label values escaped via `prometheus_label_value`.
///
/// NOTE(review): each sample is built with `format!` then `push_str`; using
/// `write!(out, ...)` via `std::fmt::Write` would avoid the intermediate
/// `String` allocations — worth considering if this rendering gets hot.
fn render_evolution_metrics_prometheus(
    snapshot: &EvolutionMetricsSnapshot,
    health: &EvolutionHealthSnapshot,
) -> String {
    let mut out = String::new();
    // Replay counters.
    out.push_str(
        "# HELP oris_evolution_replay_attempts_total Total replay attempts that reached validation.\n",
    );
    out.push_str("# TYPE oris_evolution_replay_attempts_total counter\n");
    out.push_str(&format!(
        "oris_evolution_replay_attempts_total {}\n",
        snapshot.replay_attempts_total
    ));
    out.push_str("# HELP oris_evolution_replay_success_total Total replay attempts that reused a capsule successfully.\n");
    out.push_str("# TYPE oris_evolution_replay_success_total counter\n");
    out.push_str(&format!(
        "oris_evolution_replay_success_total {}\n",
        snapshot.replay_success_total
    ));
    out.push_str("# HELP oris_evolution_replay_reasoning_avoided_total Total planner steps avoided by successful replay.\n");
    out.push_str("# TYPE oris_evolution_replay_reasoning_avoided_total counter\n");
    out.push_str(&format!(
        "oris_evolution_replay_reasoning_avoided_total {}\n",
        snapshot.replay_reasoning_avoided_total
    ));
    // Per-task-class counters (one labeled sample per task class).
    out.push_str("# HELP oris_evolution_replay_utilization_by_task_class_total Successful replay reuse counts grouped by deterministic task class.\n");
    out.push_str("# TYPE oris_evolution_replay_utilization_by_task_class_total counter\n");
    for task_class in &snapshot.replay_task_classes {
        out.push_str(&format!(
            "oris_evolution_replay_utilization_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
            prometheus_label_value(&task_class.task_class_id),
            prometheus_label_value(&task_class.task_label),
            task_class.replay_success_total
        ));
    }
    out.push_str("# HELP oris_evolution_replay_reasoning_avoided_by_task_class_total Planner steps avoided by successful replay grouped by deterministic task class.\n");
    out.push_str("# TYPE oris_evolution_replay_reasoning_avoided_by_task_class_total counter\n");
    for task_class in &snapshot.replay_task_classes {
        out.push_str(&format!(
            "oris_evolution_replay_reasoning_avoided_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
            prometheus_label_value(&task_class.task_class_id),
            prometheus_label_value(&task_class.task_label),
            task_class.reasoning_steps_avoided_total
        ));
    }
    // Ratio gauges are rendered with six decimal places.
    out.push_str("# HELP oris_evolution_replay_success_rate Successful replay attempts divided by replay attempts that reached validation.\n");
    out.push_str("# TYPE oris_evolution_replay_success_rate gauge\n");
    out.push_str(&format!(
        "oris_evolution_replay_success_rate {:.6}\n",
        snapshot.replay_success_rate
    ));
    out.push_str("# HELP oris_evolution_confidence_revalidations_total Total confidence-driven demotions that require revalidation before replay.\n");
    out.push_str("# TYPE oris_evolution_confidence_revalidations_total counter\n");
    out.push_str(&format!(
        "oris_evolution_confidence_revalidations_total {}\n",
        snapshot.confidence_revalidations_total
    ));
    // Mutation lifecycle counters.
    out.push_str(
        "# HELP oris_evolution_mutation_declared_total Total declared mutations recorded in the evolution log.\n",
    );
    out.push_str("# TYPE oris_evolution_mutation_declared_total counter\n");
    out.push_str(&format!(
        "oris_evolution_mutation_declared_total {}\n",
        snapshot.mutation_declared_total
    ));
    out.push_str("# HELP oris_evolution_promoted_mutations_total Total mutations promoted by the governor.\n");
    out.push_str("# TYPE oris_evolution_promoted_mutations_total counter\n");
    out.push_str(&format!(
        "oris_evolution_promoted_mutations_total {}\n",
        snapshot.promoted_mutations_total
    ));
    out.push_str(
        "# HELP oris_evolution_promotion_ratio Promoted mutations divided by declared mutations.\n",
    );
    out.push_str("# TYPE oris_evolution_promotion_ratio gauge\n");
    out.push_str(&format!(
        "oris_evolution_promotion_ratio {:.6}\n",
        snapshot.promotion_ratio
    ));
    out.push_str("# HELP oris_evolution_gene_revocations_total Total gene revocations recorded in the evolution log.\n");
    out.push_str("# TYPE oris_evolution_gene_revocations_total counter\n");
    out.push_str(&format!(
        "oris_evolution_gene_revocations_total {}\n",
        snapshot.gene_revocations_total
    ));
    // Rolling one-hour velocity gauges.
    out.push_str("# HELP oris_evolution_mutation_velocity_last_hour Declared mutations observed in the last hour.\n");
    out.push_str("# TYPE oris_evolution_mutation_velocity_last_hour gauge\n");
    out.push_str(&format!(
        "oris_evolution_mutation_velocity_last_hour {}\n",
        snapshot.mutation_velocity_last_hour
    ));
    out.push_str("# HELP oris_evolution_revoke_frequency_last_hour Gene revocations observed in the last hour.\n");
    out.push_str("# TYPE oris_evolution_revoke_frequency_last_hour gauge\n");
    out.push_str(&format!(
        "oris_evolution_revoke_frequency_last_hour {}\n",
        snapshot.revoke_frequency_last_hour
    ));
    // Current projection gauges.
    out.push_str("# HELP oris_evolution_promoted_genes Current promoted genes in the evolution projection.\n");
    out.push_str("# TYPE oris_evolution_promoted_genes gauge\n");
    out.push_str(&format!(
        "oris_evolution_promoted_genes {}\n",
        snapshot.promoted_genes
    ));
    out.push_str("# HELP oris_evolution_promoted_capsules Current promoted capsules in the evolution projection.\n");
    out.push_str("# TYPE oris_evolution_promoted_capsules gauge\n");
    out.push_str(&format!(
        "oris_evolution_promoted_capsules {}\n",
        snapshot.promoted_capsules
    ));
    out.push_str("# HELP oris_evolution_store_last_event_seq Last visible append-only evolution event sequence.\n");
    out.push_str("# TYPE oris_evolution_store_last_event_seq gauge\n");
    out.push_str(&format!(
        "oris_evolution_store_last_event_seq {}\n",
        snapshot.last_event_seq
    ));
    // Health gauge: 1 when status is exactly "ok", else 0.
    out.push_str(
        "# HELP oris_evolution_health Evolution observability store health (1 = healthy).\n",
    );
    out.push_str("# TYPE oris_evolution_health gauge\n");
    out.push_str(&format!(
        "oris_evolution_health {}\n",
        u8::from(health.status == "ok")
    ));
    out
}
3260
3261fn count_recent_events(
3262    events: &[StoredEvolutionEvent],
3263    cutoff: DateTime<Utc>,
3264    predicate: impl Fn(&EvolutionEvent) -> bool,
3265) -> u64 {
3266    events
3267        .iter()
3268        .filter(|stored| {
3269            predicate(&stored.event)
3270                && parse_event_timestamp(&stored.timestamp)
3271                    .map(|timestamp| timestamp >= cutoff)
3272                    .unwrap_or(false)
3273        })
3274        .count() as u64
3275}
3276
/// Escapes a string for use as a Prometheus label value.
///
/// The exposition format requires escaping backslash, newline, and double
/// quote. A single character-by-character pass applies the same mapping as
/// three sequential `replace` calls (backslash first), so already-emitted
/// escapes are never re-escaped.
fn prometheus_label_value(input: &str) -> String {
    let mut escaped = String::with_capacity(input.len());
    for ch in input.chars() {
        match ch {
            '\\' => escaped.push_str("\\\\"),
            '\n' => escaped.push_str("\\n"),
            '"' => escaped.push_str("\\\""),
            other => escaped.push(other),
        }
    }
    escaped
}
3283
3284fn parse_event_timestamp(raw: &str) -> Option<DateTime<Utc>> {
3285    DateTime::parse_from_rfc3339(raw)
3286        .ok()
3287        .map(|parsed| parsed.with_timezone(&Utc))
3288}
3289
3290fn is_replay_validation_failure(event: &EvolutionEvent) -> bool {
3291    matches!(
3292        event,
3293        EvolutionEvent::ValidationFailed {
3294            gene_id: Some(_),
3295            ..
3296        }
3297    )
3298}
3299
3300fn is_confidence_revalidation_event(event: &EvolutionEvent) -> bool {
3301    matches!(
3302        event,
3303        EvolutionEvent::PromotionEvaluated { state, reason, .. }
3304            if *state == AssetState::Quarantined
3305                && reason.contains("confidence decayed")
3306    )
3307}
3308
/// Divides `numerator` by `denominator`, yielding `0.0` instead of
/// NaN/infinity when the denominator is zero.
fn safe_ratio(numerator: u64, denominator: u64) -> f64 {
    match denominator {
        0 => 0.0,
        nonzero => numerator as f64 / nonzero as f64,
    }
}
3316
3317fn store_err(err: EvolutionError) -> EvoKernelError {
3318    EvoKernelError::Store(err.to_string())
3319}
3320
3321#[cfg(test)]
3322mod tests {
3323    use super::*;
3324    use oris_agent_contract::{
3325        AgentRole, CoordinationPlan, CoordinationPrimitive, CoordinationTask,
3326    };
3327    use oris_kernel::{
3328        AllowAllPolicy, InMemoryEventStore, KernelMode, KernelState, NoopActionExecutor,
3329        NoopStepFn, StateUpdatedOnlyReducer,
3330    };
3331    use serde::{Deserialize, Serialize};
3332
    /// Minimal kernel state used only by these tests; carries no data.
    #[derive(Clone, Debug, Default, Serialize, Deserialize)]
    struct TestState;
3335
    impl KernelState for TestState {
        /// Fixed schema version; these tests never migrate state.
        fn version(&self) -> u32 {
            1
        }
    }
3341
3342    fn temp_workspace(name: &str) -> std::path::PathBuf {
3343        let root =
3344            std::env::temp_dir().join(format!("oris-evokernel-{name}-{}", std::process::id()));
3345        if root.exists() {
3346            fs::remove_dir_all(&root).unwrap();
3347        }
3348        fs::create_dir_all(root.join("src")).unwrap();
3349        fs::write(
3350            root.join("Cargo.toml"),
3351            "[package]\nname = \"sample\"\nversion = \"0.1.0\"\nedition = \"2021\"\n",
3352        )
3353        .unwrap();
3354        fs::write(root.join("Cargo.lock"), "# lock\n").unwrap();
3355        fs::write(root.join("src/lib.rs"), "pub fn demo() -> usize { 1 }\n").unwrap();
3356        root
3357    }
3358
    /// Assembles an in-memory kernel with no-op executors, no snapshots or
    /// effect sink, and an allow-everything policy — a neutral host for
    /// EvoKernel tests.
    fn test_kernel() -> Arc<Kernel<TestState>> {
        Arc::new(Kernel::<TestState> {
            events: Box::new(InMemoryEventStore::new()),
            snaps: None,
            reducer: Box::new(StateUpdatedOnlyReducer),
            exec: Box::new(NoopActionExecutor),
            step: Box::new(NoopStepFn),
            policy: Box::new(AllowAllPolicy),
            effect_sink: None,
            mode: KernelMode::Normal,
        })
    }
3371
3372    fn lightweight_plan() -> ValidationPlan {
3373        ValidationPlan {
3374            profile: "test".into(),
3375            stages: vec![ValidationStage::Command {
3376                program: "git".into(),
3377                args: vec!["--version".into()],
3378                timeout_ms: 5_000,
3379            }],
3380        }
3381    }
3382
    /// Canonical low-risk mutation fixture: adds a one-line README via a
    /// unified diff against `HEAD`, targeting only `README.md`.
    fn sample_mutation() -> PreparedMutation {
        prepare_mutation(
            MutationIntent {
                id: "mutation-1".into(),
                intent: "add README".into(),
                target: MutationTarget::Paths {
                    allow: vec!["README.md".into()],
                },
                expected_effect: "repo still builds".into(),
                risk: RiskLevel::Low,
                signals: vec!["missing readme".into()],
                spec_id: None,
            },
            // The leading "\ escape strips the first newline so the diff body
            // starts flush-left with no stray indentation.
            "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1 @@
+# sample
"
            .into(),
            Some("HEAD".into()),
        )
    }
3409
3410    fn base_sandbox_policy() -> SandboxPolicy {
3411        SandboxPolicy {
3412            allowed_programs: vec!["git".into()],
3413            max_duration_ms: 60_000,
3414            max_output_bytes: 1024 * 1024,
3415            denied_env_prefixes: Vec::new(),
3416        }
3417    }
3418
3419    fn command_validator() -> Arc<dyn Validator> {
3420        Arc::new(CommandValidator::new(base_sandbox_policy()))
3421    }
3422
3423    fn replay_input(signal: &str) -> SelectorInput {
3424        let rustc_version = std::process::Command::new("rustc")
3425            .arg("--version")
3426            .output()
3427            .ok()
3428            .filter(|output| output.status.success())
3429            .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
3430            .unwrap_or_else(|| "rustc unknown".into());
3431        SelectorInput {
3432            signals: vec![signal.into()],
3433            env: EnvFingerprint {
3434                rustc_version,
3435                cargo_lock_hash: compute_artifact_hash("# lock\n"),
3436                target_triple: format!(
3437                    "{}-unknown-{}",
3438                    std::env::consts::ARCH,
3439                    std::env::consts::OS
3440                ),
3441                os: std::env::consts::OS.into(),
3442            },
3443            spec_id: None,
3444            limit: 1,
3445        }
3446    }
3447
3448    fn build_test_evo_with_store(
3449        name: &str,
3450        run_id: &str,
3451        validator: Arc<dyn Validator>,
3452        store: Arc<dyn EvolutionStore>,
3453    ) -> EvoKernel<TestState> {
3454        let workspace = temp_workspace(name);
3455        let sandbox: Arc<dyn Sandbox> = Arc::new(oris_sandbox::LocalProcessSandbox::new(
3456            run_id,
3457            &workspace,
3458            std::env::temp_dir(),
3459        ));
3460        EvoKernel::new(test_kernel(), sandbox, validator, store)
3461            .with_governor(Arc::new(DefaultGovernor::new(
3462                oris_governor::GovernorConfig {
3463                    promote_after_successes: 1,
3464                    ..Default::default()
3465                },
3466            )))
3467            .with_validation_plan(lightweight_plan())
3468            .with_sandbox_policy(base_sandbox_policy())
3469    }
3470
3471    fn build_test_evo(
3472        name: &str,
3473        run_id: &str,
3474        validator: Arc<dyn Validator>,
3475    ) -> (EvoKernel<TestState>, Arc<dyn EvolutionStore>) {
3476        let store_root = std::env::temp_dir().join(format!(
3477            "oris-evokernel-{name}-store-{}",
3478            std::process::id()
3479        ));
3480        if store_root.exists() {
3481            fs::remove_dir_all(&store_root).unwrap();
3482        }
3483        let store: Arc<dyn EvolutionStore> =
3484            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
3485        let evo = build_test_evo_with_store(name, run_id, validator, store.clone());
3486        (evo, store)
3487    }
3488
3489    fn remote_publish_envelope(
3490        sender_id: &str,
3491        run_id: &str,
3492        gene_id: &str,
3493        capsule_id: &str,
3494        mutation_id: &str,
3495        signal: &str,
3496        file_name: &str,
3497        line: &str,
3498    ) -> EvolutionEnvelope {
3499        remote_publish_envelope_with_env(
3500            sender_id,
3501            run_id,
3502            gene_id,
3503            capsule_id,
3504            mutation_id,
3505            signal,
3506            file_name,
3507            line,
3508            replay_input(signal).env,
3509        )
3510    }
3511
3512    fn remote_publish_envelope_with_env(
3513        sender_id: &str,
3514        run_id: &str,
3515        gene_id: &str,
3516        capsule_id: &str,
3517        mutation_id: &str,
3518        signal: &str,
3519        file_name: &str,
3520        line: &str,
3521        env: EnvFingerprint,
3522    ) -> EvolutionEnvelope {
3523        let mutation = prepare_mutation(
3524            MutationIntent {
3525                id: mutation_id.into(),
3526                intent: format!("add {file_name}"),
3527                target: MutationTarget::Paths {
3528                    allow: vec![file_name.into()],
3529                },
3530                expected_effect: "replay should still validate".into(),
3531                risk: RiskLevel::Low,
3532                signals: vec![signal.into()],
3533                spec_id: None,
3534            },
3535            format!(
3536                "\
3537diff --git a/{file_name} b/{file_name}
3538new file mode 100644
3539index 0000000..1111111
3540--- /dev/null
3541+++ b/{file_name}
3542@@ -0,0 +1 @@
3543+{line}
3544"
3545            ),
3546            Some("HEAD".into()),
3547        );
3548        let gene = Gene {
3549            id: gene_id.into(),
3550            signals: vec![signal.into()],
3551            strategy: vec![file_name.into()],
3552            validation: vec!["test".into()],
3553            state: AssetState::Promoted,
3554        };
3555        let capsule = Capsule {
3556            id: capsule_id.into(),
3557            gene_id: gene_id.into(),
3558            mutation_id: mutation_id.into(),
3559            run_id: run_id.into(),
3560            diff_hash: mutation.artifact.content_hash.clone(),
3561            confidence: 0.9,
3562            env,
3563            outcome: Outcome {
3564                success: true,
3565                validation_profile: "test".into(),
3566                validation_duration_ms: 1,
3567                changed_files: vec![file_name.into()],
3568                validator_hash: "validator-hash".into(),
3569                lines_changed: 1,
3570                replay_verified: false,
3571            },
3572            state: AssetState::Promoted,
3573        };
3574        EvolutionEnvelope::publish(
3575            sender_id,
3576            vec![
3577                NetworkAsset::EvolutionEvent {
3578                    event: EvolutionEvent::MutationDeclared { mutation },
3579                },
3580                NetworkAsset::Gene { gene: gene.clone() },
3581                NetworkAsset::Capsule {
3582                    capsule: capsule.clone(),
3583                },
3584                NetworkAsset::EvolutionEvent {
3585                    event: EvolutionEvent::CapsuleReleased {
3586                        capsule_id: capsule.id.clone(),
3587                        state: AssetState::Promoted,
3588                    },
3589                },
3590            ],
3591        )
3592    }
3593
    /// General publish-envelope fixture: declares a single-file mutation (a
    /// unified diff adding `file_name` containing `line`), a promoted gene,
    /// and a promoted capsule tied together by the given ids, then wraps them
    /// in a publish envelope from `sender_id`.
    ///
    /// `mutation_signals` and `gene_signals` are supplied separately so tests
    /// can exercise signal-mismatch scenarios between mutation and gene.
    fn remote_publish_envelope_with_signals(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        mutation_signals: Vec<String>,
        gene_signals: Vec<String>,
        file_name: &str,
        line: &str,
        env: EnvFingerprint,
    ) -> EvolutionEnvelope {
        let mutation = prepare_mutation(
            MutationIntent {
                id: mutation_id.into(),
                intent: format!("add {file_name}"),
                target: MutationTarget::Paths {
                    allow: vec![file_name.into()],
                },
                expected_effect: "replay should still validate".into(),
                risk: RiskLevel::Low,
                signals: mutation_signals,
                spec_id: None,
            },
            // Unified diff creating `file_name` with a single line; the
            // leading "\ escape keeps the diff body flush-left.
            format!(
                "\
diff --git a/{file_name} b/{file_name}
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/{file_name}
@@ -0,0 +1 @@
+{line}
"
            ),
            Some("HEAD".into()),
        );
        let gene = Gene {
            id: gene_id.into(),
            signals: gene_signals,
            strategy: vec![file_name.into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        };
        // Capsule is already promoted with high confidence so receivers can
        // replay it immediately.
        let capsule = Capsule {
            id: capsule_id.into(),
            gene_id: gene_id.into(),
            mutation_id: mutation_id.into(),
            run_id: run_id.into(),
            diff_hash: mutation.artifact.content_hash.clone(),
            confidence: 0.9,
            env,
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec![file_name.into()],
                validator_hash: "validator-hash".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        };
        EvolutionEnvelope::publish(
            sender_id,
            vec![
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared { mutation },
                },
                NetworkAsset::Gene { gene: gene.clone() },
                NetworkAsset::Capsule {
                    capsule: capsule.clone(),
                },
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::CapsuleReleased {
                        capsule_id: capsule.id.clone(),
                        state: AssetState::Promoted,
                    },
                },
            ],
        )
    }
3676
    /// Test double for `Validator` that always reports the configured
    /// outcome without running any real validation stages.
    struct FixedValidator {
        // The outcome every `run` call reports.
        success: bool,
    }
3680
    #[async_trait]
    impl Validator for FixedValidator {
        /// Returns a canned report: success per the fixture's flag, a 1 ms
        /// duration, no stage details, and a log line echoing the plan profile.
        async fn run(
            &self,
            _receipt: &SandboxReceipt,
            plan: &ValidationPlan,
        ) -> Result<ValidationReport, ValidationError> {
            Ok(ValidationReport {
                success: self.success,
                duration_ms: 1,
                stages: Vec::new(),
                logs: if self.success {
                    format!("{} ok", plan.profile)
                } else {
                    format!("{} failed", plan.profile)
                },
            })
        }
    }
3700
    /// Store wrapper that injects a failure on the N-th `append_event` call
    /// while delegating everything else to a real JSONL store.
    struct FailOnAppendStore {
        // Real store that handles all non-failing calls.
        inner: JsonlEvolutionStore,
        // 1-based index of the append call that should fail.
        fail_on_call: usize,
        // Number of append calls observed so far.
        call_count: Mutex<usize>,
    }
3706
    impl FailOnAppendStore {
        /// Wraps a fresh JSONL store at `root_dir`, configured to fail the
        /// `fail_on_call`-th append (1-based).
        fn new(root_dir: std::path::PathBuf, fail_on_call: usize) -> Self {
            Self {
                inner: JsonlEvolutionStore::new(root_dir),
                fail_on_call,
                call_count: Mutex::new(0),
            }
        }
    }
3716
    impl EvolutionStore for FailOnAppendStore {
        /// Counts append calls and returns an injected I/O error on the
        /// configured call; otherwise forwards to the inner store.
        fn append_event(&self, event: EvolutionEvent) -> Result<u64, EvolutionError> {
            let mut call_count = self
                .call_count
                .lock()
                // A poisoned lock is surfaced as a store I/O error so the
                // test fails loudly instead of panicking.
                .map_err(|_| EvolutionError::Io("test store lock poisoned".into()))?;
            *call_count += 1;
            if *call_count == self.fail_on_call {
                return Err(EvolutionError::Io("injected append failure".into()));
            }
            self.inner.append_event(event)
        }

        // Reads pass straight through to the real store.
        fn scan(&self, from_seq: u64) -> Result<Vec<StoredEvolutionEvent>, EvolutionError> {
            self.inner.scan(from_seq)
        }

        fn rebuild_projection(&self) -> Result<EvolutionProjection, EvolutionError> {
            self.inner.rebuild_projection()
        }
    }
3738
    /// A sequential plan completes planner then coder in order, and a
    /// planner->coder handoff message addressed to the coder task is recorded.
    #[test]
    fn coordination_planner_to_coder_handoff_is_deterministic() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "ship feature".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "planner".into(),
                    role: AgentRole::Planner,
                    description: "split the work".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "implement the patch".into(),
                    // Coder waits on the planner's output.
                    depends_on: vec!["planner".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["planner", "coder"]);
        assert!(result.failed_tasks.is_empty());
        assert!(result.messages.iter().any(|message| {
            message.from_role == AgentRole::Planner
                && message.to_role == AgentRole::Coder
                && message.task_id == "coder"
        }));
    }
3770
    /// A failing coder task (description says "force-fail") triggers its
    /// dependent repair task, which completes; a coder->repair message is
    /// recorded for the repair task.
    #[test]
    fn coordination_repair_runs_only_after_coder_failure() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "fix broken implementation".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "force-fail initial implementation".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "repair".into(),
                    role: AgentRole::Repair,
                    description: "patch the failed implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["repair"]);
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert!(result.messages.iter().any(|message| {
            message.from_role == AgentRole::Coder
                && message.to_role == AgentRole::Repair
                && message.task_id == "repair"
        }));
    }
3802
    /// When the coder task succeeds, its dependent optimizer task also runs
    /// and completes; nothing fails.
    #[test]
    fn coordination_optimizer_runs_after_successful_implementation_step() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "ship optimized patch".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "implement a working patch".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "optimizer".into(),
                    role: AgentRole::Optimizer,
                    description: "tighten the implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["coder", "optimizer"]);
        assert!(result.failed_tasks.is_empty());
    }
3829
    /// In parallel mode, independent tasks in the same wave complete in
    /// sorted id order ("a-task" before "z-task") and the dependent task
    /// follows in the next wave.
    #[test]
    fn coordination_parallel_waves_preserve_sorted_merge_order() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "parallelize safe tasks".into(),
            primitive: CoordinationPrimitive::Parallel,
            tasks: vec![
                CoordinationTask {
                    id: "z-task".into(),
                    role: AgentRole::Planner,
                    description: "analyze z".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "a-task".into(),
                    role: AgentRole::Coder,
                    description: "implement a".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "mid-task".into(),
                    role: AgentRole::Optimizer,
                    description: "polish after both".into(),
                    // Waits for both wave-one tasks to finish.
                    depends_on: vec!["z-task".into(), "a-task".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["a-task", "z-task", "mid-task"]);
        assert!(result.failed_tasks.is_empty());
    }
3862
    /// A force-failed task with `max_retries: 1` is attempted exactly twice
    /// (initial try plus one retry) — two failure messages — then reported
    /// as failed.
    #[test]
    fn coordination_retries_stop_at_max_retries() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "retry then stop".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![CoordinationTask {
                id: "coder".into(),
                role: AgentRole::Coder,
                description: "force-fail this task".into(),
                depends_on: Vec::new(),
            }],
            timeout_ms: 5_000,
            max_retries: 1,
        });

        assert!(result.completed_tasks.is_empty());
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert_eq!(
            result
                .messages
                .iter()
                .filter(|message| message.task_id == "coder" && message.content.contains("failed"))
                .count(),
            2
        );
    }
3889
    /// In conditional mode a failed dependency causes downstream tasks to be
    /// skipped (with an explanatory message) rather than marked failed.
    #[test]
    fn coordination_conditional_mode_skips_downstream_tasks_on_failure() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "skip blocked follow-up work".into(),
            primitive: CoordinationPrimitive::Conditional,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "force-fail the implementation".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "optimizer".into(),
                    role: AgentRole::Optimizer,
                    description: "only optimize a successful implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert!(result.completed_tasks.is_empty());
        assert_eq!(result.failed_tasks, vec!["coder"]);
        // The optimizer is skipped with a message, not executed.
        assert!(result.messages.iter().any(|message| {
            message.task_id == "optimizer"
                && message
                    .content
                    .contains("skipped due to failed dependency chain")
        }));
        // Skipped is distinct from failed: optimizer must not appear there.
        assert!(!result
            .failed_tasks
            .iter()
            .any(|task_id| task_id == "optimizer"));
    }
3926
    /// Running a one-stage plan (`git --version`) against a hand-built
    /// receipt yields a report containing exactly one stage entry.
    #[tokio::test]
    async fn command_validator_aggregates_stage_reports() {
        let workspace = temp_workspace("validator");
        // Minimal receipt: logs point at temp files; no changed files needed.
        let receipt = SandboxReceipt {
            mutation_id: "m".into(),
            workdir: workspace,
            applied: true,
            changed_files: Vec::new(),
            patch_hash: "hash".into(),
            stdout_log: std::env::temp_dir().join("stdout.log"),
            stderr_log: std::env::temp_dir().join("stderr.log"),
        };
        let validator = CommandValidator::new(SandboxPolicy {
            allowed_programs: vec!["git".into()],
            max_duration_ms: 1_000,
            max_output_bytes: 1024,
            denied_env_prefixes: Vec::new(),
        });
        let report = validator
            .run(
                &receipt,
                &ValidationPlan {
                    profile: "test".into(),
                    stages: vec![ValidationStage::Command {
                        program: "git".into(),
                        args: vec!["--version".into()],
                        timeout_ms: 1_000,
                    }],
                },
            )
            .await
            .unwrap();
        assert_eq!(report.stages.len(), 1);
    }
3961
    /// Capturing a successful mutation appends a `CapsuleCommitted` event to
    /// the store and returns a capsule with a non-empty id.
    #[tokio::test]
    async fn capture_successful_mutation_appends_capsule() {
        let (evo, store) = build_test_evo("capture", "run-1", command_validator());
        let capsule = evo
            .capture_successful_mutation(&"run-1".into(), sample_mutation())
            .await
            .unwrap();
        let events = store.scan(1).unwrap();
        assert!(events
            .iter()
            .any(|stored| matches!(stored.event, EvolutionEvent::CapsuleCommitted { .. })));
        assert!(!capsule.id.is_empty());
    }
3975
    /// A replay hit returns the previously captured capsule and records a
    /// `CapsuleReused` event that keeps the original capture run id while
    /// tagging the replaying run separately in `replay_run_id`.
    #[tokio::test]
    async fn replay_hit_records_capsule_reused() {
        let (evo, store) = build_test_evo("replay", "run-2", command_validator());
        let capsule = evo
            .capture_successful_mutation(&"run-2".into(), sample_mutation())
            .await
            .unwrap();
        let replay_run_id = "run-replay".to_string();
        let decision = evo
            .replay_or_fallback_for_run(&replay_run_id, replay_input("missing readme"))
            .await
            .unwrap();
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some(capsule.id));
        assert!(store.scan(1).unwrap().iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::CapsuleReused {
                run_id,
                replay_run_id: Some(current_replay_run_id),
                ..
            } if run_id == "run-2" && current_replay_run_id == &replay_run_id
        )));
    }
3999
    /// Using the lower-level `StoreReplayExecutor::try_replay` directly (the
    /// legacy API, with no replay run id) still replays the capsule and logs
    /// `CapsuleReused` with `replay_run_id: None` and the original capture
    /// run id intact.
    #[tokio::test]
    async fn legacy_replay_executor_api_preserves_original_capsule_run_id() {
        let capture_run_id = "run-legacy-capture".to_string();
        let (evo, store) = build_test_evo("replay-legacy", &capture_run_id, command_validator());
        let capsule = evo
            .capture_successful_mutation(&capture_run_id, sample_mutation())
            .await
            .unwrap();
        // Rebuild the executor from the kernel's own parts so it shares the
        // same store, selector, governor, and economics state.
        let executor = StoreReplayExecutor {
            sandbox: evo.sandbox.clone(),
            validator: evo.validator.clone(),
            store: evo.store.clone(),
            selector: evo.selector.clone(),
            governor: evo.governor.clone(),
            economics: Some(evo.economics.clone()),
            remote_publishers: Some(evo.remote_publishers.clone()),
            stake_policy: evo.stake_policy.clone(),
        };

        let decision = executor
            .try_replay(
                &replay_input("missing readme"),
                &evo.sandbox_policy,
                &evo.validation_plan,
            )
            .await
            .unwrap();

        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some(capsule.id));
        assert!(store.scan(1).unwrap().iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::CapsuleReused {
                run_id,
                replay_run_id: None,
                ..
            } if run_id == &capture_run_id
        )));
    }
4039
4040    #[tokio::test]
4041    async fn metrics_snapshot_tracks_replay_promotion_and_revocation_signals() {
4042        let (evo, _) = build_test_evo("metrics", "run-metrics", command_validator());
4043        let capsule = evo
4044            .capture_successful_mutation(&"run-metrics".into(), sample_mutation())
4045            .await
4046            .unwrap();
4047        let decision = evo
4048            .replay_or_fallback(replay_input("missing readme"))
4049            .await
4050            .unwrap();
4051        assert!(decision.used_capsule);
4052
4053        evo.revoke_assets(&RevokeNotice {
4054            sender_id: "node-metrics".into(),
4055            asset_ids: vec![capsule.id.clone()],
4056            reason: "manual test revoke".into(),
4057        })
4058        .unwrap();
4059
4060        let snapshot = evo.metrics_snapshot().unwrap();
4061        assert_eq!(snapshot.replay_attempts_total, 1);
4062        assert_eq!(snapshot.replay_success_total, 1);
4063        assert_eq!(snapshot.replay_success_rate, 1.0);
4064        assert_eq!(snapshot.confidence_revalidations_total, 0);
4065        assert_eq!(snapshot.replay_reasoning_avoided_total, 1);
4066        assert_eq!(snapshot.replay_task_classes.len(), 1);
4067        assert_eq!(snapshot.replay_task_classes[0].replay_success_total, 1);
4068        assert_eq!(
4069            snapshot.replay_task_classes[0].reasoning_steps_avoided_total,
4070            1
4071        );
4072        assert_eq!(snapshot.confidence_revalidations_total, 0);
4073        assert_eq!(snapshot.mutation_declared_total, 1);
4074        assert_eq!(snapshot.promoted_mutations_total, 1);
4075        assert_eq!(snapshot.promotion_ratio, 1.0);
4076        assert_eq!(snapshot.gene_revocations_total, 1);
4077        assert_eq!(snapshot.mutation_velocity_last_hour, 1);
4078        assert_eq!(snapshot.revoke_frequency_last_hour, 1);
4079        assert_eq!(snapshot.promoted_genes, 0);
4080        assert_eq!(snapshot.promoted_capsules, 0);
4081
4082        let rendered = evo.render_metrics_prometheus().unwrap();
4083        assert!(rendered.contains("oris_evolution_replay_reasoning_avoided_total 1"));
4084        assert!(rendered.contains("oris_evolution_replay_utilization_by_task_class_total"));
4085        assert!(rendered.contains("oris_evolution_replay_reasoning_avoided_by_task_class_total"));
4086        assert!(rendered.contains("oris_evolution_replay_success_rate 1.000000"));
4087        assert!(rendered.contains("oris_evolution_confidence_revalidations_total 0"));
4088        assert!(rendered.contains("oris_evolution_promotion_ratio 1.000000"));
4089        assert!(rendered.contains("oris_evolution_revoke_frequency_last_hour 1"));
4090        assert!(rendered.contains("oris_evolution_mutation_velocity_last_hour 1"));
4091        assert!(rendered.contains("oris_evolution_health 1"));
4092    }
4093
    /// A promoted gene whose last update is 48 hours old decays below
    /// `MIN_REPLAY_CONFIDENCE`, so it (and its capsule) must be surfaced as a
    /// revalidation target.
    #[test]
    fn stale_replay_targets_require_confidence_revalidation() {
        let now = Utc::now();
        // Hand-built projection: one promoted gene/capsule pair, last
        // touched 48 hours ago.
        let projection = EvolutionProjection {
            genes: vec![Gene {
                id: "gene-stale".into(),
                signals: vec!["missing readme".into()],
                strategy: vec!["README.md".into()],
                validation: vec!["test".into()],
                state: AssetState::Promoted,
            }],
            capsules: vec![Capsule {
                id: "capsule-stale".into(),
                gene_id: "gene-stale".into(),
                mutation_id: "mutation-stale".into(),
                run_id: "run-stale".into(),
                diff_hash: "hash".into(),
                confidence: 0.8,
                env: replay_input("missing readme").env,
                outcome: Outcome {
                    success: true,
                    validation_profile: "test".into(),
                    validation_duration_ms: 1,
                    changed_files: vec!["README.md".into()],
                    validator_hash: "validator".into(),
                    lines_changed: 1,
                    replay_verified: false,
                },
                state: AssetState::Promoted,
            }],
            reuse_counts: BTreeMap::from([("gene-stale".into(), 1)]),
            attempt_counts: BTreeMap::from([("gene-stale".into(), 1)]),
            last_updated_at: BTreeMap::from([(
                "gene-stale".into(),
                (now - Duration::hours(48)).to_rfc3339(),
            )]),
            spec_ids_by_gene: BTreeMap::new(),
        };

        let targets = stale_replay_revalidation_targets(&projection, now);

        assert_eq!(targets.len(), 1);
        assert_eq!(targets[0].gene_id, "gene-stale");
        assert_eq!(targets[0].capsule_ids, vec!["capsule-stale".to_string()]);
        assert!(targets[0].decayed_confidence < MIN_REPLAY_CONFIDENCE);
    }
4140
4141    #[tokio::test]
4142    async fn remote_replay_prefers_closest_environment_match() {
4143        let (evo, _) = build_test_evo("remote-env", "run-remote-env", command_validator());
4144        let input = replay_input("env-signal");
4145
4146        let envelope_a = remote_publish_envelope_with_env(
4147            "node-a",
4148            "run-remote-a",
4149            "gene-a",
4150            "capsule-a",
4151            "mutation-a",
4152            "env-signal",
4153            "A.md",
4154            "# from a",
4155            input.env.clone(),
4156        );
4157        let envelope_b = remote_publish_envelope_with_env(
4158            "node-b",
4159            "run-remote-b",
4160            "gene-b",
4161            "capsule-b",
4162            "mutation-b",
4163            "env-signal",
4164            "B.md",
4165            "# from b",
4166            EnvFingerprint {
4167                rustc_version: "old-rustc".into(),
4168                cargo_lock_hash: "other-lock".into(),
4169                target_triple: "aarch64-apple-darwin".into(),
4170                os: "linux".into(),
4171            },
4172        );
4173
4174        evo.import_remote_envelope(&envelope_a).unwrap();
4175        evo.import_remote_envelope(&envelope_b).unwrap();
4176
4177        let decision = evo.replay_or_fallback(input).await.unwrap();
4178
4179        assert!(decision.used_capsule);
4180        assert_eq!(decision.capsule_id, Some("capsule-a".into()));
4181        assert!(!decision.fallback_to_planner);
4182    }
4183
4184    #[test]
4185    fn remote_cold_start_scoring_caps_distinct_query_coverage() {
4186        let (evo, _) = build_test_evo("remote-score", "run-remote-score", command_validator());
4187        let input = replay_input("missing readme");
4188
4189        let exact = remote_publish_envelope_with_signals(
4190            "node-exact",
4191            "run-remote-exact",
4192            "gene-exact",
4193            "capsule-exact",
4194            "mutation-exact",
4195            vec!["missing readme".into()],
4196            vec!["missing readme".into()],
4197            "EXACT.md",
4198            "# exact",
4199            input.env.clone(),
4200        );
4201        let overlapping = remote_publish_envelope_with_signals(
4202            "node-overlap",
4203            "run-remote-overlap",
4204            "gene-overlap",
4205            "capsule-overlap",
4206            "mutation-overlap",
4207            vec!["missing readme".into()],
4208            vec!["missing".into(), "readme".into()],
4209            "OVERLAP.md",
4210            "# overlap",
4211            input.env.clone(),
4212        );
4213
4214        evo.import_remote_envelope(&exact).unwrap();
4215        evo.import_remote_envelope(&overlapping).unwrap();
4216
4217        let candidates = quarantined_remote_exact_match_candidates(evo.store.as_ref(), &input);
4218        let exact_candidate = candidates
4219            .iter()
4220            .find(|candidate| candidate.gene.id == "gene-exact")
4221            .unwrap();
4222        let overlap_candidate = candidates
4223            .iter()
4224            .find(|candidate| candidate.gene.id == "gene-overlap")
4225            .unwrap();
4226
4227        assert_eq!(exact_candidate.score, 1.0);
4228        assert_eq!(overlap_candidate.score, 1.0);
4229        assert!(candidates.iter().all(|candidate| candidate.score <= 1.0));
4230    }
4231
    #[test]
    fn exact_match_candidates_respect_spec_linked_events() {
        // A mutation declared without an inline spec_id can be tied to a spec
        // afterwards via a SpecLinked event. When the replay input is scoped to
        // that spec, exact-match candidate selection must still surface the
        // gene — the late link counts, not only intent.spec_id.
        let (evo, _) = build_test_evo(
            "spec-linked-filter",
            "run-spec-linked-filter",
            command_validator(),
        );
        let mut input = replay_input("missing readme");
        input.spec_id = Some("spec-readme".into());

        // Declare the mutation with spec_id = None; the association arrives
        // later as a separate event.
        let mut mutation = sample_mutation();
        mutation.intent.id = "mutation-spec-linked".into();
        mutation.intent.spec_id = None;
        let gene = Gene {
            id: "gene-spec-linked".into(),
            signals: vec!["missing readme".into()],
            strategy: vec!["README.md".into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        };
        let capsule = Capsule {
            id: "capsule-spec-linked".into(),
            gene_id: gene.id.clone(),
            mutation_id: mutation.intent.id.clone(),
            run_id: "run-spec-linked".into(),
            diff_hash: mutation.artifact.content_hash.clone(),
            confidence: 0.9,
            env: input.env.clone(),
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec!["README.md".into()],
                validator_hash: "validator-hash".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        };

        // Append order mirrors the real lifecycle: declare, project, commit,
        // then link the spec after the capsule already exists.
        evo.store
            .append_event(EvolutionEvent::MutationDeclared { mutation })
            .unwrap();
        evo.store
            .append_event(EvolutionEvent::GeneProjected { gene })
            .unwrap();
        evo.store
            .append_event(EvolutionEvent::CapsuleCommitted { capsule })
            .unwrap();
        evo.store
            .append_event(EvolutionEvent::SpecLinked {
                mutation_id: "mutation-spec-linked".into(),
                spec_id: "spec-readme".into(),
            })
            .unwrap();

        // The spec-scoped input still resolves to the late-linked gene.
        let candidates = exact_match_candidates(evo.store.as_ref(), &input);
        assert_eq!(candidates.len(), 1);
        assert_eq!(candidates[0].gene.id, "gene-spec-linked");
    }
4292
    #[tokio::test]
    async fn remote_capsule_stays_quarantined_until_first_successful_replay() {
        // Imported remote assets are untrusted: gene and capsule must sit in
        // Quarantined state (and be excluded from export) until a local replay
        // validates them, after which they are promoted and become exportable.
        let (evo, store) = build_test_evo(
            "remote-quarantine",
            "run-remote-quarantine",
            command_validator(),
        );
        let envelope = remote_publish_envelope(
            "node-remote",
            "run-remote-quarantine",
            "gene-remote",
            "capsule-remote",
            "mutation-remote",
            "remote-signal",
            "REMOTE.md",
            "# from remote",
        );

        evo.import_remote_envelope(&envelope).unwrap();

        // Phase 1: freshly imported — both assets quarantined, none exported.
        let before_replay = store.rebuild_projection().unwrap();
        let imported_gene = before_replay
            .genes
            .iter()
            .find(|gene| gene.id == "gene-remote")
            .unwrap();
        let imported_capsule = before_replay
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-remote")
            .unwrap();
        assert_eq!(imported_gene.state, AssetState::Quarantined);
        assert_eq!(imported_capsule.state, AssetState::Quarantined);
        let exported_before_replay =
            export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
        assert!(exported_before_replay.assets.is_empty());

        // Phase 2: a successful local replay of the imported capsule.
        let decision = evo
            .replay_or_fallback(replay_input("remote-signal"))
            .await
            .unwrap();

        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-remote".into()));

        // Phase 3: replay success lifts the quarantine; the three assets
        // (mutation event, gene, capsule) are now exportable.
        let after_replay = store.rebuild_projection().unwrap();
        let promoted_gene = after_replay
            .genes
            .iter()
            .find(|gene| gene.id == "gene-remote")
            .unwrap();
        let released_capsule = after_replay
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-remote")
            .unwrap();
        assert_eq!(promoted_gene.state, AssetState::Promoted);
        assert_eq!(released_capsule.state, AssetState::Promoted);
        let exported_after_replay =
            export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
        assert_eq!(exported_after_replay.assets.len(), 3);
        assert!(exported_after_replay.assets.iter().any(|asset| matches!(
            asset,
            NetworkAsset::EvolutionEvent {
                event: EvolutionEvent::MutationDeclared { .. }
            }
        )));
    }
4361
4362    #[tokio::test]
4363    async fn publish_local_assets_include_mutation_payload_for_remote_replay() {
4364        let (source, source_store) = build_test_evo(
4365            "remote-publish-export",
4366            "run-remote-publish-export",
4367            command_validator(),
4368        );
4369        source
4370            .capture_successful_mutation(&"run-remote-publish-export".into(), sample_mutation())
4371            .await
4372            .unwrap();
4373        let envelope = EvolutionNetworkNode::new(source_store.clone())
4374            .publish_local_assets("node-source")
4375            .unwrap();
4376        assert!(envelope.assets.iter().any(|asset| matches!(
4377            asset,
4378            NetworkAsset::EvolutionEvent {
4379                event: EvolutionEvent::MutationDeclared { mutation }
4380            } if mutation.intent.id == "mutation-1"
4381        )));
4382
4383        let (remote, _) = build_test_evo(
4384            "remote-publish-import",
4385            "run-remote-publish-import",
4386            command_validator(),
4387        );
4388        remote.import_remote_envelope(&envelope).unwrap();
4389
4390        let decision = remote
4391            .replay_or_fallback(replay_input("missing readme"))
4392            .await
4393            .unwrap();
4394
4395        assert!(decision.used_capsule);
4396        assert!(!decision.fallback_to_planner);
4397    }
4398
4399    #[tokio::test]
4400    async fn fetch_assets_include_mutation_payload_for_remote_replay() {
4401        let (evo, store) = build_test_evo(
4402            "remote-fetch-export",
4403            "run-remote-fetch",
4404            command_validator(),
4405        );
4406        evo.capture_successful_mutation(&"run-remote-fetch".into(), sample_mutation())
4407            .await
4408            .unwrap();
4409
4410        let response = EvolutionNetworkNode::new(store.clone())
4411            .fetch_assets(
4412                "node-source",
4413                &FetchQuery {
4414                    sender_id: "node-client".into(),
4415                    signals: vec!["missing readme".into()],
4416                },
4417            )
4418            .unwrap();
4419
4420        assert!(response.assets.iter().any(|asset| matches!(
4421            asset,
4422            NetworkAsset::EvolutionEvent {
4423                event: EvolutionEvent::MutationDeclared { mutation }
4424            } if mutation.intent.id == "mutation-1"
4425        )));
4426        assert!(response
4427            .assets
4428            .iter()
4429            .any(|asset| matches!(asset, NetworkAsset::Gene { .. })));
4430        assert!(response
4431            .assets
4432            .iter()
4433            .any(|asset| matches!(asset, NetworkAsset::Capsule { .. })));
4434    }
4435
4436    #[test]
4437    fn partial_remote_import_keeps_publisher_for_already_imported_assets() {
4438        let store_root = std::env::temp_dir().join(format!(
4439            "oris-evokernel-remote-partial-store-{}",
4440            std::process::id()
4441        ));
4442        if store_root.exists() {
4443            fs::remove_dir_all(&store_root).unwrap();
4444        }
4445        let store: Arc<dyn EvolutionStore> = Arc::new(FailOnAppendStore::new(store_root, 4));
4446        let evo = build_test_evo_with_store(
4447            "remote-partial",
4448            "run-remote-partial",
4449            command_validator(),
4450            store.clone(),
4451        );
4452        let envelope = remote_publish_envelope(
4453            "node-partial",
4454            "run-remote-partial",
4455            "gene-partial",
4456            "capsule-partial",
4457            "mutation-partial",
4458            "partial-signal",
4459            "PARTIAL.md",
4460            "# partial",
4461        );
4462
4463        let result = evo.import_remote_envelope(&envelope);
4464
4465        assert!(matches!(result, Err(EvoKernelError::Store(_))));
4466        let projection = store.rebuild_projection().unwrap();
4467        assert!(projection
4468            .genes
4469            .iter()
4470            .any(|gene| gene.id == "gene-partial"));
4471        assert!(projection.capsules.is_empty());
4472        let publishers = evo.remote_publishers.lock().unwrap();
4473        assert_eq!(
4474            publishers.get("gene-partial").map(String::as_str),
4475            Some("node-partial")
4476        );
4477    }
4478
    #[test]
    fn retry_remote_import_after_partial_failure_only_imports_missing_assets() {
        // A first import fails partway (FailOnAppendStore trips on the 4th
        // append — presumably after mutation + gene but before the capsule;
        // TODO confirm against the helper). A retry of the same envelope must
        // import only what is still missing and leave no duplicate events.
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-remote-partial-retry-store-{}",
            next_id("t")
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> = Arc::new(FailOnAppendStore::new(store_root, 4));
        let evo = build_test_evo_with_store(
            "remote-partial-retry",
            "run-remote-partial-retry",
            command_validator(),
            store.clone(),
        );
        let envelope = remote_publish_envelope(
            "node-partial",
            "run-remote-partial-retry",
            "gene-partial-retry",
            "capsule-partial-retry",
            "mutation-partial-retry",
            "partial-retry-signal",
            "PARTIAL_RETRY.md",
            "# partial retry",
        );

        // First attempt: partial persistence, surfaced as a store error.
        let first = evo.import_remote_envelope(&envelope);
        assert!(matches!(first, Err(EvoKernelError::Store(_))));

        // Retry: only the not-yet-persisted capsule is reported as imported.
        let retry = evo.import_remote_envelope(&envelope).unwrap();

        assert_eq!(retry.imported_asset_ids, vec!["capsule-partial-retry"]);
        let projection = store.rebuild_projection().unwrap();
        let gene = projection
            .genes
            .iter()
            .find(|gene| gene.id == "gene-partial-retry")
            .unwrap();
        assert_eq!(gene.state, AssetState::Quarantined);
        let capsule = projection
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-partial-retry")
            .unwrap();
        assert_eq!(capsule.state, AssetState::Quarantined);
        // The retry must not double-count the import attempt for the gene.
        assert_eq!(projection.attempt_counts["gene-partial-retry"], 1);

        // Exactly one of each event in the log: the retry re-appended nothing
        // the failed first pass had already persisted.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::MutationDeclared { mutation }
                            if mutation.intent.id == "mutation-partial-retry"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::GeneProjected { gene } if gene.id == "gene-partial-retry"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::CapsuleCommitted { capsule }
                            if capsule.id == "capsule-partial-retry"
                    )
                })
                .count(),
            1
        );
    }
4567
    #[tokio::test]
    async fn duplicate_remote_import_does_not_requarantine_locally_validated_assets() {
        // Importing the same envelope twice must be idempotent: once a local
        // replay has promoted the assets, a duplicate import may not reset
        // them to Quarantined, bump attempt counts, or append new events.
        let (evo, store) = build_test_evo(
            "remote-idempotent",
            "run-remote-idempotent",
            command_validator(),
        );
        let envelope = remote_publish_envelope(
            "node-idempotent",
            "run-remote-idempotent",
            "gene-idempotent",
            "capsule-idempotent",
            "mutation-idempotent",
            "idempotent-signal",
            "IDEMPOTENT.md",
            "# idempotent",
        );

        // First import brings in both assets.
        let first = evo.import_remote_envelope(&envelope).unwrap();
        assert_eq!(
            first.imported_asset_ids,
            vec!["gene-idempotent", "capsule-idempotent"]
        );

        // A successful local replay promotes the imported gene and capsule.
        let decision = evo
            .replay_or_fallback(replay_input("idempotent-signal"))
            .await
            .unwrap();
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-idempotent".into()));

        let projection_before = store.rebuild_projection().unwrap();
        let attempts_before = projection_before.attempt_counts["gene-idempotent"];
        let gene_before = projection_before
            .genes
            .iter()
            .find(|gene| gene.id == "gene-idempotent")
            .unwrap();
        assert_eq!(gene_before.state, AssetState::Promoted);
        let capsule_before = projection_before
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-idempotent")
            .unwrap();
        assert_eq!(capsule_before.state, AssetState::Promoted);

        // Duplicate import is a no-op: nothing reported as imported.
        let second = evo.import_remote_envelope(&envelope).unwrap();
        assert!(second.imported_asset_ids.is_empty());

        // States and attempt counts are unchanged after the duplicate import.
        let projection_after = store.rebuild_projection().unwrap();
        assert_eq!(
            projection_after.attempt_counts["gene-idempotent"],
            attempts_before
        );
        let gene_after = projection_after
            .genes
            .iter()
            .find(|gene| gene.id == "gene-idempotent")
            .unwrap();
        assert_eq!(gene_after.state, AssetState::Promoted);
        let capsule_after = projection_after
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-idempotent")
            .unwrap();
        assert_eq!(capsule_after.state, AssetState::Promoted);

        // The event log holds exactly one of each asset event — the duplicate
        // import appended nothing.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::MutationDeclared { mutation }
                            if mutation.intent.id == "mutation-idempotent"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::GeneProjected { gene } if gene.id == "gene-idempotent"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::CapsuleCommitted { capsule }
                            if capsule.id == "capsule-idempotent"
                    )
                })
                .count(),
            1
        );
    }
4675
4676    #[tokio::test]
4677    async fn insufficient_evu_blocks_publish_but_not_local_replay() {
4678        let (evo, _) = build_test_evo("stake-gate", "run-stake", command_validator());
4679        let capsule = evo
4680            .capture_successful_mutation(&"run-stake".into(), sample_mutation())
4681            .await
4682            .unwrap();
4683        let publish = evo.export_promoted_assets("node-local");
4684        assert!(matches!(publish, Err(EvoKernelError::Validation(_))));
4685
4686        let decision = evo
4687            .replay_or_fallback(replay_input("missing readme"))
4688            .await
4689            .unwrap();
4690        assert!(decision.used_capsule);
4691        assert_eq!(decision.capsule_id, Some(capsule.id));
4692    }
4693
    #[tokio::test]
    async fn second_replay_validation_failure_revokes_gene_immediately() {
        // Capture a good capsule, then replay it twice with a validator that
        // always fails, and inspect what the event log and projection record.
        //
        // NOTE(review): the test name says the second failure revokes the gene
        // immediately, yet the assertions below require the gene and capsule
        // to remain Promoted and explicitly require that NO GeneRevoked event
        // was emitted — confirm whether the name or the expectations are stale.
        let (capturer, store) = build_test_evo("revoke-replay", "run-capture", command_validator());
        let capsule = capturer
            .capture_successful_mutation(&"run-capture".into(), sample_mutation())
            .await
            .unwrap();

        // Same store, but every validation run now fails.
        let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
        let failing_replay = build_test_evo_with_store(
            "revoke-replay",
            "run-replay-fail",
            failing_validator,
            store.clone(),
        );

        let first = failing_replay
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();
        let second = failing_replay
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();

        // Both failed replays fall back to the planner.
        assert!(!first.used_capsule);
        assert!(first.fallback_to_planner);
        assert!(!second.used_capsule);
        assert!(second.fallback_to_planner);

        // Projected states stay Promoted despite the validation failures.
        let projection = store.rebuild_projection().unwrap();
        let gene = projection
            .genes
            .iter()
            .find(|gene| gene.id == capsule.gene_id)
            .unwrap();
        assert_eq!(gene.state, AssetState::Promoted);
        let committed_capsule = projection
            .capsules
            .iter()
            .find(|current| current.id == capsule.id)
            .unwrap();
        assert_eq!(committed_capsule.state, AssetState::Promoted);

        // Exactly one ValidationFailed for the gene (not one per failed
        // replay), and no GeneRevoked event at all.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::ValidationFailed {
                            gene_id: Some(gene_id),
                            ..
                        } if gene_id == &capsule.gene_id
                    )
                })
                .count(),
            1
        );
        assert!(!events.iter().any(|stored| {
            matches!(
                &stored.event,
                EvolutionEvent::GeneRevoked { gene_id, .. } if gene_id == &capsule.gene_id
            )
        }));

        // Even a fresh instance with a passing validator refuses to replay:
        // the failures pushed confidence below the replay threshold.
        let recovered = build_test_evo_with_store(
            "revoke-replay",
            "run-replay-check",
            command_validator(),
            store.clone(),
        );
        let after_revoke = recovered
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();
        assert!(!after_revoke.used_capsule);
        assert!(after_revoke.fallback_to_planner);
        assert!(after_revoke.reason.contains("below replay threshold"));
    }
4775
    #[tokio::test]
    async fn remote_reuse_success_rewards_publisher_and_biases_selection() {
        // Two remote publishers offer capsules for the same signal; node-b has
        // the much stronger reputation. Selection should prefer node-b's
        // capsule, and the successful reuse should pay node-b the reuse reward.
        let ledger = Arc::new(Mutex::new(EvuLedger {
            accounts: vec![],
            reputations: vec![
                oris_economics::ReputationRecord {
                    node_id: "node-a".into(),
                    publish_success_rate: 0.4,
                    validator_accuracy: 0.4,
                    reuse_impact: 0,
                },
                oris_economics::ReputationRecord {
                    node_id: "node-b".into(),
                    publish_success_rate: 0.95,
                    validator_accuracy: 0.95,
                    reuse_impact: 8,
                },
            ],
        }));
        let (evo, _) = build_test_evo("remote-success", "run-remote", command_validator());
        let evo = evo.with_economics(ledger.clone());

        let envelope_a = remote_publish_envelope(
            "node-a",
            "run-remote-a",
            "gene-a",
            "capsule-a",
            "mutation-a",
            "shared-signal",
            "A.md",
            "# from a",
        );
        let envelope_b = remote_publish_envelope(
            "node-b",
            "run-remote-b",
            "gene-b",
            "capsule-b",
            "mutation-b",
            "shared-signal",
            "B.md",
            "# from b",
        );

        evo.import_remote_envelope(&envelope_a).unwrap();
        evo.import_remote_envelope(&envelope_b).unwrap();

        let decision = evo
            .replay_or_fallback(replay_input("shared-signal"))
            .await
            .unwrap();

        // Reputation bias steered selection to node-b's capsule.
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-b".into()));
        // The successful reuse credits the publisher's account...
        let locked = ledger.lock().unwrap();
        let rewarded = locked
            .accounts
            .iter()
            .find(|item| item.node_id == "node-b")
            .unwrap();
        assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
        // ...and keeps node-b's selector bias above node-a's.
        assert!(
            locked.selector_reputation_bias()["node-b"]
                > locked.selector_reputation_bias()["node-a"]
        );
    }
4841
    #[tokio::test]
    async fn remote_reuse_settlement_tracks_selected_capsule_publisher_for_shared_gene() {
        // Two publishers supply capsules for the SAME gene id; only the
        // publisher of the capsule actually selected for replay may be
        // rewarded — not every publisher attached to the shared gene.
        let ledger = Arc::new(Mutex::new(EvuLedger::default()));
        let (evo, _) = build_test_evo(
            "remote-shared-publisher",
            "run-remote-shared-publisher",
            command_validator(),
        );
        let evo = evo.with_economics(ledger.clone());
        let input = replay_input("shared-signal");
        // node-a's capsule matches the local environment fingerprint exactly.
        let preferred = remote_publish_envelope_with_env(
            "node-a",
            "run-remote-a",
            "gene-shared",
            "capsule-preferred",
            "mutation-preferred",
            "shared-signal",
            "A.md",
            "# from a",
            input.env.clone(),
        );
        // node-b's capsule shares the gene but targets a divergent environment.
        let fallback = remote_publish_envelope_with_env(
            "node-b",
            "run-remote-b",
            "gene-shared",
            "capsule-fallback",
            "mutation-fallback",
            "shared-signal",
            "B.md",
            "# from b",
            EnvFingerprint {
                rustc_version: "old-rustc".into(),
                cargo_lock_hash: "other-lock".into(),
                target_triple: "aarch64-apple-darwin".into(),
                os: "linux".into(),
            },
        );

        evo.import_remote_envelope(&preferred).unwrap();
        evo.import_remote_envelope(&fallback).unwrap();

        let decision = evo.replay_or_fallback(input).await.unwrap();

        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-preferred".into()));
        // Only node-a (whose capsule was replayed) is credited; node-b has no
        // ledger account entry at all.
        let locked = ledger.lock().unwrap();
        let rewarded = locked
            .accounts
            .iter()
            .find(|item| item.node_id == "node-a")
            .unwrap();
        assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
        assert!(locked.accounts.iter().all(|item| item.node_id != "node-b"));
    }
4896
4897    #[test]
4898    fn select_candidates_surfaces_ranked_remote_cold_start_candidates() {
4899        let ledger = Arc::new(Mutex::new(EvuLedger {
4900            accounts: vec![],
4901            reputations: vec![
4902                oris_economics::ReputationRecord {
4903                    node_id: "node-a".into(),
4904                    publish_success_rate: 0.4,
4905                    validator_accuracy: 0.4,
4906                    reuse_impact: 0,
4907                },
4908                oris_economics::ReputationRecord {
4909                    node_id: "node-b".into(),
4910                    publish_success_rate: 0.95,
4911                    validator_accuracy: 0.95,
4912                    reuse_impact: 8,
4913                },
4914            ],
4915        }));
4916        let (evo, _) = build_test_evo("remote-select", "run-remote-select", command_validator());
4917        let evo = evo.with_economics(ledger);
4918
4919        let envelope_a = remote_publish_envelope(
4920            "node-a",
4921            "run-remote-a",
4922            "gene-a",
4923            "capsule-a",
4924            "mutation-a",
4925            "shared-signal",
4926            "A.md",
4927            "# from a",
4928        );
4929        let envelope_b = remote_publish_envelope(
4930            "node-b",
4931            "run-remote-b",
4932            "gene-b",
4933            "capsule-b",
4934            "mutation-b",
4935            "shared-signal",
4936            "B.md",
4937            "# from b",
4938        );
4939
4940        evo.import_remote_envelope(&envelope_a).unwrap();
4941        evo.import_remote_envelope(&envelope_b).unwrap();
4942
4943        let candidates = evo.select_candidates(&replay_input("shared-signal"));
4944
4945        assert_eq!(candidates.len(), 1);
4946        assert_eq!(candidates[0].gene.id, "gene-b");
4947        assert_eq!(candidates[0].capsules[0].id, "capsule-b");
4948    }
4949
4950    #[tokio::test]
4951    async fn remote_reuse_publisher_bias_survives_restart() {
4952        let ledger = Arc::new(Mutex::new(EvuLedger {
4953            accounts: vec![],
4954            reputations: vec![
4955                oris_economics::ReputationRecord {
4956                    node_id: "node-a".into(),
4957                    publish_success_rate: 0.4,
4958                    validator_accuracy: 0.4,
4959                    reuse_impact: 0,
4960                },
4961                oris_economics::ReputationRecord {
4962                    node_id: "node-b".into(),
4963                    publish_success_rate: 0.95,
4964                    validator_accuracy: 0.95,
4965                    reuse_impact: 8,
4966                },
4967            ],
4968        }));
4969        let store_root = std::env::temp_dir().join(format!(
4970            "oris-evokernel-remote-restart-store-{}",
4971            next_id("t")
4972        ));
4973        if store_root.exists() {
4974            fs::remove_dir_all(&store_root).unwrap();
4975        }
4976        let store: Arc<dyn EvolutionStore> =
4977            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
4978        let evo = build_test_evo_with_store(
4979            "remote-success-restart-source",
4980            "run-remote-restart-source",
4981            command_validator(),
4982            store.clone(),
4983        )
4984        .with_economics(ledger.clone());
4985
4986        let envelope_a = remote_publish_envelope(
4987            "node-a",
4988            "run-remote-a",
4989            "gene-a",
4990            "capsule-a",
4991            "mutation-a",
4992            "shared-signal",
4993            "A.md",
4994            "# from a",
4995        );
4996        let envelope_b = remote_publish_envelope(
4997            "node-b",
4998            "run-remote-b",
4999            "gene-b",
5000            "capsule-b",
5001            "mutation-b",
5002            "shared-signal",
5003            "B.md",
5004            "# from b",
5005        );
5006
5007        evo.import_remote_envelope(&envelope_a).unwrap();
5008        evo.import_remote_envelope(&envelope_b).unwrap();
5009
5010        let recovered = build_test_evo_with_store(
5011            "remote-success-restart-recovered",
5012            "run-remote-restart-recovered",
5013            command_validator(),
5014            store.clone(),
5015        )
5016        .with_economics(ledger.clone());
5017
5018        let decision = recovered
5019            .replay_or_fallback(replay_input("shared-signal"))
5020            .await
5021            .unwrap();
5022
5023        assert!(decision.used_capsule);
5024        assert_eq!(decision.capsule_id, Some("capsule-b".into()));
5025        let locked = ledger.lock().unwrap();
5026        let rewarded = locked
5027            .accounts
5028            .iter()
5029            .find(|item| item.node_id == "node-b")
5030            .unwrap();
5031        assert_eq!(rewarded.balance, recovered.stake_policy.reuse_reward);
5032    }
5033
5034    #[tokio::test]
5035    async fn remote_reuse_failure_penalizes_remote_reputation() {
5036        let ledger = Arc::new(Mutex::new(EvuLedger::default()));
5037        let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
5038        let (evo, _) = build_test_evo("remote-failure", "run-failure", failing_validator);
5039        let evo = evo.with_economics(ledger.clone());
5040
5041        let envelope = remote_publish_envelope(
5042            "node-remote",
5043            "run-remote-failed",
5044            "gene-remote",
5045            "capsule-remote",
5046            "mutation-remote",
5047            "failure-signal",
5048            "FAILED.md",
5049            "# from remote",
5050        );
5051        evo.import_remote_envelope(&envelope).unwrap();
5052
5053        let decision = evo
5054            .replay_or_fallback(replay_input("failure-signal"))
5055            .await
5056            .unwrap();
5057
5058        assert!(!decision.used_capsule);
5059        assert!(decision.fallback_to_planner);
5060
5061        let signal = evo.economics_signal("node-remote").unwrap();
5062        assert_eq!(signal.available_evu, 0);
5063        assert!(signal.publish_success_rate < 0.5);
5064        assert!(signal.validator_accuracy < 0.5);
5065    }
5066}