1use std::collections::{BTreeMap, BTreeSet};
4use std::fs;
5use std::path::Path;
6use std::process::Command;
7use std::sync::{Arc, Mutex};
8
9use async_trait::async_trait;
10use chrono::{DateTime, Duration, Utc};
11use oris_agent_contract::{ExecutionFeedback, MutationProposal as AgentMutationProposal};
12use oris_economics::{EconomicsSignal, EvuLedger, StakePolicy};
13use oris_evolution::{
14 compute_artifact_hash, next_id, stable_hash_json, AssetState, BlastRadius, CandidateSource,
15 Capsule, CapsuleId, EnvFingerprint, EvolutionError, EvolutionEvent, EvolutionProjection,
16 EvolutionStore, Gene, GeneCandidate, MutationId, PreparedMutation, Selector, SelectorInput,
17 StoreBackedSelector, StoredEvolutionEvent, ValidationSnapshot,
18};
19use oris_evolution_network::{EvolutionEnvelope, NetworkAsset};
20use oris_governor::{DefaultGovernor, Governor, GovernorDecision, GovernorInput};
21use oris_kernel::{Kernel, KernelState, RunId};
22use oris_sandbox::{
23 compute_blast_radius, execute_allowed_command, Sandbox, SandboxPolicy, SandboxReceipt,
24};
25use oris_spec::CompiledMutationPlan;
26use serde::{Deserialize, Serialize};
27use thiserror::Error;
28
29pub use oris_evolution::{
30 default_store_root, ArtifactEncoding, AssetState as EvoAssetState,
31 BlastRadius as EvoBlastRadius, CandidateSource as EvoCandidateSource,
32 EnvFingerprint as EvoEnvFingerprint, EvolutionStore as EvoEvolutionStore, JsonlEvolutionStore,
33 MutationArtifact, MutationIntent, MutationTarget, Outcome, RiskLevel,
34 SelectorInput as EvoSelectorInput,
35};
36pub use oris_evolution_network::{
37 FetchQuery, FetchResponse, MessageType, PublishRequest, RevokeNotice,
38};
39pub use oris_governor::{CoolingWindow, GovernorConfig, RevocationReason};
40pub use oris_sandbox::{LocalProcessSandbox, SandboxPolicy as EvoSandboxPolicy};
41pub use oris_spec::{SpecCompileError, SpecCompiler, SpecDocument};
42
/// An ordered pipeline of validation stages run after a mutation is applied.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationPlan {
    /// Profile label recorded into `ValidationSnapshot`s (e.g. "oris-default").
    pub profile: String,
    /// Stages executed in order; `CommandValidator` stops at the first failure.
    pub stages: Vec<ValidationStage>,
}
48
49impl ValidationPlan {
50 pub fn oris_default() -> Self {
51 Self {
52 profile: "oris-default".into(),
53 stages: vec![
54 ValidationStage::Command {
55 program: "cargo".into(),
56 args: vec!["fmt".into(), "--all".into(), "--check".into()],
57 timeout_ms: 60_000,
58 },
59 ValidationStage::Command {
60 program: "cargo".into(),
61 args: vec!["check".into(), "--workspace".into()],
62 timeout_ms: 180_000,
63 },
64 ValidationStage::Command {
65 program: "cargo".into(),
66 args: vec![
67 "test".into(),
68 "-p".into(),
69 "oris-kernel".into(),
70 "-p".into(),
71 "oris-evolution".into(),
72 "-p".into(),
73 "oris-sandbox".into(),
74 "-p".into(),
75 "oris-evokernel".into(),
76 "--lib".into(),
77 ],
78 timeout_ms: 300_000,
79 },
80 ValidationStage::Command {
81 program: "cargo".into(),
82 args: vec![
83 "test".into(),
84 "-p".into(),
85 "oris-runtime".into(),
86 "--lib".into(),
87 ],
88 timeout_ms: 300_000,
89 },
90 ],
91 }
92 }
93}
94
/// A single validation step. Currently only external-command stages exist.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum ValidationStage {
    /// Run `program` with `args` in the sandbox workdir; `timeout_ms` is
    /// forwarded to `execute_allowed_command`.
    Command {
        program: String,
        args: Vec<String>,
        timeout_ms: u64,
    },
}
103
/// Outcome of one validation stage, including captured output streams.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationStageReport {
    /// Human-readable label: "<program> <args joined by spaces>".
    pub stage: String,
    pub success: bool,
    /// `None` when the command could not be executed at all.
    pub exit_code: Option<i32>,
    pub duration_ms: u64,
    pub stdout: String,
    pub stderr: String,
}
113
/// Aggregate result of running a `ValidationPlan`.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationReport {
    /// True only when every executed stage succeeded.
    pub success: bool,
    /// Wall-clock duration of the whole plan, in milliseconds.
    pub duration_ms: u64,
    /// Per-stage reports in execution order (truncated at the first failure).
    pub stages: Vec<ValidationStageReport>,
    /// Newline-joined stdout/stderr of all executed stages.
    pub logs: String,
}
121
/// Raw material fed to `extract_deterministic_signals`: the mutation's text
/// fields plus validation output.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionInput {
    pub patch_diff: String,
    pub intent: String,
    pub expected_effect: String,
    /// Signals declared up front by the mutation intent.
    pub declared_signals: Vec<String>,
    pub changed_files: Vec<String>,
    pub validation_success: bool,
    pub validation_logs: String,
    /// Non-empty stdout/stderr chunks from individual validation stages.
    pub stage_outputs: Vec<String>,
}
133
/// Deterministic signal list (sorted, de-duplicated, capped at 32 entries)
/// plus a stable content hash over it.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionOutput {
    pub values: Vec<String>,
    pub hash: String,
}
139
140impl ValidationReport {
141 pub fn to_snapshot(&self, profile: &str) -> ValidationSnapshot {
142 ValidationSnapshot {
143 success: self.success,
144 profile: profile.to_string(),
145 duration_ms: self.duration_ms,
146 summary: if self.success {
147 "validation passed".into()
148 } else {
149 "validation failed".into()
150 },
151 }
152 }
153}
154
155pub fn extract_deterministic_signals(input: &SignalExtractionInput) -> SignalExtractionOutput {
156 let mut signals = BTreeSet::new();
157
158 for declared in &input.declared_signals {
159 if let Some(phrase) = normalize_signal_phrase(declared) {
160 signals.insert(phrase);
161 }
162 extend_signal_tokens(&mut signals, declared);
163 }
164
165 for text in [
166 input.patch_diff.as_str(),
167 input.intent.as_str(),
168 input.expected_effect.as_str(),
169 input.validation_logs.as_str(),
170 ] {
171 extend_signal_tokens(&mut signals, text);
172 }
173
174 for changed_file in &input.changed_files {
175 extend_signal_tokens(&mut signals, changed_file);
176 }
177
178 for stage_output in &input.stage_outputs {
179 extend_signal_tokens(&mut signals, stage_output);
180 }
181
182 signals.insert(if input.validation_success {
183 "validation passed".into()
184 } else {
185 "validation failed".into()
186 });
187
188 let values = signals.into_iter().take(32).collect::<Vec<_>>();
189 let hash =
190 stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
191 SignalExtractionOutput { values, hash }
192}
193
/// Errors produced by a `Validator`.
#[derive(Debug, Error)]
pub enum ValidationError {
    /// The validation pipeline itself could not run (distinct from a stage
    /// failing, which is reported via `ValidationReport::success == false`).
    #[error("validation execution failed: {0}")]
    Execution(String),
}
199
/// Runs a validation plan against an applied mutation's sandbox receipt.
#[async_trait]
pub trait Validator: Send + Sync {
    /// Executes `plan` in the workspace described by `receipt` and returns
    /// the aggregated report. `Err` means the validator could not run, not
    /// that validation failed.
    async fn run(
        &self,
        receipt: &SandboxReceipt,
        plan: &ValidationPlan,
    ) -> Result<ValidationReport, ValidationError>;
}
208
/// `Validator` that shells out each stage via `execute_allowed_command`,
/// constrained by a sandbox policy.
pub struct CommandValidator {
    policy: SandboxPolicy,
}
212
213impl CommandValidator {
214 pub fn new(policy: SandboxPolicy) -> Self {
215 Self { policy }
216 }
217}
218
219#[async_trait]
220impl Validator for CommandValidator {
221 async fn run(
222 &self,
223 receipt: &SandboxReceipt,
224 plan: &ValidationPlan,
225 ) -> Result<ValidationReport, ValidationError> {
226 let started = std::time::Instant::now();
227 let mut stages = Vec::new();
228 let mut success = true;
229 let mut logs = String::new();
230
231 for stage in &plan.stages {
232 match stage {
233 ValidationStage::Command {
234 program,
235 args,
236 timeout_ms,
237 } => {
238 let result = execute_allowed_command(
239 &self.policy,
240 &receipt.workdir,
241 program,
242 args,
243 *timeout_ms,
244 )
245 .await;
246 let report = match result {
247 Ok(output) => ValidationStageReport {
248 stage: format!("{program} {}", args.join(" ")),
249 success: output.success,
250 exit_code: output.exit_code,
251 duration_ms: output.duration_ms,
252 stdout: output.stdout,
253 stderr: output.stderr,
254 },
255 Err(err) => ValidationStageReport {
256 stage: format!("{program} {}", args.join(" ")),
257 success: false,
258 exit_code: None,
259 duration_ms: 0,
260 stdout: String::new(),
261 stderr: err.to_string(),
262 },
263 };
264 if !report.success {
265 success = false;
266 }
267 if !report.stdout.is_empty() {
268 logs.push_str(&report.stdout);
269 logs.push('\n');
270 }
271 if !report.stderr.is_empty() {
272 logs.push_str(&report.stderr);
273 logs.push('\n');
274 }
275 stages.push(report);
276 if !success {
277 break;
278 }
279 }
280 }
281 }
282
283 Ok(ValidationReport {
284 success,
285 duration_ms: started.elapsed().as_millis() as u64,
286 stages,
287 logs,
288 })
289 }
290}
291
/// Result of a replay attempt: whether a stored capsule was reused and why.
#[derive(Clone, Debug)]
pub struct ReplayDecision {
    /// True when a capsule was re-applied and re-validated successfully.
    pub used_capsule: bool,
    /// The capsule considered, if one was reached (set on some fallback
    /// paths too, for diagnostics).
    pub capsule_id: Option<CapsuleId>,
    /// True when the caller should fall back to its planner.
    pub fallback_to_planner: bool,
    /// Human-readable explanation of the decision.
    pub reason: String,
}
299
/// Hard failures during replay; soft outcomes are expressed as a
/// `ReplayDecision` with `fallback_to_planner: true` instead.
#[derive(Debug, Error)]
pub enum ReplayError {
    #[error("store error: {0}")]
    Store(String),
    #[error("sandbox error: {0}")]
    Sandbox(String),
    #[error("validation error: {0}")]
    Validation(String),
}
309
/// Attempts to satisfy a selector query by replaying a stored capsule instead
/// of invoking the planner.
#[async_trait]
pub trait ReplayExecutor: Send + Sync {
    /// Returns a decision describing whether a capsule was replayed; `Err` is
    /// reserved for store/sandbox/validation infrastructure failures.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError>;
}
319
/// `ReplayExecutor` backed by the evolution store: selects candidate genes,
/// re-applies their capsule patches in the sandbox, and re-validates them.
pub struct StoreReplayExecutor {
    pub sandbox: Arc<dyn Sandbox>,
    pub validator: Arc<dyn Validator>,
    pub store: Arc<dyn EvolutionStore>,
    pub selector: Arc<dyn Selector>,
    pub governor: Arc<dyn Governor>,
    /// Optional EVU ledger; when present together with `remote_publishers`,
    /// candidates are re-ranked by publisher reputation and reuse outcomes
    /// are settled against publisher stakes.
    pub economics: Option<Arc<Mutex<EvuLedger>>>,
    /// Optional map of gene id -> remote publisher id.
    pub remote_publishers: Option<Arc<Mutex<BTreeMap<String, String>>>>,
    pub stake_policy: StakePolicy,
}
330
#[async_trait]
impl ReplayExecutor for StoreReplayExecutor {
    /// Attempts to replay a stored capsule for `input`.
    ///
    /// Candidate search order: selector matches, then exact-match cold-start
    /// lookup, then quarantined remote exact matches. Each list is re-ranked
    /// with publisher reputation bias when economics is configured. The
    /// method falls back to the planner (with a reason) whenever no candidate
    /// survives scoring, patch application, or validation. On a successful
    /// replay of a quarantined capsule, the capsule is promoted first; every
    /// replay appends a `CapsuleReused` event and settles remote stake.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        let mut selector_input = input.clone();
        // With economics enabled, widen the candidate pool so reputation
        // re-ranking has something to work with.
        if self.economics.is_some() && self.remote_publishers.is_some() {
            selector_input.limit = selector_input.limit.max(4);
        }
        let mut candidates = self.selector.select(&selector_input);
        self.rerank_with_reputation_bias(&mut candidates);
        let mut exact_match = false;
        // Cold-start fallback 1: exact-match lookup directly against the store.
        if candidates.is_empty() {
            let mut exact_candidates = exact_match_candidates(self.store.as_ref(), input);
            self.rerank_with_reputation_bias(&mut exact_candidates);
            if !exact_candidates.is_empty() {
                candidates = exact_candidates;
                exact_match = true;
            }
        }
        // Cold-start fallback 2: quarantined remote assets with exact matches.
        if candidates.is_empty() {
            let mut remote_candidates =
                quarantined_remote_exact_match_candidates(self.store.as_ref(), input);
            self.rerank_with_reputation_bias(&mut remote_candidates);
            if !remote_candidates.is_empty() {
                candidates = remote_candidates;
                exact_match = true;
            }
        }
        candidates.truncate(input.limit.max(1));
        let Some(best) = candidates.into_iter().next() else {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "no matching gene".into(),
            });
        };
        let remote_publisher = self.publisher_for_gene(&best.gene.id);

        // Exact matches bypass the score threshold; fuzzy matches must clear it.
        if !exact_match && best.score < 0.82 {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: format!("best gene score {:.3} below replay threshold", best.score),
            });
        }

        let Some(capsule) = best.capsules.first().cloned() else {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "candidate gene has no capsule".into(),
            });
        };

        let Some(mutation) = find_declared_mutation(self.store.as_ref(), &capsule.mutation_id)
            .map_err(|err| ReplayError::Store(err.to_string()))?
        else {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "mutation payload missing from store".into(),
            });
        };

        // Re-apply the stored patch; an apply failure slashes remote stake.
        let receipt = match self.sandbox.apply(&mutation, policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                self.record_reuse_settlement(remote_publisher.as_deref(), false);
                return Ok(ReplayDecision {
                    used_capsule: false,
                    capsule_id: Some(capsule.id.clone()),
                    fallback_to_planner: true,
                    reason: format!("replay patch apply failed: {err}"),
                });
            }
        };

        let report = self
            .validator
            .run(&receipt, validation)
            .await
            .map_err(|err| ReplayError::Validation(err.to_string()))?;
        if !report.success {
            // Record the failure (possibly revoking the gene) before settling.
            self.record_replay_validation_failure(&best, &capsule, validation, &report)?;
            self.record_reuse_settlement(remote_publisher.as_deref(), false);
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: Some(capsule.id.clone()),
                fallback_to_planner: true,
                reason: "replay validation failed".into(),
            });
        }

        // A quarantined capsule that just passed validation is promoted.
        if matches!(capsule.state, AssetState::Quarantined) {
            self.store
                .append_event(EvolutionEvent::ValidationPassed {
                    mutation_id: capsule.mutation_id.clone(),
                    report: report.to_snapshot(&validation.profile),
                    gene_id: Some(best.gene.id.clone()),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            self.store
                .append_event(EvolutionEvent::CapsuleReleased {
                    capsule_id: capsule.id.clone(),
                    state: AssetState::Promoted,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
        }

        self.store
            .append_event(EvolutionEvent::CapsuleReused {
                capsule_id: capsule.id.clone(),
                gene_id: capsule.gene_id.clone(),
                run_id: capsule.run_id.clone(),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        self.record_reuse_settlement(remote_publisher.as_deref(), true);

        Ok(ReplayDecision {
            used_capsule: true,
            capsule_id: Some(capsule.id),
            fallback_to_planner: false,
            reason: if exact_match {
                "replayed via exact-match cold-start lookup".into()
            } else {
                "replayed via selector".into()
            },
        })
    }
}
469
impl StoreReplayExecutor {
    /// Re-sorts candidates by reputation-adjusted score (descending), ties
    /// broken by gene id for determinism. No-op unless both the economics
    /// ledger and the remote-publisher map are configured and a non-empty
    /// bias map is available; poisoned locks degrade to empty maps.
    fn rerank_with_reputation_bias(&self, candidates: &mut [GeneCandidate]) {
        let Some(ledger) = self.economics.as_ref() else {
            return;
        };
        let Some(remote_publishers) = self.remote_publishers.as_ref() else {
            return;
        };
        let reputation_bias = ledger
            .lock()
            .ok()
            .map(|locked| locked.selector_reputation_bias())
            .unwrap_or_default();
        if reputation_bias.is_empty() {
            return;
        }
        let publisher_map = remote_publishers
            .lock()
            .ok()
            .map(|locked| locked.clone())
            .unwrap_or_default();
        // `right` vs `left` reverses the comparison, giving descending order.
        candidates.sort_by(|left, right| {
            effective_candidate_score(right, &publisher_map, &reputation_bias)
                .partial_cmp(&effective_candidate_score(
                    left,
                    &publisher_map,
                    &reputation_bias,
                ))
                .unwrap_or(std::cmp::Ordering::Equal)
                .then_with(|| left.gene.id.cmp(&right.gene.id))
        });
    }

    /// Remote publisher id recorded for `gene_id`, if any. `None` when
    /// publishers are not tracked or the lock is poisoned.
    fn publisher_for_gene(&self, gene_id: &str) -> Option<String> {
        self.remote_publishers
            .as_ref()?
            .lock()
            .ok()?
            .get(gene_id)
            .cloned()
    }

    /// Settles stake for a remote reuse outcome via the EVU ledger. Skipped
    /// for local genes (no publisher) or when economics is disabled; a
    /// poisoned lock is ignored (best-effort).
    fn record_reuse_settlement(&self, publisher_id: Option<&str>, success: bool) {
        let Some(publisher_id) = publisher_id else {
            return;
        };
        let Some(ledger) = self.economics.as_ref() else {
            return;
        };
        if let Ok(mut locked) = ledger.lock() {
            locked.settle_remote_reuse(publisher_id, success, &self.stake_policy);
        }
    }

    /// Records a `ValidationFailed` event for a failed replay, then asks the
    /// governor whether the gene should be revoked; on revocation, appends
    /// the revocation events and quarantines every capsule of the gene.
    fn record_replay_validation_failure(
        &self,
        best: &GeneCandidate,
        capsule: &Capsule,
        validation: &ValidationPlan,
        report: &ValidationReport,
    ) -> Result<(), ReplayError> {
        let projection = self
            .store
            .rebuild_projection()
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            Self::confidence_context(&projection, &best.gene.id);

        self.store
            .append_event(EvolutionEvent::ValidationFailed {
                mutation_id: capsule.mutation_id.clone(),
                report: report.to_snapshot(&validation.profile),
                gene_id: Some(best.gene.id.clone()),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;

        // Failure count includes the event just appended.
        let replay_failures = self.replay_failure_count(&best.gene.id)?;
        let governor_decision = self.governor.evaluate(GovernorInput {
            candidate_source: if self.publisher_for_gene(&best.gene.id).is_some() {
                CandidateSource::Remote
            } else {
                CandidateSource::Local
            },
            success_count: 0,
            blast_radius: BlastRadius {
                files_changed: capsule.outcome.changed_files.len(),
                lines_changed: capsule.outcome.lines_changed,
            },
            replay_failures,
            recent_mutation_ages_secs: Vec::new(),
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        if matches!(governor_decision.target_state, AssetState::Revoked) {
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: best.gene.id.clone(),
                    state: AssetState::Revoked,
                    reason: governor_decision.reason.clone(),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: best.gene.id.clone(),
                    reason: governor_decision.reason,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            for related in &best.capsules {
                self.store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: related.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
        }

        Ok(())
    }

    /// Derives governor confidence inputs from the projection.
    ///
    /// NOTE(review): the gene's peak capsule confidence is returned as BOTH
    /// "current" and "historical peak" — the projection apparently exposes no
    /// separate current value; confirm this is intended.
    fn confidence_context(
        projection: &EvolutionProjection,
        gene_id: &str,
    ) -> (f32, f32, Option<u64>) {
        let peak_confidence = projection
            .capsules
            .iter()
            .filter(|capsule| capsule.gene_id == gene_id)
            .map(|capsule| capsule.confidence)
            .fold(0.0_f32, f32::max);
        let age_secs = projection
            .last_updated_at
            .get(gene_id)
            .and_then(|timestamp| Self::seconds_since_timestamp(timestamp, Utc::now()));
        (peak_confidence, peak_confidence, age_secs)
    }

    /// Whole seconds elapsed from an RFC 3339 `timestamp` until `now`;
    /// future timestamps clamp to 0, unparseable timestamps yield `None`.
    fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
        let parsed = DateTime::parse_from_rfc3339(timestamp)
            .ok()?
            .with_timezone(&Utc);
        let elapsed = now.signed_duration_since(parsed);
        if elapsed < Duration::zero() {
            Some(0)
        } else {
            u64::try_from(elapsed.num_seconds()).ok()
        }
    }

    /// Counts `ValidationFailed` events attributed to `gene_id` across the
    /// whole event log (full scan from sequence 1).
    fn replay_failure_count(&self, gene_id: &str) -> Result<u64, ReplayError> {
        Ok(self
            .store
            .scan(1)
            .map_err(|err| ReplayError::Store(err.to_string()))?
            .into_iter()
            .filter(|stored| {
                matches!(
                    &stored.event,
                    EvolutionEvent::ValidationFailed {
                        gene_id: Some(current_gene_id),
                        ..
                    } if current_gene_id == gene_id
                )
            })
            .count() as u64)
    }
}
638
/// Top-level errors for the evolution kernel APIs.
#[derive(Debug, Error)]
pub enum EvoKernelError {
    #[error("sandbox error: {0}")]
    Sandbox(String),
    /// The validator could not run (infrastructure failure).
    #[error("validation error: {0}")]
    Validation(String),
    /// Validation ran but at least one stage failed; the full report is kept.
    #[error("validation failed")]
    ValidationFailed(ValidationReport),
    #[error("store error: {0}")]
    Store(String),
}
650
/// Everything produced by a successful mutation capture: the committed
/// capsule, its derived gene, and the governor's promotion decision.
#[derive(Clone, Debug)]
pub struct CaptureOutcome {
    pub capsule: Capsule,
    pub gene: Gene,
    pub governor_decision: GovernorDecision,
}
657
/// Result of importing a remote envelope into the local store.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ImportOutcome {
    pub imported_asset_ids: Vec<String>,
    pub accepted: bool,
}
663
/// Point-in-time metrics derived from the evolution event log.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct EvolutionMetricsSnapshot {
    pub replay_attempts_total: u64,
    pub replay_success_total: u64,
    pub replay_success_rate: f64,
    pub mutation_declared_total: u64,
    pub promoted_mutations_total: u64,
    pub promotion_ratio: f64,
    pub gene_revocations_total: u64,
    pub mutation_velocity_last_hour: u64,
    pub revoke_frequency_last_hour: u64,
    pub promoted_genes: u64,
    pub promoted_capsules: u64,
    /// Sequence number of the most recent event included in this snapshot.
    pub last_event_seq: u64,
}
679
/// Coarse health view derived from `EvolutionMetricsSnapshot`.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct EvolutionHealthSnapshot {
    pub status: String,
    pub last_event_seq: u64,
    pub promoted_genes: u64,
    pub promoted_capsules: u64,
}
687
/// Thin network-facing facade over an evolution store: publish, fetch,
/// revoke, and metrics, without the full kernel.
#[derive(Clone)]
pub struct EvolutionNetworkNode {
    pub store: Arc<dyn EvolutionStore>,
}
692
693impl EvolutionNetworkNode {
694 pub fn new(store: Arc<dyn EvolutionStore>) -> Self {
695 Self { store }
696 }
697
698 pub fn with_default_store() -> Self {
699 Self {
700 store: Arc::new(JsonlEvolutionStore::new(default_store_root())),
701 }
702 }
703
704 pub fn accept_publish_request(
705 &self,
706 request: &PublishRequest,
707 ) -> Result<ImportOutcome, EvoKernelError> {
708 import_remote_envelope_into_store(
709 self.store.as_ref(),
710 &EvolutionEnvelope::publish(request.sender_id.clone(), request.assets.clone()),
711 )
712 }
713
714 pub fn publish_local_assets(
715 &self,
716 sender_id: impl Into<String>,
717 ) -> Result<EvolutionEnvelope, EvoKernelError> {
718 export_promoted_assets_from_store(self.store.as_ref(), sender_id)
719 }
720
721 pub fn fetch_assets(
722 &self,
723 responder_id: impl Into<String>,
724 query: &FetchQuery,
725 ) -> Result<FetchResponse, EvoKernelError> {
726 fetch_assets_from_store(self.store.as_ref(), responder_id, query)
727 }
728
729 pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
730 revoke_assets_in_store(self.store.as_ref(), notice)
731 }
732
733 pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
734 evolution_metrics_snapshot(self.store.as_ref())
735 }
736
737 pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
738 self.metrics_snapshot().map(|snapshot| {
739 let health = evolution_health_snapshot(&snapshot);
740 render_evolution_metrics_prometheus(&snapshot, &health)
741 })
742 }
743
744 pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
745 self.metrics_snapshot()
746 .map(|snapshot| evolution_health_snapshot(&snapshot))
747 }
748}
749
/// The evolution kernel: captures successful mutations as genes/capsules,
/// replays them on demand, and exchanges promoted assets with peers.
pub struct EvoKernel<S: KernelState> {
    pub kernel: Arc<Kernel<S>>,
    pub sandbox: Arc<dyn Sandbox>,
    pub validator: Arc<dyn Validator>,
    pub store: Arc<dyn EvolutionStore>,
    pub selector: Arc<dyn Selector>,
    pub governor: Arc<dyn Governor>,
    /// EVU ledger used for stake/reputation; always present (unlike the
    /// optional one in `StoreReplayExecutor`).
    pub economics: Arc<Mutex<EvuLedger>>,
    /// Map of gene id -> remote publisher id, populated on import.
    pub remote_publishers: Arc<Mutex<BTreeMap<String, String>>>,
    pub stake_policy: StakePolicy,
    pub sandbox_policy: SandboxPolicy,
    pub validation_plan: ValidationPlan,
}
763
764impl<S: KernelState> EvoKernel<S> {
765 fn recent_prior_mutation_ages_secs(
766 &self,
767 exclude_mutation_id: Option<&str>,
768 ) -> Result<Vec<u64>, EvolutionError> {
769 let now = Utc::now();
770 let mut ages = self
771 .store
772 .scan(1)?
773 .into_iter()
774 .filter_map(|stored| match stored.event {
775 EvolutionEvent::MutationDeclared { mutation }
776 if exclude_mutation_id != Some(mutation.intent.id.as_str()) =>
777 {
778 Self::seconds_since_timestamp(&stored.timestamp, now)
779 }
780 _ => None,
781 })
782 .collect::<Vec<_>>();
783 ages.sort_unstable();
784 Ok(ages)
785 }
786
787 fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
788 let parsed = DateTime::parse_from_rfc3339(timestamp)
789 .ok()?
790 .with_timezone(&Utc);
791 let elapsed = now.signed_duration_since(parsed);
792 if elapsed < Duration::zero() {
793 Some(0)
794 } else {
795 u64::try_from(elapsed.num_seconds()).ok()
796 }
797 }
798
799 pub fn new(
800 kernel: Arc<Kernel<S>>,
801 sandbox: Arc<dyn Sandbox>,
802 validator: Arc<dyn Validator>,
803 store: Arc<dyn EvolutionStore>,
804 ) -> Self {
805 let selector: Arc<dyn Selector> = Arc::new(StoreBackedSelector::new(store.clone()));
806 Self {
807 kernel,
808 sandbox,
809 validator,
810 store,
811 selector,
812 governor: Arc::new(DefaultGovernor::default()),
813 economics: Arc::new(Mutex::new(EvuLedger::default())),
814 remote_publishers: Arc::new(Mutex::new(BTreeMap::new())),
815 stake_policy: StakePolicy::default(),
816 sandbox_policy: SandboxPolicy::oris_default(),
817 validation_plan: ValidationPlan::oris_default(),
818 }
819 }
820
    /// Replaces the candidate selector (builder-style).
    pub fn with_selector(mut self, selector: Arc<dyn Selector>) -> Self {
        self.selector = selector;
        self
    }
825
    /// Replaces the sandbox policy used for mutation application (builder-style).
    pub fn with_sandbox_policy(mut self, policy: SandboxPolicy) -> Self {
        self.sandbox_policy = policy;
        self
    }
830
    /// Replaces the promotion governor (builder-style).
    pub fn with_governor(mut self, governor: Arc<dyn Governor>) -> Self {
        self.governor = governor;
        self
    }
835
    /// Replaces the shared EVU ledger (builder-style).
    pub fn with_economics(mut self, economics: Arc<Mutex<EvuLedger>>) -> Self {
        self.economics = economics;
        self
    }
840
    /// Replaces the stake policy used for publish/reuse settlement (builder-style).
    pub fn with_stake_policy(mut self, policy: StakePolicy) -> Self {
        self.stake_policy = policy;
        self
    }
845
    /// Replaces the validation plan run after each mutation (builder-style).
    pub fn with_validation_plan(mut self, plan: ValidationPlan) -> Self {
        self.validation_plan = plan;
        self
    }
850
    /// Delegates candidate selection to the configured selector.
    pub fn select_candidates(&self, input: &SelectorInput) -> Vec<GeneCandidate> {
        self.selector.select(input)
    }
854
    /// Runs the full capture pipeline and returns only the committed capsule,
    /// discarding the derived gene and the governor decision.
    pub async fn capture_successful_mutation(
        &self,
        run_id: &RunId,
        mutation: PreparedMutation,
    ) -> Result<Capsule, EvoKernelError> {
        Ok(self
            .capture_mutation_with_governor(run_id, mutation)
            .await?
            .capsule)
    }
865
    /// Runs the full capture pipeline for a prepared mutation:
    ///
    /// 1. declare the mutation, 2. apply it in the sandbox, 3. validate,
    /// 4. extract deterministic signals, 5. derive the gene and ask the
    /// governor for a target state, 6. commit the capsule (quarantining it
    /// when the governor says so). Every step is recorded as an append-only
    /// `EvolutionEvent`; failures are also recorded (`MutationRejected`,
    /// `ValidationFailed`) before the corresponding error is returned.
    pub async fn capture_mutation_with_governor(
        &self,
        run_id: &RunId,
        mutation: PreparedMutation,
    ) -> Result<CaptureOutcome, EvoKernelError> {
        self.store
            .append_event(EvolutionEvent::MutationDeclared {
                mutation: mutation.clone(),
            })
            .map_err(store_err)?;

        // Apply the patch in the sandbox; a rejection is recorded as an event.
        let receipt = match self.sandbox.apply(&mutation, &self.sandbox_policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                self.store
                    .append_event(EvolutionEvent::MutationRejected {
                        mutation_id: mutation.intent.id.clone(),
                        reason: err.to_string(),
                    })
                    .map_err(store_err)?;
                return Err(EvoKernelError::Sandbox(err.to_string()));
            }
        };

        self.store
            .append_event(EvolutionEvent::MutationApplied {
                mutation_id: mutation.intent.id.clone(),
                patch_hash: receipt.patch_hash.clone(),
                changed_files: receipt
                    .changed_files
                    .iter()
                    .map(|path| path.to_string_lossy().to_string())
                    .collect(),
            })
            .map_err(store_err)?;

        // Validate; a failed report is persisted and surfaced as an error.
        let report = self
            .validator
            .run(&receipt, &self.validation_plan)
            .await
            .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
        if !report.success {
            self.store
                .append_event(EvolutionEvent::ValidationFailed {
                    mutation_id: mutation.intent.id.clone(),
                    report: report.to_snapshot(&self.validation_plan.profile),
                    gene_id: None,
                })
                .map_err(store_err)?;
            return Err(EvoKernelError::ValidationFailed(report));
        }

        self.store
            .append_event(EvolutionEvent::ValidationPassed {
                mutation_id: mutation.intent.id.clone(),
                report: report.to_snapshot(&self.validation_plan.profile),
                gene_id: None,
            })
            .map_err(store_err)?;

        // Deterministic signal extraction over diff, intent, and stage output.
        let extracted_signals = extract_deterministic_signals(&SignalExtractionInput {
            patch_diff: mutation.artifact.payload.clone(),
            intent: mutation.intent.intent.clone(),
            expected_effect: mutation.intent.expected_effect.clone(),
            declared_signals: mutation.intent.signals.clone(),
            changed_files: receipt
                .changed_files
                .iter()
                .map(|path| path.to_string_lossy().to_string())
                .collect(),
            validation_success: report.success,
            validation_logs: report.logs.clone(),
            stage_outputs: report
                .stages
                .iter()
                .flat_map(|stage| [stage.stdout.clone(), stage.stderr.clone()])
                .filter(|value| !value.is_empty())
                .collect(),
        });
        self.store
            .append_event(EvolutionEvent::SignalsExtracted {
                mutation_id: mutation.intent.id.clone(),
                hash: extracted_signals.hash.clone(),
                signals: extracted_signals.values.clone(),
            })
            .map_err(store_err)?;

        let projection = self.store.rebuild_projection().map_err(store_err)?;
        let blast_radius = compute_blast_radius(&mutation.artifact.payload);
        let recent_mutation_ages_secs = self
            .recent_prior_mutation_ages_secs(Some(mutation.intent.id.as_str()))
            .map_err(store_err)?;
        let mut gene = derive_gene(
            &mutation,
            &receipt,
            &self.validation_plan.profile,
            &extracted_signals.values,
        );
        // Success count = existing capsules for this gene, plus this capture.
        let success_count = projection
            .genes
            .iter()
            .find(|existing| existing.id == gene.id)
            .map(|existing| {
                projection
                    .capsules
                    .iter()
                    .filter(|capsule| capsule.gene_id == existing.id)
                    .count() as u64
            })
            .unwrap_or(0)
            + 1;
        // Fixed 0.7 confidence inputs for fresh local captures; the governor
        // decides promotion vs quarantine from counts and blast radius.
        let governor_decision = self.governor.evaluate(GovernorInput {
            candidate_source: CandidateSource::Local,
            success_count,
            blast_radius: blast_radius.clone(),
            replay_failures: 0,
            recent_mutation_ages_secs,
            current_confidence: 0.7,
            historical_peak_confidence: 0.7,
            confidence_last_updated_secs: Some(0),
        });

        gene.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
            .map_err(store_err)?;
        self.store
            .append_event(EvolutionEvent::PromotionEvaluated {
                gene_id: gene.id.clone(),
                state: governor_decision.target_state.clone(),
                reason: governor_decision.reason.clone(),
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Promoted) {
            self.store
                .append_event(EvolutionEvent::GenePromoted {
                    gene_id: gene.id.clone(),
                })
                .map_err(store_err)?;
        }
        if let Some(spec_id) = &mutation.intent.spec_id {
            self.store
                .append_event(EvolutionEvent::SpecLinked {
                    mutation_id: mutation.intent.id.clone(),
                    spec_id: spec_id.clone(),
                })
                .map_err(store_err)?;
        }

        // Commit the capsule in the governor's target state.
        let mut capsule = build_capsule(
            run_id,
            &mutation,
            &receipt,
            &report,
            &self.validation_plan.profile,
            &gene,
            &blast_radius,
        )
        .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
        capsule.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::CapsuleCommitted {
                capsule: capsule.clone(),
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Quarantined) {
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id.clone(),
                })
                .map_err(store_err)?;
        }

        Ok(CaptureOutcome {
            capsule,
            gene,
            governor_decision,
        })
    }
1045
    /// Wraps an agent `MutationProposal` in a freshly-minted `MutationIntent`
    /// and runs the full capture pipeline.
    ///
    /// NOTE(review): risk is hard-coded to `RiskLevel::Low` and the
    /// proposal's file list doubles as the declared signals — confirm both
    /// are intentional for all proposal sources.
    pub async fn capture_from_proposal(
        &self,
        run_id: &RunId,
        proposal: &AgentMutationProposal,
        diff_payload: String,
        base_revision: Option<String>,
    ) -> Result<CaptureOutcome, EvoKernelError> {
        let intent = MutationIntent {
            id: next_id("proposal"),
            intent: proposal.intent.clone(),
            target: MutationTarget::Paths {
                allow: proposal.files.clone(),
            },
            expected_effect: proposal.expected_effect.clone(),
            risk: RiskLevel::Low,
            signals: proposal.files.clone(),
            spec_id: None,
        };
        self.capture_mutation_with_governor(
            run_id,
            prepare_mutation(intent, diff_payload, base_revision),
        )
        .await
    }
1070
1071 pub fn feedback_for_agent(outcome: &CaptureOutcome) -> ExecutionFeedback {
1072 ExecutionFeedback {
1073 accepted: !matches!(outcome.governor_decision.target_state, AssetState::Revoked),
1074 asset_state: Some(format!("{:?}", outcome.governor_decision.target_state)),
1075 summary: outcome.governor_decision.reason.clone(),
1076 }
1077 }
1078
    /// Exports locally promoted assets as a publish envelope, reserving EVU
    /// publish stake for `sender_id` when the envelope is non-empty.
    ///
    /// Insufficient stake (or a poisoned ledger lock) aborts with an error.
    /// NOTE(review): economics failures are reported through the
    /// `Validation` error variant — confirm that mapping is intended.
    pub fn export_promoted_assets(
        &self,
        sender_id: impl Into<String>,
    ) -> Result<EvolutionEnvelope, EvoKernelError> {
        let sender_id = sender_id.into();
        let envelope = export_promoted_assets_from_store(self.store.as_ref(), sender_id.clone())?;
        if !envelope.assets.is_empty() {
            let mut ledger = self
                .economics
                .lock()
                .map_err(|_| EvoKernelError::Validation("economics ledger lock poisoned".into()))?;
            if ledger
                .reserve_publish_stake(&sender_id, &self.stake_policy)
                .is_none()
            {
                return Err(EvoKernelError::Validation(
                    "insufficient EVU for remote publish".into(),
                ));
            }
        }
        Ok(envelope)
    }
1101
    /// Imports a remote envelope into the store, then records the envelope's
    /// sender as the publisher of the imported genes (for later settlement).
    pub fn import_remote_envelope(
        &self,
        envelope: &EvolutionEnvelope,
    ) -> Result<ImportOutcome, EvoKernelError> {
        let outcome = import_remote_envelope_into_store(self.store.as_ref(), envelope)?;
        self.record_remote_publishers(envelope);
        Ok(outcome)
    }
1110
    /// Answers a peer's fetch query from the local store.
    pub fn fetch_assets(
        &self,
        responder_id: impl Into<String>,
        query: &FetchQuery,
    ) -> Result<FetchResponse, EvoKernelError> {
        fetch_assets_from_store(self.store.as_ref(), responder_id, query)
    }
1118
1119 pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
1120 revoke_assets_in_store(self.store.as_ref(), notice)
1121 }
1122
1123 pub async fn replay_or_fallback(
1124 &self,
1125 input: SelectorInput,
1126 ) -> Result<ReplayDecision, EvoKernelError> {
1127 let executor = StoreReplayExecutor {
1128 sandbox: self.sandbox.clone(),
1129 validator: self.validator.clone(),
1130 store: self.store.clone(),
1131 selector: self.selector.clone(),
1132 governor: self.governor.clone(),
1133 economics: Some(self.economics.clone()),
1134 remote_publishers: Some(self.remote_publishers.clone()),
1135 stake_policy: self.stake_policy.clone(),
1136 };
1137 executor
1138 .try_replay(&input, &self.sandbox_policy, &self.validation_plan)
1139 .await
1140 .map_err(|err| EvoKernelError::Validation(err.to_string()))
1141 }
1142
1143 pub fn economics_signal(&self, node_id: &str) -> Option<EconomicsSignal> {
1144 self.economics.lock().ok()?.governor_signal(node_id)
1145 }
1146
1147 pub fn selector_reputation_bias(&self) -> BTreeMap<String, f32> {
1148 self.economics
1149 .lock()
1150 .ok()
1151 .map(|locked| locked.selector_reputation_bias())
1152 .unwrap_or_default()
1153 }
1154
1155 pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
1156 evolution_metrics_snapshot(self.store.as_ref())
1157 }
1158
1159 pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
1160 self.metrics_snapshot().map(|snapshot| {
1161 let health = evolution_health_snapshot(&snapshot);
1162 render_evolution_metrics_prometheus(&snapshot, &health)
1163 })
1164 }
1165
1166 pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
1167 self.metrics_snapshot()
1168 .map(|snapshot| evolution_health_snapshot(&snapshot))
1169 }
1170
1171 fn record_remote_publishers(&self, envelope: &EvolutionEnvelope) {
1172 let sender_id = envelope.sender_id.trim();
1173 if sender_id.is_empty() {
1174 return;
1175 }
1176 let Ok(mut publishers) = self.remote_publishers.lock() else {
1177 return;
1178 };
1179 for asset in &envelope.assets {
1180 match asset {
1181 NetworkAsset::Gene { gene } => {
1182 publishers.insert(gene.id.clone(), sender_id.to_string());
1183 }
1184 NetworkAsset::Capsule { capsule } => {
1185 publishers.insert(capsule.gene_id.clone(), sender_id.to_string());
1186 }
1187 NetworkAsset::EvolutionEvent { .. } => {}
1188 }
1189 }
1190 }
1191}
1192
1193pub fn prepare_mutation(
1194 intent: MutationIntent,
1195 diff_payload: String,
1196 base_revision: Option<String>,
1197) -> PreparedMutation {
1198 PreparedMutation {
1199 intent,
1200 artifact: MutationArtifact {
1201 encoding: ArtifactEncoding::UnifiedDiff,
1202 content_hash: compute_artifact_hash(&diff_payload),
1203 payload: diff_payload,
1204 base_revision,
1205 },
1206 }
1207}
1208
1209pub fn prepare_mutation_from_spec(
1210 plan: CompiledMutationPlan,
1211 diff_payload: String,
1212 base_revision: Option<String>,
1213) -> PreparedMutation {
1214 prepare_mutation(plan.mutation_intent, diff_payload, base_revision)
1215}
1216
1217pub fn default_evolution_store() -> Arc<dyn EvolutionStore> {
1218 Arc::new(oris_evolution::JsonlEvolutionStore::new(
1219 default_store_root(),
1220 ))
1221}
1222
1223fn derive_gene(
1224 mutation: &PreparedMutation,
1225 receipt: &SandboxReceipt,
1226 validation_profile: &str,
1227 extracted_signals: &[String],
1228) -> Gene {
1229 let mut strategy = BTreeSet::new();
1230 for file in &receipt.changed_files {
1231 if let Some(component) = file.components().next() {
1232 strategy.insert(component.as_os_str().to_string_lossy().to_string());
1233 }
1234 }
1235 for token in mutation
1236 .artifact
1237 .payload
1238 .split(|ch: char| !ch.is_ascii_alphanumeric())
1239 {
1240 if token.len() == 5
1241 && token.starts_with('E')
1242 && token[1..].chars().all(|ch| ch.is_ascii_digit())
1243 {
1244 strategy.insert(token.to_string());
1245 }
1246 }
1247 for token in mutation.intent.intent.split_whitespace().take(8) {
1248 strategy.insert(token.to_ascii_lowercase());
1249 }
1250 let strategy = strategy.into_iter().collect::<Vec<_>>();
1251 let id = stable_hash_json(&(extracted_signals, &strategy, validation_profile))
1252 .unwrap_or_else(|_| next_id("gene"));
1253 Gene {
1254 id,
1255 signals: extracted_signals.to_vec(),
1256 strategy,
1257 validation: vec![validation_profile.to_string()],
1258 state: AssetState::Promoted,
1259 }
1260}
1261
1262fn build_capsule(
1263 run_id: &RunId,
1264 mutation: &PreparedMutation,
1265 receipt: &SandboxReceipt,
1266 report: &ValidationReport,
1267 validation_profile: &str,
1268 gene: &Gene,
1269 blast_radius: &BlastRadius,
1270) -> Result<Capsule, EvolutionError> {
1271 let env = current_env_fingerprint(&receipt.workdir);
1272 let validator_hash = stable_hash_json(report)?;
1273 let diff_hash = mutation.artifact.content_hash.clone();
1274 let id = stable_hash_json(&(run_id, &gene.id, &diff_hash, &mutation.intent.id))?;
1275 Ok(Capsule {
1276 id,
1277 gene_id: gene.id.clone(),
1278 mutation_id: mutation.intent.id.clone(),
1279 run_id: run_id.clone(),
1280 diff_hash,
1281 confidence: 0.7,
1282 env,
1283 outcome: oris_evolution::Outcome {
1284 success: true,
1285 validation_profile: validation_profile.to_string(),
1286 validation_duration_ms: report.duration_ms,
1287 changed_files: receipt
1288 .changed_files
1289 .iter()
1290 .map(|path| path.to_string_lossy().to_string())
1291 .collect(),
1292 validator_hash,
1293 lines_changed: blast_radius.lines_changed,
1294 replay_verified: false,
1295 },
1296 state: AssetState::Promoted,
1297 })
1298}
1299
1300fn current_env_fingerprint(workdir: &Path) -> EnvFingerprint {
1301 let rustc_version = Command::new("rustc")
1302 .arg("--version")
1303 .output()
1304 .ok()
1305 .filter(|output| output.status.success())
1306 .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
1307 .unwrap_or_else(|| "rustc unknown".into());
1308 let cargo_lock_hash = fs::read(workdir.join("Cargo.lock"))
1309 .ok()
1310 .map(|bytes| {
1311 let value = String::from_utf8_lossy(&bytes);
1312 compute_artifact_hash(&value)
1313 })
1314 .unwrap_or_else(|| "missing-cargo-lock".into());
1315 let target_triple = format!(
1316 "{}-unknown-{}",
1317 std::env::consts::ARCH,
1318 std::env::consts::OS
1319 );
1320 EnvFingerprint {
1321 rustc_version,
1322 cargo_lock_hash,
1323 target_triple,
1324 os: std::env::consts::OS.to_string(),
1325 }
1326}
1327
/// Split `input` on non-alphanumeric boundaries and insert each normalized
/// token into `out`.
///
/// Rustc error codes (five chars: `e`/`E` plus four ASCII digits) keep
/// their digits and get a canonical uppercase `E` prefix; every other
/// token is lowercased. Tokens shorter than three characters are dropped.
fn extend_signal_tokens(out: &mut BTreeSet<String>, input: &str) {
    let tokens = input
        .split(|ch: char| !ch.is_ascii_alphanumeric())
        .map(str::trim)
        .filter(|token| !token.is_empty());
    for token in tokens {
        // Inline error-code shape check: "E0123"/"e0123" only.
        let is_error_code = token.len() == 5
            && matches!(token.as_bytes().first(), Some(b'e') | Some(b'E'))
            && token[1..].chars().all(|ch| ch.is_ascii_digit());
        let normalized = if is_error_code {
            format!("E{}", &token[1..])
        } else {
            token.to_ascii_lowercase()
        };
        if normalized.len() >= 3 {
            out.insert(normalized);
        }
    }
}
1350
/// Normalize a free-form phrase into a canonical signal string.
///
/// Tokens are split on non-alphanumeric boundaries; rustc error codes are
/// canonicalized to an uppercase `E` prefix, everything else is
/// lowercased, tokens shorter than three characters are dropped, and the
/// survivors are joined with single spaces. Returns `None` when nothing
/// survives.
fn normalize_signal_phrase(input: &str) -> Option<String> {
    let mut parts: Vec<String> = Vec::new();
    for raw in input.split(|ch: char| !ch.is_ascii_alphanumeric()) {
        let token = raw.trim();
        if token.is_empty() {
            continue;
        }
        // Inline error-code shape check: "E0123"/"e0123" only.
        let is_error_code = token.len() == 5
            && matches!(token.as_bytes().first(), Some(b'e') | Some(b'E'))
            && token[1..].chars().all(|ch| ch.is_ascii_digit());
        let normalized = if is_error_code {
            format!("E{}", &token[1..])
        } else {
            token.to_ascii_lowercase()
        };
        if normalized.len() >= 3 {
            parts.push(normalized);
        }
    }
    if parts.is_empty() {
        None
    } else {
        Some(parts.join(" "))
    }
}
1383
/// True when `value` has the shape of a rustc diagnostic code: exactly
/// five bytes — an ASCII `e` or `E` followed by four ASCII digits.
fn is_rust_error_code(value: &str) -> bool {
    let mut chars = value.chars();
    let leading_e = matches!(chars.next(), Some('e' | 'E'));
    leading_e && value.len() == 5 && chars.all(|ch| ch.is_ascii_digit())
}
1389
1390fn find_declared_mutation(
1391 store: &dyn EvolutionStore,
1392 mutation_id: &MutationId,
1393) -> Result<Option<PreparedMutation>, EvolutionError> {
1394 for stored in store.scan(1)? {
1395 if let EvolutionEvent::MutationDeclared { mutation } = stored.event {
1396 if &mutation.intent.id == mutation_id {
1397 return Ok(Some(mutation));
1398 }
1399 }
1400 }
1401 Ok(None)
1402}
1403
/// Find promoted genes whose signal set matches the selector input
/// exactly (case-insensitive set equality), each paired with its promoted
/// capsules ordered best-first.
///
/// When `input.spec_id` is set (and non-blank), only genes linked to that
/// spec id qualify. Genes with no promoted capsules are dropped. A
/// candidate's score is the environment-match factor of its best capsule;
/// the final list is sorted by score descending, then gene id for
/// deterministic ordering. Store projection failures degrade to an empty
/// list rather than erroring.
fn exact_match_candidates(store: &dyn EvolutionStore, input: &SelectorInput) -> Vec<GeneCandidate> {
    let Ok(projection) = store.rebuild_projection() else {
        return Vec::new();
    };
    // Lookup tables are cloned up front because `projection.genes` is
    // consumed by `into_iter` below.
    let capsules = projection.capsules.clone();
    let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
    // A blank requested spec id is treated as "no spec filter".
    let requested_spec_id = input
        .spec_id
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty());
    let signal_set = input
        .signals
        .iter()
        .map(|signal| signal.to_ascii_lowercase())
        .collect::<BTreeSet<_>>();
    let mut candidates = projection
        .genes
        .into_iter()
        .filter_map(|gene| {
            if gene.state != AssetState::Promoted {
                return None;
            }
            if let Some(spec_id) = requested_spec_id {
                let matches_spec = spec_ids_by_gene
                    .get(&gene.id)
                    .map(|values| {
                        values
                            .iter()
                            .any(|value| value.eq_ignore_ascii_case(spec_id))
                    })
                    .unwrap_or(false);
                if !matches_spec {
                    return None;
                }
            }
            // Case-insensitive set equality between gene signals and the
            // requested signals.
            let gene_signals = gene
                .signals
                .iter()
                .map(|signal| signal.to_ascii_lowercase())
                .collect::<BTreeSet<_>>();
            if gene_signals == signal_set {
                let mut matched_capsules = capsules
                    .iter()
                    .filter(|capsule| {
                        capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
                    })
                    .cloned()
                    .collect::<Vec<_>>();
                // Best capsule first: env match desc, then confidence
                // desc, then id asc so ties break deterministically.
                matched_capsules.sort_by(|left, right| {
                    replay_environment_match_factor(&input.env, &right.env)
                        .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
                        .unwrap_or(std::cmp::Ordering::Equal)
                        .then_with(|| {
                            right
                                .confidence
                                .partial_cmp(&left.confidence)
                                .unwrap_or(std::cmp::Ordering::Equal)
                        })
                        .then_with(|| left.id.cmp(&right.id))
                });
                if matched_capsules.is_empty() {
                    None
                } else {
                    // Score the gene by its best capsule's env match.
                    let score = matched_capsules
                        .first()
                        .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
                        .unwrap_or(0.0);
                    Some(GeneCandidate {
                        gene,
                        score,
                        capsules: matched_capsules,
                    })
                }
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    candidates.sort_by(|left, right| {
        right
            .score
            .partial_cmp(&left.score)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| left.gene.id.cmp(&right.gene.id))
    });
    candidates
}
1492
/// Exact-signal-match candidates drawn only from capsules that were
/// imported from remote peers and are still quarantined.
///
/// Mirrors `exact_match_candidates` (same spec filtering, signal-set
/// equality, capsule ordering, scoring and final sort), except a capsule
/// only qualifies when it is in `Quarantined` state AND its id appears in
/// a `RemoteAssetImported` event with a `Remote` source. Returns an empty
/// list when no remote imports exist or the store cannot be read.
fn quarantined_remote_exact_match_candidates(
    store: &dyn EvolutionStore,
    input: &SelectorInput,
) -> Vec<GeneCandidate> {
    // Gather the ids of every asset that arrived via remote import.
    let remote_asset_ids = store
        .scan(1)
        .ok()
        .map(|events| {
            events
                .into_iter()
                .filter_map(|stored| match stored.event {
                    EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids,
                    } => Some(asset_ids),
                    _ => None,
                })
                .flatten()
                .collect::<BTreeSet<_>>()
        })
        .unwrap_or_default();
    if remote_asset_ids.is_empty() {
        return Vec::new();
    }

    let Ok(projection) = store.rebuild_projection() else {
        return Vec::new();
    };
    // Lookup tables are cloned because `projection.genes` is consumed.
    let capsules = projection.capsules.clone();
    let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
    // A blank requested spec id is treated as "no spec filter".
    let requested_spec_id = input
        .spec_id
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty());
    let signal_set = input
        .signals
        .iter()
        .map(|signal| signal.to_ascii_lowercase())
        .collect::<BTreeSet<_>>();
    let mut candidates = projection
        .genes
        .into_iter()
        .filter_map(|gene| {
            if gene.state != AssetState::Promoted {
                return None;
            }
            if let Some(spec_id) = requested_spec_id {
                let matches_spec = spec_ids_by_gene
                    .get(&gene.id)
                    .map(|values| {
                        values
                            .iter()
                            .any(|value| value.eq_ignore_ascii_case(spec_id))
                    })
                    .unwrap_or(false);
                if !matches_spec {
                    return None;
                }
            }
            let gene_signals = gene
                .signals
                .iter()
                .map(|signal| signal.to_ascii_lowercase())
                .collect::<BTreeSet<_>>();
            if gene_signals == signal_set {
                // Only quarantined capsules that came from a remote import.
                let mut matched_capsules = capsules
                    .iter()
                    .filter(|capsule| {
                        capsule.gene_id == gene.id
                            && capsule.state == AssetState::Quarantined
                            && remote_asset_ids.contains(&capsule.id)
                    })
                    .cloned()
                    .collect::<Vec<_>>();
                // Best capsule first: env match desc, confidence desc, id asc.
                matched_capsules.sort_by(|left, right| {
                    replay_environment_match_factor(&input.env, &right.env)
                        .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
                        .unwrap_or(std::cmp::Ordering::Equal)
                        .then_with(|| {
                            right
                                .confidence
                                .partial_cmp(&left.confidence)
                                .unwrap_or(std::cmp::Ordering::Equal)
                        })
                        .then_with(|| left.id.cmp(&right.id))
                });
                if matched_capsules.is_empty() {
                    None
                } else {
                    // Score the gene by its best capsule's env match.
                    let score = matched_capsules
                        .first()
                        .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
                        .unwrap_or(0.0);
                    Some(GeneCandidate {
                        gene,
                        score,
                        capsules: matched_capsules,
                    })
                }
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    candidates.sort_by(|left, right| {
        right
            .score
            .partial_cmp(&left.score)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| left.gene.id.cmp(&right.gene.id))
    });
    candidates
}
1607
1608fn replay_environment_match_factor(input: &EnvFingerprint, candidate: &EnvFingerprint) -> f32 {
1609 let fields = [
1610 input
1611 .rustc_version
1612 .eq_ignore_ascii_case(&candidate.rustc_version),
1613 input
1614 .cargo_lock_hash
1615 .eq_ignore_ascii_case(&candidate.cargo_lock_hash),
1616 input
1617 .target_triple
1618 .eq_ignore_ascii_case(&candidate.target_triple),
1619 input.os.eq_ignore_ascii_case(&candidate.os),
1620 ];
1621 let matched_fields = fields.into_iter().filter(|matched| *matched).count() as f32;
1622 0.5 + ((matched_fields / 4.0) * 0.5)
1623}
1624
1625fn effective_candidate_score(
1626 candidate: &GeneCandidate,
1627 publishers_by_gene: &BTreeMap<String, String>,
1628 reputation_bias: &BTreeMap<String, f32>,
1629) -> f32 {
1630 let bias = publishers_by_gene
1631 .get(&candidate.gene.id)
1632 .and_then(|publisher| reputation_bias.get(publisher))
1633 .copied()
1634 .unwrap_or(0.0)
1635 .clamp(0.0, 1.0);
1636 candidate.score * (1.0 + (bias * 0.1))
1637}
1638
1639fn export_promoted_assets_from_store(
1640 store: &dyn EvolutionStore,
1641 sender_id: impl Into<String>,
1642) -> Result<EvolutionEnvelope, EvoKernelError> {
1643 let projection = store.rebuild_projection().map_err(store_err)?;
1644 let mut assets = Vec::new();
1645 for gene in projection
1646 .genes
1647 .into_iter()
1648 .filter(|gene| gene.state == AssetState::Promoted)
1649 {
1650 assets.push(NetworkAsset::Gene { gene });
1651 }
1652 for capsule in projection
1653 .capsules
1654 .into_iter()
1655 .filter(|capsule| capsule.state == AssetState::Promoted)
1656 {
1657 assets.push(NetworkAsset::Capsule { capsule });
1658 }
1659 Ok(EvolutionEnvelope::publish(sender_id, assets))
1660}
1661
/// Validate and ingest a remote envelope's assets into the local store.
///
/// The envelope's content hash must verify or nothing is imported. Remote
/// genes are projected as-is; remote capsules are always committed in
/// `Quarantined` state (never trusted as promoted) with an explicit
/// quarantine event appended afterwards. Loose evolution events are only
/// forwarded when `should_import_remote_event` allows them. Every gene or
/// capsule id is also recorded via a `RemoteAssetImported` event before
/// the asset itself is written.
fn import_remote_envelope_into_store(
    store: &dyn EvolutionStore,
    envelope: &EvolutionEnvelope,
) -> Result<ImportOutcome, EvoKernelError> {
    if !envelope.verify_content_hash() {
        return Err(EvoKernelError::Validation(
            "invalid evolution envelope hash".into(),
        ));
    }

    let mut imported_asset_ids = Vec::new();
    for asset in &envelope.assets {
        match asset {
            NetworkAsset::Gene { gene } => {
                imported_asset_ids.push(gene.id.clone());
                // Provenance first, then project the gene directly.
                store
                    .append_event(EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids: vec![gene.id.clone()],
                    })
                    .map_err(store_err)?;
                store
                    .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
                    .map_err(store_err)?;
            }
            NetworkAsset::Capsule { capsule } => {
                imported_asset_ids.push(capsule.id.clone());
                store
                    .append_event(EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids: vec![capsule.id.clone()],
                    })
                    .map_err(store_err)?;
                // Remote capsules are never trusted as promoted: force
                // quarantined state before committing, then append the
                // explicit quarantine marker.
                let mut quarantined = capsule.clone();
                quarantined.state = AssetState::Quarantined;
                store
                    .append_event(EvolutionEvent::CapsuleCommitted {
                        capsule: quarantined.clone(),
                    })
                    .map_err(store_err)?;
                store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: quarantined.id,
                    })
                    .map_err(store_err)?;
            }
            NetworkAsset::EvolutionEvent { event } => {
                // Only allow-listed event kinds cross the trust boundary.
                if should_import_remote_event(event) {
                    store.append_event(event.clone()).map_err(store_err)?;
                }
            }
        }
    }

    Ok(ImportOutcome {
        imported_asset_ids,
        accepted: true,
    })
}
1721
1722fn should_import_remote_event(event: &EvolutionEvent) -> bool {
1723 matches!(
1724 event,
1725 EvolutionEvent::MutationDeclared { .. } | EvolutionEvent::SpecLinked { .. }
1726 )
1727}
1728
1729fn fetch_assets_from_store(
1730 store: &dyn EvolutionStore,
1731 responder_id: impl Into<String>,
1732 query: &FetchQuery,
1733) -> Result<FetchResponse, EvoKernelError> {
1734 let projection = store.rebuild_projection().map_err(store_err)?;
1735 let normalized_signals: Vec<String> = query
1736 .signals
1737 .iter()
1738 .map(|signal| signal.trim().to_ascii_lowercase())
1739 .filter(|signal| !signal.is_empty())
1740 .collect();
1741 let matches_any_signal = |candidate: &str| {
1742 if normalized_signals.is_empty() {
1743 return true;
1744 }
1745 let candidate = candidate.to_ascii_lowercase();
1746 normalized_signals
1747 .iter()
1748 .any(|signal| candidate.contains(signal) || signal.contains(&candidate))
1749 };
1750
1751 let matched_genes: Vec<Gene> = projection
1752 .genes
1753 .into_iter()
1754 .filter(|gene| gene.state == AssetState::Promoted)
1755 .filter(|gene| gene.signals.iter().any(|signal| matches_any_signal(signal)))
1756 .collect();
1757 let matched_gene_ids: BTreeSet<String> =
1758 matched_genes.iter().map(|gene| gene.id.clone()).collect();
1759 let matched_capsules: Vec<Capsule> = projection
1760 .capsules
1761 .into_iter()
1762 .filter(|capsule| capsule.state == AssetState::Promoted)
1763 .filter(|capsule| matched_gene_ids.contains(&capsule.gene_id))
1764 .collect();
1765
1766 let mut assets = Vec::new();
1767 for gene in matched_genes {
1768 assets.push(NetworkAsset::Gene { gene });
1769 }
1770 for capsule in matched_capsules {
1771 assets.push(NetworkAsset::Capsule { capsule });
1772 }
1773
1774 Ok(FetchResponse {
1775 sender_id: responder_id.into(),
1776 assets,
1777 })
1778}
1779
/// Apply a revoke notice: revoke every requested gene and quarantine every
/// affected capsule, cascading through gene ownership.
///
/// A requested capsule id also revokes its owning gene, and every capsule
/// of a revoked gene is quarantined. Returns a notice listing the full,
/// deduplicated, sorted set of affected asset ids (genes and capsules).
fn revoke_assets_in_store(
    store: &dyn EvolutionStore,
    notice: &RevokeNotice,
) -> Result<RevokeNotice, EvoKernelError> {
    let projection = store.rebuild_projection().map_err(store_err)?;
    // Normalize the requested ids: trim and drop blanks.
    let requested: BTreeSet<String> = notice
        .asset_ids
        .iter()
        .map(|asset_id| asset_id.trim().to_string())
        .filter(|asset_id| !asset_id.is_empty())
        .collect();
    let mut revoked_gene_ids = BTreeSet::new();
    let mut quarantined_capsule_ids = BTreeSet::new();

    // Pass 1: gene ids named directly in the notice.
    for gene in &projection.genes {
        if requested.contains(&gene.id) {
            revoked_gene_ids.insert(gene.id.clone());
        }
    }
    // Pass 2: capsule ids named directly; each also revokes its gene.
    for capsule in &projection.capsules {
        if requested.contains(&capsule.id) {
            quarantined_capsule_ids.insert(capsule.id.clone());
            revoked_gene_ids.insert(capsule.gene_id.clone());
        }
    }
    // Pass 3: cascade — every capsule of any revoked gene is quarantined.
    for capsule in &projection.capsules {
        if revoked_gene_ids.contains(&capsule.gene_id) {
            quarantined_capsule_ids.insert(capsule.id.clone());
        }
    }

    for gene_id in &revoked_gene_ids {
        store
            .append_event(EvolutionEvent::GeneRevoked {
                gene_id: gene_id.clone(),
                reason: notice.reason.clone(),
            })
            .map_err(store_err)?;
    }
    for capsule_id in &quarantined_capsule_ids {
        store
            .append_event(EvolutionEvent::CapsuleQuarantined {
                capsule_id: capsule_id.clone(),
            })
            .map_err(store_err)?;
    }

    // Merge both id sets into one sorted, deduplicated list.
    let mut affected_ids: Vec<String> = revoked_gene_ids.into_iter().collect();
    affected_ids.extend(quarantined_capsule_ids);
    affected_ids.sort();
    affected_ids.dedup();

    Ok(RevokeNotice {
        sender_id: notice.sender_id.clone(),
        asset_ids: affected_ids,
        reason: notice.reason.clone(),
    })
}
1838
1839fn evolution_metrics_snapshot(
1840 store: &dyn EvolutionStore,
1841) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
1842 let events = store.scan(1).map_err(store_err)?;
1843 let projection = store.rebuild_projection().map_err(store_err)?;
1844 let replay_success_total = events
1845 .iter()
1846 .filter(|stored| matches!(stored.event, EvolutionEvent::CapsuleReused { .. }))
1847 .count() as u64;
1848 let replay_failures_total = events
1849 .iter()
1850 .filter(|stored| is_replay_validation_failure(&stored.event))
1851 .count() as u64;
1852 let replay_attempts_total = replay_success_total + replay_failures_total;
1853 let mutation_declared_total = events
1854 .iter()
1855 .filter(|stored| matches!(stored.event, EvolutionEvent::MutationDeclared { .. }))
1856 .count() as u64;
1857 let promoted_mutations_total = events
1858 .iter()
1859 .filter(|stored| matches!(stored.event, EvolutionEvent::GenePromoted { .. }))
1860 .count() as u64;
1861 let gene_revocations_total = events
1862 .iter()
1863 .filter(|stored| matches!(stored.event, EvolutionEvent::GeneRevoked { .. }))
1864 .count() as u64;
1865 let cutoff = Utc::now() - Duration::hours(1);
1866 let mutation_velocity_last_hour = count_recent_events(&events, cutoff, |event| {
1867 matches!(event, EvolutionEvent::MutationDeclared { .. })
1868 });
1869 let revoke_frequency_last_hour = count_recent_events(&events, cutoff, |event| {
1870 matches!(event, EvolutionEvent::GeneRevoked { .. })
1871 });
1872 let promoted_genes = projection
1873 .genes
1874 .iter()
1875 .filter(|gene| gene.state == AssetState::Promoted)
1876 .count() as u64;
1877 let promoted_capsules = projection
1878 .capsules
1879 .iter()
1880 .filter(|capsule| capsule.state == AssetState::Promoted)
1881 .count() as u64;
1882
1883 Ok(EvolutionMetricsSnapshot {
1884 replay_attempts_total,
1885 replay_success_total,
1886 replay_success_rate: safe_ratio(replay_success_total, replay_attempts_total),
1887 mutation_declared_total,
1888 promoted_mutations_total,
1889 promotion_ratio: safe_ratio(promoted_mutations_total, mutation_declared_total),
1890 gene_revocations_total,
1891 mutation_velocity_last_hour,
1892 revoke_frequency_last_hour,
1893 promoted_genes,
1894 promoted_capsules,
1895 last_event_seq: events.last().map(|stored| stored.seq).unwrap_or(0),
1896 })
1897}
1898
/// Project a metrics snapshot into the lightweight health view.
///
/// Status is hard-coded to "ok": reaching this point means the store
/// could be scanned and projected at all; failures surface earlier as
/// errors from the metrics computation.
fn evolution_health_snapshot(snapshot: &EvolutionMetricsSnapshot) -> EvolutionHealthSnapshot {
    EvolutionHealthSnapshot {
        status: "ok".into(),
        last_event_seq: snapshot.last_event_seq,
        promoted_genes: snapshot.promoted_genes,
        promoted_capsules: snapshot.promoted_capsules,
    }
}
1907
1908fn render_evolution_metrics_prometheus(
1909 snapshot: &EvolutionMetricsSnapshot,
1910 health: &EvolutionHealthSnapshot,
1911) -> String {
1912 let mut out = String::new();
1913 out.push_str(
1914 "# HELP oris_evolution_replay_attempts_total Total replay attempts that reached validation.\n",
1915 );
1916 out.push_str("# TYPE oris_evolution_replay_attempts_total counter\n");
1917 out.push_str(&format!(
1918 "oris_evolution_replay_attempts_total {}\n",
1919 snapshot.replay_attempts_total
1920 ));
1921 out.push_str("# HELP oris_evolution_replay_success_total Total replay attempts that reused a capsule successfully.\n");
1922 out.push_str("# TYPE oris_evolution_replay_success_total counter\n");
1923 out.push_str(&format!(
1924 "oris_evolution_replay_success_total {}\n",
1925 snapshot.replay_success_total
1926 ));
1927 out.push_str("# HELP oris_evolution_replay_success_rate Successful replay attempts divided by replay attempts that reached validation.\n");
1928 out.push_str("# TYPE oris_evolution_replay_success_rate gauge\n");
1929 out.push_str(&format!(
1930 "oris_evolution_replay_success_rate {:.6}\n",
1931 snapshot.replay_success_rate
1932 ));
1933 out.push_str(
1934 "# HELP oris_evolution_mutation_declared_total Total declared mutations recorded in the evolution log.\n",
1935 );
1936 out.push_str("# TYPE oris_evolution_mutation_declared_total counter\n");
1937 out.push_str(&format!(
1938 "oris_evolution_mutation_declared_total {}\n",
1939 snapshot.mutation_declared_total
1940 ));
1941 out.push_str("# HELP oris_evolution_promoted_mutations_total Total mutations promoted by the governor.\n");
1942 out.push_str("# TYPE oris_evolution_promoted_mutations_total counter\n");
1943 out.push_str(&format!(
1944 "oris_evolution_promoted_mutations_total {}\n",
1945 snapshot.promoted_mutations_total
1946 ));
1947 out.push_str(
1948 "# HELP oris_evolution_promotion_ratio Promoted mutations divided by declared mutations.\n",
1949 );
1950 out.push_str("# TYPE oris_evolution_promotion_ratio gauge\n");
1951 out.push_str(&format!(
1952 "oris_evolution_promotion_ratio {:.6}\n",
1953 snapshot.promotion_ratio
1954 ));
1955 out.push_str("# HELP oris_evolution_gene_revocations_total Total gene revocations recorded in the evolution log.\n");
1956 out.push_str("# TYPE oris_evolution_gene_revocations_total counter\n");
1957 out.push_str(&format!(
1958 "oris_evolution_gene_revocations_total {}\n",
1959 snapshot.gene_revocations_total
1960 ));
1961 out.push_str("# HELP oris_evolution_mutation_velocity_last_hour Declared mutations observed in the last hour.\n");
1962 out.push_str("# TYPE oris_evolution_mutation_velocity_last_hour gauge\n");
1963 out.push_str(&format!(
1964 "oris_evolution_mutation_velocity_last_hour {}\n",
1965 snapshot.mutation_velocity_last_hour
1966 ));
1967 out.push_str("# HELP oris_evolution_revoke_frequency_last_hour Gene revocations observed in the last hour.\n");
1968 out.push_str("# TYPE oris_evolution_revoke_frequency_last_hour gauge\n");
1969 out.push_str(&format!(
1970 "oris_evolution_revoke_frequency_last_hour {}\n",
1971 snapshot.revoke_frequency_last_hour
1972 ));
1973 out.push_str("# HELP oris_evolution_promoted_genes Current promoted genes in the evolution projection.\n");
1974 out.push_str("# TYPE oris_evolution_promoted_genes gauge\n");
1975 out.push_str(&format!(
1976 "oris_evolution_promoted_genes {}\n",
1977 snapshot.promoted_genes
1978 ));
1979 out.push_str("# HELP oris_evolution_promoted_capsules Current promoted capsules in the evolution projection.\n");
1980 out.push_str("# TYPE oris_evolution_promoted_capsules gauge\n");
1981 out.push_str(&format!(
1982 "oris_evolution_promoted_capsules {}\n",
1983 snapshot.promoted_capsules
1984 ));
1985 out.push_str("# HELP oris_evolution_store_last_event_seq Last visible append-only evolution event sequence.\n");
1986 out.push_str("# TYPE oris_evolution_store_last_event_seq gauge\n");
1987 out.push_str(&format!(
1988 "oris_evolution_store_last_event_seq {}\n",
1989 snapshot.last_event_seq
1990 ));
1991 out.push_str(
1992 "# HELP oris_evolution_health Evolution observability store health (1 = healthy).\n",
1993 );
1994 out.push_str("# TYPE oris_evolution_health gauge\n");
1995 out.push_str(&format!(
1996 "oris_evolution_health {}\n",
1997 u8::from(health.status == "ok")
1998 ));
1999 out
2000}
2001
2002fn count_recent_events(
2003 events: &[StoredEvolutionEvent],
2004 cutoff: DateTime<Utc>,
2005 predicate: impl Fn(&EvolutionEvent) -> bool,
2006) -> u64 {
2007 events
2008 .iter()
2009 .filter(|stored| {
2010 predicate(&stored.event)
2011 && parse_event_timestamp(&stored.timestamp)
2012 .map(|timestamp| timestamp >= cutoff)
2013 .unwrap_or(false)
2014 })
2015 .count() as u64
2016}
2017
2018fn parse_event_timestamp(raw: &str) -> Option<DateTime<Utc>> {
2019 DateTime::parse_from_rfc3339(raw)
2020 .ok()
2021 .map(|parsed| parsed.with_timezone(&Utc))
2022}
2023
2024fn is_replay_validation_failure(event: &EvolutionEvent) -> bool {
2025 matches!(
2026 event,
2027 EvolutionEvent::ValidationFailed {
2028 gene_id: Some(_),
2029 ..
2030 }
2031 )
2032}
2033
/// Divide as `f64`, treating a zero denominator as 0.0 instead of
/// producing NaN or infinity.
fn safe_ratio(numerator: u64, denominator: u64) -> f64 {
    match denominator {
        0 => 0.0,
        d => numerator as f64 / d as f64,
    }
}
2041
2042fn store_err(err: EvolutionError) -> EvoKernelError {
2043 EvoKernelError::Store(err.to_string())
2044}
2045
2046#[cfg(test)]
2047mod tests {
2048 use super::*;
2049 use oris_kernel::{
2050 AllowAllPolicy, InMemoryEventStore, KernelMode, KernelState, NoopActionExecutor,
2051 NoopStepFn, StateUpdatedOnlyReducer,
2052 };
2053 use serde::{Deserialize, Serialize};
2054
    /// Minimal kernel state used only to satisfy the `Kernel` type
    /// parameter in these tests; it carries no data.
    #[derive(Clone, Debug, Default, Serialize, Deserialize)]
    struct TestState;

    impl KernelState for TestState {
        // Fixed schema version; these tests never migrate state.
        fn version(&self) -> u32 {
            1
        }
    }
2063
2064 fn temp_workspace(name: &str) -> std::path::PathBuf {
2065 let root =
2066 std::env::temp_dir().join(format!("oris-evokernel-{name}-{}", std::process::id()));
2067 if root.exists() {
2068 fs::remove_dir_all(&root).unwrap();
2069 }
2070 fs::create_dir_all(root.join("src")).unwrap();
2071 fs::write(
2072 root.join("Cargo.toml"),
2073 "[package]\nname = \"sample\"\nversion = \"0.1.0\"\nedition = \"2021\"\n",
2074 )
2075 .unwrap();
2076 fs::write(root.join("Cargo.lock"), "# lock\n").unwrap();
2077 fs::write(root.join("src/lib.rs"), "pub fn demo() -> usize { 1 }\n").unwrap();
2078 root
2079 }
2080
    /// Build a kernel wired entirely with no-op/in-memory components so
    /// the EvoKernel tests exercise only the evolution layer.
    fn test_kernel() -> Arc<Kernel<TestState>> {
        Arc::new(Kernel::<TestState> {
            events: Box::new(InMemoryEventStore::new()),
            snaps: None,
            reducer: Box::new(StateUpdatedOnlyReducer),
            exec: Box::new(NoopActionExecutor),
            step: Box::new(NoopStepFn),
            policy: Box::new(AllowAllPolicy),
            effect_sink: None,
            mode: KernelMode::Normal,
        })
    }
2093
2094 fn lightweight_plan() -> ValidationPlan {
2095 ValidationPlan {
2096 profile: "test".into(),
2097 stages: vec![ValidationStage::Command {
2098 program: "git".into(),
2099 args: vec!["--version".into()],
2100 timeout_ms: 5_000,
2101 }],
2102 }
2103 }
2104
    /// A canonical low-risk mutation used across tests: a unified diff
    /// adding README.md, based on HEAD, with one "missing readme" signal.
    fn sample_mutation() -> PreparedMutation {
        prepare_mutation(
            MutationIntent {
                id: "mutation-1".into(),
                intent: "add README".into(),
                target: MutationTarget::Paths {
                    allow: vec!["README.md".into()],
                },
                expected_effect: "repo still builds".into(),
                risk: RiskLevel::Low,
                signals: vec!["missing readme".into()],
                spec_id: None,
            },
            // The leading `\` escapes the first newline so the payload
            // starts directly with the `diff --git` header.
            "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1 @@
+# sample
"
            .into(),
            Some("HEAD".into()),
        )
    }
2131
2132 fn base_sandbox_policy() -> SandboxPolicy {
2133 SandboxPolicy {
2134 allowed_programs: vec!["git".into()],
2135 max_duration_ms: 60_000,
2136 max_output_bytes: 1024 * 1024,
2137 denied_env_prefixes: Vec::new(),
2138 }
2139 }
2140
2141 fn command_validator() -> Arc<dyn Validator> {
2142 Arc::new(CommandValidator::new(base_sandbox_policy()))
2143 }
2144
2145 fn replay_input(signal: &str) -> SelectorInput {
2146 let rustc_version = std::process::Command::new("rustc")
2147 .arg("--version")
2148 .output()
2149 .ok()
2150 .filter(|output| output.status.success())
2151 .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
2152 .unwrap_or_else(|| "rustc unknown".into());
2153 SelectorInput {
2154 signals: vec![signal.into()],
2155 env: EnvFingerprint {
2156 rustc_version,
2157 cargo_lock_hash: compute_artifact_hash("# lock\n"),
2158 target_triple: format!(
2159 "{}-unknown-{}",
2160 std::env::consts::ARCH,
2161 std::env::consts::OS
2162 ),
2163 os: std::env::consts::OS.into(),
2164 },
2165 spec_id: None,
2166 limit: 1,
2167 }
2168 }
2169
2170 fn build_test_evo_with_store(
2171 name: &str,
2172 run_id: &str,
2173 validator: Arc<dyn Validator>,
2174 store: Arc<dyn EvolutionStore>,
2175 ) -> EvoKernel<TestState> {
2176 let workspace = temp_workspace(name);
2177 let sandbox: Arc<dyn Sandbox> = Arc::new(oris_sandbox::LocalProcessSandbox::new(
2178 run_id,
2179 &workspace,
2180 std::env::temp_dir(),
2181 ));
2182 EvoKernel::new(test_kernel(), sandbox, validator, store)
2183 .with_governor(Arc::new(DefaultGovernor::new(
2184 oris_governor::GovernorConfig {
2185 promote_after_successes: 1,
2186 ..Default::default()
2187 },
2188 )))
2189 .with_validation_plan(lightweight_plan())
2190 .with_sandbox_policy(base_sandbox_policy())
2191 }
2192
2193 fn build_test_evo(
2194 name: &str,
2195 run_id: &str,
2196 validator: Arc<dyn Validator>,
2197 ) -> (EvoKernel<TestState>, Arc<dyn EvolutionStore>) {
2198 let store_root = std::env::temp_dir().join(format!(
2199 "oris-evokernel-{name}-store-{}",
2200 std::process::id()
2201 ));
2202 if store_root.exists() {
2203 fs::remove_dir_all(&store_root).unwrap();
2204 }
2205 let store: Arc<dyn EvolutionStore> =
2206 Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
2207 let evo = build_test_evo_with_store(name, run_id, validator, store.clone());
2208 (evo, store)
2209 }
2210
2211 fn remote_publish_envelope(
2212 sender_id: &str,
2213 run_id: &str,
2214 gene_id: &str,
2215 capsule_id: &str,
2216 mutation_id: &str,
2217 signal: &str,
2218 file_name: &str,
2219 line: &str,
2220 ) -> EvolutionEnvelope {
2221 remote_publish_envelope_with_env(
2222 sender_id,
2223 run_id,
2224 gene_id,
2225 capsule_id,
2226 mutation_id,
2227 signal,
2228 file_name,
2229 line,
2230 replay_input(signal).env,
2231 )
2232 }
2233
2234 fn remote_publish_envelope_with_env(
2235 sender_id: &str,
2236 run_id: &str,
2237 gene_id: &str,
2238 capsule_id: &str,
2239 mutation_id: &str,
2240 signal: &str,
2241 file_name: &str,
2242 line: &str,
2243 env: EnvFingerprint,
2244 ) -> EvolutionEnvelope {
2245 let mutation = prepare_mutation(
2246 MutationIntent {
2247 id: mutation_id.into(),
2248 intent: format!("add {file_name}"),
2249 target: MutationTarget::Paths {
2250 allow: vec![file_name.into()],
2251 },
2252 expected_effect: "replay should still validate".into(),
2253 risk: RiskLevel::Low,
2254 signals: vec![signal.into()],
2255 spec_id: None,
2256 },
2257 format!(
2258 "\
2259diff --git a/{file_name} b/{file_name}
2260new file mode 100644
2261index 0000000..1111111
2262--- /dev/null
2263+++ b/{file_name}
2264@@ -0,0 +1 @@
2265+{line}
2266"
2267 ),
2268 Some("HEAD".into()),
2269 );
2270 let gene = Gene {
2271 id: gene_id.into(),
2272 signals: vec![signal.into()],
2273 strategy: vec![file_name.into()],
2274 validation: vec!["test".into()],
2275 state: AssetState::Promoted,
2276 };
2277 let capsule = Capsule {
2278 id: capsule_id.into(),
2279 gene_id: gene_id.into(),
2280 mutation_id: mutation_id.into(),
2281 run_id: run_id.into(),
2282 diff_hash: mutation.artifact.content_hash.clone(),
2283 confidence: 0.9,
2284 env,
2285 outcome: Outcome {
2286 success: true,
2287 validation_profile: "test".into(),
2288 validation_duration_ms: 1,
2289 changed_files: vec![file_name.into()],
2290 validator_hash: "validator-hash".into(),
2291 lines_changed: 1,
2292 replay_verified: false,
2293 },
2294 state: AssetState::Promoted,
2295 };
2296 EvolutionEnvelope::publish(
2297 sender_id,
2298 vec![
2299 NetworkAsset::EvolutionEvent {
2300 event: EvolutionEvent::MutationDeclared { mutation },
2301 },
2302 NetworkAsset::Gene { gene: gene.clone() },
2303 NetworkAsset::Capsule {
2304 capsule: capsule.clone(),
2305 },
2306 NetworkAsset::EvolutionEvent {
2307 event: EvolutionEvent::CapsuleReleased {
2308 capsule_id: capsule.id.clone(),
2309 state: AssetState::Promoted,
2310 },
2311 },
2312 ],
2313 )
2314 }
2315
2316 struct FixedValidator {
2317 success: bool,
2318 }
2319
2320 #[async_trait]
2321 impl Validator for FixedValidator {
2322 async fn run(
2323 &self,
2324 _receipt: &SandboxReceipt,
2325 plan: &ValidationPlan,
2326 ) -> Result<ValidationReport, ValidationError> {
2327 Ok(ValidationReport {
2328 success: self.success,
2329 duration_ms: 1,
2330 stages: Vec::new(),
2331 logs: if self.success {
2332 format!("{} ok", plan.profile)
2333 } else {
2334 format!("{} failed", plan.profile)
2335 },
2336 })
2337 }
2338 }
2339
2340 #[tokio::test]
2341 async fn command_validator_aggregates_stage_reports() {
2342 let workspace = temp_workspace("validator");
2343 let receipt = SandboxReceipt {
2344 mutation_id: "m".into(),
2345 workdir: workspace,
2346 applied: true,
2347 changed_files: Vec::new(),
2348 patch_hash: "hash".into(),
2349 stdout_log: std::env::temp_dir().join("stdout.log"),
2350 stderr_log: std::env::temp_dir().join("stderr.log"),
2351 };
2352 let validator = CommandValidator::new(SandboxPolicy {
2353 allowed_programs: vec!["git".into()],
2354 max_duration_ms: 1_000,
2355 max_output_bytes: 1024,
2356 denied_env_prefixes: Vec::new(),
2357 });
2358 let report = validator
2359 .run(
2360 &receipt,
2361 &ValidationPlan {
2362 profile: "test".into(),
2363 stages: vec![ValidationStage::Command {
2364 program: "git".into(),
2365 args: vec!["--version".into()],
2366 timeout_ms: 1_000,
2367 }],
2368 },
2369 )
2370 .await
2371 .unwrap();
2372 assert_eq!(report.stages.len(), 1);
2373 }
2374
2375 #[tokio::test]
2376 async fn capture_successful_mutation_appends_capsule() {
2377 let (evo, store) = build_test_evo("capture", "run-1", command_validator());
2378 let capsule = evo
2379 .capture_successful_mutation(&"run-1".into(), sample_mutation())
2380 .await
2381 .unwrap();
2382 let events = store.scan(1).unwrap();
2383 assert!(events
2384 .iter()
2385 .any(|stored| matches!(stored.event, EvolutionEvent::CapsuleCommitted { .. })));
2386 assert!(!capsule.id.is_empty());
2387 }
2388
2389 #[tokio::test]
2390 async fn replay_hit_records_capsule_reused() {
2391 let (evo, store) = build_test_evo("replay", "run-2", command_validator());
2392 let capsule = evo
2393 .capture_successful_mutation(&"run-2".into(), sample_mutation())
2394 .await
2395 .unwrap();
2396 let decision = evo
2397 .replay_or_fallback(replay_input("missing readme"))
2398 .await
2399 .unwrap();
2400 assert!(decision.used_capsule);
2401 assert_eq!(decision.capsule_id, Some(capsule.id));
2402 assert!(store
2403 .scan(1)
2404 .unwrap()
2405 .iter()
2406 .any(|stored| matches!(stored.event, EvolutionEvent::CapsuleReused { .. })));
2407 }
2408
2409 #[tokio::test]
2410 async fn metrics_snapshot_tracks_replay_promotion_and_revocation_signals() {
2411 let (evo, _) = build_test_evo("metrics", "run-metrics", command_validator());
2412 let capsule = evo
2413 .capture_successful_mutation(&"run-metrics".into(), sample_mutation())
2414 .await
2415 .unwrap();
2416 let decision = evo
2417 .replay_or_fallback(replay_input("missing readme"))
2418 .await
2419 .unwrap();
2420 assert!(decision.used_capsule);
2421
2422 evo.revoke_assets(&RevokeNotice {
2423 sender_id: "node-metrics".into(),
2424 asset_ids: vec![capsule.id.clone()],
2425 reason: "manual test revoke".into(),
2426 })
2427 .unwrap();
2428
2429 let snapshot = evo.metrics_snapshot().unwrap();
2430 assert_eq!(snapshot.replay_attempts_total, 1);
2431 assert_eq!(snapshot.replay_success_total, 1);
2432 assert_eq!(snapshot.replay_success_rate, 1.0);
2433 assert_eq!(snapshot.mutation_declared_total, 1);
2434 assert_eq!(snapshot.promoted_mutations_total, 1);
2435 assert_eq!(snapshot.promotion_ratio, 1.0);
2436 assert_eq!(snapshot.gene_revocations_total, 1);
2437 assert_eq!(snapshot.mutation_velocity_last_hour, 1);
2438 assert_eq!(snapshot.revoke_frequency_last_hour, 1);
2439 assert_eq!(snapshot.promoted_genes, 0);
2440 assert_eq!(snapshot.promoted_capsules, 0);
2441
2442 let rendered = evo.render_metrics_prometheus().unwrap();
2443 assert!(rendered.contains("oris_evolution_replay_success_rate 1.000000"));
2444 assert!(rendered.contains("oris_evolution_promotion_ratio 1.000000"));
2445 assert!(rendered.contains("oris_evolution_revoke_frequency_last_hour 1"));
2446 assert!(rendered.contains("oris_evolution_mutation_velocity_last_hour 1"));
2447 assert!(rendered.contains("oris_evolution_health 1"));
2448 }
2449
2450 #[tokio::test]
2451 async fn remote_replay_prefers_closest_environment_match() {
2452 let (evo, _) = build_test_evo("remote-env", "run-remote-env", command_validator());
2453 let input = replay_input("env-signal");
2454
2455 let envelope_a = remote_publish_envelope_with_env(
2456 "node-a",
2457 "run-remote-a",
2458 "gene-a",
2459 "capsule-a",
2460 "mutation-a",
2461 "env-signal",
2462 "A.md",
2463 "# from a",
2464 input.env.clone(),
2465 );
2466 let envelope_b = remote_publish_envelope_with_env(
2467 "node-b",
2468 "run-remote-b",
2469 "gene-b",
2470 "capsule-b",
2471 "mutation-b",
2472 "env-signal",
2473 "B.md",
2474 "# from b",
2475 EnvFingerprint {
2476 rustc_version: "old-rustc".into(),
2477 cargo_lock_hash: "other-lock".into(),
2478 target_triple: "aarch64-apple-darwin".into(),
2479 os: "linux".into(),
2480 },
2481 );
2482
2483 evo.import_remote_envelope(&envelope_a).unwrap();
2484 evo.import_remote_envelope(&envelope_b).unwrap();
2485
2486 let decision = evo.replay_or_fallback(input).await.unwrap();
2487
2488 assert!(decision.used_capsule);
2489 assert_eq!(decision.capsule_id, Some("capsule-a".into()));
2490 assert!(!decision.fallback_to_planner);
2491 }
2492
2493 #[tokio::test]
2494 async fn remote_capsule_stays_quarantined_until_first_successful_replay() {
2495 let (evo, store) = build_test_evo(
2496 "remote-quarantine",
2497 "run-remote-quarantine",
2498 command_validator(),
2499 );
2500 let envelope = remote_publish_envelope(
2501 "node-remote",
2502 "run-remote-quarantine",
2503 "gene-remote",
2504 "capsule-remote",
2505 "mutation-remote",
2506 "remote-signal",
2507 "REMOTE.md",
2508 "# from remote",
2509 );
2510
2511 evo.import_remote_envelope(&envelope).unwrap();
2512
2513 let before_replay = store.rebuild_projection().unwrap();
2514 let imported_capsule = before_replay
2515 .capsules
2516 .iter()
2517 .find(|capsule| capsule.id == "capsule-remote")
2518 .unwrap();
2519 assert_eq!(imported_capsule.state, AssetState::Quarantined);
2520
2521 let decision = evo
2522 .replay_or_fallback(replay_input("remote-signal"))
2523 .await
2524 .unwrap();
2525
2526 assert!(decision.used_capsule);
2527 assert_eq!(decision.capsule_id, Some("capsule-remote".into()));
2528
2529 let after_replay = store.rebuild_projection().unwrap();
2530 let released_capsule = after_replay
2531 .capsules
2532 .iter()
2533 .find(|capsule| capsule.id == "capsule-remote")
2534 .unwrap();
2535 assert_eq!(released_capsule.state, AssetState::Promoted);
2536 }
2537
2538 #[tokio::test]
2539 async fn insufficient_evu_blocks_publish_but_not_local_replay() {
2540 let (evo, _) = build_test_evo("stake-gate", "run-stake", command_validator());
2541 let capsule = evo
2542 .capture_successful_mutation(&"run-stake".into(), sample_mutation())
2543 .await
2544 .unwrap();
2545 let publish = evo.export_promoted_assets("node-local");
2546 assert!(matches!(publish, Err(EvoKernelError::Validation(_))));
2547
2548 let decision = evo
2549 .replay_or_fallback(replay_input("missing readme"))
2550 .await
2551 .unwrap();
2552 assert!(decision.used_capsule);
2553 assert_eq!(decision.capsule_id, Some(capsule.id));
2554 }
2555
2556 #[tokio::test]
2557 async fn second_replay_validation_failure_revokes_gene_immediately() {
2558 let (capturer, store) = build_test_evo("revoke-replay", "run-capture", command_validator());
2559 let capsule = capturer
2560 .capture_successful_mutation(&"run-capture".into(), sample_mutation())
2561 .await
2562 .unwrap();
2563
2564 let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
2565 let failing_replay = build_test_evo_with_store(
2566 "revoke-replay",
2567 "run-replay-fail",
2568 failing_validator,
2569 store.clone(),
2570 );
2571
2572 let first = failing_replay
2573 .replay_or_fallback(replay_input("missing readme"))
2574 .await
2575 .unwrap();
2576 let second = failing_replay
2577 .replay_or_fallback(replay_input("missing readme"))
2578 .await
2579 .unwrap();
2580
2581 assert!(!first.used_capsule);
2582 assert!(first.fallback_to_planner);
2583 assert!(!second.used_capsule);
2584 assert!(second.fallback_to_planner);
2585
2586 let projection = store.rebuild_projection().unwrap();
2587 let gene = projection
2588 .genes
2589 .iter()
2590 .find(|gene| gene.id == capsule.gene_id)
2591 .unwrap();
2592 assert_eq!(gene.state, AssetState::Promoted);
2593 let committed_capsule = projection
2594 .capsules
2595 .iter()
2596 .find(|current| current.id == capsule.id)
2597 .unwrap();
2598 assert_eq!(committed_capsule.state, AssetState::Promoted);
2599
2600 let events = store.scan(1).unwrap();
2601 assert_eq!(
2602 events
2603 .iter()
2604 .filter(|stored| {
2605 matches!(
2606 &stored.event,
2607 EvolutionEvent::ValidationFailed {
2608 gene_id: Some(gene_id),
2609 ..
2610 } if gene_id == &capsule.gene_id
2611 )
2612 })
2613 .count(),
2614 1
2615 );
2616 assert!(!events.iter().any(|stored| {
2617 matches!(
2618 &stored.event,
2619 EvolutionEvent::GeneRevoked { gene_id, .. } if gene_id == &capsule.gene_id
2620 )
2621 }));
2622
2623 let recovered = build_test_evo_with_store(
2624 "revoke-replay",
2625 "run-replay-check",
2626 command_validator(),
2627 store.clone(),
2628 );
2629 let after_revoke = recovered
2630 .replay_or_fallback(replay_input("missing readme"))
2631 .await
2632 .unwrap();
2633 assert!(!after_revoke.used_capsule);
2634 assert!(after_revoke.fallback_to_planner);
2635 assert!(after_revoke.reason.contains("below replay threshold"));
2636 }
2637
2638 #[tokio::test]
2639 async fn remote_reuse_success_rewards_publisher_and_biases_selection() {
2640 let ledger = Arc::new(Mutex::new(EvuLedger {
2641 accounts: vec![],
2642 reputations: vec![
2643 oris_economics::ReputationRecord {
2644 node_id: "node-a".into(),
2645 publish_success_rate: 0.4,
2646 validator_accuracy: 0.4,
2647 reuse_impact: 0,
2648 },
2649 oris_economics::ReputationRecord {
2650 node_id: "node-b".into(),
2651 publish_success_rate: 0.95,
2652 validator_accuracy: 0.95,
2653 reuse_impact: 8,
2654 },
2655 ],
2656 }));
2657 let (evo, _) = build_test_evo("remote-success", "run-remote", command_validator());
2658 let evo = evo.with_economics(ledger.clone());
2659
2660 let envelope_a = remote_publish_envelope(
2661 "node-a",
2662 "run-remote-a",
2663 "gene-a",
2664 "capsule-a",
2665 "mutation-a",
2666 "shared-signal",
2667 "A.md",
2668 "# from a",
2669 );
2670 let envelope_b = remote_publish_envelope(
2671 "node-b",
2672 "run-remote-b",
2673 "gene-b",
2674 "capsule-b",
2675 "mutation-b",
2676 "shared-signal",
2677 "B.md",
2678 "# from b",
2679 );
2680
2681 evo.import_remote_envelope(&envelope_a).unwrap();
2682 evo.import_remote_envelope(&envelope_b).unwrap();
2683
2684 let decision = evo
2685 .replay_or_fallback(replay_input("shared-signal"))
2686 .await
2687 .unwrap();
2688
2689 assert!(decision.used_capsule);
2690 assert_eq!(decision.capsule_id, Some("capsule-b".into()));
2691 let locked = ledger.lock().unwrap();
2692 let rewarded = locked
2693 .accounts
2694 .iter()
2695 .find(|item| item.node_id == "node-b")
2696 .unwrap();
2697 assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
2698 assert!(
2699 locked.selector_reputation_bias()["node-b"]
2700 > locked.selector_reputation_bias()["node-a"]
2701 );
2702 }
2703
2704 #[tokio::test]
2705 async fn remote_reuse_failure_penalizes_remote_reputation() {
2706 let ledger = Arc::new(Mutex::new(EvuLedger::default()));
2707 let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
2708 let (evo, _) = build_test_evo("remote-failure", "run-failure", failing_validator);
2709 let evo = evo.with_economics(ledger.clone());
2710
2711 let envelope = remote_publish_envelope(
2712 "node-remote",
2713 "run-remote-failed",
2714 "gene-remote",
2715 "capsule-remote",
2716 "mutation-remote",
2717 "failure-signal",
2718 "FAILED.md",
2719 "# from remote",
2720 );
2721 evo.import_remote_envelope(&envelope).unwrap();
2722
2723 let decision = evo
2724 .replay_or_fallback(replay_input("failure-signal"))
2725 .await
2726 .unwrap();
2727
2728 assert!(!decision.used_capsule);
2729 assert!(decision.fallback_to_planner);
2730
2731 let signal = evo.economics_signal("node-remote").unwrap();
2732 assert_eq!(signal.available_evu, 0);
2733 assert!(signal.publish_success_rate < 0.5);
2734 assert!(signal.validator_accuracy < 0.5);
2735 }
2736}