1use std::collections::{BTreeMap, BTreeSet};
4use std::fs;
5use std::path::{Path, PathBuf};
6use std::process::Command;
7use std::sync::{Arc, Mutex};
8
9use async_trait::async_trait;
10use chrono::{DateTime, Duration, Utc};
11use oris_agent_contract::{
12 AgentRole, BoundedTaskClass, CoordinationMessage, CoordinationPlan, CoordinationPrimitive,
13 CoordinationResult, CoordinationTask, ExecutionFeedback,
14 MutationProposal as AgentMutationProposal, ReplayFeedback, ReplayPlannerDirective,
15 SupervisedDevloopOutcome, SupervisedDevloopRequest, SupervisedDevloopStatus,
16};
17use oris_economics::{EconomicsSignal, EvuLedger, StakePolicy};
18use oris_evolution::{
19 compute_artifact_hash, decayed_replay_confidence, next_id, stable_hash_json, AssetState,
20 BlastRadius, CandidateSource, Capsule, CapsuleId, EnvFingerprint, EvolutionError,
21 EvolutionEvent, EvolutionProjection, EvolutionStore, Gene, GeneCandidate, MutationId,
22 PreparedMutation, Selector, SelectorInput, StoreBackedSelector, StoredEvolutionEvent,
23 TransitionReasonCode, ValidationSnapshot, MIN_REPLAY_CONFIDENCE,
24};
25use oris_evolution_network::{EvolutionEnvelope, NetworkAsset, SyncAudit};
26use oris_governor::{DefaultGovernor, Governor, GovernorDecision, GovernorInput};
27use oris_kernel::{Kernel, KernelState, RunId};
28use oris_sandbox::{
29 compute_blast_radius, execute_allowed_command, Sandbox, SandboxPolicy, SandboxReceipt,
30};
31use oris_spec::CompiledMutationPlan;
32use serde::{Deserialize, Serialize};
33use serde_json::Value;
34use thiserror::Error;
35
36pub use oris_evolution::{
37 default_store_root, ArtifactEncoding, AssetState as EvoAssetState,
38 BlastRadius as EvoBlastRadius, CandidateSource as EvoCandidateSource,
39 EnvFingerprint as EvoEnvFingerprint, EvolutionStore as EvoEvolutionStore, JsonlEvolutionStore,
40 MutationArtifact, MutationIntent, MutationTarget, Outcome, RiskLevel,
41 SelectorInput as EvoSelectorInput, TransitionReasonCode as EvoTransitionReasonCode,
42};
43pub use oris_evolution_network::{
44 FetchQuery, FetchResponse, MessageType, PublishRequest, RevokeNotice,
45};
46pub use oris_governor::{CoolingWindow, GovernorConfig, RevocationReason};
47pub use oris_sandbox::{LocalProcessSandbox, SandboxPolicy as EvoSandboxPolicy};
48pub use oris_spec::{SpecCompileError, SpecCompiler, SpecDocument};
49
/// A named validation profile together with the ordered stages it runs.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationPlan {
    /// Profile identifier (e.g. "oris-default").
    pub profile: String,
    /// Stages executed in order; `CommandValidator` stops at the first failure.
    pub stages: Vec<ValidationStage>,
}
55
56impl ValidationPlan {
57 pub fn oris_default() -> Self {
58 Self {
59 profile: "oris-default".into(),
60 stages: vec![
61 ValidationStage::Command {
62 program: "cargo".into(),
63 args: vec!["fmt".into(), "--all".into(), "--check".into()],
64 timeout_ms: 60_000,
65 },
66 ValidationStage::Command {
67 program: "cargo".into(),
68 args: vec!["check".into(), "--workspace".into()],
69 timeout_ms: 180_000,
70 },
71 ValidationStage::Command {
72 program: "cargo".into(),
73 args: vec![
74 "test".into(),
75 "-p".into(),
76 "oris-kernel".into(),
77 "-p".into(),
78 "oris-evolution".into(),
79 "-p".into(),
80 "oris-sandbox".into(),
81 "-p".into(),
82 "oris-evokernel".into(),
83 "--lib".into(),
84 ],
85 timeout_ms: 300_000,
86 },
87 ValidationStage::Command {
88 program: "cargo".into(),
89 args: vec![
90 "test".into(),
91 "-p".into(),
92 "oris-runtime".into(),
93 "--lib".into(),
94 ],
95 timeout_ms: 300_000,
96 },
97 ],
98 }
99 }
100}
101
/// A single validation step.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum ValidationStage {
    /// Run an external program with arguments, bounded by a timeout.
    Command {
        program: String,
        args: Vec<String>,
        // Per-stage timeout in milliseconds.
        timeout_ms: u64,
    },
}
110
/// Outcome of one validation stage.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationStageReport {
    /// Human-readable stage label ("program arg1 arg2 ...").
    pub stage: String,
    pub success: bool,
    /// `None` when the process could not be launched or was killed.
    pub exit_code: Option<i32>,
    pub duration_ms: u64,
    pub stdout: String,
    pub stderr: String,
}
120
/// Aggregate outcome of a full validation run across all stages.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ValidationReport {
    /// True only when every executed stage succeeded.
    pub success: bool,
    pub duration_ms: u64,
    pub stages: Vec<ValidationStageReport>,
    /// Concatenated stdout/stderr of all executed stages.
    pub logs: String,
}
128
/// Context fed into deterministic signal extraction for a mutation.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionInput {
    pub patch_diff: String,
    pub intent: String,
    pub expected_effect: String,
    /// Signals explicitly declared by the author; normalized and tokenized.
    pub declared_signals: Vec<String>,
    pub changed_files: Vec<String>,
    pub validation_success: bool,
    pub validation_logs: String,
    pub stage_outputs: Vec<String>,
}
140
/// Result of deterministic signal extraction.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SignalExtractionOutput {
    /// Sorted, deduplicated signal strings (capped at 32 entries).
    pub values: Vec<String>,
    /// Stable hash over `values`, for cheap equality comparison.
    pub hash: String,
}
146
/// Template for seeding the evolution store with an initial gene/capsule.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct SeedTemplate {
    pub id: String,
    pub intent: String,
    pub signals: Vec<String>,
    /// Diff applied when the seed is replayed.
    pub diff_payload: String,
    /// Validation profile name the seed was validated against.
    pub validation_profile: String,
}
155
/// Summary of a bootstrap/seeding pass over the evolution store.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct BootstrapReport {
    /// Whether any seeding work was performed.
    pub seeded: bool,
    pub genes_added: usize,
    pub capsules_added: usize,
}
162
// NOTE(review): retention cap for reported experiences; the consuming code is
// not visible in this chunk — confirm intended semantics at the use site.
const REPORTED_EXPERIENCE_RETENTION_LIMIT: usize = 3;
164
165impl ValidationReport {
166 pub fn to_snapshot(&self, profile: &str) -> ValidationSnapshot {
167 ValidationSnapshot {
168 success: self.success,
169 profile: profile.to_string(),
170 duration_ms: self.duration_ms,
171 summary: if self.success {
172 "validation passed".into()
173 } else {
174 "validation failed".into()
175 },
176 }
177 }
178}
179
180pub fn extract_deterministic_signals(input: &SignalExtractionInput) -> SignalExtractionOutput {
181 let mut signals = BTreeSet::new();
182
183 for declared in &input.declared_signals {
184 if let Some(phrase) = normalize_signal_phrase(declared) {
185 signals.insert(phrase);
186 }
187 extend_signal_tokens(&mut signals, declared);
188 }
189
190 for text in [
191 input.patch_diff.as_str(),
192 input.intent.as_str(),
193 input.expected_effect.as_str(),
194 input.validation_logs.as_str(),
195 ] {
196 extend_signal_tokens(&mut signals, text);
197 }
198
199 for changed_file in &input.changed_files {
200 extend_signal_tokens(&mut signals, changed_file);
201 }
202
203 for stage_output in &input.stage_outputs {
204 extend_signal_tokens(&mut signals, stage_output);
205 }
206
207 signals.insert(if input.validation_success {
208 "validation passed".into()
209 } else {
210 "validation failed".into()
211 });
212
213 let values = signals.into_iter().take(32).collect::<Vec<_>>();
214 let hash =
215 stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
216 SignalExtractionOutput { values, hash }
217}
218
/// Errors surfaced by a [`Validator`] run.
#[derive(Debug, Error)]
pub enum ValidationError {
    /// The validation harness itself failed to execute (not a stage failure).
    #[error("validation execution failed: {0}")]
    Execution(String),
}
224
/// Runs a validation plan against a sandboxed working copy.
#[async_trait]
pub trait Validator: Send + Sync {
    /// Executes the stages of `plan` inside the working directory described
    /// by `receipt`, returning an aggregate report. Stage failures are
    /// reported in the `ValidationReport`; `Err` is reserved for harness
    /// failures.
    async fn run(
        &self,
        receipt: &SandboxReceipt,
        plan: &ValidationPlan,
    ) -> Result<ValidationReport, ValidationError>;
}
233
/// [`Validator`] that runs each stage as an external command permitted by a
/// sandbox policy.
pub struct CommandValidator {
    // Policy consulted by `execute_allowed_command` for every stage.
    policy: SandboxPolicy,
}
237
238impl CommandValidator {
239 pub fn new(policy: SandboxPolicy) -> Self {
240 Self { policy }
241 }
242}
243
244#[async_trait]
245impl Validator for CommandValidator {
246 async fn run(
247 &self,
248 receipt: &SandboxReceipt,
249 plan: &ValidationPlan,
250 ) -> Result<ValidationReport, ValidationError> {
251 let started = std::time::Instant::now();
252 let mut stages = Vec::new();
253 let mut success = true;
254 let mut logs = String::new();
255
256 for stage in &plan.stages {
257 match stage {
258 ValidationStage::Command {
259 program,
260 args,
261 timeout_ms,
262 } => {
263 let result = execute_allowed_command(
264 &self.policy,
265 &receipt.workdir,
266 program,
267 args,
268 *timeout_ms,
269 )
270 .await;
271 let report = match result {
272 Ok(output) => ValidationStageReport {
273 stage: format!("{program} {}", args.join(" ")),
274 success: output.success,
275 exit_code: output.exit_code,
276 duration_ms: output.duration_ms,
277 stdout: output.stdout,
278 stderr: output.stderr,
279 },
280 Err(err) => ValidationStageReport {
281 stage: format!("{program} {}", args.join(" ")),
282 success: false,
283 exit_code: None,
284 duration_ms: 0,
285 stdout: String::new(),
286 stderr: err.to_string(),
287 },
288 };
289 if !report.success {
290 success = false;
291 }
292 if !report.stdout.is_empty() {
293 logs.push_str(&report.stdout);
294 logs.push('\n');
295 }
296 if !report.stderr.is_empty() {
297 logs.push_str(&report.stderr);
298 logs.push('\n');
299 }
300 stages.push(report);
301 if !success {
302 break;
303 }
304 }
305 }
306 }
307
308 Ok(ValidationReport {
309 success,
310 duration_ms: started.elapsed().as_millis() as u64,
311 stages,
312 logs,
313 })
314 }
315}
316
/// Outcome of a replay attempt.
#[derive(Clone, Debug)]
pub struct ReplayDecision {
    /// True when a stored capsule was successfully replayed.
    pub used_capsule: bool,
    /// The capsule considered, if any (set even for some fallback outcomes).
    pub capsule_id: Option<CapsuleId>,
    /// True when the caller should invoke the planner instead.
    pub fallback_to_planner: bool,
    /// Human-readable explanation of the decision.
    pub reason: String,
}
324
/// Replay metrics aggregated per bounded task class.
// NOTE(review): producers/consumers of these counters are outside this chunk.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct ReplayTaskClassMetrics {
    pub task_class_id: String,
    pub task_label: String,
    pub replay_success_total: u64,
    pub reasoning_steps_avoided_total: u64,
}
332
/// Scheduling state of a coordination task relative to its dependencies.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum CoordinationTaskState {
    /// All prerequisites satisfied; the task may run now.
    Ready,
    /// Has incomplete (but not failed/skipped) dependencies.
    Waiting,
    /// A dependency failed or was skipped.
    BlockedByFailure,
    /// Prerequisites can never be satisfied (e.g. unknown dependency id or a
    /// role rule that cannot hold).
    PermanentlyBlocked,
}
340
/// Stateless scheduler that drives a `CoordinationPlan` to completion,
/// simulating task execution and recording a message trace.
#[derive(Clone, Debug, Default)]
pub struct MultiAgentCoordinator;
343
344impl MultiAgentCoordinator {
    /// Creates a coordinator; the type carries no state.
    pub fn new() -> Self {
        Self
    }
348
    /// Executes `plan`'s tasks according to its coordination primitive and
    /// returns the completion/failure order plus the full message trace
    /// (handoffs, retries, skips, blocks).
    ///
    /// Failures are simulated from task descriptions (see
    /// `simulate_task_failure`); retries are bounded by `plan.max_retries`.
    /// `timeout_ms` is reported in the summary only — no timing is enforced
    /// here.
    pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
        let primitive = plan.primitive.clone();
        let root_goal = plan.root_goal.clone();
        let timeout_ms = plan.timeout_ms;
        let max_retries = plan.max_retries;
        let mut tasks = BTreeMap::new();
        // First task wins on duplicate ids.
        for task in plan.tasks {
            tasks.entry(task.id.clone()).or_insert(task);
        }

        let mut pending = tasks.keys().cloned().collect::<BTreeSet<_>>();
        let mut completed = BTreeSet::new();
        let mut failed = BTreeSet::new();
        // *_order vectors preserve insertion sequence for the result; the
        // sets give O(log n) membership checks.
        let mut completed_order = Vec::new();
        let mut failed_order = Vec::new();
        let mut skipped = BTreeSet::new();
        // Per-task failure counts for retry accounting.
        let mut attempts = BTreeMap::new();
        let mut messages = Vec::new();

        loop {
            // Conditional coordination silently skips tasks whose dependency
            // chain already failed, instead of marking them failed.
            if matches!(primitive, CoordinationPrimitive::Conditional) {
                self.apply_conditional_skips(
                    &tasks,
                    &mut pending,
                    &completed,
                    &failed,
                    &mut skipped,
                    &mut messages,
                );
            }

            let mut ready = self.ready_task_ids(&tasks, &pending, &completed, &failed, &skipped);
            if ready.is_empty() {
                break;
            }
            // Sequential coordination runs exactly one ready task per round.
            if matches!(primitive, CoordinationPrimitive::Sequential) {
                ready.truncate(1);
            }

            for task_id in ready {
                let Some(task) = tasks.get(&task_id) else {
                    continue;
                };
                // The task may have left `pending` earlier in this round.
                if !pending.contains(&task_id) {
                    continue;
                }
                self.record_handoff_messages(task, &tasks, &completed, &failed, &mut messages);

                let prior_failures = attempts.get(&task_id).copied().unwrap_or(0);
                if Self::simulate_task_failure(task, prior_failures) {
                    let failure_count = prior_failures + 1;
                    attempts.insert(task_id.clone(), failure_count);
                    let will_retry = failure_count <= max_retries;
                    messages.push(CoordinationMessage {
                        from_role: task.role.clone(),
                        to_role: task.role.clone(),
                        task_id: task_id.clone(),
                        content: if will_retry {
                            format!("task {task_id} failed on attempt {failure_count} and will retry")
                        } else {
                            format!(
                                "task {task_id} failed on attempt {failure_count} and exhausted retries"
                            )
                        },
                    });
                    // A retrying task stays in `pending` for the next round.
                    if !will_retry {
                        pending.remove(&task_id);
                        if failed.insert(task_id.clone()) {
                            failed_order.push(task_id);
                        }
                    }
                    continue;
                }

                pending.remove(&task_id);
                if completed.insert(task_id.clone()) {
                    completed_order.push(task_id);
                }
            }
        }

        // Whatever is still pending can never run; classify it for the
        // message trace and count it as failed.
        let blocked_ids = pending.into_iter().collect::<Vec<_>>();
        for task_id in blocked_ids {
            let Some(task) = tasks.get(&task_id) else {
                continue;
            };
            let state = self.classify_task(task, &tasks, &completed, &failed, &skipped);
            let content = match state {
                CoordinationTaskState::BlockedByFailure => {
                    format!("task {task_id} blocked by failed dependencies")
                }
                CoordinationTaskState::PermanentlyBlocked => {
                    format!("task {task_id} has invalid coordination prerequisites")
                }
                CoordinationTaskState::Waiting => {
                    format!("task {task_id} has unresolved dependencies")
                }
                CoordinationTaskState::Ready => {
                    format!("task {task_id} was left pending unexpectedly")
                }
            };
            messages.push(CoordinationMessage {
                from_role: task.role.clone(),
                to_role: task.role.clone(),
                task_id: task_id.clone(),
                content,
            });
            if failed.insert(task_id.clone()) {
                failed_order.push(task_id);
            }
        }

        CoordinationResult {
            completed_tasks: completed_order,
            failed_tasks: failed_order,
            messages,
            summary: format!(
                "goal '{}' completed {} tasks, failed {}, skipped {} using {:?} coordination (timeout={}ms, max_retries={})",
                root_goal,
                completed.len(),
                failed.len(),
                skipped.len(),
                primitive,
                timeout_ms,
                max_retries
            ),
        }
    }
477
478 fn ready_task_ids(
479 &self,
480 tasks: &BTreeMap<String, CoordinationTask>,
481 pending: &BTreeSet<String>,
482 completed: &BTreeSet<String>,
483 failed: &BTreeSet<String>,
484 skipped: &BTreeSet<String>,
485 ) -> Vec<String> {
486 pending
487 .iter()
488 .filter_map(|task_id| {
489 let task = tasks.get(task_id)?;
490 (self.classify_task(task, tasks, completed, failed, skipped)
491 == CoordinationTaskState::Ready)
492 .then(|| task_id.clone())
493 })
494 .collect()
495 }
496
497 fn apply_conditional_skips(
498 &self,
499 tasks: &BTreeMap<String, CoordinationTask>,
500 pending: &mut BTreeSet<String>,
501 completed: &BTreeSet<String>,
502 failed: &BTreeSet<String>,
503 skipped: &mut BTreeSet<String>,
504 messages: &mut Vec<CoordinationMessage>,
505 ) {
506 let skip_ids = pending
507 .iter()
508 .filter_map(|task_id| {
509 let task = tasks.get(task_id)?;
510 (self.classify_task(task, tasks, completed, failed, skipped)
511 == CoordinationTaskState::BlockedByFailure)
512 .then(|| task_id.clone())
513 })
514 .collect::<Vec<_>>();
515
516 for task_id in skip_ids {
517 let Some(task) = tasks.get(&task_id) else {
518 continue;
519 };
520 pending.remove(&task_id);
521 skipped.insert(task_id.clone());
522 messages.push(CoordinationMessage {
523 from_role: task.role.clone(),
524 to_role: task.role.clone(),
525 task_id: task_id.clone(),
526 content: format!("task {task_id} skipped due to failed dependency chain"),
527 });
528 }
529 }
530
    /// Determines the scheduling state of `task` using role-specific
    /// dependency rules:
    ///
    /// - Planner/Coder: ready once every dependency completed; blocked if any
    ///   failed or was skipped; permanently blocked on an unknown dependency.
    /// - Repair: requires at least one Coder dependency that FAILED — repair
    ///   is only meaningful after a coder failure.
    /// - Optimizer: requires at least one Coder/Repair dependency that
    ///   COMPLETED; implementation-only failures block it.
    fn classify_task(
        &self,
        task: &CoordinationTask,
        tasks: &BTreeMap<String, CoordinationTask>,
        completed: &BTreeSet<String>,
        failed: &BTreeSet<String>,
        skipped: &BTreeSet<String>,
    ) -> CoordinationTaskState {
        match task.role {
            AgentRole::Planner | AgentRole::Coder => {
                let mut waiting = false;
                for dependency_id in &task.depends_on {
                    if !tasks.contains_key(dependency_id) {
                        return CoordinationTaskState::PermanentlyBlocked;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if waiting {
                    CoordinationTaskState::Waiting
                } else {
                    CoordinationTaskState::Ready
                }
            }
            AgentRole::Repair => {
                let mut waiting = false;
                let mut has_coder_dependency = false;
                let mut has_failed_coder = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_coder = matches!(dependency.role, AgentRole::Coder);
                    if is_coder {
                        has_coder_dependency = true;
                    }
                    if skipped.contains(dependency_id) {
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if failed.contains(dependency_id) {
                        // A failed coder is the trigger for repair; any other
                        // failed dependency blocks it.
                        if is_coder {
                            has_failed_coder = true;
                        } else {
                            return CoordinationTaskState::BlockedByFailure;
                        }
                        continue;
                    }
                    if !completed.contains(dependency_id) {
                        waiting = true;
                    }
                }
                if !has_coder_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_failed_coder {
                    CoordinationTaskState::Ready
                } else {
                    // All coder dependencies succeeded: nothing to repair.
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
            AgentRole::Optimizer => {
                let mut waiting = false;
                let mut has_impl_dependency = false;
                let mut has_completed_impl = false;
                let mut has_failed_impl = false;
                for dependency_id in &task.depends_on {
                    let Some(dependency) = tasks.get(dependency_id) else {
                        return CoordinationTaskState::PermanentlyBlocked;
                    };
                    let is_impl = matches!(dependency.role, AgentRole::Coder | AgentRole::Repair);
                    if is_impl {
                        has_impl_dependency = true;
                    }
                    if skipped.contains(dependency_id) || failed.contains(dependency_id) {
                        // Failed implementation work may still be optimizable
                        // if a sibling implementation completed.
                        if is_impl {
                            has_failed_impl = true;
                            continue;
                        }
                        return CoordinationTaskState::BlockedByFailure;
                    }
                    if completed.contains(dependency_id) {
                        if is_impl {
                            has_completed_impl = true;
                        }
                        continue;
                    }
                    waiting = true;
                }
                if !has_impl_dependency {
                    CoordinationTaskState::PermanentlyBlocked
                } else if waiting {
                    CoordinationTaskState::Waiting
                } else if has_completed_impl {
                    CoordinationTaskState::Ready
                } else if has_failed_impl {
                    CoordinationTaskState::BlockedByFailure
                } else {
                    CoordinationTaskState::PermanentlyBlocked
                }
            }
        }
    }
638
639 fn record_handoff_messages(
640 &self,
641 task: &CoordinationTask,
642 tasks: &BTreeMap<String, CoordinationTask>,
643 completed: &BTreeSet<String>,
644 failed: &BTreeSet<String>,
645 messages: &mut Vec<CoordinationMessage>,
646 ) {
647 let mut dependency_ids = task.depends_on.clone();
648 dependency_ids.sort();
649 dependency_ids.dedup();
650
651 for dependency_id in dependency_ids {
652 let Some(dependency) = tasks.get(&dependency_id) else {
653 continue;
654 };
655 if completed.contains(&dependency_id) {
656 messages.push(CoordinationMessage {
657 from_role: dependency.role.clone(),
658 to_role: task.role.clone(),
659 task_id: task.id.clone(),
660 content: format!("handoff from {dependency_id} to {}", task.id),
661 });
662 } else if failed.contains(&dependency_id) {
663 messages.push(CoordinationMessage {
664 from_role: dependency.role.clone(),
665 to_role: task.role.clone(),
666 task_id: task.id.clone(),
667 content: format!("failed dependency {dependency_id} routed to {}", task.id),
668 });
669 }
670 }
671 }
672
673 fn simulate_task_failure(task: &CoordinationTask, prior_failures: u32) -> bool {
674 let normalized = task.description.to_ascii_lowercase();
675 normalized.contains("force-fail")
676 || (normalized.contains("fail-once") && prior_failures == 0)
677 }
678}
679
/// Errors surfaced by replay execution.
#[derive(Debug, Error)]
pub enum ReplayError {
    /// The evolution store rejected a read or an event append.
    #[error("store error: {0}")]
    Store(String),
    /// The sandbox failed outside the normal apply-failure fallback path.
    #[error("sandbox error: {0}")]
    Sandbox(String),
    /// The validation harness itself failed (not a failed validation run).
    #[error("validation error: {0}")]
    Validation(String),
}
689
/// Attempts to satisfy a task by replaying a stored capsule instead of
/// invoking the planner.
#[async_trait]
pub trait ReplayExecutor: Send + Sync {
    /// Tries to find and replay a matching capsule. A `ReplayDecision` is
    /// returned even when replay is not possible (signalling fallback to the
    /// planner); `Err` is reserved for infrastructure failures.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError>;

    /// Like `try_replay`, attributing the replay to `run_id`. The default
    /// implementation ignores the run id.
    async fn try_replay_for_run(
        &self,
        run_id: &RunId,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        let _ = run_id;
        self.try_replay(input, policy, validation).await
    }
}
710
/// [`ReplayExecutor`] backed by the evolution store: selects a candidate
/// gene, applies its capsule's mutation in a sandbox, validates, and records
/// the outcome as store events.
pub struct StoreReplayExecutor {
    pub sandbox: Arc<dyn Sandbox>,
    pub validator: Arc<dyn Validator>,
    pub store: Arc<dyn EvolutionStore>,
    pub selector: Arc<dyn Selector>,
    /// Decides revocation after replay validation failures.
    pub governor: Arc<dyn Governor>,
    /// Optional EVU ledger; enables reputation-biased ranking and reuse
    /// settlement against remote publishers.
    pub economics: Option<Arc<Mutex<EvuLedger>>>,
    /// Optional capsule-id → publisher-id cache for remote attribution.
    pub remote_publishers: Option<Arc<Mutex<BTreeMap<String, String>>>>,
    pub stake_policy: StakePolicy,
}
721
/// Result of candidate collection: the ranked candidates plus whether they
/// came from an exact-match (cold-start) lookup rather than the selector.
struct ReplayCandidates {
    candidates: Vec<GeneCandidate>,
    exact_match: bool,
}
726
#[async_trait]
impl ReplayExecutor for StoreReplayExecutor {
    /// Replay without run attribution; delegates to `try_replay_inner`.
    async fn try_replay(
        &self,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        self.try_replay_inner(None, input, policy, validation).await
    }

    /// Replay attributed to `run_id`; the id is recorded on the resulting
    /// `CapsuleReused` event.
    async fn try_replay_for_run(
        &self,
        run_id: &RunId,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        self.try_replay_inner(Some(run_id), input, policy, validation)
            .await
    }
}
749
750impl StoreReplayExecutor {
751 fn collect_replay_candidates(&self, input: &SelectorInput) -> ReplayCandidates {
752 self.apply_confidence_revalidation();
753 let mut selector_input = input.clone();
754 if self.economics.is_some() && self.remote_publishers.is_some() {
755 selector_input.limit = selector_input.limit.max(4);
756 }
757 let mut candidates = self.selector.select(&selector_input);
758 self.rerank_with_reputation_bias(&mut candidates);
759 let mut exact_match = false;
760 if candidates.is_empty() {
761 let mut exact_candidates = exact_match_candidates(self.store.as_ref(), input);
762 self.rerank_with_reputation_bias(&mut exact_candidates);
763 if !exact_candidates.is_empty() {
764 candidates = exact_candidates;
765 exact_match = true;
766 }
767 }
768 if candidates.is_empty() {
769 let mut remote_candidates =
770 quarantined_remote_exact_match_candidates(self.store.as_ref(), input);
771 self.rerank_with_reputation_bias(&mut remote_candidates);
772 if !remote_candidates.is_empty() {
773 candidates = remote_candidates;
774 exact_match = true;
775 }
776 }
777 candidates.truncate(input.limit.max(1));
778 ReplayCandidates {
779 candidates,
780 exact_match,
781 }
782 }
783
    /// Quarantines promoted genes whose replay confidence has decayed past
    /// the revalidation threshold, along with their capsules.
    ///
    /// Best-effort: projection and append failures are swallowed so replay
    /// selection can proceed even when bookkeeping fails.
    fn apply_confidence_revalidation(&self) {
        let Ok(projection) = projection_snapshot(self.store.as_ref()) else {
            return;
        };
        for target in stale_replay_revalidation_targets(&projection, Utc::now()) {
            let reason = format!(
                "confidence decayed to {:.3}; revalidation required before replay",
                target.decayed_confidence
            );
            // If the gene-level transition cannot be persisted, skip this
            // gene's capsules too rather than quarantining capsules for a
            // state change that was never recorded.
            if self
                .store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: target.gene_id.clone(),
                    state: AssetState::Quarantined,
                    reason: reason.clone(),
                    reason_code: TransitionReasonCode::RevalidationConfidenceDecay,
                })
                .is_err()
            {
                continue;
            }
            for capsule_id in target.capsule_ids {
                // Stop at the first failed capsule append for this gene.
                if self
                    .store
                    .append_event(EvolutionEvent::CapsuleQuarantined { capsule_id })
                    .is_err()
                {
                    break;
                }
            }
        }
    }
816
    /// Core replay flow shared by `try_replay` and `try_replay_for_run`.
    ///
    /// Steps: collect candidates → score gate → apply the best capsule's
    /// mutation in the sandbox → validate → record promotion/reuse events and
    /// settle reuse economics. All "cannot replay" outcomes return an `Ok`
    /// fallback decision; `Err` is reserved for store/validator failures.
    async fn try_replay_inner(
        &self,
        replay_run_id: Option<&RunId>,
        input: &SelectorInput,
        policy: &SandboxPolicy,
        validation: &ValidationPlan,
    ) -> Result<ReplayDecision, ReplayError> {
        let ReplayCandidates {
            candidates,
            exact_match,
        } = self.collect_replay_candidates(input);
        let Some(best) = candidates.into_iter().next() else {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "no matching gene".into(),
            });
        };
        // Exact-match (cold-start) candidates bypass the score threshold.
        if !exact_match && best.score < 0.82 {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: format!("best gene score {:.3} below replay threshold", best.score),
            });
        }

        let Some(capsule) = best.capsules.first().cloned() else {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "candidate gene has no capsule".into(),
            });
        };
        // A known publisher means the capsule arrived via sync; reuse
        // outcomes are settled against that publisher below.
        let remote_publisher = self.publisher_for_capsule(&capsule.id);

        let Some(mutation) = find_declared_mutation(self.store.as_ref(), &capsule.mutation_id)
            .map_err(|err| ReplayError::Store(err.to_string()))?
        else {
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: None,
                fallback_to_planner: true,
                reason: "mutation payload missing from store".into(),
            });
        };

        let receipt = match self.sandbox.apply(&mutation, policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                // Apply failure counts as an unsuccessful reuse.
                self.record_reuse_settlement(remote_publisher.as_deref(), false);
                return Ok(ReplayDecision {
                    used_capsule: false,
                    capsule_id: Some(capsule.id.clone()),
                    fallback_to_planner: true,
                    reason: format!("replay patch apply failed: {err}"),
                });
            }
        };

        let report = self
            .validator
            .run(&receipt, validation)
            .await
            .map_err(|err| ReplayError::Validation(err.to_string()))?;
        if !report.success {
            self.record_replay_validation_failure(&best, &capsule, validation, &report)?;
            self.record_reuse_settlement(remote_publisher.as_deref(), false);
            return Ok(ReplayDecision {
                used_capsule: false,
                capsule_id: Some(capsule.id.clone()),
                fallback_to_planner: true,
                reason: "replay validation failed".into(),
            });
        }

        // A quarantined capsule that just validated locally is released, and
        // its (possibly quarantined) gene promoted.
        if matches!(capsule.state, AssetState::Quarantined) {
            self.store
                .append_event(EvolutionEvent::ValidationPassed {
                    mutation_id: capsule.mutation_id.clone(),
                    report: report.to_snapshot(&validation.profile),
                    gene_id: Some(best.gene.id.clone()),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            if matches!(best.gene.state, AssetState::Quarantined) {
                self.store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: best.gene.id.clone(),
                        state: AssetState::Promoted,
                        reason: "remote asset locally validated via replay".into(),
                        reason_code: TransitionReasonCode::PromotionRemoteReplayValidated,
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
                self.store
                    .append_event(EvolutionEvent::GenePromoted {
                        gene_id: best.gene.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
            self.store
                .append_event(EvolutionEvent::CapsuleReleased {
                    capsule_id: capsule.id.clone(),
                    state: AssetState::Promoted,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
        }

        self.store
            .append_event(EvolutionEvent::CapsuleReused {
                capsule_id: capsule.id.clone(),
                gene_id: capsule.gene_id.clone(),
                run_id: capsule.run_id.clone(),
                replay_run_id: replay_run_id.cloned(),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        self.record_reuse_settlement(remote_publisher.as_deref(), true);

        Ok(ReplayDecision {
            used_capsule: true,
            capsule_id: Some(capsule.id),
            fallback_to_planner: false,
            reason: if exact_match {
                "replayed via cold-start lookup".into()
            } else {
                "replayed via selector".into()
            },
        })
    }
947
    /// Re-sorts candidates by reputation-adjusted score.
    ///
    /// No-op unless an economics ledger is configured, it reports a non-empty
    /// reputation bias, and publisher attribution is known for at least one
    /// candidate capsule.
    fn rerank_with_reputation_bias(&self, candidates: &mut [GeneCandidate]) {
        let Some(ledger) = self.economics.as_ref() else {
            return;
        };
        // A poisoned ledger lock degrades to "no bias" rather than panicking.
        let reputation_bias = ledger
            .lock()
            .ok()
            .map(|locked| locked.selector_reputation_bias())
            .unwrap_or_default();
        if reputation_bias.is_empty() {
            return;
        }
        // Attribution is resolved from each candidate's first capsule only —
        // the same capsule replay would use.
        let required_assets = candidates
            .iter()
            .filter_map(|candidate| {
                candidate
                    .capsules
                    .first()
                    .map(|capsule| capsule.id.as_str())
            })
            .collect::<Vec<_>>();
        let publisher_map = self.remote_publishers_snapshot(&required_assets);
        if publisher_map.is_empty() {
            return;
        }
        // Descending effective score (note `right` vs `left`), with
        // ascending gene id as a deterministic tie-break.
        candidates.sort_by(|left, right| {
            effective_candidate_score(right, &publisher_map, &reputation_bias)
                .partial_cmp(&effective_candidate_score(
                    left,
                    &publisher_map,
                    &reputation_bias,
                ))
                .unwrap_or(std::cmp::Ordering::Equal)
                .then_with(|| left.gene.id.cmp(&right.gene.id))
        });
    }
984
985 fn publisher_for_capsule(&self, capsule_id: &str) -> Option<String> {
986 self.remote_publishers_snapshot(&[capsule_id])
987 .get(capsule_id)
988 .cloned()
989 }
990
    /// Returns a capsule-id → publisher-id map covering `required_assets`.
    ///
    /// The in-memory cache is used as-is when it already covers every
    /// requested asset; otherwise persisted attributions from the store are
    /// merged in (existing cache entries win) and written back to the cache
    /// best-effort.
    fn remote_publishers_snapshot(&self, required_assets: &[&str]) -> BTreeMap<String, String> {
        let cached = self
            .remote_publishers
            .as_ref()
            .and_then(|remote_publishers| {
                remote_publishers.lock().ok().map(|locked| locked.clone())
            })
            .unwrap_or_default();
        if !cached.is_empty()
            && required_assets
                .iter()
                .all(|asset_id| cached.contains_key(*asset_id))
        {
            return cached;
        }

        let persisted = remote_publishers_by_asset_from_store(self.store.as_ref());
        if persisted.is_empty() {
            return cached;
        }

        let mut merged = cached;
        for (asset_id, sender_id) in persisted {
            merged.entry(asset_id).or_insert(sender_id);
        }

        // Refresh the shared cache with the merged view (best effort; a
        // poisoned lock is simply skipped).
        if let Some(remote_publishers) = self.remote_publishers.as_ref() {
            if let Ok(mut locked) = remote_publishers.lock() {
                for (asset_id, sender_id) in &merged {
                    locked.entry(asset_id.clone()).or_insert(sender_id.clone());
                }
            }
        }

        merged
    }
1027
1028 fn record_reuse_settlement(&self, publisher_id: Option<&str>, success: bool) {
1029 let Some(publisher_id) = publisher_id else {
1030 return;
1031 };
1032 let Some(ledger) = self.economics.as_ref() else {
1033 return;
1034 };
1035 if let Ok(mut locked) = ledger.lock() {
1036 locked.settle_remote_reuse(publisher_id, success, &self.stake_policy);
1037 }
1038 }
1039
    /// Persists a replay validation failure and consults the governor, which
    /// may revoke the gene (quarantining every capsule derived from it).
    fn record_replay_validation_failure(
        &self,
        best: &GeneCandidate,
        capsule: &Capsule,
        validation: &ValidationPlan,
        report: &ValidationReport,
    ) -> Result<(), ReplayError> {
        let projection = projection_snapshot(self.store.as_ref())
            .map_err(|err| ReplayError::Store(err.to_string()))?;
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            Self::confidence_context(&projection, &best.gene.id);

        self.store
            .append_event(EvolutionEvent::ValidationFailed {
                mutation_id: capsule.mutation_id.clone(),
                report: report.to_snapshot(&validation.profile),
                gene_id: Some(best.gene.id.clone()),
            })
            .map_err(|err| ReplayError::Store(err.to_string()))?;

        let replay_failures = self.replay_failure_count(&best.gene.id)?;
        let governor_decision = self.governor.evaluate(GovernorInput {
            // Publisher attribution implies the capsule was synced from a
            // remote peer.
            candidate_source: if self.publisher_for_capsule(&capsule.id).is_some() {
                CandidateSource::Remote
            } else {
                CandidateSource::Local
            },
            success_count: 0,
            blast_radius: BlastRadius {
                files_changed: capsule.outcome.changed_files.len(),
                lines_changed: capsule.outcome.lines_changed,
            },
            replay_failures,
            recent_mutation_ages_secs: Vec::new(),
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        if matches!(governor_decision.target_state, AssetState::Revoked) {
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: best.gene.id.clone(),
                    state: AssetState::Revoked,
                    reason: governor_decision.reason.clone(),
                    reason_code: governor_decision.reason_code.clone(),
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: best.gene.id.clone(),
                    reason: governor_decision.reason,
                })
                .map_err(|err| ReplayError::Store(err.to_string()))?;
            // Revoking a gene quarantines all of its capsules.
            for related in &best.capsules {
                self.store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: related.id.clone(),
                    })
                    .map_err(|err| ReplayError::Store(err.to_string()))?;
            }
        }

        Ok(())
    }
1105
    /// Derives governor confidence inputs for `gene_id` from the projection:
    /// (current confidence, historical peak, seconds since last update).
    ///
    /// NOTE(review): "current" is reported as the peak confidence across the
    /// gene's capsules, making it identical to the historical peak here —
    /// confirm this is intended rather than a decayed current value.
    fn confidence_context(
        projection: &EvolutionProjection,
        gene_id: &str,
    ) -> (f32, f32, Option<u64>) {
        let peak_confidence = projection
            .capsules
            .iter()
            .filter(|capsule| capsule.gene_id == gene_id)
            .map(|capsule| capsule.confidence)
            .fold(0.0_f32, f32::max);
        let age_secs = projection
            .last_updated_at
            .get(gene_id)
            .and_then(|timestamp| Self::seconds_since_timestamp(timestamp, Utc::now()));
        (peak_confidence, peak_confidence, age_secs)
    }
1122
1123 fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
1124 let parsed = DateTime::parse_from_rfc3339(timestamp)
1125 .ok()?
1126 .with_timezone(&Utc);
1127 let elapsed = now.signed_duration_since(parsed);
1128 if elapsed < Duration::zero() {
1129 Some(0)
1130 } else {
1131 u64::try_from(elapsed.num_seconds()).ok()
1132 }
1133 }
1134
1135 fn replay_failure_count(&self, gene_id: &str) -> Result<u64, ReplayError> {
1136 Ok(self
1137 .store
1138 .scan(1)
1139 .map_err(|err| ReplayError::Store(err.to_string()))?
1140 .into_iter()
1141 .filter(|stored| {
1142 matches!(
1143 &stored.event,
1144 EvolutionEvent::ValidationFailed {
1145 gene_id: Some(current_gene_id),
1146 ..
1147 } if current_gene_id == gene_id
1148 )
1149 })
1150 .count() as u64)
1151 }
1152}
1153
/// A promoted gene whose promoted capsules' decayed replay confidence has
/// dropped below `MIN_REPLAY_CONFIDENCE` and therefore needs revalidation.
#[derive(Clone, Debug, PartialEq)]
struct ConfidenceRevalidationTarget {
    /// Gene requiring revalidation.
    gene_id: String,
    /// The gene's promoted capsules at detection time.
    capsule_ids: Vec<String>,
    /// Highest decayed confidence across those capsules (below threshold).
    decayed_confidence: f32,
}
1160
1161fn stale_replay_revalidation_targets(
1162 projection: &EvolutionProjection,
1163 now: DateTime<Utc>,
1164) -> Vec<ConfidenceRevalidationTarget> {
1165 projection
1166 .genes
1167 .iter()
1168 .filter(|gene| gene.state == AssetState::Promoted)
1169 .filter_map(|gene| {
1170 let promoted_capsules = projection
1171 .capsules
1172 .iter()
1173 .filter(|capsule| {
1174 capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
1175 })
1176 .collect::<Vec<_>>();
1177 if promoted_capsules.is_empty() {
1178 return None;
1179 }
1180 let age_secs = projection
1181 .last_updated_at
1182 .get(&gene.id)
1183 .and_then(|timestamp| seconds_since_timestamp_for_confidence(timestamp, now));
1184 let decayed_confidence = promoted_capsules
1185 .iter()
1186 .map(|capsule| decayed_replay_confidence(capsule.confidence, age_secs))
1187 .fold(0.0_f32, f32::max);
1188 if decayed_confidence >= MIN_REPLAY_CONFIDENCE {
1189 return None;
1190 }
1191 Some(ConfidenceRevalidationTarget {
1192 gene_id: gene.id.clone(),
1193 capsule_ids: promoted_capsules
1194 .into_iter()
1195 .map(|capsule| capsule.id.clone())
1196 .collect(),
1197 decayed_confidence,
1198 })
1199 })
1200 .collect()
1201}
1202
1203fn seconds_since_timestamp_for_confidence(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
1204 let parsed = DateTime::parse_from_rfc3339(timestamp)
1205 .ok()?
1206 .with_timezone(&Utc);
1207 let elapsed = now.signed_duration_since(parsed);
1208 if elapsed < Duration::zero() {
1209 Some(0)
1210 } else {
1211 u64::try_from(elapsed.num_seconds()).ok()
1212 }
1213}
1214
/// Errors surfaced by the evolution kernel and network-node entry points.
#[derive(Debug, Error)]
pub enum EvoKernelError {
    /// The sandbox failed to apply a mutation.
    #[error("sandbox error: {0}")]
    Sandbox(String),
    /// The validator could not run (infrastructure failure, not a failed check).
    #[error("validation error: {0}")]
    Validation(String),
    /// Validation ran and the checks failed; carries the full report.
    #[error("validation failed")]
    ValidationFailed(ValidationReport),
    /// The evolution store rejected an append or scan.
    #[error("store error: {0}")]
    Store(String),
}
1226
/// Result of capturing a successful mutation: the committed capsule, the
/// derived gene, and the governor's promotion decision for both.
#[derive(Clone, Debug)]
pub struct CaptureOutcome {
    /// Capsule committed to the store for this mutation.
    pub capsule: Capsule,
    /// Gene derived from the mutation's signals and strategy.
    pub gene: Gene,
    /// Governor verdict that determined the asset states above.
    pub governor_decision: GovernorDecision,
}
1233
/// Outcome of importing a remote evolution envelope into the local store.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ImportOutcome {
    /// Identifiers of assets that were imported.
    pub imported_asset_ids: Vec<String>,
    /// Whether the envelope was accepted.
    pub accepted: bool,
    /// Cursor for fetching the next page, when pagination continues.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub next_cursor: Option<String>,
    /// Opaque token allowing the sender to resume a partial sync.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resume_token: Option<String>,
    /// Audit trail of the sync operation.
    #[serde(default)]
    pub sync_audit: SyncAudit,
}
1245
/// Point-in-time metrics derived from the evolution event log.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct EvolutionMetricsSnapshot {
    /// Total replay attempts recorded.
    pub replay_attempts_total: u64,
    /// Replay attempts that succeeded.
    pub replay_success_total: u64,
    /// Ratio of successful replays to attempts.
    pub replay_success_rate: f64,
    /// Confidence revalidation runs recorded.
    pub confidence_revalidations_total: u64,
    /// Reasoning/planner invocations avoided by capsule reuse.
    pub replay_reasoning_avoided_total: u64,
    /// Per-task-class replay metrics.
    pub replay_task_classes: Vec<ReplayTaskClassMetrics>,
    /// Mutations declared overall.
    pub mutation_declared_total: u64,
    /// Mutations that reached the promoted state.
    pub promoted_mutations_total: u64,
    /// Ratio of promoted mutations to declared mutations.
    pub promotion_ratio: f64,
    /// Gene revocations recorded.
    pub gene_revocations_total: u64,
    /// Mutations declared within the last hour.
    pub mutation_velocity_last_hour: u64,
    /// Revocations within the last hour.
    pub revoke_frequency_last_hour: u64,
    /// Genes currently promoted.
    pub promoted_genes: u64,
    /// Capsules currently promoted.
    pub promoted_capsules: u64,
    /// Sequence number of the newest event in the log.
    pub last_event_seq: u64,
}
1264
/// Compact health view derived from an `EvolutionMetricsSnapshot`.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct EvolutionHealthSnapshot {
    /// Textual health status.
    pub status: String,
    /// Sequence number of the newest event in the log.
    pub last_event_seq: u64,
    /// Genes currently promoted.
    pub promoted_genes: u64,
    /// Capsules currently promoted.
    pub promoted_capsules: u64,
}
1272
/// A network-facing node over an evolution store: accepts publishes,
/// answers fetches, applies revocations, and reports metrics/health.
#[derive(Clone)]
pub struct EvolutionNetworkNode {
    /// Backing evolution store shared across clones of the node.
    pub store: Arc<dyn EvolutionStore>,
}
1277
1278impl EvolutionNetworkNode {
1279 pub fn new(store: Arc<dyn EvolutionStore>) -> Self {
1280 Self { store }
1281 }
1282
1283 pub fn with_default_store() -> Self {
1284 Self {
1285 store: Arc::new(JsonlEvolutionStore::new(default_store_root())),
1286 }
1287 }
1288
1289 pub fn accept_publish_request(
1290 &self,
1291 request: &PublishRequest,
1292 ) -> Result<ImportOutcome, EvoKernelError> {
1293 let requested_cursor = resolve_requested_cursor(
1294 &request.sender_id,
1295 request.since_cursor.as_deref(),
1296 request.resume_token.as_deref(),
1297 )?;
1298 import_remote_envelope_into_store(
1299 self.store.as_ref(),
1300 &EvolutionEnvelope::publish(request.sender_id.clone(), request.assets.clone()),
1301 None,
1302 requested_cursor,
1303 )
1304 }
1305
1306 pub fn ensure_builtin_experience_assets(
1307 &self,
1308 sender_id: impl Into<String>,
1309 ) -> Result<ImportOutcome, EvoKernelError> {
1310 ensure_builtin_experience_assets_in_store(self.store.as_ref(), sender_id.into())
1311 }
1312
1313 pub fn record_reported_experience(
1314 &self,
1315 sender_id: impl Into<String>,
1316 gene_id: impl Into<String>,
1317 signals: Vec<String>,
1318 strategy: Vec<String>,
1319 validation: Vec<String>,
1320 ) -> Result<ImportOutcome, EvoKernelError> {
1321 record_reported_experience_in_store(
1322 self.store.as_ref(),
1323 sender_id.into(),
1324 gene_id.into(),
1325 signals,
1326 strategy,
1327 validation,
1328 )
1329 }
1330
1331 pub fn publish_local_assets(
1332 &self,
1333 sender_id: impl Into<String>,
1334 ) -> Result<EvolutionEnvelope, EvoKernelError> {
1335 export_promoted_assets_from_store(self.store.as_ref(), sender_id)
1336 }
1337
1338 pub fn fetch_assets(
1339 &self,
1340 responder_id: impl Into<String>,
1341 query: &FetchQuery,
1342 ) -> Result<FetchResponse, EvoKernelError> {
1343 fetch_assets_from_store(self.store.as_ref(), responder_id, query)
1344 }
1345
1346 pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
1347 revoke_assets_in_store(self.store.as_ref(), notice)
1348 }
1349
1350 pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
1351 evolution_metrics_snapshot(self.store.as_ref())
1352 }
1353
1354 pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
1355 self.metrics_snapshot().map(|snapshot| {
1356 let health = evolution_health_snapshot(&snapshot);
1357 render_evolution_metrics_prometheus(&snapshot, &health)
1358 })
1359 }
1360
1361 pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
1362 self.metrics_snapshot()
1363 .map(|snapshot| evolution_health_snapshot(&snapshot))
1364 }
1365}
1366
/// Orchestrates mutation capture, validation, replay, governance, and
/// economics over a shared evolution store.
pub struct EvoKernel<S: KernelState> {
    /// Underlying execution kernel.
    pub kernel: Arc<Kernel<S>>,
    /// Sandbox used to apply mutations in isolation.
    pub sandbox: Arc<dyn Sandbox>,
    /// Validator that runs the validation plan against sandbox receipts.
    pub validator: Arc<dyn Validator>,
    /// Append-only evolution event store.
    pub store: Arc<dyn EvolutionStore>,
    /// Selector used to rank replay candidates.
    pub selector: Arc<dyn Selector>,
    /// Governor deciding promotion/quarantine/revocation of assets.
    pub governor: Arc<dyn Governor>,
    /// Shared EVU ledger for staking and reputation.
    pub economics: Arc<Mutex<EvuLedger>>,
    /// Map tracking which remote publisher supplied which asset.
    pub remote_publishers: Arc<Mutex<BTreeMap<String, String>>>,
    /// Staking rules applied to remote publishing.
    pub stake_policy: StakePolicy,
    /// Policy constraining sandbox mutation application.
    pub sandbox_policy: SandboxPolicy,
    /// Validation stages/profile run after each mutation.
    pub validation_plan: ValidationPlan,
}
1380
1381impl<S: KernelState> EvoKernel<S> {
1382 fn recent_prior_mutation_ages_secs(
1383 &self,
1384 exclude_mutation_id: Option<&str>,
1385 ) -> Result<Vec<u64>, EvolutionError> {
1386 let now = Utc::now();
1387 let mut ages = self
1388 .store
1389 .scan(1)?
1390 .into_iter()
1391 .filter_map(|stored| match stored.event {
1392 EvolutionEvent::MutationDeclared { mutation }
1393 if exclude_mutation_id != Some(mutation.intent.id.as_str()) =>
1394 {
1395 Self::seconds_since_timestamp(&stored.timestamp, now)
1396 }
1397 _ => None,
1398 })
1399 .collect::<Vec<_>>();
1400 ages.sort_unstable();
1401 Ok(ages)
1402 }
1403
1404 fn seconds_since_timestamp(timestamp: &str, now: DateTime<Utc>) -> Option<u64> {
1405 let parsed = DateTime::parse_from_rfc3339(timestamp)
1406 .ok()?
1407 .with_timezone(&Utc);
1408 let elapsed = now.signed_duration_since(parsed);
1409 if elapsed < Duration::zero() {
1410 Some(0)
1411 } else {
1412 u64::try_from(elapsed.num_seconds()).ok()
1413 }
1414 }
1415
1416 pub fn new(
1417 kernel: Arc<Kernel<S>>,
1418 sandbox: Arc<dyn Sandbox>,
1419 validator: Arc<dyn Validator>,
1420 store: Arc<dyn EvolutionStore>,
1421 ) -> Self {
1422 let selector: Arc<dyn Selector> = Arc::new(StoreBackedSelector::new(store.clone()));
1423 Self {
1424 kernel,
1425 sandbox,
1426 validator,
1427 store,
1428 selector,
1429 governor: Arc::new(DefaultGovernor::default()),
1430 economics: Arc::new(Mutex::new(EvuLedger::default())),
1431 remote_publishers: Arc::new(Mutex::new(BTreeMap::new())),
1432 stake_policy: StakePolicy::default(),
1433 sandbox_policy: SandboxPolicy::oris_default(),
1434 validation_plan: ValidationPlan::oris_default(),
1435 }
1436 }
1437
1438 pub fn with_selector(mut self, selector: Arc<dyn Selector>) -> Self {
1439 self.selector = selector;
1440 self
1441 }
1442
1443 pub fn with_sandbox_policy(mut self, policy: SandboxPolicy) -> Self {
1444 self.sandbox_policy = policy;
1445 self
1446 }
1447
1448 pub fn with_governor(mut self, governor: Arc<dyn Governor>) -> Self {
1449 self.governor = governor;
1450 self
1451 }
1452
1453 pub fn with_economics(mut self, economics: Arc<Mutex<EvuLedger>>) -> Self {
1454 self.economics = economics;
1455 self
1456 }
1457
1458 pub fn with_stake_policy(mut self, policy: StakePolicy) -> Self {
1459 self.stake_policy = policy;
1460 self
1461 }
1462
1463 pub fn with_validation_plan(mut self, plan: ValidationPlan) -> Self {
1464 self.validation_plan = plan;
1465 self
1466 }
1467
1468 pub fn select_candidates(&self, input: &SelectorInput) -> Vec<GeneCandidate> {
1469 let executor = StoreReplayExecutor {
1470 sandbox: self.sandbox.clone(),
1471 validator: self.validator.clone(),
1472 store: self.store.clone(),
1473 selector: self.selector.clone(),
1474 governor: self.governor.clone(),
1475 economics: Some(self.economics.clone()),
1476 remote_publishers: Some(self.remote_publishers.clone()),
1477 stake_policy: self.stake_policy.clone(),
1478 };
1479 executor.collect_replay_candidates(input).candidates
1480 }
1481
    /// Seeds the store with the built-in gene/capsule templates when the
    /// projection contains no genes yet.
    ///
    /// For each template the full event sequence is appended — declare,
    /// extract signals, project gene, evaluate promotion, commit capsule —
    /// and both gene and capsule are immediately quarantined: seeds must
    /// pass local validation before they are eligible for replay.
    pub fn bootstrap_if_empty(&self, run_id: &RunId) -> Result<BootstrapReport, EvoKernelError> {
        let projection = projection_snapshot(self.store.as_ref())?;
        // Any existing gene (seeded or real) means bootstrap is unnecessary.
        if !projection.genes.is_empty() {
            return Ok(BootstrapReport::default());
        }

        let templates = built_in_seed_templates();
        for template in &templates {
            let mutation = build_seed_mutation(template);
            let extracted = extract_seed_signals(template);
            let gene = build_bootstrap_gene(template, &extracted)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
            let capsule = build_bootstrap_capsule(run_id, template, &mutation, &gene)
                .map_err(|err| EvoKernelError::Validation(err.to_string()))?;

            // Event order mirrors the real capture pipeline.
            self.store
                .append_event(EvolutionEvent::MutationDeclared {
                    mutation: mutation.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::SignalsExtracted {
                    mutation_id: mutation.intent.id.clone(),
                    hash: extracted.hash.clone(),
                    signals: extracted.values.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::PromotionEvaluated {
                    gene_id: gene.id.clone(),
                    state: AssetState::Quarantined,
                    reason: "bootstrap seeds require local validation before replay".into(),
                    reason_code: TransitionReasonCode::DowngradeBootstrapRequiresLocalValidation,
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleCommitted {
                    capsule: capsule.clone(),
                })
                .map_err(store_err)?;
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id,
                })
                .map_err(store_err)?;
        }

        Ok(BootstrapReport {
            seeded: true,
            genes_added: templates.len(),
            capsules_added: templates.len(),
        })
    }
1538
1539 pub async fn capture_successful_mutation(
1540 &self,
1541 run_id: &RunId,
1542 mutation: PreparedMutation,
1543 ) -> Result<Capsule, EvoKernelError> {
1544 Ok(self
1545 .capture_mutation_with_governor(run_id, mutation)
1546 .await?
1547 .capsule)
1548 }
1549
    /// Runs the full capture pipeline for a prepared mutation and records
    /// every step in the evolution store.
    ///
    /// Pipeline: declare -> sandbox apply -> validate -> extract signals ->
    /// derive gene -> governor evaluation -> commit capsule. Event append
    /// order is part of the store's contract.
    ///
    /// # Errors
    /// - `EvoKernelError::Sandbox` when the sandbox rejects the mutation
    ///   (a `MutationRejected` event is appended first).
    /// - `EvoKernelError::ValidationFailed` when validation checks fail
    ///   (a `ValidationFailed` event is appended first).
    /// - `EvoKernelError::Validation` / `Store` for infrastructure failures.
    pub async fn capture_mutation_with_governor(
        &self,
        run_id: &RunId,
        mutation: PreparedMutation,
    ) -> Result<CaptureOutcome, EvoKernelError> {
        // Phase 1: declare the mutation before any side effects.
        self.store
            .append_event(EvolutionEvent::MutationDeclared {
                mutation: mutation.clone(),
            })
            .map_err(store_err)?;

        // Phase 2: apply in the sandbox; record rejection on failure.
        let receipt = match self.sandbox.apply(&mutation, &self.sandbox_policy).await {
            Ok(receipt) => receipt,
            Err(err) => {
                self.store
                    .append_event(EvolutionEvent::MutationRejected {
                        mutation_id: mutation.intent.id.clone(),
                        reason: err.to_string(),
                    })
                    .map_err(store_err)?;
                return Err(EvoKernelError::Sandbox(err.to_string()));
            }
        };

        self.store
            .append_event(EvolutionEvent::MutationApplied {
                mutation_id: mutation.intent.id.clone(),
                patch_hash: receipt.patch_hash.clone(),
                changed_files: receipt
                    .changed_files
                    .iter()
                    .map(|path| path.to_string_lossy().to_string())
                    .collect(),
            })
            .map_err(store_err)?;

        // Phase 3: run the validation plan against the sandbox receipt.
        let report = self
            .validator
            .run(&receipt, &self.validation_plan)
            .await
            .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
        if !report.success {
            self.store
                .append_event(EvolutionEvent::ValidationFailed {
                    mutation_id: mutation.intent.id.clone(),
                    report: report.to_snapshot(&self.validation_plan.profile),
                    gene_id: None,
                })
                .map_err(store_err)?;
            return Err(EvoKernelError::ValidationFailed(report));
        }

        self.store
            .append_event(EvolutionEvent::ValidationPassed {
                mutation_id: mutation.intent.id.clone(),
                report: report.to_snapshot(&self.validation_plan.profile),
                gene_id: None,
            })
            .map_err(store_err)?;

        // Phase 4: deterministic signal extraction from diff + validation output.
        let extracted_signals = extract_deterministic_signals(&SignalExtractionInput {
            patch_diff: mutation.artifact.payload.clone(),
            intent: mutation.intent.intent.clone(),
            expected_effect: mutation.intent.expected_effect.clone(),
            declared_signals: mutation.intent.signals.clone(),
            changed_files: receipt
                .changed_files
                .iter()
                .map(|path| path.to_string_lossy().to_string())
                .collect(),
            validation_success: report.success,
            validation_logs: report.logs.clone(),
            stage_outputs: report
                .stages
                .iter()
                .flat_map(|stage| [stage.stdout.clone(), stage.stderr.clone()])
                .filter(|value| !value.is_empty())
                .collect(),
        });
        self.store
            .append_event(EvolutionEvent::SignalsExtracted {
                mutation_id: mutation.intent.id.clone(),
                hash: extracted_signals.hash.clone(),
                signals: extracted_signals.values.clone(),
            })
            .map_err(store_err)?;

        // Phase 5: derive the gene and gather the governor's inputs.
        let projection = projection_snapshot(self.store.as_ref())?;
        let blast_radius = compute_blast_radius(&mutation.artifact.payload);
        let recent_mutation_ages_secs = self
            .recent_prior_mutation_ages_secs(Some(mutation.intent.id.as_str()))
            .map_err(store_err)?;
        let mut gene = derive_gene(
            &mutation,
            &receipt,
            &self.validation_plan.profile,
            &extracted_signals.values,
        );
        let (current_confidence, historical_peak_confidence, confidence_last_updated_secs) =
            StoreReplayExecutor::confidence_context(&projection, &gene.id);
        // Prior capsule count for this gene, plus one for this success.
        let success_count = projection
            .genes
            .iter()
            .find(|existing| existing.id == gene.id)
            .map(|existing| {
                projection
                    .capsules
                    .iter()
                    .filter(|capsule| capsule.gene_id == existing.id)
                    .count() as u64
            })
            .unwrap_or(0)
            + 1;
        let governor_decision = self.governor.evaluate(GovernorInput {
            candidate_source: CandidateSource::Local,
            success_count,
            blast_radius: blast_radius.clone(),
            replay_failures: 0,
            recent_mutation_ages_secs,
            current_confidence,
            historical_peak_confidence,
            confidence_last_updated_secs,
        });

        // Phase 6: record the gene and the governor's verdict.
        gene.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
            .map_err(store_err)?;
        self.store
            .append_event(EvolutionEvent::PromotionEvaluated {
                gene_id: gene.id.clone(),
                state: governor_decision.target_state.clone(),
                reason: governor_decision.reason.clone(),
                reason_code: governor_decision.reason_code.clone(),
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Promoted) {
            self.store
                .append_event(EvolutionEvent::GenePromoted {
                    gene_id: gene.id.clone(),
                })
                .map_err(store_err)?;
        }
        if matches!(governor_decision.target_state, AssetState::Revoked) {
            self.store
                .append_event(EvolutionEvent::GeneRevoked {
                    gene_id: gene.id.clone(),
                    reason: governor_decision.reason.clone(),
                })
                .map_err(store_err)?;
        }
        if let Some(spec_id) = &mutation.intent.spec_id {
            self.store
                .append_event(EvolutionEvent::SpecLinked {
                    mutation_id: mutation.intent.id.clone(),
                    spec_id: spec_id.clone(),
                })
                .map_err(store_err)?;
        }

        // Phase 7: commit the capsule in the state the governor chose.
        let mut capsule = build_capsule(
            run_id,
            &mutation,
            &receipt,
            &report,
            &self.validation_plan.profile,
            &gene,
            &blast_radius,
        )
        .map_err(|err| EvoKernelError::Validation(err.to_string()))?;
        capsule.state = governor_decision.target_state.clone();
        self.store
            .append_event(EvolutionEvent::CapsuleCommitted {
                capsule: capsule.clone(),
            })
            .map_err(store_err)?;
        if matches!(governor_decision.target_state, AssetState::Quarantined) {
            self.store
                .append_event(EvolutionEvent::CapsuleQuarantined {
                    capsule_id: capsule.id.clone(),
                })
                .map_err(store_err)?;
        }

        Ok(CaptureOutcome {
            capsule,
            gene,
            governor_decision,
        })
    }
1740
1741 pub async fn capture_from_proposal(
1742 &self,
1743 run_id: &RunId,
1744 proposal: &AgentMutationProposal,
1745 diff_payload: String,
1746 base_revision: Option<String>,
1747 ) -> Result<CaptureOutcome, EvoKernelError> {
1748 let intent = MutationIntent {
1749 id: next_id("proposal"),
1750 intent: proposal.intent.clone(),
1751 target: MutationTarget::Paths {
1752 allow: proposal.files.clone(),
1753 },
1754 expected_effect: proposal.expected_effect.clone(),
1755 risk: RiskLevel::Low,
1756 signals: proposal.files.clone(),
1757 spec_id: None,
1758 };
1759 self.capture_mutation_with_governor(
1760 run_id,
1761 prepare_mutation(intent, diff_payload, base_revision),
1762 )
1763 .await
1764 }
1765
1766 pub fn feedback_for_agent(outcome: &CaptureOutcome) -> ExecutionFeedback {
1767 ExecutionFeedback {
1768 accepted: !matches!(outcome.governor_decision.target_state, AssetState::Revoked),
1769 asset_state: Some(format!("{:?}", outcome.governor_decision.target_state)),
1770 summary: outcome.governor_decision.reason.clone(),
1771 }
1772 }
1773
1774 pub fn replay_feedback_for_agent(
1775 signals: &[String],
1776 decision: &ReplayDecision,
1777 ) -> ReplayFeedback {
1778 let (task_class_id, task_label) = replay_task_descriptor(signals);
1779 let planner_directive = if decision.used_capsule {
1780 ReplayPlannerDirective::SkipPlanner
1781 } else {
1782 ReplayPlannerDirective::PlanFallback
1783 };
1784 let reasoning_steps_avoided = u64::from(decision.used_capsule);
1785 let fallback_reason = if decision.fallback_to_planner {
1786 Some(decision.reason.clone())
1787 } else {
1788 None
1789 };
1790 let summary = if decision.used_capsule {
1791 format!("reused prior capsule for task class '{task_label}'; skip planner")
1792 } else {
1793 format!(
1794 "planner fallback required for task class '{task_label}': {}",
1795 decision.reason
1796 )
1797 };
1798
1799 ReplayFeedback {
1800 used_capsule: decision.used_capsule,
1801 capsule_id: decision.capsule_id.clone(),
1802 planner_directive,
1803 reasoning_steps_avoided,
1804 fallback_reason,
1805 task_class_id,
1806 task_label,
1807 summary,
1808 }
1809 }
    /// Runs a supervised devloop task: classify it, require explicit human
    /// approval, then capture the proposal through the governor pipeline.
    ///
    /// Returns `RejectedByPolicy` for tasks outside the bounded scope,
    /// `AwaitingApproval` when approval has not been granted, and
    /// `Executed` (with execution feedback) after a successful capture.
    pub async fn run_supervised_devloop(
        &self,
        run_id: &RunId,
        request: &SupervisedDevloopRequest,
        diff_payload: String,
        base_revision: Option<String>,
    ) -> Result<SupervisedDevloopOutcome, EvoKernelError> {
        // Unsupported tasks are rejected outright; no events are recorded.
        let task_class = classify_supervised_devloop_request(request);
        let Some(task_class) = task_class else {
            return Ok(SupervisedDevloopOutcome {
                task_id: request.task.id.clone(),
                task_class: None,
                status: SupervisedDevloopStatus::RejectedByPolicy,
                execution_feedback: None,
                summary: format!(
                    "supervised devloop rejected task '{}' because it is an unsupported task outside the bounded scope",
                    request.task.id
                ),
            });
        };

        // Human approval is a hard gate before any mutation is applied.
        if !request.approval.approved {
            return Ok(SupervisedDevloopOutcome {
                task_id: request.task.id.clone(),
                task_class: Some(task_class),
                status: SupervisedDevloopStatus::AwaitingApproval,
                execution_feedback: None,
                summary: format!(
                    "supervised devloop paused task '{}' until explicit human approval is granted",
                    request.task.id
                ),
            });
        }

        let capture = self
            .capture_from_proposal(run_id, &request.proposal, diff_payload, base_revision)
            .await?;
        let approver = request
            .approval
            .approver
            .as_deref()
            .unwrap_or("unknown approver");

        Ok(SupervisedDevloopOutcome {
            task_id: request.task.id.clone(),
            task_class: Some(task_class),
            status: SupervisedDevloopStatus::Executed,
            execution_feedback: Some(Self::feedback_for_agent(&capture)),
            summary: format!(
                "supervised devloop executed task '{}' with explicit approval from {approver}",
                request.task.id
            ),
        })
    }
1864 pub fn coordinate(&self, plan: CoordinationPlan) -> CoordinationResult {
1865 MultiAgentCoordinator::new().coordinate(plan)
1866 }
1867
1868 pub fn export_promoted_assets(
1869 &self,
1870 sender_id: impl Into<String>,
1871 ) -> Result<EvolutionEnvelope, EvoKernelError> {
1872 let sender_id = sender_id.into();
1873 let envelope = export_promoted_assets_from_store(self.store.as_ref(), sender_id.clone())?;
1874 if !envelope.assets.is_empty() {
1875 let mut ledger = self
1876 .economics
1877 .lock()
1878 .map_err(|_| EvoKernelError::Validation("economics ledger lock poisoned".into()))?;
1879 if ledger
1880 .reserve_publish_stake(&sender_id, &self.stake_policy)
1881 .is_none()
1882 {
1883 return Err(EvoKernelError::Validation(
1884 "insufficient EVU for remote publish".into(),
1885 ));
1886 }
1887 }
1888 Ok(envelope)
1889 }
1890
1891 pub fn import_remote_envelope(
1892 &self,
1893 envelope: &EvolutionEnvelope,
1894 ) -> Result<ImportOutcome, EvoKernelError> {
1895 import_remote_envelope_into_store(
1896 self.store.as_ref(),
1897 envelope,
1898 Some(self.remote_publishers.as_ref()),
1899 None,
1900 )
1901 }
1902
1903 pub fn fetch_assets(
1904 &self,
1905 responder_id: impl Into<String>,
1906 query: &FetchQuery,
1907 ) -> Result<FetchResponse, EvoKernelError> {
1908 fetch_assets_from_store(self.store.as_ref(), responder_id, query)
1909 }
1910
1911 pub fn revoke_assets(&self, notice: &RevokeNotice) -> Result<RevokeNotice, EvoKernelError> {
1912 revoke_assets_in_store(self.store.as_ref(), notice)
1913 }
1914
1915 pub async fn replay_or_fallback(
1916 &self,
1917 input: SelectorInput,
1918 ) -> Result<ReplayDecision, EvoKernelError> {
1919 let replay_run_id = next_id("replay");
1920 self.replay_or_fallback_for_run(&replay_run_id, input).await
1921 }
1922
1923 pub async fn replay_or_fallback_for_run(
1924 &self,
1925 run_id: &RunId,
1926 input: SelectorInput,
1927 ) -> Result<ReplayDecision, EvoKernelError> {
1928 let executor = StoreReplayExecutor {
1929 sandbox: self.sandbox.clone(),
1930 validator: self.validator.clone(),
1931 store: self.store.clone(),
1932 selector: self.selector.clone(),
1933 governor: self.governor.clone(),
1934 economics: Some(self.economics.clone()),
1935 remote_publishers: Some(self.remote_publishers.clone()),
1936 stake_policy: self.stake_policy.clone(),
1937 };
1938 executor
1939 .try_replay_for_run(run_id, &input, &self.sandbox_policy, &self.validation_plan)
1940 .await
1941 .map_err(|err| EvoKernelError::Validation(err.to_string()))
1942 }
1943
1944 pub fn economics_signal(&self, node_id: &str) -> Option<EconomicsSignal> {
1945 self.economics.lock().ok()?.governor_signal(node_id)
1946 }
1947
1948 pub fn selector_reputation_bias(&self) -> BTreeMap<String, f32> {
1949 self.economics
1950 .lock()
1951 .ok()
1952 .map(|locked| locked.selector_reputation_bias())
1953 .unwrap_or_default()
1954 }
1955
1956 pub fn metrics_snapshot(&self) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
1957 evolution_metrics_snapshot(self.store.as_ref())
1958 }
1959
1960 pub fn render_metrics_prometheus(&self) -> Result<String, EvoKernelError> {
1961 self.metrics_snapshot().map(|snapshot| {
1962 let health = evolution_health_snapshot(&snapshot);
1963 render_evolution_metrics_prometheus(&snapshot, &health)
1964 })
1965 }
1966
1967 pub fn health_snapshot(&self) -> Result<EvolutionHealthSnapshot, EvoKernelError> {
1968 self.metrics_snapshot()
1969 .map(|snapshot| evolution_health_snapshot(&snapshot))
1970 }
1971}
1972
1973pub fn prepare_mutation(
1974 intent: MutationIntent,
1975 diff_payload: String,
1976 base_revision: Option<String>,
1977) -> PreparedMutation {
1978 PreparedMutation {
1979 intent,
1980 artifact: MutationArtifact {
1981 encoding: ArtifactEncoding::UnifiedDiff,
1982 content_hash: compute_artifact_hash(&diff_payload),
1983 payload: diff_payload,
1984 base_revision,
1985 },
1986 }
1987}
1988
1989pub fn prepare_mutation_from_spec(
1990 plan: CompiledMutationPlan,
1991 diff_payload: String,
1992 base_revision: Option<String>,
1993) -> PreparedMutation {
1994 prepare_mutation(plan.mutation_intent, diff_payload, base_revision)
1995}
1996
1997pub fn default_evolution_store() -> Arc<dyn EvolutionStore> {
1998 Arc::new(oris_evolution::JsonlEvolutionStore::new(
1999 default_store_root(),
2000 ))
2001}
2002
/// The built-in seed templates used to bootstrap an empty evolution store.
///
/// Each template carries a unified-diff payload (the string continuation
/// lines below are part of the literal and must stay byte-exact) and uses
/// the shared "bootstrap-seed" validation profile.
fn built_in_seed_templates() -> Vec<SeedTemplate> {
    vec![
        // Creates a minimal README from scratch.
        SeedTemplate {
            id: "bootstrap-readme".into(),
            intent: "Seed a baseline README recovery pattern".into(),
            signals: vec!["bootstrap readme".into(), "missing readme".into()],
            diff_payload: "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1,3 @@
+# Oris
+Bootstrap documentation seed
+"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Adds a deterministic test-stabilization helper.
        SeedTemplate {
            id: "bootstrap-test-fix".into(),
            intent: "Seed a deterministic test stabilization pattern".into(),
            signals: vec!["bootstrap test fix".into(), "failing tests".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 1111111..2222222 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,2 @@
 pub fn demo() -> usize { 1 }
+pub fn normalize_test_output() -> bool { true }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Introduces a small, low-risk refactor helper.
        SeedTemplate {
            id: "bootstrap-refactor".into(),
            intent: "Seed a low-risk refactor capsule".into(),
            signals: vec!["bootstrap refactor".into(), "small refactor".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 2222222..3333333 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,3 @@
 pub fn demo() -> usize { 1 }
+
+fn extract_strategy_key(input: &str) -> &str { input }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
        // Adds a baseline structured-logging function.
        SeedTemplate {
            id: "bootstrap-logging".into(),
            intent: "Seed a baseline structured logging mutation".into(),
            signals: vec!["bootstrap logging".into(), "structured logs".into()],
            diff_payload: "\
diff --git a/src/lib.rs b/src/lib.rs
index 3333333..4444444 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1 +1,3 @@
 pub fn demo() -> usize { 1 }
+
+fn emit_bootstrap_log() { println!(\"bootstrap-log\"); }
"
            .into(),
            validation_profile: "bootstrap-seed".into(),
        },
    ]
}
2074
2075fn build_seed_mutation(template: &SeedTemplate) -> PreparedMutation {
2076 let changed_files = seed_changed_files(&template.diff_payload);
2077 let target = if changed_files.is_empty() {
2078 MutationTarget::WorkspaceRoot
2079 } else {
2080 MutationTarget::Paths {
2081 allow: changed_files,
2082 }
2083 };
2084 prepare_mutation(
2085 MutationIntent {
2086 id: stable_hash_json(&("bootstrap-mutation", &template.id))
2087 .unwrap_or_else(|_| format!("bootstrap-mutation-{}", template.id)),
2088 intent: template.intent.clone(),
2089 target,
2090 expected_effect: format!("seed {}", template.id),
2091 risk: RiskLevel::Low,
2092 signals: template.signals.clone(),
2093 spec_id: None,
2094 },
2095 template.diff_payload.clone(),
2096 None,
2097 )
2098}
2099
2100fn extract_seed_signals(template: &SeedTemplate) -> SignalExtractionOutput {
2101 let mut signals = BTreeSet::new();
2102 for declared in &template.signals {
2103 if let Some(phrase) = normalize_signal_phrase(declared) {
2104 signals.insert(phrase);
2105 }
2106 extend_signal_tokens(&mut signals, declared);
2107 }
2108 extend_signal_tokens(&mut signals, &template.intent);
2109 extend_signal_tokens(&mut signals, &template.diff_payload);
2110 for changed_file in seed_changed_files(&template.diff_payload) {
2111 extend_signal_tokens(&mut signals, &changed_file);
2112 }
2113 let values = signals.into_iter().take(32).collect::<Vec<_>>();
2114 let hash =
2115 stable_hash_json(&values).unwrap_or_else(|_| compute_artifact_hash(&values.join("\n")));
2116 SignalExtractionOutput { values, hash }
2117}
2118
/// Lists the target paths named by `+++ b/` headers in a unified diff,
/// trimmed, deduplicated, and sorted.
fn seed_changed_files(diff_payload: &str) -> Vec<String> {
    diff_payload
        .lines()
        .filter_map(|line| line.strip_prefix("+++ b/"))
        .map(str::trim)
        .filter(|path| !path.is_empty())
        .map(str::to_string)
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect()
}
2131
2132fn build_bootstrap_gene(
2133 template: &SeedTemplate,
2134 extracted: &SignalExtractionOutput,
2135) -> Result<Gene, EvolutionError> {
2136 let strategy = vec![template.id.clone(), "bootstrap".into()];
2137 let id = stable_hash_json(&(
2138 "bootstrap-gene",
2139 &template.id,
2140 &extracted.values,
2141 &template.validation_profile,
2142 ))?;
2143 Ok(Gene {
2144 id,
2145 signals: extracted.values.clone(),
2146 strategy,
2147 validation: vec![template.validation_profile.clone()],
2148 state: AssetState::Quarantined,
2149 })
2150}
2151
/// Builds a quarantined bootstrap capsule for a seed template.
///
/// The capsule id and validator hash are stable hashes over ordered tuples;
/// the tuple element order is part of the identity contract and must not
/// change. The capsule starts with zero confidence and an unverified,
/// unsuccessful outcome — seeds must be validated locally before replay.
fn build_bootstrap_capsule(
    run_id: &RunId,
    template: &SeedTemplate,
    mutation: &PreparedMutation,
    gene: &Gene,
) -> Result<Capsule, EvolutionError> {
    // Environment fingerprint is taken from the current working directory;
    // fall back to "." if the cwd is unavailable.
    let cwd = std::env::current_dir().unwrap_or_else(|_| Path::new(".").to_path_buf());
    let env = current_env_fingerprint(&cwd);
    let diff_hash = mutation.artifact.content_hash.clone();
    let changed_files = seed_changed_files(&template.diff_payload);
    let validator_hash = stable_hash_json(&(
        "bootstrap-validator",
        &template.id,
        &template.validation_profile,
        &diff_hash,
    ))?;
    let id = stable_hash_json(&(
        "bootstrap-capsule",
        &template.id,
        run_id,
        &gene.id,
        &diff_hash,
        &env,
    ))?;
    Ok(Capsule {
        id,
        gene_id: gene.id.clone(),
        mutation_id: mutation.intent.id.clone(),
        run_id: run_id.clone(),
        diff_hash,
        confidence: 0.0,
        env,
        outcome: Outcome {
            success: false,
            validation_profile: template.validation_profile.clone(),
            validation_duration_ms: 0,
            changed_files,
            validator_hash,
            lines_changed: compute_blast_radius(&template.diff_payload).lines_changed,
            replay_verified: false,
        },
        state: AssetState::Quarantined,
    })
}
2196
/// Derives a promoted [`Gene`] from a successfully validated mutation.
///
/// The strategy set combines: the first path component of every changed file,
/// rustc-style error codes found in the mutation payload, and the first
/// eight whitespace-separated words of the intent (lowercased). The gene id
/// is a stable hash of (signals, strategy, validation profile), with a fresh
/// generated id as fallback when hashing fails.
fn derive_gene(
    mutation: &PreparedMutation,
    receipt: &SandboxReceipt,
    validation_profile: &str,
    extracted_signals: &[String],
) -> Gene {
    let mut strategy = BTreeSet::new();
    // First path component of each changed file (e.g. crate or top dir name).
    for file in &receipt.changed_files {
        if let Some(component) = file.components().next() {
            strategy.insert(component.as_os_str().to_string_lossy().to_string());
        }
    }
    // Error codes mentioned in the payload. NOTE(review): unlike
    // `is_rust_error_code`, this only accepts an uppercase `E` prefix —
    // presumably intentional since rustc emits uppercase codes; confirm.
    for token in mutation
        .artifact
        .payload
        .split(|ch: char| !ch.is_ascii_alphanumeric())
    {
        if token.len() == 5
            && token.starts_with('E')
            && token[1..].chars().all(|ch| ch.is_ascii_digit())
        {
            strategy.insert(token.to_string());
        }
    }
    // Leading words of the human-written intent, lowercased.
    for token in mutation.intent.intent.split_whitespace().take(8) {
        strategy.insert(token.to_ascii_lowercase());
    }
    let strategy = strategy.into_iter().collect::<Vec<_>>();
    let id = stable_hash_json(&(extracted_signals, &strategy, validation_profile))
        .unwrap_or_else(|_| next_id("gene"));
    Gene {
        id,
        signals: extracted_signals.to_vec(),
        strategy,
        validation: vec![validation_profile.to_string()],
        state: AssetState::Promoted,
    }
}
2235
/// Builds a promoted [`Capsule`] recording a successful validation run.
///
/// The capsule id is a stable hash over the run, gene, diff hash, and
/// mutation id; the validator hash is a stable hash of the full validation
/// report. Confidence starts at a fixed 0.7 and `replay_verified` at false —
/// replays adjust both later.
fn build_capsule(
    run_id: &RunId,
    mutation: &PreparedMutation,
    receipt: &SandboxReceipt,
    report: &ValidationReport,
    validation_profile: &str,
    gene: &Gene,
    blast_radius: &BlastRadius,
) -> Result<Capsule, EvolutionError> {
    // Fingerprint the sandbox workdir's environment, not the host cwd.
    let env = current_env_fingerprint(&receipt.workdir);
    let validator_hash = stable_hash_json(report)?;
    let diff_hash = mutation.artifact.content_hash.clone();
    let id = stable_hash_json(&(run_id, &gene.id, &diff_hash, &mutation.intent.id))?;
    Ok(Capsule {
        id,
        gene_id: gene.id.clone(),
        mutation_id: mutation.intent.id.clone(),
        run_id: run_id.clone(),
        diff_hash,
        // Fixed initial confidence for freshly validated capsules.
        confidence: 0.7,
        env,
        outcome: oris_evolution::Outcome {
            success: true,
            validation_profile: validation_profile.to_string(),
            validation_duration_ms: report.duration_ms,
            changed_files: receipt
                .changed_files
                .iter()
                .map(|path| path.to_string_lossy().to_string())
                .collect(),
            validator_hash,
            lines_changed: blast_radius.lines_changed,
            replay_verified: false,
        },
        state: AssetState::Promoted,
    })
}
2273
2274fn current_env_fingerprint(workdir: &Path) -> EnvFingerprint {
2275 let rustc_version = Command::new("rustc")
2276 .arg("--version")
2277 .output()
2278 .ok()
2279 .filter(|output| output.status.success())
2280 .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
2281 .unwrap_or_else(|| "rustc unknown".into());
2282 let cargo_lock_hash = fs::read(workdir.join("Cargo.lock"))
2283 .ok()
2284 .map(|bytes| {
2285 let value = String::from_utf8_lossy(&bytes);
2286 compute_artifact_hash(&value)
2287 })
2288 .unwrap_or_else(|| "missing-cargo-lock".into());
2289 let target_triple = format!(
2290 "{}-unknown-{}",
2291 std::env::consts::ARCH,
2292 std::env::consts::OS
2293 );
2294 EnvFingerprint {
2295 rustc_version,
2296 cargo_lock_hash,
2297 target_triple,
2298 os: std::env::consts::OS.to_string(),
2299 }
2300}
2301
/// Splits `input` on non-alphanumeric boundaries and inserts each normalized
/// token of three or more characters into `out`.
///
/// Rustc error codes such as `e0308` are canonicalized to an uppercase `E`
/// prefix; every other token is lowercased.
fn extend_signal_tokens(out: &mut BTreeSet<String>, input: &str) {
    for token in input.split(|ch: char| !ch.is_ascii_alphanumeric()) {
        let token = token.trim();
        if token.is_empty() {
            continue;
        }
        // Inlined rustc diagnostic-code check: `E`/`e` plus four digits.
        let is_error_code = token.len() == 5
            && matches!(token.as_bytes().first(), Some(b'e') | Some(b'E'))
            && token[1..].chars().all(|ch| ch.is_ascii_digit());
        let normalized = if is_error_code {
            let mut rest = token.chars();
            let head = rest
                .next()
                .map(|ch| ch.to_ascii_uppercase())
                .unwrap_or('E');
            format!("{head}{}", rest.as_str())
        } else {
            token.to_ascii_lowercase()
        };
        // Very short tokens carry little signal; skip them.
        if normalized.len() >= 3 {
            out.insert(normalized);
        }
    }
}
2324
/// Normalizes a free-form signal into a canonical space-joined phrase.
///
/// The input is split on non-alphanumeric characters; rustc error codes keep
/// an uppercase `E` prefix, everything else is lowercased, and tokens shorter
/// than three characters are dropped. Returns `None` when nothing survives.
fn normalize_signal_phrase(input: &str) -> Option<String> {
    let mut parts: Vec<String> = Vec::new();
    for token in input.split(|ch: char| !ch.is_ascii_alphanumeric()) {
        let token = token.trim();
        if token.is_empty() {
            continue;
        }
        // Inlined rustc diagnostic-code check: `E`/`e` plus four digits.
        let is_error_code = token.len() == 5
            && matches!(token.as_bytes().first(), Some(b'e') | Some(b'E'))
            && token[1..].chars().all(|ch| ch.is_ascii_digit());
        let normalized = if is_error_code {
            let mut rest = token.chars();
            let head = rest
                .next()
                .map(|ch| ch.to_ascii_uppercase())
                .unwrap_or('E');
            format!("{head}{}", rest.as_str())
        } else {
            token.to_ascii_lowercase()
        };
        if normalized.len() >= 3 {
            parts.push(normalized);
        }
    }
    if parts.is_empty() {
        None
    } else {
        Some(parts.join(" "))
    }
}
2357
/// Derives a stable `(task_class_id, task_label)` pair from a set of signals.
///
/// Signals are normalized and deduplicated. The label is the first normalized
/// phrase that is not a generic validation outcome, falling back to the first
/// phrase; the class id is a stable hash over the whole normalized set.
/// Returns `("unknown", "unknown")` when no signal survives normalization.
fn replay_task_descriptor(signals: &[String]) -> (String, String) {
    let normalized = signals
        .iter()
        .filter_map(|signal| normalize_signal_phrase(signal))
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect::<Vec<_>>();
    if normalized.is_empty() {
        return ("unknown".into(), "unknown".into());
    }
    // Prefer a descriptive phrase over the generic validation outcomes.
    let task_label = normalized
        .iter()
        .find(|value| {
            value.as_str() != "validation passed" && value.as_str() != "validation failed"
        })
        .cloned()
        .unwrap_or_else(|| normalized[0].clone());
    // Fall back to a plain content hash if JSON hashing fails.
    let task_class_id = stable_hash_json(&normalized)
        .unwrap_or_else(|_| compute_artifact_hash(&normalized.join("\n")));
    (task_class_id, task_label)
}
2379
/// Returns `true` when `value` looks like a rustc diagnostic code:
/// an `E` or `e` followed by exactly four ASCII digits (e.g. `E0308`).
fn is_rust_error_code(value: &str) -> bool {
    let bytes = value.as_bytes();
    bytes.len() == 5
        && (bytes[0] == b'e' || bytes[0] == b'E')
        && bytes[1..].iter().all(|byte| byte.is_ascii_digit())
}
2385
2386fn classify_supervised_devloop_request(
2387 request: &SupervisedDevloopRequest,
2388) -> Option<BoundedTaskClass> {
2389 let path = request.proposal.files.first()?.trim();
2390 if request.proposal.files.len() != 1 || path.is_empty() {
2391 return None;
2392 }
2393 let normalized = path.replace('\\', "/");
2394 if normalized.starts_with("docs/") && normalized.ends_with(".md") {
2395 Some(BoundedTaskClass::DocsSingleFile)
2396 } else {
2397 None
2398 }
2399}
2400
2401fn find_declared_mutation(
2402 store: &dyn EvolutionStore,
2403 mutation_id: &MutationId,
2404) -> Result<Option<PreparedMutation>, EvolutionError> {
2405 for stored in store.scan(1)? {
2406 if let EvolutionEvent::MutationDeclared { mutation } = stored.event {
2407 if &mutation.intent.id == mutation_id {
2408 return Ok(Some(mutation));
2409 }
2410 }
2411 }
2412 Ok(None)
2413}
2414
2415fn exact_match_candidates(store: &dyn EvolutionStore, input: &SelectorInput) -> Vec<GeneCandidate> {
2416 let Ok(projection) = projection_snapshot(store) else {
2417 return Vec::new();
2418 };
2419 let capsules = projection.capsules.clone();
2420 let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
2421 let requested_spec_id = input
2422 .spec_id
2423 .as_deref()
2424 .map(str::trim)
2425 .filter(|value| !value.is_empty());
2426 let signal_set = input
2427 .signals
2428 .iter()
2429 .map(|signal| signal.to_ascii_lowercase())
2430 .collect::<BTreeSet<_>>();
2431 let mut candidates = projection
2432 .genes
2433 .into_iter()
2434 .filter_map(|gene| {
2435 if gene.state != AssetState::Promoted {
2436 return None;
2437 }
2438 if let Some(spec_id) = requested_spec_id {
2439 let matches_spec = spec_ids_by_gene
2440 .get(&gene.id)
2441 .map(|values| {
2442 values
2443 .iter()
2444 .any(|value| value.eq_ignore_ascii_case(spec_id))
2445 })
2446 .unwrap_or(false);
2447 if !matches_spec {
2448 return None;
2449 }
2450 }
2451 let gene_signals = gene
2452 .signals
2453 .iter()
2454 .map(|signal| signal.to_ascii_lowercase())
2455 .collect::<BTreeSet<_>>();
2456 if gene_signals == signal_set {
2457 let mut matched_capsules = capsules
2458 .iter()
2459 .filter(|capsule| {
2460 capsule.gene_id == gene.id && capsule.state == AssetState::Promoted
2461 })
2462 .cloned()
2463 .collect::<Vec<_>>();
2464 matched_capsules.sort_by(|left, right| {
2465 replay_environment_match_factor(&input.env, &right.env)
2466 .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
2467 .unwrap_or(std::cmp::Ordering::Equal)
2468 .then_with(|| {
2469 right
2470 .confidence
2471 .partial_cmp(&left.confidence)
2472 .unwrap_or(std::cmp::Ordering::Equal)
2473 })
2474 .then_with(|| left.id.cmp(&right.id))
2475 });
2476 if matched_capsules.is_empty() {
2477 None
2478 } else {
2479 let score = matched_capsules
2480 .first()
2481 .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
2482 .unwrap_or(0.0);
2483 Some(GeneCandidate {
2484 gene,
2485 score,
2486 capsules: matched_capsules,
2487 })
2488 }
2489 } else {
2490 None
2491 }
2492 })
2493 .collect::<Vec<_>>();
2494 candidates.sort_by(|left, right| {
2495 right
2496 .score
2497 .partial_cmp(&left.score)
2498 .unwrap_or(std::cmp::Ordering::Equal)
2499 .then_with(|| left.gene.id.cmp(&right.gene.id))
2500 });
2501 candidates
2502}
2503
2504fn quarantined_remote_exact_match_candidates(
2505 store: &dyn EvolutionStore,
2506 input: &SelectorInput,
2507) -> Vec<GeneCandidate> {
2508 let remote_asset_ids = store
2509 .scan(1)
2510 .ok()
2511 .map(|events| {
2512 events
2513 .into_iter()
2514 .filter_map(|stored| match stored.event {
2515 EvolutionEvent::RemoteAssetImported {
2516 source: CandidateSource::Remote,
2517 asset_ids,
2518 ..
2519 } => Some(asset_ids),
2520 _ => None,
2521 })
2522 .flatten()
2523 .collect::<BTreeSet<_>>()
2524 })
2525 .unwrap_or_default();
2526 if remote_asset_ids.is_empty() {
2527 return Vec::new();
2528 }
2529
2530 let Ok(projection) = projection_snapshot(store) else {
2531 return Vec::new();
2532 };
2533 let capsules = projection.capsules.clone();
2534 let spec_ids_by_gene = projection.spec_ids_by_gene.clone();
2535 let requested_spec_id = input
2536 .spec_id
2537 .as_deref()
2538 .map(str::trim)
2539 .filter(|value| !value.is_empty());
2540 let normalized_signals = input
2541 .signals
2542 .iter()
2543 .filter_map(|signal| normalize_signal_phrase(signal))
2544 .collect::<BTreeSet<_>>()
2545 .into_iter()
2546 .collect::<Vec<_>>();
2547 if normalized_signals.is_empty() {
2548 return Vec::new();
2549 }
2550 let mut candidates = projection
2551 .genes
2552 .into_iter()
2553 .filter_map(|gene| {
2554 if !matches!(gene.state, AssetState::Promoted | AssetState::Quarantined) {
2555 return None;
2556 }
2557 if let Some(spec_id) = requested_spec_id {
2558 let matches_spec = spec_ids_by_gene
2559 .get(&gene.id)
2560 .map(|values| {
2561 values
2562 .iter()
2563 .any(|value| value.eq_ignore_ascii_case(spec_id))
2564 })
2565 .unwrap_or(false);
2566 if !matches_spec {
2567 return None;
2568 }
2569 }
2570 let normalized_gene_signals = gene
2571 .signals
2572 .iter()
2573 .filter_map(|candidate| normalize_signal_phrase(candidate))
2574 .collect::<Vec<_>>();
2575 let matched_query_count = normalized_signals
2576 .iter()
2577 .filter(|signal| {
2578 normalized_gene_signals.iter().any(|candidate| {
2579 candidate.contains(signal.as_str()) || signal.contains(candidate)
2580 })
2581 })
2582 .count();
2583 if matched_query_count == 0 {
2584 return None;
2585 }
2586
2587 let mut matched_capsules = capsules
2588 .iter()
2589 .filter(|capsule| {
2590 capsule.gene_id == gene.id
2591 && capsule.state == AssetState::Quarantined
2592 && remote_asset_ids.contains(&capsule.id)
2593 })
2594 .cloned()
2595 .collect::<Vec<_>>();
2596 matched_capsules.sort_by(|left, right| {
2597 replay_environment_match_factor(&input.env, &right.env)
2598 .partial_cmp(&replay_environment_match_factor(&input.env, &left.env))
2599 .unwrap_or(std::cmp::Ordering::Equal)
2600 .then_with(|| {
2601 right
2602 .confidence
2603 .partial_cmp(&left.confidence)
2604 .unwrap_or(std::cmp::Ordering::Equal)
2605 })
2606 .then_with(|| left.id.cmp(&right.id))
2607 });
2608 if matched_capsules.is_empty() {
2609 None
2610 } else {
2611 let overlap = matched_query_count as f32 / normalized_signals.len() as f32;
2612 let env_score = matched_capsules
2613 .first()
2614 .map(|capsule| replay_environment_match_factor(&input.env, &capsule.env))
2615 .unwrap_or(0.0);
2616 Some(GeneCandidate {
2617 gene,
2618 score: overlap.max(env_score),
2619 capsules: matched_capsules,
2620 })
2621 }
2622 })
2623 .collect::<Vec<_>>();
2624 candidates.sort_by(|left, right| {
2625 right
2626 .score
2627 .partial_cmp(&left.score)
2628 .unwrap_or(std::cmp::Ordering::Equal)
2629 .then_with(|| left.gene.id.cmp(&right.gene.id))
2630 });
2631 candidates
2632}
2633
2634fn replay_environment_match_factor(input: &EnvFingerprint, candidate: &EnvFingerprint) -> f32 {
2635 let fields = [
2636 input
2637 .rustc_version
2638 .eq_ignore_ascii_case(&candidate.rustc_version),
2639 input
2640 .cargo_lock_hash
2641 .eq_ignore_ascii_case(&candidate.cargo_lock_hash),
2642 input
2643 .target_triple
2644 .eq_ignore_ascii_case(&candidate.target_triple),
2645 input.os.eq_ignore_ascii_case(&candidate.os),
2646 ];
2647 let matched_fields = fields.into_iter().filter(|matched| *matched).count() as f32;
2648 0.5 + ((matched_fields / 4.0) * 0.5)
2649}
2650
2651fn effective_candidate_score(
2652 candidate: &GeneCandidate,
2653 publishers_by_asset: &BTreeMap<String, String>,
2654 reputation_bias: &BTreeMap<String, f32>,
2655) -> f32 {
2656 let bias = candidate
2657 .capsules
2658 .first()
2659 .and_then(|capsule| publishers_by_asset.get(&capsule.id))
2660 .and_then(|publisher| reputation_bias.get(publisher))
2661 .copied()
2662 .unwrap_or(0.0)
2663 .clamp(0.0, 1.0);
2664 candidate.score * (1.0 + (bias * 0.1))
2665}
2666
/// Builds a publish [`EvolutionEnvelope`] containing every promoted gene and
/// capsule in the store, plus the provenance events for their mutations.
fn export_promoted_assets_from_store(
    store: &dyn EvolutionStore,
    sender_id: impl Into<String>,
) -> Result<EvolutionEnvelope, EvoKernelError> {
    let (events, projection) = scan_projection(store)?;
    // Only promoted assets are ever published to peers.
    let genes = projection
        .genes
        .into_iter()
        .filter(|gene| gene.state == AssetState::Promoted)
        .collect::<Vec<_>>();
    let capsules = projection
        .capsules
        .into_iter()
        .filter(|capsule| capsule.state == AssetState::Promoted)
        .collect::<Vec<_>>();
    let assets = replay_export_assets(&events, genes, capsules);
    Ok(EvolutionEnvelope::publish(sender_id, assets))
}
2685
/// Scans the full event log together with its derived projection, mapping
/// store errors into [`EvoKernelError`].
fn scan_projection(
    store: &dyn EvolutionStore,
) -> Result<(Vec<StoredEvolutionEvent>, EvolutionProjection), EvoKernelError> {
    store.scan_projection().map_err(store_err)
}
2691
/// Convenience wrapper over [`scan_projection`] that discards the raw events
/// and returns only the derived projection.
fn projection_snapshot(store: &dyn EvolutionStore) -> Result<EvolutionProjection, EvoKernelError> {
    scan_projection(store).map(|(_, projection)| projection)
}
2695
2696fn replay_export_assets(
2697 events: &[StoredEvolutionEvent],
2698 genes: Vec<Gene>,
2699 capsules: Vec<Capsule>,
2700) -> Vec<NetworkAsset> {
2701 let mutation_ids = capsules
2702 .iter()
2703 .map(|capsule| capsule.mutation_id.clone())
2704 .collect::<BTreeSet<_>>();
2705 let mut assets = replay_export_events_for_mutations(events, &mutation_ids);
2706 for gene in genes {
2707 assets.push(NetworkAsset::Gene { gene });
2708 }
2709 for capsule in capsules {
2710 assets.push(NetworkAsset::Capsule { capsule });
2711 }
2712 assets
2713}
2714
/// Extracts the provenance events (`MutationDeclared` and `SpecLinked`) for
/// the given mutation ids, deduplicated in log order.
///
/// The match guards use `BTreeSet::insert`'s return value as the dedup
/// check, so each mutation declaration and each (mutation, spec) link is
/// exported at most once even if the log contains repeats.
fn replay_export_events_for_mutations(
    events: &[StoredEvolutionEvent],
    mutation_ids: &BTreeSet<String>,
) -> Vec<NetworkAsset> {
    if mutation_ids.is_empty() {
        return Vec::new();
    }

    let mut assets = Vec::new();
    let mut seen_mutations = BTreeSet::new();
    let mut seen_spec_links = BTreeSet::new();
    for stored in events {
        match &stored.event {
            // First declaration of a requested mutation.
            EvolutionEvent::MutationDeclared { mutation }
                if mutation_ids.contains(mutation.intent.id.as_str())
                    && seen_mutations.insert(mutation.intent.id.clone()) =>
            {
                assets.push(NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared {
                        mutation: mutation.clone(),
                    },
                });
            }
            // First (mutation, spec) link for a requested mutation.
            EvolutionEvent::SpecLinked {
                mutation_id,
                spec_id,
            } if mutation_ids.contains(mutation_id.as_str())
                && seen_spec_links.insert((mutation_id.clone(), spec_id.clone())) =>
            {
                assets.push(NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::SpecLinked {
                        mutation_id: mutation_id.clone(),
                        spec_id: spec_id.clone(),
                    },
                });
            }
            _ => {}
        }
    }

    assets
}
2757
/// Prefix for sequence-based sync cursors, e.g. `seq:42`.
const SYNC_CURSOR_PREFIX: &str = "seq:";
/// Prefix for versioned resume tokens: `gep-rt1|<sender>|<cursor>`.
const SYNC_RESUME_TOKEN_PREFIX: &str = "gep-rt1|";
2760
/// Asset ids touched by events after a given sync cursor; used to compute
/// incremental sync deltas.
#[derive(Clone, Debug)]
struct DeltaWindow {
    // Genes projected, promoted, revoked, or re-evaluated in the window.
    changed_gene_ids: BTreeSet<String>,
    // Capsules committed, released, or quarantined in the window.
    changed_capsule_ids: BTreeSet<String>,
    // Mutations declared or spec-linked in the window.
    changed_mutation_ids: BTreeSet<String>,
}
2767
/// Trims an optional sync cursor/token string, mapping blank or absent input
/// to `None`.
fn normalize_sync_value(value: Option<&str>) -> Option<String> {
    match value.map(str::trim) {
        Some(trimmed) if !trimmed.is_empty() => Some(trimmed.to_owned()),
        _ => None,
    }
}
2774
2775fn parse_sync_cursor_seq(cursor: &str) -> Option<u64> {
2776 let trimmed = cursor.trim();
2777 if trimmed.is_empty() {
2778 return None;
2779 }
2780 let raw = trimmed.strip_prefix(SYNC_CURSOR_PREFIX).unwrap_or(trimmed);
2781 raw.parse::<u64>().ok()
2782}
2783
/// Formats a sequence number as a canonical sync cursor (`seq:<n>`).
fn format_sync_cursor(seq: u64) -> String {
    format!("{SYNC_CURSOR_PREFIX}{seq}")
}
2787
/// Encodes a sender-scoped resume token: `gep-rt1|<sender>|<cursor>`.
fn encode_resume_token(sender_id: &str, cursor: &str) -> String {
    format!("{SYNC_RESUME_TOKEN_PREFIX}{sender_id}|{cursor}")
}
2791
/// Decodes a `gep-rt1|<sender>|<cursor>` resume token back into its cursor.
///
/// Tokens without the versioned prefix are passed through unchanged (legacy
/// bare-cursor tokens); a malformed token or a sender mismatch is rejected
/// with a validation error.
fn decode_resume_token(sender_id: &str, token: &str) -> Result<String, EvoKernelError> {
    let token = token.trim();
    // No version prefix: treat the whole token as a bare cursor.
    let Some(encoded) = token.strip_prefix(SYNC_RESUME_TOKEN_PREFIX) else {
        return Ok(token.to_string());
    };
    let (token_sender, cursor) = encoded.split_once('|').ok_or_else(|| {
        EvoKernelError::Validation(
            "invalid resume_token format; expected gep-rt1|<sender>|<seq>".into(),
        )
    })?;
    // The token is only valid for the sender it was issued to.
    if token_sender != sender_id.trim() {
        return Err(EvoKernelError::Validation(
            "resume_token sender mismatch".into(),
        ));
    }
    Ok(cursor.to_string())
}
2809
/// Resolves the effective sync cursor from an optional `since_cursor` and/or
/// `resume_token`, preferring the resume token when both are present.
///
/// Returns `Ok(None)` when neither value is supplied; malformed cursors are
/// rejected with a validation error. The accepted cursor is re-normalized
/// into the canonical `seq:<n>` form.
fn resolve_requested_cursor(
    sender_id: &str,
    since_cursor: Option<&str>,
    resume_token: Option<&str>,
) -> Result<Option<String>, EvoKernelError> {
    // A resume token takes precedence over a plain since_cursor.
    let cursor = if let Some(token) = normalize_sync_value(resume_token) {
        Some(decode_resume_token(sender_id, &token)?)
    } else {
        normalize_sync_value(since_cursor)
    };

    let Some(cursor) = cursor else {
        return Ok(None);
    };
    let seq = parse_sync_cursor_seq(&cursor).ok_or_else(|| {
        EvoKernelError::Validation("invalid since_cursor/resume_token cursor format".into())
    })?;
    Ok(Some(format_sync_cursor(seq)))
}
2829
/// Returns the sync cursor of the most recent stored event, or `None` for an
/// empty store.
fn latest_store_cursor(store: &dyn EvolutionStore) -> Result<Option<String>, EvoKernelError> {
    let events = store.scan(1).map_err(store_err)?;
    Ok(events.last().map(|stored| format_sync_cursor(stored.seq)))
}
2834
/// Computes which gene, capsule, and mutation ids were touched by events with
/// a sequence number strictly greater than `since_seq`.
fn delta_window(events: &[StoredEvolutionEvent], since_seq: u64) -> DeltaWindow {
    let mut changed_gene_ids = BTreeSet::new();
    let mut changed_capsule_ids = BTreeSet::new();
    let mut changed_mutation_ids = BTreeSet::new();

    for stored in events {
        // Only events after the cursor count towards the delta.
        if stored.seq <= since_seq {
            continue;
        }
        match &stored.event {
            EvolutionEvent::MutationDeclared { mutation } => {
                changed_mutation_ids.insert(mutation.intent.id.clone());
            }
            EvolutionEvent::SpecLinked { mutation_id, .. } => {
                changed_mutation_ids.insert(mutation_id.clone());
            }
            EvolutionEvent::GeneProjected { gene } => {
                changed_gene_ids.insert(gene.id.clone());
            }
            EvolutionEvent::GenePromoted { gene_id }
            | EvolutionEvent::GeneRevoked { gene_id, .. }
            | EvolutionEvent::PromotionEvaluated { gene_id, .. } => {
                changed_gene_ids.insert(gene_id.clone());
            }
            // A committed capsule touches its gene and mutation as well.
            EvolutionEvent::CapsuleCommitted { capsule } => {
                changed_capsule_ids.insert(capsule.id.clone());
                changed_gene_ids.insert(capsule.gene_id.clone());
                changed_mutation_ids.insert(capsule.mutation_id.clone());
            }
            EvolutionEvent::CapsuleReleased { capsule_id, .. }
            | EvolutionEvent::CapsuleQuarantined { capsule_id } => {
                changed_capsule_ids.insert(capsule_id.clone());
            }
            // Imported ids may be genes or capsules; record them in both sets
            // since the event does not distinguish the asset kind.
            EvolutionEvent::RemoteAssetImported { asset_ids, .. } => {
                for asset_id in asset_ids {
                    changed_gene_ids.insert(asset_id.clone());
                    changed_capsule_ids.insert(asset_id.clone());
                }
            }
            _ => {}
        }
    }

    DeltaWindow {
        changed_gene_ids,
        changed_capsule_ids,
        changed_mutation_ids,
    }
}
2884
/// Imports the assets of a remote [`EvolutionEnvelope`] into the local store.
///
/// Validation happens first (content hash, then manifest), and both outcomes
/// are recorded via `record_manifest_validation` before any asset is
/// touched. Remote genes and capsules are always downgraded to `Quarantined`
/// so they must pass local validation before promotion. Assets, mutation
/// declarations, and spec links already known to the store are skipped,
/// making the import idempotent. The returned [`ImportOutcome`] carries the
/// new cursor, an optional sender-scoped resume token, and a sync audit.
fn import_remote_envelope_into_store(
    store: &dyn EvolutionStore,
    envelope: &EvolutionEnvelope,
    remote_publishers: Option<&Mutex<BTreeMap<String, String>>>,
    requested_cursor: Option<String>,
) -> Result<ImportOutcome, EvoKernelError> {
    // Reject (and audit) envelopes whose content hash does not verify.
    if !envelope.verify_content_hash() {
        record_manifest_validation(store, envelope, false, "invalid evolution envelope hash")?;
        return Err(EvoKernelError::Validation(
            "invalid evolution envelope hash".into(),
        ));
    }
    // Reject (and audit) envelopes with an invalid manifest.
    if let Err(reason) = envelope.verify_manifest() {
        record_manifest_validation(
            store,
            envelope,
            false,
            format!("manifest validation failed: {reason}"),
        )?;
        return Err(EvoKernelError::Validation(format!(
            "invalid evolution envelope manifest: {reason}"
        )));
    }
    record_manifest_validation(store, envelope, true, "manifest validated")?;

    let sender_id = normalized_sender_id(&envelope.sender_id);
    // Snapshot known ids so duplicate assets in this envelope are skipped.
    let (events, projection) = scan_projection(store)?;
    let mut known_gene_ids = projection
        .genes
        .into_iter()
        .map(|gene| gene.id)
        .collect::<BTreeSet<_>>();
    let mut known_capsule_ids = projection
        .capsules
        .into_iter()
        .map(|capsule| capsule.id)
        .collect::<BTreeSet<_>>();
    let mut known_mutation_ids = BTreeSet::new();
    let mut known_spec_links = BTreeSet::new();
    for stored in &events {
        match &stored.event {
            EvolutionEvent::MutationDeclared { mutation } => {
                known_mutation_ids.insert(mutation.intent.id.clone());
            }
            EvolutionEvent::SpecLinked {
                mutation_id,
                spec_id,
            } => {
                known_spec_links.insert((mutation_id.clone(), spec_id.clone()));
            }
            _ => {}
        }
    }
    let mut imported_asset_ids = Vec::new();
    let mut applied_count = 0usize;
    let mut skipped_count = 0usize;
    for asset in &envelope.assets {
        match asset {
            NetworkAsset::Gene { gene } => {
                // `insert` returning false means the gene is already known.
                if !known_gene_ids.insert(gene.id.clone()) {
                    skipped_count += 1;
                    continue;
                }
                imported_asset_ids.push(gene.id.clone());
                applied_count += 1;
                // Remote genes always enter quarantine regardless of their
                // state at the sender.
                let mut quarantined_gene = gene.clone();
                quarantined_gene.state = AssetState::Quarantined;
                store
                    .append_event(EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids: vec![gene.id.clone()],
                        sender_id: sender_id.clone(),
                    })
                    .map_err(store_err)?;
                store
                    .append_event(EvolutionEvent::GeneProjected {
                        gene: quarantined_gene.clone(),
                    })
                    .map_err(store_err)?;
                record_remote_publisher_for_asset(remote_publishers, &envelope.sender_id, asset);
                store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: quarantined_gene.id,
                        state: AssetState::Quarantined,
                        reason: "remote asset requires local validation before promotion".into(),
                        reason_code: TransitionReasonCode::DowngradeRemoteRequiresLocalValidation,
                    })
                    .map_err(store_err)?;
            }
            NetworkAsset::Capsule { capsule } => {
                if !known_capsule_ids.insert(capsule.id.clone()) {
                    skipped_count += 1;
                    continue;
                }
                imported_asset_ids.push(capsule.id.clone());
                applied_count += 1;
                store
                    .append_event(EvolutionEvent::RemoteAssetImported {
                        source: CandidateSource::Remote,
                        asset_ids: vec![capsule.id.clone()],
                        sender_id: sender_id.clone(),
                    })
                    .map_err(store_err)?;
                // Remote capsules likewise enter quarantine.
                let mut quarantined = capsule.clone();
                quarantined.state = AssetState::Quarantined;
                store
                    .append_event(EvolutionEvent::CapsuleCommitted {
                        capsule: quarantined.clone(),
                    })
                    .map_err(store_err)?;
                record_remote_publisher_for_asset(remote_publishers, &envelope.sender_id, asset);
                store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: quarantined.id,
                    })
                    .map_err(store_err)?;
            }
            NetworkAsset::EvolutionEvent { event } => {
                // Provenance events: dedup declarations and spec links via
                // the known-id sets; other event kinds are filtered by
                // `should_import_remote_event`.
                let should_append = match event {
                    EvolutionEvent::MutationDeclared { mutation } => {
                        known_mutation_ids.insert(mutation.intent.id.clone())
                    }
                    EvolutionEvent::SpecLinked {
                        mutation_id,
                        spec_id,
                    } => known_spec_links.insert((mutation_id.clone(), spec_id.clone())),
                    _ if should_import_remote_event(event) => true,
                    _ => false,
                };
                if should_append {
                    store.append_event(event.clone()).map_err(store_err)?;
                    applied_count += 1;
                } else {
                    skipped_count += 1;
                }
            }
        }
    }
    let next_cursor = latest_store_cursor(store)?;
    // A resume token is only issued when the sender id normalizes cleanly.
    let resume_token = next_cursor.as_ref().and_then(|cursor| {
        normalized_sender_id(&envelope.sender_id).map(|sender| encode_resume_token(&sender, cursor))
    });

    Ok(ImportOutcome {
        imported_asset_ids,
        accepted: true,
        next_cursor: next_cursor.clone(),
        resume_token,
        sync_audit: SyncAudit {
            batch_id: next_id("sync-import"),
            requested_cursor,
            scanned_count: envelope.assets.len(),
            applied_count,
            skipped_count,
            failed_count: 0,
            failure_reasons: Vec::new(),
        },
    })
}
3044
/// Crate-relative directory holding the bundled EvoMap snapshot files.
const EVOMAP_SNAPSHOT_ROOT: &str = "assets/gep/evomap_snapshot";
/// Snapshot file containing exported gene assets.
const EVOMAP_SNAPSHOT_GENES_FILE: &str = "genes.json";
/// Snapshot file containing exported capsule assets.
const EVOMAP_SNAPSHOT_CAPSULES_FILE: &str = "capsules.json";
/// Run id recorded for capsules seeded from the built-in EvoMap snapshot.
const EVOMAP_BUILTIN_RUN_ID: &str = "builtin-evomap-seed";
3049
/// Top-level document shape of the bundled EvoMap genes snapshot file.
#[derive(Debug, Deserialize)]
struct EvoMapGeneDocument {
    #[serde(default)]
    genes: Vec<EvoMapGeneAsset>,
}

/// One gene entry as serialized in the EvoMap snapshot format.
///
/// All fields except `id` are optional/defaulted so older snapshot versions
/// still deserialize.
#[derive(Debug, Deserialize)]
struct EvoMapGeneAsset {
    id: String,
    #[serde(default)]
    category: Option<String>,
    // Raw signal values; may be strings or structured objects (see
    // `value_as_signal_string`).
    #[serde(default)]
    signals_match: Vec<Value>,
    #[serde(default)]
    strategy: Vec<String>,
    #[serde(default)]
    validation: Vec<String>,
    #[serde(default)]
    constraints: Option<EvoMapConstraintAsset>,
    #[serde(default)]
    model_name: Option<String>,
    #[serde(default)]
    schema_version: Option<String>,
    // Free-form compatibility info; its `state` is mapped by
    // `map_evomap_state`.
    #[serde(default)]
    compatibility: Option<Value>,
}

/// Optional constraint block attached to an EvoMap gene.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapConstraintAsset {
    #[serde(default)]
    max_files: Option<usize>,
    #[serde(default)]
    forbidden_paths: Vec<String>,
}
3084
/// Top-level document shape of the bundled EvoMap capsules snapshot file.
#[derive(Debug, Deserialize)]
struct EvoMapCapsuleDocument {
    #[serde(default)]
    capsules: Vec<EvoMapCapsuleAsset>,
}

/// One capsule entry as serialized in the EvoMap snapshot format.
///
/// Only `id` and the owning `gene` id are required; everything else is
/// optional/defaulted for forward/backward snapshot compatibility.
#[derive(Debug, Deserialize)]
struct EvoMapCapsuleAsset {
    id: String,
    // Id of the gene this capsule belongs to.
    gene: String,
    #[serde(default)]
    trigger: Vec<String>,
    #[serde(default)]
    summary: String,
    #[serde(default)]
    diff: Option<String>,
    #[serde(default)]
    confidence: Option<f32>,
    #[serde(default)]
    outcome: Option<EvoMapOutcomeAsset>,
    #[serde(default)]
    blast_radius: Option<EvoMapBlastRadiusAsset>,
    #[serde(default)]
    content: Option<EvoMapCapsuleContentAsset>,
    #[serde(default)]
    env_fingerprint: Option<Value>,
    #[serde(default)]
    model_name: Option<String>,
    #[serde(default)]
    schema_version: Option<String>,
    #[serde(default)]
    compatibility: Option<Value>,
}

/// Validation outcome summary attached to an EvoMap capsule.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapOutcomeAsset {
    #[serde(default)]
    status: Option<String>,
    #[serde(default)]
    score: Option<f32>,
}

/// Blast-radius summary attached to an EvoMap capsule.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapBlastRadiusAsset {
    #[serde(default)]
    lines: usize,
}

/// Content summary attached to an EvoMap capsule.
#[derive(Clone, Debug, Deserialize, Default)]
struct EvoMapCapsuleContentAsset {
    #[serde(default)]
    changed_files: Vec<String>,
}
3138
/// A built-in capsule together with the prepared mutation it replays.
#[derive(Debug)]
struct BuiltinCapsuleSeed {
    capsule: Capsule,
    mutation: PreparedMutation,
}

/// The full set of built-in seed assets loaded at bootstrap.
#[derive(Debug)]
struct BuiltinAssetBundle {
    genes: Vec<Gene>,
    capsules: Vec<BuiltinCapsuleSeed>,
}
3150
/// Returns the baseline "experience" genes shipped with the kernel.
///
/// These cover five built-in task classes (docs rewrite, CI fix, task
/// decomposition, project workflow, service bid). Each strategy encodes its
/// metadata as `key=value` strings, and all built-in genes start in
/// `Promoted` state so they are immediately selectable.
fn built_in_experience_genes() -> Vec<Gene> {
    vec![
        Gene {
            id: "builtin-experience-docs-rewrite-v1".into(),
            signals: vec!["docs.rewrite".into(), "docs".into(), "rewrite".into()],
            strategy: vec![
                "asset_origin=builtin".into(),
                "task_class=docs.rewrite".into(),
                "task_label=Docs rewrite".into(),
                "template_id=builtin-docs-rewrite-v1".into(),
                "summary=baseline docs rewrite experience".into(),
            ],
            validation: vec!["builtin-template".into(), "origin=builtin".into()],
            state: AssetState::Promoted,
        },
        Gene {
            id: "builtin-experience-ci-fix-v1".into(),
            signals: vec![
                "ci.fix".into(),
                "ci".into(),
                "test".into(),
                "failure".into(),
            ],
            strategy: vec![
                "asset_origin=builtin".into(),
                "task_class=ci.fix".into(),
                "task_label=CI fix".into(),
                "template_id=builtin-ci-fix-v1".into(),
                "summary=baseline ci stabilization experience".into(),
            ],
            validation: vec!["builtin-template".into(), "origin=builtin".into()],
            state: AssetState::Promoted,
        },
        Gene {
            id: "builtin-experience-task-decomposition-v1".into(),
            signals: vec![
                "task.decomposition".into(),
                "task".into(),
                "decomposition".into(),
                "planning".into(),
            ],
            strategy: vec![
                "asset_origin=builtin".into(),
                "task_class=task.decomposition".into(),
                "task_label=Task decomposition".into(),
                "template_id=builtin-task-decomposition-v1".into(),
                "summary=baseline task decomposition and routing experience".into(),
            ],
            validation: vec!["builtin-template".into(), "origin=builtin".into()],
            state: AssetState::Promoted,
        },
        Gene {
            id: "builtin-experience-project-workflow-v1".into(),
            signals: vec![
                "project.workflow".into(),
                "project".into(),
                "workflow".into(),
                "milestone".into(),
            ],
            strategy: vec![
                "asset_origin=builtin".into(),
                "task_class=project.workflow".into(),
                "task_label=Project workflow".into(),
                "template_id=builtin-project-workflow-v1".into(),
                "summary=baseline project proposal and merge workflow experience".into(),
            ],
            validation: vec!["builtin-template".into(), "origin=builtin".into()],
            state: AssetState::Promoted,
        },
        Gene {
            id: "builtin-experience-service-bid-v1".into(),
            signals: vec![
                "service.bid".into(),
                "service".into(),
                "bid".into(),
                "economics".into(),
            ],
            strategy: vec![
                "asset_origin=builtin".into(),
                "task_class=service.bid".into(),
                "task_label=Service bid".into(),
                "template_id=builtin-service-bid-v1".into(),
                "summary=baseline service bidding and settlement experience".into(),
            ],
            validation: vec!["builtin-template".into(), "origin=builtin".into()],
            state: AssetState::Promoted,
        },
    ]
}
3240
3241fn evomap_snapshot_path(file_name: &str) -> PathBuf {
3242 PathBuf::from(env!("CARGO_MANIFEST_DIR"))
3243 .join(EVOMAP_SNAPSHOT_ROOT)
3244 .join(file_name)
3245}
3246
3247fn read_evomap_snapshot(file_name: &str) -> Result<Option<String>, EvoKernelError> {
3248 let path = evomap_snapshot_path(file_name);
3249 if !path.exists() {
3250 return Ok(None);
3251 }
3252 fs::read_to_string(&path).map(Some).map_err(|err| {
3253 EvoKernelError::Validation(format!(
3254 "failed to read EvoMap snapshot {}: {err}",
3255 path.display()
3256 ))
3257 })
3258}
3259
3260fn compatibility_state_from_value(value: Option<&Value>) -> Option<String> {
3261 let value = value?;
3262 if let Some(state) = value.as_str() {
3263 let normalized = state.trim().to_ascii_lowercase();
3264 if normalized.is_empty() {
3265 return None;
3266 }
3267 return Some(normalized);
3268 }
3269 value
3270 .get("state")
3271 .and_then(Value::as_str)
3272 .map(str::trim)
3273 .filter(|state| !state.is_empty())
3274 .map(|state| state.to_ascii_lowercase())
3275}
3276
3277fn map_evomap_state(value: Option<&Value>) -> AssetState {
3278 match compatibility_state_from_value(value).as_deref() {
3279 Some("promoted") => AssetState::Promoted,
3280 Some("candidate") => AssetState::Candidate,
3281 Some("quarantined") => AssetState::Quarantined,
3282 Some("revoked") => AssetState::Revoked,
3283 Some("rejected") => AssetState::Archived,
3284 Some("archived") => AssetState::Archived,
3285 _ => AssetState::Candidate,
3286 }
3287}
3288
3289fn value_as_signal_string(value: &Value) -> Option<String> {
3290 match value {
3291 Value::String(raw) => {
3292 let normalized = raw.trim();
3293 if normalized.is_empty() {
3294 None
3295 } else {
3296 Some(normalized.to_string())
3297 }
3298 }
3299 Value::Object(_) => {
3300 let serialized = serde_json::to_string(value).ok()?;
3301 let normalized = serialized.trim();
3302 if normalized.is_empty() {
3303 None
3304 } else {
3305 Some(normalized.to_string())
3306 }
3307 }
3308 Value::Null => None,
3309 other => {
3310 let rendered = other.to_string();
3311 let normalized = rendered.trim();
3312 if normalized.is_empty() {
3313 None
3314 } else {
3315 Some(normalized.to_string())
3316 }
3317 }
3318 }
3319}
3320
/// Collect the set of file paths touched by a unified diff.
///
/// Paths come from `+++ b/<path>` headers (skipping `/dev/null`) and from
/// the right-hand side of `diff --git a/<left> b/<right>` lines. The
/// result is deduplicated and sorted (via `BTreeSet` ordering).
fn parse_diff_changed_files(payload: &str) -> Vec<String> {
    let mut changed = BTreeSet::new();
    for raw in payload.lines() {
        let line = raw.trim();
        if let Some(rest) = line.strip_prefix("+++ b/") {
            let path = rest.trim();
            if !path.is_empty() && path != "/dev/null" {
                changed.insert(path.to_string());
            }
        } else if let Some(rest) = line.strip_prefix("diff --git a/") {
            if let Some((_, right)) = rest.split_once(" b/") {
                let path = right.trim();
                if !path.is_empty() {
                    changed.insert(path.to_string());
                }
            }
        }
    }
    changed.into_iter().collect()
}
3343
/// Remove a surrounding Markdown code fence from a diff payload.
///
/// Input that does not open with ``` is simply trimmed. Otherwise the
/// opening fence line (including any language tag) is dropped, a trailing
/// ``` line is dropped if present, and the remaining body is trimmed.
fn strip_diff_code_fence(payload: &str) -> String {
    let trimmed = payload.trim();
    if !trimmed.starts_with("```") {
        return trimmed.to_string();
    }
    // Skip the opening fence line; the rest is the fenced body.
    let mut body: Vec<&str> = trimmed.lines().skip(1).collect();
    let has_closing_fence = body
        .last()
        .map(|line| line.trim() == "```")
        .unwrap_or(false);
    if has_closing_fence {
        body.pop();
    }
    body.join("\n").trim().to_string()
}
3363
3364fn synthetic_diff_for_capsule(capsule: &EvoMapCapsuleAsset) -> String {
3365 let file_path = format!("docs/evomap_builtin_capsules/{}.md", capsule.id);
3366 let mut content = Vec::new();
3367 content.push(format!("# EvoMap Builtin Capsule {}", capsule.id));
3368 if capsule.summary.trim().is_empty() {
3369 content.push("summary: missing".to_string());
3370 } else {
3371 content.push(format!("summary: {}", capsule.summary.trim()));
3372 }
3373 if !capsule.trigger.is_empty() {
3374 content.push(format!("trigger: {}", capsule.trigger.join(", ")));
3375 }
3376 content.push(format!("gene: {}", capsule.gene));
3377 let added = content
3378 .into_iter()
3379 .map(|line| format!("+{}", line.replace('\r', "")))
3380 .collect::<Vec<_>>()
3381 .join("\n");
3382 format!(
3383 "diff --git a/{file_path} b/{file_path}\nnew file mode 100644\nindex 0000000..1111111\n--- /dev/null\n+++ b/{file_path}\n@@ -0,0 +1,{line_count} @@\n{added}\n",
3384 line_count = added.lines().count()
3385 )
3386}
3387
3388fn normalized_diff_payload(capsule: &EvoMapCapsuleAsset) -> String {
3389 if let Some(raw) = capsule.diff.as_deref() {
3390 let normalized = strip_diff_code_fence(raw);
3391 if !normalized.trim().is_empty() {
3392 return normalized;
3393 }
3394 }
3395 synthetic_diff_for_capsule(capsule)
3396}
3397
3398fn env_field(value: Option<&Value>, keys: &[&str]) -> Option<String> {
3399 let object = value?.as_object()?;
3400 keys.iter().find_map(|key| {
3401 object
3402 .get(*key)
3403 .and_then(Value::as_str)
3404 .map(str::trim)
3405 .filter(|value| !value.is_empty())
3406 .map(|value| value.to_string())
3407 })
3408}
3409
3410fn map_evomap_env_fingerprint(value: Option<&Value>) -> EnvFingerprint {
3411 let os =
3412 env_field(value, &["os", "platform", "os_release"]).unwrap_or_else(|| "unknown".into());
3413 let target_triple = env_field(value, &["target_triple"]).unwrap_or_else(|| {
3414 let arch = env_field(value, &["arch"]).unwrap_or_else(|| "unknown".into());
3415 format!("{arch}-unknown-{os}")
3416 });
3417 EnvFingerprint {
3418 rustc_version: env_field(value, &["runtime", "rustc_version", "node_version"])
3419 .unwrap_or_else(|| "unknown".into()),
3420 cargo_lock_hash: env_field(value, &["cargo_lock_hash"]).unwrap_or_else(|| "unknown".into()),
3421 target_triple,
3422 os,
3423 }
3424}
3425
/// Load the built-in gene and capsule seed bundle from the bundled EvoMap
/// snapshot files, if both are present.
///
/// Returns `Ok(None)` when either snapshot file is missing, so callers can
/// fall back to purely local built-ins. Malformed snapshot JSON, an empty
/// gene id, an empty normalized diff, or a capsule referencing a gene not
/// accepted in this pass are all hard `Validation` errors.
fn load_evomap_builtin_assets() -> Result<Option<BuiltinAssetBundle>, EvoKernelError> {
    let genes_raw = read_evomap_snapshot(EVOMAP_SNAPSHOT_GENES_FILE)?;
    let capsules_raw = read_evomap_snapshot(EVOMAP_SNAPSHOT_CAPSULES_FILE)?;
    // Both snapshots must exist; a partial bundle is treated as absent.
    let (Some(genes_raw), Some(capsules_raw)) = (genes_raw, capsules_raw) else {
        return Ok(None);
    };

    let genes_doc: EvoMapGeneDocument = serde_json::from_str(&genes_raw).map_err(|err| {
        EvoKernelError::Validation(format!("failed to parse EvoMap genes snapshot: {err}"))
    })?;
    let capsules_doc: EvoMapCapsuleDocument =
        serde_json::from_str(&capsules_raw).map_err(|err| {
            EvoKernelError::Validation(format!("failed to parse EvoMap capsules snapshot: {err}"))
        })?;

    // Pass 1: normalize genes — dedupe by id (first occurrence wins),
    // normalize signals/strategy/validation, stamp provenance metadata.
    let mut genes = Vec::new();
    let mut known_gene_ids = BTreeSet::new();
    for source in genes_doc.genes {
        let EvoMapGeneAsset {
            id,
            category,
            signals_match,
            strategy,
            validation,
            constraints,
            model_name,
            schema_version,
            compatibility,
        } = source;
        let gene_id = id.trim();
        if gene_id.is_empty() {
            return Err(EvoKernelError::Validation(
                "EvoMap snapshot gene id must not be empty".into(),
            ));
        }
        // Duplicate ids within the snapshot are silently skipped.
        if !known_gene_ids.insert(gene_id.to_string()) {
            continue;
        }

        // Deduplicate signals while preserving their snapshot order.
        let mut seen_signals = BTreeSet::new();
        let mut signals = Vec::new();
        for signal in signals_match {
            let Some(normalized) = value_as_signal_string(&signal) else {
                continue;
            };
            if seen_signals.insert(normalized.clone()) {
                signals.push(normalized);
            }
        }
        // A gene must be matchable by something; fall back to its own id.
        if signals.is_empty() {
            signals.push(format!("gene:{}", gene_id.to_ascii_lowercase()));
        }

        let mut strategy = strategy
            .into_iter()
            .map(|item| item.trim().to_string())
            .filter(|item| !item.is_empty())
            .collect::<Vec<_>>();
        if strategy.is_empty() {
            strategy.push("evomap strategy missing in snapshot".into());
        }
        let constraint = constraints.unwrap_or_default();
        let compat_state = compatibility_state_from_value(compatibility.as_ref())
            .unwrap_or_else(|| "candidate".to_string());
        // Record snapshot provenance as key=value strategy metadata;
        // ensure_strategy_metadata never overwrites keys already present.
        ensure_strategy_metadata(&mut strategy, "asset_origin", "builtin_evomap");
        ensure_strategy_metadata(
            &mut strategy,
            "evomap_category",
            category.as_deref().unwrap_or("unknown"),
        );
        ensure_strategy_metadata(
            &mut strategy,
            "evomap_constraints_max_files",
            &constraint.max_files.unwrap_or_default().to_string(),
        );
        ensure_strategy_metadata(
            &mut strategy,
            "evomap_constraints_forbidden_paths",
            &constraint.forbidden_paths.join("|"),
        );
        ensure_strategy_metadata(
            &mut strategy,
            "evomap_model_name",
            model_name.as_deref().unwrap_or("unknown"),
        );
        ensure_strategy_metadata(
            &mut strategy,
            "evomap_schema_version",
            schema_version.as_deref().unwrap_or("1.5.0"),
        );
        ensure_strategy_metadata(&mut strategy, "evomap_compatibility_state", &compat_state);

        let mut validation = validation
            .into_iter()
            .map(|item| item.trim().to_string())
            .filter(|item| !item.is_empty())
            .collect::<Vec<_>>();
        if validation.is_empty() {
            validation.push("evomap-builtin-seed".into());
        }

        genes.push(Gene {
            id: gene_id.to_string(),
            signals,
            strategy,
            validation,
            state: map_evomap_state(compatibility.as_ref()),
        });
    }

    // Pass 2: build capsule seeds. Re-derive the accepted gene ids;
    // capsules may only reference genes that survived pass 1.
    let mut capsules = Vec::new();
    let known_gene_ids = genes
        .iter()
        .map(|gene| gene.id.clone())
        .collect::<BTreeSet<_>>();
    for source in capsules_doc.capsules {
        let EvoMapCapsuleAsset {
            id,
            gene,
            trigger,
            summary,
            diff,
            confidence,
            outcome,
            blast_radius,
            content,
            env_fingerprint,
            model_name: _model_name,
            schema_version: _schema_version,
            compatibility,
        } = source;
        // Rebuilt copy that owns the moved-out `diff`, so
        // normalized_diff_payload can borrow a whole capsule value.
        let source_for_diff = EvoMapCapsuleAsset {
            id: id.clone(),
            gene: gene.clone(),
            trigger: trigger.clone(),
            summary: summary.clone(),
            diff,
            confidence,
            outcome: outcome.clone(),
            blast_radius: blast_radius.clone(),
            content: content.clone(),
            env_fingerprint: env_fingerprint.clone(),
            model_name: None,
            schema_version: None,
            compatibility: compatibility.clone(),
        };
        if !known_gene_ids.contains(gene.as_str()) {
            return Err(EvoKernelError::Validation(format!(
                "EvoMap capsule {} references unknown gene {}",
                id, gene
            )));
        }
        let normalized_diff = normalized_diff_payload(&source_for_diff);
        if normalized_diff.trim().is_empty() {
            return Err(EvoKernelError::Validation(format!(
                "EvoMap capsule {} has empty normalized diff payload",
                id
            )));
        }
        // Changed files: prefer explicit snapshot content, then paths
        // parsed from the diff, then a synthetic docs path as last resort.
        let mut changed_files = content
            .as_ref()
            .map(|content| {
                content
                    .changed_files
                    .iter()
                    .map(|item| item.trim().to_string())
                    .filter(|item| !item.is_empty())
                    .collect::<Vec<_>>()
            })
            .unwrap_or_default();
        if changed_files.is_empty() {
            changed_files = parse_diff_changed_files(&normalized_diff);
        }
        if changed_files.is_empty() {
            changed_files.push(format!("docs/evomap_builtin_capsules/{}.md", id));
        }

        // Default confidence of 0.6 when neither the capsule nor its
        // outcome supplies a score; always clamped into [0, 1].
        let confidence = confidence
            .or_else(|| outcome.as_ref().and_then(|outcome| outcome.score))
            .unwrap_or(0.6)
            .clamp(0.0, 1.0);
        // Missing outcome/status is optimistically counted as success.
        let status_success = outcome
            .as_ref()
            .and_then(|outcome| outcome.status.as_deref())
            .map(|status| status.eq_ignore_ascii_case("success"))
            .unwrap_or(true);
        let blast_radius = blast_radius.unwrap_or_default();
        let mutation_id = format!("builtin-evomap-mutation-{}", id);
        let intent = MutationIntent {
            id: mutation_id.clone(),
            intent: if summary.trim().is_empty() {
                format!("apply EvoMap capsule {}", id)
            } else {
                summary.trim().to_string()
            },
            target: MutationTarget::Paths {
                allow: changed_files.clone(),
            },
            expected_effect: format!("seed replay candidate from EvoMap capsule {}", id),
            risk: RiskLevel::Low,
            signals: if trigger.is_empty() {
                vec![format!("capsule:{}", id.to_ascii_lowercase())]
            } else {
                trigger
                    .iter()
                    .map(|signal| signal.trim().to_ascii_lowercase())
                    .filter(|signal| !signal.is_empty())
                    .collect::<Vec<_>>()
            },
            spec_id: None,
        };
        let mutation = PreparedMutation {
            intent,
            artifact: oris_evolution::MutationArtifact {
                encoding: ArtifactEncoding::UnifiedDiff,
                payload: normalized_diff.clone(),
                base_revision: None,
                content_hash: compute_artifact_hash(&normalized_diff),
            },
        };
        let capsule = Capsule {
            id: id.clone(),
            gene_id: gene.clone(),
            mutation_id,
            run_id: EVOMAP_BUILTIN_RUN_ID.to_string(),
            diff_hash: compute_artifact_hash(&normalized_diff),
            confidence,
            env: map_evomap_env_fingerprint(env_fingerprint.as_ref()),
            outcome: Outcome {
                success: status_success,
                validation_profile: "evomap-builtin-seed".into(),
                validation_duration_ms: 0,
                changed_files,
                validator_hash: "builtin-evomap".into(),
                lines_changed: blast_radius.lines,
                replay_verified: false,
            },
            state: map_evomap_state(compatibility.as_ref()),
        };
        capsules.push(BuiltinCapsuleSeed { capsule, mutation });
    }

    Ok(Some(BuiltinAssetBundle { genes, capsules }))
}
3670
/// Seed the store with built-in experience genes (and EvoMap snapshot
/// assets, when present), skipping anything already known to the store.
///
/// Each newly imported asset is recorded as a local import plus its
/// projection event; genes/capsules then receive a state-dependent
/// follow-up (promotion, quarantine, or nothing for revoked/archived).
/// Returns an `ImportOutcome` whose audit counts scanned vs applied seeds.
fn ensure_builtin_experience_assets_in_store(
    store: &dyn EvolutionStore,
    sender_id: String,
) -> Result<ImportOutcome, EvoKernelError> {
    let (events, projection) = scan_projection(store)?;
    // Existing asset ids — used to make this seeding idempotent.
    let mut known_gene_ids = projection
        .genes
        .into_iter()
        .map(|gene| gene.id)
        .collect::<BTreeSet<_>>();
    let mut known_capsule_ids = projection
        .capsules
        .into_iter()
        .map(|capsule| capsule.id)
        .collect::<BTreeSet<_>>();
    let mut known_mutation_ids = BTreeSet::new();
    for stored in &events {
        if let EvolutionEvent::MutationDeclared { mutation } = &stored.event {
            known_mutation_ids.insert(mutation.intent.id.clone());
        }
    }
    let normalized_sender = normalized_sender_id(&sender_id);
    let mut imported_asset_ids = Vec::new();
    // Start from the hard-coded built-in genes, then layer on any EvoMap
    // snapshot bundle shipped with the crate.
    let mut bundle = BuiltinAssetBundle {
        genes: built_in_experience_genes(),
        capsules: Vec::new(),
    };
    if let Some(snapshot_bundle) = load_evomap_builtin_assets()? {
        bundle.genes.extend(snapshot_bundle.genes);
        bundle.capsules.extend(snapshot_bundle.capsules);
    }
    let scanned_count = bundle.genes.len() + bundle.capsules.len();

    for gene in bundle.genes {
        // Already present (or duplicated within the bundle) — skip.
        if !known_gene_ids.insert(gene.id.clone()) {
            continue;
        }

        store
            .append_event(EvolutionEvent::RemoteAssetImported {
                source: CandidateSource::Local,
                asset_ids: vec![gene.id.clone()],
                sender_id: normalized_sender.clone(),
            })
            .map_err(store_err)?;
        store
            .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
            .map_err(store_err)?;
        match gene.state {
            // Revoked/archived seeds are projected but get no promotion flow.
            AssetState::Revoked | AssetState::Archived => {}
            AssetState::Quarantined => {
                store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: gene.id.clone(),
                        state: AssetState::Quarantined,
                        reason:
                            "built-in EvoMap asset requires additional validation before promotion"
                                .into(),
                        reason_code: TransitionReasonCode::DowngradeBuiltinRequiresValidation,
                    })
                    .map_err(store_err)?;
            }
            // Candidates are promoted alongside promoted seeds so the
            // cold-start experience set is immediately usable.
            AssetState::Promoted | AssetState::Candidate => {
                store
                    .append_event(EvolutionEvent::PromotionEvaluated {
                        gene_id: gene.id.clone(),
                        state: AssetState::Promoted,
                        reason: "built-in experience asset promoted for cold-start compatibility"
                            .into(),
                        reason_code: TransitionReasonCode::PromotionBuiltinColdStartCompatibility,
                    })
                    .map_err(store_err)?;
                store
                    .append_event(EvolutionEvent::GenePromoted {
                        gene_id: gene.id.clone(),
                    })
                    .map_err(store_err)?;
            }
        }
        imported_asset_ids.push(gene.id.clone());
    }

    for seed in bundle.capsules {
        // Capsules must reference a gene that is known after the gene pass.
        if !known_gene_ids.contains(seed.capsule.gene_id.as_str()) {
            return Err(EvoKernelError::Validation(format!(
                "built-in capsule {} references unknown gene {}",
                seed.capsule.id, seed.capsule.gene_id
            )));
        }
        // Declare the backing mutation once, even if the capsule itself is
        // later skipped as a duplicate.
        if known_mutation_ids.insert(seed.mutation.intent.id.clone()) {
            store
                .append_event(EvolutionEvent::MutationDeclared {
                    mutation: seed.mutation.clone(),
                })
                .map_err(store_err)?;
        }
        if !known_capsule_ids.insert(seed.capsule.id.clone()) {
            continue;
        }
        store
            .append_event(EvolutionEvent::RemoteAssetImported {
                source: CandidateSource::Local,
                asset_ids: vec![seed.capsule.id.clone()],
                sender_id: normalized_sender.clone(),
            })
            .map_err(store_err)?;
        store
            .append_event(EvolutionEvent::CapsuleCommitted {
                capsule: seed.capsule.clone(),
            })
            .map_err(store_err)?;
        match seed.capsule.state {
            AssetState::Revoked | AssetState::Archived => {}
            AssetState::Quarantined => {
                store
                    .append_event(EvolutionEvent::CapsuleQuarantined {
                        capsule_id: seed.capsule.id.clone(),
                    })
                    .map_err(store_err)?;
            }
            AssetState::Promoted | AssetState::Candidate => {
                store
                    .append_event(EvolutionEvent::CapsuleReleased {
                        capsule_id: seed.capsule.id.clone(),
                        state: AssetState::Promoted,
                    })
                    .map_err(store_err)?;
            }
        }
        imported_asset_ids.push(seed.capsule.id.clone());
    }

    let next_cursor = latest_store_cursor(store)?;
    // A resume token exists only when both a cursor and a sender are known.
    let resume_token = next_cursor.as_ref().and_then(|cursor| {
        normalized_sender
            .as_deref()
            .map(|sender| encode_resume_token(sender, cursor))
    });
    let applied_count = imported_asset_ids.len();
    let skipped_count = scanned_count.saturating_sub(applied_count);

    Ok(ImportOutcome {
        imported_asset_ids,
        accepted: true,
        next_cursor: next_cursor.clone(),
        resume_token,
        sync_audit: SyncAudit {
            batch_id: next_id("sync-import"),
            requested_cursor: None,
            scanned_count,
            applied_count,
            skipped_count,
            failed_count: 0,
            failure_reasons: Vec::new(),
        },
    })
}
3830
/// Find the value of a `key=value` metadata entry in a strategy list.
///
/// Key comparison is whitespace-tolerant and case-insensitive. Entries
/// without `=` or with a blank value are skipped, so a later entry with a
/// real value can still win; the first non-blank match is returned.
fn strategy_metadata_value(strategy: &[String], key: &str) -> Option<String> {
    for entry in strategy {
        let Some((entry_key, entry_value)) = entry.split_once('=') else {
            continue;
        };
        if !entry_key.trim().eq_ignore_ascii_case(key) {
            continue;
        }
        let value = entry_value.trim();
        if !value.is_empty() {
            return Some(value.to_string());
        }
    }
    None
}
3846
3847fn ensure_strategy_metadata(strategy: &mut Vec<String>, key: &str, value: &str) {
3848 let normalized = value.trim();
3849 if normalized.is_empty() || strategy_metadata_value(strategy, key).is_some() {
3850 return;
3851 }
3852 strategy.push(format!("{key}={normalized}"));
3853}
3854
/// Cap the number of promoted `reported_experience` genes per task class.
///
/// Keeps the `keep_latest` most recently updated reported-experience genes
/// for `task_class` promoted; older ones are revoked and their capsules
/// quarantined. A blank task class or `keep_latest == 0` disables
/// enforcement.
fn enforce_reported_experience_retention(
    store: &dyn EvolutionStore,
    task_class: &str,
    keep_latest: usize,
) -> Result<(), EvoKernelError> {
    let task_class = task_class.trim();
    if task_class.is_empty() || keep_latest == 0 {
        return Ok(());
    }

    let (_, projection) = scan_projection(store)?;
    // Candidates: promoted genes whose strategy metadata marks them as
    // reported experience for this task class, paired with their last
    // update timestamp for recency ordering.
    let mut candidates = projection
        .genes
        .iter()
        .filter(|gene| gene.state == AssetState::Promoted)
        .filter_map(|gene| {
            let origin = strategy_metadata_value(&gene.strategy, "asset_origin")?;
            if !origin.eq_ignore_ascii_case("reported_experience") {
                return None;
            }
            let gene_task_class = strategy_metadata_value(&gene.strategy, "task_class")?;
            if !gene_task_class.eq_ignore_ascii_case(task_class) {
                return None;
            }
            // Genes with no recorded update timestamp sort as oldest.
            let updated_at = projection
                .last_updated_at
                .get(&gene.id)
                .cloned()
                .unwrap_or_default();
            Some((gene.id.clone(), updated_at))
        })
        .collect::<Vec<_>>();
    if candidates.len() <= keep_latest {
        return Ok(());
    }

    // Newest first; ties broken by descending id so the cut is stable.
    candidates.sort_by(|left, right| right.1.cmp(&left.1).then_with(|| right.0.cmp(&left.0)));
    let stale_gene_ids = candidates
        .into_iter()
        .skip(keep_latest)
        .map(|(gene_id, _)| gene_id)
        .collect::<BTreeSet<_>>();
    if stale_gene_ids.is_empty() {
        return Ok(());
    }

    let reason =
        format!("reported experience retention limit exceeded for task_class={task_class}");
    for gene_id in &stale_gene_ids {
        store
            .append_event(EvolutionEvent::GeneRevoked {
                gene_id: gene_id.clone(),
                reason: reason.clone(),
            })
            .map_err(store_err)?;
    }

    // Quarantine every capsule hanging off a revoked gene.
    let stale_capsule_ids = projection
        .capsules
        .iter()
        .filter(|capsule| stale_gene_ids.contains(&capsule.gene_id))
        .map(|capsule| capsule.id.clone())
        .collect::<BTreeSet<_>>();
    for capsule_id in stale_capsule_ids {
        store
            .append_event(EvolutionEvent::CapsuleQuarantined { capsule_id })
            .map_err(store_err)?;
    }
    Ok(())
}
3925
/// Project a locally reported experience into the store as a promoted gene
/// and trim older reported experiences for the same task class.
///
/// Signals are lowercased and deduplicated (order preserved) and must be
/// non-empty; strategy and validation entries are trimmed/deduplicated
/// with defaults supplied when empty. Emits import, projection, and
/// promotion events, applies the retention limit, and returns an
/// `ImportOutcome` that always counts exactly one applied asset.
fn record_reported_experience_in_store(
    store: &dyn EvolutionStore,
    sender_id: String,
    gene_id: String,
    signals: Vec<String>,
    strategy: Vec<String>,
    validation: Vec<String>,
) -> Result<ImportOutcome, EvoKernelError> {
    let gene_id = gene_id.trim();
    if gene_id.is_empty() {
        return Err(EvoKernelError::Validation(
            "reported experience gene_id must not be empty".into(),
        ));
    }

    // Normalize signals: lowercase, drop blanks, dedupe preserving order.
    let mut unique_signals = BTreeSet::new();
    let mut normalized_signals = Vec::new();
    for signal in signals {
        let normalized = signal.trim().to_ascii_lowercase();
        if normalized.is_empty() {
            continue;
        }
        if unique_signals.insert(normalized.clone()) {
            normalized_signals.push(normalized);
        }
    }
    if normalized_signals.is_empty() {
        return Err(EvoKernelError::Validation(
            "reported experience signals must not be empty".into(),
        ));
    }

    // Normalize strategy entries the same way (but case is preserved).
    let mut unique_strategy = BTreeSet::new();
    let mut normalized_strategy = Vec::new();
    for entry in strategy {
        let normalized = entry.trim().to_string();
        if normalized.is_empty() {
            continue;
        }
        if unique_strategy.insert(normalized.clone()) {
            normalized_strategy.push(normalized);
        }
    }
    if normalized_strategy.is_empty() {
        normalized_strategy.push("reported local replay experience".into());
    }
    // Derive task class/label from metadata, falling back to the first
    // signal, then to a generic default.
    let task_class_id = strategy_metadata_value(&normalized_strategy, "task_class")
        .or_else(|| normalized_signals.first().cloned())
        .unwrap_or_else(|| "reported-experience".into());
    let task_label = strategy_metadata_value(&normalized_strategy, "task_label")
        .or_else(|| normalized_signals.first().cloned())
        .unwrap_or_else(|| task_class_id.clone());
    // Stamp provenance metadata without overwriting caller-supplied keys.
    ensure_strategy_metadata(
        &mut normalized_strategy,
        "asset_origin",
        "reported_experience",
    );
    ensure_strategy_metadata(&mut normalized_strategy, "task_class", &task_class_id);
    ensure_strategy_metadata(&mut normalized_strategy, "task_label", &task_label);

    let mut unique_validation = BTreeSet::new();
    let mut normalized_validation = Vec::new();
    for entry in validation {
        let normalized = entry.trim().to_string();
        if normalized.is_empty() {
            continue;
        }
        if unique_validation.insert(normalized.clone()) {
            normalized_validation.push(normalized);
        }
    }
    if normalized_validation.is_empty() {
        normalized_validation.push("a2a.tasks.report".into());
    }

    let gene = Gene {
        id: gene_id.to_string(),
        signals: normalized_signals,
        strategy: normalized_strategy,
        validation: normalized_validation,
        state: AssetState::Promoted,
    };
    let normalized_sender = normalized_sender_id(&sender_id);

    // Event sequence: import record, projection, promotion evaluation,
    // then the promotion itself.
    store
        .append_event(EvolutionEvent::RemoteAssetImported {
            source: CandidateSource::Local,
            asset_ids: vec![gene.id.clone()],
            sender_id: normalized_sender.clone(),
        })
        .map_err(store_err)?;
    store
        .append_event(EvolutionEvent::GeneProjected { gene: gene.clone() })
        .map_err(store_err)?;
    store
        .append_event(EvolutionEvent::PromotionEvaluated {
            gene_id: gene.id.clone(),
            state: AssetState::Promoted,
            reason: "trusted local report promoted reusable experience".into(),
            reason_code: TransitionReasonCode::PromotionTrustedLocalReport,
        })
        .map_err(store_err)?;
    store
        .append_event(EvolutionEvent::GenePromoted {
            gene_id: gene.id.clone(),
        })
        .map_err(store_err)?;
    // Keep only the most recent reported experiences for this task class.
    enforce_reported_experience_retention(
        store,
        &task_class_id,
        REPORTED_EXPERIENCE_RETENTION_LIMIT,
    )?;

    let imported_asset_ids = vec![gene.id];
    let next_cursor = latest_store_cursor(store)?;
    // A resume token needs both a cursor and a non-blank sender.
    let resume_token = next_cursor.as_ref().and_then(|cursor| {
        normalized_sender
            .as_deref()
            .map(|sender| encode_resume_token(sender, cursor))
    });
    Ok(ImportOutcome {
        imported_asset_ids,
        accepted: true,
        next_cursor,
        resume_token,
        sync_audit: SyncAudit {
            batch_id: next_id("sync-import"),
            requested_cursor: None,
            scanned_count: 1,
            applied_count: 1,
            skipped_count: 0,
            failed_count: 0,
            failure_reasons: Vec::new(),
        },
    })
}
4062
/// Normalize a sender id by trimming whitespace; blank ids become `None`.
fn normalized_sender_id(sender_id: &str) -> Option<String> {
    let trimmed = sender_id.trim();
    (!trimmed.is_empty()).then(|| trimmed.to_string())
}
4071
4072fn record_manifest_validation(
4073 store: &dyn EvolutionStore,
4074 envelope: &EvolutionEnvelope,
4075 accepted: bool,
4076 reason: impl Into<String>,
4077) -> Result<(), EvoKernelError> {
4078 let manifest = envelope.manifest.as_ref();
4079 let sender_id = manifest
4080 .and_then(|value| normalized_sender_id(&value.sender_id))
4081 .or_else(|| normalized_sender_id(&envelope.sender_id));
4082 let publisher = manifest.and_then(|value| normalized_sender_id(&value.publisher));
4083 let asset_ids = manifest
4084 .map(|value| value.asset_ids.clone())
4085 .unwrap_or_else(|| EvolutionEnvelope::manifest_asset_ids(&envelope.assets));
4086
4087 store
4088 .append_event(EvolutionEvent::ManifestValidated {
4089 accepted,
4090 reason: reason.into(),
4091 sender_id,
4092 publisher,
4093 asset_ids,
4094 })
4095 .map_err(store_err)?;
4096 Ok(())
4097}
4098
4099fn record_remote_publisher_for_asset(
4100 remote_publishers: Option<&Mutex<BTreeMap<String, String>>>,
4101 sender_id: &str,
4102 asset: &NetworkAsset,
4103) {
4104 let Some(remote_publishers) = remote_publishers else {
4105 return;
4106 };
4107 let sender_id = sender_id.trim();
4108 if sender_id.is_empty() {
4109 return;
4110 }
4111 let Ok(mut publishers) = remote_publishers.lock() else {
4112 return;
4113 };
4114 match asset {
4115 NetworkAsset::Gene { gene } => {
4116 publishers.insert(gene.id.clone(), sender_id.to_string());
4117 }
4118 NetworkAsset::Capsule { capsule } => {
4119 publishers.insert(capsule.id.clone(), sender_id.to_string());
4120 }
4121 NetworkAsset::EvolutionEvent { .. } => {}
4122 }
4123}
4124
4125fn remote_publishers_by_asset_from_store(store: &dyn EvolutionStore) -> BTreeMap<String, String> {
4126 let Ok(events) = store.scan(1) else {
4127 return BTreeMap::new();
4128 };
4129 remote_publishers_by_asset_from_events(&events)
4130}
4131
/// Derive, from the raw event log, which remote sender published each
/// locally-known gene or capsule.
///
/// Import events and asset projections can appear in either order, so the
/// pass tracks both directions: an import seen before its asset is
/// remembered in `imported_asset_publishers` and resolved when the asset
/// materializes; an import seen after resolves immediately if the asset id
/// is already known. Only `CandidateSource::Remote` imports with a
/// non-blank sender are counted; later imports overwrite earlier ones.
fn remote_publishers_by_asset_from_events(
    events: &[StoredEvolutionEvent],
) -> BTreeMap<String, String> {
    let mut imported_asset_publishers = BTreeMap::<String, String>::new();
    let mut known_gene_ids = BTreeSet::<String>::new();
    let mut known_capsule_ids = BTreeSet::<String>::new();
    let mut publishers_by_asset = BTreeMap::<String, String>::new();

    for stored in events {
        match &stored.event {
            EvolutionEvent::RemoteAssetImported {
                source: CandidateSource::Remote,
                asset_ids,
                sender_id,
            } => {
                let Some(sender_id) = sender_id.as_deref().and_then(normalized_sender_id) else {
                    continue;
                };
                for asset_id in asset_ids {
                    // Always record the pending attribution; publish it
                    // immediately when the asset is already known.
                    imported_asset_publishers.insert(asset_id.clone(), sender_id.clone());
                    if known_gene_ids.contains(asset_id) || known_capsule_ids.contains(asset_id) {
                        publishers_by_asset.insert(asset_id.clone(), sender_id.clone());
                    }
                }
            }
            EvolutionEvent::GeneProjected { gene } => {
                known_gene_ids.insert(gene.id.clone());
                // Resolve a pending attribution recorded before projection.
                if let Some(sender_id) = imported_asset_publishers.get(&gene.id) {
                    publishers_by_asset.insert(gene.id.clone(), sender_id.clone());
                }
            }
            EvolutionEvent::CapsuleCommitted { capsule } => {
                known_capsule_ids.insert(capsule.id.clone());
                if let Some(sender_id) = imported_asset_publishers.get(&capsule.id) {
                    publishers_by_asset.insert(capsule.id.clone(), sender_id.clone());
                }
            }
            _ => {}
        }
    }

    publishers_by_asset
}
4175
4176fn should_import_remote_event(event: &EvolutionEvent) -> bool {
4177 matches!(
4178 event,
4179 EvolutionEvent::MutationDeclared { .. } | EvolutionEvent::SpecLinked { .. }
4180 )
4181}
4182
/// Answer a remote fetch query with the promoted assets matching its
/// signal filter, optionally restricted to changes since a sync cursor.
///
/// Signal matching is case-insensitive and bidirectional by substring
/// (query signal contained in asset signal or vice versa); an empty query
/// signal list matches everything. When the caller supplies a cursor or
/// resume token, only capsules touched after that sequence are returned,
/// plus any genes needed to keep those capsules resolvable.
fn fetch_assets_from_store(
    store: &dyn EvolutionStore,
    responder_id: impl Into<String>,
    query: &FetchQuery,
) -> Result<FetchResponse, EvoKernelError> {
    let (events, projection) = scan_projection(store)?;
    let requested_cursor = resolve_requested_cursor(
        &query.sender_id,
        query.since_cursor.as_deref(),
        query.resume_token.as_deref(),
    )?;
    // No cursor means "from the beginning" (sequence 0).
    let since_seq = requested_cursor
        .as_deref()
        .and_then(parse_sync_cursor_seq)
        .unwrap_or(0);
    let normalized_signals: Vec<String> = query
        .signals
        .iter()
        .map(|signal| signal.trim().to_ascii_lowercase())
        .filter(|signal| !signal.is_empty())
        .collect();
    let matches_any_signal = |candidate: &str| {
        if normalized_signals.is_empty() {
            return true;
        }
        let candidate = candidate.to_ascii_lowercase();
        // Substring match in either direction keeps matching fuzzy.
        normalized_signals
            .iter()
            .any(|signal| candidate.contains(signal) || signal.contains(&candidate))
    };

    // Only promoted genes, and only capsules attached to a matched gene.
    let matched_genes: Vec<Gene> = projection
        .genes
        .into_iter()
        .filter(|gene| gene.state == AssetState::Promoted)
        .filter(|gene| gene.signals.iter().any(|signal| matches_any_signal(signal)))
        .collect();
    let matched_gene_ids: BTreeSet<String> =
        matched_genes.iter().map(|gene| gene.id.clone()).collect();
    let matched_capsules: Vec<Capsule> = projection
        .capsules
        .into_iter()
        .filter(|capsule| capsule.state == AssetState::Promoted)
        .filter(|capsule| matched_gene_ids.contains(&capsule.gene_id))
        .collect();
    // Full (cursor-less) asset set, kept for the audit's scanned count.
    let all_assets = replay_export_assets(&events, matched_genes.clone(), matched_capsules.clone());
    let (selected_genes, selected_capsules) = if requested_cursor.is_some() {
        // Incremental fetch: keep only capsules whose capsule or backing
        // mutation changed after the cursor, plus genes that changed or
        // that a selected capsule depends on.
        let delta = delta_window(&events, since_seq);
        let selected_capsules = matched_capsules
            .into_iter()
            .filter(|capsule| {
                delta.changed_capsule_ids.contains(&capsule.id)
                    || delta.changed_mutation_ids.contains(&capsule.mutation_id)
            })
            .collect::<Vec<_>>();
        let selected_gene_ids = selected_capsules
            .iter()
            .map(|capsule| capsule.gene_id.clone())
            .collect::<BTreeSet<_>>();
        let selected_genes = matched_genes
            .into_iter()
            .filter(|gene| {
                delta.changed_gene_ids.contains(&gene.id) || selected_gene_ids.contains(&gene.id)
            })
            .collect::<Vec<_>>();
        (selected_genes, selected_capsules)
    } else {
        (matched_genes, matched_capsules)
    };
    let assets = replay_export_assets(&events, selected_genes, selected_capsules);
    // Next cursor points at the most recent stored event, if any.
    let next_cursor = events.last().map(|stored| format_sync_cursor(stored.seq));
    let resume_token = next_cursor
        .as_ref()
        .map(|cursor| encode_resume_token(&query.sender_id, cursor));
    let applied_count = assets.len();
    let skipped_count = all_assets.len().saturating_sub(applied_count);

    Ok(FetchResponse {
        sender_id: responder_id.into(),
        assets,
        next_cursor: next_cursor.clone(),
        resume_token,
        sync_audit: SyncAudit {
            batch_id: next_id("sync-fetch"),
            requested_cursor,
            scanned_count: all_assets.len(),
            applied_count,
            skipped_count,
            failed_count: 0,
            failure_reasons: Vec::new(),
        },
    })
}
4276
4277fn revoke_assets_in_store(
4278 store: &dyn EvolutionStore,
4279 notice: &RevokeNotice,
4280) -> Result<RevokeNotice, EvoKernelError> {
4281 let projection = projection_snapshot(store)?;
4282 let requested: BTreeSet<String> = notice
4283 .asset_ids
4284 .iter()
4285 .map(|asset_id| asset_id.trim().to_string())
4286 .filter(|asset_id| !asset_id.is_empty())
4287 .collect();
4288 let mut revoked_gene_ids = BTreeSet::new();
4289 let mut quarantined_capsule_ids = BTreeSet::new();
4290
4291 for gene in &projection.genes {
4292 if requested.contains(&gene.id) {
4293 revoked_gene_ids.insert(gene.id.clone());
4294 }
4295 }
4296 for capsule in &projection.capsules {
4297 if requested.contains(&capsule.id) {
4298 quarantined_capsule_ids.insert(capsule.id.clone());
4299 revoked_gene_ids.insert(capsule.gene_id.clone());
4300 }
4301 }
4302 for capsule in &projection.capsules {
4303 if revoked_gene_ids.contains(&capsule.gene_id) {
4304 quarantined_capsule_ids.insert(capsule.id.clone());
4305 }
4306 }
4307
4308 for gene_id in &revoked_gene_ids {
4309 store
4310 .append_event(EvolutionEvent::GeneRevoked {
4311 gene_id: gene_id.clone(),
4312 reason: notice.reason.clone(),
4313 })
4314 .map_err(store_err)?;
4315 }
4316 for capsule_id in &quarantined_capsule_ids {
4317 store
4318 .append_event(EvolutionEvent::CapsuleQuarantined {
4319 capsule_id: capsule_id.clone(),
4320 })
4321 .map_err(store_err)?;
4322 }
4323
4324 let mut affected_ids: Vec<String> = revoked_gene_ids.into_iter().collect();
4325 affected_ids.extend(quarantined_capsule_ids);
4326 affected_ids.sort();
4327 affected_ids.dedup();
4328
4329 Ok(RevokeNotice {
4330 sender_id: notice.sender_id.clone(),
4331 asset_ids: affected_ids,
4332 reason: notice.reason.clone(),
4333 })
4334}
4335
/// Aggregate replay/mutation/revocation metrics from the evolution store.
///
/// Scans the full event log once and pairs it with the rebuilt projection:
/// counters come from raw events (reuse, validation failures, declarations,
/// promotions, revocations) while gauges (promoted gene/capsule counts, last
/// sequence) come from the projection. Store errors surface as
/// `EvoKernelError`.
fn evolution_metrics_snapshot(
    store: &dyn EvolutionStore,
) -> Result<EvolutionMetricsSnapshot, EvoKernelError> {
    let (events, projection) = scan_projection(store)?;
    // gene id -> deterministic (task_class_id, task_label) descriptor, used
    // below to attribute replay reuse events to a task class.
    let gene_task_classes = projection
        .genes
        .iter()
        .map(|gene| (gene.id.clone(), replay_task_descriptor(&gene.signals)))
        .collect::<BTreeMap<_, _>>();
    let replay_success_total = events
        .iter()
        .filter(|stored| matches!(stored.event, EvolutionEvent::CapsuleReused { .. }))
        .count() as u64;
    // Per-task-class reuse tallies. Reuses whose gene is missing from the
    // current projection still count in the global total but are not
    // attributed to any task class here.
    let mut replay_task_class_totals = BTreeMap::<(String, String), u64>::new();
    for stored in &events {
        if let EvolutionEvent::CapsuleReused { gene_id, .. } = &stored.event {
            if let Some((task_class_id, task_label)) = gene_task_classes.get(gene_id) {
                *replay_task_class_totals
                    .entry((task_class_id.clone(), task_label.clone()))
                    .or_insert(0) += 1;
            }
        }
    }
    // Each successful replay is reported as exactly one avoided reasoning
    // step, so both per-class counters carry the same value.
    let replay_task_classes = replay_task_class_totals
        .into_iter()
        .map(
            |((task_class_id, task_label), replay_success_total)| ReplayTaskClassMetrics {
                task_class_id,
                task_label,
                replay_success_total,
                reasoning_steps_avoided_total: replay_success_total,
            },
        )
        .collect::<Vec<_>>();
    let replay_reasoning_avoided_total = replay_task_classes
        .iter()
        .map(|entry| entry.reasoning_steps_avoided_total)
        .sum();
    let replay_failures_total = events
        .iter()
        .filter(|stored| is_replay_validation_failure(&stored.event))
        .count() as u64;
    // Attempts = successes + gene-linked validation failures.
    let replay_attempts_total = replay_success_total + replay_failures_total;
    let confidence_revalidations_total = events
        .iter()
        .filter(|stored| is_confidence_revalidation_event(&stored.event))
        .count() as u64;
    let mutation_declared_total = events
        .iter()
        .filter(|stored| matches!(stored.event, EvolutionEvent::MutationDeclared { .. }))
        .count() as u64;
    let promoted_mutations_total = events
        .iter()
        .filter(|stored| matches!(stored.event, EvolutionEvent::GenePromoted { .. }))
        .count() as u64;
    let gene_revocations_total = events
        .iter()
        .filter(|stored| matches!(stored.event, EvolutionEvent::GeneRevoked { .. }))
        .count() as u64;
    // Sliding one-hour window for the velocity/frequency gauges.
    let cutoff = Utc::now() - Duration::hours(1);
    let mutation_velocity_last_hour = count_recent_events(&events, cutoff, |event| {
        matches!(event, EvolutionEvent::MutationDeclared { .. })
    });
    let revoke_frequency_last_hour = count_recent_events(&events, cutoff, |event| {
        matches!(event, EvolutionEvent::GeneRevoked { .. })
    });
    let promoted_genes = projection
        .genes
        .iter()
        .filter(|gene| gene.state == AssetState::Promoted)
        .count() as u64;
    let promoted_capsules = projection
        .capsules
        .iter()
        .filter(|capsule| capsule.state == AssetState::Promoted)
        .count() as u64;

    Ok(EvolutionMetricsSnapshot {
        replay_attempts_total,
        replay_success_total,
        replay_success_rate: safe_ratio(replay_success_total, replay_attempts_total),
        confidence_revalidations_total,
        replay_reasoning_avoided_total,
        replay_task_classes,
        mutation_declared_total,
        promoted_mutations_total,
        promotion_ratio: safe_ratio(promoted_mutations_total, mutation_declared_total),
        gene_revocations_total,
        mutation_velocity_last_hour,
        revoke_frequency_last_hour,
        promoted_genes,
        promoted_capsules,
        // 0 is the sentinel for an empty event log.
        last_event_seq: events.last().map(|stored| stored.seq).unwrap_or(0),
    })
}
4431
4432fn evolution_health_snapshot(snapshot: &EvolutionMetricsSnapshot) -> EvolutionHealthSnapshot {
4433 EvolutionHealthSnapshot {
4434 status: "ok".into(),
4435 last_event_seq: snapshot.last_event_seq,
4436 promoted_genes: snapshot.promoted_genes,
4437 promoted_capsules: snapshot.promoted_capsules,
4438 }
4439}
4440
4441fn render_evolution_metrics_prometheus(
4442 snapshot: &EvolutionMetricsSnapshot,
4443 health: &EvolutionHealthSnapshot,
4444) -> String {
4445 let mut out = String::new();
4446 out.push_str(
4447 "# HELP oris_evolution_replay_attempts_total Total replay attempts that reached validation.\n",
4448 );
4449 out.push_str("# TYPE oris_evolution_replay_attempts_total counter\n");
4450 out.push_str(&format!(
4451 "oris_evolution_replay_attempts_total {}\n",
4452 snapshot.replay_attempts_total
4453 ));
4454 out.push_str("# HELP oris_evolution_replay_success_total Total replay attempts that reused a capsule successfully.\n");
4455 out.push_str("# TYPE oris_evolution_replay_success_total counter\n");
4456 out.push_str(&format!(
4457 "oris_evolution_replay_success_total {}\n",
4458 snapshot.replay_success_total
4459 ));
4460 out.push_str("# HELP oris_evolution_replay_reasoning_avoided_total Total planner steps avoided by successful replay.\n");
4461 out.push_str("# TYPE oris_evolution_replay_reasoning_avoided_total counter\n");
4462 out.push_str(&format!(
4463 "oris_evolution_replay_reasoning_avoided_total {}\n",
4464 snapshot.replay_reasoning_avoided_total
4465 ));
4466 out.push_str("# HELP oris_evolution_replay_utilization_by_task_class_total Successful replay reuse counts grouped by deterministic task class.\n");
4467 out.push_str("# TYPE oris_evolution_replay_utilization_by_task_class_total counter\n");
4468 for task_class in &snapshot.replay_task_classes {
4469 out.push_str(&format!(
4470 "oris_evolution_replay_utilization_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
4471 prometheus_label_value(&task_class.task_class_id),
4472 prometheus_label_value(&task_class.task_label),
4473 task_class.replay_success_total
4474 ));
4475 }
4476 out.push_str("# HELP oris_evolution_replay_reasoning_avoided_by_task_class_total Planner steps avoided by successful replay grouped by deterministic task class.\n");
4477 out.push_str("# TYPE oris_evolution_replay_reasoning_avoided_by_task_class_total counter\n");
4478 for task_class in &snapshot.replay_task_classes {
4479 out.push_str(&format!(
4480 "oris_evolution_replay_reasoning_avoided_by_task_class_total{{task_class_id=\"{}\",task_label=\"{}\"}} {}\n",
4481 prometheus_label_value(&task_class.task_class_id),
4482 prometheus_label_value(&task_class.task_label),
4483 task_class.reasoning_steps_avoided_total
4484 ));
4485 }
4486 out.push_str("# HELP oris_evolution_replay_success_rate Successful replay attempts divided by replay attempts that reached validation.\n");
4487 out.push_str("# TYPE oris_evolution_replay_success_rate gauge\n");
4488 out.push_str(&format!(
4489 "oris_evolution_replay_success_rate {:.6}\n",
4490 snapshot.replay_success_rate
4491 ));
4492 out.push_str("# HELP oris_evolution_confidence_revalidations_total Total confidence-driven demotions that require revalidation before replay.\n");
4493 out.push_str("# TYPE oris_evolution_confidence_revalidations_total counter\n");
4494 out.push_str(&format!(
4495 "oris_evolution_confidence_revalidations_total {}\n",
4496 snapshot.confidence_revalidations_total
4497 ));
4498 out.push_str(
4499 "# HELP oris_evolution_mutation_declared_total Total declared mutations recorded in the evolution log.\n",
4500 );
4501 out.push_str("# TYPE oris_evolution_mutation_declared_total counter\n");
4502 out.push_str(&format!(
4503 "oris_evolution_mutation_declared_total {}\n",
4504 snapshot.mutation_declared_total
4505 ));
4506 out.push_str("# HELP oris_evolution_promoted_mutations_total Total mutations promoted by the governor.\n");
4507 out.push_str("# TYPE oris_evolution_promoted_mutations_total counter\n");
4508 out.push_str(&format!(
4509 "oris_evolution_promoted_mutations_total {}\n",
4510 snapshot.promoted_mutations_total
4511 ));
4512 out.push_str(
4513 "# HELP oris_evolution_promotion_ratio Promoted mutations divided by declared mutations.\n",
4514 );
4515 out.push_str("# TYPE oris_evolution_promotion_ratio gauge\n");
4516 out.push_str(&format!(
4517 "oris_evolution_promotion_ratio {:.6}\n",
4518 snapshot.promotion_ratio
4519 ));
4520 out.push_str("# HELP oris_evolution_gene_revocations_total Total gene revocations recorded in the evolution log.\n");
4521 out.push_str("# TYPE oris_evolution_gene_revocations_total counter\n");
4522 out.push_str(&format!(
4523 "oris_evolution_gene_revocations_total {}\n",
4524 snapshot.gene_revocations_total
4525 ));
4526 out.push_str("# HELP oris_evolution_mutation_velocity_last_hour Declared mutations observed in the last hour.\n");
4527 out.push_str("# TYPE oris_evolution_mutation_velocity_last_hour gauge\n");
4528 out.push_str(&format!(
4529 "oris_evolution_mutation_velocity_last_hour {}\n",
4530 snapshot.mutation_velocity_last_hour
4531 ));
4532 out.push_str("# HELP oris_evolution_revoke_frequency_last_hour Gene revocations observed in the last hour.\n");
4533 out.push_str("# TYPE oris_evolution_revoke_frequency_last_hour gauge\n");
4534 out.push_str(&format!(
4535 "oris_evolution_revoke_frequency_last_hour {}\n",
4536 snapshot.revoke_frequency_last_hour
4537 ));
4538 out.push_str("# HELP oris_evolution_promoted_genes Current promoted genes in the evolution projection.\n");
4539 out.push_str("# TYPE oris_evolution_promoted_genes gauge\n");
4540 out.push_str(&format!(
4541 "oris_evolution_promoted_genes {}\n",
4542 snapshot.promoted_genes
4543 ));
4544 out.push_str("# HELP oris_evolution_promoted_capsules Current promoted capsules in the evolution projection.\n");
4545 out.push_str("# TYPE oris_evolution_promoted_capsules gauge\n");
4546 out.push_str(&format!(
4547 "oris_evolution_promoted_capsules {}\n",
4548 snapshot.promoted_capsules
4549 ));
4550 out.push_str("# HELP oris_evolution_store_last_event_seq Last visible append-only evolution event sequence.\n");
4551 out.push_str("# TYPE oris_evolution_store_last_event_seq gauge\n");
4552 out.push_str(&format!(
4553 "oris_evolution_store_last_event_seq {}\n",
4554 snapshot.last_event_seq
4555 ));
4556 out.push_str(
4557 "# HELP oris_evolution_health Evolution observability store health (1 = healthy).\n",
4558 );
4559 out.push_str("# TYPE oris_evolution_health gauge\n");
4560 out.push_str(&format!(
4561 "oris_evolution_health {}\n",
4562 u8::from(health.status == "ok")
4563 ));
4564 out
4565}
4566
4567fn count_recent_events(
4568 events: &[StoredEvolutionEvent],
4569 cutoff: DateTime<Utc>,
4570 predicate: impl Fn(&EvolutionEvent) -> bool,
4571) -> u64 {
4572 events
4573 .iter()
4574 .filter(|stored| {
4575 predicate(&stored.event)
4576 && parse_event_timestamp(&stored.timestamp)
4577 .map(|timestamp| timestamp >= cutoff)
4578 .unwrap_or(false)
4579 })
4580 .count() as u64
4581}
4582
/// Escape a string for use inside a Prometheus label value: backslash,
/// newline, and double quote become `\\`, `\n`, and `\"` respectively,
/// per the text exposition format. All other characters pass through.
fn prometheus_label_value(input: &str) -> String {
    let mut escaped = String::with_capacity(input.len());
    for ch in input.chars() {
        match ch {
            '\\' => escaped.push_str("\\\\"),
            '\n' => escaped.push_str("\\n"),
            '"' => escaped.push_str("\\\""),
            other => escaped.push(other),
        }
    }
    escaped
}
4589
4590fn parse_event_timestamp(raw: &str) -> Option<DateTime<Utc>> {
4591 DateTime::parse_from_rfc3339(raw)
4592 .ok()
4593 .map(|parsed| parsed.with_timezone(&Utc))
4594}
4595
4596fn is_replay_validation_failure(event: &EvolutionEvent) -> bool {
4597 matches!(
4598 event,
4599 EvolutionEvent::ValidationFailed {
4600 gene_id: Some(_),
4601 ..
4602 }
4603 )
4604}
4605
4606fn is_confidence_revalidation_event(event: &EvolutionEvent) -> bool {
4607 matches!(
4608 event,
4609 EvolutionEvent::PromotionEvaluated {
4610 state,
4611 reason,
4612 reason_code,
4613 ..
4614 }
4615 if *state == AssetState::Quarantined
4616 && (reason_code == &TransitionReasonCode::RevalidationConfidenceDecay
4617 || (reason_code == &TransitionReasonCode::Unspecified
4618 && reason.contains("confidence decayed")))
4619 )
4620}
4621
/// Divide `numerator` by `denominator` as floats, treating a zero
/// denominator as a ratio of 0.0 rather than producing NaN or infinity.
fn safe_ratio(numerator: u64, denominator: u64) -> f64 {
    match denominator {
        0 => 0.0,
        nonzero => numerator as f64 / nonzero as f64,
    }
}
4629
4630fn store_err(err: EvolutionError) -> EvoKernelError {
4631 EvoKernelError::Store(err.to_string())
4632}
4633
4634#[cfg(test)]
4635mod tests {
4636 use super::*;
4637 use oris_agent_contract::{
4638 AgentRole, CoordinationPlan, CoordinationPrimitive, CoordinationTask,
4639 };
4640 use oris_kernel::{
4641 AllowAllPolicy, InMemoryEventStore, KernelMode, KernelState, NoopActionExecutor,
4642 NoopStepFn, StateUpdatedOnlyReducer,
4643 };
4644 use serde::{Deserialize, Serialize};
4645
    /// Zero-sized kernel-state stand-in used by the tests below.
    #[derive(Clone, Debug, Default, Serialize, Deserialize)]
    struct TestState;
4648
    impl KernelState for TestState {
        // Tests always report schema version 1.
        fn version(&self) -> u32 {
            1
        }
    }
4654
    /// Create a throwaway cargo-style workspace under the OS temp dir,
    /// namespaced by test name and process id. Any directory left over from
    /// a previous run is removed first.
    fn temp_workspace(name: &str) -> std::path::PathBuf {
        let root =
            std::env::temp_dir().join(format!("oris-evokernel-{name}-{}", std::process::id()));
        if root.exists() {
            fs::remove_dir_all(&root).unwrap();
        }
        fs::create_dir_all(root.join("src")).unwrap();
        fs::write(
            root.join("Cargo.toml"),
            "[package]\nname = \"sample\"\nversion = \"0.1.0\"\nedition = \"2021\"\n",
        )
        .unwrap();
        fs::write(root.join("Cargo.lock"), "# lock\n").unwrap();
        fs::write(root.join("src/lib.rs"), "pub fn demo() -> usize { 1 }\n").unwrap();
        root
    }
4671
    /// Build a minimal kernel wired entirely with in-memory / no-op
    /// components (no snapshots, no effect sink, normal mode).
    fn test_kernel() -> Arc<Kernel<TestState>> {
        Arc::new(Kernel::<TestState> {
            events: Box::new(InMemoryEventStore::new()),
            snaps: None,
            reducer: Box::new(StateUpdatedOnlyReducer),
            exec: Box::new(NoopActionExecutor),
            step: Box::new(NoopStepFn),
            policy: Box::new(AllowAllPolicy),
            effect_sink: None,
            mode: KernelMode::Normal,
        })
    }
4684
    /// Validation plan with a single fast stage (`git --version`) so tests
    /// exercise the validation path without a real build.
    fn lightweight_plan() -> ValidationPlan {
        ValidationPlan {
            profile: "test".into(),
            stages: vec![ValidationStage::Command {
                program: "git".into(),
                args: vec!["--version".into()],
                timeout_ms: 5_000,
            }],
        }
    }
4695
    /// Prepared low-risk mutation that adds a one-line README via a diff
    /// against HEAD; used as the canonical capture/replay fixture.
    fn sample_mutation() -> PreparedMutation {
        prepare_mutation(
            MutationIntent {
                id: "mutation-1".into(),
                intent: "add README".into(),
                target: MutationTarget::Paths {
                    allow: vec!["README.md".into()],
                },
                expected_effect: "repo still builds".into(),
                risk: RiskLevel::Low,
                signals: vec!["missing readme".into()],
                spec_id: None,
            },
            "\
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/README.md
@@ -0,0 +1 @@
+# sample
"
            .into(),
            Some("HEAD".into()),
        )
    }
4722
    /// Sandbox policy that only allows `git`, with generous time/output
    /// budgets for CI machines.
    fn base_sandbox_policy() -> SandboxPolicy {
        SandboxPolicy {
            allowed_programs: vec!["git".into()],
            max_duration_ms: 60_000,
            max_output_bytes: 1024 * 1024,
            denied_env_prefixes: Vec::new(),
        }
    }
4731
    /// Real command-running validator constrained by the base sandbox policy.
    fn command_validator() -> Arc<dyn Validator> {
        Arc::new(CommandValidator::new(base_sandbox_policy()))
    }
4735
    /// Selector input for a single signal whose env fingerprint mirrors the
    /// host toolchain: `rustc --version` is probed, falling back to
    /// "rustc unknown" when the probe fails or errors.
    fn replay_input(signal: &str) -> SelectorInput {
        let rustc_version = std::process::Command::new("rustc")
            .arg("--version")
            .output()
            .ok()
            .filter(|output| output.status.success())
            .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string())
            .unwrap_or_else(|| "rustc unknown".into());
        SelectorInput {
            signals: vec![signal.into()],
            env: EnvFingerprint {
                rustc_version,
                // Must match the "# lock\n" file written by temp_workspace.
                cargo_lock_hash: compute_artifact_hash("# lock\n"),
                target_triple: format!(
                    "{}-unknown-{}",
                    std::env::consts::ARCH,
                    std::env::consts::OS
                ),
                os: std::env::consts::OS.into(),
            },
            spec_id: None,
            limit: 1,
        }
    }
4760
    /// Assemble an `EvoKernel` over the given store with a local-process
    /// sandbox rooted in a fresh temp workspace. The governor is configured
    /// to promote after a single success so tests reach promotion quickly.
    fn build_test_evo_with_store(
        name: &str,
        run_id: &str,
        validator: Arc<dyn Validator>,
        store: Arc<dyn EvolutionStore>,
    ) -> EvoKernel<TestState> {
        let workspace = temp_workspace(name);
        let sandbox: Arc<dyn Sandbox> = Arc::new(oris_sandbox::LocalProcessSandbox::new(
            run_id,
            &workspace,
            std::env::temp_dir(),
        ));
        EvoKernel::new(test_kernel(), sandbox, validator, store)
            .with_governor(Arc::new(DefaultGovernor::new(
                oris_governor::GovernorConfig {
                    promote_after_successes: 1,
                    ..Default::default()
                },
            )))
            .with_validation_plan(lightweight_plan())
            .with_sandbox_policy(base_sandbox_policy())
    }
4783
    /// Like `build_test_evo_with_store`, but also creates a fresh JSONL
    /// store under the temp dir (wiping any previous run's directory) and
    /// returns it alongside the kernel for direct log inspection.
    fn build_test_evo(
        name: &str,
        run_id: &str,
        validator: Arc<dyn Validator>,
    ) -> (EvoKernel<TestState>, Arc<dyn EvolutionStore>) {
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-{name}-store-{}",
            std::process::id()
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let evo = build_test_evo_with_store(name, run_id, validator, store.clone());
        (evo, store)
    }
4801
    /// Publish envelope for a single-signal gene/capsule pair, using the
    /// current toolchain's env fingerprint taken from `replay_input`.
    fn remote_publish_envelope(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        signal: &str,
        file_name: &str,
        line: &str,
    ) -> EvolutionEnvelope {
        remote_publish_envelope_with_env(
            sender_id,
            run_id,
            gene_id,
            capsule_id,
            mutation_id,
            signal,
            file_name,
            line,
            replay_input(signal).env,
        )
    }
4824
4825 fn remote_publish_envelope_with_env(
4826 sender_id: &str,
4827 run_id: &str,
4828 gene_id: &str,
4829 capsule_id: &str,
4830 mutation_id: &str,
4831 signal: &str,
4832 file_name: &str,
4833 line: &str,
4834 env: EnvFingerprint,
4835 ) -> EvolutionEnvelope {
4836 let mutation = prepare_mutation(
4837 MutationIntent {
4838 id: mutation_id.into(),
4839 intent: format!("add {file_name}"),
4840 target: MutationTarget::Paths {
4841 allow: vec![file_name.into()],
4842 },
4843 expected_effect: "replay should still validate".into(),
4844 risk: RiskLevel::Low,
4845 signals: vec![signal.into()],
4846 spec_id: None,
4847 },
4848 format!(
4849 "\
4850diff --git a/{file_name} b/{file_name}
4851new file mode 100644
4852index 0000000..1111111
4853--- /dev/null
4854+++ b/{file_name}
4855@@ -0,0 +1 @@
4856+{line}
4857"
4858 ),
4859 Some("HEAD".into()),
4860 );
4861 let gene = Gene {
4862 id: gene_id.into(),
4863 signals: vec![signal.into()],
4864 strategy: vec![file_name.into()],
4865 validation: vec!["test".into()],
4866 state: AssetState::Promoted,
4867 };
4868 let capsule = Capsule {
4869 id: capsule_id.into(),
4870 gene_id: gene_id.into(),
4871 mutation_id: mutation_id.into(),
4872 run_id: run_id.into(),
4873 diff_hash: mutation.artifact.content_hash.clone(),
4874 confidence: 0.9,
4875 env,
4876 outcome: Outcome {
4877 success: true,
4878 validation_profile: "test".into(),
4879 validation_duration_ms: 1,
4880 changed_files: vec![file_name.into()],
4881 validator_hash: "validator-hash".into(),
4882 lines_changed: 1,
4883 replay_verified: false,
4884 },
4885 state: AssetState::Promoted,
4886 };
4887 EvolutionEnvelope::publish(
4888 sender_id,
4889 vec![
4890 NetworkAsset::EvolutionEvent {
4891 event: EvolutionEvent::MutationDeclared { mutation },
4892 },
4893 NetworkAsset::Gene { gene: gene.clone() },
4894 NetworkAsset::Capsule {
4895 capsule: capsule.clone(),
4896 },
4897 NetworkAsset::EvolutionEvent {
4898 event: EvolutionEvent::CapsuleReleased {
4899 capsule_id: capsule.id.clone(),
4900 state: AssetState::Promoted,
4901 },
4902 },
4903 ],
4904 )
4905 }
4906
    /// Build a remote publish envelope with distinct mutation and gene
    /// signal lists: a declared mutation (one-line diff adding `file_name`),
    /// a promoted gene, a promoted capsule (confidence 0.9) bound to the
    /// supplied env fingerprint, and a `CapsuleReleased` event.
    fn remote_publish_envelope_with_signals(
        sender_id: &str,
        run_id: &str,
        gene_id: &str,
        capsule_id: &str,
        mutation_id: &str,
        mutation_signals: Vec<String>,
        gene_signals: Vec<String>,
        file_name: &str,
        line: &str,
        env: EnvFingerprint,
    ) -> EvolutionEnvelope {
        let mutation = prepare_mutation(
            MutationIntent {
                id: mutation_id.into(),
                intent: format!("add {file_name}"),
                target: MutationTarget::Paths {
                    allow: vec![file_name.into()],
                },
                expected_effect: "replay should still validate".into(),
                risk: RiskLevel::Low,
                signals: mutation_signals,
                spec_id: None,
            },
            format!(
                "\
diff --git a/{file_name} b/{file_name}
new file mode 100644
index 0000000..1111111
--- /dev/null
+++ b/{file_name}
@@ -0,0 +1 @@
+{line}
"
            ),
            Some("HEAD".into()),
        );
        let gene = Gene {
            id: gene_id.into(),
            signals: gene_signals,
            strategy: vec![file_name.into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        };
        let capsule = Capsule {
            id: capsule_id.into(),
            gene_id: gene_id.into(),
            mutation_id: mutation_id.into(),
            run_id: run_id.into(),
            // Hash must match the prepared diff so replay verification holds.
            diff_hash: mutation.artifact.content_hash.clone(),
            confidence: 0.9,
            env,
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec![file_name.into()],
                validator_hash: "validator-hash".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        };
        EvolutionEnvelope::publish(
            sender_id,
            vec![
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::MutationDeclared { mutation },
                },
                NetworkAsset::Gene { gene: gene.clone() },
                NetworkAsset::Capsule {
                    capsule: capsule.clone(),
                },
                NetworkAsset::EvolutionEvent {
                    event: EvolutionEvent::CapsuleReleased {
                        capsule_id: capsule.id.clone(),
                        state: AssetState::Promoted,
                    },
                },
            ],
        )
    }
4989
    /// Validator test double that always reports the configured outcome.
    struct FixedValidator {
        success: bool,
    }
4993
    #[async_trait]
    impl Validator for FixedValidator {
        // Return a canned report; `logs` embeds the profile name plus
        // "ok"/"failed" so assertions can distinguish the outcome.
        async fn run(
            &self,
            _receipt: &SandboxReceipt,
            plan: &ValidationPlan,
        ) -> Result<ValidationReport, ValidationError> {
            Ok(ValidationReport {
                success: self.success,
                duration_ms: 1,
                stages: Vec::new(),
                logs: if self.success {
                    format!("{} ok", plan.profile)
                } else {
                    format!("{} failed", plan.profile)
                },
            })
        }
    }
5013
    /// Store wrapper that injects an I/O failure on the N-th `append_event`
    /// call, delegating everything else to a real JSONL store.
    struct FailOnAppendStore {
        inner: JsonlEvolutionStore,
        fail_on_call: usize,
        call_count: Mutex<usize>,
    }
5019
    impl FailOnAppendStore {
        /// Wrap a fresh JSONL store rooted at `root_dir`; the
        /// `fail_on_call`-th append (1-based) will fail.
        fn new(root_dir: std::path::PathBuf, fail_on_call: usize) -> Self {
            Self {
                inner: JsonlEvolutionStore::new(root_dir),
                fail_on_call,
                call_count: Mutex::new(0),
            }
        }
    }
5029
    impl EvolutionStore for FailOnAppendStore {
        fn append_event(&self, event: EvolutionEvent) -> Result<u64, EvolutionError> {
            // Every attempt is counted — including the injected failure —
            // so exactly one targeted call fails and later appends succeed.
            let mut call_count = self
                .call_count
                .lock()
                .map_err(|_| EvolutionError::Io("test store lock poisoned".into()))?;
            *call_count += 1;
            if *call_count == self.fail_on_call {
                return Err(EvolutionError::Io("injected append failure".into()));
            }
            self.inner.append_event(event)
        }

        // Reads delegate untouched to the inner JSONL store.
        fn scan(&self, from_seq: u64) -> Result<Vec<StoredEvolutionEvent>, EvolutionError> {
            self.inner.scan(from_seq)
        }

        fn rebuild_projection(&self) -> Result<EvolutionProjection, EvolutionError> {
            self.inner.rebuild_projection()
        }
    }
5051
    /// Sequential plan: planner completes before the coder and a
    /// planner->coder handoff message is recorded for the downstream task.
    #[test]
    fn coordination_planner_to_coder_handoff_is_deterministic() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "ship feature".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "planner".into(),
                    role: AgentRole::Planner,
                    description: "split the work".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "implement the patch".into(),
                    depends_on: vec!["planner".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["planner", "coder"]);
        assert!(result.failed_tasks.is_empty());
        assert!(result.messages.iter().any(|message| {
            message.from_role == AgentRole::Planner
                && message.to_role == AgentRole::Coder
                && message.task_id == "coder"
        }));
    }
5083
    /// A "force-fail" coder task fails, after which the dependent repair
    /// task runs and a coder->repair message is emitted.
    #[test]
    fn coordination_repair_runs_only_after_coder_failure() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "fix broken implementation".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "force-fail initial implementation".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "repair".into(),
                    role: AgentRole::Repair,
                    description: "patch the failed implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["repair"]);
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert!(result.messages.iter().any(|message| {
            message.from_role == AgentRole::Coder
                && message.to_role == AgentRole::Repair
                && message.task_id == "repair"
        }));
    }
5115
    /// The optimizer runs after a successful coder step; both complete.
    #[test]
    fn coordination_optimizer_runs_after_successful_implementation_step() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "ship optimized patch".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "implement a working patch".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "optimizer".into(),
                    role: AgentRole::Optimizer,
                    description: "tighten the implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["coder", "optimizer"]);
        assert!(result.failed_tasks.is_empty());
    }
5142
    /// Parallel mode: independent tasks complete in sorted-id order within a
    /// wave ("a-task" before "z-task"), then the dependent task follows.
    #[test]
    fn coordination_parallel_waves_preserve_sorted_merge_order() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "parallelize safe tasks".into(),
            primitive: CoordinationPrimitive::Parallel,
            tasks: vec![
                CoordinationTask {
                    id: "z-task".into(),
                    role: AgentRole::Planner,
                    description: "analyze z".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "a-task".into(),
                    role: AgentRole::Coder,
                    description: "implement a".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "mid-task".into(),
                    role: AgentRole::Optimizer,
                    description: "polish after both".into(),
                    depends_on: vec!["z-task".into(), "a-task".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert_eq!(result.completed_tasks, vec!["a-task", "z-task", "mid-task"]);
        assert!(result.failed_tasks.is_empty());
    }
5175
    /// With max_retries = 1, a permanently failing task produces exactly two
    /// failure messages (initial attempt + one retry) and then stops.
    #[test]
    fn coordination_retries_stop_at_max_retries() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "retry then stop".into(),
            primitive: CoordinationPrimitive::Sequential,
            tasks: vec![CoordinationTask {
                id: "coder".into(),
                role: AgentRole::Coder,
                description: "force-fail this task".into(),
                depends_on: Vec::new(),
            }],
            timeout_ms: 5_000,
            max_retries: 1,
        });

        assert!(result.completed_tasks.is_empty());
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert_eq!(
            result
                .messages
                .iter()
                .filter(|message| message.task_id == "coder" && message.content.contains("failed"))
                .count(),
            2
        );
    }
5202
    /// Conditional mode: a failed dependency skips the downstream task (with
    /// an explanatory message) rather than marking it failed.
    #[test]
    fn coordination_conditional_mode_skips_downstream_tasks_on_failure() {
        let result = MultiAgentCoordinator::new().coordinate(CoordinationPlan {
            root_goal: "skip blocked follow-up work".into(),
            primitive: CoordinationPrimitive::Conditional,
            tasks: vec![
                CoordinationTask {
                    id: "coder".into(),
                    role: AgentRole::Coder,
                    description: "force-fail the implementation".into(),
                    depends_on: Vec::new(),
                },
                CoordinationTask {
                    id: "optimizer".into(),
                    role: AgentRole::Optimizer,
                    description: "only optimize a successful implementation".into(),
                    depends_on: vec!["coder".into()],
                },
            ],
            timeout_ms: 5_000,
            max_retries: 0,
        });

        assert!(result.completed_tasks.is_empty());
        assert_eq!(result.failed_tasks, vec!["coder"]);
        assert!(result.messages.iter().any(|message| {
            message.task_id == "optimizer"
                && message
                    .content
                    .contains("skipped due to failed dependency chain")
        }));
        assert!(!result
            .failed_tasks
            .iter()
            .any(|task_id| task_id == "optimizer"));
    }
5239
    /// A one-stage plan (`git --version`) yields exactly one stage report.
    #[tokio::test]
    async fn command_validator_aggregates_stage_reports() {
        let workspace = temp_workspace("validator");
        let receipt = SandboxReceipt {
            mutation_id: "m".into(),
            workdir: workspace,
            applied: true,
            changed_files: Vec::new(),
            patch_hash: "hash".into(),
            stdout_log: std::env::temp_dir().join("stdout.log"),
            stderr_log: std::env::temp_dir().join("stderr.log"),
        };
        let validator = CommandValidator::new(SandboxPolicy {
            allowed_programs: vec!["git".into()],
            max_duration_ms: 1_000,
            max_output_bytes: 1024,
            denied_env_prefixes: Vec::new(),
        });
        let report = validator
            .run(
                &receipt,
                &ValidationPlan {
                    profile: "test".into(),
                    stages: vec![ValidationStage::Command {
                        program: "git".into(),
                        args: vec!["--version".into()],
                        timeout_ms: 1_000,
                    }],
                },
            )
            .await
            .unwrap();
        assert_eq!(report.stages.len(), 1);
    }
5274
    /// Capturing a successful mutation appends a `CapsuleCommitted` event
    /// and returns a capsule with a non-empty id.
    #[tokio::test]
    async fn capture_successful_mutation_appends_capsule() {
        let (evo, store) = build_test_evo("capture", "run-1", command_validator());
        let capsule = evo
            .capture_successful_mutation(&"run-1".into(), sample_mutation())
            .await
            .unwrap();
        let events = store.scan(1).unwrap();
        assert!(events
            .iter()
            .any(|stored| matches!(stored.event, EvolutionEvent::CapsuleCommitted { .. })));
        assert!(!capsule.id.is_empty());
    }
5288
5289 #[tokio::test]
5290 async fn replay_hit_records_capsule_reused() {
5291 let (evo, store) = build_test_evo("replay", "run-2", command_validator());
5292 let capsule = evo
5293 .capture_successful_mutation(&"run-2".into(), sample_mutation())
5294 .await
5295 .unwrap();
5296 let replay_run_id = "run-replay".to_string();
5297 let decision = evo
5298 .replay_or_fallback_for_run(&replay_run_id, replay_input("missing readme"))
5299 .await
5300 .unwrap();
5301 assert!(decision.used_capsule);
5302 assert_eq!(decision.capsule_id, Some(capsule.id));
5303 assert!(store.scan(1).unwrap().iter().any(|stored| matches!(
5304 &stored.event,
5305 EvolutionEvent::CapsuleReused {
5306 run_id,
5307 replay_run_id: Some(current_replay_run_id),
5308 ..
5309 } if run_id == "run-2" && current_replay_run_id == &replay_run_id
5310 )));
5311 }
5312
    #[tokio::test]
    async fn legacy_replay_executor_api_preserves_original_capsule_run_id() {
        // The legacy StoreReplayExecutor::try_replay entry point has no notion
        // of a replaying run; the CapsuleReused event it records must keep the
        // original capture run id and leave `replay_run_id` as None.
        let capture_run_id = "run-legacy-capture".to_string();
        let (evo, store) = build_test_evo("replay-legacy", &capture_run_id, command_validator());
        let capsule = evo
            .capture_successful_mutation(&capture_run_id, sample_mutation())
            .await
            .unwrap();
        // Assemble the executor from the same shared components as `evo`, so
        // the replay observes the store that recorded the capture above.
        let executor = StoreReplayExecutor {
            sandbox: evo.sandbox.clone(),
            validator: evo.validator.clone(),
            store: evo.store.clone(),
            selector: evo.selector.clone(),
            governor: evo.governor.clone(),
            economics: Some(evo.economics.clone()),
            remote_publishers: Some(evo.remote_publishers.clone()),
            stake_policy: evo.stake_policy.clone(),
        };

        let decision = executor
            .try_replay(
                &replay_input("missing readme"),
                &evo.sandbox_policy,
                &evo.validation_plan,
            )
            .await
            .unwrap();

        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some(capsule.id));
        // The reuse event carries the capture run id with no replay run id.
        assert!(store.scan(1).unwrap().iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::CapsuleReused {
                run_id,
                replay_run_id: None,
                ..
            } if run_id == &capture_run_id
        )));
    }
5352
5353 #[tokio::test]
5354 async fn metrics_snapshot_tracks_replay_promotion_and_revocation_signals() {
5355 let (evo, _) = build_test_evo("metrics", "run-metrics", command_validator());
5356 let capsule = evo
5357 .capture_successful_mutation(&"run-metrics".into(), sample_mutation())
5358 .await
5359 .unwrap();
5360 let decision = evo
5361 .replay_or_fallback(replay_input("missing readme"))
5362 .await
5363 .unwrap();
5364 assert!(decision.used_capsule);
5365
5366 evo.revoke_assets(&RevokeNotice {
5367 sender_id: "node-metrics".into(),
5368 asset_ids: vec![capsule.id.clone()],
5369 reason: "manual test revoke".into(),
5370 })
5371 .unwrap();
5372
5373 let snapshot = evo.metrics_snapshot().unwrap();
5374 assert_eq!(snapshot.replay_attempts_total, 1);
5375 assert_eq!(snapshot.replay_success_total, 1);
5376 assert_eq!(snapshot.replay_success_rate, 1.0);
5377 assert_eq!(snapshot.confidence_revalidations_total, 0);
5378 assert_eq!(snapshot.replay_reasoning_avoided_total, 1);
5379 assert_eq!(snapshot.replay_task_classes.len(), 1);
5380 assert_eq!(snapshot.replay_task_classes[0].replay_success_total, 1);
5381 assert_eq!(
5382 snapshot.replay_task_classes[0].reasoning_steps_avoided_total,
5383 1
5384 );
5385 assert_eq!(snapshot.confidence_revalidations_total, 0);
5386 assert_eq!(snapshot.mutation_declared_total, 1);
5387 assert_eq!(snapshot.promoted_mutations_total, 1);
5388 assert_eq!(snapshot.promotion_ratio, 1.0);
5389 assert_eq!(snapshot.gene_revocations_total, 1);
5390 assert_eq!(snapshot.mutation_velocity_last_hour, 1);
5391 assert_eq!(snapshot.revoke_frequency_last_hour, 1);
5392 assert_eq!(snapshot.promoted_genes, 0);
5393 assert_eq!(snapshot.promoted_capsules, 0);
5394
5395 let rendered = evo.render_metrics_prometheus().unwrap();
5396 assert!(rendered.contains("oris_evolution_replay_reasoning_avoided_total 1"));
5397 assert!(rendered.contains("oris_evolution_replay_utilization_by_task_class_total"));
5398 assert!(rendered.contains("oris_evolution_replay_reasoning_avoided_by_task_class_total"));
5399 assert!(rendered.contains("oris_evolution_replay_success_rate 1.000000"));
5400 assert!(rendered.contains("oris_evolution_confidence_revalidations_total 0"));
5401 assert!(rendered.contains("oris_evolution_promotion_ratio 1.000000"));
5402 assert!(rendered.contains("oris_evolution_revoke_frequency_last_hour 1"));
5403 assert!(rendered.contains("oris_evolution_mutation_velocity_last_hour 1"));
5404 assert!(rendered.contains("oris_evolution_health 1"));
5405 }
5406
    #[test]
    fn stale_replay_targets_require_confidence_revalidation() {
        // A promoted gene/capsule pair whose last update is 48h old should see
        // its decayed confidence fall below MIN_REPLAY_CONFIDENCE and so be
        // flagged as a revalidation target.
        let now = Utc::now();
        let projection = EvolutionProjection {
            genes: vec![Gene {
                id: "gene-stale".into(),
                signals: vec!["missing readme".into()],
                strategy: vec!["README.md".into()],
                validation: vec!["test".into()],
                state: AssetState::Promoted,
            }],
            capsules: vec![Capsule {
                id: "capsule-stale".into(),
                gene_id: "gene-stale".into(),
                mutation_id: "mutation-stale".into(),
                run_id: "run-stale".into(),
                diff_hash: "hash".into(),
                confidence: 0.8,
                env: replay_input("missing readme").env,
                outcome: Outcome {
                    success: true,
                    validation_profile: "test".into(),
                    validation_duration_ms: 1,
                    changed_files: vec!["README.md".into()],
                    validator_hash: "validator".into(),
                    lines_changed: 1,
                    replay_verified: false,
                },
                state: AssetState::Promoted,
            }],
            reuse_counts: BTreeMap::from([("gene-stale".into(), 1)]),
            attempt_counts: BTreeMap::from([("gene-stale".into(), 1)]),
            // 48 hours of staleness drives the confidence decay below the
            // replay threshold checked at the bottom of this test.
            last_updated_at: BTreeMap::from([(
                "gene-stale".into(),
                (now - Duration::hours(48)).to_rfc3339(),
            )]),
            spec_ids_by_gene: BTreeMap::new(),
        };

        let targets = stale_replay_revalidation_targets(&projection, now);

        // Exactly the stale gene is targeted, with its capsule listed and its
        // decayed confidence now under MIN_REPLAY_CONFIDENCE.
        assert_eq!(targets.len(), 1);
        assert_eq!(targets[0].gene_id, "gene-stale");
        assert_eq!(targets[0].capsule_ids, vec!["capsule-stale".to_string()]);
        assert!(targets[0].decayed_confidence < MIN_REPLAY_CONFIDENCE);
    }
5453
    #[tokio::test]
    async fn remote_replay_prefers_closest_environment_match() {
        // Two remote capsules answer the same signal; the one whose
        // environment fingerprint matches the local input must win selection.
        let (evo, _) = build_test_evo("remote-env", "run-remote-env", command_validator());
        let input = replay_input("env-signal");

        // Envelope A shares the local environment fingerprint exactly.
        let envelope_a = remote_publish_envelope_with_env(
            "node-a",
            "run-remote-a",
            "gene-a",
            "capsule-a",
            "mutation-a",
            "env-signal",
            "A.md",
            "# from a",
            input.env.clone(),
        );
        // Envelope B advertises a divergent toolchain/OS fingerprint.
        let envelope_b = remote_publish_envelope_with_env(
            "node-b",
            "run-remote-b",
            "gene-b",
            "capsule-b",
            "mutation-b",
            "env-signal",
            "B.md",
            "# from b",
            EnvFingerprint {
                rustc_version: "old-rustc".into(),
                cargo_lock_hash: "other-lock".into(),
                target_triple: "aarch64-apple-darwin".into(),
                os: "linux".into(),
            },
        );

        evo.import_remote_envelope(&envelope_a).unwrap();
        evo.import_remote_envelope(&envelope_b).unwrap();

        let decision = evo.replay_or_fallback(input).await.unwrap();

        // The environment-compatible capsule is replayed; no planner fallback.
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-a".into()));
        assert!(!decision.fallback_to_planner);
    }
5496
    #[test]
    fn remote_cold_start_scoring_caps_distinct_query_coverage() {
        // Cold-start scoring of quarantined remote candidates caps query
        // coverage at 1.0: a gene covering the query through several distinct
        // tokens must not outscore a single exact-signal match.
        let (evo, _) = build_test_evo("remote-score", "run-remote-score", command_validator());
        let input = replay_input("missing readme");

        // One candidate matches the query with the exact signal string.
        let exact = remote_publish_envelope_with_signals(
            "node-exact",
            "run-remote-exact",
            "gene-exact",
            "capsule-exact",
            "mutation-exact",
            vec!["missing readme".into()],
            vec!["missing readme".into()],
            "EXACT.md",
            "# exact",
            input.env.clone(),
        );
        // The other covers the same query through two overlapping tokens.
        let overlapping = remote_publish_envelope_with_signals(
            "node-overlap",
            "run-remote-overlap",
            "gene-overlap",
            "capsule-overlap",
            "mutation-overlap",
            vec!["missing readme".into()],
            vec!["missing".into(), "readme".into()],
            "OVERLAP.md",
            "# overlap",
            input.env.clone(),
        );

        evo.import_remote_envelope(&exact).unwrap();
        evo.import_remote_envelope(&overlapping).unwrap();

        let candidates = quarantined_remote_exact_match_candidates(evo.store.as_ref(), &input);
        let exact_candidate = candidates
            .iter()
            .find(|candidate| candidate.gene.id == "gene-exact")
            .unwrap();
        let overlap_candidate = candidates
            .iter()
            .find(|candidate| candidate.gene.id == "gene-overlap")
            .unwrap();

        // Both strategies saturate at the cap; no candidate ever exceeds 1.0.
        assert_eq!(exact_candidate.score, 1.0);
        assert_eq!(overlap_candidate.score, 1.0);
        assert!(candidates.iter().all(|candidate| candidate.score <= 1.0));
    }
5544
    #[test]
    fn exact_match_candidates_respect_spec_linked_events() {
        // A mutation declared without a spec_id but later tied to one through
        // a SpecLinked event must still qualify as an exact-match candidate
        // for a replay input filtering on that spec.
        let (evo, _) = build_test_evo(
            "spec-linked-filter",
            "run-spec-linked-filter",
            command_validator(),
        );
        let mut input = replay_input("missing readme");
        input.spec_id = Some("spec-readme".into());

        // The mutation intentionally carries no spec id at declaration time.
        let mut mutation = sample_mutation();
        mutation.intent.id = "mutation-spec-linked".into();
        mutation.intent.spec_id = None;
        let gene = Gene {
            id: "gene-spec-linked".into(),
            signals: vec!["missing readme".into()],
            strategy: vec!["README.md".into()],
            validation: vec!["test".into()],
            state: AssetState::Promoted,
        };
        let capsule = Capsule {
            id: "capsule-spec-linked".into(),
            gene_id: gene.id.clone(),
            mutation_id: mutation.intent.id.clone(),
            run_id: "run-spec-linked".into(),
            diff_hash: mutation.artifact.content_hash.clone(),
            confidence: 0.9,
            env: input.env.clone(),
            outcome: Outcome {
                success: true,
                validation_profile: "test".into(),
                validation_duration_ms: 1,
                changed_files: vec!["README.md".into()],
                validator_hash: "validator-hash".into(),
                lines_changed: 1,
                replay_verified: false,
            },
            state: AssetState::Promoted,
        };

        // Append declaration/projection/commit, then link the mutation to the
        // spec the replay input filters on — order mirrors real event flow.
        evo.store
            .append_event(EvolutionEvent::MutationDeclared { mutation })
            .unwrap();
        evo.store
            .append_event(EvolutionEvent::GeneProjected { gene })
            .unwrap();
        evo.store
            .append_event(EvolutionEvent::CapsuleCommitted { capsule })
            .unwrap();
        evo.store
            .append_event(EvolutionEvent::SpecLinked {
                mutation_id: "mutation-spec-linked".into(),
                spec_id: "spec-readme".into(),
            })
            .unwrap();

        // The spec-linked gene is the single exact-match candidate.
        let candidates = exact_match_candidates(evo.store.as_ref(), &input);
        assert_eq!(candidates.len(), 1);
        assert_eq!(candidates[0].gene.id, "gene-spec-linked");
    }
5605
    #[tokio::test]
    async fn remote_capsule_stays_quarantined_until_first_successful_replay() {
        // Imported remote assets start quarantined (and are excluded from
        // export); the first successful local replay promotes both the gene
        // and the capsule and makes them exportable.
        let (evo, store) = build_test_evo(
            "remote-quarantine",
            "run-remote-quarantine",
            command_validator(),
        );
        let envelope = remote_publish_envelope(
            "node-remote",
            "run-remote-quarantine",
            "gene-remote",
            "capsule-remote",
            "mutation-remote",
            "remote-signal",
            "REMOTE.md",
            "# from remote",
        );

        evo.import_remote_envelope(&envelope).unwrap();

        // Immediately after import: both assets quarantined, nothing exported.
        let before_replay = store.rebuild_projection().unwrap();
        let imported_gene = before_replay
            .genes
            .iter()
            .find(|gene| gene.id == "gene-remote")
            .unwrap();
        let imported_capsule = before_replay
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-remote")
            .unwrap();
        assert_eq!(imported_gene.state, AssetState::Quarantined);
        assert_eq!(imported_capsule.state, AssetState::Quarantined);
        let exported_before_replay =
            export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
        assert!(exported_before_replay.assets.is_empty());

        // First successful local replay of the quarantined capsule.
        let decision = evo
            .replay_or_fallback(replay_input("remote-signal"))
            .await
            .unwrap();

        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-remote".into()));

        // After the replay: both assets promoted and now exportable
        // (mutation event + gene + capsule = 3 exported assets).
        let after_replay = store.rebuild_projection().unwrap();
        let promoted_gene = after_replay
            .genes
            .iter()
            .find(|gene| gene.id == "gene-remote")
            .unwrap();
        let released_capsule = after_replay
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-remote")
            .unwrap();
        assert_eq!(promoted_gene.state, AssetState::Promoted);
        assert_eq!(released_capsule.state, AssetState::Promoted);
        let exported_after_replay =
            export_promoted_assets_from_store(store.as_ref(), "node-local").unwrap();
        assert_eq!(exported_after_replay.assets.len(), 3);
        assert!(exported_after_replay.assets.iter().any(|asset| matches!(
            asset,
            NetworkAsset::EvolutionEvent {
                event: EvolutionEvent::MutationDeclared { .. }
            }
        )));
    }
5674
5675 #[tokio::test]
5676 async fn publish_local_assets_include_mutation_payload_for_remote_replay() {
5677 let (source, source_store) = build_test_evo(
5678 "remote-publish-export",
5679 "run-remote-publish-export",
5680 command_validator(),
5681 );
5682 source
5683 .capture_successful_mutation(&"run-remote-publish-export".into(), sample_mutation())
5684 .await
5685 .unwrap();
5686 let envelope = EvolutionNetworkNode::new(source_store.clone())
5687 .publish_local_assets("node-source")
5688 .unwrap();
5689 assert!(envelope.assets.iter().any(|asset| matches!(
5690 asset,
5691 NetworkAsset::EvolutionEvent {
5692 event: EvolutionEvent::MutationDeclared { mutation }
5693 } if mutation.intent.id == "mutation-1"
5694 )));
5695
5696 let (remote, _) = build_test_evo(
5697 "remote-publish-import",
5698 "run-remote-publish-import",
5699 command_validator(),
5700 );
5701 remote.import_remote_envelope(&envelope).unwrap();
5702
5703 let decision = remote
5704 .replay_or_fallback(replay_input("missing readme"))
5705 .await
5706 .unwrap();
5707
5708 assert!(decision.used_capsule);
5709 assert!(!decision.fallback_to_planner);
5710 }
5711
    #[tokio::test]
    async fn import_remote_envelope_records_manifest_validation_event() {
        // A well-formed envelope import must append an accepting
        // ManifestValidated audit event naming sender and publisher.
        let (source, source_store) = build_test_evo(
            "remote-manifest-success-source",
            "run-remote-manifest-success-source",
            command_validator(),
        );
        source
            .capture_successful_mutation(
                &"run-remote-manifest-success-source".into(),
                sample_mutation(),
            )
            .await
            .unwrap();
        let envelope = EvolutionNetworkNode::new(source_store.clone())
            .publish_local_assets("node-source")
            .unwrap();

        let (remote, remote_store) = build_test_evo(
            "remote-manifest-success-remote",
            "run-remote-manifest-success-remote",
            command_validator(),
        );
        remote.import_remote_envelope(&envelope).unwrap();

        // The audit event records acceptance, the publishing node on both the
        // sender and publisher fields, and a non-empty asset id list.
        let events = remote_store.scan(1).unwrap();
        assert!(events.iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::ManifestValidated {
                accepted: true,
                reason,
                sender_id: Some(sender_id),
                publisher: Some(publisher),
                asset_ids,
            } if reason == "manifest validated"
                && sender_id == "node-source"
                && publisher == "node-source"
                && !asset_ids.is_empty()
        )));
    }
5752
    #[test]
    fn import_remote_envelope_rejects_invalid_manifest_and_records_audit_event() {
        // Tampering with the manifest's asset hash — while keeping the outer
        // envelope content hash self-consistent — must fail the import and
        // leave a rejecting ManifestValidated audit event behind.
        let (remote, remote_store) = build_test_evo(
            "remote-manifest-invalid",
            "run-remote-manifest-invalid",
            command_validator(),
        );
        let mut envelope = remote_publish_envelope(
            "node-remote",
            "run-remote-manifest-invalid",
            "gene-remote",
            "capsule-remote",
            "mutation-remote",
            "manifest-signal",
            "MANIFEST.md",
            "# drift",
        );
        if let Some(manifest) = envelope.manifest.as_mut() {
            manifest.asset_hash = "tampered-hash".to_string();
        }
        // Recompute the outer hash so only the manifest check can fail.
        envelope.content_hash = envelope.compute_content_hash();

        let error = remote.import_remote_envelope(&envelope).unwrap_err();
        assert!(error.to_string().contains("manifest"));

        // Rejection is audited with the mismatch reason and sender/publisher.
        let events = remote_store.scan(1).unwrap();
        assert!(events.iter().any(|stored| matches!(
            &stored.event,
            EvolutionEvent::ManifestValidated {
                accepted: false,
                reason,
                sender_id: Some(sender_id),
                publisher: Some(publisher),
                asset_ids,
            } if reason.contains("manifest asset_hash mismatch")
                && sender_id == "node-remote"
                && publisher == "node-remote"
                && !asset_ids.is_empty()
        )));
    }
5793
5794 #[tokio::test]
5795 async fn fetch_assets_include_mutation_payload_for_remote_replay() {
5796 let (evo, store) = build_test_evo(
5797 "remote-fetch-export",
5798 "run-remote-fetch",
5799 command_validator(),
5800 );
5801 evo.capture_successful_mutation(&"run-remote-fetch".into(), sample_mutation())
5802 .await
5803 .unwrap();
5804
5805 let response = EvolutionNetworkNode::new(store.clone())
5806 .fetch_assets(
5807 "node-source",
5808 &FetchQuery {
5809 sender_id: "node-client".into(),
5810 signals: vec!["missing readme".into()],
5811 since_cursor: None,
5812 resume_token: None,
5813 },
5814 )
5815 .unwrap();
5816
5817 assert!(response.assets.iter().any(|asset| matches!(
5818 asset,
5819 NetworkAsset::EvolutionEvent {
5820 event: EvolutionEvent::MutationDeclared { mutation }
5821 } if mutation.intent.id == "mutation-1"
5822 )));
5823 assert!(response
5824 .assets
5825 .iter()
5826 .any(|asset| matches!(asset, NetworkAsset::Gene { .. })));
5827 assert!(response
5828 .assets
5829 .iter()
5830 .any(|asset| matches!(asset, NetworkAsset::Capsule { .. })));
5831 }
5832
    #[test]
    fn fetch_assets_delta_sync_supports_since_cursor_and_resume_token() {
        // Delta sync: a resume token or an explicit cursor from a first fetch
        // must let a restarted node fetch only assets recorded afterwards.
        let store_root =
            std::env::temp_dir().join(format!("oris-evokernel-fetch-delta-store-{}", next_id("t")));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        let store: Arc<dyn EvolutionStore> =
            Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
        let node = EvolutionNetworkNode::new(store.clone());
        // First reported experience -> gene-delta-a, visible to the first fetch.
        node.record_reported_experience(
            "delta-agent",
            "gene-delta-a",
            vec!["delta.signal".into()],
            vec![
                "task_class=delta.signal".into(),
                "task_label=delta replay".into(),
            ],
            vec!["a2a.tasks.report".into()],
        )
        .unwrap();

        // Full fetch returns gene-delta-a plus a cursor and a resume token.
        let first = node
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "delta-agent".into(),
                    signals: vec!["delta.signal".into()],
                    since_cursor: None,
                    resume_token: None,
                },
            )
            .unwrap();
        let first_cursor = first.next_cursor.clone().expect("first next_cursor");
        let first_token = first.resume_token.clone().expect("first resume_token");
        assert!(first.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-a")
        ));

        // Simulate a node restart over the same store, then record a second
        // experience that only delta fetches should return.
        let restarted = EvolutionNetworkNode::new(store.clone());
        restarted
            .record_reported_experience(
                "delta-agent",
                "gene-delta-b",
                vec!["delta.signal".into()],
                vec![
                    "task_class=delta.signal".into(),
                    "task_label=delta replay".into(),
                ],
                vec!["a2a.tasks.report".into()],
            )
            .unwrap();

        // Resuming via the token yields only the new gene, and the sync audit
        // records which cursor the token resolved to.
        let from_token = restarted
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "delta-agent".into(),
                    signals: vec!["delta.signal".into()],
                    since_cursor: None,
                    resume_token: Some(first_token),
                },
            )
            .unwrap();
        assert!(from_token.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-b")
        ));
        assert!(!from_token.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-a")
        ));
        assert_eq!(
            from_token.sync_audit.requested_cursor,
            Some(first_cursor.clone())
        );
        assert!(from_token.sync_audit.applied_count >= 1);

        // An explicit since_cursor behaves the same way as the resume token.
        let from_cursor = restarted
            .fetch_assets(
                "execution-api",
                &FetchQuery {
                    sender_id: "delta-agent".into(),
                    signals: vec!["delta.signal".into()],
                    since_cursor: Some(first_cursor),
                    resume_token: None,
                },
            )
            .unwrap();
        assert!(from_cursor.assets.iter().any(
            |asset| matches!(asset, NetworkAsset::Gene { gene } if gene.id == "gene-delta-b")
        ));
    }
5924
5925 #[test]
5926 fn partial_remote_import_keeps_publisher_for_already_imported_assets() {
5927 let store_root = std::env::temp_dir().join(format!(
5928 "oris-evokernel-remote-partial-store-{}",
5929 std::process::id()
5930 ));
5931 if store_root.exists() {
5932 fs::remove_dir_all(&store_root).unwrap();
5933 }
5934 let store: Arc<dyn EvolutionStore> = Arc::new(FailOnAppendStore::new(store_root, 5));
5935 let evo = build_test_evo_with_store(
5936 "remote-partial",
5937 "run-remote-partial",
5938 command_validator(),
5939 store.clone(),
5940 );
5941 let envelope = remote_publish_envelope(
5942 "node-partial",
5943 "run-remote-partial",
5944 "gene-partial",
5945 "capsule-partial",
5946 "mutation-partial",
5947 "partial-signal",
5948 "PARTIAL.md",
5949 "# partial",
5950 );
5951
5952 let result = evo.import_remote_envelope(&envelope);
5953
5954 assert!(matches!(result, Err(EvoKernelError::Store(_))));
5955 let projection = store.rebuild_projection().unwrap();
5956 assert!(projection
5957 .genes
5958 .iter()
5959 .any(|gene| gene.id == "gene-partial"));
5960 assert!(projection.capsules.is_empty());
5961 let publishers = evo.remote_publishers.lock().unwrap();
5962 assert_eq!(
5963 publishers.get("gene-partial").map(String::as_str),
5964 Some("node-partial")
5965 );
5966 }
5967
    #[test]
    fn retry_remote_import_after_partial_failure_only_imports_missing_assets() {
        // After a partial import failure (gene landed, capsule did not), a
        // retry of the same envelope must import only the missing capsule and
        // never duplicate events that already succeeded.
        let store_root = std::env::temp_dir().join(format!(
            "oris-evokernel-remote-partial-retry-store-{}",
            next_id("t")
        ));
        if store_root.exists() {
            fs::remove_dir_all(&store_root).unwrap();
        }
        // Store that fails one append mid-import, making the first attempt
        // partial; the assertions below show the gene landed, the capsule not.
        let store: Arc<dyn EvolutionStore> = Arc::new(FailOnAppendStore::new(store_root, 5));
        let evo = build_test_evo_with_store(
            "remote-partial-retry",
            "run-remote-partial-retry",
            command_validator(),
            store.clone(),
        );
        let envelope = remote_publish_envelope(
            "node-partial",
            "run-remote-partial-retry",
            "gene-partial-retry",
            "capsule-partial-retry",
            "mutation-partial-retry",
            "partial-retry-signal",
            "PARTIAL_RETRY.md",
            "# partial retry",
        );

        // First attempt hits the injected store failure.
        let first = evo.import_remote_envelope(&envelope);
        assert!(matches!(first, Err(EvoKernelError::Store(_))));

        // Retry succeeds and imports only the capsule that was missing.
        let retry = evo.import_remote_envelope(&envelope).unwrap();

        assert_eq!(retry.imported_asset_ids, vec!["capsule-partial-retry"]);
        let projection = store.rebuild_projection().unwrap();
        let gene = projection
            .genes
            .iter()
            .find(|gene| gene.id == "gene-partial-retry")
            .unwrap();
        assert_eq!(gene.state, AssetState::Quarantined);
        let capsule = projection
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-partial-retry")
            .unwrap();
        assert_eq!(capsule.state, AssetState::Quarantined);
        // The retry must not double-count the gene attempt.
        assert_eq!(projection.attempt_counts["gene-partial-retry"], 1);

        // Each import event appears exactly once across both attempts.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::MutationDeclared { mutation }
                        if mutation.intent.id == "mutation-partial-retry"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::GeneProjected { gene } if gene.id == "gene-partial-retry"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::CapsuleCommitted { capsule }
                        if capsule.id == "capsule-partial-retry"
                    )
                })
                .count(),
            1
        );
    }
6056
    #[tokio::test]
    async fn duplicate_remote_import_does_not_requarantine_locally_validated_assets() {
        // Re-importing an envelope whose assets were already imported and then
        // locally promoted must be a no-op: no re-quarantine, no duplicate
        // events, no extra attempt counts.
        let (evo, store) = build_test_evo(
            "remote-idempotent",
            "run-remote-idempotent",
            command_validator(),
        );
        let envelope = remote_publish_envelope(
            "node-idempotent",
            "run-remote-idempotent",
            "gene-idempotent",
            "capsule-idempotent",
            "mutation-idempotent",
            "idempotent-signal",
            "IDEMPOTENT.md",
            "# idempotent",
        );

        // First import brings in both assets.
        let first = evo.import_remote_envelope(&envelope).unwrap();
        assert_eq!(
            first.imported_asset_ids,
            vec!["gene-idempotent", "capsule-idempotent"]
        );

        // A successful replay promotes the quarantined assets locally.
        let decision = evo
            .replay_or_fallback(replay_input("idempotent-signal"))
            .await
            .unwrap();
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-idempotent".into()));

        // Snapshot states and attempt counts before the duplicate import.
        let projection_before = store.rebuild_projection().unwrap();
        let attempts_before = projection_before.attempt_counts["gene-idempotent"];
        let gene_before = projection_before
            .genes
            .iter()
            .find(|gene| gene.id == "gene-idempotent")
            .unwrap();
        assert_eq!(gene_before.state, AssetState::Promoted);
        let capsule_before = projection_before
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-idempotent")
            .unwrap();
        assert_eq!(capsule_before.state, AssetState::Promoted);

        // Second import of the identical envelope imports nothing.
        let second = evo.import_remote_envelope(&envelope).unwrap();
        assert!(second.imported_asset_ids.is_empty());

        // States and attempt counts are untouched by the duplicate import.
        let projection_after = store.rebuild_projection().unwrap();
        assert_eq!(
            projection_after.attempt_counts["gene-idempotent"],
            attempts_before
        );
        let gene_after = projection_after
            .genes
            .iter()
            .find(|gene| gene.id == "gene-idempotent")
            .unwrap();
        assert_eq!(gene_after.state, AssetState::Promoted);
        let capsule_after = projection_after
            .capsules
            .iter()
            .find(|capsule| capsule.id == "capsule-idempotent")
            .unwrap();
        assert_eq!(capsule_after.state, AssetState::Promoted);

        // Each import event is present exactly once despite two imports.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::MutationDeclared { mutation }
                        if mutation.intent.id == "mutation-idempotent"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::GeneProjected { gene } if gene.id == "gene-idempotent"
                    )
                })
                .count(),
            1
        );
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::CapsuleCommitted { capsule }
                        if capsule.id == "capsule-idempotent"
                    )
                })
                .count(),
            1
        );

        // Sync audits: the first pass scanned everything without failures; the
        // second applied nothing, skipped the whole envelope, and still issued
        // a resume token.
        assert_eq!(first.sync_audit.scanned_count, envelope.assets.len());
        assert_eq!(first.sync_audit.failed_count, 0);
        assert_eq!(second.sync_audit.applied_count, 0);
        assert_eq!(second.sync_audit.skipped_count, envelope.assets.len());
        assert!(second.resume_token.is_some());
    }
6170
6171 #[tokio::test]
6172 async fn insufficient_evu_blocks_publish_but_not_local_replay() {
6173 let (evo, _) = build_test_evo("stake-gate", "run-stake", command_validator());
6174 let capsule = evo
6175 .capture_successful_mutation(&"run-stake".into(), sample_mutation())
6176 .await
6177 .unwrap();
6178 let publish = evo.export_promoted_assets("node-local");
6179 assert!(matches!(publish, Err(EvoKernelError::Validation(_))));
6180
6181 let decision = evo
6182 .replay_or_fallback(replay_input("missing readme"))
6183 .await
6184 .unwrap();
6185 assert!(decision.used_capsule);
6186 assert_eq!(decision.capsule_id, Some(capsule.id));
6187 }
6188
    #[tokio::test]
    async fn second_replay_validation_failure_revokes_gene_immediately() {
        // NOTE(review): despite the test name, the assertions below expect the
        // gene and capsule to REMAIN Promoted after two failed replays, with
        // no GeneRevoked event — only a ValidationFailed record plus a
        // confidence drop that pushes later replays below the threshold.
        // Confirm whether the name or the expected policy is stale.
        let (capturer, store) = build_test_evo("revoke-replay", "run-capture", command_validator());
        let capsule = capturer
            .capture_successful_mutation(&"run-capture".into(), sample_mutation())
            .await
            .unwrap();

        // Replay twice through an instance whose validator always fails,
        // sharing the store that holds the captured capsule.
        let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
        let failing_replay = build_test_evo_with_store(
            "revoke-replay",
            "run-replay-fail",
            failing_validator,
            store.clone(),
        );

        let first = failing_replay
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();
        let second = failing_replay
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();

        // Both replays fall back to the planner.
        assert!(!first.used_capsule);
        assert!(first.fallback_to_planner);
        assert!(!second.used_capsule);
        assert!(second.fallback_to_planner);

        // The assets keep their Promoted state...
        let projection = store.rebuild_projection().unwrap();
        let gene = projection
            .genes
            .iter()
            .find(|gene| gene.id == capsule.gene_id)
            .unwrap();
        assert_eq!(gene.state, AssetState::Promoted);
        let committed_capsule = projection
            .capsules
            .iter()
            .find(|current| current.id == capsule.id)
            .unwrap();
        assert_eq!(committed_capsule.state, AssetState::Promoted);

        // ...with a single ValidationFailed event recorded for the gene and
        // no GeneRevoked event at all.
        let events = store.scan(1).unwrap();
        assert_eq!(
            events
                .iter()
                .filter(|stored| {
                    matches!(
                        &stored.event,
                        EvolutionEvent::ValidationFailed {
                            gene_id: Some(gene_id),
                            ..
                        } if gene_id == &capsule.gene_id
                    )
                })
                .count(),
            1
        );
        assert!(!events.iter().any(|stored| {
            matches!(
                &stored.event,
                EvolutionEvent::GeneRevoked { gene_id, .. } if gene_id == &capsule.gene_id
            )
        }));

        // Even with a passing validator, the dropped confidence now blocks
        // replay and forces the planner fallback.
        let recovered = build_test_evo_with_store(
            "revoke-replay",
            "run-replay-check",
            command_validator(),
            store.clone(),
        );
        let after_revoke = recovered
            .replay_or_fallback(replay_input("missing readme"))
            .await
            .unwrap();
        assert!(!after_revoke.used_capsule);
        assert!(after_revoke.fallback_to_planner);
        assert!(after_revoke.reason.contains("below replay threshold"));
    }
6270
    #[tokio::test]
    async fn remote_reuse_success_rewards_publisher_and_biases_selection() {
        // Seed reputations so node-b is clearly the stronger publisher; its
        // capsule should win selection and its account should be rewarded.
        let ledger = Arc::new(Mutex::new(EvuLedger {
            accounts: vec![],
            reputations: vec![
                oris_economics::ReputationRecord {
                    node_id: "node-a".into(),
                    publish_success_rate: 0.4,
                    validator_accuracy: 0.4,
                    reuse_impact: 0,
                },
                oris_economics::ReputationRecord {
                    node_id: "node-b".into(),
                    publish_success_rate: 0.95,
                    validator_accuracy: 0.95,
                    reuse_impact: 8,
                },
            ],
        }));
        let (evo, _) = build_test_evo("remote-success", "run-remote", command_validator());
        let evo = evo.with_economics(ledger.clone());

        // Both nodes publish a capsule answering the same signal.
        let envelope_a = remote_publish_envelope(
            "node-a",
            "run-remote-a",
            "gene-a",
            "capsule-a",
            "mutation-a",
            "shared-signal",
            "A.md",
            "# from a",
        );
        let envelope_b = remote_publish_envelope(
            "node-b",
            "run-remote-b",
            "gene-b",
            "capsule-b",
            "mutation-b",
            "shared-signal",
            "B.md",
            "# from b",
        );

        evo.import_remote_envelope(&envelope_a).unwrap();
        evo.import_remote_envelope(&envelope_b).unwrap();

        let decision = evo
            .replay_or_fallback(replay_input("shared-signal"))
            .await
            .unwrap();

        // Reputation bias steers selection to node-b's capsule...
        assert!(decision.used_capsule);
        assert_eq!(decision.capsule_id, Some("capsule-b".into()));
        // ...and settlement credits node-b with the configured reuse reward,
        // keeping the selector bias ordering consistent with the reputations.
        let locked = ledger.lock().unwrap();
        let rewarded = locked
            .accounts
            .iter()
            .find(|item| item.node_id == "node-b")
            .unwrap();
        assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
        assert!(
            locked.selector_reputation_bias()["node-b"]
                > locked.selector_reputation_bias()["node-a"]
        );
    }
6336
6337 #[tokio::test]
6338 async fn remote_reuse_settlement_tracks_selected_capsule_publisher_for_shared_gene() {
6339 let ledger = Arc::new(Mutex::new(EvuLedger::default()));
6340 let (evo, _) = build_test_evo(
6341 "remote-shared-publisher",
6342 "run-remote-shared-publisher",
6343 command_validator(),
6344 );
6345 let evo = evo.with_economics(ledger.clone());
6346 let input = replay_input("shared-signal");
6347 let preferred = remote_publish_envelope_with_env(
6348 "node-a",
6349 "run-remote-a",
6350 "gene-shared",
6351 "capsule-preferred",
6352 "mutation-preferred",
6353 "shared-signal",
6354 "A.md",
6355 "# from a",
6356 input.env.clone(),
6357 );
6358 let fallback = remote_publish_envelope_with_env(
6359 "node-b",
6360 "run-remote-b",
6361 "gene-shared",
6362 "capsule-fallback",
6363 "mutation-fallback",
6364 "shared-signal",
6365 "B.md",
6366 "# from b",
6367 EnvFingerprint {
6368 rustc_version: "old-rustc".into(),
6369 cargo_lock_hash: "other-lock".into(),
6370 target_triple: "aarch64-apple-darwin".into(),
6371 os: "linux".into(),
6372 },
6373 );
6374
6375 evo.import_remote_envelope(&preferred).unwrap();
6376 evo.import_remote_envelope(&fallback).unwrap();
6377
6378 let decision = evo.replay_or_fallback(input).await.unwrap();
6379
6380 assert!(decision.used_capsule);
6381 assert_eq!(decision.capsule_id, Some("capsule-preferred".into()));
6382 let locked = ledger.lock().unwrap();
6383 let rewarded = locked
6384 .accounts
6385 .iter()
6386 .find(|item| item.node_id == "node-a")
6387 .unwrap();
6388 assert_eq!(rewarded.balance, evo.stake_policy.reuse_reward);
6389 assert!(locked.accounts.iter().all(|item| item.node_id != "node-b"));
6390 }
6391
6392 #[test]
6393 fn select_candidates_surfaces_ranked_remote_cold_start_candidates() {
6394 let ledger = Arc::new(Mutex::new(EvuLedger {
6395 accounts: vec![],
6396 reputations: vec![
6397 oris_economics::ReputationRecord {
6398 node_id: "node-a".into(),
6399 publish_success_rate: 0.4,
6400 validator_accuracy: 0.4,
6401 reuse_impact: 0,
6402 },
6403 oris_economics::ReputationRecord {
6404 node_id: "node-b".into(),
6405 publish_success_rate: 0.95,
6406 validator_accuracy: 0.95,
6407 reuse_impact: 8,
6408 },
6409 ],
6410 }));
6411 let (evo, _) = build_test_evo("remote-select", "run-remote-select", command_validator());
6412 let evo = evo.with_economics(ledger);
6413
6414 let envelope_a = remote_publish_envelope(
6415 "node-a",
6416 "run-remote-a",
6417 "gene-a",
6418 "capsule-a",
6419 "mutation-a",
6420 "shared-signal",
6421 "A.md",
6422 "# from a",
6423 );
6424 let envelope_b = remote_publish_envelope(
6425 "node-b",
6426 "run-remote-b",
6427 "gene-b",
6428 "capsule-b",
6429 "mutation-b",
6430 "shared-signal",
6431 "B.md",
6432 "# from b",
6433 );
6434
6435 evo.import_remote_envelope(&envelope_a).unwrap();
6436 evo.import_remote_envelope(&envelope_b).unwrap();
6437
6438 let candidates = evo.select_candidates(&replay_input("shared-signal"));
6439
6440 assert_eq!(candidates.len(), 1);
6441 assert_eq!(candidates[0].gene.id, "gene-b");
6442 assert_eq!(candidates[0].capsules[0].id, "capsule-b");
6443 }
6444
6445 #[tokio::test]
6446 async fn remote_reuse_publisher_bias_survives_restart() {
6447 let ledger = Arc::new(Mutex::new(EvuLedger {
6448 accounts: vec![],
6449 reputations: vec![
6450 oris_economics::ReputationRecord {
6451 node_id: "node-a".into(),
6452 publish_success_rate: 0.4,
6453 validator_accuracy: 0.4,
6454 reuse_impact: 0,
6455 },
6456 oris_economics::ReputationRecord {
6457 node_id: "node-b".into(),
6458 publish_success_rate: 0.95,
6459 validator_accuracy: 0.95,
6460 reuse_impact: 8,
6461 },
6462 ],
6463 }));
6464 let store_root = std::env::temp_dir().join(format!(
6465 "oris-evokernel-remote-restart-store-{}",
6466 next_id("t")
6467 ));
6468 if store_root.exists() {
6469 fs::remove_dir_all(&store_root).unwrap();
6470 }
6471 let store: Arc<dyn EvolutionStore> =
6472 Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
6473 let evo = build_test_evo_with_store(
6474 "remote-success-restart-source",
6475 "run-remote-restart-source",
6476 command_validator(),
6477 store.clone(),
6478 )
6479 .with_economics(ledger.clone());
6480
6481 let envelope_a = remote_publish_envelope(
6482 "node-a",
6483 "run-remote-a",
6484 "gene-a",
6485 "capsule-a",
6486 "mutation-a",
6487 "shared-signal",
6488 "A.md",
6489 "# from a",
6490 );
6491 let envelope_b = remote_publish_envelope(
6492 "node-b",
6493 "run-remote-b",
6494 "gene-b",
6495 "capsule-b",
6496 "mutation-b",
6497 "shared-signal",
6498 "B.md",
6499 "# from b",
6500 );
6501
6502 evo.import_remote_envelope(&envelope_a).unwrap();
6503 evo.import_remote_envelope(&envelope_b).unwrap();
6504
6505 let recovered = build_test_evo_with_store(
6506 "remote-success-restart-recovered",
6507 "run-remote-restart-recovered",
6508 command_validator(),
6509 store.clone(),
6510 )
6511 .with_economics(ledger.clone());
6512
6513 let decision = recovered
6514 .replay_or_fallback(replay_input("shared-signal"))
6515 .await
6516 .unwrap();
6517
6518 assert!(decision.used_capsule);
6519 assert_eq!(decision.capsule_id, Some("capsule-b".into()));
6520 let locked = ledger.lock().unwrap();
6521 let rewarded = locked
6522 .accounts
6523 .iter()
6524 .find(|item| item.node_id == "node-b")
6525 .unwrap();
6526 assert_eq!(rewarded.balance, recovered.stake_policy.reuse_reward);
6527 }
6528
6529 #[tokio::test]
6530 async fn remote_reuse_failure_penalizes_remote_reputation() {
6531 let ledger = Arc::new(Mutex::new(EvuLedger::default()));
6532 let failing_validator: Arc<dyn Validator> = Arc::new(FixedValidator { success: false });
6533 let (evo, _) = build_test_evo("remote-failure", "run-failure", failing_validator);
6534 let evo = evo.with_economics(ledger.clone());
6535
6536 let envelope = remote_publish_envelope(
6537 "node-remote",
6538 "run-remote-failed",
6539 "gene-remote",
6540 "capsule-remote",
6541 "mutation-remote",
6542 "failure-signal",
6543 "FAILED.md",
6544 "# from remote",
6545 );
6546 evo.import_remote_envelope(&envelope).unwrap();
6547
6548 let decision = evo
6549 .replay_or_fallback(replay_input("failure-signal"))
6550 .await
6551 .unwrap();
6552
6553 assert!(!decision.used_capsule);
6554 assert!(decision.fallback_to_planner);
6555
6556 let signal = evo.economics_signal("node-remote").unwrap();
6557 assert_eq!(signal.available_evu, 0);
6558 assert!(signal.publish_success_rate < 0.5);
6559 assert!(signal.validator_accuracy < 0.5);
6560 }
6561
6562 #[test]
6563 fn ensure_builtin_experience_assets_is_idempotent_and_fetchable() {
6564 let store_root = std::env::temp_dir().join(format!(
6565 "oris-evokernel-builtin-experience-store-{}",
6566 next_id("t")
6567 ));
6568 if store_root.exists() {
6569 fs::remove_dir_all(&store_root).unwrap();
6570 }
6571 let store: Arc<dyn EvolutionStore> =
6572 Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
6573 let node = EvolutionNetworkNode::new(store.clone());
6574
6575 let first = node
6576 .ensure_builtin_experience_assets("runtime-bootstrap")
6577 .unwrap();
6578 assert!(!first.imported_asset_ids.is_empty());
6579
6580 let second = node
6581 .ensure_builtin_experience_assets("runtime-bootstrap")
6582 .unwrap();
6583 assert!(second.imported_asset_ids.is_empty());
6584
6585 let fetch = node
6586 .fetch_assets(
6587 "execution-api",
6588 &FetchQuery {
6589 sender_id: "compat-agent".into(),
6590 signals: vec!["error".into()],
6591 since_cursor: None,
6592 resume_token: None,
6593 },
6594 )
6595 .unwrap();
6596
6597 let mut has_builtin_evomap = false;
6598 for asset in fetch.assets {
6599 if let NetworkAsset::Gene { gene } = asset {
6600 if strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
6601 == Some("builtin_evomap")
6602 && gene.state == AssetState::Promoted
6603 {
6604 has_builtin_evomap = true;
6605 break;
6606 }
6607 }
6608 }
6609 assert!(has_builtin_evomap);
6610 }
6611
6612 #[test]
6613 fn reported_experience_retention_keeps_latest_three_and_preserves_builtin_assets() {
6614 let store_root = std::env::temp_dir().join(format!(
6615 "oris-evokernel-reported-retention-store-{}",
6616 next_id("t")
6617 ));
6618 if store_root.exists() {
6619 fs::remove_dir_all(&store_root).unwrap();
6620 }
6621 let store: Arc<dyn EvolutionStore> =
6622 Arc::new(oris_evolution::JsonlEvolutionStore::new(&store_root));
6623 let node = EvolutionNetworkNode::new(store.clone());
6624
6625 node.ensure_builtin_experience_assets("runtime-bootstrap")
6626 .unwrap();
6627
6628 for idx in 0..4 {
6629 node.record_reported_experience(
6630 "reporter-a",
6631 format!("reported-docs-rewrite-v{}", idx + 1),
6632 vec!["docs.rewrite".into(), format!("task-{}", idx + 1)],
6633 vec![
6634 "task_class=docs.rewrite".into(),
6635 format!("task_label=Docs rewrite v{}", idx + 1),
6636 format!("summary=reported replay {}", idx + 1),
6637 ],
6638 vec!["a2a.tasks.report".into()],
6639 )
6640 .unwrap();
6641 }
6642
6643 let (_, projection) = store.scan_projection().unwrap();
6644 let reported_promoted = projection
6645 .genes
6646 .iter()
6647 .filter(|gene| {
6648 gene.state == AssetState::Promoted
6649 && strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
6650 == Some("reported_experience")
6651 && strategy_metadata_value(&gene.strategy, "task_class").as_deref()
6652 == Some("docs.rewrite")
6653 })
6654 .count();
6655 let reported_revoked = projection
6656 .genes
6657 .iter()
6658 .filter(|gene| {
6659 gene.state == AssetState::Revoked
6660 && strategy_metadata_value(&gene.strategy, "asset_origin").as_deref()
6661 == Some("reported_experience")
6662 && strategy_metadata_value(&gene.strategy, "task_class").as_deref()
6663 == Some("docs.rewrite")
6664 })
6665 .count();
6666 let builtin_promoted = projection
6667 .genes
6668 .iter()
6669 .filter(|gene| {
6670 gene.state == AssetState::Promoted
6671 && matches!(
6672 strategy_metadata_value(&gene.strategy, "asset_origin").as_deref(),
6673 Some("builtin") | Some("builtin_evomap")
6674 )
6675 })
6676 .count();
6677
6678 assert_eq!(reported_promoted, 3);
6679 assert_eq!(reported_revoked, 1);
6680 assert!(builtin_promoted >= 1);
6681
6682 let fetch = node
6683 .fetch_assets(
6684 "execution-api",
6685 &FetchQuery {
6686 sender_id: "consumer-b".into(),
6687 signals: vec!["docs.rewrite".into()],
6688 since_cursor: None,
6689 resume_token: None,
6690 },
6691 )
6692 .unwrap();
6693 let docs_genes = fetch
6694 .assets
6695 .into_iter()
6696 .filter_map(|asset| match asset {
6697 NetworkAsset::Gene { gene } => Some(gene),
6698 _ => None,
6699 })
6700 .filter(|gene| {
6701 strategy_metadata_value(&gene.strategy, "task_class").as_deref()
6702 == Some("docs.rewrite")
6703 })
6704 .collect::<Vec<_>>();
6705 assert!(docs_genes.len() >= 3);
6706 }
6707}