use clap::{Args, Subcommand};
use cortex_core::{
accepted_axiom_source_commits, compose_policy_outcomes, is_axiom_source_commit_fresh,
parse_authority_feedback_loop, parse_axiom_execution_trust, parse_cortex_context_trust,
AuditRecordId, ClaimCeiling, ClaimProofState, MemoryId, PolicyContribution, PolicyOutcome,
ProvenanceClass, SemanticTrustClass, SemanticUse,
AXIOM_EXECUTION_TRUST_SOURCE_COMMIT_STALE_INVARIANT, CORTEX_AXIOM_ACCEPTED_SOURCE_COMMITS_ENV,
};
use cortex_memory::{
AdmissionDecision, AdmissionEnvelopeError, AdmissionLifecycle, AdmissionSemanticTrustInput,
AxiomMemoryAdmissionRequest, AxiomTrustExchangeAdmissionRequest, TrustExchangeAdmission,
};
use cortex_retrieval::{
compose_fuzzy_boost, compose_lexical_semantic, cosine_similarity, extract_snippet, query_fts5,
resolve_conflicts, score, snippet_ansi_highlighted, snippet_plain_text, tokenize_query,
AuthorityLevel, AuthorityProofHint, ConflictingMemoryInput, EmbedRecord, Embedder,
LexicalDocument, LexicalIndex, LocalStubEmbedder, OllamaEmbedder, ProofClosureHint,
ScoreComponent, ScoreInputs, STUB_BACKEND_ID,
};
use cortex_store::proof::verify_memory_proof_closure;
use cortex_store::repo::memories::{
accept_open_contradiction_contribution, accept_proof_closure_contribution,
ACCEPT_OPERATOR_TEMPORAL_USE_RULE_ID, ACCEPT_SEMANTIC_TRUST_RULE_ID,
};
use cortex_store::repo::EmbeddingRepo;
use cortex_store::repo::{
ContradictionRepo, MemoryAcceptanceAudit, MemoryRecord, MemoryRepo, MemorySessionUse,
OutcomeMemoryRelationRecord,
};
use cortex_store::Pool;
use rusqlite::params as rparams;
use serde_json::json;
use std::collections::{BTreeMap, BTreeSet};
use std::path::PathBuf;
use std::{fs, io};
use crate::cmd::open_default_store;
use crate::config::EmbeddingBackend;
use crate::exit::Exit;
use crate::output::{self, Envelope};
// Subcommands of `cortex memory`. Deliberately commented with `//` rather
// than `///`: clap derive turns doc comments into user-visible help text,
// which would change CLI output.
#[derive(Debug, Subcommand)]
pub enum MemorySub {
    // Accept a candidate memory (policy-gated, audited).
    Accept(AcceptArgs),
    // Admit a pai-axiom envelope, direct or trust-exchange mode.
    AdmitAxiom(AdmitAxiomArgs),
    // List memories, optionally filtered by tag.
    List(ListArgs),
    // Search memories (lexical, with optional fuzzy/semantic/snippet modes).
    Search(SearchArgs),
    // Embedding generation / preview.
    Embed(EmbedArgs),
    #[command(subcommand)]
    Tag(TagSub),
    #[command(subcommand)]
    Outcome(OutcomeSub),
    // Hygiene metrics over active memories.
    Health(HealthArgs),
}
// Arguments for `memory embed`. `//` comments only (clap would surface `///`
// as help text).
#[derive(Debug, Args)]
pub struct EmbedArgs {
    // Preview flag; exact semantics live in `embed` (not shown in this view).
    #[arg(long)]
    pub preview: bool,
    // Embedding model name override.
    #[arg(long, value_name = "MODEL")]
    pub model: Option<String>,
    // Embedding service endpoint override.
    #[arg(long, value_name = "URL")]
    pub endpoint: Option<String>,
    // Embedding dimensionality; defaults to 768.
    #[arg(long, value_name = "DIM", default_value = "768")]
    pub dim: usize,
}
// Arguments for `memory accept`.
#[derive(Debug, Args)]
pub struct AcceptArgs {
    // Id of the candidate memory to accept.
    #[arg(value_name = "MEMORY_ID")]
    pub candidate_id: MemoryId,
}
// Arguments for `memory admit-axiom`. Two mutually exclusive modes:
// direct envelope input (--file / --json) or trust-exchange input (the three
// pai-axiom PATH flags). clap enforces the exclusion via conflicts_with_all.
#[derive(Debug, Args)]
pub struct AdmitAxiomArgs {
    // Direct mode: read the admission envelope from a file.
    #[arg(
        long,
        value_name = "PATH",
        conflicts_with_all = [
            "json",
            "cortex_context_trust",
            "axiom_execution_trust",
            "authority_feedback_loop",
        ]
    )]
    pub file: Option<PathBuf>,
    // Direct mode: admission envelope passed inline as a JSON string.
    #[arg(
        long,
        value_name = "JSON",
        conflicts_with_all = [
            "file",
            "cortex_context_trust",
            "axiom_execution_trust",
            "authority_feedback_loop",
        ]
    )]
    pub json: Option<String>,
    // Trust-exchange mode: cortex_context_trust envelope path (required on
    // that path — see admit_axiom_trust_exchange).
    #[arg(long, value_name = "PATH")]
    pub cortex_context_trust: Option<PathBuf>,
    // Trust-exchange mode: axiom_execution_trust envelope path (required).
    #[arg(long, value_name = "PATH")]
    pub axiom_execution_trust: Option<PathBuf>,
    // Trust-exchange mode: authority_feedback_loop record path (required).
    #[arg(long, value_name = "PATH")]
    pub authority_feedback_loop: Option<PathBuf>,
    // Admission lifecycle keyword; parsed by parse_admission_lifecycle.
    #[arg(long, value_name = "LIFECYCLE", default_value = "candidate_only")]
    pub lifecycle: String,
    // Marks the candidate as derived from quarantined material.
    #[arg(long)]
    pub derived_from_quarantined: bool,
}
// Arguments for `memory search`.
#[derive(Debug, Args)]
pub struct SearchArgs {
    // Free-text query; must be non-empty after trimming.
    #[arg(value_name = "QUERY")]
    pub query: String,
    // Include scoring explanation in output.
    #[arg(long)]
    pub explain: bool,
    // Repeatable tag filter.
    #[arg(long = "tag", value_name = "TAG")]
    pub tag: Vec<String>,
    // Enable fuzzy matching boost.
    #[arg(long)]
    pub fuzzy: bool,
    // Enable semantic (embedding-based) scoring.
    #[arg(long)]
    pub semantic: bool,
    // Include matched-text snippets in output.
    #[arg(long)]
    pub snippet: bool,
}
// Subcommands of `memory tag`.
#[derive(Debug, Subcommand)]
pub enum TagSub {
    // Attach a domain tag to a memory.
    Add(TagAddArgs),
    // Detach a domain tag from a memory.
    Remove(TagRemoveArgs),
}
// Arguments for `memory tag add`.
#[derive(Debug, Args)]
pub struct TagAddArgs {
    // Target memory id.
    #[arg(value_name = "MEMORY_ID")]
    pub memory_id: MemoryId,
    // Tag to attach.
    #[arg(value_name = "TAG")]
    pub tag: String,
}
// Arguments for `memory tag remove`.
#[derive(Debug, Args)]
pub struct TagRemoveArgs {
    // Target memory id.
    #[arg(value_name = "MEMORY_ID")]
    pub memory_id: MemoryId,
    // Tag to detach.
    #[arg(value_name = "TAG")]
    pub tag: String,
}
// Subcommands of `memory outcome`.
#[derive(Debug, Subcommand)]
pub enum OutcomeSub {
    // Record a helpful / not-helpful outcome for a memory in a session.
    Record(OutcomeRecordArgs),
}
// Arguments for `memory outcome record`.
#[derive(Debug, Args)]
pub struct OutcomeRecordArgs {
    // Memory the outcome applies to; must be active.
    #[arg(long, value_name = "ID")]
    pub memory_id: MemoryId,
    // Session id; must be non-empty after trimming.
    #[arg(long, value_name = "SESSION_ID")]
    pub session: String,
    // helpful | not-helpful (maps to Used / Rejected relations).
    #[arg(long, value_name = "RESULT")]
    pub result: OutcomeResult,
    // Optional free-text note; echoed in output, not persisted separately here.
    #[arg(long, value_name = "TEXT")]
    pub note: Option<String>,
}
// Outcome verdicts accepted by `--result`. ValueEnum derives the CLI-facing
// names; `//` comments are used so clap help text is unchanged.
#[derive(Debug, Clone, PartialEq, Eq, clap::ValueEnum)]
pub enum OutcomeResult {
    // Memory helped the session → relation Used.
    Helpful,
    // Memory did not help → relation Rejected.
    NotHelpful,
}
// Arguments for `memory health`.
#[derive(Debug, Args)]
pub struct HealthArgs {
    // Restrict the other buckets to never-validated memories.
    #[arg(long)]
    pub unvalidated_only: bool,
    // Age cutoff in days for the "older than" bucket (default 30).
    #[arg(long, value_name = "DAYS", default_value = "30")]
    pub older_than: u32,
    // Confidence threshold for the "low confidence" bucket (default 0.5).
    #[arg(long, value_name = "FLOAT", default_value = "0.5")]
    pub confidence_below: f64,
}
/// Invariant emitted when an outcome targets a missing or non-active memory.
pub const OUTCOME_MEMORY_NOT_FOUND_INVARIANT: &str = "memory.outcome.memory_not_found";
/// Invariant emitted when the operator-temporal gate blocks a tag mutation.
pub const TAG_WRITE_PENDING_INVARIANT: &str =
    "memory.tag.write_path_pending_authority_revalidation";
/// Invariant attached to the Warn contribution on attestation-less CLI writes.
pub const ACCEPT_OPERATOR_TEMPORAL_AUTHORITY_WARN_NO_ATTESTATION_INVARIANT: &str =
    "memory.accept.operator_temporal_authority.warn_no_attestation";
// Arguments for `memory list`.
#[derive(Debug, Args)]
pub struct ListArgs {
    // Repeatable tag filter.
    #[arg(long = "tag", value_name = "TAG")]
    pub tag: Vec<String>,
}
pub fn run(sub: MemorySub) -> Exit {
match sub {
MemorySub::Accept(args) => accept(args),
MemorySub::AdmitAxiom(args) => admit_axiom(args),
MemorySub::List(args) => list(args),
MemorySub::Search(args) => search(args),
MemorySub::Embed(args) => embed(args),
MemorySub::Tag(sub) => tag(sub),
MemorySub::Outcome(sub) => outcome(sub),
MemorySub::Health(args) => health(args),
}
}
fn tag(sub: TagSub) -> Exit {
match sub {
TagSub::Add(args) => add_tag(args),
TagSub::Remove(args) => remove_tag(args),
}
}
/// Handle `cortex memory tag add`: attach a domain tag to a memory.
///
/// Flow: open the store, run the operator-temporal-authority policy gate,
/// then persist the tag. JSON mode wraps every result in an output envelope;
/// plain mode prints to stdout/stderr.
fn add_tag(args: TagAddArgs) -> Exit {
    let pool = match open_default_store("memory tag add") {
        Ok(pool) => pool,
        Err(exit) => {
            // Store open failed: surface the exit, with an envelope in JSON mode.
            if output::json_enabled() {
                let payload = json!({ "detail": "failed to open store" });
                let envelope = Envelope::new("cortex.memory.tag.add", exit, payload);
                return output::emit(&envelope, exit);
            }
            return exit;
        }
    };
    // Policy gate: this CLI surface carries no operator attestation, so the
    // single contribution is a Warn; a composed Quarantine or Reject outcome
    // blocks the mutation before any write happens.
    let operator_temporal_contribution = tag_operator_temporal_contribution();
    let policy = cortex_core::compose_policy_outcomes(vec![operator_temporal_contribution], None);
    match policy.final_outcome {
        cortex_core::PolicyOutcome::Quarantine | cortex_core::PolicyOutcome::Reject => {
            let exit = Exit::PreconditionUnmet;
            if output::json_enabled() {
                let payload = json!({
                    "detail": format!(
                        "invariant={TAG_WRITE_PENDING_INVARIANT}: \
                         operator temporal authority gate blocked tag mutation"
                    )
                });
                let envelope = Envelope::new("cortex.memory.tag.add", exit, payload);
                return output::emit(&envelope, exit);
            }
            eprintln!(
                "cortex memory tag add: invariant={TAG_WRITE_PENDING_INVARIANT} \
                 operator temporal authority gate blocked tag mutation"
            );
            return exit;
        }
        // Allow / Warn (and any other non-blocking outcome) fall through to the write.
        _ => {}
    }
    let repo = MemoryRepo::new(&pool);
    match repo.add_domain_tag(&args.memory_id, &args.tag, chrono::Utc::now()) {
        // Tag was newly attached and persisted.
        Ok(true) => {
            let id = args.memory_id.to_string();
            let tag = &args.tag;
            if !output::json_enabled() {
                println!("memory tag add: tag '{tag}' added to {id}");
                return Exit::Ok;
            }
            let payload = json!({
                "memory_id": id,
                "tag": tag,
                "added": true,
                "persisted": true,
            });
            let envelope = Envelope::new("cortex.memory.tag.add", Exit::Ok, payload);
            output::emit(&envelope, Exit::Ok)
        }
        // Tag already present: idempotent success, nothing written.
        Ok(false) => {
            let id = args.memory_id.to_string();
            let tag = &args.tag;
            if !output::json_enabled() {
                println!("memory tag add: tag '{tag}' already present on {id}");
                return Exit::Ok;
            }
            let payload = json!({
                "memory_id": id,
                "tag": tag,
                "added": false,
                "persisted": false,
            });
            let envelope = Envelope::new("cortex.memory.tag.add", Exit::Ok, payload);
            output::emit(&envelope, Exit::Ok)
        }
        // Validation failures (e.g. bad tag / missing memory) map to PreconditionUnmet.
        Err(cortex_store::StoreError::Validation(msg)) => {
            let exit = Exit::PreconditionUnmet;
            if output::json_enabled() {
                let payload = json!({ "detail": format!("precondition unmet: {msg}") });
                let envelope = Envelope::new("cortex.memory.tag.add", exit, payload);
                return output::emit(&envelope, exit);
            }
            eprintln!("cortex memory tag add: precondition unmet: {msg}");
            exit
        }
        // Everything else (I/O, SQL) is an internal error.
        Err(err) => {
            let exit = Exit::Internal;
            if output::json_enabled() {
                let payload = json!({ "detail": format!("internal error: {err}") });
                let envelope = Envelope::new("cortex.memory.tag.add", exit, payload);
                return output::emit(&envelope, exit);
            }
            eprintln!("cortex memory tag add: internal error: {err}");
            exit
        }
    }
}
/// Handle `cortex memory tag remove`: detach a domain tag from a memory.
///
/// Mirrors `add_tag`: open store, run the operator-temporal-authority gate,
/// then perform the (idempotent) removal.
fn remove_tag(args: TagRemoveArgs) -> Exit {
    let pool = match open_default_store("memory tag remove") {
        Ok(pool) => pool,
        Err(exit) => {
            // Store open failed: surface the exit, with an envelope in JSON mode.
            if output::json_enabled() {
                let payload = json!({ "detail": "failed to open store" });
                let envelope = Envelope::new("cortex.memory.tag.remove", exit, payload);
                return output::emit(&envelope, exit);
            }
            return exit;
        }
    };
    // Same attestation-less Warn gate as add_tag; Quarantine/Reject blocks the write.
    let operator_temporal_contribution = tag_operator_temporal_contribution();
    let policy = cortex_core::compose_policy_outcomes(vec![operator_temporal_contribution], None);
    match policy.final_outcome {
        cortex_core::PolicyOutcome::Quarantine | cortex_core::PolicyOutcome::Reject => {
            let exit = Exit::PreconditionUnmet;
            if output::json_enabled() {
                let payload = json!({
                    "detail": format!(
                        "invariant={TAG_WRITE_PENDING_INVARIANT}: \
                         operator temporal authority gate blocked tag mutation"
                    )
                });
                let envelope = Envelope::new("cortex.memory.tag.remove", exit, payload);
                return output::emit(&envelope, exit);
            }
            eprintln!(
                "cortex memory tag remove: invariant={TAG_WRITE_PENDING_INVARIANT} \
                 operator temporal authority gate blocked tag mutation"
            );
            return exit;
        }
        // Non-blocking outcomes fall through to the write.
        _ => {}
    }
    let repo = MemoryRepo::new(&pool);
    match repo.remove_domain_tag(&args.memory_id, &args.tag, chrono::Utc::now()) {
        // Tag was present and has been removed.
        Ok(true) => {
            let id = args.memory_id.to_string();
            let tag = &args.tag;
            if !output::json_enabled() {
                println!("memory tag remove: tag '{tag}' removed from {id}");
                return Exit::Ok;
            }
            let payload = json!({
                "memory_id": id,
                "tag": tag,
                "removed": true,
                "persisted": true,
            });
            let envelope = Envelope::new("cortex.memory.tag.remove", Exit::Ok, payload);
            output::emit(&envelope, Exit::Ok)
        }
        // Tag was not present: idempotent success, nothing written.
        Ok(false) => {
            let id = args.memory_id.to_string();
            let tag = &args.tag;
            if !output::json_enabled() {
                println!("memory tag remove: tag '{tag}' not present on {id}");
                return Exit::Ok;
            }
            let payload = json!({
                "memory_id": id,
                "tag": tag,
                "removed": false,
                "persisted": false,
            });
            let envelope = Envelope::new("cortex.memory.tag.remove", Exit::Ok, payload);
            output::emit(&envelope, Exit::Ok)
        }
        // Validation failures map to PreconditionUnmet.
        Err(cortex_store::StoreError::Validation(msg)) => {
            let exit = Exit::PreconditionUnmet;
            if output::json_enabled() {
                let payload = json!({ "detail": format!("precondition unmet: {msg}") });
                let envelope = Envelope::new("cortex.memory.tag.remove", exit, payload);
                return output::emit(&envelope, exit);
            }
            eprintln!("cortex memory tag remove: precondition unmet: {msg}");
            exit
        }
        // Everything else is an internal error.
        Err(err) => {
            let exit = Exit::Internal;
            if output::json_enabled() {
                let payload = json!({ "detail": format!("internal error: {err}") });
                let envelope = Envelope::new("cortex.memory.tag.remove", exit, payload);
                return output::emit(&envelope, exit);
            }
            eprintln!("cortex memory tag remove: internal error: {err}");
            exit
        }
    }
}
/// Build the operator-temporal-authority contribution applied to tag writes.
///
/// The CLI carries no operator attestation, so the contribution is a `Warn`
/// at the honest floor rather than an `Allow`.
fn tag_operator_temporal_contribution() -> PolicyContribution {
    let reason = format!(
        "{ACCEPT_OPERATOR_TEMPORAL_AUTHORITY_WARN_NO_ATTESTATION_INVARIANT}: no operator attestation bound on this CLI surface; accepting at the honest floor"
    );
    PolicyContribution::new(ACCEPT_OPERATOR_TEMPORAL_USE_RULE_ID, PolicyOutcome::Warn, reason)
        .expect("tag operator temporal contribution shape is valid")
}
/// Route `memory outcome` subcommands. `Record` is currently the only
/// variant, so an irrefutable binding suffices.
fn outcome(sub: OutcomeSub) -> Exit {
    let OutcomeSub::Record(args) = sub;
    outcome_record(args)
}
/// Handle `cortex memory outcome record`: bind a helpful/not-helpful outcome
/// to a memory for a session.
///
/// Preconditions: non-empty `--session` and an existing *active* memory.
/// On success two rows are written: a session-use record and an
/// outcome→memory relation.
fn outcome_record(args: OutcomeRecordArgs) -> Exit {
    if args.session.trim().is_empty() {
        eprintln!("cortex memory outcome record: --session must not be empty");
        return outcome_record_failure_envelope(Exit::Usage, "session must not be empty", None);
    }
    let pool = match open_default_store("memory outcome record") {
        Ok(pool) => pool,
        Err(exit) => return outcome_record_failure_envelope(exit, "failed to open store", None),
    };
    let repo = MemoryRepo::new(&pool);
    // Only active memories may receive outcomes; existing-but-inactive and
    // missing ids both fail under the memory_not_found invariant.
    let memory = match repo.get_by_id(&args.memory_id) {
        Ok(Some(m)) if m.status == "active" => m,
        Ok(Some(_)) => {
            let detail = format!(
                "{OUTCOME_MEMORY_NOT_FOUND_INVARIANT}: memory {} exists but is not active",
                args.memory_id
            );
            eprintln!("cortex memory outcome record: {detail}");
            return outcome_record_failure_envelope(
                Exit::PreconditionUnmet,
                &detail,
                Some(args.memory_id.to_string()),
            );
        }
        Ok(None) => {
            let detail = format!(
                "{OUTCOME_MEMORY_NOT_FOUND_INVARIANT}: memory {} not found",
                args.memory_id
            );
            eprintln!("cortex memory outcome record: {detail}");
            return outcome_record_failure_envelope(
                Exit::PreconditionUnmet,
                &detail,
                Some(args.memory_id.to_string()),
            );
        }
        Err(err) => {
            let detail = format!("failed to look up memory {}: {err}", args.memory_id);
            eprintln!("cortex memory outcome record: {detail}");
            return outcome_record_failure_envelope(
                Exit::Internal,
                &detail,
                Some(args.memory_id.to_string()),
            );
        }
    };
    let now = chrono::Utc::now();
    // Session-use record: use_count starts at 1 here; presumably
    // record_session_use upserts/accumulates per (memory, session) — confirm
    // against the repo implementation.
    let session_use = MemorySessionUse {
        memory_id: memory.id,
        session_id: args.session.clone(),
        first_used_at: now,
        last_used_at: now,
        use_count: 1,
    };
    if let Err(err) = repo.record_session_use(&session_use) {
        let detail = format!("failed to record session use: {err}");
        eprintln!("cortex memory outcome record: {detail}");
        return outcome_record_failure_envelope(
            Exit::Internal,
            &detail,
            Some(args.memory_id.to_string()),
        );
    }
    // helpful → Used, not-helpful → Rejected.
    let relation = match args.result {
        OutcomeResult::Helpful => cortex_core::OutcomeMemoryRelation::Used,
        OutcomeResult::NotHelpful => cortex_core::OutcomeMemoryRelation::Rejected,
    };
    let result_name = match args.result {
        OutcomeResult::Helpful => "helpful",
        OutcomeResult::NotHelpful => "not-helpful",
    };
    // Synthetic reference key: outcome:{session}:{memory}:{result}.
    let outcome_ref = format!(
        "outcome:{}:{}:{}",
        args.session, args.memory_id, result_name
    );
    // Relation row; validation/evidence fields are intentionally unset on
    // this unvalidated CLI path.
    let relation_record = OutcomeMemoryRelationRecord {
        outcome_ref: outcome_ref.clone(),
        memory_id: args.memory_id,
        relation,
        recorded_at: now,
        source_event_id: None,
        validation_scope: None,
        validating_principal_id: None,
        evidence_ref: None,
    };
    if let Err(err) = repo.record_outcome_relation(&relation_record, None) {
        let detail = format!("failed to record outcome relation: {err}");
        eprintln!("cortex memory outcome record: {detail}");
        return outcome_record_failure_envelope(
            Exit::Internal,
            &detail,
            Some(args.memory_id.to_string()),
        );
    }
    if !output::json_enabled() {
        println!(
            "outcome recorded: memory {} marked {} for session {}",
            args.memory_id, result_name, args.session
        );
        if let Some(note) = &args.note {
            println!("  note: {note}");
        }
        return Exit::Ok;
    }
    let payload = json!({
        "memory_id": args.memory_id.to_string(),
        "session_id": args.session,
        "result": result_name,
        "outcome_ref": outcome_ref,
        "note": args.note,
        "persisted": true,
    });
    let envelope = Envelope::new("cortex.memory.outcome.record", Exit::Ok, payload);
    output::emit(&envelope, Exit::Ok)
}
/// Emit a failure envelope for `memory outcome record`.
///
/// In plain-text mode the caller has already written the diagnostic to
/// stderr, so only the exit code is returned; in JSON mode a non-persisted
/// envelope is emitted first.
fn outcome_record_failure_envelope(exit: Exit, detail: &str, memory_id: Option<String>) -> Exit {
    if output::json_enabled() {
        let payload = json!({
            "memory_id": memory_id,
            "detail": detail,
            "persisted": false,
        });
        let envelope = Envelope::new("cortex.memory.outcome.record", exit, payload);
        output::emit(&envelope, exit)
    } else {
        exit
    }
}
/// Handle `cortex memory health`: hygiene metrics over active memories.
///
/// Scans every `status = 'active'` row and buckets ids into: low confidence,
/// never validated, older than the cutoff, and no recorded outcome. With
/// `--unvalidated-only`, the other three buckets are intersected with the
/// never-validated set.
fn health(args: HealthArgs) -> Exit {
    let pool = match open_default_store("memory health") {
        Ok(pool) => pool,
        Err(exit) => return health_failure_envelope(exit, "failed to open store"),
    };
    // NOTE(review): `prepare` is called directly on the pool handle —
    // presumably Pool wraps/derefs to a connection; confirm in cortex_store.
    let conn = pool.prepare("SELECT id, confidence, created_at, validation_epoch, cross_session_use_count FROM memories WHERE status = 'active';");
    let mut stmt = match conn {
        Ok(s) => s,
        Err(err) => {
            let detail = format!("failed to prepare health query: {err}");
            eprintln!("cortex memory health: {detail}");
            return health_failure_envelope(Exit::Internal, &detail);
        }
    };
    // Anything created before (now - older_than days) counts as old.
    let cutoff_days = i64::from(args.older_than);
    let cutoff_dt = chrono::Utc::now() - chrono::Duration::days(cutoff_days);
    let cutoff_rfc = cutoff_dt.to_rfc3339();
    // Row projection for the scan; nullable columns stay Option.
    #[derive(Debug)]
    struct HealthRow {
        id: String,
        confidence: f64,
        created_at: String,
        validation_epoch: Option<i64>,
        cross_session_use_count: Option<i64>,
    }
    let rows_result = stmt.query_map(rparams![], |row| {
        Ok(HealthRow {
            id: row.get(0)?,
            confidence: row.get(1)?,
            created_at: row.get(2)?,
            validation_epoch: row.get(3)?,
            cross_session_use_count: row.get(4)?,
        })
    });
    let rows = match rows_result {
        Ok(r) => r,
        Err(err) => {
            let detail = format!("failed to query active memories: {err}");
            eprintln!("cortex memory health: {detail}");
            return health_failure_envelope(Exit::Internal, &detail);
        }
    };
    let mut total = 0u64;
    let mut low_confidence_ids: Vec<String> = Vec::new();
    let mut never_validated_ids: Vec<String> = Vec::new();
    let mut older_than_ids: Vec<String> = Vec::new();
    let mut no_outcome_ids: Vec<String> = Vec::new();
    for row_result in rows {
        let row = match row_result {
            Ok(r) => r,
            Err(err) => {
                let detail = format!("failed to read health row: {err}");
                eprintln!("cortex memory health: {detail}");
                return health_failure_envelope(Exit::Internal, &detail);
            }
        };
        total += 1;
        if row.confidence < args.confidence_below {
            low_confidence_ids.push(row.id.clone());
        }
        // NULL and 0 are both treated as "never validated".
        let validation_epoch = row.validation_epoch.unwrap_or(0);
        if validation_epoch == 0 {
            never_validated_ids.push(row.id.clone());
        }
        // NOTE(review): lexicographic comparison of timestamp strings is only
        // correct if every stored created_at uses one uniform RFC3339
        // format/offset (e.g. always to_rfc3339() in UTC) — confirm writers.
        if row.created_at.as_str() < cutoff_rfc.as_str() {
            older_than_ids.push(row.id.clone());
        }
        // NULL and 0 both count as "no outcome recorded".
        let use_count = row.cross_session_use_count.unwrap_or(0);
        if use_count == 0 {
            no_outcome_ids.push(row.id.clone());
        }
    }
    if args.unvalidated_only {
        // Intersect the other buckets with the never-validated set.
        let nv_ids: std::collections::BTreeSet<&str> =
            never_validated_ids.iter().map(|s| s.as_str()).collect();
        low_confidence_ids.retain(|id| nv_ids.contains(id.as_str()));
        older_than_ids.retain(|id| nv_ids.contains(id.as_str()));
        no_outcome_ids.retain(|id| nv_ids.contains(id.as_str()));
    }
    let json_mode = output::json_enabled();
    if !json_mode {
        println!("memory health:");
        println!("  total active: {total}");
        println!(
            "  low confidence (<{}): {}",
            args.confidence_below,
            low_confidence_ids.len()
        );
        println!("  never validated: {}", never_validated_ids.len());
        println!(
            "  older than {} days: {}",
            args.older_than,
            older_than_ids.len()
        );
        println!("  no outcome recorded: {}", no_outcome_ids.len());
        return Exit::Ok;
    }
    let payload = json!({
        "total_active": total,
        "low_confidence_count": low_confidence_ids.len(),
        "never_validated_count": never_validated_ids.len(),
        "older_than_days_count": older_than_ids.len(),
        "no_outcome_count": no_outcome_ids.len(),
        "older_than_days": args.older_than,
        "confidence_below": args.confidence_below,
        "low_confidence_ids": low_confidence_ids,
        "never_validated_ids": never_validated_ids,
        "older_than_ids": older_than_ids,
        "no_outcome_ids": no_outcome_ids,
    });
    let envelope = Envelope::new("cortex.memory.health", Exit::Ok, payload);
    output::emit(&envelope, Exit::Ok)
}
/// Emit a failure envelope for `memory health` (JSON mode only); plain mode
/// just propagates the exit because stderr was already written by the caller.
fn health_failure_envelope(exit: Exit, detail: &str) -> Exit {
    if output::json_enabled() {
        let payload = json!({
            "detail": detail,
            "total_active": 0,
        });
        let envelope = Envelope::new("cortex.memory.health", exit, payload);
        output::emit(&envelope, exit)
    } else {
        exit
    }
}
/// Handle `cortex memory accept`: accept a candidate memory.
///
/// Builds an audit record, composes the acceptance policy (proof closure,
/// open contradictions, semantic trust, operator temporal authority), then
/// delegates the state change to `cortex_memory::accept`.
fn accept(args: AcceptArgs) -> Exit {
    let pool = match open_default_store("memory accept") {
        Ok(pool) => pool,
        Err(exit) => return memory_accept_envelope(exit, None, "failed to open store", None),
    };
    let repo = MemoryRepo::new(&pool);
    // Audit trail entry capturing actor and rationale for this acceptance.
    let audit = MemoryAcceptanceAudit {
        id: AuditRecordId::new(),
        actor_json: json!({"kind": "cli", "command": "memory accept"}),
        reason: "operator accepted candidate memory via CLI".to_string(),
        source_refs_json: json!([args.candidate_id.to_string()]),
        created_at: chrono::Utc::now(),
    };
    // Policy preflight; failures were already logged to stderr inside.
    let (policy, proof_closure) = match compose_memory_accept_policy(&pool, &args.candidate_id) {
        Ok(value) => value,
        Err(exit) => return exit,
    };
    match cortex_memory::accept(
        &repo,
        &args.candidate_id,
        chrono::Utc::now(),
        &audit,
        &policy,
        &proof_closure,
    ) {
        Ok(id) => {
            let id_str = id.to_string();
            if !output::json_enabled() {
                println!("cortex memory accept: accepted {id_str}");
            }
            memory_accept_envelope(Exit::Ok, Some(id_str), "accepted", Some(&policy))
        }
        // Any acceptance failure (gate or store) maps to PreconditionUnmet,
        // still carrying the composed policy for diagnosis.
        Err(err) => {
            eprintln!("cortex memory accept: {err}");
            memory_accept_envelope(
                Exit::PreconditionUnmet,
                None,
                &err.to_string(),
                Some(&policy),
            )
        }
    }
}
/// Assemble the acceptance policy decision for one candidate memory.
///
/// Contributions, in order: proof-closure verification, open-contradiction
/// count for the candidate, a static semantic-trust Allow (lineage validated
/// upstream of this CLI), and the operator-temporal Warn (no attestation on
/// this surface). Preflight failures are logged to stderr and mapped to
/// `Exit::PreconditionUnmet`. Returns the composed decision together with
/// the proof-closure report.
fn compose_memory_accept_policy(
    pool: &Pool,
    candidate_id: &MemoryId,
) -> Result<(cortex_core::PolicyDecision, cortex_core::ProofClosureReport), Exit> {
    let proof_report = verify_memory_proof_closure(pool, candidate_id).map_err(|err| {
        eprintln!("cortex memory accept: proof closure preflight failed: {err}");
        Exit::PreconditionUnmet
    })?;
    let proof_contribution = accept_proof_closure_contribution(&proof_report);
    let candidate_ref = candidate_id.to_string();
    let contradictions = ContradictionRepo::new(pool).list_open().map_err(|err| {
        eprintln!("cortex memory accept: contradiction preflight failed: {err}");
        Exit::PreconditionUnmet
    })?;
    // Count only contradictions that reference this candidate on either side.
    let open_contradictions = contradictions
        .iter()
        .filter(|row| row.left_ref == candidate_ref || row.right_ref == candidate_ref)
        .count();
    let contradiction_contribution = accept_open_contradiction_contribution(open_contradictions);
    let semantic_trust_contribution = PolicyContribution::new(
        ACCEPT_SEMANTIC_TRUST_RULE_ID,
        PolicyOutcome::Allow,
        "operator CLI invocation: candidate passed lineage validation upstream",
    )
    .expect("static semantic trust contribution shape is valid");
    let operator_temporal_use_contribution = PolicyContribution::new(
        ACCEPT_OPERATOR_TEMPORAL_USE_RULE_ID,
        PolicyOutcome::Warn,
        format!(
            "{ACCEPT_OPERATOR_TEMPORAL_AUTHORITY_WARN_NO_ATTESTATION_INVARIANT}: no operator attestation bound on this CLI surface; accepting at the honest floor",
        ),
    )
    .expect("static operator temporal use contribution shape is valid");
    let decision = compose_policy_outcomes(
        vec![
            proof_contribution,
            contradiction_contribution,
            semantic_trust_contribution,
            operator_temporal_use_contribution,
        ],
        None,
    );
    Ok((decision, proof_report))
}
/// Emit the `memory accept` result envelope (JSON mode only).
///
/// The policy summary, when present, is included twice by design: inside the
/// payload under `policy_outcome` and as the envelope's own policy field.
fn memory_accept_envelope(
    exit: Exit,
    memory_id: Option<String>,
    detail: &str,
    policy: Option<&cortex_core::PolicyDecision>,
) -> Exit {
    if !output::json_enabled() {
        return exit;
    }
    // Build the policy summary once and reuse it for both placements.
    let policy_json = policy.map(|p| {
        json!({
            "final_outcome": p.final_outcome,
            "contributing": p.contributing,
            "discarded": p.discarded,
        })
    });
    let payload = match &policy_json {
        Some(summary) => json!({
            "memory_id": memory_id,
            "detail": detail,
            "policy_outcome": summary,
        }),
        None => json!({
            "memory_id": memory_id,
            "detail": detail,
        }),
    };
    let mut envelope = Envelope::new("cortex.memory.accept", exit, payload);
    if let Some(summary) = policy_json {
        envelope = envelope.with_policy_outcome(summary);
    }
    output::emit(&envelope, exit)
}
/// Handle `cortex memory admit-axiom` in direct-envelope mode.
///
/// If any pai-axiom trust-exchange flag was given, delegates to
/// `admit_axiom_trust_exchange`. Otherwise reads the admission envelope from
/// `--file`/`--json`, evaluates the admission decision, policy, and
/// semantic-trust report, and prints a report. Nothing is persisted on this
/// path (`persisted: false`).
fn admit_axiom(args: AdmitAxiomArgs) -> Exit {
    if args.cortex_context_trust.is_some()
        || args.axiom_execution_trust.is_some()
        || args.authority_feedback_loop.is_some()
    {
        return admit_axiom_trust_exchange(args);
    }
    let input = match read_axiom_admission_input(&args) {
        Ok(input) => input,
        Err(err) => {
            eprintln!("cortex memory admit-axiom: {err}");
            return admit_axiom_failure_envelope(Exit::PreconditionUnmet, &err.to_string());
        }
    };
    // Malformed envelopes are treated as quarantined input, not usage errors.
    let request = match AxiomMemoryAdmissionRequest::from_json_envelope(&input) {
        Ok(request) => request,
        Err(AdmissionEnvelopeError::InvalidEnvelope { message }) => {
            eprintln!("cortex memory admit-axiom: invalid AXIOM admission envelope: {message}");
            return admit_axiom_failure_envelope(
                Exit::QuarantinedInput,
                &format!("invalid AXIOM admission envelope: {message}"),
            );
        }
    };
    let decision = request.admission_decision();
    let policy = request.policy_decision();
    // Semantic trust is evaluated for the candidate-memory use case.
    let semantic = request.semantic_trust_report(AdmissionSemanticTrustInput::new(
        SemanticUse::CandidateMemory,
    ));
    // Only AdmitCandidate exits Ok; quarantine and reject both map to
    // QuarantinedInput.
    let (decision_name, exit) = match decision {
        AdmissionDecision::AdmitCandidate => ("admit_candidate", Exit::Ok),
        AdmissionDecision::Quarantine { .. } => ("quarantine", Exit::QuarantinedInput),
        AdmissionDecision::Reject { .. } => ("reject", Exit::QuarantinedInput),
    };
    let report = json!({
        "command": "memory admit-axiom",
        "decision": decision_name,
        "policy_outcome": policy.final_outcome,
        "policy_contributing": policy.contributing,
        "candidate_state": request.candidate_state,
        "semantic_intended_use": semantic.intended_use,
        "provenance_class": semantic.provenance_class,
        "semantic_trust": semantic.semantic_trust.semantic_trust,
        "semantic_policy_outcome": semantic.semantic_trust.policy_outcome,
        "semantic_claim_ceiling": semantic.semantic_trust.claim_ceiling,
        "semantic_reasons": semantic.semantic_trust.reasons,
        "explicit_non_promotion": request.explicit_non_promotion,
        "persisted": false,
    });
    if output::json_enabled() {
        let policy_summary = serde_json::json!({
            "final_outcome": policy.final_outcome,
            "contributing": policy.contributing,
        });
        let envelope = Envelope::new("cortex.memory.admit_axiom", exit, report)
            .with_policy_outcome(policy_summary);
        return output::emit(&envelope, exit);
    }
    // Non-JSON mode still pretty-prints the full report to stdout.
    println!(
        "{}",
        serde_json::to_string_pretty(&report).expect("admission report is serializable")
    );
    exit
}
/// Emit a failure envelope for `memory admit-axiom` (JSON mode only); plain
/// mode returns the exit directly since the caller already wrote to stderr.
fn admit_axiom_failure_envelope(exit: Exit, detail: &str) -> Exit {
    if output::json_enabled() {
        let payload = json!({
            "command": "memory admit-axiom",
            "decision": "error",
            "detail": detail,
            "persisted": false,
        });
        let envelope = Envelope::new("cortex.memory.admit_axiom", exit, payload);
        output::emit(&envelope, exit)
    } else {
        exit
    }
}
/// Resolve the admission payload from `--file` or `--json`.
///
/// Exactly one source must be present. clap's `conflicts_with_all` should
/// already prevent both being set, but the case is handled defensively.
fn read_axiom_admission_input(args: &AdmitAxiomArgs) -> Result<String, io::Error> {
    if args.file.is_some() && args.json.is_some() {
        return Err(io::Error::new(
            io::ErrorKind::InvalidInput,
            "--file and --json are mutually exclusive",
        ));
    }
    if let Some(path) = args.file.as_ref() {
        return fs::read_to_string(path);
    }
    if let Some(json) = args.json.as_ref() {
        return Ok(json.clone());
    }
    Err(io::Error::new(
        io::ErrorKind::InvalidInput,
        "admit-axiom requires --file, --json, or a pai-axiom trust exchange flag",
    ))
}
fn parse_admission_lifecycle(value: &str) -> Result<AdmissionLifecycle, String> {
match value {
"candidate_only" => Ok(AdmissionLifecycle::CandidateOnly),
"validated" => Ok(AdmissionLifecycle::Validated),
"promoted" => Ok(AdmissionLifecycle::Promoted),
"stale" => Ok(AdmissionLifecycle::Stale),
"quarantined" => Ok(AdmissionLifecycle::Quarantined),
"unknown" => Ok(AdmissionLifecycle::Unknown),
other => Err(format!(
"unsupported lifecycle `{other}`; expected one of \
candidate_only, validated, promoted, stale, quarantined, unknown"
)),
}
}
/// Handle `cortex memory admit-axiom` in pai-axiom trust-exchange mode.
///
/// Gate order: lifecycle keyword, presence of all three trust flags,
/// parse + source-commit freshness of the execution-trust envelope, then
/// parse of the context-trust and feedback-loop envelopes. Parse failures
/// quarantine the input; missing flags are usage errors. The final decision
/// comes from `AxiomTrustExchangeAdmissionRequest::decide`.
fn admit_axiom_trust_exchange(args: AdmitAxiomArgs) -> Exit {
    let lifecycle = match parse_admission_lifecycle(&args.lifecycle) {
        Ok(lifecycle) => lifecycle,
        Err(err) => {
            eprintln!("cortex memory admit-axiom: {err}");
            return admit_axiom_failure_envelope(Exit::Usage, &err);
        }
    };
    // The execution-trust envelope is the anchor of the exchange; required.
    let exec_path = match args.axiom_execution_trust.as_ref() {
        Some(path) => path,
        None => {
            let detail = "--axiom-execution-trust is required for trust exchange admission";
            eprintln!("cortex memory admit-axiom: {detail}");
            return admit_axiom_failure_envelope(Exit::Usage, detail);
        }
    };
    // Both companion envelopes are mandatory: their absence would silently
    // disable downstream gates (see the invariant text in each message).
    if args.authority_feedback_loop.is_none() {
        let detail = "axiom.admission.authority_feedback_loop.missing --authority-feedback-loop is required for trust exchange admission (P6 closure: the feedback-loop record carries the same_loop_promotion / authority_claims / target_domain_validation gates that pai-axiom relies on)";
        eprintln!("cortex memory admit-axiom: {detail}");
        return admit_axiom_failure_envelope(Exit::Usage, detail);
    }
    if args.cortex_context_trust.is_none() {
        let detail = "axiom.admission.cortex_context_trust.missing --cortex-context-trust is required for trust exchange admission (P6 follow-up: the context-trust envelope is the load-bearing carrier for four downstream gates — quarantine propagation, redaction-state-blocks-critical-premise, proof-state-failed, and proof-state-missing — that silently no-op when the envelope is absent)";
        eprintln!("cortex memory admit-axiom: {detail}");
        return admit_axiom_failure_envelope(Exit::Usage, detail);
    }
    let exec_bytes = match fs::read_to_string(exec_path) {
        Ok(bytes) => bytes,
        Err(err) => {
            let detail = format!(
                "failed to read axiom_execution_trust at {}: {err}",
                exec_path.display()
            );
            eprintln!("cortex memory admit-axiom: {detail}");
            return admit_axiom_failure_envelope(Exit::PreconditionUnmet, &detail);
        }
    };
    let exec = match parse_axiom_execution_trust(&exec_bytes) {
        Ok(envelope) => envelope,
        Err(err) => {
            let detail = format!("invalid axiom_execution_trust envelope: {}", err.reason);
            eprintln!(
                "cortex memory admit-axiom: invariant={} {detail}",
                err.invariant
            );
            return admit_axiom_failure_envelope(Exit::QuarantinedInput, &detail);
        }
    };
    // Freshness gate: the producing tool's source commit must be on the
    // Cortex-side acceptance list (env-rotatable); stale commits quarantine.
    {
        let accepted = accepted_axiom_source_commits();
        if !is_axiom_source_commit_fresh(&exec.tool_provenance.source_commit, &accepted) {
            let detail = format!(
                "tool_provenance.source_commit `{}` is not on the Cortex-side acceptance list \
                 (defaults documented at `cortex_core::DEFAULT_ACCEPTED_AXIOM_SOURCE_COMMITS`; \
                 operator rotation via `{CORTEX_AXIOM_ACCEPTED_SOURCE_COMMITS_ENV}`)",
                exec.tool_provenance.source_commit,
            );
            eprintln!(
                "cortex memory admit-axiom: invariant={AXIOM_EXECUTION_TRUST_SOURCE_COMMIT_STALE_INVARIANT} {detail}",
            );
            return admit_axiom_failure_envelope(Exit::QuarantinedInput, &detail);
        }
    }
    let mut request = AxiomTrustExchangeAdmissionRequest::new(exec, lifecycle)
        .with_derived_from_quarantined(args.derived_from_quarantined);
    // Attach the context-trust envelope (presence already enforced above).
    if let Some(path) = args.cortex_context_trust.as_ref() {
        let bytes = match fs::read_to_string(path) {
            Ok(bytes) => bytes,
            Err(err) => {
                let detail = format!(
                    "failed to read cortex_context_trust at {}: {err}",
                    path.display()
                );
                eprintln!("cortex memory admit-axiom: {detail}");
                return admit_axiom_failure_envelope(Exit::PreconditionUnmet, &detail);
            }
        };
        match parse_cortex_context_trust(&bytes) {
            Ok(envelope) => {
                request = request.with_cortex_context_trust(envelope);
            }
            Err(err) => {
                let detail = format!("invalid cortex_context_trust envelope: {}", err.reason);
                eprintln!(
                    "cortex memory admit-axiom: invariant={} {detail}",
                    err.invariant
                );
                return admit_axiom_failure_envelope(Exit::QuarantinedInput, &detail);
            }
        }
    }
    // Attach the authority feedback-loop record (presence enforced above).
    if let Some(path) = args.authority_feedback_loop.as_ref() {
        let bytes = match fs::read_to_string(path) {
            Ok(bytes) => bytes,
            Err(err) => {
                let detail = format!(
                    "failed to read authority_feedback_loop at {}: {err}",
                    path.display()
                );
                eprintln!("cortex memory admit-axiom: {detail}");
                return admit_axiom_failure_envelope(Exit::PreconditionUnmet, &detail);
            }
        };
        match parse_authority_feedback_loop(&bytes) {
            Ok(envelope) => {
                request = request.with_authority_feedback_loop(envelope);
            }
            Err(err) => {
                let detail = format!("invalid authority_feedback_loop envelope: {}", err.reason);
                eprintln!(
                    "cortex memory admit-axiom: invariant={} {detail}",
                    err.invariant
                );
                return admit_axiom_failure_envelope(Exit::QuarantinedInput, &detail);
            }
        }
    }
    let decision = request.decide();
    emit_trust_exchange_diagnostic(&decision);
    emit_trust_exchange_envelope(&decision)
}
/// Print a stderr diagnostic for a trust-exchange decision: decision name,
/// policy outcome, failing invariant edges, named quarantine outputs, and
/// forbidden uses. Output-only; never changes the exit code.
fn emit_trust_exchange_diagnostic(decision: &TrustExchangeAdmission) {
    let invariants = decision.invariants();
    eprintln!(
        "cortex memory admit-axiom: decision={} policy_outcome={:?}",
        decision.decision_name(),
        decision.policy_decision().final_outcome
    );
    if !invariants.is_empty() {
        eprintln!(
            "cortex memory admit-axiom: failing_edges={}",
            invariants.join(",")
        );
    }
    if let Some(outputs) = decision.named_quarantine_outputs() {
        for invariant in outputs.invariants() {
            eprintln!("cortex memory admit-axiom: named_quarantine_output={invariant}");
        }
    }
    if let Some(forbidden) = decision.forbidden_uses() {
        // Serialize each forbidden use to recover its serde wire name; any
        // value that does not serialize to a plain string becomes "unknown".
        let names: Vec<String> = forbidden
            .iter()
            .map(|use_| {
                serde_json::to_value(use_)
                    .ok()
                    .and_then(|value| value.as_str().map(ToOwned::to_owned))
                    .unwrap_or_else(|| "unknown".to_string())
            })
            .collect();
        eprintln!(
            "cortex memory admit-axiom: forbidden_uses={}",
            names.join(",")
        );
    }
}
/// Emit the final trust-exchange report and map the decision to an exit code.
///
/// AdmitCandidate → `Exit::Ok`; Quarantine and Reject → `Exit::QuarantinedInput`.
/// Non-JSON mode pretty-prints the report to stdout; JSON mode wraps it in an
/// output envelope with a policy summary. Nothing is persisted on this path
/// (`persisted: false`).
fn emit_trust_exchange_envelope(decision: &TrustExchangeAdmission) -> Exit {
    let exit = match decision {
        TrustExchangeAdmission::AdmitCandidate { .. } => Exit::Ok,
        TrustExchangeAdmission::Quarantine { .. } | TrustExchangeAdmission::Reject { .. } => {
            Exit::QuarantinedInput
        }
    };
    let policy = decision.policy_decision();
    // Optional sections degrade to JSON null rather than being omitted, so
    // the report shape is stable across decisions.
    let forbidden_uses = decision
        .forbidden_uses()
        .map(|uses| serde_json::to_value(uses).unwrap_or(serde_json::Value::Null))
        .unwrap_or(serde_json::Value::Null);
    let failing_edges: Vec<&str> = decision.invariants();
    let named_outputs = decision
        .named_quarantine_outputs()
        .map(|outputs| serde_json::to_value(outputs).unwrap_or(serde_json::Value::Null))
        .unwrap_or(serde_json::Value::Null);
    let report = json!({
        "command": "memory admit-axiom",
        "mode": "trust_exchange",
        "decision": decision.decision_name(),
        "policy_outcome": policy.final_outcome,
        "policy_contributing": policy.contributing,
        "policy_discarded": policy.discarded,
        "failing_edges": failing_edges,
        "named_quarantine_outputs": named_outputs,
        "forbidden_uses": forbidden_uses,
        "persisted": false,
    });
    if !output::json_enabled() {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("trust exchange report is serializable")
        );
        return exit;
    }
    let policy_summary = json!({
        "final_outcome": policy.final_outcome,
        "contributing": policy.contributing,
    });
    let envelope = Envelope::new("cortex.memory.admit_axiom", exit, report)
        .with_policy_outcome(policy_summary);
    output::emit(&envelope, exit)
}
fn search(args: SearchArgs) -> Exit {
if args.query.trim().is_empty() {
eprintln!("cortex memory search: QUERY must not be empty");
return search_failure_envelope(Exit::Usage, "QUERY must not be empty");
}
let query_terms: Vec<String> = tokenize_query(&args.query);
let (query_embed, semantic_backend_id): (Option<Vec<f32>>, Option<String>) = if args.semantic {
let ollama_backend_id = resolve_ollama_backend_id();
let ollama_result = ollama_backend_id.as_ref().and_then(|(bid, embedder)| {
match embedder.embed(args.query.trim(), &[]) {
Ok(vec) => Some((vec, bid.clone())),
Err(_) => None, }
});
if let Some((vec, bid)) = ollama_result {
(Some(vec), Some(bid))
} else {
let embedder = LocalStubEmbedder::new();
match embedder.embed(args.query.trim(), &[]) {
Ok(vec) => (Some(vec), Some(STUB_BACKEND_ID.to_string())),
Err(err) => {
let detail = format!("semantic embed failed for query: {err}");
eprintln!("cortex memory search: {detail}");
return search_failure_envelope(Exit::Internal, &detail);
}
}
}
} else {
(None, None)
};
let pool = match open_default_store("memory search") {
Ok(pool) => pool,
Err(exit) => return search_failure_envelope(exit, "failed to open store"),
};
let repo = MemoryRepo::new(&pool);
let memories = match repo.list_by_status("active") {
Ok(memories) => memories,
Err(err) => {
eprintln!("cortex memory search: failed to read active memories: {err}");
return search_failure_envelope(
Exit::Internal,
&format!("failed to read active memories: {err}"),
);
}
};
let mut proof_states = BTreeMap::new();
for memory in &memories {
let proof = match verify_memory_proof_closure(&pool, &memory.id) {
Ok(proof) => proof,
Err(err) => {
eprintln!(
"cortex memory search: failed to verify memory {} proof closure: {err}",
memory.id
);
return search_failure_envelope(
Exit::PreconditionUnmet,
&format!("failed to verify memory {} proof closure: {err}", memory.id),
);
}
};
if let Err(err) = proof.require_current_use_allowed() {
eprintln!(
"cortex memory search: memory {} excluded from default retrieval use: {err}",
memory.id
);
return search_failure_envelope(
Exit::PreconditionUnmet,
&format!(
"memory {} excluded from default retrieval use: {err}",
memory.id
),
);
}
proof_states.insert(memory.id.to_string(), ClaimProofState::from(proof.state()));
}
if let Err(err) = gate_open_contradictions_for_default_search(&pool, &memories) {
eprintln!("cortex memory search: {err}");
return search_failure_envelope(Exit::PreconditionUnmet, &err);
}
let documents = memories.iter().map(memory_document).collect::<Vec<_>>();
let index = LexicalIndex::new(documents);
let hits = match index.search_with_tag_filter(&args.query, &args.tag) {
Ok(hits) => hits,
Err(err) => {
eprintln!("cortex memory search: {err}");
return search_failure_envelope(Exit::Usage, &err.to_string());
}
};
let mut fuzzy_scores: BTreeMap<String, f32> = BTreeMap::new();
let mut fuzzy_only_hits: Vec<&MemoryRecord> = Vec::new();
if args.fuzzy {
let fts_limit = std::cmp::max(hits.len() * 2, 16);
let fts_hits = match query_fts5(&repo, &args.query, fts_limit) {
Ok(hits) => hits,
Err(err) => {
eprintln!("cortex memory search: fuzzy retrieval failed: {err}");
return search_failure_envelope(
Exit::Internal,
&format!("fuzzy retrieval failed: {err}"),
);
}
};
let lexical_ids: BTreeSet<String> =
hits.iter().map(|hit| hit.document.id.to_string()).collect();
let tag_filtered_ids: BTreeSet<String> = memories
.iter()
.filter(|memory| {
if args.tag.is_empty() {
true
} else {
let domains = memory
.domains_json
.as_array()
.map(|values| {
values
.iter()
.filter_map(|value| value.as_str().map(str::to_string))
.collect::<BTreeSet<_>>()
})
.unwrap_or_default();
args.tag.iter().all(|tag| domains.contains(tag))
}
})
.map(|memory| memory.id.to_string())
.collect();
for fts_hit in fts_hits {
let id_str = fts_hit.memory_id.to_string();
if !tag_filtered_ids.contains(&id_str) {
continue;
}
fuzzy_scores.insert(id_str.clone(), fts_hit.normalized_score);
if !lexical_ids.contains(&id_str) {
if let Some(memory) = memories
.iter()
.find(|memory| memory.id == fts_hit.memory_id)
{
fuzzy_only_hits.push(memory);
}
}
}
}
let json_mode = output::json_enabled();
if hits.is_empty() && fuzzy_only_hits.is_empty() {
if !json_mode {
println!("cortex memory search: no matches");
return Exit::Ok;
}
let payload = json!({
"query": args.query,
"match_count": 0,
"matches": [],
"claim_ceiling": "local_unsigned",
"forbidden_uses": [
"compliance_evidence",
"cross_system_trust_decision",
"external_reporting",
],
});
let envelope = Envelope::new("cortex.memory.search", Exit::Ok, payload);
return output::emit(&envelope, Exit::Ok);
}
let mut validation_epochs: BTreeMap<String, u32> = BTreeMap::new();
for memory in &memories {
let epoch = match repo.validation_epoch_for(&memory.id) {
Ok(epoch) => epoch.unwrap_or(0),
Err(err) => {
eprintln!(
"cortex memory search: failed to read validation_epoch for memory {}: {err}",
memory.id
);
return search_failure_envelope(
Exit::Internal,
&format!(
"failed to read validation_epoch for memory {}: {err}",
memory.id
),
);
}
};
validation_epochs.insert(memory.id.to_string(), epoch);
}
let mut matches = Vec::new();
for hit in hits {
let Some(memory) = memories.iter().find(|memory| memory.id == hit.document.id) else {
continue;
};
let validation_epoch = validation_epochs
.get(&memory.id.to_string())
.copied()
.unwrap_or(0);
let fuzz = if args.fuzzy {
fuzzy_scores
.get(&memory.id.to_string())
.copied()
.unwrap_or(0.0)
} else {
0.0
};
let sem_score =
if let (Some(ref qembed), Some(ref bid)) = (&query_embed, &semantic_backend_id) {
compute_or_warm_embedding(&pool, memory, qembed, bid)
} else {
None
};
let lexical_input = if args.semantic {
compose_lexical_semantic(hit.explanation.lexical_match, fuzz, sem_score)
} else if args.fuzzy {
compose_fuzzy_boost(hit.explanation.lexical_match, fuzz)
} else {
hit.explanation.lexical_match
};
let explanation = score(score_inputs(memory, lexical_input, validation_epoch));
if !json_mode {
let claim_display = if args.snippet {
let term_refs: Vec<&str> = query_terms.iter().map(|s| s.as_str()).collect();
let snippet = extract_snippet(&memory.claim, &term_refs);
snippet_ansi_highlighted(&snippet)
} else {
memory.claim.clone()
};
if args.semantic {
println!(
"{}\t{:.4}\tsem={:.4}\t{}",
memory.id,
explanation.final_score,
sem_score.unwrap_or(0.0),
claim_display
);
} else {
println!(
"{}\t{:.4}\t{}",
memory.id, explanation.final_score, claim_display
);
}
}
let proof_state = proof_states
.get(&memory.id.to_string())
.copied()
.unwrap_or(ClaimProofState::Unknown);
let uncertainty = retrieval_uncertainty(memory, proof_state);
if args.explain && !json_mode {
print_component("lexical_match", explanation.lexical_match);
print_component("semantic_similarity", explanation.semantic_similarity);
print_component("brightness", explanation.brightness);
print_component("domain_overlap", explanation.domain_overlap);
print_component("validation", explanation.validation);
print_component("authority_weight", explanation.authority_weight);
print_component("contradiction_risk", explanation.contradiction_risk);
print_component("staleness_penalty", explanation.staleness_penalty);
print_uncertainty(uncertainty);
}
if json_mode {
let mut entry = json!({
"memory_id": memory.id.to_string(),
"claim": memory.claim,
"final_score": explanation.final_score,
});
if args.semantic {
entry["sem_score"] = json!(sem_score.unwrap_or(0.0));
}
if args.snippet {
let term_refs: Vec<&str> = query_terms.iter().map(|s| s.as_str()).collect();
let snippet = extract_snippet(&memory.claim, &term_refs);
let ranges: Vec<[usize; 2]> = snippet
.highlight_ranges
.iter()
.map(|r| [r.start, r.end])
.collect();
entry["snippet"] = json!({
"text": snippet_plain_text(&snippet),
"truncated": snippet.truncated,
"highlight_ranges": ranges,
});
}
if args.explain {
entry["explanation"] = json!({
"lexical_match": component_value(explanation.lexical_match),
"semantic_similarity": component_value(explanation.semantic_similarity),
"brightness": component_value(explanation.brightness),
"domain_overlap": component_value(explanation.domain_overlap),
"validation": component_value(explanation.validation),
"authority_weight": component_value(explanation.authority_weight),
"contradiction_risk": component_value(explanation.contradiction_risk),
"staleness_penalty": component_value(explanation.staleness_penalty),
});
entry["uncertainty"] = json!({
"proof_state": wire_string(uncertainty.proof_state),
"provenance_class": wire_string(uncertainty.provenance_class),
"semantic_trust": wire_string(uncertainty.semantic_trust),
"claim_ceiling": wire_string(uncertainty.claim_ceiling),
});
}
matches.push(entry);
}
}
for memory in fuzzy_only_hits {
let validation_epoch = validation_epochs
.get(&memory.id.to_string())
.copied()
.unwrap_or(0);
let fuzz = fuzzy_scores
.get(&memory.id.to_string())
.copied()
.unwrap_or(0.0);
let sem_score =
if let (Some(ref qembed), Some(ref bid)) = (&query_embed, &semantic_backend_id) {
compute_or_warm_embedding(&pool, memory, qembed, bid)
} else {
None
};
let lexical_input = if args.semantic {
compose_lexical_semantic(0.0, fuzz, sem_score)
} else {
compose_fuzzy_boost(0.0, fuzz)
};
let explanation = score(score_inputs(memory, lexical_input, validation_epoch));
if !json_mode {
let claim_display = if args.snippet {
let term_refs: Vec<&str> = query_terms.iter().map(|s| s.as_str()).collect();
let snippet = extract_snippet(&memory.claim, &term_refs);
snippet_ansi_highlighted(&snippet)
} else {
memory.claim.clone()
};
if args.semantic {
println!(
"{}\t{:.4}\tsem={:.4}\t{}",
memory.id,
explanation.final_score,
sem_score.unwrap_or(0.0),
claim_display
);
} else {
println!(
"{}\t{:.4}\t{}",
memory.id, explanation.final_score, claim_display
);
}
}
let proof_state = proof_states
.get(&memory.id.to_string())
.copied()
.unwrap_or(ClaimProofState::Unknown);
let uncertainty = retrieval_uncertainty(memory, proof_state);
if args.explain && !json_mode {
print_component("lexical_match", explanation.lexical_match);
print_component("semantic_similarity", explanation.semantic_similarity);
print_component("brightness", explanation.brightness);
print_component("domain_overlap", explanation.domain_overlap);
print_component("validation", explanation.validation);
print_component("authority_weight", explanation.authority_weight);
print_component("contradiction_risk", explanation.contradiction_risk);
print_component("staleness_penalty", explanation.staleness_penalty);
print_uncertainty(uncertainty);
}
if json_mode {
let mut entry = json!({
"memory_id": memory.id.to_string(),
"claim": memory.claim,
"final_score": explanation.final_score,
});
if args.semantic {
entry["sem_score"] = json!(sem_score.unwrap_or(0.0));
}
if args.snippet {
let term_refs: Vec<&str> = query_terms.iter().map(|s| s.as_str()).collect();
let snippet = extract_snippet(&memory.claim, &term_refs);
let ranges: Vec<[usize; 2]> = snippet
.highlight_ranges
.iter()
.map(|r| [r.start, r.end])
.collect();
entry["snippet"] = json!({
"text": snippet_plain_text(&snippet),
"truncated": snippet.truncated,
"highlight_ranges": ranges,
});
}
if args.explain {
entry["explanation"] = json!({
"lexical_match": component_value(explanation.lexical_match),
"semantic_similarity": component_value(explanation.semantic_similarity),
"brightness": component_value(explanation.brightness),
"domain_overlap": component_value(explanation.domain_overlap),
"validation": component_value(explanation.validation),
"authority_weight": component_value(explanation.authority_weight),
"contradiction_risk": component_value(explanation.contradiction_risk),
"staleness_penalty": component_value(explanation.staleness_penalty),
});
entry["uncertainty"] = json!({
"proof_state": wire_string(uncertainty.proof_state),
"provenance_class": wire_string(uncertainty.provenance_class),
"semantic_trust": wire_string(uncertainty.semantic_trust),
"claim_ceiling": wire_string(uncertainty.claim_ceiling),
});
}
matches.push(entry);
}
}
if args.semantic && json_mode {
matches.sort_by(|a, b| {
let sa = a["final_score"].as_f64().unwrap_or(0.0);
let sb = b["final_score"].as_f64().unwrap_or(0.0);
sb.partial_cmp(&sa).unwrap_or(std::cmp::Ordering::Equal)
});
}
if !json_mode {
return Exit::Ok;
}
let payload = json!({
"query": args.query,
"match_count": matches.len(),
"matches": matches,
"claim_ceiling": "local_unsigned",
"forbidden_uses": [
"compliance_evidence",
"cross_system_trust_decision",
"external_reporting",
],
});
let envelope = Envelope::new("cortex.memory.search", Exit::Ok, payload);
output::emit(&envelope, Exit::Ok)
}
/// Runs `cortex memory embed`: enriches active memories with Ollama
/// embeddings for the resolved `model:dim` backend, skipping memories that
/// already have one. With `--preview`, only counts what would be enriched
/// without calling the embedder or writing anything.
///
/// Exit codes: `Usage` for a bad embedder configuration, `Internal` when the
/// store cannot be read or any per-memory enrichment failed, otherwise `Ok`.
fn embed(args: EmbedArgs) -> Exit {
    let cfg = EmbeddingBackend::resolve();
    // When configuration selects the stub backend, fall back to the default
    // Ollama endpoint/model so an explicit `embed` run still targets Ollama.
    let (cfg_endpoint, cfg_model, cfg_timeout_ms) = match cfg {
        EmbeddingBackend::Ollama {
            endpoint,
            model,
            timeout_ms,
            ..
        } => (endpoint, model, timeout_ms),
        EmbeddingBackend::Stub => (
            cortex_retrieval::DEFAULT_OLLAMA_ENDPOINT.to_string(),
            cortex_retrieval::DEFAULT_OLLAMA_EMBED_MODEL.to_string(),
            30_000u64,
        ),
    };
    // CLI flags take precedence over configuration; dim comes from the CLI.
    let endpoint = args.endpoint.unwrap_or(cfg_endpoint);
    let model = args.model.unwrap_or(cfg_model);
    let dim = args.dim;
    let timeout_ms = cfg_timeout_ms;
    let ollama_backend_id = OllamaEmbedder::backend_id_for(&model, dim);
    let embedder = match OllamaEmbedder::new(&endpoint, &model, dim) {
        Ok(e) => e.with_timeout_ms(timeout_ms),
        Err(err) => {
            eprintln!("cortex memory embed: failed to configure Ollama embedder: {err}");
            return Exit::Usage;
        }
    };
    let pool = match open_default_store("memory embed") {
        Ok(pool) => pool,
        Err(exit) => return exit,
    };
    let repo = MemoryRepo::new(&pool);
    let embed_repo = EmbeddingRepo::new(&pool);
    let memories = match repo.list_by_status("active") {
        Ok(m) => m,
        Err(err) => {
            eprintln!("cortex memory embed: failed to list active memories: {err}");
            return Exit::Internal;
        }
    };
    let total = memories.len();
    let mut would_enrich = 0usize; // memories lacking an Ollama embedding
    let mut enriched = 0usize; // embeddings actually written this run
    let mut errors = 0usize; // per-memory failures (embed / build / write)
    for memory in &memories {
        // A read failure is reported as a warning but treated as "missing",
        // so the memory is still (re)enriched rather than silently skipped.
        let has_ollama = match embed_repo.read(&memory.id, &ollama_backend_id) {
            Ok(Some(_)) => true,
            Ok(None) => false,
            Err(err) => {
                eprintln!(
                    "cortex memory embed: warning: failed to check Ollama embedding for {}: {err}",
                    memory.id
                );
                false
            }
        };
        if has_ollama {
            continue;
        }
        would_enrich += 1;
        // Preview mode only counts; never call the embedder or write.
        if args.preview {
            continue;
        }
        // Domain tags accompany the claim text as embedding context.
        let tags: Vec<String> = memory
            .domains_json
            .as_array()
            .map(|arr| {
                arr.iter()
                    .filter_map(|v| v.as_str().map(ToString::to_string))
                    .collect()
            })
            .unwrap_or_default();
        let vector = match embedder.embed(&memory.claim, &tags) {
            Ok(v) => v,
            Err(err) => {
                eprintln!(
                    "cortex memory embed: failed to embed memory {}: {err}",
                    memory.id
                );
                errors += 1;
                continue;
            }
        };
        let record =
            match EmbedRecord::new(memory.id, &ollama_backend_id, vector, chrono::Utc::now()) {
                Ok(r) => r,
                Err(err) => {
                    eprintln!(
                        "cortex memory embed: failed to build embed record for {}: {err}",
                        memory.id
                    );
                    errors += 1;
                    continue;
                }
            };
        if let Err(err) = embed_repo.write(&record) {
            eprintln!(
                "cortex memory embed: failed to write embedding for {}: {err}",
                memory.id
            );
            errors += 1;
            continue;
        }
        enriched += 1;
    }
    if args.preview {
        let json_mode = output::json_enabled();
        if !json_mode {
            println!(
                "cortex memory embed --preview: {would_enrich}/{total} memories would be enriched with {model}:{dim}"
            );
            return Exit::Ok;
        }
        let payload = serde_json::json!({
            "preview": true,
            "total_active": total,
            "would_enrich": would_enrich,
            "already_enriched": total - would_enrich,
            "model": model,
            "dim": dim,
            "backend_id": ollama_backend_id,
        });
        let envelope = Envelope::new("cortex.memory.embed", Exit::Ok, payload);
        return output::emit(&envelope, Exit::Ok);
    }
    let json_mode = output::json_enabled();
    if !json_mode {
        println!("Enriched {enriched}/{total} memories with {model}:{dim}");
        if errors > 0 {
            eprintln!("cortex memory embed: {errors} error(s) during enrichment");
        }
        // Any per-memory failure downgrades the whole run to an internal error.
        return if errors > 0 { Exit::Internal } else { Exit::Ok };
    }
    let exit = if errors > 0 { Exit::Internal } else { Exit::Ok };
    let payload = serde_json::json!({
        "preview": false,
        "total_active": total,
        "enriched": enriched,
        "already_enriched": total - would_enrich,
        "errors": errors,
        "model": model,
        "dim": dim,
        "backend_id": ollama_backend_id,
    });
    let envelope = Envelope::new("cortex.memory.embed", exit, payload);
    output::emit(&envelope, exit)
}
/// Lists active memories, optionally filtered by tags: TSV rows in text
/// mode, or a JSON envelope with full match entries in JSON mode.
fn list(args: ListArgs) -> Exit {
    let pool = match open_default_store("memory list") {
        Ok(pool) => pool,
        Err(exit) => return list_failure_envelope(exit, "failed to open store", &args.tag),
    };
    let repo = MemoryRepo::new(&pool);
    // Pick the repository query based on whether a tag filter was supplied;
    // both arms report on stderr and carry the detail to the failure envelope.
    let loaded = if args.tag.is_empty() {
        repo.list_by_status("active").map_err(|err| {
            eprintln!("cortex memory list: failed to read active memories: {err}");
            format!("failed to read active memories: {err}")
        })
    } else {
        repo.list_by_status_with_tags("active", &args.tag).map_err(|err| {
            eprintln!("cortex memory list: failed to read tag-filtered active memories: {err}");
            format!("failed to read tag-filtered active memories: {err}")
        })
    };
    let memories = match loaded {
        Ok(memories) => memories,
        Err(detail) => return list_failure_envelope(Exit::Internal, &detail, &args.tag),
    };
    if !output::json_enabled() {
        if memories.is_empty() {
            println!("cortex memory list: no matches");
        } else {
            for memory in &memories {
                let tags = string_array(&memory.domains_json).join(",");
                println!("{}\t{}\t{}", memory.id, tags, memory.claim);
            }
        }
        return Exit::Ok;
    }
    let mut entries: Vec<serde_json::Value> = Vec::with_capacity(memories.len());
    for memory in &memories {
        entries.push(json!({
            "memory_id": memory.id.to_string(),
            "claim": memory.claim,
            "authority": memory.authority,
            "domains": string_array(&memory.domains_json),
            "updated_at": memory.updated_at.to_rfc3339(),
        }));
    }
    let payload = json!({
        "tag_filter": args.tag,
        "match_count": entries.len(),
        "matches": entries,
    });
    let envelope = Envelope::new("cortex.memory.list", Exit::Ok, payload);
    output::emit(&envelope, Exit::Ok)
}
/// Emits an empty-result failure envelope for `memory list` in JSON mode;
/// in text mode the exit code alone carries the failure.
fn list_failure_envelope(exit: Exit, detail: &str, tag_filter: &[String]) -> Exit {
    if output::json_enabled() {
        let payload = json!({
            "detail": detail,
            "tag_filter": tag_filter,
            "matches": [],
            "match_count": 0,
        });
        output::emit(&Envelope::new("cortex.memory.list", exit, payload), exit)
    } else {
        exit
    }
}
/// Emits an empty-result failure envelope for `memory search` in JSON mode;
/// in text mode the exit code alone carries the failure.
fn search_failure_envelope(exit: Exit, detail: &str) -> Exit {
    if output::json_enabled() {
        let payload = json!({
            "detail": detail,
            "matches": [],
            "match_count": 0,
        });
        output::emit(&Envelope::new("cortex.memory.search", exit, payload), exit)
    } else {
        exit
    }
}
/// Serializes one score component as a `{raw, weight, contribution}` object.
fn component_value(component: ScoreComponent) -> serde_json::Value {
    let mut object = serde_json::Map::new();
    // Insertion order mirrors the rest of the module's JSON output.
    object.insert("raw".to_string(), json!(component.raw));
    object.insert("weight".to_string(), json!(component.weight));
    object.insert("contribution".to_string(), json!(component.contribution));
    serde_json::Value::Object(object)
}
/// Gate for default `memory search` retrieval: refuses search while any open
/// contradiction touches the active memory set and the conflict resolver does
/// not allow default use.
///
/// Returns `Ok(())` when no open contradiction involves an active memory, or
/// when `resolve_conflicts` still permits default use; otherwise returns a
/// human-readable reason for the refusal.
fn gate_open_contradictions_for_default_search(
    pool: &Pool,
    memories: &[MemoryRecord],
) -> Result<(), String> {
    // Index active memories by id for membership checks and input building.
    let active_by_id = memories
        .iter()
        .map(|memory| (memory.id.to_string(), memory))
        .collect::<BTreeMap<_, _>>();
    let contradictions = ContradictionRepo::new(pool)
        .list_open()
        .map_err(|err| format!("failed to read open contradictions: {err}"))?;
    let mut affected_ids = BTreeSet::new();
    // Undirected conflict graph: each contradiction edge is recorded in both
    // directions so either endpoint can look up its peers.
    let mut conflict_edges = BTreeMap::<String, BTreeSet<String>>::new();
    for contradiction in contradictions {
        let left_active = active_by_id.contains_key(&contradiction.left_ref);
        let right_active = active_by_id.contains_key(&contradiction.right_ref);
        // Contradictions entirely outside the active set cannot affect search.
        if !left_active && !right_active {
            continue;
        }
        // Exactly one side active: the conflict cannot be adjudicated from
        // the active set alone, so default retrieval is refused outright.
        if !(left_active && right_active) {
            return Err(format!(
                "open contradiction {} references unavailable memory and cannot be resolved for default retrieval",
                contradiction.id
            ));
        }
        affected_ids.insert(contradiction.left_ref.clone());
        affected_ids.insert(contradiction.right_ref.clone());
        conflict_edges
            .entry(contradiction.left_ref.clone())
            .or_default()
            .insert(contradiction.right_ref.clone());
        // The remaining uses move the refs (clones above happen first).
        conflict_edges
            .entry(contradiction.right_ref)
            .or_default()
            .insert(contradiction.left_ref);
    }
    if affected_ids.is_empty() {
        return Ok(());
    }
    // Build resolver inputs for the affected memories only.
    // NOTE(review): the proof hint is hard-coded to FullChainVerified; in the
    // `search` flow every active memory has already passed
    // verify_memory_proof_closure before this gate runs — confirm for any
    // new caller.
    let inputs = affected_ids
        .iter()
        .filter_map(|id| active_by_id.get(id.as_str()).copied())
        .map(|memory| {
            ConflictingMemoryInput::new(
                memory.id.to_string(),
                Some(memory.id.to_string()),
                memory.claim.clone(),
                AuthorityProofHint {
                    authority: authority_level(&memory.authority),
                    proof: ProofClosureHint::FullChainVerified,
                },
            )
            .with_conflicts(
                conflict_edges
                    .get(&memory.id.to_string())
                    .map(|ids| ids.iter().cloned().collect())
                    .unwrap_or_default(),
            )
        })
        .collect::<Vec<_>>();
    let output = resolve_conflicts(&inputs, &[]);
    output
        .require_default_use_allowed()
        .map_err(|err| format!("open contradiction blocks default retrieval use: {err}"))
}
/// Maps a memory's authority string onto the retrieval authority tiers:
/// human-attested sources rank highest, machine sources medium, anything
/// unrecognized lowest.
fn authority_level(authority: &str) -> AuthorityLevel {
    if matches!(authority, "user" | "operator") {
        AuthorityLevel::High
    } else if matches!(authority, "tool" | "system") {
        AuthorityLevel::Medium
    } else {
        AuthorityLevel::Low
    }
}
/// Converts a memory record into a lexical-index document (id, claim text,
/// and its domain tags).
fn memory_document(memory: &MemoryRecord) -> LexicalDocument {
    let claim = memory.claim.clone();
    let domains = string_array(&memory.domains_json);
    LexicalDocument::accepted_memory(memory.id, claim, domains)
}
/// Builds the score inputs for one memory from its salience JSON, authority,
/// and validation epoch.
fn score_inputs(memory: &MemoryRecord, lexical_match: f32, validation_epoch: u32) -> ScoreInputs {
    let salience = &memory.salience_json;
    // "score" is the preferred salience key; "brightness" is the legacy one.
    let brightness = json_number(salience, "score")
        .or_else(|| json_number(salience, "brightness"))
        .unwrap_or(0.5);
    // Only user-attested memories get full authority weight.
    let authority_weight = if memory.authority == "user" { 1.0 } else { 0.5 };
    ScoreInputs {
        lexical_match,
        brightness,
        domain_overlap: 0.0,
        // Any positive epoch counts as validated.
        validation: if validation_epoch > 0 { 1.0 } else { 0.0 },
        authority_weight,
        contradiction_risk: json_number(salience, "contradiction_risk").unwrap_or(0.0),
        staleness_penalty: json_number(salience, "staleness_penalty").unwrap_or(0.0),
    }
}
/// Returns the cosine similarity between the query embedding and the stored
/// embedding for `memory` under `backend_id`.
///
/// If no embedding is stored and the backend is the local stub, the stub
/// vector is computed on the fly and persisted ("warmed") best-effort; a
/// failed warm write only prints a warning and the similarity is still
/// returned. Returns `None` when the embedding is missing for a non-stub
/// backend or when any step fails (failures are reported on stderr).
fn compute_or_warm_embedding(
    pool: &cortex_store::Pool,
    memory: &cortex_store::repo::MemoryRecord,
    query_embed: &[f32],
    backend_id: &str,
) -> Option<f32> {
    let embed_repo = EmbeddingRepo::new(pool);
    let record_opt = match embed_repo.read(&memory.id, backend_id) {
        Ok(r) => r,
        Err(err) => {
            eprintln!(
                "cortex memory search: failed to read embedding ({backend_id}) for memory {}: {err}",
                memory.id
            );
            return None;
        }
    };
    let mem_vec: Vec<f32> = if let Some(record) = record_opt {
        record.vector
    } else if backend_id == STUB_BACKEND_ID {
        // No stored vector: recompute with the stub embedder, feeding the
        // memory's domain tags as embedding context.
        let embedder = LocalStubEmbedder::new();
        let tags = memory
            .domains_json
            .as_array()
            .map(|arr| {
                arr.iter()
                    .filter_map(|v| v.as_str().map(ToString::to_string))
                    .collect::<Vec<_>>()
            })
            .unwrap_or_default();
        let vec = match embedder.embed(&memory.claim, &tags) {
            Ok(v) => v,
            Err(err) => {
                eprintln!(
                    "cortex memory search: failed to embed memory {} with stub: {err}",
                    memory.id
                );
                return None;
            }
        };
        // Best-effort persistence of the freshly computed vector; warnings
        // only, since the similarity below does not depend on the write.
        match EmbedRecord::new(memory.id, STUB_BACKEND_ID, vec.clone(), chrono::Utc::now()) {
            Ok(record) => {
                if let Err(err) = embed_repo.write(&record) {
                    eprintln!(
                        "cortex memory search: warning: failed to warm BLAKE3 embedding for memory {}: {err}",
                        memory.id
                    );
                }
            }
            Err(err) => {
                eprintln!(
                    "cortex memory search: warning: failed to build BLAKE3 embed record for memory {}: {err}",
                    memory.id
                );
            }
        }
        vec
    } else {
        // Missing embeddings for non-stub backends are not warmed here;
        // `cortex memory embed` is the enrichment path for Ollama.
        return None;
    };
    Some(cosine_similarity(query_embed, &mem_vec))
}
/// Resolves the configured Ollama embedder along with its backend id.
/// Returns `None` when configuration selects the stub backend or the
/// embedder cannot be constructed.
fn resolve_ollama_backend_id() -> Option<(String, OllamaEmbedder)> {
    let EmbeddingBackend::Ollama {
        endpoint,
        model,
        dim,
        timeout_ms,
    } = EmbeddingBackend::resolve()
    else {
        return None;
    };
    let embedder = OllamaEmbedder::new(&endpoint, &model, dim).ok()?;
    let backend_id = embedder.backend_id().to_string();
    Some((backend_id, embedder.with_timeout_ms(timeout_ms)))
}
/// Prints one `--explain` score component line to stdout.
fn print_component(name: &str, component: ScoreComponent) {
    let (raw, weight, contribution) = (component.raw, component.weight, component.contribution);
    println!(" {name}: raw={raw:.4} weight={weight:.4} contribution={contribution:.4}");
}
/// Uncertainty metadata attached to each retrieval match, describing how far
/// the matched memory's claim may be trusted and why.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct RetrievalUncertainty {
    // Outcome of proof-closure verification for the memory.
    proof_state: ClaimProofState,
    // Attestation source class, derived from the memory's authority string.
    provenance_class: ProvenanceClass,
    // Semantic trust bucket, derived from the same authority string.
    semantic_trust: SemanticTrustClass,
    // Minimum of the ceilings implied by the fields above, capped at
    // LocalUnsigned (see retrieval_uncertainty).
    claim_ceiling: ClaimCeiling,
}
/// Derives retrieval uncertainty for a memory from its authority string and
/// proof-closure state; the claim ceiling is the minimum of every
/// contributing ceiling, never exceeding `LocalUnsigned`.
fn retrieval_uncertainty(
    memory: &MemoryRecord,
    proof_state: ClaimProofState,
) -> RetrievalUncertainty {
    let authority = memory.authority.as_str();
    // Human-attested authorities get operator provenance, machine ones get
    // tool provenance, and anything else is runtime-derived.
    let provenance_class = if matches!(authority, "user" | "operator") {
        ProvenanceClass::OperatorAttested
    } else if matches!(authority, "tool" | "system") {
        ProvenanceClass::ToolObserved
    } else {
        ProvenanceClass::RuntimeDerived
    };
    // Recognized authorities share the single-family trust bucket; unknown
    // authorities are candidate-only.
    let semantic_trust = if matches!(authority, "user" | "operator" | "tool" | "system") {
        SemanticTrustClass::SingleFamily
    } else {
        SemanticTrustClass::CandidateOnly
    };
    let mut claim_ceiling = ClaimCeiling::LocalUnsigned;
    for ceiling in [
        proof_state.claim_ceiling(),
        provenance_class.claim_ceiling(),
        semantic_trust.claim_ceiling(),
    ] {
        claim_ceiling = claim_ceiling.min(ceiling);
    }
    RetrievalUncertainty {
        proof_state,
        provenance_class,
        semantic_trust,
        claim_ceiling,
    }
}
/// Prints the `--explain` uncertainty lines (wire-format names) to stdout.
fn print_uncertainty(uncertainty: RetrievalUncertainty) {
    println!(" proof_state: {}", wire_string(uncertainty.proof_state));
    println!(" provenance_class: {}", wire_string(uncertainty.provenance_class));
    println!(" semantic_trust: {}", wire_string(uncertainty.semantic_trust));
    println!(" claim_ceiling: {}", wire_string(uncertainty.claim_ceiling));
}
/// Serializes a value and returns its wire-format string form; any value
/// that does not serialize to a JSON string becomes "unknown".
fn wire_string<T: serde::Serialize>(value: T) -> String {
    match serde_json::to_value(value) {
        Ok(serde_json::Value::String(text)) => text,
        _ => "unknown".to_string(),
    }
}
/// Extracts the string elements of a JSON array; non-arrays and non-string
/// elements yield nothing.
fn string_array(value: &serde_json::Value) -> Vec<String> {
    let mut strings = Vec::new();
    if let Some(items) = value.as_array() {
        for item in items {
            if let Some(text) = item.as_str() {
                strings.push(text.to_string());
            }
        }
    }
    strings
}
/// Reads `value[key]` as an `f32`; `None` when the key is absent or not a
/// JSON number.
fn json_number(value: &serde_json::Value, key: &str) -> Option<f32> {
    value
        .get(key)
        .and_then(|entry| entry.as_f64())
        .map(|number| number as f32)
}