use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use crate::{exit::Exit, paths::DataLayout};
// Env vars that override where the config file and data files live.
const CONFIG_ENV_KEY: &str = "CORTEX_CONFIG";
const DATA_DIR_ENV_KEY: &str = "CORTEX_DATA_DIR";
const DB_PATH_ENV_KEY: &str = "CORTEX_DB_PATH";
const EVENT_LOG_PATH_ENV_KEY: &str = "CORTEX_EVENT_LOG_PATH";
// Env vars that override LLM backend selection (see `LlmBackend::resolve`).
const LLM_BACKEND_ENV_KEY: &str = "CORTEX_LLM_BACKEND";
const LLM_MODEL_ENV_KEY: &str = "CORTEX_LLM_MODEL";
const LLM_ENDPOINT_ENV_KEY: &str = "CORTEX_LLM_ENDPOINT";
const LLM_API_KEY_ENV_KEY: &str = "CORTEX_LLM_API_KEY";
/// Default endpoint assumed for a locally running Ollama server.
pub(crate) const DEFAULT_OLLAMA_ENDPOINT: &str = "http://localhost:11434";
// Non-CORTEX_ env vars still worth including in `--print-config` output.
const ENV_KEYS: &[&str] = &[
    "APPDATA",
    "HOME",
    "RUST_LOG",
    "XDG_CONFIG_HOME",
    "XDG_DATA_HOME",
];
// Substrings matched against the upper-cased env key; any hit marks the
// variable as secret-like so its value is replaced with `REDACTED`.
// (Some entries overlap, e.g. "KEY" subsumes "API_KEY" — kept for clarity.)
const SECRET_MARKERS: &[&str] = &[
    "ACCESS_TOKEN",
    "API_KEY",
    "AUTH",
    "CREDENTIAL",
    "KEY",
    "PASS",
    "PASSWORD",
    "PRIVATE",
    "SECRET",
    "TOKEN",
];
/// Placeholder printed in place of secret-like values.
const REDACTED: &str = "<redacted>";
/// Fully-resolved configuration snapshot serialized to JSON by
/// `--print-config`. Field declaration order is the JSON field order.
#[derive(Debug, Serialize)]
pub(crate) struct EffectiveConfig {
    // Resolved storage locations, rendered via `Path::display`.
    data_dir: String,
    db_path: String,
    event_log_path: String,
    // Relevant environment variables, secret-like values already redacted.
    env: BTreeMap<String, String>,
}
/// Entry point for `cortex --print-config`: resolves the effective
/// configuration and writes it to stdout as pretty-printed JSON.
///
/// Returns `Exit::Ok` on success, the resolution error's exit code if the
/// config could not be resolved, or `Exit::Internal` on a serialization
/// failure (which should not happen for this plain struct).
pub(crate) fn print_effective_config() -> Exit {
    let resolved = match effective_config() {
        Err(exit) => return exit,
        Ok(config) => config,
    };
    let json = match serde_json::to_string_pretty(&resolved) {
        Ok(json) => json,
        Err(err) => {
            eprintln!("cortex --print-config: failed to serialize config: {err}");
            return Exit::Internal;
        }
    };
    println!("{json}");
    Exit::Ok
}
/// Builds the `EffectiveConfig` snapshot: the resolved storage layout plus
/// the relevant (redacted) environment variables.
fn effective_config() -> Result<EffectiveConfig, Exit> {
    let layout = resolve_configured_layout(load_file_config()?)?;
    Ok(EffectiveConfig {
        data_dir: display_path(&layout.data_dir),
        db_path: display_path(&layout.db_path),
        event_log_path: display_path(&layout.event_log_path),
        env: relevant_env(),
    })
}
/// On-disk configuration file (TOML). `deny_unknown_fields` makes typos in
/// key names a parse error instead of being silently ignored.
#[derive(Debug, Default, Deserialize)]
#[serde(deny_unknown_fields)]
struct FileConfig {
    // Storage locations; env vars take precedence over these
    // (see `resolve_configured_layout`).
    data_dir: Option<PathBuf>,
    db_path: Option<PathBuf>,
    event_log_path: Option<PathBuf>,
    /// Optional `[llm]` section.
    #[serde(default)]
    llm: Option<LlmFileConfig>,
    /// Optional `[embeddings]` section.
    #[serde(default)]
    embeddings: Option<EmbeddingsFileConfig>,
    /// Optional `[mcp]` section.
    #[serde(default)]
    mcp: Option<McpFileConfig>,
}
/// `[embeddings]` section of the config file.
#[derive(Debug, Deserialize, Clone)]
#[serde(deny_unknown_fields)]
pub(crate) struct EmbeddingsFileConfig {
    /// Backend name; defaults to "stub" when omitted.
    #[serde(default = "default_embedding_backend")]
    pub(crate) backend: String,
    /// Optional `[embeddings.ollama]` sub-section.
    pub(crate) ollama: Option<OllamaEmbedFileConfig>,
}
/// `[embeddings.ollama]` settings; every field has a serde default so the
/// whole section can be partially specified.
#[derive(Debug, Deserialize, Clone)]
#[serde(deny_unknown_fields)]
pub(crate) struct OllamaEmbedFileConfig {
    /// Ollama HTTP endpoint; defaults to `DEFAULT_OLLAMA_ENDPOINT`.
    #[serde(default = "default_ollama_embed_endpoint")]
    pub(crate) endpoint: String,
    /// Embedding model name; defaults to "nomic-embed-text".
    #[serde(default = "default_ollama_embed_model")]
    pub(crate) model: String,
    /// Embedding vector dimension; defaults to 768.
    #[serde(default = "default_ollama_embed_dim")]
    pub(crate) dim: usize,
    /// Request timeout in milliseconds; defaults to 30_000.
    #[serde(default = "default_ollama_embed_timeout_ms")]
    pub(crate) timeout_ms: u64,
}
/// Serde default for `EmbeddingsFileConfig::backend`.
fn default_embedding_backend() -> String {
    String::from("stub")
}
/// Serde default for `OllamaEmbedFileConfig::endpoint`.
fn default_ollama_embed_endpoint() -> String {
    String::from(DEFAULT_OLLAMA_ENDPOINT)
}
/// Serde default for `OllamaEmbedFileConfig::model`.
fn default_ollama_embed_model() -> String {
    String::from("nomic-embed-text")
}

/// Serde default for `OllamaEmbedFileConfig::dim`.
fn default_ollama_embed_dim() -> usize {
    768
}

/// Serde default for `OllamaEmbedFileConfig::timeout_ms` (30 seconds).
fn default_ollama_embed_timeout_ms() -> u64 {
    30 * 1000
}
/// Resolved embedding backend selection.
#[derive(Debug, Clone)]
pub(crate) enum EmbeddingBackend {
    /// No real embedding service configured.
    Stub,
    /// Embeddings served by an Ollama instance.
    Ollama {
        endpoint: String,
        model: String,
        // Expected embedding vector dimension.
        dim: usize,
        // Request timeout in milliseconds.
        timeout_ms: u64,
    },
}
impl EmbeddingBackend {
    /// Resolves the embedding backend: env vars win over the config file,
    /// which wins over built-in defaults; anything other than "ollama"
    /// selects the stub backend.
    ///
    /// Note: `dim` and `timeout_ms` have no env override — file or default
    /// only. The fallback literals mirror the serde defaults above.
    pub(crate) fn resolve() -> Self {
        // Treat empty env vars the same as unset ones.
        fn env_non_empty(key: &str) -> Option<String> {
            std::env::var(key).ok().filter(|v| !v.is_empty())
        }
        // Config-file errors silently degrade to "no [embeddings] section".
        let file = load_file_config().ok().and_then(|fc| fc.embeddings);
        let ollama = file.as_ref().and_then(|e| e.ollama.as_ref());
        let backend = env_non_empty("CORTEX_EMBEDDING_BACKEND")
            .or_else(|| file.as_ref().map(|e| e.backend.clone()))
            .unwrap_or_else(|| "stub".to_string());
        if backend != "ollama" {
            return EmbeddingBackend::Stub;
        }
        let endpoint = env_non_empty("CORTEX_EMBEDDING_ENDPOINT")
            .or_else(|| ollama.map(|o| o.endpoint.clone()))
            .unwrap_or_else(|| DEFAULT_OLLAMA_ENDPOINT.to_string());
        let model = env_non_empty("CORTEX_EMBEDDING_MODEL")
            .or_else(|| ollama.map(|o| o.model.clone()))
            .unwrap_or_else(|| "nomic-embed-text".to_string());
        EmbeddingBackend::Ollama {
            endpoint,
            model,
            dim: ollama.map(|o| o.dim).unwrap_or(768),
            timeout_ms: ollama.map(|o| o.timeout_ms).unwrap_or(30_000),
        }
    }
}
/// `[mcp]` section of the config file.
#[derive(Debug, Default, Deserialize, Clone)]
#[serde(deny_unknown_fields)]
pub(crate) struct McpFileConfig {
    /// Whether MCP mutations auto-commit; defaults to false when omitted.
    #[serde(default)]
    pub(crate) auto_commit: bool,
}
/// Where the effective `auto_commit` value came from, for diagnostics.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub(crate) enum AutoCommitSource {
    /// Neither env var nor config file enabled it.
    #[default]
    NotSet,
    /// Enabled via `CORTEX_MCP_AUTO_COMMIT=1`.
    EnvVar,
    /// Enabled via the config file's `[mcp]` section.
    ConfigFile,
}
/// Resolved MCP server configuration.
#[derive(Debug, Clone, Default)]
pub(crate) struct McpConfig {
    // Effective auto-commit flag.
    pub(crate) auto_commit: bool,
    // Provenance of that flag (env var beats config file).
    pub(crate) auto_commit_source: AutoCommitSource,
}
impl McpConfig {
    /// Resolves MCP settings. `CORTEX_MCP_AUTO_COMMIT=1` (exactly "1")
    /// short-circuits everything; otherwise the config file decides, and a
    /// broken config file degrades to `auto_commit = false` with a warning.
    pub(crate) fn resolve() -> Self {
        if std::env::var("CORTEX_MCP_AUTO_COMMIT").as_deref() == Ok("1") {
            return McpConfig {
                auto_commit: true,
                auto_commit_source: AutoCommitSource::EnvVar,
            };
        }
        let enabled_by_file = load_file_config()
            .map_err(|_| {
                // Warn but don't fail: serve should still start with the default.
                eprintln!(
                    "cortex serve: warning: failed to read or parse config file; auto_commit defaults to false"
                );
            })
            .ok()
            .and_then(|fc| fc.mcp)
            .map_or(false, |m| m.auto_commit);
        McpConfig {
            auto_commit: enabled_by_file,
            auto_commit_source: if enabled_by_file {
                AutoCommitSource::ConfigFile
            } else {
                AutoCommitSource::NotSet
            },
        }
    }
}
/// `[llm]` section of the config file.
#[derive(Debug, Deserialize, Clone)]
#[serde(deny_unknown_fields)]
pub(crate) struct LlmFileConfig {
    /// Backend name; defaults to "offline" when omitted.
    #[serde(default = "default_llm_backend")]
    pub(crate) backend: String,
    /// Optional `[llm.ollama]` sub-section.
    pub(crate) ollama: Option<OllamaFileConfig>,
    /// Optional `[llm.claude]` sub-section.
    pub(crate) claude: Option<ClaudeFileConfig>,
    /// Optional `[llm.openai-compat]` sub-section (TOML key uses the dash).
    #[serde(rename = "openai-compat")]
    pub(crate) openai_compat: Option<OpenAiCompatFileConfig>,
}
/// `[llm.claude]` settings; `model` is required, the rest have defaults.
#[derive(Debug, Deserialize, Clone)]
#[serde(deny_unknown_fields)]
pub(crate) struct ClaudeFileConfig {
    pub(crate) model: String,
    /// Max tokens per completion; defaults to 4096.
    #[serde(default = "default_claude_max_tokens")]
    pub(crate) max_tokens: u32,
    /// Request timeout in milliseconds; defaults to 60_000.
    #[serde(default = "default_claude_timeout_ms")]
    pub(crate) timeout_ms: u64,
    /// Max data sensitivity allowed to this backend; defaults to "medium".
    #[serde(default = "default_claude_max_sensitivity")]
    pub(crate) max_sensitivity: String,
}
/// Serde default for `ClaudeFileConfig::max_tokens`.
fn default_claude_max_tokens() -> u32 {
    4096
}

/// Serde default for `ClaudeFileConfig::timeout_ms` (60 seconds).
fn default_claude_timeout_ms() -> u64 {
    60 * 1000
}

/// Serde default for `ClaudeFileConfig::max_sensitivity`.
fn default_claude_max_sensitivity() -> String {
    String::from("medium")
}
/// `[llm.ollama]` settings; endpoint and model are required here (unlike
/// the embeddings variant, which defaults both).
#[derive(Debug, Deserialize, Clone)]
#[serde(deny_unknown_fields)]
pub(crate) struct OllamaFileConfig {
    pub(crate) endpoint: String,
    pub(crate) model: String,
    /// Request timeout in milliseconds; defaults to 30_000.
    #[serde(default = "default_ollama_timeout_ms")]
    pub(crate) timeout_ms: u64,
}
/// `[llm.openai-compat]` settings for any OpenAI-compatible server.
#[derive(Debug, Deserialize, Clone)]
#[serde(deny_unknown_fields)]
pub(crate) struct OpenAiCompatFileConfig {
    pub(crate) base_url: String,
    pub(crate) model: String,
    /// API key; defaults to "" (treated as "no key" during resolution).
    #[serde(default)]
    pub(crate) api_key: String,
    /// Request timeout in milliseconds; defaults to 60_000.
    #[serde(default = "default_openai_compat_timeout_ms")]
    pub(crate) timeout_ms: u64,
    /// Max data sensitivity allowed to this backend; defaults to "medium".
    #[serde(default = "default_openai_compat_max_sensitivity")]
    pub(crate) max_sensitivity: String,
}
/// Serde default for `OpenAiCompatFileConfig::timeout_ms` (60 seconds).
fn default_openai_compat_timeout_ms() -> u64 {
    60 * 1000
}

/// Serde default for `OpenAiCompatFileConfig::max_sensitivity`.
fn default_openai_compat_max_sensitivity() -> String {
    String::from("medium")
}

/// Serde default for `LlmFileConfig::backend`.
fn default_llm_backend() -> String {
    String::from("offline")
}

/// Serde default for `OllamaFileConfig::timeout_ms` (30 seconds).
fn default_ollama_timeout_ms() -> u64 {
    30 * 1000
}
/// Resolved LLM backend selection.
// Some fields are not read by all build configurations, hence the allow.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub(crate) enum LlmBackend {
    /// No LLM configured.
    Offline,
    /// Anthropic Claude.
    Claude {
        model: String,
        max_tokens: u32,
        // Request timeout in milliseconds.
        timeout_ms: u64,
        // Max data sensitivity allowed to this backend (e.g. "medium").
        max_sensitivity: String,
    },
    /// Local or remote Ollama server.
    Ollama {
        endpoint: String,
        model: String,
        // Request timeout in milliseconds.
        timeout_ms: u64,
    },
    /// Any OpenAI-compatible HTTP server.
    OpenAiCompat {
        base_url: String,
        model: String,
        // None means "no key configured" (empty strings are normalized away).
        api_key: Option<String>,
        // Request timeout in milliseconds.
        timeout_ms: u64,
        // Max data sensitivity allowed to this backend.
        max_sensitivity: String,
    },
}
impl LlmBackend {
    /// Resolves the active LLM backend.
    ///
    /// Precedence per setting: env var (empty values treated as unset),
    /// then config file, then built-in default. The backend name itself
    /// defaults to "offline". Timeouts and sensitivity have no env
    /// override; they come from the file or the hard-coded defaults
    /// (which mirror the serde default fns above).
    pub(crate) fn resolve() -> Self {
        // A missing or unparsable config file silently degrades to None here.
        let file_llm = load_file_config().ok().and_then(|fc| fc.llm);
        let env_backend = std::env::var(LLM_BACKEND_ENV_KEY)
            .ok()
            .filter(|s| !s.is_empty());
        let env_model = std::env::var(LLM_MODEL_ENV_KEY)
            .ok()
            .filter(|s| !s.is_empty());
        // Note: the same endpoint env var doubles as ollama `endpoint` and
        // openai-compat `base_url`, depending on the selected backend.
        let env_endpoint = std::env::var(LLM_ENDPOINT_ENV_KEY)
            .ok()
            .filter(|s| !s.is_empty());
        let env_api_key = std::env::var(LLM_API_KEY_ENV_KEY)
            .ok()
            .filter(|s| !s.is_empty());
        let backend_str = env_backend
            .as_deref()
            .or_else(|| file_llm.as_ref().map(|c| c.backend.as_str()))
            .unwrap_or("offline");
        match backend_str {
            "ollama" => {
                // Model falls back to "" when unset everywhere; presumably
                // validated downstream — TODO confirm at call sites.
                let model = env_model
                    .or_else(|| {
                        file_llm
                            .as_ref()
                            .and_then(|c| c.ollama.as_ref())
                            .map(|o| o.model.clone())
                    })
                    .unwrap_or_default();
                let endpoint = env_endpoint
                    .or_else(|| {
                        file_llm
                            .as_ref()
                            .and_then(|c| c.ollama.as_ref())
                            .map(|o| o.endpoint.clone())
                    })
                    .unwrap_or_else(|| DEFAULT_OLLAMA_ENDPOINT.to_string());
                let timeout_ms = file_llm
                    .as_ref()
                    .and_then(|c| c.ollama.as_ref())
                    .map(|o| o.timeout_ms)
                    .unwrap_or(30_000);
                LlmBackend::Ollama {
                    endpoint,
                    model,
                    timeout_ms,
                }
            }
            "claude" => {
                let model = env_model
                    .or_else(|| {
                        file_llm
                            .as_ref()
                            .and_then(|c| c.claude.as_ref())
                            .map(|c| c.model.clone())
                    })
                    .unwrap_or_default();
                let claude_cfg = file_llm.as_ref().and_then(|c| c.claude.as_ref());
                LlmBackend::Claude {
                    model,
                    max_tokens: claude_cfg.map(|c| c.max_tokens).unwrap_or(4096),
                    timeout_ms: claude_cfg.map(|c| c.timeout_ms).unwrap_or(60_000),
                    max_sensitivity: claude_cfg
                        .map(|c| c.max_sensitivity.clone())
                        .unwrap_or_else(|| "medium".to_string()),
                }
            }
            "openai-compat" => {
                let base_url = env_endpoint
                    .or_else(|| {
                        file_llm
                            .as_ref()
                            .and_then(|c| c.openai_compat.as_ref())
                            .map(|o| o.base_url.clone())
                    })
                    .unwrap_or_else(|| "http://localhost:1234".to_string());
                let model = env_model
                    .or_else(|| {
                        file_llm
                            .as_ref()
                            .and_then(|c| c.openai_compat.as_ref())
                            .map(|o| o.model.clone())
                    })
                    .unwrap_or_default();
                // An empty file api_key counts as "not configured".
                let api_key = env_api_key.or_else(|| {
                    file_llm
                        .as_ref()
                        .and_then(|c| c.openai_compat.as_ref())
                        .and_then(|o| {
                            if o.api_key.is_empty() {
                                None
                            } else {
                                Some(o.api_key.clone())
                            }
                        })
                });
                let timeout_ms = file_llm
                    .as_ref()
                    .and_then(|c| c.openai_compat.as_ref())
                    .map(|o| o.timeout_ms)
                    .unwrap_or(60_000);
                let max_sensitivity = file_llm
                    .as_ref()
                    .and_then(|c| c.openai_compat.as_ref())
                    .map(|o| o.max_sensitivity.clone())
                    .unwrap_or_else(|| "medium".to_string());
                LlmBackend::OpenAiCompat {
                    base_url,
                    model,
                    api_key,
                    timeout_ms,
                    max_sensitivity,
                }
            }
            // Unrecognized backend names also land here, i.e. they silently
            // select Offline rather than erroring.
            _ => LlmBackend::Offline,
        }
    }
}
/// Loads the TOML config file.
///
/// An explicit `CORTEX_CONFIG` path must exist and parse; otherwise the
/// default XDG location is tried and a missing file simply yields
/// `FileConfig::default()`.
///
/// NOTE(review): error messages are prefixed "cortex --print-config" even
/// though this is also called from serve/resolve paths — confirm whether
/// the prefix should be context-dependent.
fn load_file_config() -> Result<FileConfig, Exit> {
    let explicit = std::env::var_os(CONFIG_ENV_KEY).filter(|value| !value.is_empty());
    let (path, required) = if let Some(p) = explicit {
        (PathBuf::from(p), true)
    } else if let Some(p) = default_config_path() {
        (p, false)
    } else {
        return Ok(FileConfig::default());
    };
    match std::fs::read_to_string(&path) {
        Ok(text) => toml::from_str(&text).map_err(|err| {
            eprintln!(
                "cortex --print-config: failed to parse config file {}: {err}",
                path.display()
            );
            Exit::Usage
        }),
        Err(err) if !required && err.kind() == std::io::ErrorKind::NotFound => {
            Ok(FileConfig::default())
        }
        Err(err) => {
            eprintln!(
                "cortex --print-config: failed to read config file {}: {err}",
                path.display()
            );
            Err(Exit::PreconditionUnmet)
        }
    }
}
/// Combines env-var and file-config path settings into a `DataLayout`.
///
/// Env vars win over the config file for each path. When a data dir is
/// configured it anchors any unset db/event-log paths; otherwise the
/// decision is delegated to `DataLayout::resolve`.
fn resolve_configured_layout(file_config: FileConfig) -> Result<DataLayout, Exit> {
    let db_path = env_path(DB_PATH_ENV_KEY).or(file_config.db_path);
    let event_log_path = env_path(EVENT_LOG_PATH_ENV_KEY).or(file_config.event_log_path);
    if let Some(data_dir) = env_path(DATA_DIR_ENV_KEY).or(file_config.data_dir) {
        return Ok(DataLayout {
            db_path: db_path.unwrap_or_else(|| data_dir.join("cortex.db")),
            event_log_path: event_log_path.unwrap_or_else(|| data_dir.join("events.jsonl")),
            data_dir,
        });
    }
    DataLayout::resolve(db_path, event_log_path)
}
/// Reads an env var as a path, treating unset and empty values as absent.
fn env_path(key: &str) -> Option<PathBuf> {
    match std::env::var_os(key) {
        Some(value) if !value.is_empty() => Some(PathBuf::from(value)),
        _ => None,
    }
}
/// Default config file location: `$XDG_CONFIG_HOME/cortex/config.toml`
/// when that variable is set and non-empty, otherwise the platform config
/// dir reported by the `dirs` crate. Returns `None` when neither exists.
fn default_config_path() -> Option<PathBuf> {
    std::env::var_os("XDG_CONFIG_HOME")
        .filter(|value| !value.is_empty())
        .map(PathBuf::from)
        .or_else(dirs::config_dir)
        .map(|base| base.join("cortex").join("config.toml"))
}
/// Collects the environment variables worth reporting: the small allow-list
/// in `ENV_KEYS` plus everything prefixed `CORTEX_`, with secret-like
/// values redacted.
fn relevant_env() -> BTreeMap<String, String> {
    let mut env = BTreeMap::new();
    for (key, value) in std::env::vars() {
        if ENV_KEYS.contains(&key.as_str()) || key.starts_with("CORTEX_") {
            // Redact first so `key` can then be moved into the map,
            // avoiding the redundant `key.clone()` the original needed.
            let redacted = redact_env_value(&key, value);
            env.insert(key, redacted);
        }
    }
    env
}
/// Returns the value to display for an env var: `REDACTED` when the key
/// looks secret-like, or when the LLM model value embeds a sha256 digest;
/// otherwise the value unchanged.
fn redact_env_value(key: &str, value: String) -> String {
    let digest_model = key == LLM_MODEL_ENV_KEY && value_contains_sha256_digest(&value);
    if is_secret_like_key(key) || digest_model {
        REDACTED.to_string()
    } else {
        value
    }
}
/// True when `value` contains an `@sha256:` marker followed by exactly 64
/// hex digits (i.e. a full sha256 digest in an OCI-style reference).
fn value_contains_sha256_digest(value: &str) -> bool {
    match value.split_once("@sha256:") {
        // Count the run of hex digits immediately after the marker; it must
        // be exactly 64 — shorter or longer runs are not a digest.
        Some((_, tail)) => tail.chars().take_while(|c| c.is_ascii_hexdigit()).count() == 64,
        None => false,
    }
}
/// Case-insensitive test for secret-like env var names: the upper-cased
/// key is checked against every marker in `SECRET_MARKERS`.
fn is_secret_like_key(key: &str) -> bool {
    let normalized = key.to_ascii_uppercase();
    SECRET_MARKERS.iter().any(|marker| normalized.contains(marker))
}
/// Renders a path for human-readable output via `Path::display`.
fn display_path(path: &Path) -> String {
    format!("{}", path.display())
}
#[cfg(test)]
mod tests {
    use super::{is_secret_like_key, redact_env_value, REDACTED};
    // Secret-like keys lose their value entirely; benign keys pass through.
    #[test]
    fn secret_like_keys_are_redacted() {
        assert_eq!(
            redact_env_value("CORTEX_API_TOKEN", "secret".to_string()),
            REDACTED
        );
        assert_eq!(
            redact_env_value("CORTEX_PROFILE", "local".to_string()),
            "local"
        );
    }
    // Marker matching upper-cases the key first, so case never matters.
    #[test]
    fn key_names_are_classified_case_insensitively() {
        assert!(is_secret_like_key("cortex_private_key_path"));
        assert!(is_secret_like_key("CORTEX_PASSWORD"));
        assert!(!is_secret_like_key("CORTEX_FIXTURES_DIR"));
    }
}