use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fs;
use std::path::PathBuf;
/// Root configuration for the `st` tool, persisted as TOML at
/// `~/.st/config.toml` (see `StConfig::config_path`).
///
/// Every section is `#[serde(default)]`, so a partial or empty config file
/// deserializes cleanly with missing sections filled from their `Default`s.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct StConfig {
    /// Provider API keys; environment variables are consulted as a
    /// fallback by `get_api_key`.
    #[serde(default)]
    pub api_keys: ApiKeys,
    /// Model selection: default model, user aliases, and a blocklist.
    #[serde(default)]
    pub models: ModelConfig,
    /// Local daemon settings (port, autostart, external access).
    #[serde(default)]
    pub daemon: DaemonConfig,
    /// Safety toggles and per-model trust scores.
    #[serde(default)]
    pub safety: SafetyConfig,
}
/// API keys keyed by provider. All fields are optional; `None` means
/// "not configured in the file" — an environment variable may still supply
/// the key (see `StConfig::get_api_key`).
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ApiKeys {
    pub anthropic: Option<String>,
    pub openai: Option<String>,
    pub google: Option<String>,
    pub openrouter: Option<String>,
    pub grok: Option<String>,
    /// Keys for providers not covered by the named fields, looked up by
    /// lowercased provider name in `get_api_key`.
    #[serde(default)]
    pub custom: HashMap<String, String>,
}
/// Model selection settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelConfig {
    /// Model used when the caller does not specify one.
    pub default_model: String,
    /// Short alias -> full model id (e.g. "claude" -> "claude-sonnet-4-6").
    #[serde(default)]
    pub aliases: HashMap<String, String>,
    /// Substrings of model ids that are refused; matched by
    /// `StConfig::is_model_blocked` via substring containment.
    #[serde(default)]
    pub blocked: Vec<String>,
}
impl Default for ModelConfig {
    /// Built-in defaults: sonnet as the default model, a handful of
    /// convenience aliases, and "greatcoderMDK" on the blocklist.
    fn default() -> Self {
        let aliases: HashMap<String, String> = [
            ("claude", "claude-sonnet-4-6"),
            ("opus", "claude-opus-4-6"),
            ("haiku", "claude-haiku-4-5"),
            ("gpt4", "gpt-4o"),
            ("gemini", "gemini-2.0-flash"),
        ]
        .into_iter()
        .map(|(alias, model)| (alias.to_string(), model.to_string()))
        .collect();
        Self {
            default_model: String::from("claude-sonnet-4-6"),
            aliases,
            blocked: vec![String::from("greatcoderMDK")],
        }
    }
}
/// Settings for the background daemon.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DaemonConfig {
    /// TCP port the daemon listens on (default 28428).
    pub port: u16,
    /// Start the daemon automatically when needed (default false).
    pub auto_start: bool,
    /// Accept connections from other hosts (default false).
    /// NOTE(review): presumably false means loopback-only — confirm against
    /// the daemon's bind logic.
    pub allow_external: bool,
}
impl Default for DaemonConfig {
fn default() -> Self {
Self {
port: 28428,
auto_start: false,
allow_external: false,
}
}
}
/// Safety controls and per-model trust scores.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SafetyConfig {
    /// Enable the custodian safety layer (default true).
    pub custodian_enabled: bool,
    /// Enable transparency logging (default true).
    pub transparency_logging: bool,
    /// Trust score per model id; unknown models fall back to 5 in
    /// `StConfig::get_model_score`. Built-in defaults range 2-10.
    #[serde(default)]
    pub model_scores: HashMap<String, u8>,
}
impl Default for SafetyConfig {
fn default() -> Self {
let mut scores = HashMap::new();
scores.insert("claude-opus-4-6".into(), 10);
scores.insert("claude-sonnet-4-6".into(), 10);
scores.insert("claude-haiku-4-5".into(), 10);
scores.insert("gpt-4o".into(), 9);
scores.insert("gpt-4-turbo".into(), 9);
scores.insert("gemini-2.0-flash".into(), 9);
scores.insert("greatcoderMDK".into(), 2);
Self {
custodian_enabled: true,
transparency_logging: true,
model_scores: scores,
}
}
}
impl StConfig {
pub fn config_path() -> Result<PathBuf> {
let st_dir = dirs::home_dir()
.context("Could not find home directory")?
.join(".st");
fs::create_dir_all(&st_dir)?;
Ok(st_dir.join("config.toml"))
}
pub fn load() -> Result<Self> {
let path = Self::config_path()?;
if path.exists() {
let content = fs::read_to_string(&path)
.with_context(|| format!("Failed to read {}", path.display()))?;
let config: StConfig = toml::from_str(&content)
.with_context(|| format!("Failed to parse {}", path.display()))?;
Ok(config)
} else {
let config = Self::default();
config.save()?;
Ok(config)
}
}
pub fn save(&self) -> Result<()> {
let path = Self::config_path()?;
let content = toml::to_string_pretty(self)?;
fs::write(&path, content)?;
Ok(())
}
pub fn get_api_key(&self, provider: &str) -> Option<String> {
let from_config = match provider.to_lowercase().as_str() {
"anthropic" | "claude" => self.api_keys.anthropic.clone(),
"openai" | "gpt" => self.api_keys.openai.clone(),
"google" | "gemini" => self.api_keys.google.clone(),
"openrouter" => self.api_keys.openrouter.clone(),
"grok" | "xai" => self.api_keys.grok.clone(),
other => self.api_keys.custom.get(other).cloned(),
};
from_config.or_else(|| {
let env_var = match provider.to_lowercase().as_str() {
"anthropic" | "claude" => "ANTHROPIC_API_KEY",
"openai" | "gpt" => "OPENAI_API_KEY",
"google" | "gemini" => "GOOGLE_API_KEY",
"openrouter" => "OPENROUTER_API_KEY",
"grok" | "xai" => "XAI_API_KEY",
_ => return None,
};
std::env::var(env_var).ok()
})
}
pub fn is_model_blocked(&self, model: &str) -> bool {
self.models.blocked.iter().any(|b| model.contains(b))
}
pub fn get_model_score(&self, model: &str) -> u8 {
self.safety.model_scores.get(model).copied().unwrap_or(5) }
}