use directories::ProjectDirs;
use figment::Figment;
use figment::providers::{Env, Format, Serialized, Toml};
use secrecy::SecretString;
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::PathBuf;
use tracing::warn;
use crate::cli::Cli;
use crate::error::{Error, Result};
/// Commit-message formatting options, configured under the `[format]` table.
///
/// All three options default to `true` (see `default_true`), so users opt
/// *out* of behavior rather than in.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CommitFormat {
/// Include a body/description section in the generated message.
#[serde(default = "default_true")]
pub include_body: bool,
/// Include a scope in the type prefix, e.g. `feat(scope): subject`.
#[serde(default = "default_true")]
pub include_scope: bool,
/// Force the first character of the subject to lowercase.
#[serde(default = "default_true")]
pub lowercase_subject: bool,
}
impl Default for CommitFormat {
    /// Everything enabled by default; config/CLI can switch options off.
    fn default() -> Self {
        let enabled = true;
        Self {
            include_body: enabled,
            include_scope: enabled,
            lowercase_subject: enabled,
        }
    }
}
// `#[serde(default = "...")]` requires a function path; this supplies the
// `true` default shared by the `CommitFormat` booleans above.
fn default_true() -> bool {
true
}
/// Supported LLM backends.
///
/// `#[non_exhaustive]` so new providers can be added without a breaking
/// change; serialized in lowercase (`"ollama"`, `"openai"`, `"anthropic"`).
#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize, PartialEq, Eq)]
#[non_exhaustive]
#[serde(rename_all = "lowercase")]
pub enum Provider {
/// Local Ollama server — the default; requires no API key (see `validate`).
#[default]
Ollama,
OpenAI,
Anthropic,
}
impl std::fmt::Display for Provider {
    /// Renders the lowercase provider name, matching the serde representation.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Self::Ollama => "ollama",
            Self::OpenAI => "openai",
            Self::Anthropic => "anthropic",
        };
        f.write_str(name)
    }
}
/// Fully-merged application configuration.
///
/// Built by `Config::load` from defaults, TOML files, environment variables,
/// and CLI flags. `Debug` is implemented manually below to redact `api_key`.
#[derive(Clone, Serialize, Deserialize)]
pub struct Config {
/// Which LLM backend to use.
#[serde(default)]
pub provider: Provider,
/// Model name passed to the provider.
#[serde(default = "default_model")]
pub model: String,
/// Ollama server URL; must start with http:// or https:// (see `validate`).
#[serde(default = "default_ollama_host")]
pub ollama_host: String,
// Never serialized back out (skip_serializing), so the key cannot leak into
// generated config files or the figment defaults layer.
#[serde(default, skip_serializing)]
pub api_key: Option<SecretString>,
/// Maximum diff lines included in the prompt (validated 10–10000).
#[serde(default = "default_max_diff_lines")]
pub max_diff_lines: usize,
/// Maximum lines per file in the diff (validated 10–1000).
#[serde(default = "default_max_file_lines")]
pub max_file_lines: usize,
/// Prompt budget in characters (validated 1000–200000).
#[serde(default = "default_max_context_chars")]
pub max_context_chars: usize,
/// Request timeout in seconds (validated 1–3600).
#[serde(default = "default_timeout_secs")]
pub timeout_secs: u64,
/// Sampling temperature for the LLM.
#[serde(default = "default_temperature")]
pub temperature: f32,
/// Maximum tokens to generate.
#[serde(default = "default_num_predict")]
pub num_predict: u32,
/// Enable thinking/reasoning mode for models that support it.
#[serde(default)]
pub think: bool,
/// Override base URL for OpenAI-compatible APIs.
#[serde(default)]
pub openai_base_url: Option<String>,
/// Override base URL for the Anthropic API.
#[serde(default)]
pub anthropic_base_url: Option<String>,
/// Rename-detection similarity threshold, 0–100.
#[serde(default = "default_rename_threshold")]
pub rename_threshold: u8,
/// Extra regex patterns added to secret scanning.
#[serde(default)]
pub custom_secret_patterns: Vec<String>,
/// Names of built-in secret patterns to disable.
#[serde(default)]
pub disabled_secret_patterns: Vec<String>,
/// Target language for the generated message (ISO 639-1 code).
#[serde(default)]
pub locale: Option<String>,
/// Experimental: learn commit style from repository history.
#[serde(default)]
pub learn_from_history: bool,
/// Number of recent commits sampled for style learning.
#[serde(default = "default_history_sample_size")]
pub history_sample_size: usize,
/// Glob patterns for files excluded from analysis/diff context.
#[serde(default)]
pub exclude_patterns: Vec<String>,
/// Custom system prompt file overriding the built-in prompt.
#[serde(default)]
pub system_prompt_path: Option<PathBuf>,
/// Custom user prompt template file.
#[serde(default)]
pub template_path: Option<PathBuf>,
/// Commit message formatting options (`[format]` table).
#[serde(default)]
pub format: CommitFormat,
}
// Serde default functions. These must stay in sync with `Config::default()`
// below, which reuses them so the figment defaults layer matches
// deserialization defaults.
// ~4 chars per token (per the generated-config comment), so 24k chars ≈ 6k tokens.
fn default_max_context_chars() -> usize {
24_000
}
fn default_model() -> String {
"qwen3.5:4b".into()
}
// Standard local Ollama endpoint.
fn default_ollama_host() -> String {
"http://localhost:11434".into()
}
fn default_max_diff_lines() -> usize {
500
}
fn default_max_file_lines() -> usize {
100
}
// 300 s = 5 minutes.
fn default_timeout_secs() -> u64 {
300
}
fn default_temperature() -> f32 {
0.3
}
fn default_num_predict() -> u32 {
256
}
// Similarity percentage for rename detection; `validate` enforces 0–100.
fn default_rename_threshold() -> u8 {
70
}
fn default_history_sample_size() -> usize {
50
}
impl Default for Config {
// Mirrors the per-field serde defaults exactly, by calling the same
// `default_*` functions. `Config::load` feeds this into figment as the
// lowest-priority layer, so any divergence here would make the defaults
// layer disagree with deserialization defaults.
fn default() -> Self {
Self {
provider: Provider::default(),
model: default_model(),
ollama_host: default_ollama_host(),
api_key: None,
max_diff_lines: default_max_diff_lines(),
max_file_lines: default_max_file_lines(),
max_context_chars: default_max_context_chars(),
timeout_secs: default_timeout_secs(),
temperature: default_temperature(),
num_predict: default_num_predict(),
think: false,
openai_base_url: None,
anthropic_base_url: None,
rename_threshold: default_rename_threshold(),
custom_secret_patterns: Vec::new(),
disabled_secret_patterns: Vec::new(),
locale: None,
learn_from_history: false,
history_sample_size: default_history_sample_size(),
exclude_patterns: Vec::new(),
system_prompt_path: None,
template_path: None,
format: CommitFormat::default(),
}
}
}
// Manual `Debug` impl solely so `api_key` is printed as `[REDACTED]`
// instead of the secret value. When adding a field to `Config`, add it
// here too — `.finish()` will silently omit anything not listed.
impl std::fmt::Debug for Config {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Config")
.field("provider", &self.provider)
.field("model", &self.model)
.field("ollama_host", &self.ollama_host)
// Presence is shown (Some/None) but never the key material.
.field("api_key", &self.api_key.as_ref().map(|_| "[REDACTED]"))
.field("max_diff_lines", &self.max_diff_lines)
.field("max_file_lines", &self.max_file_lines)
.field("max_context_chars", &self.max_context_chars)
.field("timeout_secs", &self.timeout_secs)
.field("temperature", &self.temperature)
.field("num_predict", &self.num_predict)
.field("think", &self.think)
.field("openai_base_url", &self.openai_base_url)
.field("anthropic_base_url", &self.anthropic_base_url)
.field("rename_threshold", &self.rename_threshold)
.field("custom_secret_patterns", &self.custom_secret_patterns)
.field("disabled_secret_patterns", &self.disabled_secret_patterns)
.field("locale", &self.locale)
.field("learn_from_history", &self.learn_from_history)
.field("history_sample_size", &self.history_sample_size)
.field("exclude_patterns", &self.exclude_patterns)
.field("system_prompt_path", &self.system_prompt_path)
.field("template_path", &self.template_path)
.field("format", &self.format)
.finish()
}
}
impl Config {
/// Builds the effective configuration by layering sources, later layers
/// overriding earlier ones: built-in defaults → project `.commitbee.toml`
/// → user config file → `COMMITBEE_*` environment variables → CLI flags.
/// Finally validates the result against `cli.command`.
///
/// # Errors
/// Returns `Error::Config` if extraction fails or validation rejects a value.
pub fn load(cli: &Cli) -> Result<Self> {
let mut figment = Figment::new().merge(Serialized::defaults(Config::default()));
let mut has_project_config = false;
// Repo-local config from the current working directory.
if let Ok(cwd) = std::env::current_dir() {
let project_config = cwd.join(".commitbee.toml");
if project_config.exists() {
has_project_config = true;
figment = figment.merge(Toml::file(&project_config));
}
}
// User-level config. NOTE(review): merged *after* the project file, so
// user settings override project settings — confirm this precedence is
// intentional (it complements the security stripping below).
if let Some(path) = Self::config_path()
&& path.exists()
{
figment = figment.merge(Toml::file(&path));
}
// Environment variables, e.g. COMMITBEE_FORMAT__INCLUDE_BODY ("__" nests).
figment = figment.merge(Env::prefixed("COMMITBEE_").split("__"));
let mut config: Config = figment
.extract()
.map_err(|e| Error::Config(e.to_string()))?;
// Security: a repo-committed .commitbee.toml must not be able to plant a
// key or redirect requests to an attacker-controlled endpoint. Re-parse
// the raw project file and strip any sensitive key it tried to set.
// NOTE(review): this clears the merged value even when it actually came
// from a higher-priority source (user config/env) — verify acceptable.
if has_project_config
&& let Ok(cwd) = std::env::current_dir()
&& let Ok(content) = fs::read_to_string(cwd.join(".commitbee.toml"))
&& let Ok(table) = content.parse::<toml::Table>()
{
if table.contains_key("api_key") {
warn!("project .commitbee.toml sets api_key — ignoring for security");
config.api_key = None;
}
if table.contains_key("openai_base_url") {
warn!("project .commitbee.toml sets openai_base_url — blocked for security");
config.openai_base_url = None;
}
if table.contains_key("anthropic_base_url") {
warn!("project .commitbee.toml sets anthropic_base_url — blocked for security");
config.anthropic_base_url = None;
}
if table.contains_key("ollama_host") {
warn!("project .commitbee.toml sets ollama_host — blocked for security");
config.ollama_host = Config::default().ollama_host;
}
}
// CLI flags override every file/env source.
config.apply_cli(cli)?;
// Fall back to the provider-conventional environment variables.
if config.api_key.is_none() {
config.api_key = match config.provider {
Provider::OpenAI => std::env::var("OPENAI_API_KEY").ok().map(SecretString::from),
Provider::Anthropic => std::env::var("ANTHROPIC_API_KEY")
.ok()
.map(SecretString::from),
Provider::Ollama => None,
};
}
// Last resort: the OS keyring (feature-gated), keyed by provider name.
#[cfg(feature = "secure-storage")]
if config.api_key.is_none() && config.provider != Provider::Ollama {
let provider_name = config.provider.to_string();
if let Ok(entry) = keyring::Entry::new("commitbee", &provider_name)
&& let Ok(key) = entry.get_password()
{
config.api_key = Some(SecretString::from(key));
}
}
config.validate(&cli.command)?;
Ok(config)
}
/// Platform config directory for commitbee, or `None` when the platform
/// directories cannot be determined.
pub fn config_dir() -> Option<PathBuf> {
    let dirs = ProjectDirs::from("", "", "commitbee")?;
    Some(dirs.config_dir().to_path_buf())
}
/// Full path of the user-level `config.toml`, if a config dir exists.
pub fn config_path() -> Option<PathBuf> {
    let mut path = Self::config_dir()?;
    path.push("config.toml");
    Some(path)
}
/// Applies CLI flags on top of the merged file/env configuration; flags
/// always win. Returns `Error::Config` for an unrecognized provider name.
fn apply_cli(&mut self, cli: &Cli) -> Result<()> {
    if let Some(ref raw) = cli.provider {
        let normalized = raw.to_lowercase();
        self.provider = match normalized.as_str() {
            "ollama" => Provider::Ollama,
            "openai" => Provider::OpenAI,
            "anthropic" => Provider::Anthropic,
            other => {
                return Err(Error::Config(format!(
                    "Unknown provider '{}'. Valid options: ollama, openai, anthropic",
                    other
                )));
            }
        };
    }
    if let Some(ref model) = cli.model {
        self.model = model.clone();
    }
    // --no-scope only disables; it never force-enables scope.
    if cli.no_scope {
        self.format.include_scope = false;
    }
    if let Some(ref locale) = cli.locale {
        self.locale = Some(locale.clone());
    }
    // CLI excludes are additive to config-file excludes.
    if !cli.exclude.is_empty() {
        self.exclude_patterns.extend(cli.exclude.iter().cloned());
    }
    Ok(())
}
/// Whether the invoked command must have an API key configured up front:
/// the bare invocation (no subcommand) and `doctor` do; all other
/// subcommands do not.
fn requires_api_key(command: &Option<crate::cli::Commands>) -> bool {
    matches!(command, None | Some(crate::cli::Commands::Doctor))
}
fn validate(&self, command: &Option<crate::cli::Commands>) -> Result<()> {
if Self::requires_api_key(command)
&& self.provider != Provider::Ollama
&& self.api_key.is_none()
{
return Err(Error::Config(format!(
"{} requires an API key. Set COMMITBEE_API_KEY, {}_API_KEY, or store securely with: commitbee config set-key {}",
self.provider,
format!("{:?}", self.provider).to_uppercase(),
format!("{:?}", self.provider).to_lowercase()
)));
}
if !(10..=10_000).contains(&self.max_diff_lines) {
return Err(Error::Config(format!(
"max_diff_lines must be 10–10000, got {}",
self.max_diff_lines
)));
}
if !(10..=1_000).contains(&self.max_file_lines) {
return Err(Error::Config(format!(
"max_file_lines must be 10–1000, got {}",
self.max_file_lines
)));
}
if !(1_000..=200_000).contains(&self.max_context_chars) {
return Err(Error::Config(format!(
"max_context_chars must be 1000–200000, got {}",
self.max_context_chars
)));
}
if !(1..=3600).contains(&self.timeout_secs) {
return Err(Error::Config(format!(
"timeout_secs must be 1–3600, got {}",
self.timeout_secs
)));
}
if !(0.0..=2.0).contains(&self.temperature) {
return Err(Error::Config(format!(
"temperature must be 0.0–1.0, got {}",
self.temperature
)));
}
if self.rename_threshold > 100 {
return Err(Error::Config(format!(
"rename_threshold must be 0–100, got {}",
self.rename_threshold
)));
}
if self.ollama_host.is_empty() {
return Err(Error::Config("ollama_host cannot be empty".into()));
}
if !self.ollama_host.starts_with("http://") && !self.ollama_host.starts_with("https://") {
return Err(Error::Config(format!(
"ollama_host must start with http:// or https://, got '{}'",
self.ollama_host
)));
}
if let Some(ref url) = self.openai_base_url
&& !url.starts_with("http://")
&& !url.starts_with("https://")
{
return Err(Error::Config(format!(
"openai_base_url must start with http:// or https://, got '{}'",
url
)));
}
if let Some(ref url) = self.anthropic_base_url
&& !url.starts_with("http://")
&& !url.starts_with("https://")
{
return Err(Error::Config(format!(
"anthropic_base_url must start with http:// or https://, got '{}'",
url
)));
}
Ok(())
}
/// Writes a freshly generated, annotated default config to the user config
/// directory (creating parent directories as needed) and returns the path.
///
/// On Unix the file is created with mode 0600 from the start: the config
/// may later hold sensitive values, and the previous write-then-chmod
/// sequence left a window where the file was readable by other users.
///
/// # Errors
/// `Error::Config` when no config directory can be determined; otherwise
/// propagates I/O errors from directory creation or file writing.
pub fn create_default() -> Result<PathBuf> {
    let Some(dir) = Self::config_dir() else {
        return Err(Error::Config("Cannot determine config directory".into()));
    };
    fs::create_dir_all(&dir)?;
    let path = dir.join("config.toml");
    let content = Self::generate_default_config();
    #[cfg(unix)]
    {
        use std::io::Write;
        use std::os::unix::fs::{OpenOptionsExt, PermissionsExt};
        // Create owner-read/write only, atomically with creation.
        let mut file = fs::OpenOptions::new()
            .write(true)
            .create(true)
            .truncate(true)
            .mode(0o600)
            .open(&path)?;
        file.write_all(content.as_bytes())?;
        // `mode` applies only when the file is newly created; force 0600 on
        // a pre-existing file as well (matches the previous behavior).
        let mut perms = fs::metadata(&path)?.permissions();
        perms.set_mode(0o600);
        fs::set_permissions(&path, perms)?;
    }
    #[cfg(not(unix))]
    fs::write(&path, &content)?;
    Ok(path)
}
/// Renders the contents of an annotated default `config.toml` as a string.
///
/// Serializes `Config::default()` to a TOML table so each value is already
/// formatted in valid TOML syntax, then walks a hand-maintained field list
/// emitting every setting with descriptive `#` comments. Fields marked
/// `CommentedOut` are written as `# key = value` suggestions; fields with
/// no default and no example are emitted as comments only.
pub fn generate_default_config() -> String {
let default = Config::default();
// Round-trip through TOML so `table.get(key)` yields values that Display
// as valid TOML (quoted strings, arrays, ...). `api_key` is skipped by
// `skip_serializing` and so never appears here.
let table: toml::Table = {
let s = toml::to_string(&default).expect("Config serializes to TOML");
toml::from_str(&s).expect("round-trips as TOML table")
};
// Whether an entry is emitted as a live setting or a commented-out example.
#[derive(Clone, Copy)]
enum Show {
Active,
CommentedOut,
}
// One entry per emitted key. `key` must match the serde field name so the
// lookup in `table` succeeds; `example` supplies a literal for fields whose
// default is `None`/absent from the serialized table.
struct Field {
key: &'static str,
comment: &'static str,
show: Show,
example: Option<&'static str>,
}
let fields: &[Field] = &[
Field {
key: "provider",
comment: "LLM provider: ollama, openai, anthropic",
show: Show::Active,
example: None,
},
Field {
key: "model",
comment: "Model name (for Ollama, use `ollama list` to see available)",
show: Show::Active,
example: None,
},
Field {
key: "ollama_host",
comment: "Ollama server URL",
show: Show::Active,
example: None,
},
Field {
key: "max_diff_lines",
comment: "Maximum lines of diff to include in prompt",
show: Show::Active,
example: None,
},
Field {
key: "max_file_lines",
comment: "Maximum lines per file in diff",
show: Show::Active,
example: None,
},
Field {
key: "num_predict",
comment: "Maximum tokens to generate\n\
Increase to 8192+ if using thinking models with think = true",
show: Show::CommentedOut,
example: None,
},
Field {
key: "think",
comment: "Enable thinking/reasoning for Ollama models\n\
When enabled, models like qwen3 will reason before responding.\n\
Requires higher num_predict (8192+) to accommodate thinking tokens.",
show: Show::CommentedOut,
example: None,
},
Field {
key: "max_context_chars",
comment: "Maximum context characters for LLM prompt (~4 chars per token)\n\
Increase for larger models (e.g., 48000 for 16K context)",
show: Show::CommentedOut,
example: None,
},
Field {
key: "timeout_secs",
comment: "Request timeout in seconds",
show: Show::CommentedOut,
example: None,
},
Field {
key: "temperature",
comment: "LLM temperature (0.0-1.0, default 0.3)",
show: Show::CommentedOut,
example: None,
},
Field {
key: "rename_threshold",
comment: "Rename detection similarity threshold (0-100)\n\
Set to 0 to disable rename detection",
show: Show::CommentedOut,
example: None,
},
Field {
key: "locale",
comment: "Language for commit message generation (ISO 639-1 code)\n\
Type and scope remain in English per Conventional Commits spec.",
show: Show::CommentedOut,
example: Some("\"de\""),
},
Field {
key: "custom_secret_patterns",
comment: "Custom secret patterns (additional regex patterns for secret scanning)",
show: Show::CommentedOut,
example: Some("[\"CUSTOM_KEY_[a-zA-Z0-9]{32}\"]"),
},
Field {
key: "disabled_secret_patterns",
comment: "Disable built-in secret patterns by name",
show: Show::CommentedOut,
example: Some("[\"Generic Secret (unquoted)\"]"),
},
Field {
key: "learn_from_history",
comment: "Experimental: learn commit style from repository history\n\
Analyzes recent commits to learn scope naming, type patterns, and\n\
subject phrasing conventions for the repository.",
show: Show::CommentedOut,
example: None,
},
Field {
key: "history_sample_size",
comment: "Number of recent commits to sample for style learning",
show: Show::CommentedOut,
example: None,
},
Field {
key: "exclude_patterns",
comment: "Exclude files matching glob patterns from analysis and diff context\n\
Excluded files are listed in output but not sent to the LLM.",
show: Show::CommentedOut,
example: Some("[\"*.lock\", \"**/*.generated.*\"]"),
},
Field {
key: "openai_base_url",
comment: "Base URL for OpenAI-compatible APIs",
show: Show::CommentedOut,
example: Some("\"https://api.openai.com/v1\""),
},
Field {
key: "anthropic_base_url",
comment: "Base URL for Anthropic API",
show: Show::CommentedOut,
example: Some("\"https://api.anthropic.com/v1\""),
},
Field {
key: "system_prompt_path",
comment: "Custom system prompt file (overrides built-in prompt)",
show: Show::CommentedOut,
example: Some("\"/path/to/system_prompt.txt\""),
},
Field {
key: "template_path",
comment: "Custom user prompt template file\n\
Supports {{diff}}, {{symbols}}, {{files}} variables",
show: Show::CommentedOut,
example: Some("\"/path/to/template.txt\""),
},
];
// Keys and comments for the nested `[format]` table, emitted last.
let format_fields: &[(&str, &str)] = &[
("include_body", "Include body/description in commit message"),
(
"include_scope",
"Include scope in commit type, e.g., feat(scope): subject",
),
(
"lowercase_subject",
"Enforce lowercase first character of subject (conventional commits best practice)",
),
];
let mut out = String::from("# CommitBee Configuration\n");
for field in fields {
out.push('\n');
// Multi-line descriptions become consecutive `# ` comment lines.
for line in field.comment.lines() {
out.push_str("# ");
out.push_str(line);
out.push('\n');
}
let val_str = if let Some(v) = table.get(field.key) {
if v.is_float() {
// Prints the f32 field directly rather than the TOML value —
// presumably to avoid f32→f64 widening noise in the output.
// NOTE(review): hardcodes `default.temperature` for ANY float
// key; correct only while temperature is the sole float field.
format!("{} = {}", field.key, default.temperature)
} else {
format!("{} = {v}", field.key)
}
} else if let Some(ex) = field.example {
format!("{} = {ex}", field.key)
} else {
// No default in the table and no example: comment only.
continue;
};
if matches!(field.show, Show::CommentedOut) {
out.push_str("# ");
}
out.push_str(&val_str);
out.push('\n');
}
// `[format]` sub-table, emitted with its defaults as active settings.
out.push_str("\n# Commit message format options\n[format]\n");
if let Some(toml::Value::Table(fmt)) = table.get("format") {
for (key, comment) in format_fields {
if let Some(v) = fmt.get(*key) {
out.push_str(&format!("# {comment}\n{key} = {v}\n"));
}
}
}
out
}
}