use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use tracing;
/// Serde default for `PawanConfig::config_version`: config files written
/// before versioning existed deserialize as schema version 1.
const fn default_config_version() -> u32 {
    1
}
/// Serde default for `PawanConfig::tool_call_idle_timeout_secs`, in seconds.
/// Also used by the v1 migration to backfill a zero timeout.
const fn default_tool_idle_timeout() -> u64 {
    300
}
/// Outcome of a config-schema migration attempt.
#[derive(Debug)]
pub struct MigrationResult {
    /// True when the version actually changed.
    pub migrated: bool,
    pub from_version: u32,
    pub to_version: u32,
    /// Backup file written before migrating, if one was created.
    pub backup_path: Option<std::path::PathBuf>,
}

impl MigrationResult {
    /// Builds a result; `migrated` is derived from the version pair.
    pub fn new(from_version: u32, to_version: u32, backup_path: Option<std::path::PathBuf>) -> Self {
        let migrated = from_version != to_version;
        Self {
            migrated,
            from_version,
            to_version,
            backup_path,
        }
    }

    /// Result for a config that was already at the requested version.
    pub fn no_migration(version: u32) -> Self {
        Self::new(version, version, None)
    }
}
/// Highest config schema version this build knows how to migrate to.
const LATEST_CONFIG_VERSION: u32 = 1;
pub fn migrate_to_latest(config: &mut PawanConfig, config_path: Option<&PathBuf>) -> MigrationResult {
let current_version = config.config_version;
if current_version >= LATEST_CONFIG_VERSION {
return MigrationResult::no_migration(current_version);
}
let backup_path = config_path.and_then(|path| create_backup(path).ok());
let mut version = current_version;
while version < LATEST_CONFIG_VERSION {
version = match migrate_to_version(config, version + 1) {
Ok(v) => v,
Err(e) => {
tracing::error!(
from_version = version,
to_version = LATEST_CONFIG_VERSION,
error = %e,
"Config migration failed"
);
return MigrationResult::new(current_version, version, backup_path);
}
};
}
config.config_version = LATEST_CONFIG_VERSION;
MigrationResult::new(current_version, LATEST_CONFIG_VERSION, backup_path)
}
/// Applies the single migration step that brings `config` up to
/// `target_version`, returning the version reached on success.
fn migrate_to_version(config: &mut PawanConfig, target_version: u32) -> Result<u32, String> {
    if target_version == 1 {
        migrate_to_v1(config)
    } else {
        Err(format!("Unknown target version: {}", target_version))
    }
}
/// Migration step to schema version 1: stamps the version and backfills a
/// zero `tool_call_idle_timeout_secs` with the standard default.
fn migrate_to_v1(config: &mut PawanConfig) -> Result<u32, String> {
    config.config_version = 1;
    let timeout_unset = config.tool_call_idle_timeout_secs == 0;
    if timeout_unset {
        config.tool_call_idle_timeout_secs = default_tool_idle_timeout();
    }
    tracing::info!("Config migrated to version 1");
    Ok(1)
}
fn create_backup(config_path: &PathBuf) -> Result<PathBuf, String> {
let timestamp = chrono::Utc::now().format("%Y%m%d_%H%M%S");
let backup_path = config_path.with_extension(format!("toml.backup.{}", timestamp));
std::fs::copy(config_path, &backup_path).map_err(|e| {
format!("Failed to create backup at {}: {}", backup_path.display(), e)
})?;
tracing::info!(backup = %backup_path.display(), "Config backup created");
Ok(backup_path)
}
pub fn save_config(config: &PawanConfig, path: &PathBuf) -> Result<(), String> {
let toml_string = toml::to_string_pretty(config).map_err(|e| {
format!("Failed to serialize config to TOML: {}", e)
})?;
std::fs::write(path, toml_string).map_err(|e| {
format!("Failed to write config to {}: {}", path.display(), e)
})?;
tracing::info!(path = %path.display(), "Config saved");
Ok(())
}
/// Which LLM backend to talk to. Serialized in lowercase ("nvidia", "ollama",
/// "openai", "mlx").
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
#[serde(rename_all = "lowercase")]
pub enum LlmProvider {
    /// NVIDIA endpoints (the default).
    #[default]
    Nvidia,
    Ollama,
    OpenAI,
    Mlx,
}
/// Top-level configuration for Pawan, loaded from `pawan.toml` (or the
/// `[pawan]` section of `ares.toml`). `#[serde(default)]` on the struct means
/// any field absent from the file falls back to its `Default` value.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct PawanConfig {
    // Schema version consumed by `migrate_to_latest`; pre-versioned files
    // deserialize as 1.
    #[serde(default = "default_config_version")]
    pub config_version: u32,
    pub provider: LlmProvider,
    pub model: String,
    // Optional API base URL override for the provider.
    pub base_url: Option<String>,
    pub dry_run: bool,
    pub auto_backup: bool,
    pub require_git_clean: bool,
    pub bash_timeout_secs: u64,
    // Idle timeout for tool calls in seconds; a 0 here is backfilled with the
    // default by the v1 migration.
    #[serde(default = "default_tool_idle_timeout")]
    pub tool_call_idle_timeout_secs: u64,
    pub max_file_size_kb: usize,
    pub max_tool_iterations: usize,
    pub max_context_tokens: usize,
    // Overrides DEFAULT_SYSTEM_PROMPT when set (see `get_system_prompt`).
    pub system_prompt: Option<String>,
    pub temperature: f32,
    pub top_p: f32,
    pub max_tokens: usize,
    pub thinking_budget: usize,
    pub max_retries: usize,
    // Models tried in order when the primary model fails; also settable via
    // PAWAN_FALLBACK_MODELS (comma-separated) — TODO confirm retry semantics
    // against the caller.
    pub fallback_models: Vec<String>,
    pub max_result_chars: usize,
    // Gates `use_thinking_mode` together with the model name.
    pub reasoning_mode: bool,
    pub healing: HealingConfig,
    // Named codebases pawan can operate on; `Default` seeds a "self" target.
    pub targets: HashMap<String, TargetConfig>,
    pub tui: TuiConfig,
    // MCP server definitions keyed by name; `auto_discover_mcp_servers` may
    // add entries for binaries found on PATH.
    #[serde(default)]
    pub mcp: HashMap<String, McpServerEntry>,
    // Per-tool permission overrides consulted by `ToolPermission::resolve`.
    #[serde(default)]
    pub permissions: HashMap<String, ToolPermission>,
    pub cloud: Option<CloudConfig>,
    #[serde(default)]
    pub models: ModelRouting,
    #[serde(default)]
    pub eruka: crate::eruka_bridge::ErukaConfig,
    #[serde(default)]
    pub use_ares_backend: bool,
    #[serde(default)]
    pub use_coordinator: bool,
    // Skills repository path; env var PAWAN_SKILLS_REPO takes priority
    // (see `resolve_skills_repo`).
    #[serde(default)]
    pub skills_repo: Option<PathBuf>,
    #[serde(default)]
    pub local_first: bool,
    #[serde(default)]
    pub local_endpoint: Option<String>,
}
/// Optional per-intent model overrides; `route` picks one by keyword-matching
/// the user's query. Unset categories never match.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ModelRouting {
    /// Model for code-writing queries (implement/refactor/fix/...).
    pub code: Option<String>,
    /// Model for analysis/search-style queries (analyze/review/explain/...).
    pub orchestrate: Option<String>,
    /// Model for command-execution queries (run/build/deploy/...).
    pub execute: Option<String>,
}
impl ModelRouting {
    /// Picks a configured model for `query` by case-insensitive keyword match.
    ///
    /// Categories are tried in fixed priority order — code, then orchestrate,
    /// then execute — and a category only participates when its model is set.
    /// Returns `None` when no configured category's keywords match.
    pub fn route(&self, query: &str) -> Option<&str> {
        const CODE_SIGNALS: &[&str] = &["implement", "write", "create", "refactor", "fix", "add test",
            "add function", "struct", "enum", "trait", "algorithm", "data structure"];
        const ORCH_SIGNALS: &[&str] = &["search", "find", "analyze", "review", "explain", "compare",
            "list", "check", "verify", "diagnose", "audit"];
        const EXEC_SIGNALS: &[&str] = &["run", "execute", "bash", "cargo", "test", "build",
            "deploy", "install", "commit"];
        let q = query.to_lowercase();
        let categories = [
            (&self.code, CODE_SIGNALS),
            (&self.orchestrate, ORCH_SIGNALS),
            (&self.execute, EXEC_SIGNALS),
        ];
        categories
            .into_iter()
            .filter(|(model, _)| model.is_some())
            .find(|(_, signals)| signals.iter().any(|s| q.contains(s)))
            .and_then(|(model, _)| model.as_deref())
    }
}
/// Secondary provider/model configuration (the optional `[cloud]` table in
/// `PawanConfig`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CloudConfig {
    pub provider: LlmProvider,
    pub model: String,
    /// Additional models to fall back to; defaults to empty.
    #[serde(default)]
    pub fallback_models: Vec<String>,
}
/// Per-tool permission policy, configurable under `[permissions]` in the
/// config file. Serialized in lowercase ("allow"/"deny"/"prompt").
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum ToolPermission {
    /// Run the tool without asking.
    Allow,
    /// Refuse to run the tool.
    Deny,
    /// Ask the user before running the tool.
    Prompt,
}
impl ToolPermission {
    /// Looks up the permission for tool `name` in the user's configured
    /// `permissions` map, falling back to `Allow` when no entry exists.
    pub fn resolve(name: &str, permissions: &HashMap<String, ToolPermission>) -> Self {
        if let Some(p) = permissions.get(name) {
            return p.clone();
        }
        // NOTE(review): the original matched a list of mutating tools ("bash",
        // "git_commit", "write_file", "edit_file_lines", "insert_after",
        // "append_file") but returned `Allow` from both that arm and the
        // catch-all, so the distinction was dead code. Behavior (default-allow)
        // is preserved here; if those tools were meant to default to `Prompt`,
        // that is a separate, behavior-changing fix.
        ToolPermission::Allow
    }
}
impl Default for PawanConfig {
    /// Built-in defaults, used both as the no-config-file fallback and as the
    /// per-field fallback for `#[serde(default)]` deserialization.
    fn default() -> Self {
        // Seed a single "self" target pointing at the current directory.
        let mut targets = HashMap::new();
        targets.insert(
            "self".to_string(),
            TargetConfig {
                path: PathBuf::from("."),
                description: "Current project codebase".to_string(),
            },
        );
        Self {
            provider: LlmProvider::Nvidia,
            config_version: default_config_version(),
            model: crate::DEFAULT_MODEL.to_string(),
            base_url: None,
            dry_run: false,
            auto_backup: true,
            require_git_clean: false,
            bash_timeout_secs: crate::DEFAULT_BASH_TIMEOUT,
            tool_call_idle_timeout_secs: default_tool_idle_timeout(),
            max_file_size_kb: 1024,
            max_tool_iterations: crate::MAX_TOOL_ITERATIONS,
            max_context_tokens: 100000,
            system_prompt: None,
            temperature: 1.0,
            top_p: 0.95,
            max_tokens: 8192,
            thinking_budget: 0,
            reasoning_mode: true,
            max_retries: 3,
            fallback_models: Vec::new(),
            max_result_chars: 8000,
            healing: HealingConfig::default(),
            targets,
            tui: TuiConfig::default(),
            mcp: HashMap::new(),
            permissions: HashMap::new(),
            cloud: None,
            models: ModelRouting::default(),
            eruka: crate::eruka_bridge::ErukaConfig::default(),
            use_ares_backend: false,
            use_coordinator: false,
            skills_repo: None,
            local_first: false,
            local_endpoint: None,
        }
    }
}
/// Settings for the self-healing workflow (the `[healing]` table). All fields
/// default via the struct-level `#[serde(default)]`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct HealingConfig {
    pub auto_commit: bool,
    pub fix_errors: bool,
    pub fix_warnings: bool,
    pub fix_tests: bool,
    pub generate_docs: bool,
    #[serde(default)]
    pub fix_security: bool,
    pub max_attempts: usize,
    // Custom verification command — presumably run after fixes; confirm
    // semantics against the healing driver.
    #[serde(default)]
    pub verify_cmd: Option<String>,
}
impl Default for HealingConfig {
    /// Conservative defaults: fix errors/warnings/tests, but never commit,
    /// generate docs, or apply security fixes automatically.
    fn default() -> Self {
        Self {
            fix_errors: true,
            fix_warnings: true,
            fix_tests: true,
            fix_security: false,
            generate_docs: false,
            auto_commit: false,
            max_attempts: 3,
            verify_cmd: None,
        }
    }
}
/// A named codebase pawan can operate on (see `PawanConfig::targets`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TargetConfig {
    /// Root directory of the target.
    pub path: PathBuf,
    /// Human-readable description of the target.
    pub description: String,
}
/// Terminal UI settings (the `[tui]` table); every field falls back to
/// `Default` when absent.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct TuiConfig {
    pub syntax_highlighting: bool,
    // Highlighting theme name (default "base16-ocean.dark").
    pub theme: String,
    pub line_numbers: bool,
    pub mouse_support: bool,
    pub scroll_speed: usize,
    pub max_history: usize,
    pub auto_save_enabled: bool,
    pub auto_save_interval_minutes: u32,
    // Auto-save destination; None presumably means a default location —
    // confirm against the TUI auto-save code.
    pub auto_save_dir: Option<std::path::PathBuf>,
}
impl Default for TuiConfig {
    /// Everything on by default: highlighting, line numbers, mouse support,
    /// and five-minute auto-save.
    fn default() -> Self {
        Self {
            theme: "base16-ocean.dark".to_string(),
            syntax_highlighting: true,
            line_numbers: true,
            mouse_support: true,
            scroll_speed: 3,
            max_history: 1000,
            auto_save_enabled: true,
            auto_save_interval_minutes: 5,
            auto_save_dir: None,
        }
    }
}
/// One MCP server definition (an entry under `[mcp.<name>]`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct McpServerEntry {
    /// Executable used to launch the server.
    pub command: String,
    /// Command-line arguments; defaults to none.
    #[serde(default)]
    pub args: Vec<String>,
    /// Extra environment variables for the server process; defaults to none.
    #[serde(default)]
    pub env: HashMap<String, String>,
    /// Entries are enabled unless explicitly turned off in the config.
    #[serde(default = "default_true")]
    pub enabled: bool,
}
/// Serde default for `McpServerEntry::enabled`.
fn default_true() -> bool {
    true
}
impl PawanConfig {
    /// Loads the config from `path`, or discovers one by checking, in order:
    /// `./pawan.toml`, `./ares.toml`, then `~/.config/pawan/pawan.toml`.
    /// Returns `Self::default()` when no file is found.
    ///
    /// A file named `ares.toml` is treated as a foreign config: only its
    /// `[pawan]` table is read (defaults are returned when that table is
    /// absent) and no migration runs. Any other file is parsed as a full
    /// `PawanConfig`, migrated to the latest schema, and written back when
    /// the migration changed anything.
    ///
    /// # Errors
    /// Returns `PawanError::Config` when the file cannot be read or parsed.
    pub fn load(path: Option<&PathBuf>) -> crate::Result<Self> {
        let config_path = path.cloned().or_else(|| {
            let pawan_toml = PathBuf::from("pawan.toml");
            if pawan_toml.exists() {
                return Some(pawan_toml);
            }
            let ares_toml = PathBuf::from("ares.toml");
            if ares_toml.exists() {
                return Some(ares_toml);
            }
            if let Some(home) = dirs::home_dir() {
                let global = home.join(".config/pawan/pawan.toml");
                if global.exists() {
                    return Some(global);
                }
            }
            None
        });
        match config_path {
            Some(path) => {
                let content = std::fs::read_to_string(&path).map_err(|e| {
                    crate::PawanError::Config(format!("Failed to read {}: {}", path.display(), e))
                })?;
                if path.file_name().map(|n| n == "ares.toml").unwrap_or(false) {
                    // ares.toml belongs to another tool; pawan settings live
                    // under its [pawan] table and everything else is ignored.
                    let value: toml::Value = toml::from_str(&content).map_err(|e| {
                        crate::PawanError::Config(format!(
                            "Failed to parse {}: {}",
                            path.display(),
                            e
                        ))
                    })?;
                    if let Some(pawan_section) = value.get("pawan") {
                        let config: PawanConfig =
                            pawan_section.clone().try_into().map_err(|e| {
                                crate::PawanError::Config(format!(
                                    "Failed to parse [pawan] section: {}",
                                    e
                                ))
                            })?;
                        return Ok(config);
                    }
                    // No [pawan] section: behave as if no config file existed.
                    Ok(Self::default())
                } else {
                    let mut config: PawanConfig = toml::from_str(&content).map_err(|e| {
                        crate::PawanError::Config(format!(
                            "Failed to parse {}: {}",
                            path.display(),
                            e
                        ))
                    })?;
                    // Upgrade older schemas in place; persisting is best-effort
                    // so a read-only file doesn't break loading.
                    let migration_result = migrate_to_latest(&mut config, Some(&path));
                    if migration_result.migrated {
                        tracing::info!(
                            from_version = migration_result.from_version,
                            to_version = migration_result.to_version,
                            backup = ?migration_result.backup_path,
                            "Config migrated"
                        );
                        if let Err(e) = save_config(&config, &path) {
                            tracing::warn!(error = %e, "Failed to save migrated config");
                        }
                    }
                    Ok(config)
                }
            }
            None => Ok(Self::default()),
        }
    }

    /// Applies `PAWAN_*` environment-variable overrides on top of the loaded
    /// config. Unparseable numeric values are silently ignored; an unknown
    /// provider name logs a warning and leaves the provider unchanged.
    pub fn apply_env_overrides(&mut self) {
        if let Ok(model) = std::env::var("PAWAN_MODEL") {
            self.model = model;
        }
        if let Ok(provider) = std::env::var("PAWAN_PROVIDER") {
            match provider.to_lowercase().as_str() {
                "nvidia" | "nim" => self.provider = LlmProvider::Nvidia,
                "ollama" => self.provider = LlmProvider::Ollama,
                "openai" => self.provider = LlmProvider::OpenAI,
                "mlx" | "mlx-lm" => self.provider = LlmProvider::Mlx,
                _ => tracing::warn!(provider = provider.as_str(), "Unknown PAWAN_PROVIDER, ignoring"),
            }
        }
        if let Ok(temp) = std::env::var("PAWAN_TEMPERATURE") {
            if let Ok(t) = temp.parse::<f32>() {
                self.temperature = t;
            }
        }
        if let Ok(tokens) = std::env::var("PAWAN_MAX_TOKENS") {
            if let Ok(t) = tokens.parse::<usize>() {
                self.max_tokens = t;
            }
        }
        if let Ok(iters) = std::env::var("PAWAN_MAX_ITERATIONS") {
            if let Ok(i) = iters.parse::<usize>() {
                self.max_tool_iterations = i;
            }
        }
        if let Ok(ctx) = std::env::var("PAWAN_MAX_CONTEXT_TOKENS") {
            if let Ok(c) = ctx.parse::<usize>() {
                self.max_context_tokens = c;
            }
        }
        if let Ok(models) = std::env::var("PAWAN_FALLBACK_MODELS") {
            // Comma-separated list; blank segments are dropped.
            self.fallback_models = models.split(',').map(|s| s.trim().to_string()).filter(|s| !s.is_empty()).collect();
        }
        if let Ok(chars) = std::env::var("PAWAN_MAX_RESULT_CHARS") {
            if let Ok(c) = chars.parse::<usize>() {
                self.max_result_chars = c;
            }
        }
    }

    /// Looks up a configured target codebase by name.
    pub fn get_target(&self, name: &str) -> Option<&TargetConfig> {
        self.targets.get(name)
    }

    /// Builds the full system prompt: the configured prompt (or the built-in
    /// default), plus a project-context section when a context file exists in
    /// the CWD, plus an active-skill section when `SKILL.md` parses.
    pub fn get_system_prompt(&self) -> String {
        let base = self
            .system_prompt
            .clone()
            .unwrap_or_else(|| DEFAULT_SYSTEM_PROMPT.to_string());
        let mut prompt = base;
        if let Some((filename, ctx)) = Self::load_context_file() {
            prompt = format!("{}\n\n## Project Context (from {})\n\n{}", prompt, filename, ctx);
        }
        if let Some(skill_ctx) = Self::load_skill_context() {
            prompt = format!("{}\n\n## Active Skill (from SKILL.md)\n\n{}", prompt, skill_ctx);
        }
        prompt
    }

    /// Returns `(filename, contents)` of the first non-empty project context
    /// file found in the CWD, searched in priority order.
    fn load_context_file() -> Option<(String, String)> {
        for path in &["PAWAN.md", "AGENTS.md", "CLAUDE.md", ".pawan/context.md"] {
            let p = PathBuf::from(path);
            if p.exists() {
                if let Ok(content) = std::fs::read_to_string(&p) {
                    if !content.trim().is_empty() {
                        return Some((path.to_string(), content));
                    }
                }
            }
        }
        None
    }

    /// Formats `./SKILL.md` (name, description, allowed tools, body) for
    /// injection into the system prompt. Returns `None` when the file is
    /// absent or fails to parse (parse failures are logged).
    fn load_skill_context() -> Option<String> {
        use thulp_skill_files::SkillFile;
        let skill_path = std::path::Path::new("SKILL.md");
        if !skill_path.exists() {
            return None;
        }
        match SkillFile::parse(skill_path) {
            Ok(skill) => {
                let name = skill.effective_name();
                let desc = skill.frontmatter.description.as_deref().unwrap_or("no description");
                let tools_str = match &skill.frontmatter.allowed_tools {
                    Some(tools) => tools.join(", "),
                    None => "all".to_string(),
                };
                Some(format!(
                    "[Skill: {}] {}\nAllowed tools: {}\n---\n{}",
                    name, desc, tools_str, skill.content
                ))
            }
            Err(e) => {
                tracing::warn!("Failed to parse SKILL.md: {}", e);
                None
            }
        }
    }

    /// Resolves the skills repository directory: `PAWAN_SKILLS_REPO` env var
    /// first, then `config.skills_repo`, then `~/.config/pawan/skills`.
    /// A candidate that is not an existing directory is skipped (with a
    /// warning for the explicit env/config sources).
    pub fn resolve_skills_repo(&self) -> Option<PathBuf> {
        if let Ok(env_path) = std::env::var("PAWAN_SKILLS_REPO") {
            let p = PathBuf::from(env_path);
            if p.is_dir() {
                return Some(p);
            }
            tracing::warn!(path = %p.display(), "PAWAN_SKILLS_REPO set but directory does not exist");
        }
        if let Some(ref p) = self.skills_repo {
            if p.is_dir() {
                return Some(p.clone());
            }
            tracing::warn!(path = %p.display(), "config.skills_repo set but directory does not exist");
        }
        if let Some(home) = dirs::home_dir() {
            let default = home.join(".config").join("pawan").join("skills");
            if default.is_dir() {
                return Some(default);
            }
        }
        None
    }

    /// Adds `[mcp]` entries for known MCP server binaries found on PATH
    /// (eruka-mcp, daedra, deagle-mcp). Existing entries are never overwritten,
    /// which also makes repeat calls idempotent. Returns the names added.
    pub fn auto_discover_mcp_servers(&mut self) -> Vec<String> {
        let mut discovered = Vec::new();
        if !self.mcp.contains_key("eruka") && which::which("eruka-mcp").is_ok() {
            self.mcp.insert(
                "eruka".to_string(),
                McpServerEntry {
                    command: "eruka-mcp".to_string(),
                    args: vec!["--transport".to_string(), "stdio".to_string()],
                    env: HashMap::new(),
                    enabled: true,
                },
            );
            discovered.push("eruka".to_string());
            tracing::info!("auto-discovered eruka-mcp");
        }
        if !self.mcp.contains_key("daedra") && which::which("daedra").is_ok() {
            self.mcp.insert(
                "daedra".to_string(),
                McpServerEntry {
                    command: "daedra".to_string(),
                    args: vec![
                        "serve".to_string(),
                        "--transport".to_string(),
                        "stdio".to_string(),
                        "--quiet".to_string(),
                    ],
                    env: HashMap::new(),
                    enabled: true,
                },
            );
            discovered.push("daedra".to_string());
            tracing::info!("auto-discovered daedra");
        }
        if !self.mcp.contains_key("deagle") && which::which("deagle-mcp").is_ok() {
            self.mcp.insert(
                "deagle".to_string(),
                McpServerEntry {
                    command: "deagle-mcp".to_string(),
                    args: vec!["--transport".to_string(), "stdio".to_string()],
                    env: HashMap::new(),
                    enabled: true,
                },
            );
            discovered.push("deagle".to_string());
            tracing::info!("auto-discovered deagle-mcp");
        }
        discovered
    }

    /// Scans the resolved skills repo (one directory level deep) for
    /// `<dir>/SKILL.md` files and returns `(name, description, path)` triples
    /// sorted by name. Unreadable repos and unparseable skills are skipped
    /// with a log message.
    pub fn discover_skills_from_repo(&self) -> Vec<(String, String, PathBuf)> {
        use thulp_skill_files::SkillFile;
        let repo = match self.resolve_skills_repo() {
            Some(r) => r,
            None => return Vec::new(),
        };
        let mut results = Vec::new();
        let walker = match std::fs::read_dir(&repo) {
            Ok(w) => w,
            Err(e) => {
                tracing::warn!(path = %repo.display(), error = %e, "failed to read skills repo");
                return Vec::new();
            }
        };
        for entry in walker.flatten() {
            let path = entry.path();
            let skill_file = path.join("SKILL.md");
            if !skill_file.is_file() {
                continue;
            }
            match SkillFile::parse(&skill_file) {
                Ok(skill) => {
                    let name = skill.effective_name();
                    let desc = skill
                        .frontmatter
                        .description
                        .clone()
                        .unwrap_or_else(|| "(no description)".to_string());
                    results.push((name, desc, skill_file));
                }
                Err(e) => {
                    tracing::debug!(path = %skill_file.display(), error = %e, "skip unparseable skill");
                }
            }
        }
        results.sort_by(|a, b| a.0.cmp(&b.0));
        results
    }

    /// True when reasoning mode is enabled and the model name (substring
    /// match) is one of the families this build treats as thinking-capable.
    pub fn use_thinking_mode(&self) -> bool {
        self.reasoning_mode
            && (self.model.contains("deepseek")
                || self.model.contains("gemma")
                || self.model.contains("glm")
                || self.model.contains("qwen")
                || self.model.contains("mistral-small-4"))
    }
}
/// Built-in system prompt, used when `PawanConfig::system_prompt` is not set.
/// `get_system_prompt` may append project-context and active-skill sections.
pub const DEFAULT_SYSTEM_PROMPT: &str = r#"You are Pawan, an expert coding assistant.
# Efficiency
- Act immediately. Do NOT explore or plan before writing. Write code FIRST, then verify.
- write_file creates parents automatically. No mkdir needed.
- cargo check runs automatically after .rs writes — fix errors immediately.
- Use relative paths from workspace root.
- Missing tools are auto-installed via mise. Don't check dependencies.
- You have limited tool iterations. Be direct. No preamble.
# Tool Selection
Use the BEST tool for the job — do NOT use bash for things dedicated tools handle:
- File ops: read_file, write_file, edit_file, edit_file_lines, insert_after, append_file, list_directory
- Code intelligence: ast_grep (AST search + rewrite via tree-sitter — prefer for structural changes)
- Search: glob_search (files by pattern), grep_search (content by regex), ripgrep (native rg), fd (native find)
- Shell: bash (commands), sd (find-replace in files), mise (tool/task/env manager), zoxide (smart cd)
- Git: git_status, git_diff, git_add, git_commit, git_log, git_blame, git_branch, git_checkout, git_stash
- Agent: spawn_agent (delegate subtask), spawn_agents (parallel sub-agents)
- Web: mcp_daedra_web_search (ALWAYS use for web queries — never bash+curl)
Prefer ast_grep over edit_file for code refactors. Prefer grep_search over bash grep.
Prefer fd over bash find. Prefer sd over bash sed.
# Parallel Execution
Call multiple tools in a single response when they are independent.
If tool B depends on tool A's result, call them sequentially.
Never parallelize destructive operations (writes, deletes, commits).
# Read Before Modifying
Do NOT propose changes to code you haven't read. If asked to modify a file, read it first.
Understand existing code, patterns, and style before suggesting changes.
# Scope Discipline
Make minimal, focused changes. Follow existing code style.
- Don't add features, refactor, or "improve" code beyond what was asked.
- Don't add docstrings, comments, or type annotations to code you didn't change.
- A bug fix doesn't need surrounding code cleaned up.
- Don't add error handling for scenarios that can't happen.
# Executing Actions with Care
Consider reversibility and blast radius before acting:
- Freely take local, reversible actions (editing files, running tests).
- For hard-to-reverse actions (force-push, rm -rf, dropping tables), ask first.
- Match the scope of your actions to what was requested.
- Investigate before deleting — unfamiliar files may be the user's in-progress work.
- Don't use destructive shortcuts to bypass safety checks.
# Git Safety
- NEVER skip hooks (--no-verify) unless explicitly asked.
- ALWAYS create NEW commits rather than amending (amend after hook failure destroys work).
- NEVER force-push to main/master. Warn if requested.
- Prefer staging specific files over `git add -A` (avoids committing secrets).
- Only commit when explicitly asked. Don't be over-eager.
- Commit messages: focus on WHY, not WHAT. Use HEREDOC for multi-line messages.
- Use the git author from `git config user.name` / `git config user.email`.
# Output Style
Be concise. Lead with the answer, not the reasoning.
Focus text output on: decisions needing input, status updates, errors/blockers.
If you can say it in one sentence, don't use three.
After .rs writes, cargo check auto-runs — fix errors immediately if it fails.
Run tests when the task calls for it (cargo test -p <crate>).
One fix at a time. If it doesn't work, try a different approach."#;
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_provider_mlx_parsing() {
let toml = r#"
provider = "mlx"
model = "mlx-community/Qwen3.5-9B-4bit"
"#;
let config: PawanConfig = toml::from_str(toml).expect("should parse without error");
assert_eq!(config.provider, LlmProvider::Mlx);
assert_eq!(config.model, "mlx-community/Qwen3.5-9B-4bit");
}
#[test]
fn test_provider_mlx_lm_alias() {
let mut config = PawanConfig::default();
std::env::set_var("PAWAN_PROVIDER", "mlx-lm");
config.apply_env_overrides();
std::env::remove_var("PAWAN_PROVIDER");
assert_eq!(config.provider, LlmProvider::Mlx);
}
#[test]
fn test_mlx_base_url_override() {
let toml = r#"
provider = "mlx"
model = "test-model"
base_url = "http://192.168.1.100:8080/v1"
"#;
let config: PawanConfig = toml::from_str(toml).expect("should parse without error");
assert_eq!(config.provider, LlmProvider::Mlx);
assert_eq!(
config.base_url.as_deref(),
Some("http://192.168.1.100:8080/v1")
);
}
#[test]
fn test_route_code_signals() {
let routing = ModelRouting {
code: Some("code-model".into()),
orchestrate: Some("orch-model".into()),
execute: Some("exec-model".into()),
};
assert_eq!(routing.route("implement a linked list"), Some("code-model"));
assert_eq!(routing.route("refactor the parser"), Some("code-model"));
assert_eq!(routing.route("add test for config"), Some("code-model"));
assert_eq!(routing.route("Write a new struct"), Some("code-model"));
}
#[test]
fn test_route_orchestration_signals() {
let routing = ModelRouting {
code: Some("code-model".into()),
orchestrate: Some("orch-model".into()),
execute: Some("exec-model".into()),
};
assert_eq!(routing.route("analyze the error logs"), Some("orch-model"));
assert_eq!(routing.route("review this PR"), Some("orch-model"));
assert_eq!(routing.route("explain how the agent works"), Some("orch-model"));
assert_eq!(routing.route("search for uses of foo"), Some("orch-model"));
}
#[test]
fn test_route_execution_signals() {
let routing = ModelRouting {
code: Some("code-model".into()),
orchestrate: Some("orch-model".into()),
execute: Some("exec-model".into()),
};
assert_eq!(routing.route("run cargo test"), Some("exec-model"));
assert_eq!(routing.route("execute the deploy script"), Some("exec-model"));
assert_eq!(routing.route("build the project"), Some("exec-model"));
assert_eq!(routing.route("commit these changes"), Some("exec-model"));
}
#[test]
fn test_route_no_match_returns_none() {
let routing = ModelRouting {
code: Some("code-model".into()),
orchestrate: Some("orch-model".into()),
execute: Some("exec-model".into()),
};
assert_eq!(routing.route("hello world"), None);
}
#[test]
fn test_route_empty_routing_returns_none() {
let routing = ModelRouting::default();
assert_eq!(routing.route("implement something"), None);
assert_eq!(routing.route("search for bugs"), None);
}
#[test]
fn test_route_case_insensitive() {
let routing = ModelRouting {
code: Some("code-model".into()),
orchestrate: None,
execute: None,
};
assert_eq!(routing.route("IMPLEMENT a FUNCTION"), Some("code-model"));
}
#[test]
fn test_route_partial_routing() {
let routing = ModelRouting {
code: Some("code-model".into()),
orchestrate: None,
execute: None,
};
assert_eq!(routing.route("implement x"), Some("code-model"));
assert_eq!(routing.route("search for y"), None);
assert_eq!(routing.route("run tests"), None);
}
#[test]
fn test_env_override_model() {
let mut config = PawanConfig::default();
std::env::set_var("PAWAN_MODEL", "custom/model-123");
config.apply_env_overrides();
std::env::remove_var("PAWAN_MODEL");
assert_eq!(config.model, "custom/model-123");
}
#[test]
fn test_env_override_temperature() {
let mut config = PawanConfig::default();
std::env::set_var("PAWAN_TEMPERATURE", "0.9");
config.apply_env_overrides();
std::env::remove_var("PAWAN_TEMPERATURE");
assert!((config.temperature - 0.9).abs() < f32::EPSILON);
}
#[test]
fn test_env_override_invalid_temperature_ignored() {
let mut config = PawanConfig::default();
let original = config.temperature;
std::env::set_var("PAWAN_TEMPERATURE", "not_a_number");
config.apply_env_overrides();
std::env::remove_var("PAWAN_TEMPERATURE");
assert!((config.temperature - original).abs() < f32::EPSILON);
}
#[test]
fn test_env_override_max_tokens() {
let mut config = PawanConfig::default();
std::env::set_var("PAWAN_MAX_TOKENS", "16384");
config.apply_env_overrides();
std::env::remove_var("PAWAN_MAX_TOKENS");
assert_eq!(config.max_tokens, 16384);
}
#[test]
fn test_env_override_fallback_models() {
let mut config = PawanConfig::default();
std::env::set_var("PAWAN_FALLBACK_MODELS", "model-a, model-b, model-c");
config.apply_env_overrides();
std::env::remove_var("PAWAN_FALLBACK_MODELS");
assert_eq!(config.fallback_models, vec!["model-a", "model-b", "model-c"]);
}
#[test]
fn test_env_override_fallback_models_filters_empty() {
let mut config = PawanConfig::default();
std::env::set_var("PAWAN_FALLBACK_MODELS", "model-a,,, model-b,");
config.apply_env_overrides();
std::env::remove_var("PAWAN_FALLBACK_MODELS");
assert_eq!(config.fallback_models, vec!["model-a", "model-b"]);
}
#[test]
fn test_env_override_provider_variants() {
for (env_val, expected) in [
("nvidia", LlmProvider::Nvidia),
("nim", LlmProvider::Nvidia),
("ollama", LlmProvider::Ollama),
("openai", LlmProvider::OpenAI),
("mlx", LlmProvider::Mlx),
] {
let mut config = PawanConfig::default();
std::env::set_var("PAWAN_PROVIDER", env_val);
config.apply_env_overrides();
std::env::remove_var("PAWAN_PROVIDER");
assert_eq!(config.provider, expected, "PAWAN_PROVIDER={} should map to {:?}", env_val, expected);
}
}
#[test]
fn test_thinking_mode_supported_models() {
for model in ["deepseek-ai/deepseek-r1", "google/gemma-4-31b-it", "z-ai/glm5",
"qwen/qwen3.5-122b", "mistralai/mistral-small-4-119b"] {
let config = PawanConfig { model: model.into(), reasoning_mode: true, ..Default::default() };
assert!(config.use_thinking_mode(), "thinking mode should be on for {}", model);
}
}
#[test]
fn test_thinking_mode_disabled_when_reasoning_off() {
let config = PawanConfig { model: "deepseek-ai/deepseek-r1".into(), reasoning_mode: false, ..Default::default() };
assert!(!config.use_thinking_mode());
}
#[test]
fn test_thinking_mode_unsupported_models() {
for model in ["meta/llama-3.1-70b", "minimaxai/minimax-m2.5", "stepfun-ai/step-3.5-flash"] {
let config = PawanConfig { model: model.into(), reasoning_mode: true, ..Default::default() };
assert!(!config.use_thinking_mode(), "thinking mode should be off for {}", model);
}
}
#[test]
fn test_system_prompt_default() {
let config = PawanConfig::default();
let prompt = config.get_system_prompt();
assert!(prompt.contains("Pawan"), "default prompt should mention Pawan");
assert!(prompt.contains("coding"), "default prompt should mention coding");
}
#[test]
fn test_system_prompt_custom_override() {
let config = PawanConfig { system_prompt: Some("Custom system prompt.".into()), ..Default::default() };
let prompt = config.get_system_prompt();
assert!(prompt.starts_with("Custom system prompt."));
}
#[test]
fn test_config_with_cloud_fallback() {
let toml = r#"
model = "qwen/qwen3.5-122b-a10b"
[cloud]
provider = "nvidia"
model = "minimaxai/minimax-m2.5"
"#;
let config: PawanConfig = toml::from_str(toml).expect("should parse");
assert_eq!(config.model, "qwen/qwen3.5-122b-a10b");
let cloud = config.cloud.unwrap();
assert_eq!(cloud.model, "minimaxai/minimax-m2.5");
}
#[test]
fn test_config_with_healing() {
let toml = r#"
model = "test"
[healing]
fix_errors = true
fix_warnings = false
fix_tests = true
"#;
let config: PawanConfig = toml::from_str(toml).expect("should parse");
assert!(config.healing.fix_errors);
assert!(!config.healing.fix_warnings);
assert!(config.healing.fix_tests);
}
#[test]
fn test_config_defaults_sensible() {
let config = PawanConfig::default();
assert_eq!(config.provider, LlmProvider::Nvidia);
assert!(config.temperature > 0.0 && config.temperature <= 1.0);
assert!(config.max_tokens > 0);
assert!(config.max_tool_iterations > 0);
}
#[test]
fn test_context_file_search_order() {
let config = PawanConfig::default();
let prompt = config.get_system_prompt();
if std::path::Path::new("PAWAN.md").exists() {
assert!(prompt.contains("Project Context"), "Should inject project context when PAWAN.md exists");
assert!(prompt.contains("from PAWAN.md"), "Should identify source as PAWAN.md");
}
}
#[test]
fn test_system_prompt_injection_format() {
let config = PawanConfig {
system_prompt: Some("Base prompt.".into()),
..Default::default()
};
let prompt = config.get_system_prompt();
if prompt.contains("Project Context") {
assert!(prompt.contains("from "), "Injection should include source filename");
}
}
#[test]
fn test_resolve_skills_repo_env_var_takes_priority() {
let env_dir = tempfile::TempDir::new().expect("tempdir");
let cfg_dir = tempfile::TempDir::new().expect("tempdir");
let config = PawanConfig {
skills_repo: Some(cfg_dir.path().to_path_buf()),
..Default::default()
};
std::env::set_var("PAWAN_SKILLS_REPO", env_dir.path());
let resolved = config.resolve_skills_repo();
std::env::remove_var("PAWAN_SKILLS_REPO");
let resolved = resolved.expect("env var path should resolve to Some");
assert_eq!(
resolved.canonicalize().unwrap(),
env_dir.path().canonicalize().unwrap(),
"env var should take priority over config.skills_repo"
);
}
#[test]
fn test_resolve_skills_repo_env_var_nonexistent_falls_through() {
let bogus = PathBuf::from("/tmp/pawan-nonexistent-skills-repo-for-test-xyz123");
assert!(!bogus.exists(), "precondition: bogus path must not exist");
let config = PawanConfig {
skills_repo: Some(PathBuf::from("/tmp/pawan-also-nonexistent-abc789")),
..Default::default()
};
std::env::set_var("PAWAN_SKILLS_REPO", &bogus);
let resolved = config.resolve_skills_repo();
std::env::remove_var("PAWAN_SKILLS_REPO");
if let Some(ref p) = resolved {
assert_ne!(p, &bogus, "nonexistent env var path must not be returned");
assert!(p.is_dir(), "any returned path must be an existing directory");
}
}
#[test]
fn test_auto_discover_mcp_is_idempotent() {
let mut config = PawanConfig::default();
let first = config.auto_discover_mcp_servers();
let len_after_first = config.mcp.len();
let second = config.auto_discover_mcp_servers();
let len_after_second = config.mcp.len();
assert!(
second.is_empty(),
"second call must discover nothing (got {:?})",
second
);
assert_eq!(
len_after_first, len_after_second,
"mcp map length must not change between calls (first discovered {:?})",
first
);
}
#[test]
fn test_auto_discover_mcp_preserves_existing_entries() {
let mut config = PawanConfig::default();
let custom = McpServerEntry {
command: "custom-eruka".to_string(),
args: vec!["--custom-flag".to_string()],
env: HashMap::new(),
enabled: true,
};
config.mcp.insert("eruka".to_string(), custom);
let discovered = config.auto_discover_mcp_servers();
assert!(
!discovered.contains(&"eruka".to_string()),
"pre-existing 'eruka' entry must not be rediscovered, got {:?}",
discovered
);
let entry = config.mcp.get("eruka").expect("eruka entry must still exist");
assert_eq!(entry.command, "custom-eruka", "custom command must be preserved");
assert_eq!(entry.args, vec!["--custom-flag".to_string()]);
}
#[test]
fn test_discover_skills_from_repo_returns_parsed_skills() {
let repo = tempfile::TempDir::new().expect("tempdir");
let skill_dir = repo.path().join("example-skill");
std::fs::create_dir(&skill_dir).expect("mkdir example-skill");
let skill_md = skill_dir.join("SKILL.md");
std::fs::write(
&skill_md,
"---\nname: example-skill\ndescription: A test skill used in pawan unit tests\n---\n# Instructions\n\nDo the thing.\n",
)
.expect("write SKILL.md");
let empty_dir = repo.path().join("not-a-skill");
std::fs::create_dir(&empty_dir).expect("mkdir not-a-skill");
let config = PawanConfig {
skills_repo: Some(repo.path().to_path_buf()),
..Default::default()
};
std::env::remove_var("PAWAN_SKILLS_REPO");
let skills = config.discover_skills_from_repo();
assert_eq!(skills.len(), 1, "expected exactly 1 skill, got {:?}", skills);
let (name, desc, path) = &skills[0];
assert_eq!(name, "example-skill");
assert_eq!(desc, "A test skill used in pawan unit tests");
assert_eq!(path, &skill_md);
}
#[test]
fn test_load_with_explicit_pawan_toml_path() {
    // An explicitly supplied pawan.toml path must be read and parsed as TOML.
    let dir = tempfile::TempDir::new().expect("tempdir");
    let config_file = dir.path().join("pawan.toml");
    let toml_body = r#"
provider = "nvidia"
model = "meta/llama-3.1-405b-instruct"
"#;
    std::fs::write(&config_file, toml_body).expect("write pawan.toml");
    let config = PawanConfig::load(Some(&config_file)).expect("load should succeed");
    assert_eq!(config.model, "meta/llama-3.1-405b-instruct");
}
#[test]
fn test_load_with_invalid_toml_returns_error() {
    // Malformed TOML must surface as an Err whose message hints at the cause.
    let dir = tempfile::TempDir::new().expect("tempdir");
    let bad_file = dir.path().join("pawan.toml");
    std::fs::write(&bad_file, "this is not [[valid] toml @@").expect("write bad toml");
    let result = PawanConfig::load(Some(&bad_file));
    assert!(result.is_err(), "malformed TOML must return Err");
    let err_msg = format!("{}", result.unwrap_err());
    // Lowercase once instead of per-predicate; same comparison either way.
    let lowered = err_msg.to_lowercase();
    assert!(
        lowered.contains("parse") || lowered.contains("failed"),
        "error should mention parse/failed, got: {}",
        err_msg
    );
}
#[test]
fn test_load_with_nonexistent_path_returns_error() {
    // An explicit path that points nowhere is a hard error, not a default config.
    let missing = PathBuf::from("/tmp/definitely-does-not-exist-abc123-xyz.toml");
    assert!(
        PawanConfig::load(Some(&missing)).is_err(),
        "non-existent explicit path must return Err"
    );
}
#[test]
fn test_load_ares_toml_with_pawan_section() {
    // When pointed at a shared ares.toml, only the [pawan] table applies;
    // unrelated sections like [server] must be ignored.
    let dir = tempfile::TempDir::new().expect("tempdir");
    let ares_file = dir.path().join("ares.toml");
    let contents = r#"
# ares config (unrelated to pawan)
[server]
port = 3000
[pawan]
provider = "ollama"
model = "qwen3-coder:30b"
"#;
    std::fs::write(&ares_file, contents).expect("write ares.toml");
    let config = PawanConfig::load(Some(&ares_file)).expect("ares.toml load should succeed");
    assert_eq!(config.provider, LlmProvider::Ollama);
    assert_eq!(config.model, "qwen3-coder:30b");
}
#[test]
fn test_load_ares_toml_without_pawan_section_returns_defaults() {
    // An ares.toml that has no [pawan] table must yield default settings.
    let dir = tempfile::TempDir::new().expect("tempdir");
    let ares_file = dir.path().join("ares.toml");
    let contents = r#"
[server]
port = 3000
workers = 4
"#;
    std::fs::write(&ares_file, contents).expect("write ares.toml without pawan section");
    let loaded = PawanConfig::load(Some(&ares_file)).expect("load should succeed");
    let defaults = PawanConfig::default();
    assert_eq!(loaded.provider, defaults.provider);
    assert_eq!(loaded.model, defaults.model);
}
#[test]
fn test_load_empty_toml_file_returns_defaults() {
    // A zero-byte config file is valid TOML and must load as pure defaults.
    let dir = tempfile::TempDir::new().expect("tempdir");
    let empty_file = dir.path().join("pawan.toml");
    std::fs::write(&empty_file, "").expect("write empty toml");
    let loaded = PawanConfig::load(Some(&empty_file)).expect("empty toml should load");
    assert_eq!(loaded.provider, PawanConfig::default().provider);
}
}
#[test]
fn test_default_config_version() {
    // The serde default for config_version must stay pinned at 1.
    let version = default_config_version();
    assert_eq!(version, 1);
}
#[test]
fn test_default_tool_idle_timeout() {
    // The serde default for the tool idle timeout must stay 300 seconds.
    let timeout = default_tool_idle_timeout();
    assert_eq!(timeout, 300);
}
#[test]
fn test_config_version_field_exists() {
    // A freshly defaulted config must start at schema version 1.
    assert_eq!(PawanConfig::default().config_version, 1);
}
#[test]
fn test_tool_idle_timeout_field_exists() {
    // A freshly defaulted config must carry the 300s tool idle timeout.
    assert_eq!(PawanConfig::default().tool_call_idle_timeout_secs, 300);
}
#[test]
fn test_migration_result_fields() {
    // Build a MigrationResult literal and read back every public field.
    let backup = std::path::PathBuf::from("/tmp/backup.toml");
    let result = MigrationResult {
        migrated: true,
        from_version: 0,
        to_version: 1,
        backup_path: Some(backup),
    };
    assert!(result.migrated);
    assert_eq!(result.from_version, 0);
    assert_eq!(result.to_version, 1);
    assert!(result.backup_path.is_some());
}
#[test]
fn test_migrate_to_latest_no_migration_needed() {
    // A config already at the latest schema version is left untouched.
    let mut config = PawanConfig {
        config_version: 1,
        ..Default::default()
    };
    let result = migrate_to_latest(&mut config, None);
    assert!(!result.migrated, "Should not migrate if already at latest version");
    assert_eq!(result.from_version, 1);
    assert_eq!(result.to_version, 1);
}
#[test]
fn test_migrate_to_latest_performs_migration() {
    // A version-0 config must be stepped up to version 1 in place.
    let mut config = PawanConfig {
        config_version: 0,
        ..Default::default()
    };
    let result = migrate_to_latest(&mut config, None);
    assert!(result.migrated, "Should migrate from old version");
    assert_eq!(result.from_version, 0);
    assert_eq!(result.to_version, 1);
    assert_eq!(config.config_version, 1, "Config version should be updated");
}
#[test]
fn test_migrate_to_v1_adds_default_fields() {
    // Invoking the v1 step directly must succeed and stamp the new version.
    let mut config = PawanConfig {
        config_version: 0,
        ..Default::default()
    };
    let outcome = migrate_to_v1(&mut config);
    assert!(outcome.is_ok(), "Migration should succeed");
    assert_eq!(outcome.unwrap(), 1, "Should return new version");
    assert_eq!(config.config_version, 1, "Config version should be updated");
}
#[test]
fn test_migration_result_no_migration() {
    // no_migration() must report equal from/to versions and carry no backup.
    let result = MigrationResult::no_migration(1);
    assert!(!result.migrated, "Should indicate no migration");
    assert_eq!(result.from_version, 1);
    assert_eq!(result.to_version, 1);
    assert!(result.backup_path.is_none(), "Should not have backup path");
}
#[test]
fn test_migration_result_with_backup() {
    // new() with differing versions flags a migration and keeps the backup path.
    let backup_path = std::path::PathBuf::from("/tmp/backup.toml");
    let result = MigrationResult::new(0, 1, Some(backup_path.clone()));
    assert!(result.migrated, "Should indicate migration occurred");
    assert_eq!(result.from_version, 0);
    assert_eq!(result.to_version, 1);
    assert_eq!(result.backup_path, Some(backup_path), "Should have backup path");
}