1use serde::{Deserialize, Serialize};
10use std::collections::HashMap;
11use std::path::PathBuf;
12use tracing;
13
14#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
16#[serde(rename_all = "lowercase")]
17pub enum LlmProvider {
18 #[default]
20 Nvidia,
21 Ollama,
23 OpenAI,
25}
26
/// Top-level configuration for Pawan, loaded from `pawan.toml` or the
/// `[pawan]` section of `ares.toml` (see `PawanConfig::load`).
///
/// `#[serde(default)]` on the struct means every missing field falls back
/// to the value from `Default for PawanConfig`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct PawanConfig {
    /// LLM backend to use (default: `Nvidia`).
    pub provider: LlmProvider,

    /// Model identifier sent to the provider (default: `crate::DEFAULT_MODEL`).
    pub model: String,

    /// Dry-run flag (default: `false`). NOTE(review): consumers not visible
    /// here — presumably suppresses destructive actions; confirm at call sites.
    pub dry_run: bool,

    /// Whether to create backups automatically (default: `true`).
    pub auto_backup: bool,

    /// Require a clean git working tree before operating (default: `false`).
    pub require_git_clean: bool,

    /// Timeout for the bash tool, in seconds (default: `crate::DEFAULT_BASH_TIMEOUT`).
    pub bash_timeout_secs: u64,

    /// Largest file size handled, in KiB (default: 1024).
    pub max_file_size_kb: usize,

    /// Cap on tool-call iterations per task (default: `crate::MAX_TOOL_ITERATIONS`).
    pub max_tool_iterations: usize,
    /// Context window budget in tokens (default: 100000).
    pub max_context_tokens: usize,

    /// Optional override for the system prompt; when `None`,
    /// `DEFAULT_SYSTEM_PROMPT` is used (see `get_system_prompt`).
    pub system_prompt: Option<String>,

    /// Sampling temperature (default: 1.0).
    pub temperature: f32,

    /// Nucleus-sampling top-p (default: 0.95).
    pub top_p: f32,

    /// Maximum tokens per completion (default: 8192).
    pub max_tokens: usize,

    /// Retry budget for failed LLM calls (default: 3).
    pub max_retries: usize,

    /// Models to fall back to, in order; also settable via
    /// `PAWAN_FALLBACK_MODELS` (comma-separated).
    pub fallback_models: Vec<String>,
    /// Truncation limit for tool results, in characters (default: 8000).
    pub max_result_chars: usize,

    /// Enables thinking mode for supported models — see `use_thinking_mode`,
    /// which additionally requires a "deepseek" model (default: `true`).
    pub reasoning_mode: bool,

    /// Self-healing behavior knobs.
    pub healing: HealingConfig,

    /// Named target codebases; defaults include "ares" and "self".
    pub targets: HashMap<String, TargetConfig>,

    /// Terminal UI settings.
    pub tui: TuiConfig,

    /// MCP server definitions, keyed by server name.
    #[serde(default)]
    pub mcp: HashMap<String, McpServerEntry>,

    /// Per-tool permission overrides, keyed by tool name.
    #[serde(default)]
    pub permissions: HashMap<String, ToolPermission>,

    /// Optional cloud provider/model escalation settings.
    pub cloud: Option<CloudConfig>,

    /// Eruka bridge settings (defined in `crate::eruka_bridge`).
    #[serde(default)]
    pub eruka: crate::eruka_bridge::ErukaConfig,
}
105
/// Cloud LLM settings, used when `PawanConfig::cloud` is present.
/// NOTE(review): consumers are outside this file — presumably selects an
/// alternate provider/model for cloud execution; confirm at call sites.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CloudConfig {
    /// Provider to use for cloud requests.
    pub provider: LlmProvider,
    /// Model identifier for cloud requests.
    pub model: String,
    /// Fallback models, in order; empty when omitted from config.
    #[serde(default)]
    pub fallback_models: Vec<String>,
}
131
132#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
134#[serde(rename_all = "lowercase")]
135pub enum ToolPermission {
136 Allow,
138 Deny,
140}
141
142impl Default for PawanConfig {
143 fn default() -> Self {
144 let mut targets = HashMap::new();
145 targets.insert(
146 "ares".to_string(),
147 TargetConfig {
148 path: PathBuf::from("../.."),
149 description: "A.R.E.S server codebase".to_string(),
150 },
151 );
152 targets.insert(
153 "self".to_string(),
154 TargetConfig {
155 path: PathBuf::from("."),
156 description: "Pawan's own codebase".to_string(),
157 },
158 );
159
160 Self {
161 provider: LlmProvider::Nvidia,
162 model: crate::DEFAULT_MODEL.to_string(),
163 dry_run: false,
164 auto_backup: true,
165 require_git_clean: false,
166 bash_timeout_secs: crate::DEFAULT_BASH_TIMEOUT,
167 max_file_size_kb: 1024,
168 max_tool_iterations: crate::MAX_TOOL_ITERATIONS,
169 max_context_tokens: 100000,
170 system_prompt: None,
171 temperature: 1.0,
172 top_p: 0.95,
173 max_tokens: 8192,
174 reasoning_mode: true,
175 max_retries: 3,
176 fallback_models: Vec::new(),
177 max_result_chars: 8000,
178 healing: HealingConfig::default(),
179 targets,
180 tui: TuiConfig::default(),
181 mcp: HashMap::new(),
182 permissions: HashMap::new(),
183 cloud: None,
184 eruka: crate::eruka_bridge::ErukaConfig::default(),
185 }
186 }
187}
188
/// Knobs for the self-healing workflow. Missing fields fall back to
/// `Default for HealingConfig` via `#[serde(default)]`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct HealingConfig {
    /// Commit fixes automatically (default: `false`).
    pub auto_commit: bool,

    /// Attempt to fix compile errors (default: `true`).
    pub fix_errors: bool,

    /// Attempt to fix warnings (default: `true`).
    pub fix_warnings: bool,

    /// Attempt to fix failing tests (default: `true`).
    pub fix_tests: bool,

    /// Generate documentation as part of healing (default: `false`).
    pub generate_docs: bool,

    /// Maximum healing attempts before giving up (default: 3).
    pub max_attempts: usize,
}
211
212impl Default for HealingConfig {
213 fn default() -> Self {
214 Self {
215 auto_commit: false,
216 fix_errors: true,
217 fix_warnings: true,
218 fix_tests: true,
219 generate_docs: false,
220 max_attempts: 3,
221 }
222 }
223}
224
/// A named codebase Pawan can operate on (see `PawanConfig::targets`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TargetConfig {
    /// Filesystem path to the target's root.
    pub path: PathBuf,

    /// Human-readable description of the target.
    pub description: String,
}
234
/// Terminal UI settings. Missing fields fall back to
/// `Default for TuiConfig` via `#[serde(default)]`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct TuiConfig {
    /// Enable syntax highlighting (default: `true`).
    pub syntax_highlighting: bool,

    /// Highlighting theme name (default: "base16-ocean.dark").
    pub theme: String,

    /// Show line numbers (default: `true`).
    pub line_numbers: bool,

    /// Enable mouse support (default: `true`).
    pub mouse_support: bool,

    /// Lines scrolled per wheel tick (default: 3).
    pub scroll_speed: usize,

    /// Maximum retained history entries (default: 1000).
    pub max_history: usize,
}
257
258impl Default for TuiConfig {
259 fn default() -> Self {
260 Self {
261 syntax_highlighting: true,
262 theme: "base16-ocean.dark".to_string(),
263 line_numbers: true,
264 mouse_support: true,
265 scroll_speed: 3,
266 max_history: 1000,
267 }
268 }
269}
270
/// One MCP server definition (see `PawanConfig::mcp`, keyed by name).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct McpServerEntry {
    /// Executable to launch the server with.
    pub command: String,
    /// Command-line arguments; empty when omitted.
    #[serde(default)]
    pub args: Vec<String>,
    /// Extra environment variables for the server process; empty when omitted.
    #[serde(default)]
    pub env: HashMap<String, String>,
    /// Whether the server is active; defaults to `true` when omitted.
    #[serde(default = "default_true")]
    pub enabled: bool,
}
286
/// Serde default helper: makes `McpServerEntry::enabled` default to `true`
/// when the key is absent from the config file.
fn default_true() -> bool {
    true
}
290
291impl PawanConfig {
292 pub fn load(path: Option<&PathBuf>) -> crate::Result<Self> {
294 let config_path = path.cloned().or_else(|| {
295 let pawan_toml = PathBuf::from("pawan.toml");
297 if pawan_toml.exists() {
298 return Some(pawan_toml);
299 }
300
301 let ares_toml = PathBuf::from("ares.toml");
303 if ares_toml.exists() {
304 return Some(ares_toml);
305 }
306
307 None
308 });
309
310 match config_path {
311 Some(path) => {
312 let content = std::fs::read_to_string(&path).map_err(|e| {
313 crate::PawanError::Config(format!("Failed to read {}: {}", path.display(), e))
314 })?;
315
316 if path.file_name().map(|n| n == "ares.toml").unwrap_or(false) {
318 let value: toml::Value = toml::from_str(&content).map_err(|e| {
320 crate::PawanError::Config(format!(
321 "Failed to parse {}: {}",
322 path.display(),
323 e
324 ))
325 })?;
326
327 if let Some(pawan_section) = value.get("pawan") {
328 let config: PawanConfig =
329 pawan_section.clone().try_into().map_err(|e| {
330 crate::PawanError::Config(format!(
331 "Failed to parse [pawan] section: {}",
332 e
333 ))
334 })?;
335 return Ok(config);
336 }
337
338 Ok(Self::default())
340 } else {
341 toml::from_str(&content).map_err(|e| {
343 crate::PawanError::Config(format!(
344 "Failed to parse {}: {}",
345 path.display(),
346 e
347 ))
348 })
349 }
350 }
351 None => Ok(Self::default()),
352 }
353 }
354
355 pub fn apply_env_overrides(&mut self) {
357 if let Ok(model) = std::env::var("PAWAN_MODEL") {
358 self.model = model;
359 }
360 if let Ok(provider) = std::env::var("PAWAN_PROVIDER") {
361 match provider.to_lowercase().as_str() {
362 "nvidia" | "nim" => self.provider = LlmProvider::Nvidia,
363 "ollama" => self.provider = LlmProvider::Ollama,
364 "openai" => self.provider = LlmProvider::OpenAI,
365 _ => tracing::warn!(provider = provider.as_str(), "Unknown PAWAN_PROVIDER, ignoring"),
366 }
367 }
368 if let Ok(temp) = std::env::var("PAWAN_TEMPERATURE") {
369 if let Ok(t) = temp.parse::<f32>() {
370 self.temperature = t;
371 }
372 }
373 if let Ok(tokens) = std::env::var("PAWAN_MAX_TOKENS") {
374 if let Ok(t) = tokens.parse::<usize>() {
375 self.max_tokens = t;
376 }
377 }
378 if let Ok(iters) = std::env::var("PAWAN_MAX_ITERATIONS") {
379 if let Ok(i) = iters.parse::<usize>() {
380 self.max_tool_iterations = i;
381 }
382 }
383 if let Ok(ctx) = std::env::var("PAWAN_MAX_CONTEXT_TOKENS") {
384 if let Ok(c) = ctx.parse::<usize>() {
385 self.max_context_tokens = c;
386 }
387 }
388 if let Ok(models) = std::env::var("PAWAN_FALLBACK_MODELS") {
389 self.fallback_models = models.split(',').map(|s| s.trim().to_string()).filter(|s| !s.is_empty()).collect();
390 }
391 if let Ok(chars) = std::env::var("PAWAN_MAX_RESULT_CHARS") {
392 if let Ok(c) = chars.parse::<usize>() {
393 self.max_result_chars = c;
394 }
395 }
396 }
397
398 pub fn get_target(&self, name: &str) -> Option<&TargetConfig> {
400 self.targets.get(name)
401 }
402
403 pub fn get_system_prompt(&self) -> String {
405 let base = self
406 .system_prompt
407 .clone()
408 .unwrap_or_else(|| DEFAULT_SYSTEM_PROMPT.to_string());
409
410 let context = Self::load_context_file();
412 if let Some(ctx) = context {
413 format!("{}\n\n## Project Context (from PAWAN.md)\n\n{}", base, ctx)
414 } else {
415 base
416 }
417 }
418
419 fn load_context_file() -> Option<String> {
421 for path in &["PAWAN.md", ".pawan/context.md"] {
423 let p = PathBuf::from(path);
424 if p.exists() {
425 if let Ok(content) = std::fs::read_to_string(&p) {
426 if !content.trim().is_empty() {
427 return Some(content);
428 }
429 }
430 }
431 }
432 None
433 }
434
435 pub fn use_thinking_mode(&self) -> bool {
438 self.reasoning_mode && self.model.contains("deepseek")
439 }
440}
441
/// Fallback system prompt, used by `PawanConfig::get_system_prompt` when
/// `system_prompt` is `None`. Describes the available tools and the
/// change/fix workflow, and pins the git commit author identity.
pub const DEFAULT_SYSTEM_PROMPT: &str = r#"You are Pawan, an expert coding assistant capable of working on any project (Rust, Python, JavaScript, and more). You have self-healing, code review, and testing capabilities.

Available tools:
- File: read_file, write_file, edit_file, list_directory
- Search: glob_search, grep_search
- Shell: bash
- Git: git_status, git_diff, git_add, git_commit, git_log, git_blame, git_branch, git_checkout, git_stash
- Agent: spawn_agent

When making changes:
1. Always read files before modifying them to understand context
2. Make minimal, focused changes
3. Explain your reasoning before making changes
4. Verify changes compile and tests pass when appropriate
5. Follow existing code style and patterns

When fixing issues:
1. Understand the root cause before attempting fixes
2. Make one fix at a time and verify it works
3. If a fix doesn't work, try a different approach
4. Document what you changed and why

Be concise in explanations but thorough in code changes.

Git commits: always use author `bkataru <baalateja.k@gmail.com>`. Pass -c user.name="bkataru" -c user.email="baalateja.k@gmail.com" on every git commit. Never use: kavesbteja@gmail.com, baalateja.kataru@gmail.com, or noreply emails."#;