use std::io::{self, BufRead, Write};
use std::path::{Path, PathBuf};
use crate::config;
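/// Probe a local Ollama daemon for its installed models. A short 500 ms
/// timeout keeps a missing daemon from stalling the wizard; returns None on
/// any error or when no models are installed.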
pub async fn probe_ollama() -> Option<Vec<String>> {
let client = reqwest::Client::builder()
.timeout(std::time::Duration::from_millis(500))
.build()
.ok()?;
let resp = client
.get("http://localhost:11434/api/tags")
.send()
.await
.ok()?;
if !resp.status().is_success() {
return None;
}
let body: serde_json::Value = resp.json().await.ok()?;
let models: Vec<String> = body["models"]
.as_array()?
.iter()
.filter_map(|m| m["name"].as_str().map(|s| s.to_string()))
.collect();
if models.is_empty() {
None
} else {
Some(models)
}
}
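/// Everything the setup wizard collects before writing a config file.
/// `key_source` is a display-only label (env var name or masked literal)
/// for the summary box; it is never written to disk.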
struct ProfileForm {
profile_name: String,
endpoint: String,
model: String,
context_tokens: u32,
api_key: Option<String>,
key_source: Option<String>,
}
impl ProfileForm {
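/// Render the profile as a complete config file, with the commented example
/// profiles appended so users can add more providers by hand.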
fn to_config_toml(&self) -> String {
// Escape interpolated values so a stray quote or backslash in user input
// can't produce invalid TOML. The profile name is also used as a bare table
// key, so it is assumed to contain only bare-key-safe characters (the
// wizard's own defaults are "claude", "openai", "openrouter", "local", "custom").
let mut out = format!(
"# PareCode configuration — generated by setup wizard\n\n\
default_profile = \"{name}\"\n\n\
[profiles.{name}]\n\
endpoint = \"{endpoint}\"\n\
model = \"{model}\"\n\
context_tokens = {ctx}\n",
name = self.profile_name,
endpoint = toml_escape(&self.endpoint),
model = toml_escape(&self.model),
ctx = self.context_tokens,
);
if let Some(ref key) = self.api_key {
out.push_str(&format!("api_key = \"{}\"\n", toml_escape(key)));
}
out.push_str(PROFILE_EXAMPLES);
out
}
fn print_summary(&self) {
println!();
println!(" ┌────────────────────────────────────────┐");
println!(" │ Profile:  {:<29}│", truncate(&self.profile_name, 29));
println!(" ├────────────────────────────────────────┤");
println!(" │ Endpoint: {:<29}│", truncate(&self.endpoint, 29));
println!(" │ Model:    {:<29}│", truncate(&self.model, 29));
println!(" │ Context:  {:<29}│", self.context_tokens);
// One source of truth for the key line: prefer the display label, then a
// masked literal, then "(none)".
let key_display = match (&self.key_source, &self.api_key) {
(Some(src), _) => src.clone(),
(None, Some(key)) => mask_key(key),
(None, None) => "(none)".to_string(),
};
println!(" │ API Key:  {:<29}│", truncate(&key_display, 29));
println!(" └────────────────────────────────────────┘");
}
}
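// Minimal escaping for TOML basic strings: backslash and double quote only.
// Control characters are not handled; wizard input is expected to be plain text.
fn toml_escape(s: &str) -> String {
s.replace('\\', "\\\\").replace('"', "\\\"")
}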
fn truncate(s: &str, max: usize) -> String {
// Count chars rather than bytes so multi-byte UTF-8 never splits
// mid-character, which would panic with a byte-index slice.
if s.chars().count() <= max {
s.to_string()
} else {
let prefix: String = s.chars().take(max.saturating_sub(1)).collect();
format!("{prefix}…")
}
}
fn mask_key(key: &str) -> String {
let chars: Vec<char> = key.chars().collect();
if chars.len() <= 8 {
"••••••••".to_string()
} else {
// Keys are ASCII in practice, but indexing by char (not byte) avoids a
// panic if one ever isn't.
let visible: String = chars[chars.len() - 4..].iter().collect();
format!("••••••{visible}")
}
}
fn write_config(content: &str) -> anyhow::Result<PathBuf> {
let path = config::config_path();
if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent)?;
}
std::fs::write(&path, content)?;
Ok(path)
}
fn read_line() -> String {
let mut buf = String::new();
let _ = io::stdin().lock().read_line(&mut buf);
buf.trim().to_string()
}
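/// Print a numbered menu and loop until a valid selection is entered.
/// Returns the zero-based index of the chosen option.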
fn pick(prompt: &str, options: &[&str]) -> usize {
println!();
println!(" {prompt}");
for (i, opt) in options.iter().enumerate() {
println!(" {}. {opt}", i + 1);
}
print!(" ❯ ");
let _ = io::stdout().flush();
loop {
let input = read_line();
if let Ok(n) = input.parse::<usize>() {
if n >= 1 && n <= options.len() {
return n - 1;
}
}
print!(" Pick 1–{}: ", options.len());
let _ = io::stdout().flush();
}
}
fn prompt_string(label: &str) -> String {
print!(" {label}: ");
let _ = io::stdout().flush();
read_line()
}
fn prompt_string_default(label: &str, default: &str) -> String {
print!(" {label} [{default}]: ");
let _ = io::stdout().flush();
let input = read_line();
if input.is_empty() { default.to_string() } else { input }
}
fn confirm(prompt: &str) -> bool {
print!(" {prompt} [Y/n]: ");
let _ = io::stdout().flush();
let answer = read_line();
!answer.eq_ignore_ascii_case("n") && !answer.eq_ignore_ascii_case("no")
}
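/// Heuristic context-window guess from the model name alone; falls back to a
/// conservative 32k when the family isn't recognised.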
fn guess_context_tokens(model: &str) -> u32 {
let m = model.to_lowercase();
if m.contains("llama3") && m.contains("70") {
131_072
} else if m.contains("qwen") && (m.contains("32") || m.contains("72")) {
65_536
} else if m.contains("deepseek") {
65_536
} else {
32_768
}
}
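/// Resolve an API key: offer a key already present in `env_var` first,
/// otherwise prompt for one. Returns the key plus a display label for the
/// summary box.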
fn resolve_api_key(env_var: &str, provider_name: &str) -> Option<(String, String)> {
if let Ok(key) = std::env::var(env_var) {
if !key.is_empty() {
println!();
println!(" Found ${env_var} in environment ({}).", mask_key(&key));
if confirm("Use this key?") {
return Some((key, format!("${env_var} (from env)")));
}
}
}
println!();
let key = prompt_string(&format!("{provider_name} API key"));
if key.is_empty() {
println!(" No key provided — you can add it later in the config file.");
None
} else {
Some((key.clone(), mask_key(&key)))
}
}
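/// Wizard defaults for one cloud provider.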
struct ProviderPreset {
profile_name: &'static str,
endpoint: &'static str,
model: &'static str,
context_tokens: u32,
env_var: &'static str,
display_name: &'static str,
}
const PRESETS: &[ProviderPreset] = &[
ProviderPreset {
profile_name: "claude",
endpoint: "https://api.anthropic.com/v1",
model: "claude-sonnet-4-6",
context_tokens: 200_000,
env_var: "ANTHROPIC_API_KEY",
display_name: "Anthropic",
},
ProviderPreset {
profile_name: "openai",
endpoint: "https://api.openai.com/v1",
model: "gpt-4o",
context_tokens: 128_000,
env_var: "OPENAI_API_KEY",
display_name: "OpenAI",
},
ProviderPreset {
profile_name: "openrouter",
endpoint: "https://openrouter.ai/api/v1",
model: "qwen/qwen-2.5-coder-32b-instruct",
context_tokens: 32_768,
env_var: "OPENROUTER_API_KEY",
display_name: "OpenRouter",
},
];
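/// First-run interactive setup. Returns Ok(true) once a config has been
/// written, Ok(false) if the user skipped or cancelled.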
pub async fn run_setup_wizard() -> anyhow::Result<bool> {
println!();
println!(" Welcome to PareCode ⚒");
println!();
println!(" No config found at {}", config::config_path().display());
println!(" Let's get you set up.");
let method = pick(
"How will you connect to an LLM?",
&[
"Cloud API — Anthropic Claude, OpenAI, OpenRouter",
"Local model — Ollama (free, private, offline)",
"Custom endpoint — vLLM, LM Studio, text-gen-webui, etc.",
"Skip — I'll configure manually later",
],
);
let form = match method {
0 => wizard_cloud_api().await?,
1 => wizard_local_ollama().await?,
2 => wizard_custom_endpoint()?,
_ => {
println!();
println!(" No problem. Create a config anytime:");
println!(" parecode --init");
println!(" Or set environment variables:");
println!(" PARECODE_ENDPOINT=http://... PARECODE_MODEL=... parecode");
return Ok(false);
}
};
let Some(form) = form else {
return Ok(false);
};
let final_form = confirm_or_edit(form)?;
let Some(final_form) = final_form else {
println!(" Cancelled.");
return Ok(false);
};
let path = write_config(&final_form.to_config_toml())?;
println!();
println!(" ✓ Config written to {}", path.display());
maybe_run_init().await;
Ok(true)
}
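/// Cloud path: pick a provider preset, then resolve its API key from the
/// environment or an interactive prompt.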
async fn wizard_cloud_api() -> anyhow::Result<Option<ProfileForm>> {
let provider_idx = pick(
"Which provider?",
&[
"Anthropic Claude — claude-sonnet-4-6 (recommended)",
"OpenAI — gpt-4o",
"OpenRouter — any model, single API key",
],
);
let preset = &PRESETS[provider_idx];
let (api_key, key_source) = match resolve_api_key(preset.env_var, preset.display_name) {
Some((k, s)) => (Some(k), Some(s)),
None => (None, None),
};
Ok(Some(ProfileForm {
profile_name: preset.profile_name.to_string(),
endpoint: preset.endpoint.to_string(),
model: preset.model.to_string(),
context_tokens: preset.context_tokens,
api_key,
key_source,
}))
}
async fn wizard_local_ollama() -> anyhow::Result<Option<ProfileForm>> {
println!();
print!(" Checking for Ollama at localhost:11434... ");
let _ = io::stdout().flush();
let models = probe_ollama().await;
match models {
Some(models) if !models.is_empty() => {
println!("✓ found ({} model{})", models.len(), if models.len() == 1 { "" } else { "s" });
let model = if models.len() == 1 {
println!(" Using: {}", models[0]);
models[0].clone()
} else {
let strs: Vec<&str> = models.iter().map(|s| s.as_str()).collect();
let choice = pick("Which model?", &strs);
models[choice].clone()
};
let ctx = guess_context_tokens(&model);
Ok(Some(ProfileForm {
profile_name: "local".to_string(),
endpoint: "http://localhost:11434".to_string(),
model,
context_tokens: ctx,
api_key: None,
key_source: None,
}))
}
_ => {
println!("✗ not reachable");
println!();
println!(" Ollama doesn't appear to be running.");
println!(" Install from https://ollama.ai, then run: ollama pull qwen3:14b");
println!();
if confirm("Write config with Ollama defaults anyway?") {
Ok(Some(ProfileForm {
profile_name: "local".to_string(),
endpoint: "http://localhost:11434".to_string(),
model: "qwen3:14b".to_string(),
context_tokens: 32_768,
api_key: None,
key_source: None,
}))
} else {
Ok(None)
}
}
}
}
fn wizard_custom_endpoint() -> anyhow::Result<Option<ProfileForm>> {
println!();
println!(" Enter your OpenAI-compatible endpoint details.");
let endpoint = prompt_string_default("Endpoint URL", "http://localhost:8000/v1");
let model = prompt_string("Model name");
if model.is_empty() {
println!(" Model name is required.");
return Ok(None);
}
let ctx_str = prompt_string_default("Context tokens", "32768");
let context_tokens: u32 = ctx_str.parse().unwrap_or(32_768);
println!();
let key_input = prompt_string("API key (leave blank if not required)");
let (api_key, key_source) = if key_input.is_empty() {
(None, None)
} else {
let masked = mask_key(&key_input);
(Some(key_input), Some(masked))
};
Ok(Some(ProfileForm {
profile_name: "custom".to_string(),
endpoint,
model,
context_tokens,
api_key,
key_source,
}))
}
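/// Show the summary box in a loop, letting the user save, edit individual
/// fields, or cancel. Returns None when cancelled.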
fn confirm_or_edit(mut form: ProfileForm) -> anyhow::Result<Option<ProfileForm>> {
loop {
form.print_summary();
println!();
let action = pick(
"Save this configuration?",
&[
"Yes — write config and start",
"Edit a field",
"Cancel",
],
);
match action {
0 => return Ok(Some(form)),
1 => {
let field = pick(
"Which field?",
&["Profile name", "Endpoint", "Model", "Context tokens", "API key"],
);
match field {
0 => form.profile_name = prompt_string_default("Profile name", &form.profile_name),
1 => form.endpoint = prompt_string_default("Endpoint", &form.endpoint),
2 => form.model = prompt_string_default("Model", &form.model),
3 => {
let v = prompt_string_default("Context tokens", &form.context_tokens.to_string());
form.context_tokens = v.parse().unwrap_or(form.context_tokens);
}
4 => {
let v = prompt_string("API key (blank to clear)");
if v.is_empty() {
form.api_key = None;
form.key_source = None;
} else {
form.key_source = Some(mask_key(&v));
form.api_key = Some(v);
}
}
_ => {}
}
}
_ => return Ok(None),
}
}
}
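/// If the current directory looks like a project (common build/manifest
/// markers) and has no conventions file yet, offer to generate
/// `.parecode/conventions.md`.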
async fn maybe_run_init() {
let cwd = match std::env::current_dir() {
Ok(c) => c,
Err(_) => return,
};
let markers = [
"Cargo.toml", "package.json", "pyproject.toml", "go.mod",
"Makefile", "CMakeLists.txt", "src",
];
if !markers.iter().any(|m| cwd.join(m).exists()) {
return;
}
let parecode_dir = cwd.join(".parecode");
if parecode_dir.join("conventions.md").exists() {
return;
}
println!();
print!(" Detect project context? [Y/n] ");
let _ = io::stdout().flush();
let answer = read_line();
if answer.eq_ignore_ascii_case("n") || answer.eq_ignore_ascii_case("no") {
return;
}
let content = crate::init::run_project_init(&cwd);
match crate::init::save_conventions(&cwd, &content) {
Ok(path) => {
println!(" ✓ Written to {}", path.display());
}
Err(e) => {
println!(" ✗ Failed to write conventions: {e}");
}
}
}
#[allow(dead_code)]
pub async fn is_ollama_running() -> bool {
probe_ollama().await.is_some()
}
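/// Check GitHub for a newer release, at most once per 24 hours (the result
/// is cached on disk). Returns `(current, latest)` when an update exists.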
pub async fn check_for_update() -> Option<(String, String)> {
let current = env!("CARGO_PKG_VERSION");
let cache_path = update_cache_path();
if let Some(cached) = read_update_cache(&cache_path) {
return if cached != current && version_gt(&cached, current) {
Some((current.to_string(), cached))
} else {
None
};
}
let client = reqwest::Client::builder()
.timeout(std::time::Duration::from_secs(3))
.build()
.ok()?;
let resp = client
.get("https://api.github.com/repos/PartTimer1996/parecode/releases/latest")
.header("User-Agent", "parecode")
.send()
.await
.ok()?;
if !resp.status().is_success() {
return None;
}
let body: serde_json::Value = resp.json().await.ok()?;
let tag = body["tag_name"].as_str()?;
let latest = tag.trim_start_matches('v').to_string();
write_update_cache(&cache_path, &latest);
if version_gt(&latest, current) {
Some((current.to_string(), latest))
} else {
None
}
}
fn update_cache_path() -> PathBuf {
let base = std::env::var("XDG_DATA_HOME")
.map(PathBuf::from)
.unwrap_or_else(|_| {
std::env::var("HOME")
.map(PathBuf::from)
.unwrap_or_else(|_| PathBuf::from("."))
.join(".local/share")
});
base.join("parecode").join("update-check")
}
fn read_update_cache(path: &Path) -> Option<String> {
let metadata = std::fs::metadata(path).ok()?;
let modified = metadata.modified().ok()?;
let age = std::time::SystemTime::now().duration_since(modified).ok()?;
if age > std::time::Duration::from_secs(86_400) {
return None;
}
let content = std::fs::read_to_string(path).ok()?;
let version = content.trim().to_string();
if version.is_empty() {
None
} else {
Some(version)
}
}
fn write_update_cache(path: &Path, version: &str) {
if let Some(parent) = path.parent() {
let _ = std::fs::create_dir_all(parent);
}
let _ = std::fs::write(path, version);
}
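/// Numeric dotted-version comparison: true if `a > b`. Non-numeric segments
/// are skipped, so "1.2.0-beta" compares as "1.2".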
fn version_gt(a: &str, b: &str) -> bool {
let parse = |s: &str| -> Vec<u32> {
s.split('.')
.filter_map(|p| p.parse::<u32>().ok())
.collect()
};
let va = parse(a);
let vb = parse(b);
for i in 0..va.len().max(vb.len()) {
let x = va.get(i).copied().unwrap_or(0);
let y = vb.get(i).copied().unwrap_or(0);
if x > y { return true; }
if x < y { return false; }
}
false
}
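/// Return a one-time tab-completion tip for the user's shell, or None if the
/// hint has already been shown (tracked with a marker file).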
pub fn shell_completion_hint() -> Option<String> {
let marker = completion_marker_path();
if marker.exists() {
return None;
}
if let Some(parent) = marker.parent() {
let _ = std::fs::create_dir_all(parent);
}
let _ = std::fs::write(&marker, "shown");
let shell = std::env::var("SHELL").unwrap_or_default();
let hint = if shell.contains("zsh") {
" tip: enable tab-completion:\n parecode --completions zsh > ~/.zfunc/_parecode"
} else if shell.contains("bash") {
" tip: enable tab-completion:\n parecode --completions bash > ~/.bash_completion.d/parecode"
} else if shell.contains("fish") {
" tip: enable tab-completion:\n parecode --completions fish > ~/.config/fish/completions/parecode.fish"
} else {
" tip: enable tab-completion with: parecode --completions <shell>"
};
Some(hint.to_string())
}
fn completion_marker_path() -> PathBuf {
let base = std::env::var("XDG_DATA_HOME")
.map(PathBuf::from)
.unwrap_or_else(|_| {
std::env::var("HOME")
.map(PathBuf::from)
.unwrap_or_else(|_| PathBuf::from("."))
.join(".local/share")
});
base.join("parecode").join("completions-hint-shown")
}
const PROFILE_EXAMPLES: &str = r#"
# ── Additional profile examples (uncomment and edit to add) ──────────────────
# ── Anthropic Claude ─────────────────────────────────────────────────────────
# [profiles.claude]
# endpoint = "https://api.anthropic.com/v1"
# model = "claude-sonnet-4-6"
# context_tokens = 200000
# api_key = "sk-ant-..."
# cost_per_mtok_input = 3.0 # USD per 1M input tokens — enables cost estimates in /plan
# ── Anthropic Claude — Opus planner + Haiku executor ─────────────────────────
# Uses Opus for planning (high reasoning, low token count) and Haiku for
# executing each step (fast, cheap). Best cost/quality ratio for large tasks.
# [profiles.claude-split]
# endpoint = "https://api.anthropic.com/v1"
# model = "claude-haiku-4-5-20251001"
# planner_model = "claude-opus-4-6"
# context_tokens = 200000
# api_key = "sk-ant-..."
# ── OpenAI ───────────────────────────────────────────────────────────────────
# [profiles.openai]
# endpoint = "https://api.openai.com/v1"
# model = "gpt-4o"
# context_tokens = 128000
# api_key = "sk-..."
# ── OpenRouter ────────────────────────────────────────────────────────────────
# [profiles.openrouter]
# endpoint = "https://openrouter.ai/api/v1"
# model = "qwen/qwen-2.5-coder-32b-instruct"
# context_tokens = 32768
# api_key = "sk-or-..."
# ── Local Ollama (alternative model) ─────────────────────────────────────────
# [profiles.small]
# endpoint = "http://localhost:11434"
# model = "qwen3:8b"
# context_tokens = 32768
# ── Git integration (optional, per-profile) ──────────────────────────────────
# git_context = true # inject git status into system prompt; enables checkpoints/diffs
# auto_commit = false # auto-commit all changes after each successful task
# auto_commit_prefix = "parecode: "
# ── MCP servers (optional, per-profile) ──────────────────────────────────────
# Add MCP servers to any profile to give the model extra tools.
# Tools appear as "<server_name>.<tool_name>" (e.g. "brave.brave_web_search").
#
# Example: Brave Search (requires BRAVE_API_KEY)
# [[profiles.local.mcp_servers]]
# name = "brave"
# command = ["npx", "-y", "@modelcontextprotocol/server-brave-search"]
# [profiles.local.mcp_servers.env]
# BRAVE_API_KEY = "BSA..."
#
# Example: Fetch (HTTP fetch + HTML→text, no API key needed)
# [[profiles.local.mcp_servers]]
# name = "fetch"
# command = ["uvx", "mcp-server-fetch"]
"#;
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_version_gt() {
assert!(version_gt("0.2.0", "0.1.0"));
assert!(version_gt("1.0.0", "0.9.9"));
assert!(version_gt("0.1.1", "0.1.0"));
assert!(!version_gt("0.1.0", "0.1.0"));
assert!(!version_gt("0.1.0", "0.2.0"));
}
#[test]
fn test_guess_context_tokens() {
assert_eq!(guess_context_tokens("qwen3:14b"), 32_768);
assert_eq!(guess_context_tokens("qwen3:32b"), 65_536);
assert_eq!(guess_context_tokens("llama3.1:70b"), 131_072);
assert_eq!(guess_context_tokens("deepseek-coder-v2:16b"), 65_536);
}
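// Regression sketches for the string helpers above, added alongside the
// char-boundary and TOML-escaping fixes; values are illustrative only.
#[test]
fn test_truncate_char_boundary() {
assert_eq!(truncate("short", 10), "short");
let t = truncate("éééééééééé", 5);
assert_eq!(t.chars().count(), 5);
assert!(t.ends_with('…'));
}
#[test]
fn test_mask_key() {
assert_eq!(mask_key("short"), "••••••••");
assert_eq!(mask_key("sk-abcdef123456"), "••••••3456");
}
#[test]
fn test_toml_escape() {
assert_eq!(toml_escape(r#"a"b\c"#), r#"a\"b\\c"#);
}
#[test]
fn test_config_toml_shape() {
let form = ProfileForm {
profile_name: "test".to_string(),
endpoint: "http://localhost:8000/v1".to_string(),
model: "demo".to_string(),
context_tokens: 32_768,
api_key: Some("sk-secret".to_string()),
key_source: None,
};
let toml = form.to_config_toml();
assert!(toml.contains("default_profile = \"test\""));
assert!(toml.contains("[profiles.test]"));
assert!(toml.contains("context_tokens = 32768"));
assert!(toml.contains("api_key = \"sk-secret\""));
}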
}