//! dictx 0.1.1
//!
//! A fast, colorful terminal dictionary with offline indexes and optional AI explanations.
use crate::config::AppConfig;
use crate::output;
use crate::AiArgs;
use anyhow::{anyhow, Context, Result};
use reqwest::blocking::Client;
use serde::{Deserialize, Serialize};
use std::env;

/// Request body for an OpenAI-compatible `/chat/completions` endpoint.
#[derive(Debug, Serialize)]
struct ChatRequest {
    // Model identifier sent to the API (CLI override or config default).
    model: String,
    // Conversation so far; here a system prompt plus one user message.
    messages: Vec<Message>,
    // Sampling temperature forwarded verbatim to the API.
    temperature: f32,
}

/// One chat message in the request payload.
#[derive(Debug, Serialize)]
struct Message {
    // Message role as the API expects it, e.g. "system" or "user".
    role: String,
    // Plain-text content of the message.
    content: String,
}

/// Top-level response from the chat API; only `choices` is deserialized,
/// any other fields in the JSON body are ignored.
#[derive(Debug, Deserialize)]
struct ChatResponse {
    choices: Vec<Choice>,
}

/// One completion alternative returned by the API; only the first is used.
#[derive(Debug, Deserialize)]
struct Choice {
    message: ResponseMessage,
}

/// The assistant message inside a choice; only its text content is kept.
#[derive(Debug, Deserialize)]
struct ResponseMessage {
    content: String,
}

/// CLI entry point for the AI subcommand: ask the model to explain
/// `args.text` and print the colored answer to the terminal.
///
/// # Errors
/// Propagates any failure from [`explain`] (missing key, HTTP or parse error).
pub fn run_ai(config: &AppConfig, args: AiArgs, color: bool) -> Result<()> {
    let context = args.context.as_deref();
    let model = args.model.as_deref();
    let answer = explain(config, &args.text, context, model)?;
    output::print_ai_answer(&args.text, &answer, color);
    Ok(())
}

/// Ask the configured chat-completions endpoint to explain `text`,
/// optionally seeding the prompt with a usage `context` and overriding
/// the configured model with `model_override`.
///
/// The API key is resolved in order: the env var named by
/// `config.ai.api_key_env`, then `DICTX_AI_API_KEY`, `DEEPSEEK_API_KEY`,
/// and finally `OPENAI_API_KEY`.
///
/// # Errors
/// Fails when no API key is set, the HTTP client cannot be built, the
/// request fails, the server answers with a non-success status, the body
/// cannot be parsed, or the response contains no choices.
pub fn explain(
    config: &AppConfig,
    text: &str,
    context: Option<&str>,
    model_override: Option<&str>,
) -> Result<String> {
    // Walk the fallback chain of env vars; error out only if none is set.
    let api_key = env::var(&config.ai.api_key_env)
        .or_else(|_| env::var("DICTX_AI_API_KEY"))
        .or_else(|_| env::var("DEEPSEEK_API_KEY"))
        .or_else(|_| env::var("OPENAI_API_KEY"))
        .map_err(|_| {
            anyhow!(
                "未找到 API key。DeepSeek 默认请设置 {};也兼容 DICTX_AI_API_KEY 或 OPENAI_API_KEY。",
                config.ai.api_key_env
            )
        })?;

    // CLI override wins over the configured default model.
    let model = match model_override {
        Some(name) => name.to_owned(),
        None => config.ai.model.clone(),
    };

    // Normalize the base URL so a trailing slash in config doesn't double up.
    let endpoint = format!(
        "{}/chat/completions",
        config.ai.base_url.trim_end_matches('/')
    );

    let payload = ChatRequest {
        model,
        messages: vec![
            Message {
                role: "system".to_string(),
                content: "你是一个面向中文用户的简洁词典助手。回答应包含词性、中文释义、英文释义、常用搭配、例句,并避免编造来源。".to_string(),
            },
            Message {
                role: "user".to_string(),
                content: build_prompt(text, context),
            },
        ],
        temperature: 0.2,
    };

    let http = Client::builder()
        .timeout(std::time::Duration::from_secs(45))
        .build()
        .context("创建 HTTP 客户端失败")?;

    let response = http
        .post(endpoint)
        .bearer_auth(api_key)
        .json(&payload)
        .send()
        .context("请求 AI API 失败")?;

    // Surface server-side failures with the status code and raw body text.
    if !response.status().is_success() {
        let status = response.status();
        let body = response.text().unwrap_or_default();
        return Err(anyhow!("AI API 返回错误 {status}: {body}"));
    }

    let parsed: ChatResponse = response.json().context("解析 AI API 响应失败")?;

    // Only the first choice matters; an empty list is treated as an error.
    parsed
        .choices
        .into_iter()
        .next()
        .map(|choice| choice.message.content)
        .ok_or_else(|| anyhow!("AI API 响应没有 choices"))
}

/// Assemble the user prompt for the chat request.
///
/// A `context` that is `None` or blank after trimming is dropped and a
/// plain definition request is produced; otherwise the context is woven
/// into the prompt.
fn build_prompt(text: &str, context: Option<&str>) -> String {
    let usable_context = context.filter(|c| !c.trim().is_empty());
    if let Some(ctx) = usable_context {
        format!(
            "请解释词语或短语 `{}`。上下文: {}\n请用词典格式输出,中文为主。",
            text, ctx
        )
    } else {
        format!("请解释词语或短语 `{}`。请用词典格式输出,中文为主。", text)
    }
}