//! Provider selection and construction for the langcodec CLI (`langcodec_cli/ai.rs`).
use std::sync::Arc;

use crate::config::CliConfig;
use mentra::{
    BuiltinProvider,
    provider::{self, Provider},
};

9#[derive(Debug, Clone, PartialEq, Eq)]
10pub(crate) enum ProviderKind {
11    OpenAI,
12    Anthropic,
13    Gemini,
14}
15
16impl ProviderKind {
17    pub(crate) fn parse(value: &str) -> Result<Self, String> {
18        match value.trim().to_ascii_lowercase().as_str() {
19            "openai" => Ok(Self::OpenAI),
20            "anthropic" => Ok(Self::Anthropic),
21            "gemini" => Ok(Self::Gemini),
22            other => Err(format!(
23                "Unsupported provider '{}'. Expected one of: openai, anthropic, gemini",
24                other
25            )),
26        }
27    }
28
29    pub(crate) fn display_name(&self) -> &'static str {
30        match self {
31            Self::OpenAI => "openai",
32            Self::Anthropic => "anthropic",
33            Self::Gemini => "gemini",
34        }
35    }
36
37    pub(crate) fn api_key_env(&self) -> &'static str {
38        match self {
39            Self::OpenAI => "OPENAI_API_KEY",
40            Self::Anthropic => "ANTHROPIC_API_KEY",
41            Self::Gemini => "GEMINI_API_KEY",
42        }
43    }
44
45    pub(crate) fn builtin_provider(&self) -> BuiltinProvider {
46        match self {
47            Self::OpenAI => BuiltinProvider::OpenAI,
48            Self::Anthropic => BuiltinProvider::Anthropic,
49            Self::Gemini => BuiltinProvider::Gemini,
50        }
51    }
52}
53
/// A resolved provider selection paired with a ready-to-use provider client.
#[derive(Clone)]
pub(crate) struct ProviderSetup {
    // Which provider was selected (drives display and API-key lookups).
    pub(crate) provider_kind: ProviderKind,
    // Shared handle to the constructed provider implementation; `Arc` keeps
    // `ProviderSetup` cheaply cloneable.
    pub(crate) provider: Arc<dyn Provider>,
}

60pub(crate) fn resolve_provider(
61    cli: Option<&str>,
62    config: Option<&CliConfig>,
63    translate_cfg: Option<&str>,
64) -> Result<ProviderKind, String> {
65    if let Some(value) = cli {
66        return ProviderKind::parse(value);
67    }
68    if let Some(value) = translate_cfg {
69        return ProviderKind::parse(value);
70    }
71    if let Some(config) = config {
72        let configured = config.configured_provider_names();
73        match configured.len() {
74            1 => return ProviderKind::parse(configured[0]),
75            0 => {}
76            _ => {
77                return Err(
78                    "Multiple provider sections are configured; specify --provider or set translate.provider in langcodec.toml"
79                        .to_string(),
80                );
81            }
82        }
83    }
84
85    let mut available = Vec::new();
86    for kind in [
87        ProviderKind::OpenAI,
88        ProviderKind::Anthropic,
89        ProviderKind::Gemini,
90    ] {
91        if std::env::var(kind.api_key_env()).is_ok() {
92            available.push(kind);
93        }
94    }
95
96    match available.len() {
97        1 => Ok(available.remove(0)),
98        0 => Err(
99            "--provider is required (or configure exactly one provider section like [openai] in langcodec.toml, set translate.provider, or configure exactly one provider API key)"
100                .to_string(),
101        ),
102        _ => Err(
103            "Multiple provider API keys are configured; specify --provider or configure a single provider section in langcodec.toml"
104                .to_string(),
105        ),
106    }
107}
108
109pub(crate) fn resolve_model(
110    cli: Option<&str>,
111    config: Option<&CliConfig>,
112    provider: &ProviderKind,
113    translate_cfg: Option<&str>,
114) -> Result<String, String> {
115    cli.map(ToOwned::to_owned)
116        .or_else(|| {
117            config.and_then(|cfg| {
118                cfg.provider_model(provider.display_name())
119                    .map(ToOwned::to_owned)
120            })
121        })
122        .or_else(|| translate_cfg.map(ToOwned::to_owned))
123        .or_else(|| std::env::var("MENTRA_MODEL").ok())
124        .ok_or_else(|| {
125            format!(
126                "--model is required (or set [{}].model in langcodec.toml, set translate.model, or set MENTRA_MODEL)",
127                provider.display_name()
128            )
129        })
130}
131
132pub(crate) fn read_api_key(kind: &ProviderKind) -> Result<String, String> {
133    std::env::var(kind.api_key_env()).map_err(|_| {
134        format!(
135            "Missing {} environment variable for {} provider",
136            kind.api_key_env(),
137            kind.display_name()
138        )
139    })
140}
141
142pub(crate) fn build_provider(kind: &ProviderKind) -> Result<ProviderSetup, String> {
143    let api_key = read_api_key(kind)?;
144
145    let provider: Arc<dyn Provider> = match kind {
146        ProviderKind::OpenAI => Arc::new(provider::openai::OpenAIProvider::new(api_key)),
147        ProviderKind::Anthropic => Arc::new(provider::anthropic::AnthropicProvider::new(api_key)),
148        ProviderKind::Gemini => Arc::new(provider::gemini::GeminiProvider::new(api_key)),
149    };
150
151    Ok(ProviderSetup {
152        provider_kind: kind.clone(),
153        provider,
154    })
155}
156
#[cfg(test)]
mod tests {
    use super::*;

    // With no CLI flag and no translate.provider, a config file containing
    // exactly one provider section selects that provider implicitly.
    #[test]
    fn resolve_provider_uses_single_configured_provider_section() {
        let config: CliConfig = toml::from_str(
            r#"
[openai]
model = "gpt-5.4"
"#,
        )
        .expect("parse config");

        let provider = resolve_provider(None, Some(&config), None).expect("resolve provider");
        assert_eq!(provider, ProviderKind::OpenAI);
    }

    // Two provider sections are ambiguous: resolution must fail with a
    // message telling the user how to disambiguate.
    #[test]
    fn resolve_provider_rejects_multiple_configured_provider_sections() {
        let config: CliConfig = toml::from_str(
            r#"
[openai]
model = "gpt-5.4"

[anthropic]
model = "claude-sonnet"
"#,
        )
        .expect("parse config");

        let err = resolve_provider(None, Some(&config), None).unwrap_err();
        assert!(err.contains("Multiple provider sections are configured"));
    }

    // The model lookup must read the section matching the *selected* provider,
    // not just the first section in the file.
    #[test]
    fn resolve_model_prefers_selected_provider_section() {
        let config: CliConfig = toml::from_str(
            r#"
[openai]
model = "gpt-5.4"

[anthropic]
model = "claude-sonnet"
"#,
        )
        .expect("parse config");

        let model = resolve_model(None, Some(&config), &ProviderKind::Anthropic, None)
            .expect("resolve model");
        assert_eq!(model, "claude-sonnet");
    }
}