use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
/// How a provider authenticates, serialized with explicit snake_case names
/// (explicit renames rather than `rename_all` because `OAuth` would otherwise
/// snake-case to "o_auth").
///
/// Only `ApiKey` is used by the registry defined in this file; the other
/// variants are presumably for providers configured elsewhere — confirm.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ProviderAuthKind {
#[serde(rename = "api_key")]
ApiKey,
#[serde(rename = "oauth")]
OAuth,
#[serde(rename = "token")]
Token,
#[serde(rename = "device_code")]
DeviceCode,
#[serde(rename = "custom")]
Custom,
}
/// Setup-wizard metadata for an auth method: how the choice is presented
/// (id/label/hint), how it is ordered and grouped in the assistant flow,
/// which method it maps back to, which onboarding scopes it enables, and
/// optional model allowlist/selection behavior.
///
/// NOTE(review): serde naming is mixed — `choice_id`, `choice_label`,
/// `choice_hint`, and `assistant_priority` serialize as snake_case (no
/// rename) while the remaining fields are renamed to camelCase. Presumably
/// this mirrors an existing external schema; confirm before unifying.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderPluginWizardSetup {
// Wizard choice as shown to the user: identifier, display label, short hint.
pub choice_id: Option<String>,
pub choice_label: Option<String>,
pub choice_hint: Option<String>,
// Ordering priority of this choice in the assistant flow (0 for every entry
// in this file).
pub assistant_priority: Option<i32>,
#[serde(rename = "assistantVisibility")]
pub assistant_visibility: Option<String>,
// Grouping of related choices (e.g. all Z.AI variants share one group).
#[serde(rename = "groupId")]
pub group_id: Option<String>,
#[serde(rename = "groupLabel")]
pub group_label: Option<String>,
#[serde(rename = "groupHint")]
pub group_hint: Option<String>,
// Id of the `ProviderAuthMethod` this wizard entry belongs to.
#[serde(rename = "methodId")]
pub method_id: Option<String>,
// Onboarding scopes this choice unlocks ("text-inference" throughout this file).
#[serde(rename = "onboardingScopes")]
pub onboarding_scopes: Option<Vec<String>>,
#[serde(rename = "modelAllowlist")]
pub model_allowlist: Option<ModelAllowlist>,
#[serde(rename = "modelSelection")]
pub model_selection: Option<ModelSelection>,
}
/// Restriction on which models a wizard choice offers: the permitted model
/// keys, which of them start selected, and an optional message to display.
/// Unused by the registry in this file (always `None`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelAllowlist {
#[serde(rename = "allowedKeys")]
pub allowed_keys: Option<Vec<String>>,
#[serde(rename = "initialSelections")]
pub initial_selections: Option<Vec<String>>,
pub message: Option<String>,
}
/// Flags for the wizard's model-selection step.
/// Unused by the registry in this file (always `None`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelSelection {
// Presumably: still prompt for a model even when the auth choice was
// pre-supplied — confirm against the wizard implementation.
#[serde(rename = "promptWhenAuthChoiceProvided")]
pub prompt_when_auth_choice_provided: Option<bool>,
// Presumably: offer "keep current model" as an option — confirm.
#[serde(rename = "allowKeepCurrent")]
pub allow_keep_current: Option<bool>,
}
/// One way of authenticating with a provider.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderAuthMethod {
// Stable method identifier, unique within its provider (e.g. "api-key").
pub id: String,
// Human-readable method name.
pub label: String,
// Optional short description shown alongside the label.
pub hint: Option<String>,
// Authentication mechanism; every method in this file uses `ApiKey`.
pub kind: ProviderAuthKind,
// Optional setup-wizard presentation metadata.
pub wizard: Option<ProviderPluginWizardSetup>,
}
/// A provider entry in the registry: identity, docs link, icon, hook aliases,
/// recognized environment variables, auth methods, and default model.
///
/// NOTE(review): `icon` is `&'static str` while every other field is owned.
/// Borrowed `&'static str` generally cannot be produced by `Deserialize`
/// (the deserializer's data is not `'static`); confirm this struct is never
/// actually deserialized, or consider changing the field to `String`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderPlugin {
pub id: String,
pub label: String,
// Documentation path relative to the docs site root.
#[serde(rename = "docsPath")]
pub docs_path: Option<String>,
pub icon: &'static str,
// Alternative names under which hooks may refer to this provider.
#[serde(rename = "hookAliases")]
pub hook_aliases: Option<Vec<String>>,
// Environment variables that can carry this provider's credentials.
#[serde(rename = "envVars")]
pub env_vars: Option<Vec<String>>,
pub auth: Vec<ProviderAuthMethod>,
#[serde(rename = "defaultModel")]
pub default_model: Option<String>,
}
impl ProviderPlugin {
    /// Look up one of this provider's auth methods by its identifier.
    /// Returns `None` when no method with id `method_id` exists.
    #[allow(dead_code)]
    pub fn auth_method(&self, method_id: &str) -> Option<&ProviderAuthMethod> {
        for method in &self.auth {
            if method.id == method_id {
                return Some(method);
            }
        }
        None
    }
}
/// Namespace for read-only access to the built-in provider registry.
pub struct ProviderRegistry;

impl ProviderRegistry {
    /// All built-in provider plugins, in registration order.
    pub fn providers() -> &'static [ProviderPlugin] {
        &PROVIDERS
    }

    /// Case-insensitive lookup of a provider by id.
    ///
    /// Registry ids are plain ASCII (e.g. "anthropic"), so ASCII case folding
    /// is sufficient; `eq_ignore_ascii_case` avoids the per-candidate `String`
    /// allocation the previous `to_lowercase()` comparison performed.
    #[allow(dead_code)]
    pub fn find(id: &str) -> Option<&'static ProviderPlugin> {
        Self::providers()
            .iter()
            .find(|p| p.id.eq_ignore_ascii_case(id))
    }

    /// Owned list of all registered provider ids, in registration order.
    #[allow(dead_code)]
    pub fn ids() -> Vec<String> {
        Self::providers().iter().map(|p| p.id.clone()).collect()
    }
}
static PROVIDERS: Lazy<Vec<ProviderPlugin>> = Lazy::new(|| {
vec![
ProviderPlugin {
id: "anthropic".to_string(),
label: "Anthropic".to_string(),
docs_path: Some("/providers/models".to_string()),
icon: "🧠",
hook_aliases: Some(vec!["claude-cli".to_string()]),
env_vars: Some(vec![
"ANTHROPIC_API_KEY".to_string(),
"ANTHROPIC_OAUTH_TOKEN".to_string(),
]),
default_model: Some("anthropic/claude-sonnet-4-6".to_string()),
auth: vec![ProviderAuthMethod {
id: "api-key".to_string(),
label: "Anthropic API key".to_string(),
hint: Some("Direct Anthropic API key".to_string()),
kind: ProviderAuthKind::ApiKey,
wizard: Some(ProviderPluginWizardSetup {
choice_id: Some("anthropic-api-key".to_string()),
choice_label: Some("Anthropic API key".to_string()),
choice_hint: Some("Direct API key path".to_string()),
assistant_priority: Some(0),
assistant_visibility: None,
group_id: Some("anthropic".to_string()),
group_label: Some("Anthropic".to_string()),
group_hint: Some("Claude API key".to_string()),
method_id: Some("api-key".to_string()),
onboarding_scopes: Some(vec!["text-inference".to_string()]),
model_allowlist: None,
model_selection: None,
}),
}],
},
ProviderPlugin {
id: "openai".to_string(),
label: "OpenAI".to_string(),
docs_path: Some("/providers/models".to_string()),
icon: "🤖",
hook_aliases: Some(vec![
"azure-openai".to_string(),
"azure-openai-responses".to_string(),
]),
env_vars: Some(vec!["OPENAI_API_KEY".to_string()]),
default_model: Some("openai/gpt-5.4".to_string()),
auth: vec![ProviderAuthMethod {
id: "api-key".to_string(),
label: "OpenAI API key".to_string(),
hint: Some("Direct OpenAI API key".to_string()),
kind: ProviderAuthKind::ApiKey,
wizard: Some(ProviderPluginWizardSetup {
choice_id: Some("openai-api-key".to_string()),
choice_label: Some("OpenAI API key".to_string()),
choice_hint: Some("Direct API key path".to_string()),
assistant_priority: Some(0),
assistant_visibility: None,
group_id: Some("openai".to_string()),
group_label: Some("OpenAI".to_string()),
group_hint: Some("GPT models via OpenAI API".to_string()),
method_id: Some("api-key".to_string()),
onboarding_scopes: Some(vec!["text-inference".to_string()]),
model_allowlist: None,
model_selection: None,
}),
}],
},
ProviderPlugin {
id: "google".to_string(),
label: "Google".to_string(),
docs_path: Some("/providers/models".to_string()),
icon: "🔵",
hook_aliases: None,
env_vars: Some(vec!["GOOGLE_API_KEY".to_string()]),
default_model: Some("google/gemini-2.5".to_string()),
auth: vec![ProviderAuthMethod {
id: "api-key".to_string(),
label: "Google API key".to_string(),
hint: Some("Google AI / Gemini API key".to_string()),
kind: ProviderAuthKind::ApiKey,
wizard: Some(ProviderPluginWizardSetup {
choice_id: Some("google-api-key".to_string()),
choice_label: Some("Google API key".to_string()),
choice_hint: Some("Direct API key path".to_string()),
assistant_priority: Some(0),
assistant_visibility: None,
group_id: Some("google".to_string()),
group_label: Some("Google".to_string()),
group_hint: Some("Gemini models via Google AI".to_string()),
method_id: Some("api-key".to_string()),
onboarding_scopes: Some(vec!["text-inference".to_string()]),
model_allowlist: None,
model_selection: None,
}),
}],
},
ProviderPlugin {
id: "deepseek".to_string(),
label: "DeepSeek".to_string(),
docs_path: Some("/providers/deepseek".to_string()),
icon: "🔮",
hook_aliases: None,
env_vars: Some(vec!["DEEPSEEK_API_KEY".to_string()]),
default_model: Some("deepseek/deepseek-chat-v3".to_string()),
auth: vec![ProviderAuthMethod {
id: "api-key".to_string(),
label: "DeepSeek API key".to_string(),
hint: Some("DeepSeek API key".to_string()),
kind: ProviderAuthKind::ApiKey,
wizard: Some(ProviderPluginWizardSetup {
choice_id: Some("deepseek-api-key".to_string()),
choice_label: Some("DeepSeek API key".to_string()),
choice_hint: Some("Direct API key path".to_string()),
assistant_priority: Some(0),
assistant_visibility: None,
group_id: Some("deepseek".to_string()),
group_label: Some("DeepSeek".to_string()),
group_hint: Some("DeepSeek models via DeepSeek API".to_string()),
method_id: Some("api-key".to_string()),
onboarding_scopes: Some(vec!["text-inference".to_string()]),
model_allowlist: None,
model_selection: None,
}),
}],
},
ProviderPlugin {
id: "openrouter".to_string(),
label: "OpenRouter".to_string(),
docs_path: Some("/providers/openrouter".to_string()),
icon: "🛤️",
hook_aliases: None,
env_vars: Some(vec!["OPENROUTER_API_KEY".to_string()]),
default_model: Some("openrouter/anthropic/claude-sonnet-4-6".to_string()),
auth: vec![ProviderAuthMethod {
id: "api-key".to_string(),
label: "OpenRouter API key".to_string(),
hint: Some("OpenRouter API key".to_string()),
kind: ProviderAuthKind::ApiKey,
wizard: Some(ProviderPluginWizardSetup {
choice_id: Some("openrouter-api-key".to_string()),
choice_label: Some("OpenRouter API key".to_string()),
choice_hint: Some("Direct API key path".to_string()),
assistant_priority: Some(0),
assistant_visibility: None,
group_id: Some("openrouter".to_string()),
group_label: Some("OpenRouter".to_string()),
group_hint: Some("Multiple models via OpenRouter".to_string()),
method_id: Some("api-key".to_string()),
onboarding_scopes: Some(vec!["text-inference".to_string()]),
model_allowlist: None,
model_selection: None,
}),
}],
},
ProviderPlugin {
id: "azure".to_string(),
label: "Azure OpenAI".to_string(),
docs_path: Some("/providers/azure".to_string()),
icon: "☁️",
hook_aliases: None,
env_vars: Some(vec!["AZURE_OPENAI_API_KEY".to_string()]),
default_model: Some("azure/gpt-5.4".to_string()),
auth: vec![ProviderAuthMethod {
id: "api-key".to_string(),
label: "Azure OpenAI API key".to_string(),
hint: Some("Azure OpenAI API key".to_string()),
kind: ProviderAuthKind::ApiKey,
wizard: Some(ProviderPluginWizardSetup {
choice_id: Some("azure-api-key".to_string()),
choice_label: Some("Azure OpenAI API key".to_string()),
choice_hint: Some("Direct API key path".to_string()),
assistant_priority: Some(0),
assistant_visibility: None,
group_id: Some("azure".to_string()),
group_label: Some("Azure OpenAI".to_string()),
group_hint: Some("OpenAI models via Azure".to_string()),
method_id: Some("api-key".to_string()),
onboarding_scopes: Some(vec!["text-inference".to_string()]),
model_allowlist: None,
model_selection: None,
}),
}],
},
ProviderPlugin {
id: "groq".to_string(),
label: "Groq".to_string(),
docs_path: Some("/providers/groq".to_string()),
icon: "⚡",
hook_aliases: None,
env_vars: Some(vec!["GROQ_API_KEY".to_string()]),
default_model: Some("groq/llama-4-scout".to_string()),
auth: vec![ProviderAuthMethod {
id: "api-key".to_string(),
label: "Groq API key".to_string(),
hint: Some("Groq API key".to_string()),
kind: ProviderAuthKind::ApiKey,
wizard: Some(ProviderPluginWizardSetup {
choice_id: Some("groq-api-key".to_string()),
choice_label: Some("Groq API key".to_string()),
choice_hint: Some("Direct API key path".to_string()),
assistant_priority: Some(0),
assistant_visibility: None,
group_id: Some("groq".to_string()),
group_label: Some("Groq".to_string()),
group_hint: Some("Fast inference via Groq".to_string()),
method_id: Some("api-key".to_string()),
onboarding_scopes: Some(vec!["text-inference".to_string()]),
model_allowlist: None,
model_selection: None,
}),
}],
},
ProviderPlugin {
id: "together".to_string(),
label: "Together AI".to_string(),
docs_path: Some("/providers/together".to_string()),
icon: "🤝",
hook_aliases: None,
env_vars: Some(vec!["TOGETHER_API_KEY".to_string()]),
default_model: Some("together/llama-4-scout".to_string()),
auth: vec![ProviderAuthMethod {
id: "api-key".to_string(),
label: "Together AI API key".to_string(),
hint: Some("Together AI API key".to_string()),
kind: ProviderAuthKind::ApiKey,
wizard: Some(ProviderPluginWizardSetup {
choice_id: Some("together-api-key".to_string()),
choice_label: Some("Together AI API key".to_string()),
choice_hint: Some("Direct API key path".to_string()),
assistant_priority: Some(0),
assistant_visibility: None,
group_id: Some("together".to_string()),
group_label: Some("Together AI".to_string()),
group_hint: Some("Open models via Together AI".to_string()),
method_id: Some("api-key".to_string()),
onboarding_scopes: Some(vec!["text-inference".to_string()]),
model_allowlist: None,
model_selection: None,
}),
}],
},
ProviderPlugin {
id: "minimax".to_string(),
label: "MiniMax".to_string(),
docs_path: Some("/providers/minimax".to_string()),
icon: "🟠",
hook_aliases: Some(vec!["minimax-cn".to_string()]),
env_vars: Some(vec![
"MINIMAX_API_KEY".to_string(),
"MINIMAX_CODING_API_KEY".to_string(),
]),
default_model: Some("minimax/MiniMax-Text-01".to_string()),
auth: vec![
ProviderAuthMethod {
id: "api-global".to_string(),
label: "MiniMax API key (Global)".to_string(),
hint: Some("Global endpoint - api.minimax.io".to_string()),
kind: ProviderAuthKind::ApiKey,
wizard: Some(ProviderPluginWizardSetup {
choice_id: Some("minimax-global-api".to_string()),
choice_label: Some("MiniMax API key (Global)".to_string()),
choice_hint: Some("Global endpoint - api.minimax.io".to_string()),
assistant_priority: Some(0),
assistant_visibility: None,
group_id: Some("minimax".to_string()),
group_label: Some("MiniMax".to_string()),
group_hint: Some("M2.7 reasoning models".to_string()),
method_id: Some("api-global".to_string()),
onboarding_scopes: Some(vec!["text-inference".to_string()]),
model_allowlist: None,
model_selection: None,
}),
},
ProviderAuthMethod {
id: "api-cn".to_string(),
label: "MiniMax API key (CN)".to_string(),
hint: Some("CN endpoint - api.minimaxi.com".to_string()),
kind: ProviderAuthKind::ApiKey,
wizard: Some(ProviderPluginWizardSetup {
choice_id: Some("minimax-cn-api".to_string()),
choice_label: Some("MiniMax API key (CN)".to_string()),
choice_hint: Some("CN endpoint - api.minimaxi.com".to_string()),
assistant_priority: Some(0),
assistant_visibility: None,
group_id: Some("minimax".to_string()),
group_label: Some("MiniMax".to_string()),
group_hint: Some("M2.7 reasoning models".to_string()),
method_id: Some("api-cn".to_string()),
onboarding_scopes: Some(vec!["text-inference".to_string()]),
model_allowlist: None,
model_selection: None,
}),
},
],
},
ProviderPlugin {
id: "kimi".to_string(),
label: "Kimi (Moonshot AI)".to_string(),
docs_path: Some("/providers/moonshot".to_string()),
icon: "🌙",
hook_aliases: Some(vec!["kimi-code".to_string(), "kimi-coding".to_string()]),
env_vars: Some(vec![
"KIMI_API_KEY".to_string(),
"KIMICODE_API_KEY".to_string(),
]),
default_model: Some("kimi/kimi-k2.5".to_string()),
auth: vec![ProviderAuthMethod {
id: "api-key".to_string(),
label: "Kimi Code API key".to_string(),
hint: Some("Kimi K2.5 + Kimi coding models".to_string()),
kind: ProviderAuthKind::ApiKey,
wizard: Some(ProviderPluginWizardSetup {
choice_id: Some("kimi-code-api-key".to_string()),
choice_label: Some("Kimi Code API key".to_string()),
choice_hint: Some("Kimi K2.5 coding endpoint".to_string()),
assistant_priority: Some(0),
assistant_visibility: None,
group_id: Some("moonshot".to_string()),
group_label: Some("Moonshot AI (Kimi K2.5)".to_string()),
group_hint: Some("Kimi K2.5 models".to_string()),
method_id: Some("api-key".to_string()),
onboarding_scopes: Some(vec!["text-inference".to_string()]),
model_allowlist: None,
model_selection: None,
}),
}],
},
ProviderPlugin {
id: "zai".to_string(),
label: "Z.AI (GLM)".to_string(),
docs_path: Some("/providers/models".to_string()),
icon: "🧬",
hook_aliases: Some(vec!["z-ai".to_string(), "z.ai".to_string()]),
env_vars: Some(vec!["ZAI_API_KEY".to_string(), "Z_AI_API_KEY".to_string()]),
default_model: Some("zai/glm-4.7".to_string()),
auth: vec![
ProviderAuthMethod {
id: "api-key".to_string(),
label: "Z.AI API key".to_string(),
hint: Some("Z.AI GLM models".to_string()),
kind: ProviderAuthKind::ApiKey,
wizard: Some(ProviderPluginWizardSetup {
choice_id: Some("zai-api-key".to_string()),
choice_label: Some("Z.AI API key".to_string()),
choice_hint: Some("GLM models via Z.AI".to_string()),
assistant_priority: Some(0),
assistant_visibility: None,
group_id: Some("zai".to_string()),
group_label: Some("Z.AI".to_string()),
group_hint: Some("GLM Coding Plan / Global / CN".to_string()),
method_id: Some("api-key".to_string()),
onboarding_scopes: Some(vec!["text-inference".to_string()]),
model_allowlist: None,
model_selection: None,
}),
},
ProviderAuthMethod {
id: "coding-global".to_string(),
label: "Coding-Plan-Global".to_string(),
hint: Some("GLM Coding Plan Global (api.z.ai)".to_string()),
kind: ProviderAuthKind::ApiKey,
wizard: Some(ProviderPluginWizardSetup {
choice_id: Some("zai-coding-global".to_string()),
choice_label: Some("Coding-Plan-Global".to_string()),
choice_hint: Some("GLM Coding Plan Global (api.z.ai)".to_string()),
assistant_priority: Some(0),
assistant_visibility: None,
group_id: Some("zai".to_string()),
group_label: Some("Z.AI".to_string()),
group_hint: Some("GLM Coding Plan / Global / CN".to_string()),
method_id: Some("coding-global".to_string()),
onboarding_scopes: Some(vec!["text-inference".to_string()]),
model_allowlist: None,
model_selection: None,
}),
},
ProviderAuthMethod {
id: "coding-cn".to_string(),
label: "Coding-Plan-CN".to_string(),
hint: Some("GLM Coding Plan CN (open.bigmodel.cn)".to_string()),
kind: ProviderAuthKind::ApiKey,
wizard: Some(ProviderPluginWizardSetup {
choice_id: Some("zai-coding-cn".to_string()),
choice_label: Some("Coding-Plan-CN".to_string()),
choice_hint: Some("GLM Coding Plan CN (open.bigmodel.cn)".to_string()),
assistant_priority: Some(0),
assistant_visibility: None,
group_id: Some("zai".to_string()),
group_label: Some("Z.AI".to_string()),
group_hint: Some("GLM Coding Plan / Global / CN".to_string()),
method_id: Some("coding-cn".to_string()),
onboarding_scopes: Some(vec!["text-inference".to_string()]),
model_allowlist: None,
model_selection: None,
}),
},
ProviderAuthMethod {
id: "global".to_string(),
label: "Global".to_string(),
hint: Some("Z.AI Global (api.z.ai)".to_string()),
kind: ProviderAuthKind::ApiKey,
wizard: Some(ProviderPluginWizardSetup {
choice_id: Some("zai-global".to_string()),
choice_label: Some("Global".to_string()),
choice_hint: Some("Z.AI Global (api.z.ai)".to_string()),
assistant_priority: Some(0),
assistant_visibility: None,
group_id: Some("zai".to_string()),
group_label: Some("Z.AI".to_string()),
group_hint: Some("GLM Coding Plan / Global / CN".to_string()),
method_id: Some("global".to_string()),
onboarding_scopes: Some(vec!["text-inference".to_string()]),
model_allowlist: None,
model_selection: None,
}),
},
ProviderAuthMethod {
id: "cn".to_string(),
label: "CN".to_string(),
hint: Some("Z.AI CN (open.bigmodel.cn)".to_string()),
kind: ProviderAuthKind::ApiKey,
wizard: Some(ProviderPluginWizardSetup {
choice_id: Some("zai-cn".to_string()),
choice_label: Some("CN".to_string()),
choice_hint: Some("Z.AI CN (open.bigmodel.cn)".to_string()),
assistant_priority: Some(0),
assistant_visibility: None,
group_id: Some("zai".to_string()),
group_label: Some("Z.AI".to_string()),
group_hint: Some("GLM Coding Plan / Global / CN".to_string()),
method_id: Some("cn".to_string()),
onboarding_scopes: Some(vec!["text-inference".to_string()]),
model_allowlist: None,
model_selection: None,
}),
},
],
},
]
});
/// Top-level auth document: one optional credential slot per known provider.
///
/// Each field name already matches its desired JSON key, so the previous
/// per-field `#[serde(rename = "...")]` attributes were redundant and have
/// been removed — the serialized output is unchanged. `None` slots serialize
/// as JSON `null`.
#[derive(Debug, Serialize)]
pub struct PiAuthJson {
    anthropic: Option<ProviderAuth>,
    openai: Option<ProviderAuth>,
    google: Option<ProviderAuth>,
    deepseek: Option<ProviderAuth>,
    openrouter: Option<ProviderAuth>,
    azure: Option<ProviderAuth>,
    groq: Option<ProviderAuth>,
    together: Option<ProviderAuth>,
    minimax: Option<ProviderAuth>,
    kimi: Option<ProviderAuth>,
    zai: Option<ProviderAuth>,
}
/// A single provider credential as written to the auth JSON.
#[derive(Debug, Serialize)]
pub struct ProviderAuth {
// Credential kind, serialized as "type" (renamed because `type` is a Rust
// keyword). Always "api_key" in this file — see `PiAuthJson::new`.
#[serde(rename = "type")]
pub auth_type: String,
// The API key value itself.
pub key: String,
}
impl PiAuthJson {
    /// Build the auth table from `(provider_id, api_key)` pairs.
    ///
    /// Pairs with unrecognized provider ids are ignored. When the same
    /// provider appears more than once, the last pair wins — matching the
    /// original insertion-order overwrite behavior.
    pub fn new(provider_keys: &[(String, String)]) -> Self {
        // Last matching pair for `name`, wrapped as an "api_key" credential.
        let lookup = |name: &str| {
            provider_keys
                .iter()
                .rev()
                .find(|(provider, _)| provider.as_str() == name)
                .map(|(_, key)| ProviderAuth {
                    auth_type: "api_key".to_string(),
                    key: key.clone(),
                })
        };
        PiAuthJson {
            anthropic: lookup("anthropic"),
            openai: lookup("openai"),
            google: lookup("google"),
            deepseek: lookup("deepseek"),
            openrouter: lookup("openrouter"),
            azure: lookup("azure"),
            groq: lookup("groq"),
            together: lookup("together"),
            minimax: lookup("minimax"),
            kimi: lookup("kimi"),
            zai: lookup("zai"),
        }
    }

    /// Pretty-printed JSON representation of the auth document.
    pub fn to_json_string(&self) -> Result<String, serde_json::Error> {
        serde_json::to_string_pretty(self)
    }
}