//! vtcode-config 0.98.7
//!
//! Config loader components shared across VT Code and downstream adopters.
use serde::{Deserialize, Serialize};
use std::fmt;
use std::str::FromStr;

use super::{ModelId, ModelParseError};

/// Supported AI model providers.
///
/// `OpenAI` is the default variant. The derived `Serialize`/`Deserialize`
/// impls use the variant names as written here (UpperCamelCase); the
/// lowercase wire/display identifiers live in the `Display`, `AsRef<str>`,
/// and `FromStr` impls below.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize, Default)]
pub enum Provider {
    /// Google Gemini models
    Gemini,
    /// OpenAI GPT models (default provider)
    #[default]
    OpenAI,
    /// Anthropic Claude models
    Anthropic,
    /// GitHub Copilot preview integration (managed auth, dynamic model list)
    Copilot,
    /// DeepSeek native models
    DeepSeek,
    /// OpenRouter marketplace models
    OpenRouter,
    /// Local Ollama models
    Ollama,
    /// LM Studio local models
    LmStudio,
    /// Moonshot.ai models
    Moonshot,
    /// Z.AI GLM models
    ZAI,
    /// MiniMax models
    Minimax,
    /// Hugging Face Inference Providers
    HuggingFace,
}

impl Provider {
    /// Get the default API key environment variable for this provider
    pub fn default_api_key_env(&self) -> &'static str {
        match self {
            Provider::Gemini => "GEMINI_API_KEY",
            Provider::OpenAI => "OPENAI_API_KEY",
            Provider::Anthropic => "ANTHROPIC_API_KEY",
            Provider::Copilot => "",
            Provider::DeepSeek => "DEEPSEEK_API_KEY",
            Provider::OpenRouter => "OPENROUTER_API_KEY",
            Provider::Ollama => "OLLAMA_API_KEY",
            Provider::LmStudio => "LMSTUDIO_API_KEY",
            Provider::Moonshot => "MOONSHOT_API_KEY",
            Provider::ZAI => "ZAI_API_KEY",
            Provider::Minimax => "MINIMAX_API_KEY",
            Provider::HuggingFace => "HF_TOKEN",
        }
    }

    /// Get all supported providers
    pub fn all_providers() -> Vec<Provider> {
        vec![
            Provider::OpenAI,
            Provider::Anthropic,
            Provider::Copilot,
            Provider::Minimax,
            Provider::Gemini,
            Provider::DeepSeek,
            Provider::HuggingFace,
            Provider::OpenRouter,
            Provider::Ollama,
            Provider::LmStudio,
            Provider::Moonshot,
            Provider::ZAI,
        ]
    }

    /// Human-friendly label for display purposes
    pub fn label(&self) -> &'static str {
        match self {
            Provider::Gemini => "Gemini",
            Provider::OpenAI => "OpenAI",
            Provider::Anthropic => "Anthropic",
            Provider::Copilot => "GitHub Copilot",
            Provider::DeepSeek => "DeepSeek",
            Provider::OpenRouter => "OpenRouter",
            Provider::Ollama => "Ollama",
            Provider::LmStudio => "LM Studio",
            Provider::Moonshot => "Moonshot",
            Provider::ZAI => "Z.AI",
            Provider::Minimax => "MiniMax",
            Provider::HuggingFace => "Hugging Face",
        }
    }

    pub fn is_dynamic(&self) -> bool {
        matches!(self, Provider::Copilot) || self.is_local()
    }

    pub fn is_local(&self) -> bool {
        matches!(self, Provider::Ollama | Provider::LmStudio)
    }

    pub fn local_install_instructions(&self) -> Option<&'static str> {
        match self {
            Provider::Ollama => Some(
                "Ollama server is not running. To start:\n  1. Install Ollama from https://ollama.com\n  2. Run 'ollama serve' in a terminal\n  3. Pull models using 'ollama pull <model-name>' (e.g., 'ollama pull gpt-oss:20b')",
            ),
            Provider::LmStudio => Some(
                "LM Studio server is not running. To start:\n  1. Install LM Studio from https://lmstudio.ai\n  2. Open LM Studio and start the Local Server on port 1234\n  3. Load the model you want to use",
            ),
            _ => None,
        }
    }

    /// Determine if the provider supports configurable reasoning effort for the model
    pub fn supports_reasoning_effort(&self, model: &str) -> bool {
        use crate::constants::models;

        match self {
            Provider::Gemini => models::google::REASONING_MODELS.contains(&model),
            Provider::OpenAI => models::openai::REASONING_MODELS.contains(&model),
            Provider::Anthropic => models::anthropic::REASONING_MODELS.contains(&model),
            Provider::Copilot => false,
            Provider::DeepSeek => model == models::deepseek::DEEPSEEK_REASONER,
            Provider::OpenRouter => {
                if let Ok(model_id) = ModelId::from_str(model) {
                    if let Some(meta) = crate::models::openrouter_generated::metadata_for(model_id)
                    {
                        return meta.reasoning;
                    }
                    return matches!(
                        model_id,
                        ModelId::OpenRouterMinimaxM25 | ModelId::OpenRouterQwen3CoderNext
                    );
                }
                models::openrouter::REASONING_MODELS.contains(&model)
            }
            Provider::Ollama => models::ollama::REASONING_LEVEL_MODELS.contains(&model),
            Provider::LmStudio => models::lmstudio::REASONING_MODELS.contains(&model),
            Provider::Moonshot => models::moonshot::REASONING_MODELS.contains(&model),
            Provider::ZAI => models::zai::REASONING_MODELS.contains(&model),
            Provider::Minimax => models::minimax::SUPPORTED_MODELS.contains(&model),
            Provider::HuggingFace => models::huggingface::REASONING_MODELS.contains(&model),
        }
    }

    /// Determine if the provider supports the `service_tier` request parameter for the model.
    pub fn supports_service_tier(&self, model: &str) -> bool {
        use crate::constants::models;

        match self {
            Provider::OpenAI => models::openai::SERVICE_TIER_MODELS.contains(&model),
            _ => false,
        }
    }

    pub fn uses_managed_auth(&self) -> bool {
        matches!(self, Provider::Copilot)
    }
}

impl fmt::Display for Provider {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Provider::Gemini => write!(f, "gemini"),
            Provider::OpenAI => write!(f, "openai"),
            Provider::Anthropic => write!(f, "anthropic"),
            Provider::Copilot => write!(f, "copilot"),
            Provider::DeepSeek => write!(f, "deepseek"),
            Provider::OpenRouter => write!(f, "openrouter"),
            Provider::Ollama => write!(f, "ollama"),
            Provider::LmStudio => write!(f, "lmstudio"),
            Provider::Moonshot => write!(f, "moonshot"),
            Provider::ZAI => write!(f, "zai"),
            Provider::Minimax => write!(f, "minimax"),
            Provider::HuggingFace => write!(f, "huggingface"),
        }
    }
}

impl AsRef<str> for Provider {
    fn as_ref(&self) -> &str {
        match self {
            Provider::Gemini => "gemini",
            Provider::OpenAI => "openai",
            Provider::Anthropic => "anthropic",
            Provider::Copilot => "copilot",
            Provider::DeepSeek => "deepseek",
            Provider::OpenRouter => "openrouter",
            Provider::Ollama => "ollama",
            Provider::LmStudio => "lmstudio",
            Provider::Moonshot => "moonshot",
            Provider::ZAI => "zai",
            Provider::Minimax => "minimax",
            Provider::HuggingFace => "huggingface",
        }
    }
}

impl FromStr for Provider {
    type Err = ModelParseError;

    /// Parses a provider from its canonical identifier, case-insensitively.
    ///
    /// # Errors
    ///
    /// Returns [`ModelParseError::InvalidProvider`] (carrying the original
    /// input) when the string matches no known provider.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Full Unicode lowercasing, matching the crate's historical behavior.
        let normalized = s.to_lowercase();
        let parsed = match normalized.as_str() {
            "anthropic" => Some(Self::Anthropic),
            "copilot" => Some(Self::Copilot),
            "deepseek" => Some(Self::DeepSeek),
            "gemini" => Some(Self::Gemini),
            "huggingface" => Some(Self::HuggingFace),
            "lmstudio" => Some(Self::LmStudio),
            "minimax" => Some(Self::Minimax),
            "moonshot" => Some(Self::Moonshot),
            "ollama" => Some(Self::Ollama),
            "openai" => Some(Self::OpenAI),
            "openrouter" => Some(Self::OpenRouter),
            "zai" => Some(Self::ZAI),
            _ => None,
        };
        parsed.ok_or_else(|| ModelParseError::InvalidProvider(s.to_string()))
    }
}