ai_lib/client/provider.rs

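/// The model providers supported by the client.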
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Provider {
    Groq,
    XaiGrok,
    Ollama,
    DeepSeek,
    Anthropic,
    AzureOpenAI,
    HuggingFace,
    TogetherAI,
    OpenRouter,
    Replicate,
    BaiduWenxin,
    TencentHunyuan,
    IflytekSpark,
    Moonshot,
    ZhipuAI,
    MiniMax,
    OpenAI,
    Qwen,
    Gemini,
    Mistral,
    Cohere,
    Perplexity,
    AI21,
}

impl Provider {
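    /// Returns the name of the default chat model for this provider.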
    pub fn default_chat_model(&self) -> &'static str {
        match self {
            Provider::Groq => "llama-3.1-8b-instant",
            Provider::XaiGrok => "grok-beta",
            Provider::Ollama => "llama3-8b",
            Provider::DeepSeek => "deepseek-chat",
            Provider::Anthropic => "claude-3-5-sonnet-20241022",
            Provider::AzureOpenAI => "gpt-35-turbo",
            Provider::HuggingFace => "microsoft/DialoGPT-medium",
            Provider::TogetherAI => "meta-llama/Llama-3-8b-chat-hf",
            Provider::OpenRouter => "openai/gpt-3.5-turbo",
            Provider::Replicate => "meta/llama-2-7b-chat",
            Provider::BaiduWenxin => "ernie-3.5",
            Provider::TencentHunyuan => "hunyuan-standard",
            Provider::IflytekSpark => "spark-v3.0",
            Provider::Moonshot => "moonshot-v1-8k",
            Provider::ZhipuAI => "glm-4",
            Provider::MiniMax => "abab6.5-chat",
            Provider::OpenAI => "gpt-3.5-turbo",
            Provider::Qwen => "qwen-turbo",
            Provider::Gemini => "gemini-1.5-flash",
            Provider::Mistral => "mistral-small",
            Provider::Cohere => "command-r",
            Provider::Perplexity => "llama-3.1-sonar-small-128k-online",
            Provider::AI21 => "j2-ultra",
        }
    }

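    /// Returns the default multimodal model for this provider, or `None` if none is configured.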
    pub fn default_multimodal_model(&self) -> Option<&'static str> {
        match self {
            Provider::OpenAI => Some("gpt-4o"),
            Provider::AzureOpenAI => Some("gpt-4o"),
            Provider::Anthropic => Some("claude-3-5-sonnet-20241022"),
            Provider::Groq => None,
            Provider::Gemini => Some("gemini-1.5-flash"),
            Provider::Cohere => Some("command-r-plus"),
            Provider::OpenRouter => Some("openai/gpt-4o"),
            Provider::Replicate => Some("meta/llama-2-7b-chat"),
            Provider::ZhipuAI => Some("glm-4v"),
            Provider::MiniMax => Some("abab6.5-chat"),
            Provider::Perplexity => Some("llama-3.1-sonar-small-128k-online"),
            Provider::AI21 => Some("j2-ultra"),
            _ => None,
        }
    }

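    /// Returns the environment-variable prefix used for this provider's configuration (e.g. `OPENAI`, `AZURE_OPENAI`).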
    pub fn env_prefix(&self) -> &'static str {
        match self {
            Provider::Groq => "GROQ",
            Provider::XaiGrok => "GROK",
            Provider::Ollama => "OLLAMA",
            Provider::DeepSeek => "DEEPSEEK",
            Provider::Anthropic => "ANTHROPIC",
            Provider::AzureOpenAI => "AZURE_OPENAI",
            Provider::HuggingFace => "HUGGINGFACE",
            Provider::TogetherAI => "TOGETHER",
            Provider::OpenRouter => "OPENROUTER",
            Provider::Replicate => "REPLICATE",
            Provider::BaiduWenxin => "BAIDU_WENXIN",
            Provider::TencentHunyuan => "TENCENT_HUNYUAN",
            Provider::IflytekSpark => "IFLYTEK",
            Provider::Moonshot => "MOONSHOT",
            Provider::ZhipuAI => "ZHIPU",
            Provider::MiniMax => "MINIMAX",
            Provider::OpenAI => "OPENAI",
            Provider::Qwen => "DASHSCOPE",
            Provider::Gemini => "GEMINI",
            Provider::Mistral => "MISTRAL",
            Provider::Cohere => "COHERE",
            Provider::Perplexity => "PERPLEXITY",
            Provider::AI21 => "AI21",
        }
    }
}
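
A minimal usage sketch of the accessors above (not part of provider.rs): the `describe` helper is hypothetical, and the `<PREFIX>_API_KEY` naming is an assumed convention that may not match ai_lib's actual key lookup.

// Illustrative sketch only; `describe` and the `<PREFIX>_API_KEY` convention
// are assumptions for demonstration, not part of the library.
fn describe(provider: Provider) -> String {
    // Compose the conventional API-key variable name from the provider's prefix.
    let api_key_var = format!("{}_API_KEY", provider.env_prefix());
    format!(
        "{:?}: default chat model `{}`, multimodal {:?}, key env var `{}`",
        provider,
        provider.default_chat_model(),
        provider.default_multimodal_model(),
        api_key_var
    )
}

// e.g. describe(Provider::OpenAI) returns:
// OpenAI: default chat model `gpt-3.5-turbo`, multimodal Some("gpt-4o"), key env var `OPENAI_API_KEY`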