//! Registry of known local (self-hosted) model providers.
//!
//! File: agentzero_core/common/local_providers.rs

/// The kind of API surface a local provider exposes.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LocalProviderType {
    // Chat/text-generation endpoint (all providers in the table except whispercpp).
    ChatCompletion,
    // Audio-transcription endpoint (used by the whispercpp entry).
    Transcription,
}
6
/// Static metadata describing one known local provider.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct LocalProviderMeta {
    // Canonical lowercase identifier (what `local_provider_meta` matches against).
    pub id: &'static str,
    // Default TCP port the provider listens on (0 for the in-process "builtin" entry).
    pub default_port: u16,
    // Default base URL, e.g. "http://localhost:11434" (empty for "builtin").
    pub default_base_url: &'static str,
    // Path of the model-listing endpoint relative to the base URL, if the
    // provider exposes one (e.g. "/v1/models" or Ollama's "/api/tags").
    pub models_endpoint: Option<&'static str>,
    // Whether models can be pulled/downloaded through the provider (only Ollama).
    pub supports_pull: bool,
    // Chat-completion vs transcription — see `LocalProviderType`.
    pub provider_type: LocalProviderType,
}
16
// Master table of recognised local providers. Lookups in this module do a
// linear scan over this slice, so keep it small and keep `id` values lowercase
// (callers' input is lowercased before comparison).
const LOCAL_PROVIDERS: &[LocalProviderMeta] = &[
    // In-process provider: no network endpoint, hence port 0 / empty URL.
    LocalProviderMeta {
        id: "builtin",
        default_port: 0,
        default_base_url: "",
        models_endpoint: None,
        supports_pull: false,
        provider_type: LocalProviderType::ChatCompletion,
    },
    // Ollama uses its native "/api/tags" listing (not the OpenAI-style path)
    // and is the only provider here that supports pulling models.
    LocalProviderMeta {
        id: "ollama",
        default_port: 11434,
        default_base_url: "http://localhost:11434",
        models_endpoint: Some("/api/tags"),
        supports_pull: true,
        provider_type: LocalProviderType::ChatCompletion,
    },
    // NOTE: llamacpp, osaurus and whispercpp all default to port 8080; users
    // running more than one of them must reconfigure the port.
    LocalProviderMeta {
        id: "llamacpp",
        default_port: 8080,
        default_base_url: "http://localhost:8080",
        models_endpoint: Some("/v1/models"),
        supports_pull: false,
        provider_type: LocalProviderType::ChatCompletion,
    },
    LocalProviderMeta {
        id: "lmstudio",
        default_port: 1234,
        default_base_url: "http://localhost:1234",
        models_endpoint: Some("/v1/models"),
        supports_pull: false,
        provider_type: LocalProviderType::ChatCompletion,
    },
    LocalProviderMeta {
        id: "vllm",
        default_port: 8000,
        default_base_url: "http://localhost:8000",
        models_endpoint: Some("/v1/models"),
        supports_pull: false,
        provider_type: LocalProviderType::ChatCompletion,
    },
    LocalProviderMeta {
        id: "sglang",
        default_port: 30000,
        default_base_url: "http://localhost:30000",
        models_endpoint: Some("/v1/models"),
        supports_pull: false,
        provider_type: LocalProviderType::ChatCompletion,
    },
    LocalProviderMeta {
        id: "osaurus",
        default_port: 8080,
        default_base_url: "http://localhost:8080",
        models_endpoint: Some("/v1/models"),
        supports_pull: false,
        provider_type: LocalProviderType::ChatCompletion,
    },
    // Transcription-only provider; no model-listing endpoint is exposed.
    LocalProviderMeta {
        id: "whispercpp",
        default_port: 8080,
        default_base_url: "http://localhost:8080",
        models_endpoint: None,
        supports_pull: false,
        provider_type: LocalProviderType::Transcription,
    },
];
83
84pub fn is_local_provider(id: &str) -> bool {
85    let needle = id.trim().to_ascii_lowercase();
86    LOCAL_PROVIDERS.iter().any(|p| p.id == needle)
87}
88
89pub fn local_provider_meta(id: &str) -> Option<&'static LocalProviderMeta> {
90    let needle = id.trim().to_ascii_lowercase();
91    LOCAL_PROVIDERS.iter().find(|p| p.id == needle)
92}
93
/// Returns the full static table of known local providers.
pub fn all_local_providers() -> &'static [LocalProviderMeta] {
    LOCAL_PROVIDERS
}
97
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn is_local_provider_recognises_ollama() {
        // Case and surrounding whitespace must not affect recognition.
        for spelling in ["ollama", "Ollama", "  ollama  "] {
            assert!(is_local_provider(spelling), "{:?} should be local", spelling);
        }
    }

    #[test]
    fn is_local_provider_rejects_cloud_providers() {
        for cloud in ["openrouter", "openai", "anthropic"] {
            assert!(!is_local_provider(cloud), "{:?} should not be local", cloud);
        }
    }

    #[test]
    fn local_provider_meta_returns_correct_defaults() {
        let meta = local_provider_meta("ollama").expect("ollama should be found");
        assert_eq!(
            (meta.default_port, meta.default_base_url, meta.supports_pull),
            (11434, "http://localhost:11434", true)
        );
    }

    #[test]
    fn local_provider_meta_returns_none_for_unknown() {
        for unknown in ["openai", ""] {
            assert!(local_provider_meta(unknown).is_none(), "{:?}", unknown);
        }
    }

    #[test]
    fn all_local_providers_contains_expected_count() {
        let count = all_local_providers().len();
        assert_eq!(count, 8);
    }

    #[test]
    fn builtin_is_local_provider() {
        assert!(is_local_provider("builtin"));
    }

    #[test]
    fn whispercpp_is_transcription_type() {
        let meta = local_provider_meta("whispercpp").expect("whispercpp should be found");
        assert!(matches!(meta.provider_type, LocalProviderType::Transcription));
        assert_eq!(meta.models_endpoint, None);
    }
}