//! evo_common/config.rs — configuration types for the gateway and agents.
1use serde::{Deserialize, Serialize};
2use std::collections::HashMap;
3
/// Top-level configuration for the gateway.
///
/// Loaded from TOML or JSON via the constructors on `impl GatewayConfig`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GatewayConfig {
    /// HTTP listen address for the gateway server.
    pub server: ServerConfig,
    /// Upstream providers, in declaration order.
    pub providers: Vec<ProviderConfig>,
    /// Optional reliability configuration for retry/fallback behavior.
    /// When absent, requests use single-attempt mode (current default).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reliability: Option<ReliabilityConfig>,
    /// Optional hint-based model routing.
    /// Maps hint names (e.g. "coding", "fast") to `provider:model` strings.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub routing: Option<RoutingConfig>,
}
17
/// Network binding for the gateway's HTTP server.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServerConfig {
    /// Interface to bind, e.g. "127.0.0.1" or "0.0.0.0".
    pub host: String,
    /// TCP port to listen on.
    pub port: u16,
}
23
/// Which wire protocol the provider speaks.
///
/// Serialized in snake_case, so e.g. `OpenAiCompatible` round-trips as
/// `"open_ai_compatible"` and `ClaudeCode` as `"claude_code"`.
#[derive(Debug, Clone, Serialize, Deserialize, Default, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum ProviderType {
    /// OpenAI-compatible REST API (OpenAI, OpenRouter, Ollama, vLLM, etc.)
    #[default]
    OpenAiCompatible,
    /// Anthropic Messages API — different auth headers and request format.
    Anthropic,
    /// Cursor — spawns `cursor-agent` CLI subprocess instead of HTTP proxying.
    Cursor,
    /// Claude Code — spawns `claude` CLI subprocess in print mode.
    ClaudeCode,
    /// Codex CLI — spawns `codex` CLI subprocess in exec mode.
    CodexCli,
    /// OpenAI Codex Responses API — direct HTTP with OAuth/bearer token auth.
    CodexAuth,
    /// Google Generative AI (Gemini) — native generateContent API with query-param auth.
    Google,
    /// GitHub Copilot — token exchange flow + OpenAI-compatible wire format.
    GithubCopilot,
}
46
47#[derive(Debug, Clone, Serialize, Deserialize)]
48pub struct ProviderConfig {
49    pub name: String,
50    pub base_url: String,
51    /// One or more env-var names whose values are API tokens.
52    /// Multiple tokens enable a round-robin pool: ["KEY_1", "KEY_2", ...].
53    /// Leave empty for unauthenticated providers (e.g. local Ollama).
54    #[serde(default)]
55    pub api_key_envs: Vec<String>,
56    pub enabled: bool,
57    /// Wire protocol this provider uses.
58    #[serde(default)]
59    pub provider_type: ProviderType,
60    /// Optional extra HTTP headers sent on every request (e.g. OpenRouter's
61    /// `HTTP-Referer` and `X-Title`).
62    #[serde(default)]
63    pub extra_headers: HashMap<String, String>,
64    #[serde(default)]
65    pub rate_limit: Option<RateLimitConfig>,
66    /// Known model IDs this provider supports.
67    /// For API providers the gateway can also fetch from upstream `/models`.
68    /// For CLI providers (cursor, claude-code, codex-cli) this is the only
69    /// way to declare available models since CLIs have no listing API.
70    #[serde(default)]
71    pub models: Vec<String>,
72    /// Optional rich metadata per model (keyed by model ID).
73    /// When present, `/v1/models` responses include context_window, max_tokens, etc.
74    #[serde(default, skip_serializing_if = "Option::is_none")]
75    pub model_metadata: Option<HashMap<String, ModelMetadata>>,
76}
77
/// Rich metadata for a single model — context window, pricing, capabilities.
///
/// Every field is optional; absent fields are omitted from serialized output.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelMetadata {
    /// Total context window in tokens.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub context_window: Option<u32>,
    /// Maximum output tokens per response.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    /// Whether the model supports extended reasoning.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reasoning: Option<bool>,
    /// Accepted input modalities (e.g. "text", "image" — see tests).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub input_types: Option<Vec<String>>,
    /// Per-token pricing, when known.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub cost: Option<ModelCost>,
}
92
/// Per-model pricing in USD per 1M tokens.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelCost {
    /// Input (prompt) token price.
    pub input: f64,
    /// Output (completion) token price.
    pub output: f64,
    /// Price for reading tokens from provider-side cache, if supported.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub cache_read: Option<f64>,
    /// Price for writing tokens to provider-side cache, if supported.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub cache_write: Option<f64>,
}
103
/// Per-provider request rate limit.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RateLimitConfig {
    /// Sustained request budget per minute.
    pub requests_per_minute: u32,
    // NOTE(review): presumably the short-term burst allowance above the
    // sustained rate — enforcement semantics live in the limiter, not here.
    pub burst_size: u32,
}
109
/// Retry and fallback configuration for upstream provider requests.
///
/// Each field has a serde default, so a `[reliability]` section may specify
/// only the values it wants to override.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReliabilityConfig {
    /// Maximum retry attempts per provider before falling back (default: 3).
    #[serde(default = "default_max_retries")]
    pub max_retries: u32,
    /// Base backoff in milliseconds for exponential backoff (default: 200).
    #[serde(default = "default_base_backoff_ms")]
    pub base_backoff_ms: u64,
    /// Maximum backoff cap in milliseconds (default: 10000).
    #[serde(default = "default_max_backoff_ms")]
    pub max_backoff_ms: u64,
    /// Ordered list of provider names to try on failure.
    /// If empty, only the originally targeted provider is attempted.
    #[serde(default)]
    pub fallback_chain: Vec<String>,
}
127
/// Serde default for `ReliabilityConfig::max_retries`: 3 attempts.
fn default_max_retries() -> u32 { 3 }

/// Serde default for `ReliabilityConfig::base_backoff_ms`: 200 ms.
fn default_base_backoff_ms() -> u64 { 200 }

/// Serde default for `ReliabilityConfig::max_backoff_ms`: 10 seconds.
fn default_max_backoff_ms() -> u64 { 10_000 }
137
/// Hint-based model routing configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RoutingConfig {
    /// Maps hint names to `provider:model` strings.
    /// Example: `{"coding": "anthropic:claude-opus-4-5", "fast": "openai:gpt-4o-mini"}`
    #[serde(default)]
    pub model_routes: HashMap<String, String>,
    /// Default `provider:model` when no hint matches and no provider is specified.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub default_route: Option<String>,
}
149
/// Configuration for an agent process.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgentConfig {
    // NOTE(review): role/skill semantics are defined by the agent runtime,
    // not visible in this file — confirm against the consumer.
    pub role: String,
    pub skills: Vec<String>,
    /// Address of the coordinating "king" node.
    // Assumed to be a host:port or URL string — TODO confirm with caller.
    pub king_address: String,
}
156
impl GatewayConfig {
    /// Parses a gateway configuration from a TOML document.
    ///
    /// # Errors
    /// Returns the underlying `toml` error on malformed or mismatched input.
    pub fn from_toml(content: &str) -> Result<Self, toml::de::Error> {
        toml::from_str(content)
    }

    /// Serializes to pretty-printed TOML.
    ///
    /// # Errors
    /// Returns a `toml` serialization error (e.g. for unrepresentable values).
    pub fn to_toml(&self) -> Result<String, toml::ser::Error> {
        toml::to_string_pretty(self)
    }

    /// Parses a gateway configuration from a JSON document.
    ///
    /// # Errors
    /// Returns the underlying `serde_json` error on malformed input.
    pub fn from_json(content: &str) -> Result<Self, serde_json::Error> {
        serde_json::from_str(content)
    }

    /// Serializes to pretty-printed JSON.
    ///
    /// # Errors
    /// Returns a `serde_json` serialization error.
    pub fn to_json(&self) -> Result<String, serde_json::Error> {
        serde_json::to_string_pretty(self)
    }
}
174
impl AgentConfig {
    /// Parses an agent configuration from a TOML document.
    ///
    /// # Errors
    /// Returns the underlying `toml` error on malformed or mismatched input.
    pub fn from_toml(content: &str) -> Result<Self, toml::de::Error> {
        toml::from_str(content)
    }
}
180
#[cfg(test)]
mod tests {
    use super::*;

    /// Multi-provider TOML with a round-robin key pool and per-provider
    /// extra headers parses into the expected shape.
    #[test]
    fn parse_gateway_config_with_pool() {
        let toml_str = r#"
[server]
host = "0.0.0.0"
port = 8080

[[providers]]
name = "openai"
base_url = "https://api.openai.com/v1"
api_key_envs = ["OPENAI_API_KEY_1", "OPENAI_API_KEY_2"]
enabled = true
provider_type = "open_ai_compatible"

[[providers]]
name = "anthropic"
base_url = "https://api.anthropic.com/v1"
api_key_envs = ["ANTHROPIC_API_KEY"]
enabled = true
provider_type = "anthropic"

[[providers]]
name = "openrouter"
base_url = "https://openrouter.ai/api/v1"
api_key_envs = ["OPENROUTER_API_KEY"]
enabled = true
provider_type = "open_ai_compatible"

[providers.extra_headers]
"HTTP-Referer" = "https://github.com/ai-evo-agents"
"X-Title" = "evo-gateway"
"#;
        let config = GatewayConfig::from_toml(toml_str).unwrap();
        assert_eq!(config.server.port, 8080);
        assert_eq!(config.providers.len(), 3);
        // Two env vars declared -> pool of two keys.
        assert_eq!(config.providers[0].api_key_envs.len(), 2);
        assert_eq!(config.providers[1].provider_type, ProviderType::Anthropic);
        // In TOML, [providers.extra_headers] attaches to the most recent
        // [[providers]] element (openrouter).
        assert!(
            config.providers[2]
                .extra_headers
                .contains_key("HTTP-Referer")
        );
    }

    /// Serialize -> parse round trip through TOML preserves fields,
    /// including an empty key pool.
    #[test]
    fn roundtrip_gateway_config_toml() {
        let config = GatewayConfig {
            server: ServerConfig {
                host: "127.0.0.1".into(),
                port: 3000,
            },
            providers: vec![ProviderConfig {
                name: "test".into(),
                base_url: "http://localhost:11434".into(),
                api_key_envs: vec![],
                enabled: true,
                provider_type: ProviderType::OpenAiCompatible,
                extra_headers: HashMap::new(),
                rate_limit: None,
                models: vec![],
                model_metadata: None,
            }],
            reliability: None,
            routing: None,
        };
        let toml_str = config.to_toml().unwrap();
        let parsed = GatewayConfig::from_toml(&toml_str).unwrap();
        assert_eq!(parsed.server.port, 3000);
        assert_eq!(parsed.providers[0].api_key_envs.len(), 0);
    }

    /// Serialize -> parse round trip through JSON preserves fields and
    /// provider-type discriminants.
    #[test]
    fn roundtrip_gateway_config_json() {
        let config = GatewayConfig {
            server: ServerConfig {
                host: "0.0.0.0".into(),
                port: 8080,
            },
            providers: vec![
                ProviderConfig {
                    name: "openai".into(),
                    base_url: "https://api.openai.com/v1".into(),
                    api_key_envs: vec!["OPENAI_API_KEY".into()],
                    enabled: true,
                    provider_type: ProviderType::OpenAiCompatible,
                    extra_headers: HashMap::new(),
                    rate_limit: None,
                    models: vec![],
                    model_metadata: None,
                },
                ProviderConfig {
                    name: "anthropic".into(),
                    base_url: "https://api.anthropic.com/v1".into(),
                    api_key_envs: vec!["ANTHROPIC_API_KEY".into()],
                    enabled: true,
                    provider_type: ProviderType::Anthropic,
                    extra_headers: HashMap::new(),
                    rate_limit: None,
                    models: vec![],
                    model_metadata: None,
                },
            ],
            reliability: None,
            routing: None,
        };
        let json_str = config.to_json().unwrap();
        let parsed = GatewayConfig::from_json(&json_str).unwrap();
        assert_eq!(parsed.server.port, 8080);
        assert_eq!(parsed.providers.len(), 2);
        assert_eq!(parsed.providers[1].provider_type, ProviderType::Anthropic);
        assert_eq!(parsed.providers[0].api_key_envs[0], "OPENAI_API_KEY");
    }

    /// `ClaudeCode` serializes as snake_case "claude_code" and round-trips.
    #[test]
    fn roundtrip_provider_type_claude_code() {
        let config = GatewayConfig {
            server: ServerConfig {
                host: "127.0.0.1".into(),
                port: 8080,
            },
            providers: vec![ProviderConfig {
                name: "claude-code".into(),
                base_url: String::new(),
                api_key_envs: vec![],
                enabled: false,
                provider_type: ProviderType::ClaudeCode,
                extra_headers: HashMap::new(),
                rate_limit: None,
                models: vec![],
                model_metadata: None,
            }],
            reliability: None,
            routing: None,
        };
        let json_str = config.to_json().unwrap();
        assert!(json_str.contains("\"claude_code\""));
        let parsed = GatewayConfig::from_json(&json_str).unwrap();
        assert_eq!(parsed.providers[0].provider_type, ProviderType::ClaudeCode);
    }

    /// `CodexCli` serializes as "codex_cli" and round-trips.
    #[test]
    fn roundtrip_provider_type_codex_cli() {
        let config = GatewayConfig {
            server: ServerConfig {
                host: "127.0.0.1".into(),
                port: 8080,
            },
            providers: vec![ProviderConfig {
                name: "codex-cli".into(),
                base_url: String::new(),
                api_key_envs: vec![],
                enabled: false,
                provider_type: ProviderType::CodexCli,
                extra_headers: HashMap::new(),
                rate_limit: None,
                models: vec![],
                model_metadata: None,
            }],
            reliability: None,
            routing: None,
        };
        let json_str = config.to_json().unwrap();
        assert!(json_str.contains("\"codex_cli\""));
        let parsed = GatewayConfig::from_json(&json_str).unwrap();
        assert_eq!(parsed.providers[0].provider_type, ProviderType::CodexCli);
    }

    /// `Cursor` serializes as "cursor" and round-trips.
    #[test]
    fn roundtrip_provider_type_cursor() {
        let config = GatewayConfig {
            server: ServerConfig {
                host: "127.0.0.1".into(),
                port: 8080,
            },
            providers: vec![ProviderConfig {
                name: "cursor".into(),
                base_url: String::new(),
                api_key_envs: vec![],
                enabled: false,
                provider_type: ProviderType::Cursor,
                extra_headers: HashMap::new(),
                rate_limit: None,
                models: vec![],
                model_metadata: None,
            }],
            reliability: None,
            routing: None,
        };
        let json_str = config.to_json().unwrap();
        assert!(json_str.contains("\"cursor\""));
        let parsed = GatewayConfig::from_json(&json_str).unwrap();
        assert_eq!(parsed.providers[0].provider_type, ProviderType::Cursor);
    }

    /// Declared model IDs survive a JSON round trip in order.
    #[test]
    fn roundtrip_provider_models_field() {
        let config = GatewayConfig {
            server: ServerConfig {
                host: "127.0.0.1".into(),
                port: 8080,
            },
            providers: vec![ProviderConfig {
                name: "openai".into(),
                base_url: "https://api.openai.com/v1".into(),
                api_key_envs: vec![],
                enabled: true,
                provider_type: ProviderType::OpenAiCompatible,
                extra_headers: HashMap::new(),
                rate_limit: None,
                models: vec!["gpt-4o".into(), "gpt-4o-mini".into()],
                model_metadata: None,
            }],
            reliability: None,
            routing: None,
        };
        let json_str = config.to_json().unwrap();
        assert!(json_str.contains("gpt-4o"));
        let parsed = GatewayConfig::from_json(&json_str).unwrap();
        assert_eq!(parsed.providers[0].models.len(), 2);
        assert_eq!(parsed.providers[0].models[0], "gpt-4o");
        assert_eq!(parsed.providers[0].models[1], "gpt-4o-mini");
    }

    /// `CodexAuth` serializes as "codex_auth" and round-trips.
    #[test]
    fn roundtrip_provider_type_codex_auth() {
        let config = GatewayConfig {
            server: ServerConfig {
                host: "127.0.0.1".into(),
                port: 8080,
            },
            providers: vec![ProviderConfig {
                name: "codex-auth".into(),
                base_url: "https://api.openai.com/v1".into(),
                api_key_envs: vec!["OPENAI_API_KEY".into()],
                enabled: false,
                provider_type: ProviderType::CodexAuth,
                extra_headers: HashMap::new(),
                rate_limit: None,
                models: vec![],
                model_metadata: None,
            }],
            reliability: None,
            routing: None,
        };
        let json_str = config.to_json().unwrap();
        assert!(json_str.contains("\"codex_auth\""));
        let parsed = GatewayConfig::from_json(&json_str).unwrap();
        assert_eq!(parsed.providers[0].provider_type, ProviderType::CodexAuth);
    }

    #[test]
    fn models_field_defaults_to_empty() {
        // JSON without "models" field should deserialize to empty vec
        // (exercises the #[serde(default)] on ProviderConfig::models).
        let json_str = r#"{
            "server": { "host": "127.0.0.1", "port": 8080 },
            "providers": [{
                "name": "test",
                "base_url": "",
                "enabled": true
            }]
        }"#;
        let config = GatewayConfig::from_json(json_str).unwrap();
        assert!(config.providers[0].models.is_empty());
    }

    /// `Google` serializes as "google" and round-trips.
    #[test]
    fn roundtrip_provider_type_google() {
        let config = GatewayConfig {
            server: ServerConfig {
                host: "127.0.0.1".into(),
                port: 8080,
            },
            providers: vec![ProviderConfig {
                name: "google".into(),
                base_url: "https://generativelanguage.googleapis.com".into(),
                api_key_envs: vec!["GEMINI_API_KEY".into()],
                enabled: false,
                provider_type: ProviderType::Google,
                extra_headers: HashMap::new(),
                rate_limit: None,
                models: vec!["gemini-2.5-pro".into()],
                model_metadata: None,
            }],
            reliability: None,
            routing: None,
        };
        let json_str = config.to_json().unwrap();
        assert!(json_str.contains("\"google\""));
        let parsed = GatewayConfig::from_json(&json_str).unwrap();
        assert_eq!(parsed.providers[0].provider_type, ProviderType::Google);
    }

    /// `GithubCopilot` serializes as "github_copilot" and round-trips.
    #[test]
    fn roundtrip_provider_type_github_copilot() {
        let config = GatewayConfig {
            server: ServerConfig {
                host: "127.0.0.1".into(),
                port: 8080,
            },
            providers: vec![ProviderConfig {
                name: "github-copilot".into(),
                base_url: String::new(),
                api_key_envs: vec!["COPILOT_GITHUB_TOKEN".into()],
                enabled: false,
                provider_type: ProviderType::GithubCopilot,
                extra_headers: HashMap::new(),
                rate_limit: None,
                models: vec!["gpt-4o".into()],
                model_metadata: None,
            }],
            reliability: None,
            routing: None,
        };
        let json_str = config.to_json().unwrap();
        assert!(json_str.contains("\"github_copilot\""));
        let parsed = GatewayConfig::from_json(&json_str).unwrap();
        assert_eq!(
            parsed.providers[0].provider_type,
            ProviderType::GithubCopilot
        );
    }

    /// Nested ModelMetadata (including cost) survives a JSON round trip.
    #[test]
    fn roundtrip_model_metadata() {
        let mut metadata = HashMap::new();
        metadata.insert(
            "gpt-4o".to_string(),
            ModelMetadata {
                context_window: Some(128_000),
                max_tokens: Some(16_384),
                reasoning: Some(false),
                input_types: Some(vec!["text".into(), "image".into()]),
                cost: Some(ModelCost {
                    input: 2.50,
                    output: 10.00,
                    cache_read: None,
                    cache_write: None,
                }),
            },
        );
        let config = GatewayConfig {
            server: ServerConfig {
                host: "127.0.0.1".into(),
                port: 8080,
            },
            providers: vec![ProviderConfig {
                name: "openai".into(),
                base_url: "https://api.openai.com/v1".into(),
                api_key_envs: vec![],
                enabled: true,
                provider_type: ProviderType::OpenAiCompatible,
                extra_headers: HashMap::new(),
                rate_limit: None,
                models: vec!["gpt-4o".into()],
                model_metadata: Some(metadata),
            }],
            reliability: None,
            routing: None,
        };
        let json_str = config.to_json().unwrap();
        assert!(json_str.contains("context_window"));
        assert!(json_str.contains("128000"));
        let parsed = GatewayConfig::from_json(&json_str).unwrap();
        let meta = parsed.providers[0]
            .model_metadata
            .as_ref()
            .unwrap()
            .get("gpt-4o")
            .unwrap();
        assert_eq!(meta.context_window, Some(128_000));
        assert_eq!(meta.reasoning, Some(false));
        // Exact float comparison is safe: 2.5 is exactly representable and
        // round-trips through JSON unchanged.
        assert_eq!(meta.cost.as_ref().unwrap().input, 2.50);
    }

    /// Missing `model_metadata` key deserializes to None via #[serde(default)].
    #[test]
    fn model_metadata_defaults_to_none() {
        let json_str = r#"{
            "server": { "host": "127.0.0.1", "port": 8080 },
            "providers": [{
                "name": "test",
                "base_url": "",
                "enabled": true
            }]
        }"#;
        let config = GatewayConfig::from_json(json_str).unwrap();
        assert!(config.providers[0].model_metadata.is_none());
    }
}