Skip to main content

synwire_core/agents/
model_info.rs

1//! Model information, capabilities, selection, and provider traits.
2
3use serde::{Deserialize, Serialize};
4
5use crate::BoxFuture;
6use crate::agents::error::AgentError;
7
8/// Model reasoning effort level.
9#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
10#[non_exhaustive]
11pub enum EffortLevel {
12    /// Minimal reasoning.
13    Low,
14    /// Moderate reasoning.
15    Medium,
16    /// Deep reasoning (default).
17    High,
18    /// Maximum reasoning.
19    Max,
20}
21
22/// Thinking/reasoning configuration.
23#[derive(Debug, Clone, Serialize, Deserialize)]
24#[non_exhaustive]
25pub enum ThinkingConfig {
26    /// Model decides reasoning depth.
27    Adaptive,
28    /// Fixed token budget for reasoning.
29    Enabled {
30        /// Token budget for reasoning.
31        budget_tokens: u32,
32    },
33    /// No reasoning/thinking.
34    Disabled,
35}
36
37/// Model capabilities.
38#[derive(Debug, Clone, Default, Serialize, Deserialize)]
39#[allow(clippy::struct_excessive_bools)]
40pub struct ModelCapabilities {
41    /// Supports tool use.
42    pub tool_calling: bool,
43    /// Supports image input.
44    pub vision: bool,
45    /// Supports streaming output.
46    pub streaming: bool,
47    /// Supports native JSON mode.
48    pub structured_output: bool,
49    /// Supports reasoning effort levels.
50    pub effort_levels: bool,
51}
52
/// Model metadata and capabilities.
///
/// Returned by [`ModelProvider::list_models`] and filtered via
/// [`ModelSelector`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelInfo {
    /// Model identifier; provider-prefixed IDs (e.g. `"anthropic/..."`) are
    /// what [`ModelSelector::by_provider`] matches on.
    pub id: String,
    /// Human-readable name.
    pub display_name: String,
    /// Model description.
    pub description: String,
    /// Feature support flags.
    pub capabilities: ModelCapabilities,
    /// Max context tokens.
    pub context_window: u32,
    /// Max output tokens.
    pub max_output_tokens: u32,
    /// Supported reasoning levels; empty when the model exposes none.
    pub supported_effort_levels: Vec<EffortLevel>,
}
71
72// ---------------------------------------------------------------------------
73// ModelProvider trait
74// ---------------------------------------------------------------------------
75
/// Source of model metadata — implemented by each LLM provider crate.
///
/// `Send + Sync` so provider handles can be shared across async tasks.
pub trait ModelProvider: Send + Sync {
    /// Return all models offered by this provider.
    ///
    /// # Errors
    ///
    /// Returns an [`AgentError`] when the model list cannot be produced;
    /// exact failure modes are provider-specific.
    fn list_models(&self) -> BoxFuture<'_, Result<Vec<ModelInfo>, AgentError>>;
}
81
82// ---------------------------------------------------------------------------
83// ModelSelector
84// ---------------------------------------------------------------------------
85
/// Queries a `ModelProvider` and selects models that meet specified criteria.
///
/// Borrows a slice of [`ModelInfo`] (typically the result of
/// [`ModelProvider::list_models`]); every returned reference borrows from
/// that slice.
pub struct ModelSelector<'a> {
    /// Backing slice of candidate models.
    models: &'a [ModelInfo],
}
90
91impl<'a> ModelSelector<'a> {
92    /// Create a selector over a slice of model infos.
93    #[must_use]
94    pub const fn new(models: &'a [ModelInfo]) -> Self {
95        Self { models }
96    }
97
98    /// Find a model by exact ID.
99    #[must_use]
100    pub fn by_name(&self, id: &str) -> Option<&ModelInfo> {
101        self.models.iter().find(|m| m.id == id)
102    }
103
104    /// Return all models from a provider whose ID starts with `prefix`.
105    #[must_use]
106    pub fn by_provider(&self, prefix: &str) -> Vec<&ModelInfo> {
107        self.models
108            .iter()
109            .filter(|m| m.id.starts_with(prefix))
110            .collect()
111    }
112
113    /// Return all models that support tool calling.
114    #[must_use]
115    pub fn with_tool_calling(&self) -> Vec<&ModelInfo> {
116        self.models
117            .iter()
118            .filter(|m| m.capabilities.tool_calling)
119            .collect()
120    }
121
122    /// Return all models that support vision (image input).
123    #[must_use]
124    pub fn with_vision(&self) -> Vec<&ModelInfo> {
125        self.models
126            .iter()
127            .filter(|m| m.capabilities.vision)
128            .collect()
129    }
130
131    /// Return all models that support streaming.
132    #[must_use]
133    pub fn with_streaming(&self) -> Vec<&ModelInfo> {
134        self.models
135            .iter()
136            .filter(|m| m.capabilities.streaming)
137            .collect()
138    }
139
140    /// Return all models that support native structured output.
141    #[must_use]
142    pub fn with_structured_output(&self) -> Vec<&ModelInfo> {
143        self.models
144            .iter()
145            .filter(|m| m.capabilities.structured_output)
146            .collect()
147    }
148
149    /// Return all models that support effort levels.
150    #[must_use]
151    pub fn with_effort_levels(&self) -> Vec<&ModelInfo> {
152        self.models
153            .iter()
154            .filter(|m| m.capabilities.effort_levels)
155            .collect()
156    }
157
158    /// Return all models with a context window at least `min_tokens`.
159    #[must_use]
160    pub fn by_min_context(&self, min_tokens: u32) -> Vec<&ModelInfo> {
161        self.models
162            .iter()
163            .filter(|m| m.context_window >= min_tokens)
164            .collect()
165    }
166}
167
#[cfg(test)]
mod tests {
    use super::*;

    /// Build a minimal `ModelInfo` fixture with the given ID and the two
    /// capability flags the tests exercise; everything else is fixed.
    fn make_model(id: &str, tool_calling: bool, vision: bool) -> ModelInfo {
        let capabilities = ModelCapabilities {
            tool_calling,
            vision,
            streaming: true,
            structured_output: false,
            effort_levels: false,
        };
        ModelInfo {
            id: id.to_owned(),
            display_name: id.to_owned(),
            description: String::new(),
            capabilities,
            context_window: 100_000,
            max_output_tokens: 4096,
            supported_effort_levels: Vec::new(),
        }
    }

    #[test]
    fn test_selector_by_name() {
        let catalog = vec![
            make_model("anthropic/claude-3-5-sonnet", true, true),
            make_model("openai/gpt-4o", true, false),
        ];
        let selector = ModelSelector::new(&catalog);
        assert!(selector.by_name("openai/gpt-4o").is_some());
        assert!(selector.by_name("nonexistent").is_none());
    }

    #[test]
    fn test_selector_by_provider() {
        let catalog = vec![
            make_model("anthropic/claude-3-5-sonnet", true, true),
            make_model("anthropic/claude-3-haiku", true, false),
            make_model("openai/gpt-4o", true, false),
        ];
        let selector = ModelSelector::new(&catalog);
        let anthropic = selector.by_provider("anthropic/");
        let openai = selector.by_provider("openai/");
        assert_eq!(anthropic.len(), 2);
        assert_eq!(openai.len(), 1);
    }

    #[test]
    fn test_selector_with_vision() {
        let catalog = vec![
            make_model("vision-model", true, true),
            make_model("text-only", true, false),
        ];
        let selector = ModelSelector::new(&catalog);
        let matches = selector.with_vision();
        assert_eq!(matches.len(), 1);
        assert_eq!(matches[0].id, "vision-model");
    }
}
224}