// mermaid_cli/models/traits.rs
1//! Core Model trait - the single interface for model interactions
2//!
3//! Adapters implement this trait directly. No intermediate layers.
4
5use async_trait::async_trait;
6
7use super::config::ModelConfig;
8use super::error::Result;
9use super::types::{ChatMessage, ModelResponse, StreamCallback};
10
11/// Core trait that all model adapters implement
12///
13/// This is the only abstraction layer between user code and model providers.
14#[async_trait]
15pub trait Model: Send + Sync {
16 /// Send a chat conversation to the model and get a response
17 async fn chat(
18 &self,
19 messages: &[ChatMessage],
20 config: &ModelConfig,
21 stream_callback: Option<StreamCallback>,
22 ) -> Result<ModelResponse>;
23
24 /// Get the model identifier (e.g., "ollama/tinyllama")
25 fn name(&self) -> &str;
26
27 /// Check if this is a local model (no external API calls)
28 fn is_local(&self) -> bool;
29
30 /// Check if the model backend is available and healthy
31 async fn health_check(&self) -> Result<()>;
32
33 /// List available models from this backend
34 async fn list_models(&self) -> Result<Vec<String>>;
35
36 /// Check if a specific model is available
37 ///
38 /// Matches exact names and implicit `:latest` tags:
39 /// - "llama3" matches "llama3:latest" (bare name matches with :latest tag)
40 /// - "llama3:latest" matches "llama3:latest" (exact)
41 /// - "llama3:7b" does NOT match "llama3:latest" (different tags)
42 async fn has_model(&self, model_name: &str) -> Result<bool> {
43 let models = self.list_models().await?;
44 Ok(models.iter().any(|m| {
45 // Exact match
46 m == model_name
47 // Bare name matches model with :latest tag
48 || (!model_name.contains(':') && *m == format!("{}:latest", model_name))
49 // Model with :latest tag matches bare name
50 || (!m.contains(':') && model_name == format!("{}:latest", m))
51 }))
52 }
53
54 /// Get model capabilities
55 fn capabilities(&self) -> ModelCapabilities {
56 ModelCapabilities::default()
57 }
58}
59
/// Model capabilities (what the model/backend supports)
///
/// All fields are plain value types, so the struct derives `Copy` for cheap
/// pass-by-value and `PartialEq`/`Eq`/`Hash` so capability sets can be
/// compared and used as map keys.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ModelCapabilities {
    /// Maximum context length supported
    pub max_context_length: usize,
    /// Supports streaming responses
    pub supports_streaming: bool,
    /// Supports function/tool calling
    pub supports_functions: bool,
    /// Supports vision/images
    pub supports_vision: bool,
}

impl Default for ModelCapabilities {
    /// Conservative baseline: a 4096-token context with streaming enabled,
    /// and no function-calling or vision support.
    fn default() -> Self {
        Self {
            max_context_length: 4096,
            supports_streaming: true,
            supports_functions: false,
            supports_vision: false,
        }
    }
}