code_mesh_core/llm/mod.rs

//! LLM provider abstractions and implementations

use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

pub mod provider;
pub mod model;
pub mod anthropic;
pub mod openai;
pub mod github_copilot;
pub mod registry;

#[cfg(test)]
mod anthropic_test;

pub use provider::{
    Provider, ProviderConfig, ProviderRegistry, ModelConfig, ModelCapabilities,
    Cost, Limits, ProviderSource, ProviderStatus, RetryConfig, retry_with_backoff,
    ModelInfo, ModelLimits, ModelPricing, ModelStatus, ProviderHealth, RateLimitInfo, UsageStats,
    Model,
};
pub use anthropic::{AnthropicProvider, AnthropicModel, AnthropicModelWithProvider};
pub use openai::{OpenAIProvider, OpenAIModel, OpenAIModelWithProvider, AzureOpenAIProvider, AzureOpenAIModelWithProvider};
pub use github_copilot::{GitHubCopilotProvider, GitHubCopilotModel, GitHubCopilotModelWithProvider};
pub use registry::{LLMRegistry, create_default_registry, create_registry_with_models_dev};

/// Language model trait for interacting with LLM providers
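///
/// # Example
///
/// A minimal usage sketch; the `code_mesh_core` crate path is assumed from the
/// module location, and the snippet is marked `ignore` so it is not compiled
/// as a doctest:
///
/// ```ignore
/// use code_mesh_core::llm::{GenerateOptions, LanguageModel, Message, MessageContent, MessageRole};
///
/// async fn ask(model: &dyn LanguageModel, prompt: &str) -> code_mesh_core::Result<String> {
///     let messages = vec![Message {
///         role: MessageRole::User,
///         content: MessageContent::Text(prompt.to_string()),
///         name: None,
///         tool_calls: None,
///         tool_call_id: None,
///     }];
///     let result = model.generate(messages, GenerateOptions::default()).await?;
///     Ok(result.content)
/// }
/// ```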
#[async_trait]
pub trait LanguageModel: Send + Sync {
    /// Generate text from a list of messages
    async fn generate(
        &self,
        messages: Vec<Message>,
        options: GenerateOptions,
    ) -> crate::Result<GenerateResult>;

    /// Stream text generation
    async fn stream(
        &self,
        messages: Vec<Message>,
        options: StreamOptions,
    ) -> crate::Result<Box<dyn futures::Stream<Item = crate::Result<StreamChunk>> + Send + Unpin>>;

    /// Check if the model supports tool calling
    fn supports_tools(&self) -> bool;

    /// Check if the model supports vision/images
    fn supports_vision(&self) -> bool;

    /// Check if the model supports caching
    fn supports_caching(&self) -> bool;
}

/// Message in a conversation
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Message {
    pub role: MessageRole,
    pub content: MessageContent,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_calls: Option<Vec<ToolCall>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_call_id: Option<String>,
}

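/// Role of the author of a message in a conversation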
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum MessageRole {
    System,
    User,
    Assistant,
    Tool,
}

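/// Message content: either plain text or a list of multimodal parts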
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum MessageContent {
    Text(String),
    Parts(Vec<MessagePart>),
}

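/// A single part of a multi-part message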
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum MessagePart {
    Text { text: String },
    Image { image: ImageData },
}

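/// Image payload; may be referenced by URL or embedded as base64 data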
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImageData {
    pub url: Option<String>,
    pub base64: Option<String>,
    pub mime_type: String,
}

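/// A tool invocation requested by the model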
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolCall {
    pub id: String,
    pub name: String,
    pub arguments: serde_json::Value,
}

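/// Options for a non-streaming generation request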
#[derive(Debug, Clone, Default)]
pub struct GenerateOptions {
    pub temperature: Option<f32>,
    pub max_tokens: Option<u32>,
    pub tools: Vec<ToolDefinition>,
    pub stop_sequences: Vec<String>,
}

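/// Options for a streaming generation request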
#[derive(Debug, Clone, Default)]
pub struct StreamOptions {
    pub temperature: Option<f32>,
    pub max_tokens: Option<u32>,
    pub tools: Vec<ToolDefinition>,
    pub stop_sequences: Vec<String>,
}

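/// Definition of a tool exposed to the model.
///
/// A minimal construction sketch; the JSON-schema shape of `parameters` is
/// illustrative only, not prescribed by this module (marked `ignore` so it is
/// not compiled as a doctest):
///
/// ```ignore
/// use serde_json::json;
///
/// let read_file = ToolDefinition {
///     name: "read_file".into(),
///     description: "Read a file from the workspace".into(),
///     parameters: json!({
///         "type": "object",
///         "properties": { "path": { "type": "string" } },
///         "required": ["path"]
///     }),
/// };
/// ```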
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolDefinition {
    pub name: String,
    pub description: String,
    pub parameters: serde_json::Value,
}

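/// Result of a completed generation request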
#[derive(Debug, Clone)]
pub struct GenerateResult {
    pub content: String,
    pub tool_calls: Vec<ToolCall>,
    pub usage: Usage,
    pub finish_reason: FinishReason,
}

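/// An incremental update emitted while streaming a response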
#[derive(Debug, Clone)]
pub struct StreamChunk {
    pub delta: String,
    pub tool_calls: Vec<ToolCall>,
    pub finish_reason: Option<FinishReason>,
}

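/// Token usage reported for a request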
#[derive(Debug, Clone, Copy)]
pub struct Usage {
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    pub total_tokens: u32,
}

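/// Reason the model stopped generating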
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FinishReason {
    Stop,
    Length,
    ToolCalls,
    ContentFilter,
}