//! LangGraph provider client (`llmg_providers/langgraph.rs`).

1use llmg_core::{
2    provider::{ApiKeyCredentials, Credentials, LlmError, Provider},
3    types::{
4        ChatCompletionRequest, ChatCompletionResponse, Choice, EmbeddingRequest, EmbeddingResponse,
5        Message, Usage,
6    },
7};
8
/// LangGraph API client for agent framework.
///
/// Construct via [`LangGraphClient::new`] or [`LangGraphClient::from_env`];
/// the target host can be changed with `with_base_url`.
#[derive(Debug)]
pub struct LangGraphClient {
    // Shared HTTP client used for all requests.
    http_client: reqwest::Client,
    // Endpoint root; defaults to the hosted LangSmith API.
    base_url: String,
    // Credential strategy applied to each outgoing request
    // (API-key based by default — see `new`).
    credentials: Box<dyn Credentials>,
}
16
/// Serialized body for a chat-completion request.
///
/// Optional tuning fields are omitted from the JSON payload when unset
/// so the server applies its own defaults.
#[derive(Debug, serde::Serialize)]
struct LangGraphRequest {
    model: String,
    messages: Vec<LangGraphMessage>,
    #[serde(skip_serializing_if = "Option::is_none")]
    temperature: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    max_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    top_p: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    stream: Option<bool>,
}
30
/// A single role-tagged chat message in the wire format.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
struct LangGraphMessage {
    // One of "system", "user", or "assistant" — see `convert_request`.
    role: String,
    content: String,
}
36
37#[derive(Debug, serde::Deserialize)]
38struct LangGraphResponse {
39    id: String,
40    object: String,
41    created: u64,
42    model: String,
43    choices: Vec<LangGraphChoice>,
44    #[serde(skip_serializing_if = "Option::is_none")]
45    usage: Option<LangGraphUsage>,
46}
47
/// One candidate completion within a response.
#[derive(Debug, serde::Deserialize)]
struct LangGraphChoice {
    index: u32,
    message: LangGraphMessage,
    // Absent when the completion is still in progress (e.g. streaming).
    // NOTE(review): assumption based on the Option type — confirm
    // against the API's actual behavior.
    finish_reason: Option<String>,
}
54
/// Token accounting reported by the server.
#[derive(Debug, serde::Deserialize)]
struct LangGraphUsage {
    prompt_tokens: u32,
    completion_tokens: u32,
    total_tokens: u32,
}
61
62impl LangGraphClient {
63    /// Create a new LangGraph client from environment
64    pub fn from_env() -> Result<Self, LlmError> {
65        let api_key = std::env::var("LANGGRAPH_API_KEY").map_err(|_| LlmError::AuthError)?;
66
67        Ok(Self::new(api_key))
68    }
69
70    /// Create a new LangGraph client with explicit API key
71    pub fn new(api_key: impl Into<String>) -> Self {
72        let api_key = api_key.into();
73
74        Self {
75            http_client: reqwest::Client::new(),
76            base_url: "https://api.smith.langchain.com".to_string(),
77            credentials: Box::new(ApiKeyCredentials::new(api_key)),
78        }
79    }
80
81    /// Create with custom base URL
82    pub fn with_base_url(mut self, url: impl Into<String>) -> Self {
83        self.base_url = url.into();
84        self
85    }
86
87    fn convert_request(&self, request: ChatCompletionRequest) -> LangGraphRequest {
88        let messages = request
89            .messages
90            .into_iter()
91            .filter_map(|msg| match msg {
92                Message::System { content, .. } => Some(LangGraphMessage {
93                    role: "system".to_string(),
94                    content,
95                }),
96                Message::User { content, .. } => Some(LangGraphMessage {
97                    role: "user".to_string(),
98                    content,
99                }),
100                Message::Assistant { content, .. } => content.map(|c| LangGraphMessage {
101                    role: "assistant".to_string(),
102                    content: c,
103                }),
104                _ => None,
105            })
106            .collect();
107
108        LangGraphRequest {
109            model: request.model,
110            messages,
111            temperature: request.temperature,
112            max_tokens: request.max_tokens,
113            top_p: request.top_p,
114            stream: request.stream,
115        }
116    }
117
118    fn convert_response(&self, response: LangGraphResponse) -> ChatCompletionResponse {
119        let choices = response
120            .choices
121            .into_iter()
122            .map(|choice| Choice {
123                index: choice.index,
124                message: Message::Assistant {
125                    content: Some(choice.message.content),
126                    refusal: None,
127                    tool_calls: None,
128                },
129                finish_reason: choice.finish_reason,
130            })
131            .collect();
132
133        ChatCompletionResponse {
134            id: response.id,
135            object: response.object,
136            created: response.created as i64,
137            model: response.model,
138            choices,
139            usage: response.usage.map(|u| Usage {
140                prompt_tokens: u.prompt_tokens,
141                completion_tokens: u.completion_tokens,
142                total_tokens: u.total_tokens,
143            }),
144        }
145    }
146
147    async fn make_request(
148        &self,
149        request: ChatCompletionRequest,
150    ) -> Result<ChatCompletionResponse, LlmError> {
151        let langgraph_req = self.convert_request(request);
152        let url = format!("{}/chat/completions", self.base_url);
153
154        let mut req = self
155            .http_client
156            .post(&url)
157            .json(&langgraph_req)
158            .build()
159            .map_err(|e| LlmError::HttpError(e.to_string()))?;
160
161        self.credentials.apply(&mut req)?;
162
163        let response = self
164            .http_client
165            .execute(req)
166            .await
167            .map_err(|e| LlmError::HttpError(e.to_string()))?;
168
169        if !response.status().is_success() {
170            let status = response.status().as_u16();
171            let text = response.text().await.unwrap_or_default();
172            return Err(LlmError::ApiError {
173                status,
174                message: text,
175            });
176        }
177
178        let langgraph_resp: LangGraphResponse = response
179            .json()
180            .await
181            .map_err(|e| LlmError::HttpError(e.to_string()))?;
182
183        Ok(self.convert_response(langgraph_resp))
184    }
185}
186
187#[async_trait::async_trait]
188impl Provider for LangGraphClient {
189    async fn chat_completion(
190        &self,
191        request: ChatCompletionRequest,
192    ) -> Result<ChatCompletionResponse, LlmError> {
193        self.make_request(request).await
194    }
195
196    async fn embeddings(&self, _request: EmbeddingRequest) -> Result<EmbeddingResponse, LlmError> {
197        Err(LlmError::ProviderError(
198            "LangGraph does not support embeddings".to_string(),
199        ))
200    }
201    fn provider_name(&self) -> &'static str {
202        "langgraph"
203    }
204}
205
#[cfg(test)]
mod tests {
    use super::*;

    /// The client should identify itself as the "langgraph" provider.
    #[test]
    fn test_langgraph_client_creation() {
        assert_eq!(LangGraphClient::new("test-key").provider_name(), "langgraph");
    }

    /// A single user message survives conversion with model and role intact.
    #[test]
    fn test_request_conversion() {
        let client = LangGraphClient::new("test-key");

        let user_msg = Message::User {
            content: "Hello!".to_string(),
            name: None,
        };
        let request = ChatCompletionRequest {
            model: "langgraph-agent".to_string(),
            messages: vec![user_msg],
            temperature: Some(0.7),
            max_tokens: Some(100),
            top_p: None,
            stream: None,
            frequency_penalty: None,
            presence_penalty: None,
            stop: None,
            user: None,
            tools: None,
            tool_choice: None,
            response_format: None,
        };

        let converted = client.convert_request(request);

        assert_eq!(converted.model, "langgraph-agent");
        assert_eq!(converted.messages.len(), 1);
        assert_eq!(converted.messages[0].role, "user");
    }
}
246