llm_link/api/
convert.rs

1use crate::normalizer::Response;
2use anyhow::Result;
3use llm_connector::types::{Function, Message as LlmMessage, MessageBlock, Role as LlmRole, Tool};
4use serde_json::Value;
5
6/// Convert OpenAI messages format to llm-connector format
7#[allow(dead_code)]
8pub fn openai_messages_to_llm(messages: Vec<Value>) -> Result<Vec<LlmMessage>> {
9    let mut llm_messages = Vec::with_capacity(messages.len());
10
11    for msg in messages {
12        let role = msg["role"]
13            .as_str()
14            .ok_or_else(|| anyhow::anyhow!("Missing role"))?;
15
16        // Determine the role
17        let llm_role = match role {
18            "system" => LlmRole::System,
19            "user" => LlmRole::User,
20            "assistant" => LlmRole::Assistant,
21            "tool" => LlmRole::Tool,
22            _ => return Err(anyhow::anyhow!("Unsupported role: {}", role)),
23        };
24
25        // Handle content (can be string, array, or null)
26        let content = if msg["content"].is_null() {
27            // Null content is allowed for assistant messages with tool_calls
28            String::new()
29        } else if let Some(content_str) = msg["content"].as_str() {
30            // Simple string content
31            content_str.to_string()
32        } else if let Some(content_array) = msg["content"].as_array() {
33            // Array content (e.g., from Codex with text and images)
34            // Extract text parts and concatenate them
35            let mut text_parts = Vec::with_capacity(content_array.len());
36            for part in content_array {
37                if let Some(text) = part["text"].as_str() {
38                    text_parts.push(text);
39                } else if let Some(text) = part.as_str() {
40                    // Sometimes the array contains direct strings
41                    text_parts.push(text);
42                }
43            }
44            if text_parts.is_empty() {
45                return Err(anyhow::anyhow!("Content array has no text parts"));
46            }
47            text_parts.join("\n")
48        } else {
49            return Err(anyhow::anyhow!(
50                "Content must be string, array, or null, got: {:?}",
51                msg["content"]
52            ));
53        };
54
55        // Extract tool_calls if present (for assistant messages)
56        let tool_calls = if role == "assistant" {
57            msg.get("tool_calls")
58                .and_then(|tc| serde_json::from_value(tc.clone()).ok())
59        } else {
60            None
61        };
62
63        // Extract tool_call_id if present (for tool messages)
64        let tool_call_id = if role == "tool" {
65            // First try standard tool_call_id field
66            let tool_call_id = msg.get("tool_call_id")
67                .and_then(|id| id.as_str())
68                .map(|s| s.to_string());
69
70            // If not found, try Zed's tool_name field as fallback
71            let tool_call_id = if tool_call_id.is_none() {
72                msg.get("tool_name")
73                    .and_then(|name| name.as_str())
74                    .map(|s| format!("zed_tool_{}", s)) // Prefix to distinguish from real tool_call_ids
75            } else {
76                tool_call_id
77            };
78
79            tracing::debug!("🔧 Tool message: tool_call_id={:?}, tool_name={:?}",
80                          tool_call_id, msg.get("tool_name"));
81
82            tool_call_id
83        } else {
84            None
85        };
86
87        // Debug logging for tool messages
88        if role == "tool" {
89            tracing::debug!("🔧 Converting tool message: role={}, tool_call_id={:?}, content_len={}",
90                          role, tool_call_id, content.len());
91
92            // Additional validation: ensure tool_call_id is not empty
93            if let Some(ref id) = tool_call_id {
94                if id.trim().is_empty() {
95                    return Err(anyhow::anyhow!(
96                        "Tool message has empty 'tool_call_id' field. Tool call ID must be a non-empty string."
97                    ));
98                }
99                tracing::debug!("✅ Tool message validation passed: tool_call_id='{}'", id);
100            }
101        }
102
103        llm_messages.push(LlmMessage {
104            role: llm_role,
105            content: vec![MessageBlock::Text { text: content }],
106            name: None,
107            tool_calls,
108            tool_call_id,
109            reasoning_content: None,
110            reasoning: None,
111            thought: None,
112            thinking: None,
113        });
114    }
115
116    Ok(llm_messages)
117}
118
119/// Convert Response to OpenAI format
120#[allow(dead_code)]
121pub fn response_to_openai(response: Response) -> Value {
122    let mut message = serde_json::json!({
123        "role": "assistant",
124        "content": response.content
125    });
126
127    // Add tool_calls if present
128    if let Some(tool_calls) = response.tool_calls {
129        message["tool_calls"] = tool_calls;
130    }
131
132    serde_json::json!({
133        "id": uuid::Uuid::new_v4().to_string(),
134        "object": "chat.completion",
135        "created": chrono::Utc::now().timestamp(),
136        "model": response.model,
137        "choices": [{
138            "index": 0,
139            "message": message,
140            "finish_reason": "stop"
141        }],
142        "usage": {
143            "prompt_tokens": response.usage.prompt_tokens,
144            "completion_tokens": response.usage.completion_tokens,
145            "total_tokens": response.usage.total_tokens
146        }
147    })
148}
149
150/// Convert Response to Ollama format
151#[allow(dead_code)]
152pub fn response_to_ollama(response: Response) -> Value {
153    let mut message = serde_json::json!({
154        "role": "assistant",
155        "content": response.content
156    });
157
158    if let Some(tool_calls) = response.tool_calls {
159        message["tool_calls"] = tool_calls;
160    }
161
162    serde_json::json!({
163        "model": response.model,
164        "created_at": chrono::Utc::now().to_rfc3339(),
165        "message": message,
166        "done": true,
167        "total_duration": 0,
168        "load_duration": 0,
169        "prompt_eval_count": response.usage.prompt_tokens,
170        "prompt_eval_duration": 0,
171        "eval_count": response.usage.completion_tokens,
172        "eval_duration": 0
173    })
174}
175
176/// Convert MiniMax API response to Ollama format
177#[allow(dead_code)]
178pub fn response_to_ollama_from_minimax(minimax_response: Value) -> Value {
179    // Extract content from MiniMax response
180    let content = minimax_response
181        .get("choices")
182        .and_then(|c| c.get(0))
183        .and_then(|c| c.get("message"))
184        .and_then(|m| m.get("content"))
185        .and_then(|c| c.as_str())
186        .unwrap_or("No response")
187        .to_string();
188
189    let model = minimax_response
190        .get("model")
191        .and_then(|m| m.as_str())
192        .unwrap_or("MiniMax-M2")
193        .to_string();
194
195    serde_json::json!({
196        "model": model,
197        "created_at": chrono::Utc::now().to_rfc3339(),
198        "message": {
199            "role": "assistant",
200            "content": content
201        },
202        "done": true,
203        "total_duration": 0,
204        "load_duration": 0,
205        "prompt_eval_count": 0,
206        "prompt_eval_duration": 0,
207        "eval_count": 0,
208        "eval_duration": 0
209    })
210}
211
212/// Convert OpenAI tools format to llm-connector format
213#[allow(dead_code)]
214pub fn openai_tools_to_llm(tools: Vec<Value>) -> Vec<Tool> {
215    tools
216        .into_iter()
217        .filter_map(|tool| {
218            let tool_type = tool.get("type")?.as_str()?.to_string();
219            let function = tool.get("function")?;
220
221            Some(Tool {
222                tool_type,
223                function: Function {
224                    name: function.get("name")?.as_str()?.to_string(),
225                    description: function
226                        .get("description")
227                        .and_then(|d| d.as_str())
228                        .map(String::from),
229                    parameters: function.get("parameters")?.clone(),
230                },
231            })
232        })
233        .collect()
234}
235
236/// Convert model list to Ollama format
237#[allow(dead_code)]
238pub fn models_to_ollama(models: Vec<crate::normalizer::Model>) -> Vec<Value> {
239    models
240        .into_iter()
241        .map(|model| {
242            let family = model.id.split('-').next().unwrap_or("unknown");
243            serde_json::json!({
244                "name": model.id,
245                "model": model.id,
246                "modified_at": chrono::Utc::now().to_rfc3339(),
247                "size": 1000000,
248                "digest": format!("sha256:{}", "0".repeat(64)),
249                "details": {
250                    "parent_model": "",
251                    "format": "gguf",
252                    "family": family,
253                    "families": [family],
254                    "parameter_size": "7B",
255                    "quantization_level": "Q4_K_M"
256                },
257                "expires_at": null
258            })
259        })
260        .collect()
261}
262
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    /// A full tool-use round trip (user → assistant tool call → tool result →
    /// user) converts with roles, tool_calls, and tool_call_id intact.
    #[test]
    fn test_tool_call_id_conversion() {
        let assistant_turn = json!({
            "role": "assistant",
            "content": null,
            "tool_calls": [
                {
                    "id": "call_123",
                    "type": "function",
                    "function": {
                        "name": "get_weather",
                        "arguments": "{\"location\": \"Beijing\"}"
                    }
                }
            ]
        });
        let messages = vec![
            json!({"role": "user", "content": "What is the weather in Beijing?"}),
            assistant_turn,
            json!({
                "role": "tool",
                "content": "The weather in Beijing is sunny, 25°C",
                "tool_call_id": "call_123"
            }),
            json!({"role": "user", "content": "What about Shanghai?"}),
        ];

        let llm_messages = openai_messages_to_llm(messages).expect("conversion should succeed");
        assert_eq!(llm_messages.len(), 4);

        // The tool message keeps its tool_call_id.
        assert_eq!(llm_messages[2].role, LlmRole::Tool);
        assert_eq!(llm_messages[2].tool_call_id, Some("call_123".to_string()));

        // The assistant message keeps its tool_calls.
        assert_eq!(llm_messages[1].role, LlmRole::Assistant);
        assert!(llm_messages[1].tool_calls.is_some());
    }

    /// A tool message without any tool_call_id converts successfully — the id
    /// is optional for compatibility with clients that omit it.
    #[test]
    fn test_missing_tool_call_id() {
        let messages = vec![json!({
            "role": "tool",
            "content": "Some tool response"
        })];

        let llm_messages = openai_messages_to_llm(messages).expect("missing id is tolerated");
        assert_eq!(llm_messages.len(), 1);
        assert_eq!(llm_messages[0].role, LlmRole::Tool);
        assert_eq!(llm_messages[0].tool_call_id, None);
    }

    /// A tool message with a present-but-empty tool_call_id is rejected.
    #[test]
    fn test_empty_tool_call_id() {
        let messages = vec![json!({
            "role": "tool",
            "content": "Some tool response",
            "tool_call_id": ""
        })];

        let err = openai_messages_to_llm(messages).expect_err("empty id must be rejected");
        assert!(err
            .to_string()
            .contains("Tool message has empty 'tool_call_id' field"));
    }
}
355