llm_link/api/
convert.rs

use crate::llm::Response;
use anyhow::Result;
use llm_connector::types::{Function, Message as LlmMessage, MessageBlock, Role as LlmRole, Tool};
use serde_json::Value;

/// Convert OpenAI messages format to llm-connector format
#[allow(dead_code)]
pub fn openai_messages_to_llm(messages: Vec<Value>) -> Result<Vec<LlmMessage>> {
    let mut llm_messages = Vec::new();

    for msg in messages {
        let role = msg["role"]
            .as_str()
            .ok_or_else(|| anyhow::anyhow!("Missing role"))?;

        // Determine the role
        let llm_role = match role {
            "system" => LlmRole::System,
            "user" => LlmRole::User,
            "assistant" => LlmRole::Assistant,
            "tool" => LlmRole::Tool,
            _ => return Err(anyhow::anyhow!("Unsupported role: {}", role)),
        };

        // Handle content (can be a string, an array, or null)
        let content = if msg["content"].is_null() {
            // Null content is allowed for assistant messages with tool_calls
            String::new()
        } else if let Some(content_str) = msg["content"].as_str() {
            // Simple string content
            content_str.to_string()
        } else if let Some(content_array) = msg["content"].as_array() {
            // Array content (e.g., from Codex with text and images):
            // extract the text parts and concatenate them
            let mut text_parts = Vec::new();
            for part in content_array {
                if let Some(text) = part["text"].as_str() {
                    text_parts.push(text);
                } else if let Some(text) = part.as_str() {
                    // Sometimes the array contains direct strings
                    text_parts.push(text);
                }
            }
            if text_parts.is_empty() {
                return Err(anyhow::anyhow!("Content array has no text parts"));
            }
            text_parts.join("\n")
        } else {
            return Err(anyhow::anyhow!(
                "Content must be string, array, or null, got: {:?}",
                msg["content"]
            ));
        };

        // Extract tool_calls if present (for assistant messages)
        let tool_calls = if role == "assistant" {
            msg.get("tool_calls")
                .and_then(|tc| serde_json::from_value(tc.clone()).ok())
        } else {
            None
        };

        // Extract tool_call_id if present (for tool messages)
        let tool_call_id = if role == "tool" {
            msg.get("tool_call_id")
                .and_then(|id| id.as_str())
                .map(|s| s.to_string())
        } else {
            None
        };

        llm_messages.push(LlmMessage {
            role: llm_role,
            content: vec![MessageBlock::Text { text: content }],
            name: None,
            tool_calls,
            tool_call_id,
            reasoning_content: None,
            reasoning: None,
            thought: None,
            thinking: None,
        });
    }

    Ok(llm_messages)
}
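// A minimal usage sketch exercising the string and array content paths.
// It relies only on items already imported at the top of this file.
#[cfg(test)]
mod openai_messages_tests {
    use super::*;
    use serde_json::json;

    #[test]
    fn converts_string_and_array_content() {
        let msgs = vec![
            json!({ "role": "user", "content": "hello" }),
            json!({
                "role": "user",
                "content": [{ "type": "text", "text": "part one" }, "part two"]
            }),
        ];
        let out = openai_messages_to_llm(msgs).unwrap();
        assert_eq!(out.len(), 2);
        assert!(matches!(&out[0].role, LlmRole::User));
        // Text parts are concatenated with a newline separator
        assert!(matches!(
            &out[1].content[0],
            MessageBlock::Text { text } if text == "part one\npart two"
        ));
    }
}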
/// Convert Response to OpenAI chat-completion format
#[allow(dead_code)]
pub fn response_to_openai(response: Response) -> Value {
    let mut message = serde_json::json!({
        "role": "assistant",
        "content": response.content
    });

    // Add tool_calls if present; per the OpenAI API, the finish reason is
    // "tool_calls" whenever the assistant requests tool invocations
    let finish_reason = if let Some(tool_calls) = response.tool_calls {
        message["tool_calls"] = tool_calls;
        "tool_calls"
    } else {
        "stop"
    };

    serde_json::json!({
        // OpenAI completion ids conventionally carry a "chatcmpl-" prefix
        "id": format!("chatcmpl-{}", uuid::Uuid::new_v4()),
        "object": "chat.completion",
        "created": chrono::Utc::now().timestamp(),
        "model": response.model,
        "choices": [{
            "index": 0,
            "message": message,
            "finish_reason": finish_reason
        }],
        "usage": {
            "prompt_tokens": response.usage.prompt_tokens,
            "completion_tokens": response.usage.completion_tokens,
            "total_tokens": response.usage.total_tokens
        }
    })
}
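// A round-trip sketch. The struct literal assumes `Response` exposes exactly
// the fields used in this file (content, tool_calls, model, usage) and that
// the usage type is reachable as `crate::llm::Usage`; both are assumptions,
// so adjust the literal if the real definitions differ.
#[cfg(test)]
mod response_to_openai_tests {
    use super::*;

    #[test]
    fn wraps_content_in_chat_completion_envelope() {
        let response = Response {
            content: "hi".to_string(),
            tool_calls: None,
            model: "test-model".to_string(),
            usage: crate::llm::Usage {
                prompt_tokens: 1,
                completion_tokens: 2,
                total_tokens: 3,
            },
        };
        let v = response_to_openai(response);
        assert!(v["id"].as_str().unwrap().starts_with("chatcmpl-"));
        assert_eq!(v["object"], "chat.completion");
        assert_eq!(v["choices"][0]["message"]["content"], "hi");
        assert_eq!(v["choices"][0]["finish_reason"], "stop");
        assert_eq!(v["usage"]["total_tokens"], 3);
    }
}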
/// Convert Response to Ollama chat format
#[allow(dead_code)]
pub fn response_to_ollama(response: Response) -> Value {
    serde_json::json!({
        "model": response.model,
        "created_at": chrono::Utc::now().to_rfc3339(),
        "message": {
            "role": "assistant",
            "content": response.content
        },
        "done": true,
        // Timings are not tracked here, so all duration fields are zero;
        // token usage maps onto Ollama's eval-count fields
        "total_duration": 0,
        "load_duration": 0,
        "prompt_eval_count": response.usage.prompt_tokens,
        "prompt_eval_duration": 0,
        "eval_count": response.usage.completion_tokens,
        "eval_duration": 0
    })
}
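// Same `Response`/`crate::llm::Usage` assumptions as the sketch above;
// this one checks that token usage lands in Ollama's eval-count fields.
#[cfg(test)]
mod response_to_ollama_tests {
    use super::*;

    #[test]
    fn maps_usage_onto_eval_counts() {
        let response = Response {
            content: "hi".to_string(),
            tool_calls: None,
            model: "test-model".to_string(),
            usage: crate::llm::Usage {
                prompt_tokens: 1,
                completion_tokens: 2,
                total_tokens: 3,
            },
        };
        let v = response_to_ollama(response);
        assert_eq!(v["done"], true);
        assert_eq!(v["message"]["content"], "hi");
        assert_eq!(v["prompt_eval_count"], 1);
        assert_eq!(v["eval_count"], 2);
    }
}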
/// Convert OpenAI tools format to llm-connector format
#[allow(dead_code)]
pub fn openai_tools_to_llm(tools: Vec<Value>) -> Vec<Tool> {
    tools
        .into_iter()
        // Entries missing "type", "function", "name", or "parameters" are
        // silently skipped rather than failing the whole batch
        .filter_map(|tool| {
            let tool_type = tool.get("type")?.as_str()?.to_string();
            let function = tool.get("function")?;

            Some(Tool {
                tool_type,
                function: Function {
                    name: function.get("name")?.as_str()?.to_string(),
                    description: function
                        .get("description")
                        .and_then(|d| d.as_str())
                        .map(String::from),
                    parameters: function.get("parameters")?.clone(),
                },
            })
        })
        .collect()
}
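// A usage sketch showing that well-formed tools convert and malformed
// entries are dropped; it uses only the Tool/Function shapes from above.
#[cfg(test)]
mod openai_tools_tests {
    use super::*;
    use serde_json::json;

    #[test]
    fn keeps_well_formed_tools_and_drops_malformed_ones() {
        let tools = vec![
            json!({
                "type": "function",
                "function": {
                    "name": "get_weather",
                    "description": "Look up current weather",
                    "parameters": { "type": "object", "properties": {} }
                }
            }),
            // No "function" object, so filter_map drops this entry
            json!({ "type": "function" }),
        ];
        let out = openai_tools_to_llm(tools);
        assert_eq!(out.len(), 1);
        assert_eq!(out[0].tool_type, "function");
        assert_eq!(out[0].function.name, "get_weather");
    }
}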
/// Convert model list to Ollama format
#[allow(dead_code)]
pub fn models_to_ollama(models: Vec<crate::llm::Model>) -> Vec<Value> {
    models
        .into_iter()
        .map(|model| {
            // Ollama reports a model family; approximate it with the first
            // hyphen-separated segment of the id (e.g. "llama-3" -> "llama")
            let family = model.id.split('-').next().unwrap_or("unknown");
            serde_json::json!({
                "name": model.id,
                "model": model.id,
                "modified_at": chrono::Utc::now().to_rfc3339(),
                // Size, digest, and detail fields are fixed placeholders so
                // the payload matches the shape Ollama clients expect
                "size": 1000000,
                "digest": format!("sha256:{}", "0".repeat(64)),
                "details": {
                    "parent_model": "",
                    "format": "gguf",
                    "family": family,
                    "families": [family],
                    "parameter_size": "7B",
                    "quantization_level": "Q4_K_M"
                },
                "expires_at": null
            })
        })
        .collect()
}
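// A sketch assuming `crate::llm::Model` can be built from just an `id`
// (hypothetical literal below; add any other required fields when adapting).
#[cfg(test)]
mod models_to_ollama_tests {
    use super::*;

    #[test]
    fn derives_family_from_id_prefix() {
        let models = vec![crate::llm::Model { id: "llama-3.1".to_string() }];
        let out = models_to_ollama(models);
        assert_eq!(out[0]["name"], "llama-3.1");
        assert_eq!(out[0]["details"]["family"], "llama");
        assert_eq!(out[0]["details"]["families"][0], "llama");
    }
}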