// ort_openrouter_cli/input/to_json.rs
extern crate alloc;
8use alloc::string::String;
9
10use crate::{ErrorKind, LastData, Message, OrtResult, PromptOpts, Write, ort_error};
11
/// Build the JSON request body for a streaming chat completion, using the
/// model at `idx` in `opts.models`.
///
/// Emits `{"stream": true, "usage": {...}, "model": ..., ["provider": {...},]
/// "reasoning": {...}, "messages": [...]}` — byte order matters, it is pinned
/// by golden tests.
///
/// # Errors
/// Propagates any writer error.
///
/// # Panics
/// Panics if `idx` is out of bounds for `opts.models`, or if a reasoning
/// config is enabled with neither `effort` nor `tokens` set (both are caller
/// invariants).
pub fn build_body(idx: usize, opts: &PromptOpts, messages: &[Message]) -> OrtResult<String> {
    // Preallocate: ~1 KiB of fixed scaffolding plus the summed message sizes.
    let capacity: u32 = 1024 + messages.iter().map(|m| m.size()).sum::<u32>();
    let mut string_buf = String::with_capacity(capacity as usize);
    // SAFETY: everything appended below is valid UTF-8 (ASCII scaffolding and
    // `&str` contents), so the String's UTF-8 invariant is preserved.
    let mut w = unsafe { string_buf.as_mut_vec() };

    w.write_str("{\"stream\": true, \"usage\": {\"include\": true}, \"model\": ")?;
    write_json_str(&mut w, opts.models.get(idx).expect("Missing model"))?;

    // Optional provider-routing object, emitted only when either field is set.
    if opts.priority.is_some() || opts.provider.is_some() {
        w.write_str(", \"provider\": {")?;
        let mut is_first = true;
        if let Some(p) = opts.priority {
            w.write_str("\"sort\":")?;
            // Trusted internal enum name — no escaping needed.
            write_json_str_simple(&mut w, p.as_str())?;
            is_first = false;
        }
        if let Some(pr) = &opts.provider {
            if !is_first {
                w.write_str(", ")?;
            }
            // A single provider is pinned via a one-element "order" array.
            w.write_str("\"order\": [")?;
            write_json_str(&mut w, pr)?;
            w.write_char(']')?;
        }
        w.write_char('}')?;
    }

    w.write_str(", \"reasoning\": ")?;
    match &opts.reasoning {
        None => {
            // No config at all: reasoning explicitly disabled.
            w.write_str("{\"enabled\": false}")?;
        }
        Some(r_cfg) if !r_cfg.enabled => {
            // Config present but switched off.
            w.write_str("{\"enabled\": false}")?;
        }
        Some(r_cfg) => match (r_cfg.effort, r_cfg.tokens) {
            // Effort takes precedence when both effort and tokens are set.
            (Some(effort), _) => {
                w.write_str("{\"exclude\": false, \"enabled\": true, \"effort\":")?;
                write_json_str_simple(&mut w, effort.as_str())?;
                w.write_char('}')?;
            }
            (_, Some(tokens)) => {
                w.write_str("{\"exclude\": false, \"enabled\": true, \"max_tokens\":")?;
                write_u32(&mut w, tokens)?;
                w.write_char('}')?;
            }
            _ => unreachable!("Reasoning effort and tokens cannot both be null"),
        },
    };

    w.write_str(", \"messages\":")?;
    Message::write_json_array(messages, &mut w)?;

    w.write_char('}')?;

    Ok(string_buf)
}
74
75impl LastData {
76 pub fn to_json_writer<W: Write>(&self, writer: W) -> OrtResult<()> {
77 let mut w = writer;
78
79 w.write_str("{\"opts\":{")?;
80 let mut first = true;
81
82 if let Some(ref v) = self.opts.prompt {
83 if !first {
84 w.write_char(',')?;
85 } else {
86 first = false;
87 }
88 w.write_str("\"prompt\":")?;
89 write_json_str(&mut w, v)?;
90 }
91 if let Some(v) = self.opts.models.first() {
93 if !first {
94 w.write_char(',')?;
95 } else {
96 first = false;
97 }
98 w.write_str("\"model\":")?;
99 write_json_str(&mut w, v)?;
100 }
101 if let Some(ref v) = self.opts.provider {
102 if !first {
103 w.write_char(',')?;
104 } else {
105 first = false;
106 }
107 w.write_str("\"provider\":")?;
108 write_json_str(&mut w, v)?;
109 }
110 if let Some(ref v) = self.opts.system {
111 if !first {
112 w.write_char(',')?;
113 } else {
114 first = false;
115 }
116 w.write_str("\"system\":")?;
117 write_json_str(&mut w, v)?;
118 }
119 if let Some(ref p) = self.opts.priority {
120 if !first {
121 w.write_char(',')?;
122 } else {
123 first = false;
124 }
125 w.write_str("\"priority\":")?;
126 write_json_str_simple(&mut w, p.as_str())?;
127 }
128 if let Some(ref rc) = self.opts.reasoning {
129 if !first {
130 w.write_char(',')?;
131 } else {
132 first = false;
133 }
134 w.write_str("\"reasoning\":{")?;
135 w.write_str("\"enabled\":")?;
137 write_bool(&mut w, rc.enabled)?;
138 if let Some(ref eff) = rc.effort {
139 w.write_str(",\"effort\":")?;
140 write_json_str_simple(&mut w, eff.as_str())?;
141 }
142 if let Some(tokens) = rc.tokens {
143 w.write_str(",\"tokens\":")?;
144 write_u32(&mut w, tokens)?;
145 }
146 w.write_char('}')?;
147 }
148 if let Some(show) = self.opts.show_reasoning {
149 if !first {
150 w.write_char(',')?;
151 } else {
152 first = false;
153 }
154 w.write_str("\"show_reasoning\":")?;
155 write_bool(&mut w, show)?;
156 }
157 if let Some(quiet) = self.opts.quiet {
158 if !first {
159 w.write_char(',')?;
160 } else {
161 }
163 w.write_str("\"quiet\":")?;
164 write_bool(&mut w, quiet)?;
165 }
166
167 w.write_char(',')?;
169 w.write_str("\"merge_config\":")?;
170 write_bool(&mut w, self.opts.merge_config)?;
171
172 w.write_str("},\"messages\":")?;
173 Message::write_json_array(&self.messages, &mut w)?;
174
175 w.write_char('}')?;
176 Ok(())
177 }
178}
179
// Uppercase hex digits used by `write_json_str` for `\u00XX` control-char escapes.
const HEX: &[u8; 16] = b"0123456789ABCDEF";
181
182fn write_bool<W: Write>(w: &mut W, v: bool) -> OrtResult<usize> {
183 if v {
184 w.write_str("true")
185 } else {
186 w.write_str("false")
187 }
188}
189
190fn write_u32<W: Write>(w: &mut W, mut n: u32) -> OrtResult<usize> {
191 if n == 0 {
192 return w.write_str("0");
193 }
194 let mut buf = [0u8; 10];
195 let mut i = buf.len();
196 while n > 0 {
197 i -= 1;
198 buf[i] = b'0' + (n % 10) as u8;
199 n /= 10;
200 }
201 w.write(&buf[i..])
202}
203
204impl Message {
205 pub fn write_json_array<W: Write>(msgs: &[Message], w: &mut W) -> OrtResult<()> {
206 w.write_char('[')?;
207 for (i, msg) in msgs.iter().enumerate() {
208 if i != 0 {
209 w.write_char(',')?;
210 }
211 write_json(msg, w)?;
212 }
213 w.write_char(']')?;
214 Ok(())
215 }
216}
217
218pub fn write_json<W: Write>(data: &Message, w: &mut W) -> OrtResult<()> {
219 w.write_str("{\"role\":")?;
220 write_json_str_simple(w, data.role.as_str())?;
221 match (&data.content, &data.reasoning) {
222 (Some(_), Some(_)) | (None, None) => {
223 return Err(ort_error(
224 ErrorKind::InvalidMessageSchema,
225 "Message must have exactly one of 'content' or 'reasoning'.",
226 ));
227 }
228 (Some(content), _) => {
229 w.write_str(",\"content\":")?;
230 write_json_str(w, content)?;
231 }
232 (_, Some(reasoning)) => {
233 w.write_str(",\"reasoning\":")?;
234 write_json_str(w, reasoning)?;
235 }
236 }
237 w.write_char('}')?;
238 Ok(())
239}
240
241fn write_json_str_simple<W: Write>(w: &mut W, s: &str) -> OrtResult<()> {
243 w.write_char('"')?;
244 w.write_str(s)?;
245 w.write_char('"')?;
246 Ok(())
247}
248
249fn write_json_str<W: Write>(w: &mut W, s: &str) -> OrtResult<()> {
251 w.write_char('"')?;
252 let bytes = s.as_bytes();
253 let mut start = 0;
254
255 for i in 0..bytes.len() {
256 let b = bytes[i];
257 let esc = match b {
258 b'"' => Some(b"\\\""), b'\\' => Some(b"\\\\"),
260 b'\n' => Some(b"\\n"),
261 b'\r' => Some(b"\\r"),
262 b'\t' => Some(b"\\t"),
263 0x08 => Some(b"\\b"),
264 0x0C => Some(b"\\f"),
265 0x00..=0x1F => None, _ => continue,
267 };
268
269 if start < i {
270 w.write(&bytes[start..i])?;
271 }
272
273 if let Some(e) = esc {
274 w.write(e)?;
275 } else {
276 let mut buf = [0u8; 6];
278 buf[0] = b'\\';
279 buf[1] = b'u';
280 buf[2] = b'0';
281 buf[3] = b'0';
282 buf[4] = HEX[((b >> 4) & 0xF) as usize];
283 buf[5] = HEX[(b & 0xF) as usize];
284 w.write(&buf)?;
285 }
286
287 start = i + 1;
288 }
289
290 if start < bytes.len() {
291 w.write(&bytes[start..])?;
292 }
293 w.write_char('"')?;
294 Ok(())
295}
296
#[cfg(test)]
mod tests {
    extern crate alloc;
    use alloc::string::ToString;
    use alloc::vec;

    use super::*;
    use crate::ReasoningConfig;

    // Golden test: `LastData::to_json_writer` output must match byte-for-byte.
    #[test]
    fn test_last_data() {
        let opts = PromptOpts {
            prompt: None,
            models: vec!["google/gemma-3n-e4b-it:free".to_string()],
            provider: Some("google-ai-studio".to_string()),
            system: Some("System prompt here".to_string()),
            priority: None,
            reasoning: Some(ReasoningConfig::off()),
            show_reasoning: Some(false),
            quiet: None,
            merge_config: true,
        };
        let messages = vec![
            Message::user("Hello".to_string()),
            Message::assistant("Hello there!".to_string()),
        ];
        let l = LastData { opts, messages };

        let mut got = String::with_capacity(64);
        // SAFETY: to_json_writer only appends valid UTF-8 (ASCII plus &str data).
        if let Err(err) = l.to_json_writer(unsafe { got.as_mut_vec() }) {
            panic!("{}", err.as_string());
        }

        let expected = r#"{"opts":{"model":"google/gemma-3n-e4b-it:free","provider":"google-ai-studio","system":"System prompt here","reasoning":{"enabled":false},"show_reasoning":false,"merge_config":true},"messages":[{"role":"user","content":"Hello"},{"role":"assistant","content":"Hello there!"}]}"#;

        assert_eq!(got, expected);
    }

    // Golden test: full request body produced by `build_body` for model 0.
    #[test]
    fn test_build_body() {
        let opts = PromptOpts {
            prompt: None,
            models: vec!["google/gemma-3n-e4b-it:free".to_string()],
            provider: Some("google-ai-studio".to_string()),
            system: Some("System prompt here".to_string()),
            priority: None,
            reasoning: Some(ReasoningConfig::off()),
            show_reasoning: Some(false),
            quiet: None,
            merge_config: false,
        };
        let messages = vec![
            Message::user("Hello".to_string()),
            Message::assistant("Hello there!".to_string()),
        ];
        let got = match build_body(0, &opts, &messages) {
            Ok(got) => got,
            Err(err) => {
                panic!("{}", err.as_string());
            }
        };

        let expected = r#"{"stream": true, "usage": {"include": true}, "model": "google/gemma-3n-e4b-it:free", "provider": {"order": ["google-ai-studio"]}, "reasoning": {"enabled": false}, "messages":[{"role":"user","content":"Hello"},{"role":"assistant","content":"Hello there!"}]}"#;

        assert_eq!(got, expected);
    }
}
363}