//! Types and request bindings for the app-level endpoints
//! (`/app`, `/app/init`, `/log`, `/mode`, `/config/providers`).

use std::collections::HashMap;

use serde::{Deserialize, Serialize};

use crate::{
    client::{Opencode, RequestOptions},
    error::OpencodeError,
};

/// Application information returned by `GET /app`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct App {
    pub git: bool,
    pub hostname: String,
    pub path: AppPath,
    pub time: AppTime,
}

/// Filesystem paths used by the application.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct AppPath {
    pub config: String,
    pub cwd: String,
    pub data: String,
    pub root: String,
    pub state: String,
}

/// Timestamps associated with the application.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct AppTime {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub initialized: Option<f64>,
}

/// A mode returned by `GET /mode`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct Mode {
    pub name: String,
    pub tools: HashMap<String, bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub model: Option<ModeModel>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub prompt: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f64>,
}

/// The model a mode targets, identified by provider and model IDs.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ModeModel {
    #[serde(rename = "modelID")]
    pub model_id: String,
    #[serde(rename = "providerID")]
    pub provider_id: String,
}

/// Metadata for a model offered by a provider.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[allow(clippy::struct_excessive_bools)]
pub struct Model {
    pub id: String,
    pub attachment: bool,
    pub cost: ModelCost,
    pub limit: ModelLimit,
    pub name: String,
    pub options: HashMap<String, serde_json::Value>,
    pub reasoning: bool,
    pub release_date: String,
    pub temperature: bool,
    pub tool_call: bool,
}

/// Cost information for a model.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct ModelCost {
    pub input: f64,
    pub output: f64,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cache_read: Option<f64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cache_write: Option<f64>,
}

/// Context and output limits for a model.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ModelLimit {
    pub context: u64,
    pub output: u64,
}

/// A model provider and the models it exposes.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct Provider {
    pub id: String,
    pub env: Vec<String>,
    pub models: HashMap<String, Model>,
    pub name: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub api: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub npm: Option<String>,
}

/// Response from `POST /app/init`.
pub type AppInitResponse = bool;

/// Response from `POST /log`.
pub type AppLogResponse = bool;

/// Response from `GET /mode`.
pub type AppModesResponse = Vec<Mode>;

/// Response from `GET /config/providers`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct AppProvidersResponse {
    pub default: HashMap<String, String>,
    pub providers: Vec<Provider>,
}

/// Severity level for a log entry.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum LogLevel {
    Debug,
    Info,
    Error,
    Warn,
}

/// Request body for `POST /log`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct AppLogParams {
    pub level: LogLevel,
    pub message: String,
    pub service: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub extra: Option<HashMap<String, serde_json::Value>>,
}

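/// Typed access to the app-level endpoints.
///
/// A minimal usage sketch: it assumes an already-constructed `Opencode`
/// client exposing an `app()` accessor that returns this resource. That
/// accessor name is an assumption, not something defined in this module;
/// adjust it to the real client API.
///
/// ```ignore
/// // `client.app()` is a hypothetical accessor; substitute the real one.
/// let app = client.app().get(None).await?;
/// println!("working in {}", app.path.cwd);
/// ```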
pub struct AppResource<'a> {
    client: &'a Opencode,
}

impl<'a> AppResource<'a> {
    pub(crate) const fn new(client: &'a Opencode) -> Self {
        Self { client }
    }

    /// Fetches application information via `GET /app`.
    pub async fn get(&self, options: Option<&RequestOptions>) -> Result<App, OpencodeError> {
        self.client.get("/app", options).await
    }

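    /// Calls `POST /app/init` and returns the server's boolean response.
    ///
    /// Sketch (the `app()` accessor is a hypothetical name):
    ///
    /// ```ignore
    /// // Hypothetical accessor; obtain the resource however the client exposes it.
    /// let ok = client.app().init(None).await?;
    /// assert!(ok);
    /// ```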
    pub async fn init(
        &self,
        options: Option<&RequestOptions>,
    ) -> Result<AppInitResponse, OpencodeError> {
        self.client.post::<bool, ()>("/app/init", None, options).await
    }

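    /// Sends a log entry via `POST /log`.
    ///
    /// Sketch (the `app()` accessor is a hypothetical name):
    ///
    /// ```ignore
    /// // Hypothetical accessor; obtain the resource however the client exposes it.
    /// let params = AppLogParams {
    ///     level: LogLevel::Info,
    ///     message: "server started".into(),
    ///     service: "api-gateway".into(),
    ///     extra: None,
    /// };
    /// client.app().log(&params, None).await?;
    /// ```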
    pub async fn log(
        &self,
        params: &AppLogParams,
        options: Option<&RequestOptions>,
    ) -> Result<AppLogResponse, OpencodeError> {
        self.client.post("/log", Some(params), options).await
    }

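    /// Lists the available modes via `GET /mode`.
    ///
    /// Sketch (the `app()` accessor is a hypothetical name):
    ///
    /// ```ignore
    /// // Hypothetical accessor; obtain the resource however the client exposes it.
    /// for mode in client.app().modes(None).await? {
    ///     println!("mode: {}", mode.name);
    /// }
    /// ```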
    pub async fn modes(
        &self,
        options: Option<&RequestOptions>,
    ) -> Result<AppModesResponse, OpencodeError> {
        self.client.get("/mode", options).await
    }

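    /// Lists configured providers and per-provider defaults via `GET /config/providers`.
    ///
    /// Sketch (the `app()` accessor is a hypothetical name):
    ///
    /// ```ignore
    /// // Hypothetical accessor; obtain the resource however the client exposes it.
    /// let resp = client.app().providers(None).await?;
    /// for provider in &resp.providers {
    ///     println!("{} -> {:?}", provider.name, resp.default.get(&provider.id));
    /// }
    /// ```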
    pub async fn providers(
        &self,
        options: Option<&RequestOptions>,
    ) -> Result<AppProvidersResponse, OpencodeError> {
        self.client.get("/config/providers", options).await
    }
}

#[cfg(test)]
mod tests {
    use serde_json::json;

    use super::*;

    #[test]
    fn app_round_trip() {
        let app = App {
            git: true,
            hostname: "dev-machine".into(),
            path: AppPath {
                config: "/home/user/.config/opencode".into(),
                cwd: "/home/user/project".into(),
                data: "/home/user/.local/share/opencode".into(),
                root: "/home/user/project".into(),
                state: "/home/user/.local/state/opencode".into(),
            },
            time: AppTime { initialized: Some(1_700_000_000.0) },
        };
        let json_str = serde_json::to_string(&app).unwrap();
        let back: App = serde_json::from_str(&json_str).unwrap();
        assert_eq!(app, back);
    }

    #[test]
    fn app_time_optional_initialized() {
        let app = App {
            git: false,
            hostname: "ci".into(),
            path: AppPath {
                config: "/tmp/cfg".into(),
                cwd: "/tmp".into(),
                data: "/tmp/data".into(),
                root: "/tmp".into(),
                state: "/tmp/state".into(),
            },
            time: AppTime { initialized: None },
        };
        let json_str = serde_json::to_string(&app).unwrap();
        assert!(!json_str.contains("initialized"));
        let back: App = serde_json::from_str(&json_str).unwrap();
        assert_eq!(app, back);
    }

    #[test]
    fn mode_full_round_trip() {
        let mode = Mode {
            name: "code".into(),
            tools: HashMap::from([("bash".into(), true), ("edit".into(), false)]),
            model: Some(ModeModel { model_id: "gpt-4o".into(), provider_id: "openai".into() }),
            prompt: Some("You are a coding assistant.".into()),
            temperature: Some(0.7),
        };
        let json_str = serde_json::to_string(&mode).unwrap();
        assert!(json_str.contains("modelID"));
        assert!(json_str.contains("providerID"));
        let back: Mode = serde_json::from_str(&json_str).unwrap();
        assert_eq!(mode, back);
    }

    #[test]
    fn mode_minimal() {
        let mode = Mode {
            name: "default".into(),
            tools: HashMap::new(),
            model: None,
            prompt: None,
            temperature: None,
        };
        let json_str = serde_json::to_string(&mode).unwrap();
        assert!(!json_str.contains("model"));
        assert!(!json_str.contains("prompt"));
        assert!(!json_str.contains("temperature"));
        let back: Mode = serde_json::from_str(&json_str).unwrap();
        assert_eq!(mode, back);
    }

    #[test]
    fn model_round_trip() {
        let model = Model {
            id: "gpt-4o".into(),
            attachment: true,
            cost: ModelCost { input: 5.0, output: 15.0, cache_read: Some(2.5), cache_write: None },
            limit: ModelLimit { context: 128_000, output: 4_096 },
            name: "GPT-4o".into(),
            options: HashMap::from([("streaming".into(), json!(true))]),
            reasoning: false,
            release_date: "2024-05-13".into(),
            temperature: true,
            tool_call: true,
        };
        let json_str = serde_json::to_string(&model).unwrap();
        let back: Model = serde_json::from_str(&json_str).unwrap();
        assert_eq!(model, back);
    }

    #[test]
    fn model_cost_no_cache() {
        let cost = ModelCost { input: 1.0, output: 2.0, cache_read: None, cache_write: None };
        let json_str = serde_json::to_string(&cost).unwrap();
        assert!(!json_str.contains("cache_read"));
        assert!(!json_str.contains("cache_write"));
        let back: ModelCost = serde_json::from_str(&json_str).unwrap();
        assert_eq!(cost, back);
    }

    #[test]
    fn provider_round_trip() {
        let provider = Provider {
            id: "openai".into(),
            env: vec!["OPENAI_API_KEY".into()],
            models: HashMap::from([(
                "gpt-4o".into(),
                Model {
                    id: "gpt-4o".into(),
                    attachment: true,
                    cost: ModelCost {
                        input: 5.0,
                        output: 15.0,
                        cache_read: None,
                        cache_write: None,
                    },
                    limit: ModelLimit { context: 128_000, output: 4_096 },
                    name: "GPT-4o".into(),
                    options: HashMap::new(),
                    reasoning: false,
                    release_date: "2024-05-13".into(),
                    temperature: true,
                    tool_call: true,
                },
            )]),
            name: "OpenAI".into(),
            api: Some("https://api.openai.com/v1".into()),
            npm: None,
        };
        let json_str = serde_json::to_string(&provider).unwrap();
        let back: Provider = serde_json::from_str(&json_str).unwrap();
        assert_eq!(provider, back);
    }

    #[test]
    fn app_log_params_with_extra() {
        let params = AppLogParams {
            level: LogLevel::Info,
            message: "server started".into(),
            service: "api-gateway".into(),
            extra: Some(HashMap::from([
                ("port".into(), json!(8080)),
                ("env".into(), json!("production")),
            ])),
        };
        let json_str = serde_json::to_string(&params).unwrap();
        assert!(json_str.contains(r#""level":"info"#));
        let back: AppLogParams = serde_json::from_str(&json_str).unwrap();
        assert_eq!(params, back);
    }

    #[test]
    fn app_log_params_without_extra() {
        let params = AppLogParams {
            level: LogLevel::Error,
            message: "something broke".into(),
            service: "worker".into(),
            extra: None,
        };
        let json_str = serde_json::to_string(&params).unwrap();
        assert!(!json_str.contains("extra"));
        assert!(json_str.contains(r#""level":"error"#));
        let back: AppLogParams = serde_json::from_str(&json_str).unwrap();
        assert_eq!(params, back);
    }

    #[test]
    fn log_level_variants() {
        for (variant, expected) in [
            (LogLevel::Debug, "debug"),
            (LogLevel::Info, "info"),
            (LogLevel::Error, "error"),
            (LogLevel::Warn, "warn"),
        ] {
            let json_str = serde_json::to_string(&variant).unwrap();
            assert_eq!(json_str, format!("\"{expected}\""));
            let back: LogLevel = serde_json::from_str(&json_str).unwrap();
            assert_eq!(variant, back);
        }
    }

    #[test]
    fn app_providers_response_round_trip() {
        let resp = AppProvidersResponse {
            default: HashMap::from([
                ("openai".into(), "gpt-4o".into()),
                ("anthropic".into(), "claude-3-opus".into()),
            ]),
            providers: vec![Provider {
                id: "openai".into(),
                env: vec!["OPENAI_API_KEY".into()],
                models: HashMap::new(),
                name: "OpenAI".into(),
                api: None,
                npm: None,
            }],
        };
        let json_str = serde_json::to_string(&resp).unwrap();
        let back: AppProvidersResponse = serde_json::from_str(&json_str).unwrap();
        assert_eq!(resp, back);
    }

    #[test]
    fn mode_model_serde_rename() {
        let m = ModeModel { model_id: "claude-3-opus".into(), provider_id: "anthropic".into() };
        let v: serde_json::Value = serde_json::to_value(&m).unwrap();
        assert_eq!(v["modelID"], "claude-3-opus");
        assert_eq!(v["providerID"], "anthropic");
        let back: ModeModel = serde_json::from_value(v).unwrap();
        assert_eq!(m, back);
    }

    #[test]
    fn provider_no_api_no_npm() {
        let provider = Provider {
            id: "custom".into(),
            env: vec![],
            models: HashMap::new(),
            name: "Custom".into(),
            api: None,
            npm: None,
        };
        let json_str = serde_json::to_string(&provider).unwrap();
        assert!(!json_str.contains("api"));
        assert!(!json_str.contains("npm"));
        let back: Provider = serde_json::from_str(&json_str).unwrap();
        assert_eq!(provider, back);
    }

    #[test]
    fn model_cost_cache_read_only() {
        let cost = ModelCost { input: 3.0, output: 6.0, cache_read: Some(1.5), cache_write: None };
        let json_str = serde_json::to_string(&cost).unwrap();
        assert!(json_str.contains("cache_read"));
        assert!(!json_str.contains("cache_write"));
        let back: ModelCost = serde_json::from_str(&json_str).unwrap();
        assert_eq!(cost, back);
    }

    #[test]
    fn model_cost_cache_write_only() {
        let cost = ModelCost { input: 3.0, output: 6.0, cache_read: None, cache_write: Some(4.0) };
        let json_str = serde_json::to_string(&cost).unwrap();
        assert!(!json_str.contains("cache_read"));
        assert!(json_str.contains("cache_write"));
        let back: ModelCost = serde_json::from_str(&json_str).unwrap();
        assert_eq!(cost, back);
    }

    #[test]
    fn app_time_initialized_absent_from_json() {
        let raw = r#"{"git":true,"hostname":"h","path":{"config":"c","cwd":"w","data":"d","root":"r","state":"s"},"time":{}}"#;
        let app: App = serde_json::from_str(raw).unwrap();
        assert_eq!(app.time.initialized, None);
    }

    #[test]
    fn app_log_params_extra_empty_map() {
        let params = AppLogParams {
            level: LogLevel::Debug,
            message: "trace".into(),
            service: "svc".into(),
            extra: Some(HashMap::new()),
        };
        let json_str = serde_json::to_string(&params).unwrap();
        assert!(json_str.contains("extra"));
        let back: AppLogParams = serde_json::from_str(&json_str).unwrap();
        assert_eq!(params, back);
    }

    #[test]
    fn mode_with_empty_tools_and_some_model() {
        let mode = Mode {
            name: "review".into(),
            tools: HashMap::new(),
            model: Some(ModeModel { model_id: "o1".into(), provider_id: "openai".into() }),
            prompt: None,
            temperature: None,
        };
        let json_str = serde_json::to_string(&mode).unwrap();
        assert!(!json_str.contains("prompt"));
        assert!(!json_str.contains("temperature"));
        assert!(json_str.contains("modelID"));
        let back: Mode = serde_json::from_str(&json_str).unwrap();
        assert_eq!(mode, back);
    }

    #[test]
    fn model_with_empty_options() {
        let model = Model {
            id: "small".into(),
            attachment: false,
            cost: ModelCost { input: 0.1, output: 0.2, cache_read: None, cache_write: None },
            limit: ModelLimit { context: 4096, output: 512 },
            name: "Small Model".into(),
            options: HashMap::new(),
            reasoning: false,
            release_date: "2025-01-01".into(),
            temperature: false,
            tool_call: false,
        };
        let json_str = serde_json::to_string(&model).unwrap();
        let back: Model = serde_json::from_str(&json_str).unwrap();
        assert_eq!(model, back);
    }
}