lm_studio_api/chat/request.rs

use crate::prelude::*;
use super::{ Model, Message, format::* };

/// A request payload for the API. With `#[serde(untagged)]` the enum wrapper
/// adds nothing to the wire format: each variant serializes as its bare inner
/// struct, and the `From` derive lets callers pass any of the three structs
/// directly via `.into()`.
#[derive(Debug, Clone, Serialize, Deserialize, From)]
#[serde(untagged)]
pub enum Request {
    #[from] Messages(Messages),
    #[from] Prompt(Prompt),
    #[from] Embeddings(Embeddings),
}

/// Body for a chat-style request: a model plus a message history.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Messages {
    pub model: Model,
    pub messages: Vec<Message>,
    /// Client-side flag; `#[serde(skip)]` keeps it off the wire.
    #[serde(skip)]
    pub context: bool,
    pub temperature: f32,
    /// Maximum tokens to generate; defaults to `-1` below.
    pub max_tokens: i32,
    pub stream: bool,
    /// Serialized as `response_format`; omitted entirely while `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    #[serde(rename = "response_format")]
    pub format: Option<Format>,
    /// Client-side flag; `#[serde(skip)]` keeps it off the wire.
    #[serde(skip)]
    pub skip_think: bool,
}

impl Default for Messages {
    fn default() -> Self {
        Self {
            model: Model::Other(str!()),
            messages: vec![],
            context: true,
            temperature: 0.7,
            max_tokens: -1,
            stream: false,
            format: None,
            skip_think: true,
        }
    }
}

/// Body for a plain text-completion request: a model plus a single prompt.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Prompt {
    pub model: Model,
    pub prompt: String,
    /// Client-side flag; `#[serde(skip)]` keeps it off the wire.
    #[serde(skip)]
    pub context: bool,
    pub temperature: f32,
    /// Maximum tokens to generate; defaults to `-1` below.
    pub max_tokens: i32,
    pub stream: bool,
    /// Stop sequence; generation halts when the model emits it.
    pub stop: String,
    /// Client-side flag; `#[serde(skip)]` keeps it off the wire.
    #[serde(skip)]
    pub skip_think: bool,
}

impl Default for Prompt {
    fn default() -> Self {
        Self {
            model: Model::Other(str!()),
            prompt: str!(),
            context: true,
            temperature: 0.7,
            max_tokens: -1,
            stream: false,
            stop: str!("\n"),
            skip_think: true,
        }
    }
}

/// Body for an embeddings request: a model plus the strings to embed.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Embeddings {
    pub model: Model,
    pub input: Vec<String>,
}

impl Default for Embeddings {
    fn default() -> Self {
        Self {
            model: Model::Other(str!()),
            input: vec![],
        }
    }
}
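
// A minimal sanity-check sketch of the wire shapes, assuming `serde_json` is
// available as a dev-dependency and that `Model` serializes via its derive;
// the assertions follow only from the serde attributes above, not from any
// documented server contract.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn messages_serializes_as_bare_object() {
        // Untagged enum: no variant tag, just the inner struct's fields.
        let req: Request = Messages::default().into();
        let v = serde_json::to_value(&req).unwrap();
        assert!(v.get("messages").is_some());
        assert!(v.get("temperature").is_some());
        assert!(v.get("context").is_none()); // #[serde(skip)]
        assert!(v.get("skip_think").is_none()); // #[serde(skip)]
        assert!(v.get("response_format").is_none()); // skipped while None
    }

    #[test]
    fn prompt_keeps_its_stop_sequence() {
        let req: Request = Prompt::default().into();
        let v = serde_json::to_value(&req).unwrap();
        assert_eq!(v["stop"], "\n");
        assert!(v.get("prompt").is_some());
    }

    #[test]
    fn embeddings_carries_model_and_input() {
        let req: Request = Embeddings {
            input: vec!["hello".to_string()],
            ..Default::default()
        }
        .into();
        let v = serde_json::to_value(&req).unwrap();
        assert_eq!(v["input"][0], "hello");
    }
}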