model_gateway_rs/model/ollama.rs

use serde::{Deserialize, Serialize};

use crate::model::llm::ChatMessage;

/// Generation options forwarded to Ollama; unset fields are omitted from the JSON body.
#[derive(Debug, Clone, Serialize)]
pub struct OllamaChatOptions {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub num_predict: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
}

/// Request payload for Ollama's `/api/chat` endpoint.
#[derive(Debug, Clone, Serialize)]
pub struct OllamaChatRequest {
    pub model: String,
    pub messages: Vec<ChatMessage>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub options: Option<OllamaChatOptions>,
}

/// Response payload from Ollama's `/api/chat` endpoint; the timing and token
/// counters are optional because Ollama only populates them on the final
/// (`done: true`) response.
#[derive(Debug, Deserialize)]
pub struct OllamaChatResponse {
    pub model: String,
    pub created_at: String,
    pub message: ChatMessage,
    pub done_reason: Option<String>,
    pub done: bool,
    pub total_duration: Option<u64>,
    pub load_duration: Option<u64>,
    pub prompt_eval_count: Option<u32>,
    pub prompt_eval_duration: Option<u64>,
    pub eval_count: Option<u32>,
    pub eval_duration: Option<u64>,
}

impl OllamaChatResponse {
    /// Returns the content of the assistant message carried by this response.
    pub fn first_message(&self) -> String {
        self.message.content.clone()
    }
}