kind_openai/endpoints/
chat.rs

1mod standard;
2mod structured;
3
4pub use standard::*;
5pub use structured::*;
6
7use std::borrow::Cow;
8
9use bon::{builder, Builder};
10use serde::{Deserialize, Serialize};
11
12/// The model that can be used for either standard or structured chat completions.
13#[derive(Serialize, Clone, Copy)]
14#[allow(non_camel_case_types)]
15pub enum Model {
16    #[serde(rename = "gpt-4o-2024-11-20")]
17    Gpt4o_2024_11_20,
18    #[serde(rename = "gpt-4o-2024-08-06")]
19    Gpt4o_2024_08_06,
20    #[serde(rename = "gpt-4o")]
21    Gpt4o,
22    #[serde(rename = "gpt-4o-mini")]
23    Gpt4oMini,
24}
25
26pub use standard::{ChatCompletion, ChatCompletionBuilder};
27pub use structured::StructuredChatCompletion;
28
29use crate::{OpenAIError, OpenAIResult};
30
31/// The role of the message used for the chat completion.
32#[derive(Serialize, Debug, Deserialize, Clone, Copy)]
33#[serde(rename_all = "snake_case")]
34pub enum Role {
35    /// The system message, describing the task to the model. As a tip, when using structured outputs
36    /// try and keep this smaller. You don't need to include each field / variant's description in this,
37    /// and can instead rely on docstrings to be included in the schema's prompt!
38    System,
39    /// The user message, i.e. the payload into the model.
40    User,
41    /// The assistant message, i.e. the model's response.
42    Assistant,
43}
44
/// A chat completion message. You can pre-populate the request with user and
/// assistant messages (alongside the system message) to provide context for the
/// completion.
///
/// Construct via the generated builder, starting from [`Message::role`]:
/// `Message::role(Role::User).content("hi".into()).build()`.
#[derive(Serialize, Debug, Builder)]
#[builder(start_fn = role)]
pub struct Message<'a> {
    /// Who authored this message (system / user / assistant).
    #[builder(start_fn)]
    role: Role,
    /// The message text. `Cow` lets callers pass either a borrowed `&str` or an
    /// owned `String` without an extra allocation.
    content: Cow<'a, str>,
    /// Optional refusal string — NOTE(review): presumably mirrors the OpenAI
    /// message schema's `refusal` field; confirm against the API reference.
    refusal: Option<&'a str>,
    /// Optional participant name to disambiguate multiple users/assistants.
    name: Option<Cow<'a, str>>,
}
57
/// Builds a system-role `Message` from `format!`-style arguments.
///
/// The transcriber is a plain expression — the trailing semicolon after
/// `.build()` was removed, since it made the expansion invalid wherever a
/// `Message` *value* is expected (e.g. `vec![system_message!("...")]`).
#[macro_export]
macro_rules! system_message {
    ($($arg:tt)*) => {
        ::kind_openai::endpoints::chat::Message::role(
            ::kind_openai::endpoints::chat::Role::System
        )
        .content(format!($($arg)*).into())
        .build()
    };
}
68
/// Builds a user-role `Message` from `format!`-style arguments.
///
/// The transcriber is a plain expression — the trailing semicolon after
/// `.build()` was removed, since it made the expansion invalid wherever a
/// `Message` *value* is expected (e.g. `vec![user_message!("...")]`).
#[macro_export]
macro_rules! user_message {
    ($($arg:tt)*) => {
        ::kind_openai::endpoints::chat::Message::role(
            ::kind_openai::endpoints::chat::Role::User
        )
        .content(format!($($arg)*).into())
        .build()
    };
}
79
/// Builds an assistant-role `Message` from `format!`-style arguments.
///
/// The transcriber is a plain expression — the trailing semicolon after
/// `.build()` was removed, since it made the expansion invalid wherever a
/// `Message` *value* is expected (e.g. `vec![assistant_message!("...")]`).
#[macro_export]
macro_rules! assistant_message {
    ($($arg:tt)*) => {
        ::kind_openai::endpoints::chat::Message::role(
            ::kind_openai::endpoints::chat::Role::Assistant
        )
        .content(format!($($arg)*).into())
        .build()
    };
}
90
/// A chat completion response message. Don't use this type directly, and instead use the
/// `?` AKA `Try` operator to convert it into a result that can be used.
pub struct UnifiedChatCompletionResponseMessage<T> {
    /// The completion payload returned on success.
    content: T,
    /// If present, the model refused to answer; the `From` conversion below
    /// surfaces this as `OpenAIError::Refusal`.
    refusal: Option<String>,
}
97
98impl<T> From<UnifiedChatCompletionResponseMessage<T>> for OpenAIResult<T> {
99    fn from(value: UnifiedChatCompletionResponseMessage<T>) -> Self {
100        match value.refusal {
101            Some(refusal) => Err(OpenAIError::Refusal(refusal)),
102            None => Ok(value.content),
103        }
104    }
105}
106
107/// The reason the response was terminated.
108#[derive(Deserialize, Clone, Copy)]
109#[serde(rename_all = "snake_case")]
110pub enum FinishReason {
111    Stop,
112    Length,
113    ContentFilter,
114    ToolCalls,
115}