1#![doc = include_str!("../README.MD")]
2#![warn(missing_docs)]
3#![deny(warnings)]
4
5#[cfg_attr(not(any(feature = "ollama", feature = "llama.cpp")), deny(warnings))]
6mod error;
7pub mod prelude;
8
9#[cfg(feature = "llama.cpp")]
10pub mod llama_cpp;
11
12#[cfg(feature = "ollama")]
13pub mod ollama;
14
15#[cfg(feature = "simple-api")]
16pub mod simple_api;
17
18#[cfg(feature = "template")]
19pub mod template;
20
21use std::future::Future;
22
23pub use error::Error;
25use futures::Stream;
26
/// Convenience alias: this crate's [`Result`], with [`Error`] as the error type.
pub type Result<T> = std::result::Result<T, Error>;
29
/// Output format requested from the model.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum Format
{
  /// Plain, unstructured text output (the default).
  #[default]
  Text,
  /// JSON output.
  Json,
}
39
/// The author of a [`Message`] in a chat exchange.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Role
{
  /// The end user talking to the model.
  User,
  /// System-level instructions.
  System,
  /// The model's own responses.
  Assistant,
  /// A custom role identified by an arbitrary name.
  Custom(String),
}
53
/// A single chat message: some text attributed to a [`Role`].
#[derive(Debug)]
pub struct Message
{
  /// Who authored this message.
  pub role: Role,
  /// The textual content of the message.
  pub content: String,
}
63
/// A chat-style prompt: an ordered list of messages plus the requested
/// output [`Format`]. Built incrementally with the builder methods on `impl ChatPrompt`.
#[derive(Debug)]
pub struct ChatPrompt
{
  /// The conversation so far, in order.
  pub messages: Vec<Message>,
  /// The output format requested from the model.
  pub format: Format,
}
73
74impl ChatPrompt
75{
76 pub fn new() -> Self
78 {
79 Self {
80 messages: Default::default(),
81 format: Format::Text,
82 }
83 }
84 pub fn message(mut self, role: Role, content: impl Into<String>) -> Self
86 {
87 self.messages.push(Message {
88 role,
89 content: content.into(),
90 });
91 self
92 }
93 pub fn message_opt(mut self, role: Role, content: Option<String>) -> Self
95 {
96 if let Some(content) = content
97 {
98 self.messages.push(Message {
99 role,
100 content: content.into(),
101 });
102 }
103 self
104 }
105 pub fn user(self, content: impl Into<String>) -> Self
107 {
108 self.message(Role::User, content)
109 }
110 pub fn system(self, content: impl Into<String>) -> Self
112 {
113 self.message(Role::System, content)
114 }
115 pub fn system_opt(self, content: Option<String>) -> Self
117 {
118 self.message_opt(Role::System, content)
119 }
120 pub fn assistant(self, content: impl Into<String>) -> Self
122 {
123 self.message(Role::Assistant, content)
124 }
125 pub fn assistant_opt(self, content: Option<String>) -> Self
127 {
128 self.message_opt(Role::Assistant, content)
129 }
130 pub fn format(mut self, format: impl Into<Format>) -> Self
132 {
133 self.format = format.into();
134 self
135 }
136}
137
/// A single-turn generation request: one user prompt with optional system and
/// assistant context, plus the requested output [`Format`].
#[derive(Debug)]
pub struct GenerationPrompt
{
  /// The user prompt text.
  pub user: String,
  /// Optional system-level instructions.
  pub system: Option<String>,
  /// Optional assistant content; when chat-backed generation is used, it is
  /// inserted as an assistant message before the user prompt.
  pub assistant: Option<String>,
  /// The output format requested from the model.
  pub format: Format,
  /// Optional image attached to the prompt (only with the `image` feature).
  #[cfg(feature = "image")]
  pub image: Option<kproc_values::Image>,
}
154
155impl GenerationPrompt
156{
157 pub fn prompt(user: impl Into<String>) -> Self
159 {
160 Self {
161 user: user.into(),
162 system: Default::default(),
163 assistant: Default::default(),
164 format: Format::Text,
165 #[cfg(feature = "image")]
166 image: None,
167 }
168 }
169 pub fn system(mut self, content: impl Into<String>) -> Self
171 {
172 self.system = Some(content.into());
173 self
174 }
175 pub fn assistant(mut self, content: impl Into<String>) -> Self
177 {
178 self.assistant = Some(content.into());
179 self
180 }
181 pub fn format(mut self, format: impl Into<Format>) -> Self
183 {
184 self.format = format.into();
185 self
186 }
187 #[cfg(feature = "image")]
189 pub fn image(mut self, image: impl Into<kproc_values::Image>) -> Self
190 {
191 self.image = Some(image.into());
192 self
193 }
194}
195
/// A boxed stream of string fragments, each wrapped in this crate's [`Result`],
/// as produced incrementally during model generation.
pub type StringStream = ccutils::futures::BoxedStream<Result<String>>;
198
// Heap-pins a concrete stream so it can be returned as the type-erased
// `StringStream`. NOTE(review): `dead_code` is presumably allowed because
// only some feature-gated backends call this — confirm.
#[allow(dead_code)]
pub(crate) fn pin_stream<T: 'static + Send + Stream<Item = Result<String>>>(t: T) -> StringStream
{
  Box::pin(t)
}
205
206fn accumulate<T>(stream_maker: T) -> Result<impl Future<Output = Result<String>> + Send>
207where
208 T: Future<Output = Result<StringStream>> + Send,
209{
210 use futures::stream::StreamExt;
211 Ok(async {
212 let mut result: String = Default::default();
213 let mut stream = Box::pin(stream_maker.await?);
214 while let Some(next_token) = stream.next().await
215 {
216 if result.is_empty()
217 {
218 result = next_token?;
219 }
220 else
221 {
222 result.push_str(&next_token?);
223 }
224 }
225 Ok(result)
226 })
227}
228
229#[allow(dead_code)]
230pub(crate) fn generate_with_chat<LLM>(
231 llm: &LLM,
232 prompt: GenerationPrompt,
233) -> Result<impl Future<Output = Result<StringStream>> + Send + use<'_, LLM>>
234where
235 LLM: LargeLanguageModel,
236{
237 let chat_prompt = ChatPrompt::new()
238 .system_opt(prompt.system)
239 .assistant_opt(prompt.assistant)
240 .user(prompt.user)
241 .format(prompt.format);
242 llm.chat_stream(chat_prompt)
243}
244
245pub trait LargeLanguageModel
247{
248 fn chat_stream(
250 &self,
251 prompt: ChatPrompt,
252 ) -> Result<impl Future<Output = Result<StringStream>> + Send>;
253 fn chat(&self, prompt: ChatPrompt) -> Result<impl Future<Output = Result<String>> + Send>
256 {
257 let stream = self.chat_stream(prompt)?;
258 accumulate(stream)
259 }
260 fn generate_stream(
262 &self,
263 prompt: GenerationPrompt,
264 ) -> Result<impl Future<Output = Result<StringStream>> + Send>;
265 fn generate(
268 &self,
269 prompt: GenerationPrompt,
270 ) -> Result<impl Future<Output = Result<String>> + Send>
271 {
272 let stream = self.generate_stream(prompt)?;
273 accumulate(stream)
274 }
275}