// kproc_llm/src/lib.rs

1#![doc = include_str!("../README.MD")]
2#[warn(missing_docs)]
3#[deny(warnings)]
4mod error;
5pub mod prelude;
6
7#[cfg(feature = "llama.cpp")]
8pub mod llama_cpp;
9#[cfg(feature = "ollama")]
10pub mod ollama;
11
12#[cfg(feature = "template")]
13pub mod template;
14
15use std::{future::Future, pin::Pin};
16
17/// Export Error enum.
18pub use error::Error;
19use futures::Stream;
20
/// Export Result type.
///
/// Crate-wide result alias using this crate's [`Error`] as the error type.
pub type Result<T> = std::result::Result<T, Error>;
23
/// Format
///
/// Output format requested from the model. Defaults to [`Format::Text`],
/// matching the value used by [`Prompt::prompt`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum Format
{
  /// Output text
  #[default]
  Text,
  /// Output Json
  Json,
}
32
/// Prompt
///
/// A single request to a large language model: the user text plus optional
/// assistant/system messages, the desired output format and (with the
/// `image` feature) an optional input image.
pub struct Prompt
{
  /// The user prompt text sent to the model.
  pub prompt: String,
  /// Optional assistant message — presumably used to prefill the start of
  /// the model's reply; confirm against the backend implementations.
  pub assistant: Option<String>,
  /// Optional system hint describing how the model should behave.
  pub system: Option<String>,
  /// Requested output format (defaults to [`Format::Text`] in [`Prompt::prompt`]).
  pub format: Format,
  /// Optional input image, only available with the `image` feature.
  #[cfg(feature = "image")]
  pub image: Option<kproc_values::Image>,
}
43
44impl Prompt
45{
46  /// Create a new prompt, from the given string.
47  pub fn prompt(prompt: impl Into<String>) -> Self
48  {
49    let prompt = prompt.into();
50    Self {
51      prompt,
52      assistant: None,
53      system: None,
54      format: Format::Text,
55      #[cfg(feature = "image")]
56      image: None,
57    }
58  }
59  /// Set the system hint.
60  pub fn system(mut self, system: impl Into<String>) -> Self
61  {
62    self.system = Some(system.into());
63    self
64  }
65  /// Set the result format
66  pub fn format(mut self, format: impl Into<Format>) -> Self
67  {
68    self.format = format.into();
69    self
70  }
71  /// Set an input image
72  #[cfg(feature = "image")]
73  pub fn image(mut self, image: impl Into<kproc_values::Image>) -> Self
74  {
75    self.image = Some(image.into());
76    self
77  }
78}
79
/// String stream
///
/// Boxed, pinned stream of `Result<String>` token chunks produced during
/// inference; `Send` so it can be moved across task/thread boundaries.
pub type StringStream = Pin<Box<dyn Stream<Item = Result<String>> + Send>>;

// Type-erase a concrete stream into the boxed [`StringStream`] form.
pub(crate) fn pin_stream<T: 'static + Send + Stream<Item = Result<String>>>(t: T) -> StringStream
{
  Box::pin(t)
}
87
88/// LLM
89pub trait LargeLanguageModel
90{
91  fn infer_stream(
92    &self,
93    prompt: Prompt,
94  ) -> Result<impl Future<Output = Result<StringStream>> + Send>;
95  fn infer(&self, prompt: Prompt) -> Result<impl Future<Output = Result<String>> + Send>
96  {
97    use futures::stream::StreamExt;
98    let stream = self.infer_stream(prompt)?;
99    Ok(async {
100      let mut result: String = Default::default();
101      let mut stream = Box::pin(stream.await?);
102      while let Some(next_token) = stream.next().await
103      {
104        if result.is_empty()
105        {
106          result = next_token?;
107        }
108        else
109        {
110          result.push_str(&next_token?);
111        }
112      }
113      Ok(result)
114    })
115  }
116}