// llm_chain/chains/conversation.rs

//! The `Chain` module models a conversation between an entity and an LLM.
//! It manages the conversation state and provides methods for sending messages and receiving responses.
//!
//! It relies on the `traits::Executor` trait to execute prompts and handle LLM interactions.

use crate::options::Options;
use crate::output::Output;
use crate::prompt::{ChatMessageCollection, Prompt, PromptTemplate, StringTemplateError};
use crate::step::Step;
use crate::tokens::{PromptTokensError, TokenizerError};
use crate::traits::{Executor, ExecutorError};
use crate::{parameters, Parameters};
use serde::{Deserialize, Serialize};

15/// `Chain` represents a conversation between an entity and an LLM.
16///
17/// It holds the conversation state and provides methods for sending messages and receiving responses.
18#[derive(Serialize, Deserialize, Default)]
19pub struct Chain {
20    state: ChatMessageCollection<String>,
21}
22
23impl Chain {
24    /// Constructs a new `Chain` with the given conversation state.
25    /// Self,
26    /// # Arguments
27    /// * `state` - The initial prompt state to use.
28    pub fn new(state: PromptTemplate) -> Result<Chain, StringTemplateError> {
29        state
30            .format(&parameters!())
31            .map(|state| state.to_chat())
32            .map(|state| Self { state })
33    }
34
35    /// Constructs a new `Chain` with the given conversation state by passing a ChatMessageCollection<String> (clone).
36    /// Self,
37    /// # Arguments
38    /// * `state` - The initial prompt state to use.
39    pub fn new_with_message_collection(state: &ChatMessageCollection<String>) -> Chain {
40        Self {
41            state: state.clone(),
42        }
43    }
44
45    /// Sends a message to the LLM and returns the response.
46    ///
47    /// This method sends a message to the LLM, adding it and the response to the internal state.
48    ///
49    /// # Arguments
50    /// * `step` - The step to send.
51    /// * `parameters` - The parameters to use when formatting the step.
52    /// * `exec` - The executor to use.
53    ///
54    /// # Returns
55    /// A `Result` containing the LLM's response as `E::Output` on success or an `Error` variant on failure.
56    pub async fn send_message<E: Executor>(
57        &mut self,
58        step: Step,
59        parameters: &Parameters,
60        exec: &E,
61    ) -> Result<Output, Error> {
62        let fmt = step.format(parameters)?;
63        self.send_message_raw(step.options(), &fmt, exec).await
64    }
65
66    /// Sends a message to the LLM and returns the response.
67    ///
68    /// This method takes a ready prompt and options and sends it to the LLM, adding it and the response to the internal state.
69    ///
70    /// # Arguments
71    /// * `options` - The options to use when executing the prompt.
72    /// * `prompt` - The prompt to send.
73    /// * `exec` - The executor to use.
74    ///
75    /// # Returns
76    /// A `Result` containing the LLM's response as `E::Output` on success or an `Error` variant on failure.
77    pub async fn send_message_raw<E: Executor>(
78        &mut self,
79        options: &Options,
80        prompt: &Prompt,
81        exec: &E,
82    ) -> Result<Output, Error> {
83        let tok = exec.tokens_used(options, prompt)?;
84        let tokens_remaining = tok.tokens_remaining();
85        let tokenizer = exec.get_tokenizer(options)?;
86        self.state.trim_context(&tokenizer, tokens_remaining)?;
87
88        // Combine the conversation history with the new prompt.
89        let prompt_with_history = Prompt::Chat(self.state.clone()).combine(prompt);
90
91        // Execute the prompt and retrieve the LLM's response.
92        let res = exec.execute(options, &prompt_with_history).await?;
93        let content = res.to_immediate().await?.as_content().to_chat();
94
95        self.state = prompt_with_history.to_chat();
96        self.state.append(content.clone());
97
98        Ok(Output::new_immediate(content.into()))
99    }
100}
101
102/// An error type representing various errors that can occur while interacting with the `Chain`.
103#[derive(thiserror::Error, Debug)]
104pub enum Error {
105    #[error("PromptTokensError: {0}")]
106    PromptTokens(#[from] PromptTokensError),
107    #[error("TokenizerError: {0}")]
108    Tokenizer(#[from] TokenizerError),
109    #[error("ExecutorError: {0}")]
110    Executor(#[from] ExecutorError),
111    #[error("No model output")]
112    NoModelOutput,
113    #[error("StringTemplateError: {0}")]
114    StringTemplate(#[from] crate::prompt::StringTemplateError),
115}