llmvm_core_lib/prompts.rs

use std::{cell::RefCell, sync::Arc};

use handlebars::{
    no_escape, Context, Handlebars, Helper, HelperDef, HelperResult, Output, RenderContext,
    Renderable, StringOutput,
};
use llmvm_util::{get_file_path, DirType};
use rust_embed::RustEmbed;
use serde_json::Value;
use tokio::fs;
use tracing::debug;

use crate::{error::CoreError, Result};

#[derive(RustEmbed)]
#[folder = "./prompts"]
struct BuiltInPrompts;

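/// Handlebars block helper that captures the body of a
/// `{{#system_role}} ... {{/system_role}}` block into shared state instead of
/// writing it to the main rendered output. The captured text is retrieved
/// afterwards via `SystemRoleHelperState::system_prompt` and used as the
/// system prompt.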
struct SystemRoleHelper(Arc<std::sync::Mutex<SystemRoleHelperState>>);

#[derive(Default)]
struct SystemRoleHelperState {
    used: bool,
    out: RefCell<StringOutput>,
}

impl HelperDef for SystemRoleHelper {
    fn call<'reg: 'rc, 'rc>(
        &self,
        h: &Helper<'reg, 'rc>,
        r: &'reg Handlebars<'reg>,
        ctx: &'rc Context,
        rc: &mut RenderContext<'reg, 'rc>,
        _out: &mut dyn Output,
    ) -> HelperResult {
        h.template()
            .map(|t| {
                let mut state = self.0.lock().unwrap();
                state.used = true;
                t.render(r, ctx, rc, &mut *state.out.get_mut())
            })
            .unwrap_or(Ok(()))
    }
}

impl SystemRoleHelperState {
    fn system_prompt(&self) -> Option<String> {
        match self.used {
            false => None,
            true => self.out.take().into_string().ok(),
        }
    }
}

/// A prompt that is ready to use for text generation.
/// May contain an optional prompt for the system role.
#[derive(Debug)]
pub struct ReadyPrompt {
    /// A system role prompt. For chat generation requests, this will be appended
    /// to the `thread_messages` of the backend generation request. For non-chat
    /// generation requests, the system prompt will be prepended to the main prompt.
    pub system_prompt: Option<String>,
    /// The main prompt. For chat generation requests, this prompt should use the user role.
    pub main_prompt: String,
}

impl ReadyPrompt {
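    /// Loads `<template_id>.hbs`, preferring a template file in the llmvm
    /// prompts directory and falling back to the built-in templates embedded
    /// at build time.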
    async fn load_template(template_id: &str) -> Result<String> {
        let template_file_name = format!("{}.hbs", template_id);
        let template_path = get_file_path(DirType::Prompts, &template_file_name, false)
            .ok_or(CoreError::UserHomeNotFound)?;
        if fs::try_exists(&template_path).await.unwrap_or_default() {
            return Ok(fs::read_to_string(template_path).await?);
        }
        let embedded_file =
            BuiltInPrompts::get(&template_file_name).ok_or(CoreError::TemplateNotFound)?;
        Ok(std::str::from_utf8(embedded_file.data.as_ref())?.to_string())
    }

    fn process(template: &str, parameters: &Value, is_chat_model: bool) -> Result<Self> {
        let mut handlebars = Handlebars::new();
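        // Prompts are plain text, so disable the default HTML escaping of
        // rendered values.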
        handlebars.register_escape_fn(no_escape);
        let system_role_helper_state =
            Arc::new(std::sync::Mutex::new(SystemRoleHelperState::default()));
        handlebars.register_helper(
            "system_role",
            Box::new(SystemRoleHelper(system_role_helper_state.clone())),
        );

        debug!("prompt parameters = {:?}", parameters);

        let mut main_prompt = handlebars
            .render_template(template, parameters)?
            .trim()
            .to_string();

        let mut system_prompt = system_role_helper_state
            .lock()
            .unwrap()
            .system_prompt()
            .map(|s| s.trim().to_string());

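        // Non-chat models have no separate system role, so fold the captured
        // system prompt into the main prompt instead.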
        if system_prompt.is_some() && !is_chat_model {
            main_prompt = format!("{}\n\n{}", system_prompt.take().unwrap(), main_prompt);
        }

        Ok(Self {
            system_prompt,
            main_prompt,
        })
    }

    /// Loads a Handlebars prompt template from the current project or
    /// user home directory (falling back to the built-in templates) and
    /// generates a full prompt using the given template parameters.
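    ///
    /// # Example
    ///
    /// A minimal sketch; the template id `"codegen"` and the parameter names
    /// are hypothetical and must match a stored `codegen.hbs` template:
    ///
    /// ```ignore
    /// use serde_json::json;
    ///
    /// let prompt = ReadyPrompt::from_stored_template(
    ///     "codegen",
    ///     &json!({ "user_request": "parse a CSV file", "language": "rust" }),
    ///     true, // chat model: keep the system prompt separate
    /// )
    /// .await?;
    /// ```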
    pub async fn from_stored_template(
        template_id: &str,
        parameters: &Value,
        is_chat_model: bool,
    ) -> Result<Self> {
        let template = Self::load_template(template_id).await?;
        Self::process(&template, parameters, is_chat_model)
    }

    /// Generates a full prompt using a Handlebars template and parameters.
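    ///
    /// # Example
    ///
    /// A minimal sketch using the `system_role` block helper; the template
    /// text and parameters are illustrative only:
    ///
    /// ```ignore
    /// use serde_json::json;
    ///
    /// let prompt = ReadyPrompt::from_custom_template(
    ///     "{{#system_role}}You are a helpful assistant.{{/system_role}}Summarize: {{text}}",
    ///     &json!({ "text": "..." }),
    ///     false, // non-chat model: the system prompt is prepended to the main prompt
    /// )?;
    /// ```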
    pub fn from_custom_template(
        template: &str,
        parameters: &Value,
        is_chat_model: bool,
    ) -> Result<Self> {
        Self::process(template, parameters, is_chat_model)
    }

    /// Creates a `ReadyPrompt` from a completed prompt.
    pub fn from_custom_prompt(main_prompt: String) -> Self {
        Self {
            system_prompt: None,
            main_prompt,
        }
    }
}
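
// A minimal test sketch illustrating how the `system_role` helper interacts
// with `is_chat_model`: for non-chat models the captured system prompt is
// folded into the main prompt. The template text and assertions are
// illustrative only.
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    #[test]
    fn system_prompt_is_prepended_for_non_chat_models() {
        let template =
            "{{#system_role}}You are a helpful assistant.{{/system_role}}Describe {{topic}}.";
        let prompt =
            ReadyPrompt::from_custom_template(template, &json!({ "topic": "Rust" }), false)
                .unwrap();
        assert!(prompt.system_prompt.is_none());
        assert!(prompt
            .main_prompt
            .starts_with("You are a helpful assistant."));
    }
}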