1use llama_cpp_2::{
2 ApplyChatTemplateError,
3 model::{LlamaChatMessage, LlamaChatTemplate, LlamaModel},
4};
5
6use crate::MessageRole;
7
/// Strategy for rendering a sequence of role-tagged chat messages into a
/// single prompt string via a model's chat template.
pub trait ChatTemplate {
    /// Error type produced when template application fails.
    type Error;

    /// Renders `messages` into a prompt string using `model_tmpl`.
    ///
    /// # Errors
    ///
    /// Returns [`Self::Error`] if the template cannot be applied to the
    /// given messages.
    fn apply_template(
        &self,
        model: &LlamaModel,
        model_tmpl: &LlamaChatTemplate,
        messages: &[(MessageRole, String)],
    ) -> Result<String, Self::Error>;
}
17
/// [`ChatTemplate`] implementation that delegates prompt rendering to
/// [`LlamaModel::apply_chat_template`], i.e. the llama.cpp-side template
/// machinery. Stateless unit struct; `Default` gives a zero-cost instance.
#[derive(Clone, Default)]
pub struct ModelChatTemplate;
20
21impl ChatTemplate for ModelChatTemplate {
22 fn apply_template(
23 &self,
24 model: &LlamaModel,
25 model_tmpl: &LlamaChatTemplate,
26 messages: &[(MessageRole, String)],
27 ) -> Result<String, Self::Error> {
28 let llama_msg = messages
29 .iter()
30 .map(|(role, cnt)| LlamaChatMessage::new(role.to_string(), cnt.clone()))
31 .collect::<Result<Vec<_>, _>>()
32 .expect("message preprocessing failed");
33 return model.apply_chat_template(model_tmpl, &llama_msg, true);
34 }
35
36 type Error = ApplyChatTemplateError;
37}