kproc-llm 0.6.1

Knowledge Processing library using LLMs.
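To show how the pieces fit together before the source itself, here is a minimal sketch of how this module's `Template` could be used from inside the crate. The `hello_prompt` helper and its inline template source are illustrative assumptions, not part of the crate; the real llama template lives under `data/templates` and is exercised by the unit test at the end of the listing.

// Illustrative only: a hypothetical helper as it might look inside this
// module (`render` is pub(crate), so it cannot be called from outside the
// crate). The inline source is a made-up example, not the bundled llama
// template.
fn hello_prompt() -> Result<String>
{
  let template = Template::new(
    "{% for message in messages %}{{ message.role }}: {{ message.content }}\n{% endfor %}",
  )?;
  // How the role prints ("user", "User", ...) depends on how `Role`
  // serializes; the test at the bottom shows the exact output for the
  // bundled llama template.
  template.render(&[crate::Message {
    role: crate::Role::User,
    content: "Hello world.".into(),
  }])
}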
//! Template module
use crate::prelude::*;

/// A chat template, backed by a minijinja template source.
#[derive(Debug)]
pub struct Template
{
  #[allow(dead_code)]
  source: String,
}

impl Template
{
  /// Creates a new template from the given minijinja source.
  pub fn new(source: impl Into<String>) -> Result<Template>
  {
    let source = source.into();
    Ok(Template { source })
  }
  /// Renders the template against the given messages, which are exposed
  /// to the template as the `messages` context variable.
  #[allow(dead_code)]
  pub(crate) fn render(&self, messages: &[crate::Message]) -> Result<String>
  {
    // Register the source under a fixed name in a fresh minijinja
    // environment, then look it up and render it below.
    let mut env = minijinja::Environment::new();
    env.add_template("template", &self.source)?;
    Ok(
      env
        .get_template("template")?
        .render(minijinja::context! { messages => messages })?,
    )
  }
}

#[cfg(test)]
mod tests
{
  #[test]
  fn template_llama()
  {
    // Rendering a single user message with the bundled llama template
    // should produce the llama-style chat markup, ending with the header
    // that cues the assistant's reply.
    let template = super::Template::new(include_str!("../data/templates/llama")).unwrap();
    assert_eq!(
      template.render(&[crate::Message {
        role: crate::Role::User,
        content: "Hello world.".into()
      }]).unwrap(),
      "<|start_header_id|>user<|end_header_id|>Hello world.<|eot_id|>\n<|start_header_id|>assistant<|end_header_id|><|start_header_id|>assistant<|end_header_id|>\n"
    );
  }
}