// Source: llama_cpp_bindings/openai/openai_chat_template_params.rs

/// Parameters for applying OpenAI-compatible chat templates.
///
/// All string fields borrow from the caller for the lifetime `'params`;
/// this struct performs no allocation itself. Every field is a shared
/// reference, an `Option` of one, or a `bool`, so the type is `Copy` and
/// can be passed by value cheaply. `Eq` and `Hash` are derived so params
/// can be compared exactly or used as a cache/map key.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct OpenAIChatTemplateParams<'params> {
    /// OpenAI-compatible messages JSON array.
    pub messages_json: &'params str,
    /// Optional OpenAI-compatible tools JSON array.
    pub tools_json: Option<&'params str>,
    /// Optional tool choice string.
    pub tool_choice: Option<&'params str>,
    /// Optional JSON schema string for tool grammar generation.
    pub json_schema: Option<&'params str>,
    /// Optional custom grammar string.
    pub grammar: Option<&'params str>,
    /// Optional reasoning format string.
    pub reasoning_format: Option<&'params str>,
    /// Optional chat template kwargs JSON object.
    pub chat_template_kwargs: Option<&'params str>,
    /// Whether to add the assistant generation prompt.
    pub add_generation_prompt: bool,
    /// Whether to render templates with Jinja.
    pub use_jinja: bool,
    /// Whether to allow parallel tool calls.
    pub parallel_tool_calls: bool,
    /// Whether thinking blocks are enabled.
    pub enable_thinking: bool,
    /// Whether to add BOS.
    pub add_bos: bool,
    /// Whether to add EOS.
    pub add_eos: bool,
    /// Whether to parse tool calls in responses.
    pub parse_tool_calls: bool,
}