1use std::convert::Infallible;
2
3use llama_link::*;
4use llmtoolbox::{tool, ToolBox};
5
/// Example tool that greets a named user with an opinion on a topic.
///
/// Registered with a [`ToolBox`] in `main`; the `#[tool]` impl below exposes
/// its opinion methods as callable tool functions.
#[derive(Debug)]
struct ConversationTool {
    /// Name interpolated into every generated reply (e.g. "Hello Dave, …").
    user_name: String,
}
10
11#[tool]
12impl ConversationTool {
13 fn new(user_name: String) -> Self {
14 Self { user_name }
15 }
16
17 #[tool_part]
20 fn give_negative_opinion(&self, topic: ConverstationTopic) -> String {
21 format!(
22 "Hello {}, I don't like `{}`, because `{}`",
23 self.user_name, topic.topic, topic.opinion
24 )
25 }
26
27 #[tool_part]
30 async fn give_positive_opinion(&self, topic: ConverstationTopic) -> String {
31 format!(
32 "Hello {}, I like `{}`, because `{}`",
33 self.user_name, topic.topic, topic.opinion
34 )
35 }
36}
37
/// Argument payload the model supplies when invoking an opinion tool.
///
/// `JsonSchema` lets the toolbox advertise this shape to the model;
/// `Deserialize` parses the model's JSON arguments back into it.
// NOTE(review): the type name is misspelled ("Converstation"). It is `pub` and
// referenced by the tool signatures (and presumably the generated schema), so
// renaming would be a breaking change — left as-is.
#[derive(serde::Deserialize, schemars::JsonSchema)]
pub struct ConverstationTopic {
    /// Subject under discussion, e.g. "the rust programming language".
    pub topic: String,
    /// Justification to weave into the reply.
    pub opinion: String,
}
45
46#[tokio::main]
47async fn main() {
48 let mut toolbox: ToolBox<String, Infallible> = ToolBox::new();
49 let tool = ConversationTool::new("Dave".to_owned());
50 toolbox.add_tool(tool).unwrap();
51
52 let link = LlamaLink::new("http://127.0.0.1:3756", Config::builder().build());
53 let system = format!("You are a helpful AI assistant. Respond to the user in this json function calling format: {}",serde_json::to_string(toolbox.schema()).unwrap());
54 let messages = vec![Message::User("What do you think about the rust programming language".to_owned())];
55 let result = link
56 .call_function_with_format(
57 &system, &messages, &PromptFormatter::default(), &toolbox)
58 .await;
59 match result {
60 Ok(Ok(call_result)) => println!("{}", call_result),
61 Err(error) => panic!("{}", error),
62 }
63}
64
65