// ic_llm/lib.rs

1//! A library for making requests to the LLM canister on the Internet Computer.
2use candid::{CandidType, Principal};
3use serde::{Deserialize, Serialize};
4use std::fmt;
5
/// The principal (canister id) of the LLM canister this library talks to.
const LLM_CANISTER: &str = "w36hm-eqaaa-aaaal-qr76a-cai";
8
/// On-wire payload for the LLM canister's `v0_chat` method.
#[derive(CandidType, Serialize, Deserialize, Debug)]
struct Request {
    // Model identifier string, e.g. "llama3.1:8b" (produced by `Model`'s `Display` impl).
    model: String,
    // The ordered list of chat messages to send.
    messages: Vec<ChatMessage>,
}
14
15/// The role of a `ChatMessage`.
16#[derive(CandidType, Serialize, Deserialize, Debug)]
17pub enum Role {
18    #[serde(rename = "system")]
19    System,
20    #[serde(rename = "user")]
21    User,
22}
23
/// A message in a chat.
#[derive(CandidType, Serialize, Deserialize, Debug)]
pub struct ChatMessage {
    /// Who authored the message (see [`Role`]).
    pub role: Role,
    /// The text content of the message.
    pub content: String,
}
30
/// Supported LLM models.
// Fieldless enum: cheap to copy, so derive `Copy` along with the usual set.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Model {
    /// Llama 3.1 (8B); rendered as `"llama3.1:8b"` by the `Display` impl.
    Llama3_1_8B,
}
36
37impl fmt::Display for Model {
38    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
39        let text = match self {
40            Model::Llama3_1_8B => "llama3.1:8b",
41        };
42        write!(f, "{}", text)
43    }
44}
45
46/// Sends a single message to a model.
47///
48/// # Example
49///
50/// ```
51/// use ic_llm::Model;
52///
53/// # async fn prompt_example() -> String {
54/// ic_llm::prompt(Model::Llama3_1_8B, "What's the speed of light?").await
55/// # }
56/// ```
57pub async fn prompt<P: ToString>(model: Model, prompt_str: P) -> String {
58    let llm_canister = Principal::from_text(LLM_CANISTER).expect("invalid canister id");
59
60    let res: (String,) = ic_cdk::call(
61        llm_canister,
62        "v0_chat",
63        (Request {
64            model: model.to_string(),
65            messages: vec![ChatMessage {
66                role: Role::User,
67                content: prompt_str.to_string(),
68            }],
69        },),
70    )
71    .await
72    .unwrap();
73    res.0
74}
75
76/// Sends a list of messages to a model.
77///
78/// # Example
79///
80/// ```
81/// use ic_llm::{Model, ChatMessage, Role};
82///
83/// # async fn chat_example() -> String {
84/// ic_llm::chat(
85///     Model::Llama3_1_8B,
86///     vec![
87///         ChatMessage {
88///             role: Role::System,
89///             content: "You are a helpful assistant".to_string(),
90///         },
91///         ChatMessage {
92///             role: Role::User,
93///             content: "How big is the sun?".to_string(),
94///         },
95///     ],
96/// )
97/// .await
98/// # }
99/// ```
100pub async fn chat(model: Model, messages: Vec<ChatMessage>) -> String {
101    let llm_canister = Principal::from_text(LLM_CANISTER).expect("invalid canister id");
102
103    let res: (String,) = ic_cdk::call(
104        llm_canister,
105        "v0_chat",
106        (Request {
107            model: model.to_string(),
108            messages,
109        },),
110    )
111    .await
112    .unwrap();
113    res.0
114}