ic_llm/lib.rs

//! A library for making requests to the LLM canister on the Internet Computer.
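//!
//! # Usage
//!
//! A consuming canister can expose the helpers below as update methods.
//! This is a sketch (the endpoint name `ask` is illustrative, not part of
//! this crate):
//!
//! ```ignore
//! use ic_llm::Model;
//!
//! #[ic_cdk::update]
//! async fn ask(question: String) -> String {
//!     ic_llm::prompt(Model::Llama3_1_8B, question).await
//! }
//! ```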
use candid::{CandidType, Principal};
use serde::{Deserialize, Serialize};
use std::fmt;

// The principal of the LLM canister.
const LLM_CANISTER: &str = "w36hm-eqaaa-aaaal-qr76a-cai";

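/// The argument passed to the LLM canister's `v0_chat` method.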
#[derive(CandidType, Serialize, Deserialize, Debug)]
struct Request {
    model: String,
    messages: Vec<ChatMessage>,
}

/// The role of a `ChatMessage`.
#[derive(CandidType, Serialize, Deserialize, Debug)]
pub enum Role {
    #[serde(rename = "system")]
    System,
    #[serde(rename = "user")]
    User,
    #[serde(rename = "assistant")]
    Assistant,
}
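// Note: the `rename` attributes above set the on-wire variant labels, so a
// `Role` is encoded as lowercase `system`/`user`/`assistant` when a request
// is serialized.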

/// A message in a chat.
#[derive(CandidType, Serialize, Deserialize, Debug)]
pub struct ChatMessage {
    pub role: Role,
    pub content: String,
}

/// Supported LLM models.
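///
/// A model's `Display` form is the exact identifier string sent to the
/// canister:
///
/// ```
/// use ic_llm::Model;
/// assert_eq!(Model::Llama3_1_8B.to_string(), "llama3.1:8b");
/// ```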
#[derive(Debug)]
pub enum Model {
    Llama3_1_8B,
}

impl fmt::Display for Model {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let text = match self {
            Model::Llama3_1_8B => "llama3.1:8b",
        };
        write!(f, "{}", text)
    }
}

/// Sends a single message to a model.
///
/// # Example
///
/// ```
/// use ic_llm::Model;
///
/// # async fn prompt_example() -> String {
/// ic_llm::prompt(Model::Llama3_1_8B, "What's the speed of light?").await
/// # }
/// ```
pub async fn prompt<P: ToString>(model: Model, prompt_str: P) -> String {
    let llm_canister = Principal::from_text(LLM_CANISTER).expect("invalid canister id");

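    // Wrap the prompt in a single user message and call the canister's
    // `v0_chat` method; the reply arrives as a one-element tuple.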
    let res: (String,) = ic_cdk::call(
        llm_canister,
        "v0_chat",
        (Request {
            model: model.to_string(),
            messages: vec![ChatMessage {
                role: Role::User,
                content: prompt_str.to_string(),
            }],
        },),
    )
    .await
    .expect("call to the LLM canister failed");
    res.0
}

/// Sends a list of messages to a model.
///
/// # Example
///
/// ```
/// use ic_llm::{Model, ChatMessage, Role};
///
/// # async fn chat_example() -> String {
/// ic_llm::chat(
///     Model::Llama3_1_8B,
///     vec![
///         ChatMessage {
///             role: Role::System,
///             content: "You are a helpful assistant".to_string(),
///         },
///         ChatMessage {
///             role: Role::User,
///             content: "How big is the sun?".to_string(),
///         },
///     ],
/// )
/// .await
/// # }
/// ```
pub async fn chat(model: Model, messages: Vec<ChatMessage>) -> String {
    let llm_canister = Principal::from_text(LLM_CANISTER).expect("invalid canister id");

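    // Forward the caller's message list unchanged to `v0_chat`.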
    let res: (String,) = ic_cdk::call(
        llm_canister,
        "v0_chat",
        (Request {
            model: model.to_string(),
            messages,
        },),
    )
    .await
    .expect("call to the LLM canister failed");
    res.0
}