// context/context.rs
1use std::collections::HashMap;
2
3use rig::client::{CompletionClient, ProviderClient};
4use rig::completion::Prompt;
5use rig_openapi_tools::OpenApiToolset;
6
7#[tokio::main]
8async fn main() -> anyhow::Result<()> {
9    let openai = rig::providers::openai::Client::from_env();
10
11    // ---------------------------------------------------------------
12    // 1. Visible context — LLM sees the values and uses them in calls
13    // ---------------------------------------------------------------
14    println!("=== Visible context ===\n");
15
16    let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
17        .base_url("https://petstore3.swagger.io/api/v3")
18        .build()?;
19
20    println!("Loaded {} tools from Petstore spec\n", toolset.len());
21
22    // The LLM sees these values in its preamble and uses them
23    // when calling tools. For example, it will pass `username` to getUserByName.
24    let visible_ctx = HashMap::from([
25        ("username".to_string(), "user1".to_string()),
26        ("preferred_status".to_string(), "available".to_string()),
27    ]);
28    let context_preamble = OpenApiToolset::context_preamble(&visible_ctx);
29
30    let preamble = format!(
31        "You have access to the Swagger Petstore API.\n\n\
32         {context_preamble}\n\n\
33         When I refer to \"my\" profile or data, use the username from the context above."
34    );
35
36    let agent = openai
37        .agent("gpt-4o")
38        .preamble(&preamble)
39        .tools(toolset.tools_with_context(&HashMap::new()))
40        .build();
41
42    // The LLM picks up username=user1 from the preamble
43    // and calls getUserByName with username "user1".
44    println!(">>> Look up my user profile and summarize it.");
45    let response: String = agent
46        .prompt("Look up my user profile and summarize it.")
47        .await?;
48    println!("{response}\n");
49
50    // The LLM picks up preferred_status=available from the preamble
51    // and calls findPetsByStatus with status "available".
52    println!(">>> Find pets matching my preferred status.");
53    let response: String = agent
54        .prompt("Find pets matching my preferred status.")
55        .await?;
56    println!("{response}\n");
57
58    // ---------------------------------------------------------------
59    // 2. Hidden context — auto-injected, LLM never sees the values
60    // ---------------------------------------------------------------
61    println!("=== Hidden context ===\n");
62
63    // Hidden context is useful for secrets, user IDs, or any parameter
64    // the LLM should NOT decide — it's injected automatically at execution
65    // time and removed from the tool schema so the LLM can't see or override it.
66
67    // Static hidden context set at build time (e.g. API key for the upstream service)
68    let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
69        .base_url("https://petstore3.swagger.io/api/v3")
70        .hidden_context("api_key", "special-key")
71        .build()?;
72
73    // Per-request hidden context (e.g. current user from session).
74    // The LLM won't see `username` in the tool schema — it's filled in
75    // automatically, so it can't hallucinate a different user.
76    let per_request_ctx = HashMap::from([("username".to_string(), "user1".to_string())]);
77
78    let agent = openai
79        .agent("gpt-4o")
80        .preamble(
81            "You have access to the Swagger Petstore API. \
82             Use the available tools to answer questions about the pet store.",
83        )
84        .tools(toolset.tools_with_context(&per_request_ctx))
85        .build();
86
87    // The LLM calls getUserByName without providing `username` —
88    // it's not in the schema. The library injects username=user1 automatically.
89    println!(">>> Get my profile.");
90    let response: String = agent.prompt("Get my profile.").await?;
91    println!("{response}\n");
92
93    Ok(())
94}