pub struct OpenApiToolsetBuilder { /* private fields */ }
Expand description
Builder for configuring an OpenApiToolset.
Implementations§
Source§impl OpenApiToolsetBuilder
impl OpenApiToolsetBuilder
Sourcepub fn base_url(self, url: impl Into<String>) -> Self
pub fn base_url(self, url: impl Into<String>) -> Self
Override the base URL from the spec.
Examples found in repository?
examples/petstore.rs (line 13)
8async fn main() -> anyhow::Result<()> {
9 let openai = rig::providers::openai::Client::from_env();
10
11 // Parse once at startup — reuse across requests
12 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
13 .base_url("https://petstore3.swagger.io/api/v3")
14 .build()?;
15
16 println!("Loaded {} tools from Petstore spec\n", toolset.len());
17
18 // Simulate a per-request context (e.g. from a logged-in user session)
19 let visible_ctx = HashMap::from([
20 ("username".to_string(), "user1".to_string()),
21 ("preferred_status".to_string(), "available".to_string()),
22 ]);
23 let context_preamble = OpenApiToolset::context_preamble(&visible_ctx);
24
25 let preamble = format!(
26 "You have access to the Swagger Petstore API. \
27 Use the available tools to answer questions about the pet store.\n\n\
28 {context_preamble}"
29 );
30
31 // Create agent with per-request tools (cheap clone)
32 let agent = openai
33 .agent("gpt-4o")
34 .preamble(&preamble)
35 .tools(toolset.tools_with_context(&HashMap::new()))
36 .build();
37
38 let prompts = [
39 "What pets are currently available in the store? Show me the first 3.",
40 "How many dogs are in the store?",
41 "Look up my user profile and summarize it.",
42 ];
43
44 for prompt in prompts {
45 println!(">>> {prompt}");
46 let response: String = agent.prompt(prompt).await?;
47 println!("{response}\n");
48 }
49
50 Ok(())
51}
More examples
examples/context.rs (line 17)
8async fn main() -> anyhow::Result<()> {
9 let openai = rig::providers::openai::Client::from_env();
10
11 // ---------------------------------------------------------------
12 // 1. Visible context — LLM sees the values and uses them in calls
13 // ---------------------------------------------------------------
14 println!("=== Visible context ===\n");
15
16 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
17 .base_url("https://petstore3.swagger.io/api/v3")
18 .build()?;
19
20 println!("Loaded {} tools from Petstore spec\n", toolset.len());
21
22 // The LLM sees these values in its preamble and uses them
23 // when calling tools. For example, it will pass `username` to getUserByName.
24 let visible_ctx = HashMap::from([
25 ("username".to_string(), "user1".to_string()),
26 ("preferred_status".to_string(), "available".to_string()),
27 ]);
28 let context_preamble = OpenApiToolset::context_preamble(&visible_ctx);
29
30 let preamble = format!(
31 "You have access to the Swagger Petstore API.\n\n\
32 {context_preamble}\n\n\
33 When I refer to \"my\" profile or data, use the username from the context above."
34 );
35
36 let agent = openai
37 .agent("gpt-4o")
38 .preamble(&preamble)
39 .tools(toolset.tools_with_context(&HashMap::new()))
40 .build();
41
42 // The LLM picks up username=user1 from the preamble
43 // and calls getUserByName with username "user1".
44 println!(">>> Look up my user profile and summarize it.");
45 let response: String = agent
46 .prompt("Look up my user profile and summarize it.")
47 .await?;
48 println!("{response}\n");
49
50 // The LLM picks up preferred_status=available from the preamble
51 // and calls findPetsByStatus with status "available".
52 println!(">>> Find pets matching my preferred status.");
53 let response: String = agent
54 .prompt("Find pets matching my preferred status.")
55 .await?;
56 println!("{response}\n");
57
58 // ---------------------------------------------------------------
59 // 2. Hidden context — auto-injected, LLM never sees the values
60 // ---------------------------------------------------------------
61 println!("=== Hidden context ===\n");
62
63 // Hidden context is useful for secrets, user IDs, or any parameter
64 // the LLM should NOT decide — it's injected automatically at execution
65 // time and removed from the tool schema so the LLM can't see or override it.
66
67 // Static hidden context set at build time (e.g. API key for the upstream service)
68 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
69 .base_url("https://petstore3.swagger.io/api/v3")
70 .hidden_context("api_key", "special-key")
71 .build()?;
72
73 // Per-request hidden context (e.g. current user from session).
74 // The LLM won't see `username` in the tool schema — it's filled in
75 // automatically, so it can't hallucinate a different user.
76 let per_request_ctx = HashMap::from([("username".to_string(), "user1".to_string())]);
77
78 let agent = openai
79 .agent("gpt-4o")
80 .preamble(
81 "You have access to the Swagger Petstore API. \
82 Use the available tools to answer questions about the pet store.",
83 )
84 .tools(toolset.tools_with_context(&per_request_ctx))
85 .build();
86
87 // The LLM calls getUserByName without providing `username` —
88 // it's not in the schema. The library injects username=user1 automatically.
89 println!(">>> Get my profile.");
90 let response: String = agent.prompt("Get my profile.").await?;
91 println!("{response}\n");
92
93 Ok(())
94}
Sourcepub fn client(self, client: Client) -> Self
pub fn client(self, client: Client) -> Self
Provide a pre-configured reqwest client (e.g. with default auth headers or timeouts).
Sourcepub fn hidden_context(self, key: impl Into<String>, value: impl Into<String>) -> Self
pub fn hidden_context(self, key: impl Into<String>, value: impl Into<String>) -> Self
[NOTE: heading reconstructed — the extraction dropped the `hidden_context` method header; signature inferred from the example usage `.hidden_context("api_key", "special-key")`. Verify against the crate source.]
Add a hidden context parameter that will be auto-injected into tool calls. The LLM will not see this parameter in the tool schema.
Examples found in repository?
examples/context.rs (line 70)
8async fn main() -> anyhow::Result<()> {
9 let openai = rig::providers::openai::Client::from_env();
10
11 // ---------------------------------------------------------------
12 // 1. Visible context — LLM sees the values and uses them in calls
13 // ---------------------------------------------------------------
14 println!("=== Visible context ===\n");
15
16 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
17 .base_url("https://petstore3.swagger.io/api/v3")
18 .build()?;
19
20 println!("Loaded {} tools from Petstore spec\n", toolset.len());
21
22 // The LLM sees these values in its preamble and uses them
23 // when calling tools. For example, it will pass `username` to getUserByName.
24 let visible_ctx = HashMap::from([
25 ("username".to_string(), "user1".to_string()),
26 ("preferred_status".to_string(), "available".to_string()),
27 ]);
28 let context_preamble = OpenApiToolset::context_preamble(&visible_ctx);
29
30 let preamble = format!(
31 "You have access to the Swagger Petstore API.\n\n\
32 {context_preamble}\n\n\
33 When I refer to \"my\" profile or data, use the username from the context above."
34 );
35
36 let agent = openai
37 .agent("gpt-4o")
38 .preamble(&preamble)
39 .tools(toolset.tools_with_context(&HashMap::new()))
40 .build();
41
42 // The LLM picks up username=user1 from the preamble
43 // and calls getUserByName with username "user1".
44 println!(">>> Look up my user profile and summarize it.");
45 let response: String = agent
46 .prompt("Look up my user profile and summarize it.")
47 .await?;
48 println!("{response}\n");
49
50 // The LLM picks up preferred_status=available from the preamble
51 // and calls findPetsByStatus with status "available".
52 println!(">>> Find pets matching my preferred status.");
53 let response: String = agent
54 .prompt("Find pets matching my preferred status.")
55 .await?;
56 println!("{response}\n");
57
58 // ---------------------------------------------------------------
59 // 2. Hidden context — auto-injected, LLM never sees the values
60 // ---------------------------------------------------------------
61 println!("=== Hidden context ===\n");
62
63 // Hidden context is useful for secrets, user IDs, or any parameter
64 // the LLM should NOT decide — it's injected automatically at execution
65 // time and removed from the tool schema so the LLM can't see or override it.
66
67 // Static hidden context set at build time (e.g. API key for the upstream service)
68 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
69 .base_url("https://petstore3.swagger.io/api/v3")
70 .hidden_context("api_key", "special-key")
71 .build()?;
72
73 // Per-request hidden context (e.g. current user from session).
74 // The LLM won't see `username` in the tool schema — it's filled in
75 // automatically, so it can't hallucinate a different user.
76 let per_request_ctx = HashMap::from([("username".to_string(), "user1".to_string())]);
77
78 let agent = openai
79 .agent("gpt-4o")
80 .preamble(
81 "You have access to the Swagger Petstore API. \
82 Use the available tools to answer questions about the pet store.",
83 )
84 .tools(toolset.tools_with_context(&per_request_ctx))
85 .build();
86
87 // The LLM calls getUserByName without providing `username` —
88 // it's not in the schema. The library injects username=user1 automatically.
89 println!(">>> Get my profile.");
90 let response: String = agent.prompt("Get my profile.").await?;
91 println!("{response}\n");
92
93 Ok(())
94}
Sourcepub fn bearer_token(self, token: &str) -> Self
pub fn bearer_token(self, token: &str) -> Self
Convenience: create a client with a bearer token Authorization header.
Sourcepub fn build(self) -> Result<OpenApiToolset>
pub fn build(self) -> Result<OpenApiToolset>
Build the toolset, parsing the spec and creating tools.
Examples found in repository?
examples/petstore.rs (line 14)
8async fn main() -> anyhow::Result<()> {
9 let openai = rig::providers::openai::Client::from_env();
10
11 // Parse once at startup — reuse across requests
12 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
13 .base_url("https://petstore3.swagger.io/api/v3")
14 .build()?;
15
16 println!("Loaded {} tools from Petstore spec\n", toolset.len());
17
18 // Simulate a per-request context (e.g. from a logged-in user session)
19 let visible_ctx = HashMap::from([
20 ("username".to_string(), "user1".to_string()),
21 ("preferred_status".to_string(), "available".to_string()),
22 ]);
23 let context_preamble = OpenApiToolset::context_preamble(&visible_ctx);
24
25 let preamble = format!(
26 "You have access to the Swagger Petstore API. \
27 Use the available tools to answer questions about the pet store.\n\n\
28 {context_preamble}"
29 );
30
31 // Create agent with per-request tools (cheap clone)
32 let agent = openai
33 .agent("gpt-4o")
34 .preamble(&preamble)
35 .tools(toolset.tools_with_context(&HashMap::new()))
36 .build();
37
38 let prompts = [
39 "What pets are currently available in the store? Show me the first 3.",
40 "How many dogs are in the store?",
41 "Look up my user profile and summarize it.",
42 ];
43
44 for prompt in prompts {
45 println!(">>> {prompt}");
46 let response: String = agent.prompt(prompt).await?;
47 println!("{response}\n");
48 }
49
50 Ok(())
51}More examples
examples/context.rs (line 18)
8async fn main() -> anyhow::Result<()> {
9 let openai = rig::providers::openai::Client::from_env();
10
11 // ---------------------------------------------------------------
12 // 1. Visible context — LLM sees the values and uses them in calls
13 // ---------------------------------------------------------------
14 println!("=== Visible context ===\n");
15
16 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
17 .base_url("https://petstore3.swagger.io/api/v3")
18 .build()?;
19
20 println!("Loaded {} tools from Petstore spec\n", toolset.len());
21
22 // The LLM sees these values in its preamble and uses them
23 // when calling tools. For example, it will pass `username` to getUserByName.
24 let visible_ctx = HashMap::from([
25 ("username".to_string(), "user1".to_string()),
26 ("preferred_status".to_string(), "available".to_string()),
27 ]);
28 let context_preamble = OpenApiToolset::context_preamble(&visible_ctx);
29
30 let preamble = format!(
31 "You have access to the Swagger Petstore API.\n\n\
32 {context_preamble}\n\n\
33 When I refer to \"my\" profile or data, use the username from the context above."
34 );
35
36 let agent = openai
37 .agent("gpt-4o")
38 .preamble(&preamble)
39 .tools(toolset.tools_with_context(&HashMap::new()))
40 .build();
41
42 // The LLM picks up username=user1 from the preamble
43 // and calls getUserByName with username "user1".
44 println!(">>> Look up my user profile and summarize it.");
45 let response: String = agent
46 .prompt("Look up my user profile and summarize it.")
47 .await?;
48 println!("{response}\n");
49
50 // The LLM picks up preferred_status=available from the preamble
51 // and calls findPetsByStatus with status "available".
52 println!(">>> Find pets matching my preferred status.");
53 let response: String = agent
54 .prompt("Find pets matching my preferred status.")
55 .await?;
56 println!("{response}\n");
57
58 // ---------------------------------------------------------------
59 // 2. Hidden context — auto-injected, LLM never sees the values
60 // ---------------------------------------------------------------
61 println!("=== Hidden context ===\n");
62
63 // Hidden context is useful for secrets, user IDs, or any parameter
64 // the LLM should NOT decide — it's injected automatically at execution
65 // time and removed from the tool schema so the LLM can't see or override it.
66
67 // Static hidden context set at build time (e.g. API key for the upstream service)
68 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
69 .base_url("https://petstore3.swagger.io/api/v3")
70 .hidden_context("api_key", "special-key")
71 .build()?;
72
73 // Per-request hidden context (e.g. current user from session).
74 // The LLM won't see `username` in the tool schema — it's filled in
75 // automatically, so it can't hallucinate a different user.
76 let per_request_ctx = HashMap::from([("username".to_string(), "user1".to_string())]);
77
78 let agent = openai
79 .agent("gpt-4o")
80 .preamble(
81 "You have access to the Swagger Petstore API. \
82 Use the available tools to answer questions about the pet store.",
83 )
84 .tools(toolset.tools_with_context(&per_request_ctx))
85 .build();
86
87 // The LLM calls getUserByName without providing `username` —
88 // it's not in the schema. The library injects username=user1 automatically.
89 println!(">>> Get my profile.");
90 let response: String = agent.prompt("Get my profile.").await?;
91 println!("{response}\n");
92
93 Ok(())
94}
Auto Trait Implementations§
impl Freeze for OpenApiToolsetBuilder
impl !RefUnwindSafe for OpenApiToolsetBuilder
impl Send for OpenApiToolsetBuilder
impl Sync for OpenApiToolsetBuilder
impl Unpin for OpenApiToolsetBuilder
impl UnsafeUnpin for OpenApiToolsetBuilder
impl !UnwindSafe for OpenApiToolsetBuilder
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more