pub struct OpenApiToolset { /* private fields */ }

Expand description
A set of tools generated from an OpenAPI specification.
Each operation in the spec becomes a tool that can be registered with a rig agent. The toolset is designed to be parsed once and reused across requests.
Implementations§
Source§
impl OpenApiToolset
impl OpenApiToolset
Source
pub fn from_file(path: impl AsRef<Path>) -> Result<Self>
pub fn from_file(path: impl AsRef<Path>) -> Result<Self>
Parse an OpenAPI spec from a YAML or JSON file.
Examples found in repository?
6async fn main() -> anyhow::Result<()> {
7 let openai = rig::providers::openai::Client::from_env();
8
9 let toolset = OpenApiToolset::from_file("examples/openapi.yaml")?;
10 println!("Loaded {} tools from OpenAPI spec", toolset.len());
11
12 let agent = openai
13 .agent("gpt-4o")
14 .preamble("You have access to API tools. Use them when asked.")
15 .tools(toolset.into_tools())
16 .build();
17
18 let response: String = agent
19 .prompt("Use the API tool to get user 1 and summarize the result.")
20 .await?;
21
22 println!("{response}");
23
24 Ok(())
25}

Source
pub fn from_spec_str(spec_str: &str) -> Result<Self>
pub fn from_spec_str(spec_str: &str) -> Result<Self>
Parse an OpenAPI spec from a YAML or JSON string.
Source
pub fn builder(spec_str: &str) -> OpenApiToolsetBuilder
pub fn builder(spec_str: &str) -> OpenApiToolsetBuilder
Start building a toolset from a YAML or JSON string with configuration options.
Source
pub fn builder_from_file(
path: impl AsRef<Path>,
) -> Result<OpenApiToolsetBuilder>
pub fn builder_from_file( path: impl AsRef<Path>, ) -> Result<OpenApiToolsetBuilder>
Start building a toolset from a file with configuration options.
Examples found in repository?
8async fn main() -> anyhow::Result<()> {
9 let openai = rig::providers::openai::Client::from_env();
10
11 // Parse once at startup — reuse across requests
12 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
13 .base_url("https://petstore3.swagger.io/api/v3")
14 .build()?;
15
16 println!("Loaded {} tools from Petstore spec\n", toolset.len());
17
18 // Simulate a per-request context (e.g. from a logged-in user session)
19 let visible_ctx = HashMap::from([
20 ("username".to_string(), "user1".to_string()),
21 ("preferred_status".to_string(), "available".to_string()),
22 ]);
23 let context_preamble = OpenApiToolset::context_preamble(&visible_ctx);
24
25 let preamble = format!(
26 "You have access to the Swagger Petstore API. \
27 Use the available tools to answer questions about the pet store.\n\n\
28 {context_preamble}"
29 );
30
31 // Create agent with per-request tools (cheap clone)
32 let agent = openai
33 .agent("gpt-4o")
34 .preamble(&preamble)
35 .tools(toolset.tools_with_context(&HashMap::new()))
36 .build();
37
38 let prompts = [
39 "What pets are currently available in the store? Show me the first 3.",
40 "How many dogs are in the store?",
41 "Look up my user profile and summarize it.",
42 ];
43
44 for prompt in prompts {
45 println!(">>> {prompt}");
46 let response: String = agent.prompt(prompt).await?;
47 println!("{response}\n");
48 }
49
50 Ok(())
51}

More examples
8async fn main() -> anyhow::Result<()> {
9 let openai = rig::providers::openai::Client::from_env();
10
11 // ---------------------------------------------------------------
12 // 1. Visible context — LLM sees the values and uses them in calls
13 // ---------------------------------------------------------------
14 println!("=== Visible context ===\n");
15
16 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
17 .base_url("https://petstore3.swagger.io/api/v3")
18 .build()?;
19
20 println!("Loaded {} tools from Petstore spec\n", toolset.len());
21
22 // The LLM sees these values in its preamble and uses them
23 // when calling tools. For example, it will pass `username` to getUserByName.
24 let visible_ctx = HashMap::from([
25 ("username".to_string(), "user1".to_string()),
26 ("preferred_status".to_string(), "available".to_string()),
27 ]);
28 let context_preamble = OpenApiToolset::context_preamble(&visible_ctx);
29
30 let preamble = format!(
31 "You have access to the Swagger Petstore API.\n\n\
32 {context_preamble}\n\n\
33 When I refer to \"my\" profile or data, use the username from the context above."
34 );
35
36 let agent = openai
37 .agent("gpt-4o")
38 .preamble(&preamble)
39 .tools(toolset.tools_with_context(&HashMap::new()))
40 .build();
41
42 // The LLM picks up username=user1 from the preamble
43 // and calls getUserByName with username "user1".
44 println!(">>> Look up my user profile and summarize it.");
45 let response: String = agent
46 .prompt("Look up my user profile and summarize it.")
47 .await?;
48 println!("{response}\n");
49
50 // The LLM picks up preferred_status=available from the preamble
51 // and calls findPetsByStatus with status "available".
52 println!(">>> Find pets matching my preferred status.");
53 let response: String = agent
54 .prompt("Find pets matching my preferred status.")
55 .await?;
56 println!("{response}\n");
57
58 // ---------------------------------------------------------------
59 // 2. Hidden context — auto-injected, LLM never sees the values
60 // ---------------------------------------------------------------
61 println!("=== Hidden context ===\n");
62
63 // Hidden context is useful for secrets, user IDs, or any parameter
64 // the LLM should NOT decide — it's injected automatically at execution
65 // time and removed from the tool schema so the LLM can't see or override it.
66
67 // Static hidden context set at build time (e.g. API key for the upstream service)
68 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
69 .base_url("https://petstore3.swagger.io/api/v3")
70 .hidden_context("api_key", "special-key")
71 .build()?;
72
73 // Per-request hidden context (e.g. current user from session).
74 // The LLM won't see `username` in the tool schema — it's filled in
75 // automatically, so it can't hallucinate a different user.
76 let per_request_ctx = HashMap::from([("username".to_string(), "user1".to_string())]);
77
78 let agent = openai
79 .agent("gpt-4o")
80 .preamble(
81 "You have access to the Swagger Petstore API. \
82 Use the available tools to answer questions about the pet store.",
83 )
84 .tools(toolset.tools_with_context(&per_request_ctx))
85 .build();
86
87 // The LLM calls getUserByName without providing `username` —
88 // it's not in the schema. The library injects username=user1 automatically.
89 println!(">>> Get my profile.");
90 let response: String = agent.prompt("Get my profile.").await?;
91 println!("{response}\n");
92
93 Ok(())
94}

Source
pub fn len(&self) -> usize
pub fn len(&self) -> usize
Return the number of tools parsed from the spec.
Examples found in repository?
6async fn main() -> anyhow::Result<()> {
7 let openai = rig::providers::openai::Client::from_env();
8
9 let toolset = OpenApiToolset::from_file("examples/openapi.yaml")?;
10 println!("Loaded {} tools from OpenAPI spec", toolset.len());
11
12 let agent = openai
13 .agent("gpt-4o")
14 .preamble("You have access to API tools. Use them when asked.")
15 .tools(toolset.into_tools())
16 .build();
17
18 let response: String = agent
19 .prompt("Use the API tool to get user 1 and summarize the result.")
20 .await?;
21
22 println!("{response}");
23
24 Ok(())
25}More examples
8async fn main() -> anyhow::Result<()> {
9 let openai = rig::providers::openai::Client::from_env();
10
11 // Parse once at startup — reuse across requests
12 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
13 .base_url("https://petstore3.swagger.io/api/v3")
14 .build()?;
15
16 println!("Loaded {} tools from Petstore spec\n", toolset.len());
17
18 // Simulate a per-request context (e.g. from a logged-in user session)
19 let visible_ctx = HashMap::from([
20 ("username".to_string(), "user1".to_string()),
21 ("preferred_status".to_string(), "available".to_string()),
22 ]);
23 let context_preamble = OpenApiToolset::context_preamble(&visible_ctx);
24
25 let preamble = format!(
26 "You have access to the Swagger Petstore API. \
27 Use the available tools to answer questions about the pet store.\n\n\
28 {context_preamble}"
29 );
30
31 // Create agent with per-request tools (cheap clone)
32 let agent = openai
33 .agent("gpt-4o")
34 .preamble(&preamble)
35 .tools(toolset.tools_with_context(&HashMap::new()))
36 .build();
37
38 let prompts = [
39 "What pets are currently available in the store? Show me the first 3.",
40 "How many dogs are in the store?",
41 "Look up my user profile and summarize it.",
42 ];
43
44 for prompt in prompts {
45 println!(">>> {prompt}");
46 let response: String = agent.prompt(prompt).await?;
47 println!("{response}\n");
48 }
49
50 Ok(())
51}

8async fn main() -> anyhow::Result<()> {
9 let openai = rig::providers::openai::Client::from_env();
10
11 // ---------------------------------------------------------------
12 // 1. Visible context — LLM sees the values and uses them in calls
13 // ---------------------------------------------------------------
14 println!("=== Visible context ===\n");
15
16 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
17 .base_url("https://petstore3.swagger.io/api/v3")
18 .build()?;
19
20 println!("Loaded {} tools from Petstore spec\n", toolset.len());
21
22 // The LLM sees these values in its preamble and uses them
23 // when calling tools. For example, it will pass `username` to getUserByName.
24 let visible_ctx = HashMap::from([
25 ("username".to_string(), "user1".to_string()),
26 ("preferred_status".to_string(), "available".to_string()),
27 ]);
28 let context_preamble = OpenApiToolset::context_preamble(&visible_ctx);
29
30 let preamble = format!(
31 "You have access to the Swagger Petstore API.\n\n\
32 {context_preamble}\n\n\
33 When I refer to \"my\" profile or data, use the username from the context above."
34 );
35
36 let agent = openai
37 .agent("gpt-4o")
38 .preamble(&preamble)
39 .tools(toolset.tools_with_context(&HashMap::new()))
40 .build();
41
42 // The LLM picks up username=user1 from the preamble
43 // and calls getUserByName with username "user1".
44 println!(">>> Look up my user profile and summarize it.");
45 let response: String = agent
46 .prompt("Look up my user profile and summarize it.")
47 .await?;
48 println!("{response}\n");
49
50 // The LLM picks up preferred_status=available from the preamble
51 // and calls findPetsByStatus with status "available".
52 println!(">>> Find pets matching my preferred status.");
53 let response: String = agent
54 .prompt("Find pets matching my preferred status.")
55 .await?;
56 println!("{response}\n");
57
58 // ---------------------------------------------------------------
59 // 2. Hidden context — auto-injected, LLM never sees the values
60 // ---------------------------------------------------------------
61 println!("=== Hidden context ===\n");
62
63 // Hidden context is useful for secrets, user IDs, or any parameter
64 // the LLM should NOT decide — it's injected automatically at execution
65 // time and removed from the tool schema so the LLM can't see or override it.
66
67 // Static hidden context set at build time (e.g. API key for the upstream service)
68 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
69 .base_url("https://petstore3.swagger.io/api/v3")
70 .hidden_context("api_key", "special-key")
71 .build()?;
72
73 // Per-request hidden context (e.g. current user from session).
74 // The LLM won't see `username` in the tool schema — it's filled in
75 // automatically, so it can't hallucinate a different user.
76 let per_request_ctx = HashMap::from([("username".to_string(), "user1".to_string())]);
77
78 let agent = openai
79 .agent("gpt-4o")
80 .preamble(
81 "You have access to the Swagger Petstore API. \
82 Use the available tools to answer questions about the pet store.",
83 )
84 .tools(toolset.tools_with_context(&per_request_ctx))
85 .build();
86
87 // The LLM calls getUserByName without providing `username` —
88 // it's not in the schema. The library injects username=user1 automatically.
89 println!(">>> Get my profile.");
90 let response: String = agent.prompt("Get my profile.").await?;
91 println!("{response}\n");
92
93 Ok(())
94}

Source
pub fn into_tools(self) -> Vec<Box<dyn ToolDyn>>
pub fn into_tools(self) -> Vec<Box<dyn ToolDyn>>
Consume the toolset and return tools for use with rig’s AgentBuilder::tools().
Examples found in repository?
6async fn main() -> anyhow::Result<()> {
7 let openai = rig::providers::openai::Client::from_env();
8
9 let toolset = OpenApiToolset::from_file("examples/openapi.yaml")?;
10 println!("Loaded {} tools from OpenAPI spec", toolset.len());
11
12 let agent = openai
13 .agent("gpt-4o")
14 .preamble("You have access to API tools. Use them when asked.")
15 .tools(toolset.into_tools())
16 .build();
17
18 let response: String = agent
19 .prompt("Use the API tool to get user 1 and summarize the result.")
20 .await?;
21
22 println!("{response}");
23
24 Ok(())
25}

Source
pub fn tools_with_context(
&self,
context: &HashMap<String, String>,
) -> Vec<Box<dyn ToolDyn>>
pub fn tools_with_context( &self, context: &HashMap<String, String>, ) -> Vec<Box<dyn ToolDyn>>
Clone the tools with per-request context injected as hidden parameters. The LLM will not see these parameters in tool schemas, but they will be auto-injected into every tool call at execution time.
This is the primary way to add per-request state (user ID, session info, etc.) while reusing the parsed toolset across requests.
Examples found in repository?
8async fn main() -> anyhow::Result<()> {
9 let openai = rig::providers::openai::Client::from_env();
10
11 // Parse once at startup — reuse across requests
12 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
13 .base_url("https://petstore3.swagger.io/api/v3")
14 .build()?;
15
16 println!("Loaded {} tools from Petstore spec\n", toolset.len());
17
18 // Simulate a per-request context (e.g. from a logged-in user session)
19 let visible_ctx = HashMap::from([
20 ("username".to_string(), "user1".to_string()),
21 ("preferred_status".to_string(), "available".to_string()),
22 ]);
23 let context_preamble = OpenApiToolset::context_preamble(&visible_ctx);
24
25 let preamble = format!(
26 "You have access to the Swagger Petstore API. \
27 Use the available tools to answer questions about the pet store.\n\n\
28 {context_preamble}"
29 );
30
31 // Create agent with per-request tools (cheap clone)
32 let agent = openai
33 .agent("gpt-4o")
34 .preamble(&preamble)
35 .tools(toolset.tools_with_context(&HashMap::new()))
36 .build();
37
38 let prompts = [
39 "What pets are currently available in the store? Show me the first 3.",
40 "How many dogs are in the store?",
41 "Look up my user profile and summarize it.",
42 ];
43
44 for prompt in prompts {
45 println!(">>> {prompt}");
46 let response: String = agent.prompt(prompt).await?;
47 println!("{response}\n");
48 }
49
50 Ok(())
51}More examples
8async fn main() -> anyhow::Result<()> {
9 let openai = rig::providers::openai::Client::from_env();
10
11 // ---------------------------------------------------------------
12 // 1. Visible context — LLM sees the values and uses them in calls
13 // ---------------------------------------------------------------
14 println!("=== Visible context ===\n");
15
16 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
17 .base_url("https://petstore3.swagger.io/api/v3")
18 .build()?;
19
20 println!("Loaded {} tools from Petstore spec\n", toolset.len());
21
22 // The LLM sees these values in its preamble and uses them
23 // when calling tools. For example, it will pass `username` to getUserByName.
24 let visible_ctx = HashMap::from([
25 ("username".to_string(), "user1".to_string()),
26 ("preferred_status".to_string(), "available".to_string()),
27 ]);
28 let context_preamble = OpenApiToolset::context_preamble(&visible_ctx);
29
30 let preamble = format!(
31 "You have access to the Swagger Petstore API.\n\n\
32 {context_preamble}\n\n\
33 When I refer to \"my\" profile or data, use the username from the context above."
34 );
35
36 let agent = openai
37 .agent("gpt-4o")
38 .preamble(&preamble)
39 .tools(toolset.tools_with_context(&HashMap::new()))
40 .build();
41
42 // The LLM picks up username=user1 from the preamble
43 // and calls getUserByName with username "user1".
44 println!(">>> Look up my user profile and summarize it.");
45 let response: String = agent
46 .prompt("Look up my user profile and summarize it.")
47 .await?;
48 println!("{response}\n");
49
50 // The LLM picks up preferred_status=available from the preamble
51 // and calls findPetsByStatus with status "available".
52 println!(">>> Find pets matching my preferred status.");
53 let response: String = agent
54 .prompt("Find pets matching my preferred status.")
55 .await?;
56 println!("{response}\n");
57
58 // ---------------------------------------------------------------
59 // 2. Hidden context — auto-injected, LLM never sees the values
60 // ---------------------------------------------------------------
61 println!("=== Hidden context ===\n");
62
63 // Hidden context is useful for secrets, user IDs, or any parameter
64 // the LLM should NOT decide — it's injected automatically at execution
65 // time and removed from the tool schema so the LLM can't see or override it.
66
67 // Static hidden context set at build time (e.g. API key for the upstream service)
68 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
69 .base_url("https://petstore3.swagger.io/api/v3")
70 .hidden_context("api_key", "special-key")
71 .build()?;
72
73 // Per-request hidden context (e.g. current user from session).
74 // The LLM won't see `username` in the tool schema — it's filled in
75 // automatically, so it can't hallucinate a different user.
76 let per_request_ctx = HashMap::from([("username".to_string(), "user1".to_string())]);
77
78 let agent = openai
79 .agent("gpt-4o")
80 .preamble(
81 "You have access to the Swagger Petstore API. \
82 Use the available tools to answer questions about the pet store.",
83 )
84 .tools(toolset.tools_with_context(&per_request_ctx))
85 .build();
86
87 // The LLM calls getUserByName without providing `username` —
88 // it's not in the schema. The library injects username=user1 automatically.
89 println!(">>> Get my profile.");
90 let response: String = agent.prompt("Get my profile.").await?;
91 println!("{response}\n");
92
93 Ok(())
94}

Source
pub fn context_preamble(context: &HashMap<String, String>) -> String
pub fn context_preamble(context: &HashMap<String, String>) -> String
Generate a preamble snippet describing the visible context for the LLM.
Include this in your agent’s .preamble() so the LLM knows about
available context values it can use when calling tools.
Examples found in repository?
8async fn main() -> anyhow::Result<()> {
9 let openai = rig::providers::openai::Client::from_env();
10
11 // Parse once at startup — reuse across requests
12 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
13 .base_url("https://petstore3.swagger.io/api/v3")
14 .build()?;
15
16 println!("Loaded {} tools from Petstore spec\n", toolset.len());
17
18 // Simulate a per-request context (e.g. from a logged-in user session)
19 let visible_ctx = HashMap::from([
20 ("username".to_string(), "user1".to_string()),
21 ("preferred_status".to_string(), "available".to_string()),
22 ]);
23 let context_preamble = OpenApiToolset::context_preamble(&visible_ctx);
24
25 let preamble = format!(
26 "You have access to the Swagger Petstore API. \
27 Use the available tools to answer questions about the pet store.\n\n\
28 {context_preamble}"
29 );
30
31 // Create agent with per-request tools (cheap clone)
32 let agent = openai
33 .agent("gpt-4o")
34 .preamble(&preamble)
35 .tools(toolset.tools_with_context(&HashMap::new()))
36 .build();
37
38 let prompts = [
39 "What pets are currently available in the store? Show me the first 3.",
40 "How many dogs are in the store?",
41 "Look up my user profile and summarize it.",
42 ];
43
44 for prompt in prompts {
45 println!(">>> {prompt}");
46 let response: String = agent.prompt(prompt).await?;
47 println!("{response}\n");
48 }
49
50 Ok(())
51}More examples
8async fn main() -> anyhow::Result<()> {
9 let openai = rig::providers::openai::Client::from_env();
10
11 // ---------------------------------------------------------------
12 // 1. Visible context — LLM sees the values and uses them in calls
13 // ---------------------------------------------------------------
14 println!("=== Visible context ===\n");
15
16 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
17 .base_url("https://petstore3.swagger.io/api/v3")
18 .build()?;
19
20 println!("Loaded {} tools from Petstore spec\n", toolset.len());
21
22 // The LLM sees these values in its preamble and uses them
23 // when calling tools. For example, it will pass `username` to getUserByName.
24 let visible_ctx = HashMap::from([
25 ("username".to_string(), "user1".to_string()),
26 ("preferred_status".to_string(), "available".to_string()),
27 ]);
28 let context_preamble = OpenApiToolset::context_preamble(&visible_ctx);
29
30 let preamble = format!(
31 "You have access to the Swagger Petstore API.\n\n\
32 {context_preamble}\n\n\
33 When I refer to \"my\" profile or data, use the username from the context above."
34 );
35
36 let agent = openai
37 .agent("gpt-4o")
38 .preamble(&preamble)
39 .tools(toolset.tools_with_context(&HashMap::new()))
40 .build();
41
42 // The LLM picks up username=user1 from the preamble
43 // and calls getUserByName with username "user1".
44 println!(">>> Look up my user profile and summarize it.");
45 let response: String = agent
46 .prompt("Look up my user profile and summarize it.")
47 .await?;
48 println!("{response}\n");
49
50 // The LLM picks up preferred_status=available from the preamble
51 // and calls findPetsByStatus with status "available".
52 println!(">>> Find pets matching my preferred status.");
53 let response: String = agent
54 .prompt("Find pets matching my preferred status.")
55 .await?;
56 println!("{response}\n");
57
58 // ---------------------------------------------------------------
59 // 2. Hidden context — auto-injected, LLM never sees the values
60 // ---------------------------------------------------------------
61 println!("=== Hidden context ===\n");
62
63 // Hidden context is useful for secrets, user IDs, or any parameter
64 // the LLM should NOT decide — it's injected automatically at execution
65 // time and removed from the tool schema so the LLM can't see or override it.
66
67 // Static hidden context set at build time (e.g. API key for the upstream service)
68 let toolset = OpenApiToolset::builder_from_file("examples/petstore.json")?
69 .base_url("https://petstore3.swagger.io/api/v3")
70 .hidden_context("api_key", "special-key")
71 .build()?;
72
73 // Per-request hidden context (e.g. current user from session).
74 // The LLM won't see `username` in the tool schema — it's filled in
75 // automatically, so it can't hallucinate a different user.
76 let per_request_ctx = HashMap::from([("username".to_string(), "user1".to_string())]);
77
78 let agent = openai
79 .agent("gpt-4o")
80 .preamble(
81 "You have access to the Swagger Petstore API. \
82 Use the available tools to answer questions about the pet store.",
83 )
84 .tools(toolset.tools_with_context(&per_request_ctx))
85 .build();
86
87 // The LLM calls getUserByName without providing `username` —
88 // it's not in the schema. The library injects username=user1 automatically.
89 println!(">>> Get my profile.");
90 let response: String = agent.prompt("Get my profile.").await?;
91 println!("{response}\n");
92
93 Ok(())
94}