pub struct OpenAIProvider { /* private fields */ }
OpenAI-compatible LLM provider
§Examples
use tiny_loop::llm::OpenAIProvider;
let provider = OpenAIProvider::new()
.api_key("sk-...")
.model("gpt-4o")
.temperature(0.7);Implementations§
impl OpenAIProvider

pub fn new() -> Self
Create a new OpenAI provider with default settings
§Examples
use tiny_loop::llm::OpenAIProvider;
let provider = OpenAIProvider::new();
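In practice the key usually comes from the environment rather than a literal; a minimal sketch mirroring the repository examples below, which store it in an LLM_API_KEY variable:

use tiny_loop::llm::OpenAIProvider;

// Read the key from the environment instead of hardcoding it.
let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
let provider = OpenAIProvider::new().api_key(api_key);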
Examples found in repository:

examples/chatbot.rs:

async fn main() {
    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");

    let llm = OpenAIProvider::new()
        .api_key(api_key)
        .base_url("https://openrouter.ai/api/v1")
        .model("google/gemini-3-flash-preview");

    let agent = Agent::new(llm).system("You are a helpful assistant");

    run_cli_loop(agent).await
}

More examples:

examples/history.rs:

async fn main() {
    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");

    let llm = OpenAIProvider::new()
        .api_key(api_key)
        .base_url("https://openrouter.ai/api/v1")
        .model("google/gemini-3-flash-preview");

    let agent = Agent::new(llm)
        .system("You are a helpful assistant")
        .history(CustomHistory::new(3));

    run_cli_loop(agent).await
}

examples/fn_tools.rs:

async fn main() {
    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");

    let llm = OpenAIProvider::new()
        .api_key(api_key)
        .base_url("https://openrouter.ai/api/v1")
        .model("google/gemini-3-flash-preview");

    let agent = Agent::new(llm)
        .system("You are a helpful assistant with access to tools")
        .tool(get_weather)
        .tool(add);

    run_cli_loop(agent).await
}

examples/bind_tools.rs:

async fn main() {
    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");

    let llm = OpenAIProvider::new()
        .api_key(api_key)
        .base_url("https://openrouter.ai/api/v1")
        .model("google/gemini-3-flash-preview");

    let mut data = HashMap::new();
    data.insert("name".to_string(), "Alice".to_string());
    data.insert("age".to_string(), "30".to_string());

    let r = ReadonlyTool {
        data: Arc::new(data),
    };

    let w = WritableTool {
        data: Arc::new(Mutex::new(HashMap::new())),
    };

    let agent = Agent::new(llm)
        .system("You are a helpful assistant with access to tools")
        .bind(r.clone(), ReadonlyTool::fetch)
        .bind(w.clone(), WritableTool::read)
        .bind(w, WritableTool::write);

    run_cli_loop(agent).await
}

examples/mcp.rs:

async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Connect to a server running as a child process
    let service = ()
        .serve(TokioChildProcess::new(Command::new("npx").configure(
            |cmd| {
                cmd.args(&["-y", "@modelcontextprotocol/server-filesystem", "."]);
            },
        ))?)
        .await?;
    println!("Connected to MCP server");

    // List available tools and convert to tool definitions
    let tools = service.list_tools(Default::default()).await?.tools;
    let names = tools.iter().map(|t| t.name.to_string()).collect::<Vec<_>>();
    println!("Available tools: {names:#?}");
    let mcp_tool_defs = tools
        .iter()
        .map(|t| ToolDefinition {
            tool_type: "function".into(),
            function: ToolFunction {
                name: t.name.to_string(),
                description: t.description.as_deref().unwrap_or_default().to_string(),
                parameters: Parameters::from_object(t.input_schema.as_ref().clone()),
            },
        })
        .collect();

    let mcp_tool_executor = {
        let peer = service.clone();
        move |name: String, args: String| {
            let peer = peer.clone();
            async move {
                peer.call_tool(CallToolRequestParams {
                    meta: None,
                    name: name.into(),
                    arguments: serde_json::from_str(&args).unwrap(),
                    task: None,
                })
                .await
                .unwrap()
                .content[0]
                .as_text()
                .unwrap()
                .text
                .clone()
            }
        }
    };

    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");

    let llm = OpenAIProvider::new()
        .api_key(api_key)
        .base_url("https://openrouter.ai/api/v1")
        .model("google/gemini-3-flash-preview");

    let agent = Agent::new(llm)
        .system("You are a helpful assistant")
        .external(mcp_tool_defs, mcp_tool_executor);

    run_cli_loop(agent).await;

    // Gracefully close the connection
    service.cancel().await?;
    Ok(())
}
pub fn base_url(self, value: impl Into<String>) -> Self
Set the base URL for the API endpoint (default: https://api.openai.com/v1)
§Examples
use tiny_loop::llm::OpenAIProvider;
let provider = OpenAIProvider::new()
.base_url("https://api.custom.com/v1");Examples found in repository?
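Because the provider only assumes an OpenAI-compatible wire format, base_url can also point at a self-hosted server. A sketch assuming an Ollama-style server exposing its OpenAI-compatible API at localhost:11434; the endpoint path, key handling, and model name are assumptions, so check your server's docs:

use tiny_loop::llm::OpenAIProvider;

// Assumed local endpoint; adjust host/port for your server.
let provider = OpenAIProvider::new()
    .base_url("http://localhost:11434/v1")
    .api_key("unused")  // many local servers ignore the key (assumption)
    .model("llama3");   // model name is server-specific (assumption)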
See the repository examples under new() above, all of which point base_url at OpenRouter's OpenAI-compatible endpoint.
pub fn api_key(self, value: impl Into<String>) -> Self
Set the API key for authentication (default: empty string)
§Examples
use tiny_loop::llm::OpenAIProvider;
let provider = OpenAIProvider::new()
.api_key("sk-...");Examples found in repository?
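Since the setter takes impl Into&lt;String&gt;, both string literals and owned Strings (for example a value read from the environment) are accepted:

use tiny_loop::llm::OpenAIProvider;

let owned: String = std::env::var("LLM_API_KEY").unwrap_or_default();
let p1 = OpenAIProvider::new().api_key(owned);    // String
let p2 = OpenAIProvider::new().api_key("sk-..."); // &str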
See the repository examples under new() above, all of which read the key from the LLM_API_KEY environment variable.
pub fn model(self, value: impl Into<String>) -> Self
Set the model name to use (default: gpt-4o)
§Examples
use tiny_loop::llm::OpenAIProvider;
let provider = OpenAIProvider::new()
.model("gpt-4o-mini");Examples found in repository?
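The model name is passed through verbatim, so provider-specific ids work as long as the endpoint at base_url recognizes them; a sketch using an OpenRouter-style id as in the repository examples:

use tiny_loop::llm::OpenAIProvider;

// Namespaced ids like this are resolved by OpenRouter, not by this crate.
let provider = OpenAIProvider::new()
    .base_url("https://openrouter.ai/api/v1")
    .model("google/gemini-3-flash-preview");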
See the repository examples under new() above, all of which pass an OpenRouter model id (google/gemini-3-flash-preview) straight through.
pub fn temperature(self, value: impl Into<Option<f32>>) -> Self
Set the temperature for response randomness (default: None)
§Examples
use tiny_loop::llm::OpenAIProvider;
let provider = OpenAIProvider::new()
    .temperature(0.7);
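Because the setter takes impl Into&lt;Option&lt;f32&gt;&gt;, an Option can be forwarded directly, which is convenient when temperature is optional configuration; the environment variable name below is illustrative:

use tiny_loop::llm::OpenAIProvider;

// Illustrative: parse an optional override from the environment.
let configured: Option<f32> = std::env::var("LLM_TEMPERATURE")
    .ok()
    .and_then(|s| s.parse().ok());
// None keeps the provider's default behavior.
let provider = OpenAIProvider::new().temperature(configured);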
pub fn max_tokens(self, value: impl Into<Option<u32>>) -> Self
Set the maximum number of tokens to generate (default: None)
§Examples
use tiny_loop::llm::OpenAIProvider;
let provider = OpenAIProvider::new()
    .max_tokens(1000);
pub fn max_retries(self, retries: u32) -> Self
Set maximum number of retries on failure (default: 3)
§Examples
use tiny_loop::llm::OpenAIProvider;
let provider = OpenAIProvider::new()
    .max_retries(5);
pub fn retry_delay(self, delay_ms: u64) -> Self
Set delay between retries in milliseconds (default: 1000)
§Examples
use tiny_loop::llm::OpenAIProvider;
let provider = OpenAIProvider::new()
    .retry_delay(2000);
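The two retry knobs compose with each other and the rest of the builder; a sketch configuring both (defaults: 3 retries, 1000 ms):

use tiny_loop::llm::OpenAIProvider;

// Retry up to 5 times, waiting 2 s between attempts.
let provider = OpenAIProvider::new()
    .max_retries(5)
    .retry_delay(2000);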
Trait Implementations§

impl Default for OpenAIProvider
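Presumably Default::default() matches new()'s documented defaults (gpt-4o against https://api.openai.com/v1 with an empty key); a sketch under that assumption:

use tiny_loop::llm::OpenAIProvider;

// Assumed equivalent to OpenAIProvider::new().
let provider = OpenAIProvider::default();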
impl LLMProvider for OpenAIProvider
fn call<'life0, 'life1, 'life2, 'life3, 'async_trait>(
    &'life0 self,
    messages: &'life1 [Message],
    tools: &'life2 [ToolDefinition],
    stream_callback: Option<&'life3 mut StreamCallback>,
) -> Pin<Box<dyn Future<Output = Result<Message>> + Send + 'async_trait>>
where
    Self: 'async_trait,
    'life0: 'async_trait,
    'life1: 'async_trait,
    'life2: 'async_trait,
    'life3: 'async_trait,
Call the LLM with messages and available tools, returning the assistant's response
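call is an async-trait method (hence the manually expanded lifetimes above), so it is awaited like any async fn. A hedged sketch of driving it generically with no tools and no streaming; the import paths for Message and the crate's Result alias are assumptions:

use tiny_loop::llm::{LLMProvider, Message};

// Illustrative helper: one round-trip with no tools and no stream callback.
async fn one_shot<P: LLMProvider>(llm: &P, messages: &[Message]) -> tiny_loop::Result<Message> {
    llm.call(messages, &[], None).await
}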
Auto Trait Implementations§
impl Freeze for OpenAIProvider
impl !RefUnwindSafe for OpenAIProvider
impl Send for OpenAIProvider
impl Sync for OpenAIProvider
impl Unpin for OpenAIProvider
impl !UnwindSafe for OpenAIProvider
Blanket Implementations§
Source§impl<T> BorrowMut<T> for Twhere
T: ?Sized,
impl<T> BorrowMut<T> for Twhere
T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value.