Skip to main content

OpenAIProvider

Struct OpenAIProvider 

Source
pub struct OpenAIProvider { /* private fields */ }
Expand description

OpenAI-compatible LLM provider

§Examples

use tiny_loop::llm::OpenAIProvider;

let provider = OpenAIProvider::new()
    .api_key("sk-...")
    .model("gpt-4o")
    .temperature(0.7);

Implementations§

Source§

impl OpenAIProvider

Source

pub fn new() -> Self

Create a new OpenAI provider with default settings

§Examples
use tiny_loop::llm::OpenAIProvider;

let provider = OpenAIProvider::new();
Examples found in repository?
examples/chatbot.rs (line 10)
7async fn main() {
8    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
9
10    let llm = OpenAIProvider::new()
11        .api_key(api_key)
12        .base_url("https://openrouter.ai/api/v1")
13        .model("google/gemini-3-flash-preview");
14
15    let agent = Agent::new(llm).system("You are a helpful assistant");
16
17    run_cli_loop(agent).await
18}
More examples
Hide additional examples
examples/history.rs (line 37)
34async fn main() {
35    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
36
37    let llm = OpenAIProvider::new()
38        .api_key(api_key)
39        .base_url("https://openrouter.ai/api/v1")
40        .model("google/gemini-3-flash-preview");
41
42    let agent = Agent::new(llm)
43        .system("You are a helpful assistant")
44        .history(CustomHistory::new(3));
45
46    run_cli_loop(agent).await
47}
examples/fn_tools.rs (line 30)
27async fn main() {
28    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
29
30    let llm = OpenAIProvider::new()
31        .api_key(api_key)
32        .base_url("https://openrouter.ai/api/v1")
33        .model("google/gemini-3-flash-preview");
34
35    let agent = Agent::new(llm)
36        .system("You are a helpful assistant with access to tools")
37        .tool(get_weather)
38        .tool(add);
39
40    run_cli_loop(agent).await
41}
examples/bind_tools.rs (line 66)
63async fn main() {
64    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
65
66    let llm = OpenAIProvider::new()
67        .api_key(api_key)
68        .base_url("https://openrouter.ai/api/v1")
69        .model("google/gemini-3-flash-preview");
70
71    let mut data = HashMap::new();
72    data.insert("name".to_string(), "Alice".to_string());
73    data.insert("age".to_string(), "30".to_string());
74
75    let r = ReadonlyTool {
76        data: Arc::new(data),
77    };
78
79    let w = WritableTool {
80        data: Arc::new(Mutex::new(HashMap::new())),
81    };
82
83    let agent = Agent::new(llm)
84        .system("You are a helpful assistant with access to tools")
85        .bind(r.clone(), ReadonlyTool::fetch)
86        .bind(w.clone(), WritableTool::read)
87        .bind(w, WritableTool::write);
88
89    run_cli_loop(agent).await
90}
examples/mcp.rs (line 68)
17async fn main() -> Result<(), Box<dyn std::error::Error>> {
18    // Connect to a server running as a child process
19    let service = ()
20        .serve(TokioChildProcess::new(Command::new("npx").configure(
21            |cmd| {
22                cmd.args(&["-y", "@modelcontextprotocol/server-filesystem", "."]);
23            },
24        ))?)
25        .await?;
26    println!("Connected to MCP server");
27
28    // List available tools and convert to tool definitions
29    let tools = service.list_tools(Default::default()).await?.tools;
30    let names = tools.iter().map(|t| t.name.to_string()).collect::<Vec<_>>();
31    println!("Available tools: {names:#?}");
32    let mcp_tool_defs = tools
33        .iter()
34        .map(|t| ToolDefinition {
35            tool_type: "function".into(),
36            function: ToolFunction {
37                name: t.name.to_string(),
38                description: t.description.as_deref().unwrap_or_default().to_string(),
39                parameters: Parameters::from_object(t.input_schema.as_ref().clone()),
40            },
41        })
42        .collect();
43
44    let mcp_tool_executor = {
45        let peer = service.clone();
46        move |name: String, args: String| {
47            let peer = peer.clone();
48            async move {
49                peer.call_tool(CallToolRequestParams {
50                    meta: None,
51                    name: name.into(),
52                    arguments: serde_json::from_str(&args).unwrap(),
53                    task: None,
54                })
55                .await
56                .unwrap()
57                .content[0]
58                    .as_text()
59                    .unwrap()
60                    .text
61                    .clone()
62            }
63        }
64    };
65
66    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
67
68    let llm = OpenAIProvider::new()
69        .api_key(api_key)
70        .base_url("https://openrouter.ai/api/v1")
71        .model("google/gemini-3-flash-preview");
72
73    let agent = Agent::new(llm)
74        .system("You are a helpful assistant")
75        .external(mcp_tool_defs, mcp_tool_executor);
76
77    run_cli_loop(agent).await;
78
79    // Gracefully close the connection
80    service.cancel().await?;
81    Ok(())
82}
Source

pub fn base_url(self, value: impl Into<String>) -> Self

Set the base URL for the API endpoint (default: https://api.openai.com/v1)

§Examples
use tiny_loop::llm::OpenAIProvider;

let provider = OpenAIProvider::new()
    .base_url("https://api.custom.com/v1");
Examples found in repository?
examples/chatbot.rs (line 12)
7async fn main() {
8    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
9
10    let llm = OpenAIProvider::new()
11        .api_key(api_key)
12        .base_url("https://openrouter.ai/api/v1")
13        .model("google/gemini-3-flash-preview");
14
15    let agent = Agent::new(llm).system("You are a helpful assistant");
16
17    run_cli_loop(agent).await
18}
More examples
Hide additional examples
examples/history.rs (line 39)
34async fn main() {
35    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
36
37    let llm = OpenAIProvider::new()
38        .api_key(api_key)
39        .base_url("https://openrouter.ai/api/v1")
40        .model("google/gemini-3-flash-preview");
41
42    let agent = Agent::new(llm)
43        .system("You are a helpful assistant")
44        .history(CustomHistory::new(3));
45
46    run_cli_loop(agent).await
47}
examples/fn_tools.rs (line 32)
27async fn main() {
28    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
29
30    let llm = OpenAIProvider::new()
31        .api_key(api_key)
32        .base_url("https://openrouter.ai/api/v1")
33        .model("google/gemini-3-flash-preview");
34
35    let agent = Agent::new(llm)
36        .system("You are a helpful assistant with access to tools")
37        .tool(get_weather)
38        .tool(add);
39
40    run_cli_loop(agent).await
41}
examples/bind_tools.rs (line 68)
63async fn main() {
64    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
65
66    let llm = OpenAIProvider::new()
67        .api_key(api_key)
68        .base_url("https://openrouter.ai/api/v1")
69        .model("google/gemini-3-flash-preview");
70
71    let mut data = HashMap::new();
72    data.insert("name".to_string(), "Alice".to_string());
73    data.insert("age".to_string(), "30".to_string());
74
75    let r = ReadonlyTool {
76        data: Arc::new(data),
77    };
78
79    let w = WritableTool {
80        data: Arc::new(Mutex::new(HashMap::new())),
81    };
82
83    let agent = Agent::new(llm)
84        .system("You are a helpful assistant with access to tools")
85        .bind(r.clone(), ReadonlyTool::fetch)
86        .bind(w.clone(), WritableTool::read)
87        .bind(w, WritableTool::write);
88
89    run_cli_loop(agent).await
90}
examples/mcp.rs (line 70)
17async fn main() -> Result<(), Box<dyn std::error::Error>> {
18    // Connect to a server running as a child process
19    let service = ()
20        .serve(TokioChildProcess::new(Command::new("npx").configure(
21            |cmd| {
22                cmd.args(&["-y", "@modelcontextprotocol/server-filesystem", "."]);
23            },
24        ))?)
25        .await?;
26    println!("Connected to MCP server");
27
28    // List available tools and convert to tool definitions
29    let tools = service.list_tools(Default::default()).await?.tools;
30    let names = tools.iter().map(|t| t.name.to_string()).collect::<Vec<_>>();
31    println!("Available tools: {names:#?}");
32    let mcp_tool_defs = tools
33        .iter()
34        .map(|t| ToolDefinition {
35            tool_type: "function".into(),
36            function: ToolFunction {
37                name: t.name.to_string(),
38                description: t.description.as_deref().unwrap_or_default().to_string(),
39                parameters: Parameters::from_object(t.input_schema.as_ref().clone()),
40            },
41        })
42        .collect();
43
44    let mcp_tool_executor = {
45        let peer = service.clone();
46        move |name: String, args: String| {
47            let peer = peer.clone();
48            async move {
49                peer.call_tool(CallToolRequestParams {
50                    meta: None,
51                    name: name.into(),
52                    arguments: serde_json::from_str(&args).unwrap(),
53                    task: None,
54                })
55                .await
56                .unwrap()
57                .content[0]
58                    .as_text()
59                    .unwrap()
60                    .text
61                    .clone()
62            }
63        }
64    };
65
66    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
67
68    let llm = OpenAIProvider::new()
69        .api_key(api_key)
70        .base_url("https://openrouter.ai/api/v1")
71        .model("google/gemini-3-flash-preview");
72
73    let agent = Agent::new(llm)
74        .system("You are a helpful assistant")
75        .external(mcp_tool_defs, mcp_tool_executor);
76
77    run_cli_loop(agent).await;
78
79    // Gracefully close the connection
80    service.cancel().await?;
81    Ok(())
82}
Source

pub fn api_key(self, value: impl Into<String>) -> Self

Set the API key for authentication (default: empty string)

§Examples
use tiny_loop::llm::OpenAIProvider;

let provider = OpenAIProvider::new()
    .api_key("sk-...");
Examples found in repository?
examples/chatbot.rs (line 11)
7async fn main() {
8    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
9
10    let llm = OpenAIProvider::new()
11        .api_key(api_key)
12        .base_url("https://openrouter.ai/api/v1")
13        .model("google/gemini-3-flash-preview");
14
15    let agent = Agent::new(llm).system("You are a helpful assistant");
16
17    run_cli_loop(agent).await
18}
More examples
Hide additional examples
examples/history.rs (line 38)
34async fn main() {
35    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
36
37    let llm = OpenAIProvider::new()
38        .api_key(api_key)
39        .base_url("https://openrouter.ai/api/v1")
40        .model("google/gemini-3-flash-preview");
41
42    let agent = Agent::new(llm)
43        .system("You are a helpful assistant")
44        .history(CustomHistory::new(3));
45
46    run_cli_loop(agent).await
47}
examples/fn_tools.rs (line 31)
27async fn main() {
28    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
29
30    let llm = OpenAIProvider::new()
31        .api_key(api_key)
32        .base_url("https://openrouter.ai/api/v1")
33        .model("google/gemini-3-flash-preview");
34
35    let agent = Agent::new(llm)
36        .system("You are a helpful assistant with access to tools")
37        .tool(get_weather)
38        .tool(add);
39
40    run_cli_loop(agent).await
41}
examples/bind_tools.rs (line 67)
63async fn main() {
64    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
65
66    let llm = OpenAIProvider::new()
67        .api_key(api_key)
68        .base_url("https://openrouter.ai/api/v1")
69        .model("google/gemini-3-flash-preview");
70
71    let mut data = HashMap::new();
72    data.insert("name".to_string(), "Alice".to_string());
73    data.insert("age".to_string(), "30".to_string());
74
75    let r = ReadonlyTool {
76        data: Arc::new(data),
77    };
78
79    let w = WritableTool {
80        data: Arc::new(Mutex::new(HashMap::new())),
81    };
82
83    let agent = Agent::new(llm)
84        .system("You are a helpful assistant with access to tools")
85        .bind(r.clone(), ReadonlyTool::fetch)
86        .bind(w.clone(), WritableTool::read)
87        .bind(w, WritableTool::write);
88
89    run_cli_loop(agent).await
90}
examples/mcp.rs (line 69)
17async fn main() -> Result<(), Box<dyn std::error::Error>> {
18    // Connect to a server running as a child process
19    let service = ()
20        .serve(TokioChildProcess::new(Command::new("npx").configure(
21            |cmd| {
22                cmd.args(&["-y", "@modelcontextprotocol/server-filesystem", "."]);
23            },
24        ))?)
25        .await?;
26    println!("Connected to MCP server");
27
28    // List available tools and convert to tool definitions
29    let tools = service.list_tools(Default::default()).await?.tools;
30    let names = tools.iter().map(|t| t.name.to_string()).collect::<Vec<_>>();
31    println!("Available tools: {names:#?}");
32    let mcp_tool_defs = tools
33        .iter()
34        .map(|t| ToolDefinition {
35            tool_type: "function".into(),
36            function: ToolFunction {
37                name: t.name.to_string(),
38                description: t.description.as_deref().unwrap_or_default().to_string(),
39                parameters: Parameters::from_object(t.input_schema.as_ref().clone()),
40            },
41        })
42        .collect();
43
44    let mcp_tool_executor = {
45        let peer = service.clone();
46        move |name: String, args: String| {
47            let peer = peer.clone();
48            async move {
49                peer.call_tool(CallToolRequestParams {
50                    meta: None,
51                    name: name.into(),
52                    arguments: serde_json::from_str(&args).unwrap(),
53                    task: None,
54                })
55                .await
56                .unwrap()
57                .content[0]
58                    .as_text()
59                    .unwrap()
60                    .text
61                    .clone()
62            }
63        }
64    };
65
66    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
67
68    let llm = OpenAIProvider::new()
69        .api_key(api_key)
70        .base_url("https://openrouter.ai/api/v1")
71        .model("google/gemini-3-flash-preview");
72
73    let agent = Agent::new(llm)
74        .system("You are a helpful assistant")
75        .external(mcp_tool_defs, mcp_tool_executor);
76
77    run_cli_loop(agent).await;
78
79    // Gracefully close the connection
80    service.cancel().await?;
81    Ok(())
82}
Source

pub fn model(self, value: impl Into<String>) -> Self

Set the model name to use (default: gpt-4o)

§Examples
use tiny_loop::llm::OpenAIProvider;

let provider = OpenAIProvider::new()
    .model("gpt-4o-mini");
Examples found in repository?
examples/chatbot.rs (line 13)
7async fn main() {
8    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
9
10    let llm = OpenAIProvider::new()
11        .api_key(api_key)
12        .base_url("https://openrouter.ai/api/v1")
13        .model("google/gemini-3-flash-preview");
14
15    let agent = Agent::new(llm).system("You are a helpful assistant");
16
17    run_cli_loop(agent).await
18}
More examples
Hide additional examples
examples/history.rs (line 40)
34async fn main() {
35    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
36
37    let llm = OpenAIProvider::new()
38        .api_key(api_key)
39        .base_url("https://openrouter.ai/api/v1")
40        .model("google/gemini-3-flash-preview");
41
42    let agent = Agent::new(llm)
43        .system("You are a helpful assistant")
44        .history(CustomHistory::new(3));
45
46    run_cli_loop(agent).await
47}
examples/fn_tools.rs (line 33)
27async fn main() {
28    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
29
30    let llm = OpenAIProvider::new()
31        .api_key(api_key)
32        .base_url("https://openrouter.ai/api/v1")
33        .model("google/gemini-3-flash-preview");
34
35    let agent = Agent::new(llm)
36        .system("You are a helpful assistant with access to tools")
37        .tool(get_weather)
38        .tool(add);
39
40    run_cli_loop(agent).await
41}
examples/bind_tools.rs (line 69)
63async fn main() {
64    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
65
66    let llm = OpenAIProvider::new()
67        .api_key(api_key)
68        .base_url("https://openrouter.ai/api/v1")
69        .model("google/gemini-3-flash-preview");
70
71    let mut data = HashMap::new();
72    data.insert("name".to_string(), "Alice".to_string());
73    data.insert("age".to_string(), "30".to_string());
74
75    let r = ReadonlyTool {
76        data: Arc::new(data),
77    };
78
79    let w = WritableTool {
80        data: Arc::new(Mutex::new(HashMap::new())),
81    };
82
83    let agent = Agent::new(llm)
84        .system("You are a helpful assistant with access to tools")
85        .bind(r.clone(), ReadonlyTool::fetch)
86        .bind(w.clone(), WritableTool::read)
87        .bind(w, WritableTool::write);
88
89    run_cli_loop(agent).await
90}
examples/mcp.rs (line 71)
17async fn main() -> Result<(), Box<dyn std::error::Error>> {
18    // Connect to a server running as a child process
19    let service = ()
20        .serve(TokioChildProcess::new(Command::new("npx").configure(
21            |cmd| {
22                cmd.args(&["-y", "@modelcontextprotocol/server-filesystem", "."]);
23            },
24        ))?)
25        .await?;
26    println!("Connected to MCP server");
27
28    // List available tools and convert to tool definitions
29    let tools = service.list_tools(Default::default()).await?.tools;
30    let names = tools.iter().map(|t| t.name.to_string()).collect::<Vec<_>>();
31    println!("Available tools: {names:#?}");
32    let mcp_tool_defs = tools
33        .iter()
34        .map(|t| ToolDefinition {
35            tool_type: "function".into(),
36            function: ToolFunction {
37                name: t.name.to_string(),
38                description: t.description.as_deref().unwrap_or_default().to_string(),
39                parameters: Parameters::from_object(t.input_schema.as_ref().clone()),
40            },
41        })
42        .collect();
43
44    let mcp_tool_executor = {
45        let peer = service.clone();
46        move |name: String, args: String| {
47            let peer = peer.clone();
48            async move {
49                peer.call_tool(CallToolRequestParams {
50                    meta: None,
51                    name: name.into(),
52                    arguments: serde_json::from_str(&args).unwrap(),
53                    task: None,
54                })
55                .await
56                .unwrap()
57                .content[0]
58                    .as_text()
59                    .unwrap()
60                    .text
61                    .clone()
62            }
63        }
64    };
65
66    let api_key = std::env::var("LLM_API_KEY").expect("LLM_API_KEY not set");
67
68    let llm = OpenAIProvider::new()
69        .api_key(api_key)
70        .base_url("https://openrouter.ai/api/v1")
71        .model("google/gemini-3-flash-preview");
72
73    let agent = Agent::new(llm)
74        .system("You are a helpful assistant")
75        .external(mcp_tool_defs, mcp_tool_executor);
76
77    run_cli_loop(agent).await;
78
79    // Gracefully close the connection
80    service.cancel().await?;
81    Ok(())
82}
Source

pub fn temperature(self, value: impl Into<Option<f32>>) -> Self

Set the sampling temperature that controls response randomness (default: None)

§Examples
use tiny_loop::llm::OpenAIProvider;

let provider = OpenAIProvider::new()
    .temperature(0.7);
Source

pub fn max_tokens(self, value: impl Into<Option<u32>>) -> Self

Set the maximum number of tokens to generate (default: None)

§Examples
use tiny_loop::llm::OpenAIProvider;

let provider = OpenAIProvider::new()
    .max_tokens(1000);
Source

pub fn header(self, key: impl Into<String>, value: impl Into<String>) -> Self

Add a custom HTTP header to requests

§Examples
use tiny_loop::llm::OpenAIProvider;

let provider = OpenAIProvider::new()
    .header("x-custom-header", "value");
§Panics

Panics if the header name or value contains invalid characters.

Source

pub fn max_retries(self, retries: u32) -> Self

Set the maximum number of retry attempts after a failed request (default: 3)

§Examples
use tiny_loop::llm::OpenAIProvider;

let provider = OpenAIProvider::new()
    .max_retries(5);
Source

pub fn retry_delay(self, delay_ms: u64) -> Self

Set delay between retries in milliseconds (default: 1000)

§Examples
use tiny_loop::llm::OpenAIProvider;

let provider = OpenAIProvider::new()
    .retry_delay(2000);

Trait Implementations§

Source§

impl Default for OpenAIProvider

Source§

fn default() -> Self

Returns the “default value” for a type. Read more
Source§

impl LLMProvider for OpenAIProvider

Source§

fn call<'life0, 'life1, 'life2, 'life3, 'async_trait>( &'life0 self, messages: &'life1 [Message], tools: &'life2 [ToolDefinition], stream_callback: Option<&'life3 mut StreamCallback>, ) -> Pin<Box<dyn Future<Output = Result<Message>> + Send + 'async_trait>>
where Self: 'async_trait, 'life0: 'async_trait, 'life1: 'async_trait, 'life2: 'async_trait, 'life3: 'async_trait,

Call the LLM with messages and available tools, returning the assistant’s response Read more

Auto Trait Implementations§

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T> Instrument for T

Source§

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper. Read more
Source§

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper. Read more
Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T> PolicyExt for T
where T: ?Sized,

Source§

fn and<P, B, E>(self, other: P) -> And<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow only if self and other return Action::Follow. Read more
Source§

fn or<P, B, E>(self, other: P) -> Or<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow if either self or other returns Action::Follow. Read more
Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
Source§

impl<T> WithSubscriber for T

Source§

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper. Read more
Source§

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper. Read more