LLMConfig

Struct LLMConfig

Source
/// Configuration for an LLM request: which model to use plus optional
/// sampling/limit overrides. Built fluently via `new`, `with_temperature`,
/// and `with_max_tokens` (see the impl section below).
pub struct LLMConfig {
    /// Model identifier forwarded to the provider (e.g. "gpt-4" in the example).
    pub model: String,
    /// Optional temperature override; `None` presumably falls back to the
    /// provider's default — confirm against the client implementation.
    pub temperature: Option<f32>,
    /// Optional cap on generated tokens; `None` presumably uses the
    /// provider's default — confirm against the client implementation.
    pub max_tokens: Option<u32>,
}

Fields§

§model: String
§temperature: Option<f32>
§max_tokens: Option<u32>

Implementations§

Source§

impl LLMConfig

Source

pub fn new(model: impl Into<String>) -> LLMConfig

Examples found in repository?
examples/react_loop.rs (line 102)
10async fn main() -> Result<()> {
11    println!("╔════════════════════════════════════════════════════════════╗");
12    println!("║         Praxis React Agent - Interactive Demo             ║");
13    println!("╚════════════════════════════════════════════════════════════╝");
14    println!();
15    println!("This demo shows a React agent that can:");
16    println!("  • Think through problems (reasoning)");
17    println!("  • Use tools from MCP servers");
18    println!("  • Respond to your questions");
19    println!();
20    println!("Prerequisites:");
21    println!("  1. Set OPENAI_API_KEY: export OPENAI_API_KEY=your_key");
22    println!("  2. Set MCP_SERVERS: export MCP_SERVERS=\"http://localhost:8000/mcp,http://localhost:8001/mcp\"");
23    println!("  3. Start MCP servers: cd mcp_servers/weather && uv run python weather.py");
24    println!();
25    println!("Type 'exit' to quit");
26    println!();
27
28    // Get API key from environment
29    let api_key = std::env::var("OPENAI_API_KEY").expect(
30        "OPENAI_API_KEY must be set in environment. Run: export OPENAI_API_KEY=your_key_here"
31    );
32
33    // Parse MCP servers from environment
34    let mcp_servers = std::env::var("MCP_SERVERS")
35        .unwrap_or_else(|_| "http://localhost:8000/mcp".to_string());
36
37    // Create MCP tool executor (aggregates multiple servers)
38    let mcp_executor = Arc::new(MCPToolExecutor::new());
39
40    // Connect to each MCP server
41    println!("Connecting to MCP servers...");
42    for url in mcp_servers.split(',') {
43        let url = url.trim();
44        if !url.is_empty() {
45            print!("  Connecting to {}... ", url);
46            io::stdout().flush()?;
47            match MCPClient::new_http(
48                &format!("mcp-{}", uuid::Uuid::new_v4()),
49                url
50            ).await {
51                Ok(client) => {
52                    mcp_executor.add_server(client).await?;
53                    println!("✓");
54                }
55                Err(e) => {
56                    println!("✗ Failed: {}", e);
57                    println!("Make sure the MCP server is running at {}", url);
58                    return Err(e);
59                }
60            }
61        }
62    }
63    println!();
64
65    // Create LLM client
66    let llm_client = Arc::new(OpenAIClient::new(api_key)?);
67
68    // Create graph config
69    let config = GraphConfig::default();
70
71    // Create graph
72    let graph = Graph::new(llm_client, mcp_executor, config);
73
74    // Conversation loop
75    let conversation_id = uuid::Uuid::new_v4().to_string();
76    
77    loop {
78        // Get user input
79        print!("\n\x1b[1;36m You: \x1b[0m");
80        io::stdout().flush()?;
81
82        let mut input = String::new();
83        io::stdin().read_line(&mut input)?;
84        let input = input.trim();
85
86        if input.is_empty() {
87            continue;
88        }
89
90        if input.eq_ignore_ascii_case("exit") {
91            println!("\nGoodbye!");
92            break;
93        }
94
95        // Create message
96        let user_message = Message::Human {
97            content: Content::text(input),
98            name: None,
99        };
100
101        // Create graph input
102        let llm_config = LLMConfig::new("gpt-4")
103            .with_temperature(0.7)
104            .with_max_tokens(4096);
105
106        let graph_input = GraphInput::new(conversation_id.clone(), user_message, llm_config);
107
108        // Spawn execution
109        let mut event_rx = graph.spawn_run(graph_input);
110
111        // Print assistant label
112        print!("\n\x1b[1;32mAssistant:\x1b[0m ");
113        io::stdout().flush()?;
114
115        let mut in_reasoning = false;
116        let mut in_message = false;
117
118        // Process events
119        while let Some(event) = event_rx.recv().await {
120            match event {
121                StreamEvent::InitStream { .. } => {
122                    // Silent - just track
123                }
124
125                StreamEvent::Reasoning { content } => {
126                    if !in_reasoning {
127                        print!("\n\x1b[2;3mReasoning: ");
128                        in_reasoning = true;
129                        in_message = false;
130                    }
131                    print!("{}", content);
132                    io::stdout().flush()?;
133                }
134
135                StreamEvent::Message { content } => {
136                    if !in_message {
137                        if in_reasoning {
138                            print!("\x1b[0m\n\n");
139                        }
140                        print!("\x1b[0m");
141                        in_message = true;
142                        in_reasoning = false;
143                    }
144                    print!("{}", content);
145                    io::stdout().flush()?;
146                }
147
148                StreamEvent::ToolCall {
149                    index: _,
150                    id: _,
151                    name,
152                    arguments,
153                } => {
154                    if in_reasoning {
155                        print!("\x1b[0m\n");
156                    }
157                    if let Some(name) = name {
158                        if let Some(args) = arguments {
159                            print!("\n\x1b[1;33mCalling tool: {} ({})\x1b[0m", name, args);
160                        } else {
161                            print!("\n\x1b[1;33mCalling tool: {}\x1b[0m", name);
162                        }
163                        io::stdout().flush()?;
164                    }
165                    in_reasoning = false;
166                    in_message = false;
167                }
168
169                StreamEvent::ToolResult {
170                    tool_call_id: _,
171                    result,
172                    is_error,
173                    duration_ms,
174                } => {
175                    if is_error {
176                        print!(
177                            "\n\x1b[1;31mTool error ({}ms): {}\x1b[0m",
178                            duration_ms, result
179                        );
180                    } else {
181                        // Truncate long results
182                        let display_result = if result.len() > 100 {
183                            format!("{}...", &result[..100])
184                        } else {
185                            result
186                        };
187                        print!(
188                            "\n\x1b[1;32mTool result ({}ms): {}\x1b[0m",
189                            duration_ms, display_result
190                        );
191                    }
192                    io::stdout().flush()?;
193                    in_reasoning = false;
194                    in_message = false;
195                }
196
197                StreamEvent::Done { finish_reason: _ } => {
198                    // LLM stream done, continue to next node
199                }
200
201                StreamEvent::Error { message, .. } => {
202                    print!("\n\n\x1b[1;31mError: {}\x1b[0m", message);
203                    io::stdout().flush()?;
204                    break;
205                }
206
207                StreamEvent::EndStream {
208                    status: _,
209                    total_duration_ms,
210                } => {
211                    print!("\n\n\x1b[2m[Completed in {}ms]\x1b[0m", total_duration_ms);
212                    io::stdout().flush()?;
213                    break;
214                }
215            }
216        }
217
218        println!(); // Final newline
219    }
220
221    Ok(())
222}
Source

pub fn with_temperature(self, temp: f32) -> LLMConfig

Examples found in repository?
examples/react_loop.rs (line 103)
10async fn main() -> Result<()> {
11    println!("╔════════════════════════════════════════════════════════════╗");
12    println!("║         Praxis React Agent - Interactive Demo             ║");
13    println!("╚════════════════════════════════════════════════════════════╝");
14    println!();
15    println!("This demo shows a React agent that can:");
16    println!("  • Think through problems (reasoning)");
17    println!("  • Use tools from MCP servers");
18    println!("  • Respond to your questions");
19    println!();
20    println!("Prerequisites:");
21    println!("  1. Set OPENAI_API_KEY: export OPENAI_API_KEY=your_key");
22    println!("  2. Set MCP_SERVERS: export MCP_SERVERS=\"http://localhost:8000/mcp,http://localhost:8001/mcp\"");
23    println!("  3. Start MCP servers: cd mcp_servers/weather && uv run python weather.py");
24    println!();
25    println!("Type 'exit' to quit");
26    println!();
27
28    // Get API key from environment
29    let api_key = std::env::var("OPENAI_API_KEY").expect(
30        "OPENAI_API_KEY must be set in environment. Run: export OPENAI_API_KEY=your_key_here"
31    );
32
33    // Parse MCP servers from environment
34    let mcp_servers = std::env::var("MCP_SERVERS")
35        .unwrap_or_else(|_| "http://localhost:8000/mcp".to_string());
36
37    // Create MCP tool executor (aggregates multiple servers)
38    let mcp_executor = Arc::new(MCPToolExecutor::new());
39
40    // Connect to each MCP server
41    println!("Connecting to MCP servers...");
42    for url in mcp_servers.split(',') {
43        let url = url.trim();
44        if !url.is_empty() {
45            print!("  Connecting to {}... ", url);
46            io::stdout().flush()?;
47            match MCPClient::new_http(
48                &format!("mcp-{}", uuid::Uuid::new_v4()),
49                url
50            ).await {
51                Ok(client) => {
52                    mcp_executor.add_server(client).await?;
53                    println!("✓");
54                }
55                Err(e) => {
56                    println!("✗ Failed: {}", e);
57                    println!("Make sure the MCP server is running at {}", url);
58                    return Err(e);
59                }
60            }
61        }
62    }
63    println!();
64
65    // Create LLM client
66    let llm_client = Arc::new(OpenAIClient::new(api_key)?);
67
68    // Create graph config
69    let config = GraphConfig::default();
70
71    // Create graph
72    let graph = Graph::new(llm_client, mcp_executor, config);
73
74    // Conversation loop
75    let conversation_id = uuid::Uuid::new_v4().to_string();
76    
77    loop {
78        // Get user input
79        print!("\n\x1b[1;36m You: \x1b[0m");
80        io::stdout().flush()?;
81
82        let mut input = String::new();
83        io::stdin().read_line(&mut input)?;
84        let input = input.trim();
85
86        if input.is_empty() {
87            continue;
88        }
89
90        if input.eq_ignore_ascii_case("exit") {
91            println!("\nGoodbye!");
92            break;
93        }
94
95        // Create message
96        let user_message = Message::Human {
97            content: Content::text(input),
98            name: None,
99        };
100
101        // Create graph input
102        let llm_config = LLMConfig::new("gpt-4")
103            .with_temperature(0.7)
104            .with_max_tokens(4096);
105
106        let graph_input = GraphInput::new(conversation_id.clone(), user_message, llm_config);
107
108        // Spawn execution
109        let mut event_rx = graph.spawn_run(graph_input);
110
111        // Print assistant label
112        print!("\n\x1b[1;32mAssistant:\x1b[0m ");
113        io::stdout().flush()?;
114
115        let mut in_reasoning = false;
116        let mut in_message = false;
117
118        // Process events
119        while let Some(event) = event_rx.recv().await {
120            match event {
121                StreamEvent::InitStream { .. } => {
122                    // Silent - just track
123                }
124
125                StreamEvent::Reasoning { content } => {
126                    if !in_reasoning {
127                        print!("\n\x1b[2;3mReasoning: ");
128                        in_reasoning = true;
129                        in_message = false;
130                    }
131                    print!("{}", content);
132                    io::stdout().flush()?;
133                }
134
135                StreamEvent::Message { content } => {
136                    if !in_message {
137                        if in_reasoning {
138                            print!("\x1b[0m\n\n");
139                        }
140                        print!("\x1b[0m");
141                        in_message = true;
142                        in_reasoning = false;
143                    }
144                    print!("{}", content);
145                    io::stdout().flush()?;
146                }
147
148                StreamEvent::ToolCall {
149                    index: _,
150                    id: _,
151                    name,
152                    arguments,
153                } => {
154                    if in_reasoning {
155                        print!("\x1b[0m\n");
156                    }
157                    if let Some(name) = name {
158                        if let Some(args) = arguments {
159                            print!("\n\x1b[1;33mCalling tool: {} ({})\x1b[0m", name, args);
160                        } else {
161                            print!("\n\x1b[1;33mCalling tool: {}\x1b[0m", name);
162                        }
163                        io::stdout().flush()?;
164                    }
165                    in_reasoning = false;
166                    in_message = false;
167                }
168
169                StreamEvent::ToolResult {
170                    tool_call_id: _,
171                    result,
172                    is_error,
173                    duration_ms,
174                } => {
175                    if is_error {
176                        print!(
177                            "\n\x1b[1;31mTool error ({}ms): {}\x1b[0m",
178                            duration_ms, result
179                        );
180                    } else {
181                        // Truncate long results
182                        let display_result = if result.len() > 100 {
183                            format!("{}...", &result[..100])
184                        } else {
185                            result
186                        };
187                        print!(
188                            "\n\x1b[1;32mTool result ({}ms): {}\x1b[0m",
189                            duration_ms, display_result
190                        );
191                    }
192                    io::stdout().flush()?;
193                    in_reasoning = false;
194                    in_message = false;
195                }
196
197                StreamEvent::Done { finish_reason: _ } => {
198                    // LLM stream done, continue to next node
199                }
200
201                StreamEvent::Error { message, .. } => {
202                    print!("\n\n\x1b[1;31mError: {}\x1b[0m", message);
203                    io::stdout().flush()?;
204                    break;
205                }
206
207                StreamEvent::EndStream {
208                    status: _,
209                    total_duration_ms,
210                } => {
211                    print!("\n\n\x1b[2m[Completed in {}ms]\x1b[0m", total_duration_ms);
212                    io::stdout().flush()?;
213                    break;
214                }
215            }
216        }
217
218        println!(); // Final newline
219    }
220
221    Ok(())
222}
Source

pub fn with_max_tokens(self, tokens: u32) -> LLMConfig

Examples found in repository?
examples/react_loop.rs (line 104)
10async fn main() -> Result<()> {
11    println!("╔════════════════════════════════════════════════════════════╗");
12    println!("║         Praxis React Agent - Interactive Demo             ║");
13    println!("╚════════════════════════════════════════════════════════════╝");
14    println!();
15    println!("This demo shows a React agent that can:");
16    println!("  • Think through problems (reasoning)");
17    println!("  • Use tools from MCP servers");
18    println!("  • Respond to your questions");
19    println!();
20    println!("Prerequisites:");
21    println!("  1. Set OPENAI_API_KEY: export OPENAI_API_KEY=your_key");
22    println!("  2. Set MCP_SERVERS: export MCP_SERVERS=\"http://localhost:8000/mcp,http://localhost:8001/mcp\"");
23    println!("  3. Start MCP servers: cd mcp_servers/weather && uv run python weather.py");
24    println!();
25    println!("Type 'exit' to quit");
26    println!();
27
28    // Get API key from environment
29    let api_key = std::env::var("OPENAI_API_KEY").expect(
30        "OPENAI_API_KEY must be set in environment. Run: export OPENAI_API_KEY=your_key_here"
31    );
32
33    // Parse MCP servers from environment
34    let mcp_servers = std::env::var("MCP_SERVERS")
35        .unwrap_or_else(|_| "http://localhost:8000/mcp".to_string());
36
37    // Create MCP tool executor (aggregates multiple servers)
38    let mcp_executor = Arc::new(MCPToolExecutor::new());
39
40    // Connect to each MCP server
41    println!("Connecting to MCP servers...");
42    for url in mcp_servers.split(',') {
43        let url = url.trim();
44        if !url.is_empty() {
45            print!("  Connecting to {}... ", url);
46            io::stdout().flush()?;
47            match MCPClient::new_http(
48                &format!("mcp-{}", uuid::Uuid::new_v4()),
49                url
50            ).await {
51                Ok(client) => {
52                    mcp_executor.add_server(client).await?;
53                    println!("✓");
54                }
55                Err(e) => {
56                    println!("✗ Failed: {}", e);
57                    println!("Make sure the MCP server is running at {}", url);
58                    return Err(e);
59                }
60            }
61        }
62    }
63    println!();
64
65    // Create LLM client
66    let llm_client = Arc::new(OpenAIClient::new(api_key)?);
67
68    // Create graph config
69    let config = GraphConfig::default();
70
71    // Create graph
72    let graph = Graph::new(llm_client, mcp_executor, config);
73
74    // Conversation loop
75    let conversation_id = uuid::Uuid::new_v4().to_string();
76    
77    loop {
78        // Get user input
79        print!("\n\x1b[1;36m You: \x1b[0m");
80        io::stdout().flush()?;
81
82        let mut input = String::new();
83        io::stdin().read_line(&mut input)?;
84        let input = input.trim();
85
86        if input.is_empty() {
87            continue;
88        }
89
90        if input.eq_ignore_ascii_case("exit") {
91            println!("\nGoodbye!");
92            break;
93        }
94
95        // Create message
96        let user_message = Message::Human {
97            content: Content::text(input),
98            name: None,
99        };
100
101        // Create graph input
102        let llm_config = LLMConfig::new("gpt-4")
103            .with_temperature(0.7)
104            .with_max_tokens(4096);
105
106        let graph_input = GraphInput::new(conversation_id.clone(), user_message, llm_config);
107
108        // Spawn execution
109        let mut event_rx = graph.spawn_run(graph_input);
110
111        // Print assistant label
112        print!("\n\x1b[1;32mAssistant:\x1b[0m ");
113        io::stdout().flush()?;
114
115        let mut in_reasoning = false;
116        let mut in_message = false;
117
118        // Process events
119        while let Some(event) = event_rx.recv().await {
120            match event {
121                StreamEvent::InitStream { .. } => {
122                    // Silent - just track
123                }
124
125                StreamEvent::Reasoning { content } => {
126                    if !in_reasoning {
127                        print!("\n\x1b[2;3mReasoning: ");
128                        in_reasoning = true;
129                        in_message = false;
130                    }
131                    print!("{}", content);
132                    io::stdout().flush()?;
133                }
134
135                StreamEvent::Message { content } => {
136                    if !in_message {
137                        if in_reasoning {
138                            print!("\x1b[0m\n\n");
139                        }
140                        print!("\x1b[0m");
141                        in_message = true;
142                        in_reasoning = false;
143                    }
144                    print!("{}", content);
145                    io::stdout().flush()?;
146                }
147
148                StreamEvent::ToolCall {
149                    index: _,
150                    id: _,
151                    name,
152                    arguments,
153                } => {
154                    if in_reasoning {
155                        print!("\x1b[0m\n");
156                    }
157                    if let Some(name) = name {
158                        if let Some(args) = arguments {
159                            print!("\n\x1b[1;33mCalling tool: {} ({})\x1b[0m", name, args);
160                        } else {
161                            print!("\n\x1b[1;33mCalling tool: {}\x1b[0m", name);
162                        }
163                        io::stdout().flush()?;
164                    }
165                    in_reasoning = false;
166                    in_message = false;
167                }
168
169                StreamEvent::ToolResult {
170                    tool_call_id: _,
171                    result,
172                    is_error,
173                    duration_ms,
174                } => {
175                    if is_error {
176                        print!(
177                            "\n\x1b[1;31mTool error ({}ms): {}\x1b[0m",
178                            duration_ms, result
179                        );
180                    } else {
181                        // Truncate long results
182                        let display_result = if result.len() > 100 {
183                            format!("{}...", &result[..100])
184                        } else {
185                            result
186                        };
187                        print!(
188                            "\n\x1b[1;32mTool result ({}ms): {}\x1b[0m",
189                            duration_ms, display_result
190                        );
191                    }
192                    io::stdout().flush()?;
193                    in_reasoning = false;
194                    in_message = false;
195                }
196
197                StreamEvent::Done { finish_reason: _ } => {
198                    // LLM stream done, continue to next node
199                }
200
201                StreamEvent::Error { message, .. } => {
202                    print!("\n\n\x1b[1;31mError: {}\x1b[0m", message);
203                    io::stdout().flush()?;
204                    break;
205                }
206
207                StreamEvent::EndStream {
208                    status: _,
209                    total_duration_ms,
210                } => {
211                    print!("\n\n\x1b[2m[Completed in {}ms]\x1b[0m", total_duration_ms);
212                    io::stdout().flush()?;
213                    break;
214                }
215            }
216        }
217
218        println!(); // Final newline
219    }
220
221    Ok(())
222}

Trait Implementations§

Source§

impl Clone for LLMConfig

Source§

fn clone(&self) -> LLMConfig

Returns a duplicate of the value. Read more
1.0.0 · Source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
Source§

impl Debug for LLMConfig

Source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error>

Formats the value using the given formatter. Read more
Source§

impl Default for LLMConfig

Source§

fn default() -> LLMConfig

Returns the “default value” for a type. Read more
Source§

impl<'de> Deserialize<'de> for LLMConfig

Source§

fn deserialize<__D>( __deserializer: __D, ) -> Result<LLMConfig, <__D as Deserializer<'de>>::Error>
where __D: Deserializer<'de>,

Deserialize this value from the given Serde deserializer. Read more
Source§

impl Serialize for LLMConfig

Source§

fn serialize<__S>( &self, __serializer: __S, ) -> Result<<__S as Serializer>::Ok, <__S as Serializer>::Error>
where __S: Serializer,

Serialize this value into the given Serde serializer. Read more

Auto Trait Implementations§

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> CloneToUninit for T
where T: Clone,

Source§

unsafe fn clone_to_uninit(&self, dest: *mut u8)

🔬This is a nightly-only experimental API. (clone_to_uninit)
Performs copy-assignment from self to dest. Read more
Source§

impl<T> DynClone for T
where T: Clone,

Source§

fn __clone_box(&self, _: Private) -> *mut ()

Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T> Instrument for T

Source§

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper. Read more
Source§

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper. Read more
Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T> PolicyExt for T
where T: ?Sized,

Source§

fn and<P, B, E>(self, other: P) -> And<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow only if self and other return Action::Follow. Read more
Source§

fn or<P, B, E>(self, other: P) -> Or<T, P>
where T: Policy<B, E>, P: Policy<B, E>,

Create a new Policy that returns Action::Follow if either self or other returns Action::Follow. Read more
Source§

impl<T> ToOwned for T
where T: Clone,

Source§

type Owned = T

The resulting type after obtaining ownership.
Source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
Source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
Source§

impl<T> WithSubscriber for T

Source§

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper. Read more
Source§

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper. Read more
Source§

impl<T> DeserializeOwned for T
where T: for<'de> Deserialize<'de>,