// openrouter_reasoning.rs
//
// Calls OpenRouter's OpenAI-compatible API with the openai-api-rs client and
// shows two ways to request model reasoning: by effort level and by a token
// budget for the reasoning tokens.
use openai_api_rs::v1::api::OpenAIClient;
use openai_api_rs::v1::chat_completion::{
    self, ChatCompletionRequest, Reasoning, ReasoningEffort, ReasoningMode,
};
use std::env;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let api_key = env::var("OPENROUTER_API_KEY")?;
    // Point the client at OpenRouter's OpenAI-compatible endpoint.
    let mut client = OpenAIClient::builder()
        .with_endpoint("https://openrouter.ai/api/v1")
        .with_api_key(api_key)
        .build()?;
13
14 let mut req = ChatCompletionRequest::new(
16 "x-ai/grok-3-mini".to_string(), vec![chat_completion::ChatCompletionMessage {
18 role: chat_completion::MessageRole::user,
19 content: chat_completion::Content::Text(String::from(
20 "Explain quantum computing in simple terms.",
21 )),
22 name: None,
23 tool_calls: None,
24 tool_call_id: None,
25 }],
26 );

    // Ask for high reasoning effort and keep the reasoning in the response
    // (exclude: false).
    req.reasoning = Some(Reasoning {
        mode: Some(ReasoningMode::Effort {
            effort: ReasoningEffort::High,
        }),
        exclude: Some(false),
        enabled: None,
    });

    let result = client.chat_completion(req).await?;
    println!("Content: {:?}", result.choices[0].message.content);

    // Second request: cap the reasoning with a token budget instead of an
    // effort level.
    let mut req2 = ChatCompletionRequest::new(
        "anthropic/claude-4-sonnet".to_string(),
        vec![chat_completion::ChatCompletionMessage {
            role: chat_completion::MessageRole::user,
            content: chat_completion::Content::Text(String::from(
                "What's the most efficient sorting algorithm?",
            )),
            name: None,
            tool_calls: None,
            tool_call_id: None,
        }],
    );

    // Allow up to 2000 tokens of reasoning for this request.
    req2.reasoning = Some(Reasoning {
        mode: Some(ReasoningMode::MaxTokens { max_tokens: 2000 }),
        exclude: None,
        enabled: None,
    });

    let result2 = client.chat_completion(req2).await?;
    println!("Content: {:?}", result2.choices[0].message.content);
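
    // A minimal extra sketch (not part of the original example): the same
    // Reasoning struct also carries `enabled` and `exclude`, so reasoning can
    // be switched on with provider defaults while the reasoning text is kept
    // out of the response. How providers honor these flags is an
    // OpenRouter-side behavior; treat this as an assumption.
    let _reasoning_on_but_hidden = Reasoning {
        mode: None,          // no explicit effort level or token budget
        exclude: Some(true), // don't return the reasoning text
        enabled: Some(true), // enable reasoning with default settings
    };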

    Ok(())
}