openrouter_reasoning/openrouter_reasoning.rs

use openai_api_rs::v1::api::OpenAIClient;
use openai_api_rs::v1::chat_completion::{
    self, ChatCompletionRequest, Reasoning, ReasoningEffort, ReasoningMode,
};
use std::env;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let api_key = env::var("OPENROUTER_API_KEY")?;
    // Point the client at OpenRouter's OpenAI-compatible endpoint.
    let mut client = OpenAIClient::builder()
        .with_endpoint("https://openrouter.ai/api/v1")
        .with_api_key(api_key)
        .build()?;

    // First request: Grok 3 Mini with effort-based reasoning.
    let mut req = ChatCompletionRequest::new(
        "x-ai/grok-3-mini".to_string(),
        vec![chat_completion::ChatCompletionMessage {
            role: chat_completion::MessageRole::user,
            content: chat_completion::Content::Text(String::from(
                "Explain quantum computing in simple terms.",
            )),
            name: None,
            tool_calls: None,
            tool_call_id: None,
        }],
    );

    // Ask for high reasoning effort and keep the reasoning in the response.
    req.reasoning = Some(Reasoning {
        mode: Some(ReasoningMode::Effort {
            effort: ReasoningEffort::High,
        }),
        exclude: Some(false),
        enabled: None,
    });

    let result = client.chat_completion(req).await?;
    println!("Content: {:?}", result.choices[0].message.content);

    // Second request: cap reasoning with a token budget instead of an effort level.
    let mut req2 = ChatCompletionRequest::new(
        "anthropic/claude-4-sonnet".to_string(),
        vec![chat_completion::ChatCompletionMessage {
            role: chat_completion::MessageRole::user,
            content: chat_completion::Content::Text(String::from(
                "What's the most efficient sorting algorithm?",
            )),
            name: None,
            tool_calls: None,
            tool_call_id: None,
        }],
    );

    // Reason within a budget of at most 2000 tokens.
    req2.reasoning = Some(Reasoning {
        mode: Some(ReasoningMode::MaxTokens { max_tokens: 2000 }),
        exclude: None,
        enabled: None,
    });

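    // Sketch only, not part of the example run: besides `mode`, the `Reasoning`
    // struct also carries the `exclude` and `enabled` flags left unset above.
    // Assuming `enabled: Some(true)` requests the provider's default reasoning
    // level and `exclude: Some(true)` keeps the reasoning tokens out of the
    // returned message, such a request could look like:
    //
    //     req2.reasoning = Some(Reasoning {
    //         mode: None,            // assumption: provider picks its default level
    //         exclude: Some(true),   // assumption: reasoning used but not returned
    //         enabled: Some(true),
    //     });
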
    let result2 = client.chat_completion(req2).await?;
    println!("Content: {:?}", result2.choices[0].message.content);

    Ok(())
}