debug_openai_transport/debug_openai_transport.rs

use ai_lib::transport::{HttpClient, HttpTransport};
use serde_json::json;
use std::collections::HashMap;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("🔍 OpenAI Transport Layer Debugging");
    println!("==================================");

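    // Read the API key from the environment; exit early (without failing) if it is not set.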
    let api_key = match std::env::var("OPENAI_API_KEY") {
        Ok(key) => key,
        Err(_) => {
            println!("❌ OPENAI_API_KEY not set");
            return Ok(());
        }
    };

    let transport = HttpTransport::new();

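    // Step 1: a GET to /v1/models only needs the Authorization header, so it isolates auth/network problems.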
    println!("\n📋 Test GET request (model list):");
    let mut headers = HashMap::new();
    headers.insert("Authorization".to_string(), format!("Bearer {}", api_key));

    match transport
        .get::<serde_json::Value>("https://api.openai.com/v1/models", Some(headers))
        .await
    {
        Ok(response) => {
            let model_count = response["data"]
                .as_array()
                .map(|arr| arr.len())
                .unwrap_or(0);
            println!("✅ GET request successful, got {} models", model_count);
        }
        Err(e) => {
            println!("❌ GET request failed: {}", e);
        }
    }

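    // Step 2: the POST exercises request-body handling (JSON serialization, Content-Type, any proxy in between).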
    println!("\n💬 Test POST request (chat completion):");
    let mut headers = HashMap::new();
    headers.insert("Authorization".to_string(), format!("Bearer {}", api_key));

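    // Minimal chat-completion payload; max_tokens stays tiny so the probe is cheap.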
    let request_body = json!({
        "model": "gpt-3.5-turbo",
        "messages": [
            {
                "role": "user",
                "content": "Say 'test' in one word."
            }
        ],
        "max_tokens": 5
    });

    println!(
        "Request body: {}",
        serde_json::to_string_pretty(&request_body)?
    );

    match transport
        .post::<serde_json::Value, serde_json::Value>(
            "https://api.openai.com/v1/chat/completions",
            Some(headers),
            &request_body,
        )
        .await
    {
        Ok(response) => {
            println!("✅ POST request successful!");
            println!("Response: {}", serde_json::to_string_pretty(&response)?);
        }
        Err(e) => {
            println!("❌ POST request failed: {}", e);

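            // Heuristic check for a misleading error message; the possible causes are printed below.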
            let error_str = e.to_string();
            if error_str.contains("you must provide a model parameter") {
                println!("🔍 This error is unexpected: the request body does include the model parameter");
                println!("   Possible reasons:");
                println!("   1. A proxy server modified the request body");
                println!("   2. Content-Type header issue");
                println!("   3. JSON serialization issue");
            }
        }
    }

    println!("\n💡 Debug Conclusion:");
    println!("   • If the GET request works → authentication and network connectivity are OK");
    println!("   • If the POST request fails → likely a proxy or request-format issue");
    println!("   • Recommended: check how the proxy server handles POST requests");

    Ok(())
}