// every_other_token/providers.rs
//! Provider plugin system and SSE wire types.
//!
//! Each supported LLM provider is represented by a zero-sized struct that
//! implements [`ProviderPlugin`].  The [`TokenInterceptor`](crate::TokenInterceptor)
//! selects the appropriate plugin at construction time and uses it to build
//! authenticated HTTP requests and parse streaming responses.
//!
//! ## Supported providers
//!
//! | Variant | Plugin | Endpoint |
//! |---------|--------|----------|
//! | `openai` | [`OpenAiPlugin`] | `https://api.openai.com/v1/chat/completions` |
//! | `anthropic` | [`AnthropicPlugin`] | `https://api.anthropic.com/v1/messages` |
//! | `mock` | (inline fixture) | n/a -- returns canned tokens for tests |

use clap::ValueEnum;
use serde::{Deserialize, Serialize};

// ---------------------------------------------------------------------------
// Provider plugin trait
// ---------------------------------------------------------------------------

/// Trait implemented by each provider plug-in.
///
/// Concrete implementations ([`OpenAiPlugin`], [`AnthropicPlugin`]) supply the
/// provider-specific details (URL, authentication header, request shape) so
/// that the token interceptor can switch providers without branching.
///
/// `Send + Sync` is required so a plugin can be held behind a shared
/// reference (e.g. `Arc<dyn ProviderPlugin>`) across async tasks.
pub trait ProviderPlugin: Send + Sync {
    /// Lowercase display name of the provider (e.g. `"openai"`, `"anthropic"`).
    ///
    /// NOTE(review): tests assert this matches `Provider`'s `Display` output,
    /// so keep the two in sync when adding providers.
    fn name(&self) -> &str;
    /// Default model string to use when the user has not explicitly chosen one.
    fn default_model(&self) -> &str;
    /// Base URL for the provider's streaming chat completions endpoint.
    ///
    /// This centralises API endpoint knowledge so that callers building HTTP
    /// requests do not hard-code provider URLs and future providers only need
    /// to implement this single method.
    fn api_url(&self) -> &str;
    /// Build a JSON request body for the provider's streaming chat API.
    ///
    /// `system` is an optional system prompt; how it is encoded (extra message
    /// vs. top-level field) is provider-specific.
    fn build_request(&self, prompt: &str, system: Option<&str>, model: &str) -> serde_json::Value;
}

/// Provider plug-in for the OpenAI Chat Completions API.
///
/// Zero-sized: all behaviour lives in the [`ProviderPlugin`] impl.
pub struct OpenAiPlugin;
/// Provider plug-in for the Anthropic Messages API.
///
/// Zero-sized: all behaviour lives in the [`ProviderPlugin`] impl.
pub struct AnthropicPlugin;

48impl ProviderPlugin for OpenAiPlugin {
49    fn name(&self) -> &str {
50        "openai"
51    }
52    fn default_model(&self) -> &str {
53        "gpt-3.5-turbo"
54    }
55    fn api_url(&self) -> &str {
56        "https://api.openai.com/v1/chat/completions"
57    }
58    fn build_request(&self, prompt: &str, system: Option<&str>, model: &str) -> serde_json::Value {
59        let mut messages = Vec::new();
60        if let Some(sys) = system {
61            messages.push(serde_json::json!({ "role": "system", "content": sys }));
62        }
63        messages.push(serde_json::json!({ "role": "user", "content": prompt }));
64        serde_json::json!({
65            "model": model,
66            "messages": messages,
67            "stream": true,
68            "temperature": 0.7,
69            "logprobs": true,
70            "top_logprobs": 5,
71        })
72    }
73}
74
/// Anthropic API version header value. Update here when Anthropic releases a new stable version.
/// Presumably sent as the `anthropic-version` HTTP header by the request builder —
/// confirm at the call site in the interceptor.
/// As of 2026-03: Anthropic has not published a newer stable version header. Revisit quarterly.
pub const ANTHROPIC_API_VERSION: &str = "2023-06-01";

79impl ProviderPlugin for AnthropicPlugin {
80    fn name(&self) -> &str {
81        "anthropic"
82    }
83    fn default_model(&self) -> &str {
84        "claude-sonnet-4-6"
85    }
86    fn api_url(&self) -> &str {
87        "https://api.anthropic.com/v1/messages"
88    }
89    fn build_request(&self, prompt: &str, system: Option<&str>, model: &str) -> serde_json::Value {
90        let mut req = serde_json::json!({
91            "model": model,
92            "messages": [{ "role": "user", "content": prompt }],
93            "max_tokens": 1024,
94            "stream": true,
95            "temperature": 0.7,
96        });
97        if let Some(sys) = system {
98            req["system"] = serde_json::Value::String(sys.to_string());
99        }
100        req
101    }
102}
103
// -- Token probability / logprob types --------------------------------------

/// One alternative token returned alongside a logprob entry.
///
/// Derives `Serialize` as well as `Deserialize` (unlike the other logprob
/// types) because the test suite round-trips it back to JSON.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAITopLogprob {
    /// Token text as returned by the API.
    pub token: String,
    /// Natural-log probability of this token at its position.
    pub logprob: f32,
}

/// Per-API-token logprob entry in a streaming chunk.
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAILogprobContent {
    /// The token actually emitted at this position.
    pub token: String,
    /// Natural-log probability of the emitted token.
    pub logprob: f32,
    // `default` so chunks that omit the alternatives list deserialize to an
    // empty vec instead of failing.
    #[serde(default)]
    pub top_logprobs: Vec<OpenAITopLogprob>,
}

/// The `logprobs` block on a streaming choice.
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAIChunkLogprobs {
    // `default` tolerates a `logprobs` object with no `content` array.
    #[serde(default)]
    pub content: Vec<OpenAILogprobContent>,
}

/// Selectable LLM provider.
///
/// Used as a CLI argument (`--provider`, via clap's `ValueEnum`) and
/// throughout the codebase to branch on provider-specific behaviour.
#[derive(Debug, Clone, ValueEnum, PartialEq)]
pub enum Provider {
    /// OpenAI Chat Completions API (GPT-3.5, GPT-4, etc.).
    Openai,
    /// Anthropic Messages API (Claude family).
    Anthropic,
    /// In-process mock provider for tests and dry-run mode.
    Mock,
}

143impl std::fmt::Display for Provider {
144    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
145        match self {
146            Provider::Openai => write!(f, "openai"),
147            Provider::Anthropic => write!(f, "anthropic"),
148            Provider::Mock => write!(f, "mock"),
149        }
150    }
151}
152
153impl std::str::FromStr for Provider {
154    type Err = String;
155
156    fn from_str(s: &str) -> Result<Self, Self::Err> {
157        match s.to_lowercase().as_str() {
158            "openai" => Ok(Provider::Openai),
159            "anthropic" => Ok(Provider::Anthropic),
160            "mock" => Ok(Provider::Mock),
161            other => Err(format!(
162                "unknown provider: '{}' (expected openai, anthropic, or mock)",
163                other
164            )),
165        }
166    }
167}
168
// -- OpenAI SSE types -------------------------------------------------------

/// A single message in an OpenAI chat request (role + content pair).
#[derive(Debug, Serialize)]
pub struct OpenAIChatMessage {
    /// Role of the message author: `"system"`, `"user"`, or `"assistant"`.
    pub role: String,
    /// Text content of the message.
    pub content: String,
}

/// Full JSON body for an OpenAI streaming chat completions request.
///
/// Typed counterpart of the `serde_json::json!` body built by
/// [`OpenAiPlugin`]; every field serializes unconditionally.
#[derive(Debug, Serialize)]
pub struct OpenAIChatRequest {
    /// Model identifier (e.g. `"gpt-4"`).
    pub model: String,
    /// Conversation history including the current user turn.
    pub messages: Vec<OpenAIChatMessage>,
    /// Must be `true` to enable SSE streaming.
    pub stream: bool,
    /// Sampling temperature (0.0–2.0).
    pub temperature: f32,
    /// Whether to include per-token log probabilities in the response.
    pub logprobs: bool,
    /// Number of top alternative tokens per position (0–20).
    pub top_logprobs: u8,
}

/// Incremental content fragment within a streaming choice delta.
#[derive(Debug, Deserialize)]
pub struct OpenAIDelta {
    /// Text fragment, absent on the final chunk where `finish_reason` is set.
    pub content: Option<String>,
}

/// One streaming choice from an OpenAI chunk event.
#[derive(Debug, Deserialize)]
pub struct OpenAIChoice {
    /// Incremental content delta for this chunk.
    pub delta: OpenAIDelta,
    /// Populated on the final chunk (`"stop"`, `"length"`, etc.).
    #[allow(dead_code)]
    pub finish_reason: Option<String>,
    /// Log probability data, present when `logprobs=true` was requested.
    // NOTE(review): `#[serde(default)]` on an `Option` field is belt-and-braces;
    // derived serde already treats missing `Option` fields as `None`.
    #[serde(default)]
    pub logprobs: Option<OpenAIChunkLogprobs>,
}

/// One server-sent event chunk from the OpenAI streaming API.
#[derive(Debug, Deserialize)]
pub struct OpenAIChunk {
    /// List of choice objects (typically one entry for non-parallel requests).
    pub choices: Vec<OpenAIChoice>,
}

// -- Anthropic SSE types ----------------------------------------------------

/// A single message in an Anthropic Messages API request.
#[derive(Debug, Serialize)]
pub struct AnthropicMessage {
    /// Role: `"user"` or `"assistant"`.
    pub role: String,
    /// Text content of the message.
    pub content: String,
}

/// Full JSON body for an Anthropic streaming messages request.
///
/// Typed counterpart of the `serde_json::json!` body built by
/// [`AnthropicPlugin`].
#[derive(Debug, Serialize)]
pub struct AnthropicRequest {
    /// Model identifier (e.g. `"claude-sonnet-4-6"`).
    pub model: String,
    /// Conversation messages.
    pub messages: Vec<AnthropicMessage>,
    /// Maximum tokens to generate.
    pub max_tokens: u32,
    /// Must be `true` to enable SSE streaming.
    pub stream: bool,
    /// Sampling temperature (0.0–1.0 for Anthropic).
    pub temperature: f32,
    /// Optional system prompt prepended before the conversation.
    // Skipped entirely when `None` so the wire format matches the plugin's
    // conditional `"system"` key.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub system: Option<String>,
}

/// Incremental text delta from an Anthropic `content_block_delta` event.
#[derive(Debug, Deserialize)]
pub struct AnthropicContentDelta {
    /// New text fragment, present only on `text_delta` sub-events.
    // `default` tolerates deltas of other sub-types (e.g. `message_delta`
    // stop reasons) that carry no `text` key.
    #[serde(default)]
    pub text: Option<String>,
}

/// One server-sent event from the Anthropic streaming API.
#[derive(Debug, Deserialize)]
pub struct AnthropicStreamEvent {
    /// Event type: `"content_block_delta"`, `"message_start"`, `"ping"`, etc.
    // `type` is a Rust keyword, hence the rename.
    #[serde(rename = "type")]
    pub event_type: String,
    /// Content delta, present only on `content_block_delta` events.
    #[serde(default)]
    pub delta: Option<AnthropicContentDelta>,
}

// -- Orchestrator MCP types -------------------------------------------------

/// JSON-RPC 2.0 request sent to the MCP orchestrator (`tools/call infer`).
#[derive(Debug, Serialize)]
pub struct McpInferRequest {
    /// Always `"2.0"`.
    pub jsonrpc: String,
    /// Always `"tools/call"`.
    pub method: String,
    /// Caller-assigned request identifier.
    pub id: u64,
    /// Typed parameters block.
    pub params: McpInferParams,
}

/// Parameters block for an MCP `tools/call` request.
#[derive(Debug, Serialize)]
pub struct McpInferParams {
    /// Tool name to invoke (e.g. `"infer"`).
    pub name: String,
    /// Arguments forwarded to the tool.
    pub arguments: McpInferArguments,
}

/// Arguments forwarded to the MCP `infer` tool.
#[derive(Debug, Serialize)]
pub struct McpInferArguments {
    /// Text prompt to enrich or process.
    pub prompt: String,
    /// Target worker backend (e.g. `"llama_cpp"`).
    pub worker: String,
}

/// JSON-RPC 2.0 response returned by the MCP orchestrator.
///
/// Exactly one of `result` / `error` is expected to be populated, per
/// JSON-RPC semantics; both are `Option` so either shape deserializes.
#[derive(Debug, Deserialize)]
pub struct McpInferResponse {
    /// Protocol version echo, always `"2.0"` when present.
    #[allow(dead_code)]
    pub jsonrpc: Option<String>,
    /// Successful result payload; mutually exclusive with `error`.
    pub result: Option<McpInferResult>,
    /// Error payload; mutually exclusive with `result`.
    pub error: Option<McpError>,
}

/// Successful result from an MCP `infer` call.
#[derive(Debug, Deserialize)]
pub struct McpInferResult {
    /// List of content items returned by the tool.
    pub content: Vec<McpContent>,
}

/// A single content item within an MCP tool result.
#[derive(Debug, Deserialize)]
pub struct McpContent {
    /// Text value of this content item, or `None` for non-text items.
    pub text: Option<String>,
}

/// Error payload from an MCP JSON-RPC response.
#[derive(Debug, Deserialize)]
pub struct McpError {
    /// Human-readable error message.
    pub message: String,
}

/// Unit tests for the provider wire types: `Provider` enum behaviour, MCP
/// JSON-RPC round-trips, OpenAI/Anthropic SSE deserialization, logprob
/// payloads, and plugin metadata invariants.
#[cfg(test)]
mod tests {
    use super::*;

    // -- Provider enum: Display / equality / Clone --

    #[test]
    fn test_provider_display() {
        assert_eq!(Provider::Openai.to_string(), "openai");
        assert_eq!(Provider::Anthropic.to_string(), "anthropic");
        assert_eq!(Provider::Mock.to_string(), "mock");
    }

    #[test]
    fn test_provider_equality() {
        assert_eq!(Provider::Openai, Provider::Openai);
        assert_eq!(Provider::Anthropic, Provider::Anthropic);
        assert_eq!(Provider::Mock, Provider::Mock);
        assert_ne!(Provider::Openai, Provider::Anthropic);
        assert_ne!(Provider::Openai, Provider::Mock);
        assert_ne!(Provider::Anthropic, Provider::Mock);
    }

    #[test]
    fn test_provider_openai_display_lowercase() {
        let s = format!("{}", Provider::Openai);
        assert_eq!(s, "openai");
        assert!(s.chars().all(|c| c.is_lowercase() || c.is_alphanumeric()));
    }

    #[test]
    fn test_provider_anthropic_display_lowercase() {
        assert_eq!(format!("{}", Provider::Anthropic), "anthropic");
    }

    #[test]
    fn test_provider_clone() {
        let p = Provider::Openai;
        let p2 = p.clone();
        assert_eq!(p, p2);
    }

    // -- MCP JSON-RPC serialization / deserialization --

    #[test]
    fn test_mcp_request_serializes() {
        let req = McpInferRequest {
            jsonrpc: "2.0".to_string(),
            method: "tools/call".to_string(),
            id: 1,
            params: McpInferParams {
                name: "infer".to_string(),
                arguments: McpInferArguments {
                    prompt: "hello".to_string(),
                    worker: "llama_cpp".to_string(),
                },
            },
        };
        let json = serde_json::to_string(&req).expect("serialization failed");
        assert!(json.contains("\"jsonrpc\":\"2.0\""));
        assert!(json.contains("\"worker\":\"llama_cpp\""));
        assert!(json.contains("\"prompt\":\"hello\""));
    }

    #[test]
    fn test_mcp_response_deserializes_success() {
        let json = r#"{"jsonrpc":"2.0","result":{"content":[{"text":"enriched prompt"}]}}"#;
        let resp: McpInferResponse = serde_json::from_str(json).expect("deser failed");
        assert!(resp.error.is_none());
        let text = resp
            .result
            .as_ref()
            .and_then(|r| r.content.first())
            .and_then(|c| c.text.as_ref());
        assert_eq!(text, Some(&"enriched prompt".to_string()));
    }

    #[test]
    fn test_mcp_response_deserializes_error() {
        let json = r#"{"jsonrpc":"2.0","error":{"message":"pipeline down"}}"#;
        let resp: McpInferResponse = serde_json::from_str(json).expect("deser failed");
        assert!(resp.result.is_none());
        assert_eq!(
            resp.error.as_ref().map(|e| &e.message[..]),
            Some("pipeline down")
        );
    }

    // -- SSE event deserialization --

    #[test]
    fn test_anthropic_content_block_delta_deserializes() {
        let json = r#"{"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Hello"}}"#;
        let event: AnthropicStreamEvent = serde_json::from_str(json).expect("deser failed");
        assert_eq!(event.event_type, "content_block_delta");
        assert_eq!(
            event
                .delta
                .as_ref()
                .and_then(|d| d.text.as_ref())
                .map(|s| s.as_str()),
            Some("Hello")
        );
    }

    #[test]
    fn test_anthropic_message_start_deserializes() {
        // Unknown keys (`message`) are ignored by the derived deserializer.
        let json = r#"{"type":"message_start","message":{"id":"msg_123"}}"#;
        let event: AnthropicStreamEvent = serde_json::from_str(json).expect("deser failed");
        assert_eq!(event.event_type, "message_start");
        assert!(event.delta.is_none());
    }

    #[test]
    fn test_openai_chunk_deserializes() {
        let json = r#"{"id":"chatcmpl-abc","choices":[{"index":0,"delta":{"content":"Hi"},"finish_reason":null}]}"#;
        let chunk: OpenAIChunk = serde_json::from_str(json).expect("deser failed");
        assert_eq!(chunk.choices.len(), 1);
        assert_eq!(
            chunk.choices[0].delta.content.as_ref().map(|s| s.as_str()),
            Some("Hi")
        );
    }

    #[test]
    fn test_openai_chunk_empty_delta() {
        // Final chunk: empty delta object, finish_reason set.
        let json =
            r#"{"id":"chatcmpl-abc","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]}"#;
        let chunk: OpenAIChunk = serde_json::from_str(json).expect("deser failed");
        assert!(chunk.choices[0].delta.content.is_none());
    }

    #[test]
    fn test_mcp_infer_request_contains_all_fields() {
        let req = McpInferRequest {
            jsonrpc: "2.0".to_string(),
            method: "tools/call".to_string(),
            id: 42,
            params: McpInferParams {
                name: "infer".to_string(),
                arguments: McpInferArguments {
                    prompt: "test prompt".to_string(),
                    worker: "llama_cpp".to_string(),
                },
            },
        };
        let json = serde_json::to_string(&req).expect("serialize");
        let parsed: serde_json::Value = serde_json::from_str(&json).expect("parse");
        assert_eq!(parsed["jsonrpc"], "2.0");
        assert_eq!(parsed["method"], "tools/call");
        assert_eq!(parsed["id"], 42);
        assert_eq!(parsed["params"]["arguments"]["prompt"], "test prompt");
    }

    #[test]
    fn test_mcp_response_empty_content() {
        let json = r#"{"jsonrpc":"2.0","result":{"content":[]}}"#;
        let resp: McpInferResponse = serde_json::from_str(json).expect("deser");
        assert!(resp.result.as_ref().expect("result").content.is_empty());
    }

    #[test]
    fn test_mcp_response_null_text() {
        let json = r#"{"jsonrpc":"2.0","result":{"content":[{"text":null}]}}"#;
        let resp: McpInferResponse = serde_json::from_str(json).expect("deser");
        assert!(resp.result.as_ref().expect("result").content[0]
            .text
            .is_none());
    }

    #[test]
    fn test_openai_chunk_multiple_choices() {
        let json = r#"{"id":"chatcmpl-x","choices":[{"index":0,"delta":{"content":"A"},"finish_reason":null},{"index":1,"delta":{"content":"B"},"finish_reason":null}]}"#;
        let chunk: OpenAIChunk = serde_json::from_str(json).expect("deser");
        assert_eq!(chunk.choices.len(), 2);
        assert_eq!(chunk.choices[0].delta.content.as_deref(), Some("A"));
        assert_eq!(chunk.choices[1].delta.content.as_deref(), Some("B"));
    }

    #[test]
    fn test_openai_chunk_no_choices() {
        let json = r#"{"id":"chatcmpl-x","choices":[]}"#;
        let chunk: OpenAIChunk = serde_json::from_str(json).expect("deser");
        assert!(chunk.choices.is_empty());
    }

    #[test]
    fn test_anthropic_event_message_delta() {
        // `message_delta` carries a delta object with no `text` key.
        let json = r#"{"type":"message_delta","delta":{"stop_reason":"end_turn"}}"#;
        let event: AnthropicStreamEvent = serde_json::from_str(json).expect("deser");
        assert_eq!(event.event_type, "message_delta");
        assert!(event.delta.as_ref().and_then(|d| d.text.as_ref()).is_none());
    }

    #[test]
    fn test_anthropic_event_ping() {
        let json = r#"{"type":"ping"}"#;
        let event: AnthropicStreamEvent = serde_json::from_str(json).expect("deser");
        assert_eq!(event.event_type, "ping");
        assert!(event.delta.is_none());
    }

    // -- Logprob type tests --

    #[test]
    fn test_openai_chat_request_has_logprobs_fields() {
        let req = OpenAIChatRequest {
            model: "gpt-4".to_string(),
            messages: vec![OpenAIChatMessage {
                role: "user".to_string(),
                content: "hi".to_string(),
            }],
            stream: true,
            temperature: 0.7,
            logprobs: true,
            top_logprobs: 5,
        };
        let json = serde_json::to_string(&req).expect("serialize");
        assert!(json.contains("\"logprobs\":true"));
        assert!(json.contains("\"top_logprobs\":5"));
    }

    #[test]
    fn test_openai_top_logprob_deserializes() {
        let json = r#"{"token":"hello","logprob":-0.5}"#;
        let tlp: OpenAITopLogprob = serde_json::from_str(json).expect("deser");
        assert_eq!(tlp.token, "hello");
        assert!((tlp.logprob - (-0.5)).abs() < 1e-5);
    }

    #[test]
    fn test_openai_logprob_content_deserializes() {
        let json = r#"{"token":"world","logprob":-1.2,"top_logprobs":[{"token":"world","logprob":-1.2},{"token":"earth","logprob":-2.5}]}"#;
        let lc: OpenAILogprobContent = serde_json::from_str(json).expect("deser");
        assert_eq!(lc.token, "world");
        assert_eq!(lc.top_logprobs.len(), 2);
        assert_eq!(lc.top_logprobs[1].token, "earth");
    }

    #[test]
    fn test_openai_chunk_logprobs_empty_content() {
        let json = r#"{"content":[]}"#;
        let cl: OpenAIChunkLogprobs = serde_json::from_str(json).expect("deser");
        assert!(cl.content.is_empty());
    }

    #[test]
    fn test_openai_choice_with_logprobs_deserializes() {
        let json = r#"{"delta":{"content":"Hi"},"finish_reason":null,"logprobs":{"content":[{"token":"Hi","logprob":-0.1,"top_logprobs":[{"token":"Hi","logprob":-0.1},{"token":"Hey","logprob":-2.3}]}]}}"#;
        let choice: OpenAIChoice = serde_json::from_str(json).expect("deser");
        assert_eq!(choice.delta.content.as_deref(), Some("Hi"));
        let lp = choice.logprobs.as_ref().expect("logprobs present");
        assert_eq!(lp.content.len(), 1);
        assert!((lp.content[0].logprob - (-0.1)).abs() < 1e-5);
        assert_eq!(lp.content[0].top_logprobs[1].token, "Hey");
    }

    #[test]
    fn test_openai_choice_without_logprobs_is_none() {
        let json = r#"{"delta":{"content":"Hi"},"finish_reason":null}"#;
        let choice: OpenAIChoice = serde_json::from_str(json).expect("deser");
        assert!(choice.logprobs.is_none());
    }

    #[test]
    fn test_anthropic_request_with_system_serializes() {
        let req = AnthropicRequest {
            model: "claude-sonnet-4-20250514".to_string(),
            messages: vec![AnthropicMessage {
                role: "user".to_string(),
                content: "hi".to_string(),
            }],
            max_tokens: 1024,
            stream: true,
            temperature: 0.7,
            system: Some("You are a helpful assistant.".to_string()),
        };
        let json = serde_json::to_string(&req).expect("serialize");
        assert!(json.contains("\"system\":\"You are a helpful assistant.\""));
    }

    #[test]
    fn test_anthropic_request_without_system_omits_field() {
        // `skip_serializing_if` must drop the key entirely, not emit null.
        let req = AnthropicRequest {
            model: "claude-sonnet-4-20250514".to_string(),
            messages: vec![AnthropicMessage {
                role: "user".to_string(),
                content: "hi".to_string(),
            }],
            max_tokens: 1024,
            stream: true,
            temperature: 0.7,
            system: None,
        };
        let json = serde_json::to_string(&req).expect("serialize");
        assert!(!json.contains("system"));
    }

    #[test]
    fn test_openai_top_logprob_clone() {
        let t = OpenAITopLogprob {
            token: "foo".to_string(),
            logprob: -1.0,
        };
        let t2 = t.clone();
        assert_eq!(t2.token, t.token);
        assert!((t2.logprob - t.logprob).abs() < 1e-6);
    }

    #[test]
    fn test_openai_logprob_content_no_top_logprobs() {
        // Missing `top_logprobs` falls back to the `#[serde(default)]` empty vec.
        let json = r#"{"token":"test","logprob":-0.8}"#;
        let lc: OpenAILogprobContent = serde_json::from_str(json).expect("deser");
        assert!(lc.top_logprobs.is_empty());
    }

    #[test]
    fn test_openai_top_logprob_serializes() {
        let t = OpenAITopLogprob {
            token: "bar".to_string(),
            logprob: -2.0,
        };
        let json = serde_json::to_string(&t).expect("serialize");
        assert!(json.contains("\"token\":\"bar\""));
        assert!(json.contains("\"logprob\":-2.0"));
    }

    // ---- ProviderPlugin api_url() tests ----

    #[test]
    fn test_openai_plugin_api_url_https() {
        assert!(OpenAiPlugin.api_url().starts_with("https://"));
    }

    #[test]
    fn test_anthropic_plugin_api_url_https() {
        assert!(AnthropicPlugin.api_url().starts_with("https://"));
    }

    #[test]
    fn test_openai_plugin_api_url_contains_openai() {
        assert!(OpenAiPlugin.api_url().contains("openai.com"));
    }

    #[test]
    fn test_anthropic_plugin_api_url_contains_anthropic() {
        assert!(AnthropicPlugin.api_url().contains("anthropic.com"));
    }

    #[test]
    fn test_openai_plugin_name_matches_display() {
        assert_eq!(OpenAiPlugin.name(), Provider::Openai.to_string());
    }

    #[test]
    fn test_anthropic_plugin_name_matches_display() {
        assert_eq!(AnthropicPlugin.name(), Provider::Anthropic.to_string());
    }

    #[test]
    fn test_openai_plugin_default_model_nonempty() {
        assert!(!OpenAiPlugin.default_model().is_empty());
    }

    #[test]
    fn test_anthropic_plugin_default_model_nonempty() {
        assert!(!AnthropicPlugin.default_model().is_empty());
    }

    // -- ANTHROPIC_API_VERSION constant tests (#14) --

    #[test]
    fn test_anthropic_api_version_nonempty() {
        assert!(!super::ANTHROPIC_API_VERSION.is_empty());
    }

    #[test]
    fn test_anthropic_api_version_format() {
        // Should be a date in YYYY-MM-DD format
        let v = super::ANTHROPIC_API_VERSION;
        assert_eq!(v.len(), 10, "version should be YYYY-MM-DD");
        assert!(v.chars().nth(4) == Some('-'), "4th char should be -");
        assert!(v.chars().nth(7) == Some('-'), "7th char should be -");
    }
}