// construct/providers/telnyx.rs
//! Telnyx AI inference provider.
//!
//! Telnyx provides AI inference through an OpenAI-compatible API at
//! <https://api.telnyx.com/v2/ai> with access to 53+ models including
//! GPT-4o, Claude, Llama, Mistral, and more.
//!
//! # Configuration
//!
//! Set the `TELNYX_API_KEY` environment variable or configure in `config.toml`:
//!
//! ```toml
//! default_provider = "telnyx"
//! default_model = "openai/gpt-4o"
//! ```

16use crate::providers::traits::{ChatMessage, Provider};
17use async_trait::async_trait;
18use reqwest::Client;
19use serde::Deserialize;
20
21/// Telnyx AI inference provider.
22///
23/// Uses the OpenAI-compatible chat completions API at `/v2/ai/chat/completions`.
24/// Supports 53+ models including OpenAI, Anthropic (via API), Meta Llama,
25/// Mistral, and more.
26///
27/// # Example
28///
29/// ```rust,ignore
30/// use construct::providers::telnyx::TelnyxProvider;
31/// use construct::providers::Provider;
32///
33/// let provider = TelnyxProvider::new(Some("your-api-key"));
34/// let response = provider.chat("Hello!", "openai/gpt-4o", 0.7).await?;
35/// ```
36pub struct TelnyxProvider {
37    /// Telnyx API key
38    api_key: Option<String>,
39    /// HTTP client for API requests
40    client: Client,
41}
42
43impl TelnyxProvider {
44    /// Telnyx AI API base URL
45    const BASE_URL: &'static str = "https://api.telnyx.com/v2/ai";
46
47    /// Create a new Telnyx AI provider.
48    ///
49    /// The API key can be provided directly or will be resolved from:
50    /// 1. `TELNYX_API_KEY` environment variable
51    /// 2. `CONSTRUCT_API_KEY` environment variable (fallback)
52    pub fn new(api_key: Option<&str>) -> Self {
53        let resolved_key = resolve_telnyx_api_key(api_key);
54        Self {
55            api_key: resolved_key,
56            client: Client::builder()
57                .timeout(std::time::Duration::from_secs(120))
58                .connect_timeout(std::time::Duration::from_secs(10))
59                .build()
60                .unwrap_or_else(|_| Client::new()),
61        }
62    }
63
64    /// Create a provider with a custom base URL (for testing or proxies).
65    pub fn with_base_url(api_key: Option<&str>, _base_url: &str) -> Self {
66        // Note: custom base URL support for testing
67        Self::new(api_key)
68    }
69
70    /// List available models from Telnyx AI.
71    ///
72    /// Returns a list of model IDs that can be used with the chat API.
73    pub async fn list_models(&self) -> anyhow::Result<Vec<String>> {
74        let api_key = self.api_key.as_ref().ok_or_else(|| {
75            anyhow::anyhow!("Telnyx API key not set. Set TELNYX_API_KEY environment variable.")
76        })?;
77
78        let response = self
79            .client
80            .get(format!("{}/models", Self::BASE_URL))
81            .header("Authorization", format!("Bearer {}", api_key))
82            .send()
83            .await?;
84
85        if !response.status().is_success() {
86            let error = response.text().await?;
87            anyhow::bail!("Failed to list Telnyx models: {}", error);
88        }
89
90        let models_response: ModelsResponse = response.json().await?;
91        Ok(models_response.data.into_iter().map(|m| m.id).collect())
92    }
93
94    /// Build the chat completions URL
95    fn chat_url(&self) -> String {
96        format!("{}/chat/completions", Self::BASE_URL)
97    }
98}
99
/// Resolve the Telnyx API key from an explicit parameter or the environment.
///
/// An explicit, non-empty (after trimming) `api_key` always wins. Otherwise
/// the environment variables `TELNYX_API_KEY`, `CONSTRUCT_API_KEY`, and
/// `API_KEY` are consulted in that order; the first non-empty trimmed value
/// is returned. Returns `None` when no source yields a usable key.
fn resolve_telnyx_api_key(api_key: Option<&str>) -> Option<String> {
    if let Some(direct) = api_key.map(str::trim).filter(|k| !k.is_empty()) {
        return Some(direct.to_string());
    }

    // Provider-specific variable first, then the generic fallbacks.
    ["TELNYX_API_KEY", "CONSTRUCT_API_KEY", "API_KEY"]
        .iter()
        .find_map(|var| {
            std::env::var(var)
                .ok()
                .map(|value| value.trim().to_string())
                .filter(|value| !value.is_empty())
        })
}
126
127/// Response from the /models endpoint
128#[derive(Debug, Deserialize)]
129struct ModelsResponse {
130    data: Vec<ModelInfo>,
131}
132
133#[derive(Debug, Deserialize)]
134struct ModelInfo {
135    id: String,
136}
137
138/// Request body for chat completions
139#[derive(Debug, serde::Serialize)]
140struct ChatRequest {
141    model: String,
142    messages: Vec<Message>,
143    temperature: f64,
144}
145
146#[derive(Debug, serde::Serialize)]
147struct Message {
148    role: String,
149    content: String,
150}
151
152/// Response from chat completions API
153#[derive(Debug, Deserialize)]
154struct ChatResponse {
155    choices: Vec<Choice>,
156}
157
158#[derive(Debug, Deserialize)]
159struct Choice {
160    message: ResponseMessage,
161}
162
163#[derive(Debug, Deserialize)]
164struct ResponseMessage {
165    content: String,
166}
167
168#[async_trait]
169impl Provider for TelnyxProvider {
170    async fn chat_with_system(
171        &self,
172        system_prompt: Option<&str>,
173        message: &str,
174        model: &str,
175        temperature: f64,
176    ) -> anyhow::Result<String> {
177        let api_key = self.api_key.as_ref().ok_or_else(|| {
178            anyhow::anyhow!(
179                "Telnyx API key not set. Set TELNYX_API_KEY environment variable or run `construct onboard`."
180            )
181        })?;
182
183        let mut messages = Vec::new();
184
185        if let Some(sys) = system_prompt {
186            messages.push(Message {
187                role: "system".to_string(),
188                content: sys.to_string(),
189            });
190        }
191
192        messages.push(Message {
193            role: "user".to_string(),
194            content: message.to_string(),
195        });
196
197        let request = ChatRequest {
198            model: model.to_string(),
199            messages,
200            temperature,
201        };
202
203        let response = self
204            .client
205            .post(self.chat_url())
206            .header("Authorization", format!("Bearer {}", api_key))
207            .header("Content-Type", "application/json")
208            .json(&request)
209            .send()
210            .await?;
211
212        if !response.status().is_success() {
213            let status = response.status();
214            let error = response.text().await?;
215            let sanitized = super::sanitize_api_error(&error);
216            anyhow::bail!("Telnyx API error ({}): {}", status, sanitized);
217        }
218
219        let chat_response: ChatResponse = response.json().await?;
220
221        chat_response
222            .choices
223            .into_iter()
224            .next()
225            .map(|c| c.message.content)
226            .ok_or_else(|| anyhow::anyhow!("No response from Telnyx"))
227    }
228
229    async fn chat_with_history(
230        &self,
231        messages: &[ChatMessage],
232        model: &str,
233        temperature: f64,
234    ) -> anyhow::Result<String> {
235        let api_key = self.api_key.as_ref().ok_or_else(|| {
236            anyhow::anyhow!(
237                "Telnyx API key not set. Set TELNYX_API_KEY environment variable or run `construct onboard`."
238            )
239        })?;
240
241        let api_messages: Vec<Message> = messages
242            .iter()
243            .map(|m| Message {
244                role: m.role.clone(),
245                content: m.content.clone(),
246            })
247            .collect();
248
249        let request = ChatRequest {
250            model: model.to_string(),
251            messages: api_messages,
252            temperature,
253        };
254
255        let response = self
256            .client
257            .post(self.chat_url())
258            .header("Authorization", format!("Bearer {}", api_key))
259            .header("Content-Type", "application/json")
260            .json(&request)
261            .send()
262            .await?;
263
264        if !response.status().is_success() {
265            let status = response.status();
266            let error = response.text().await?;
267            let sanitized = super::sanitize_api_error(&error);
268            anyhow::bail!("Telnyx API error ({}): {}", status, sanitized);
269        }
270
271        let chat_response: ChatResponse = response.json().await?;
272
273        chat_response
274            .choices
275            .into_iter()
276            .next()
277            .map(|c| c.message.content)
278            .ok_or_else(|| anyhow::anyhow!("No response from Telnyx"))
279    }
280
281    async fn warmup(&self) -> anyhow::Result<()> {
282        // Pre-warm the connection pool
283        let _ = self
284            .client
285            .get(format!("{}/models", Self::BASE_URL))
286            .send()
287            .await;
288        Ok(())
289    }
290}
291
/// Popular Telnyx AI model identifiers for easy reference.
pub mod models {
    /// OpenAI GPT-4o (recommended for most tasks).
    pub const GPT_4O: &str = "openai/gpt-4o";
    /// OpenAI GPT-4o Mini (fast and cost-effective).
    pub const GPT_4O_MINI: &str = "openai/gpt-4o-mini";
    /// OpenAI GPT-4 Turbo.
    pub const GPT_4_TURBO: &str = "openai/gpt-4-turbo";
    /// Anthropic Claude 3.5 Sonnet (via Telnyx proxy).
    pub const CLAUDE_3_5_SONNET: &str = "anthropic/claude-3.5-sonnet";
    /// Meta Llama 3.1 70B Instruct.
    pub const LLAMA_3_1_70B: &str = "meta-llama/llama-3.1-70b-instruct";
    /// Meta Llama 3.1 8B Instruct (fast).
    pub const LLAMA_3_1_8B: &str = "meta-llama/llama-3.1-8b-instruct";
    /// Mistral Large.
    pub const MISTRAL_LARGE: &str = "mistralai/mistral-large";
    /// Mistral Small (fast).
    pub const MISTRAL_SMALL: &str = "mistralai/mistral-small";
}
311
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn creates_provider_with_key() {
        let provider = TelnyxProvider::new(Some("test-key"));
        assert!(provider.api_key.is_some());
    }

    #[test]
    fn creates_provider_without_key() {
        // Key may still resolve from env vars; just check construction works.
        let _provider = TelnyxProvider::new(None);
    }

    #[test]
    fn model_constants_are_valid() {
        for (constant, prefix) in [
            (models::GPT_4O, "openai/"),
            (models::CLAUDE_3_5_SONNET, "anthropic/"),
            (models::LLAMA_3_1_70B, "meta-llama/"),
            (models::MISTRAL_LARGE, "mistralai/"),
        ] {
            assert!(constant.starts_with(prefix));
        }
    }

    #[test]
    fn resolve_key_from_parameter() {
        assert_eq!(
            resolve_telnyx_api_key(Some("direct-key")),
            Some("direct-key".to_string())
        );
    }

    #[test]
    fn resolve_key_trims_whitespace() {
        assert_eq!(
            resolve_telnyx_api_key(Some("  spaced-key  ")),
            Some("spaced-key".to_string())
        );
    }

    #[test]
    fn models_response_deserializes() {
        let json = r#"{
            "data": [
                {"id": "openai/gpt-4o"},
                {"id": "anthropic/claude-3.5-sonnet"}
            ]
        }"#;

        let parsed: ModelsResponse = serde_json::from_str(json).unwrap();
        assert_eq!(parsed.data.len(), 2);
        assert_eq!(parsed.data[0].id, "openai/gpt-4o");
    }

    #[test]
    fn chat_request_serializes() {
        let system = Message {
            role: "system".to_string(),
            content: "You are helpful.".to_string(),
        };
        let user = Message {
            role: "user".to_string(),
            content: "Hello".to_string(),
        };
        let request = ChatRequest {
            model: "openai/gpt-4o".to_string(),
            messages: vec![system, user],
            temperature: 0.7,
        };

        let json = serde_json::to_string(&request).unwrap();
        assert!(json.contains("openai/gpt-4o"));
        assert!(json.contains("system"));
        assert!(json.contains("user"));
    }

    #[test]
    fn chat_response_deserializes() {
        let json = r#"{"choices":[{"message":{"content":"Hello from Telnyx!"}}]}"#;
        let parsed: ChatResponse = serde_json::from_str(json).unwrap();
        assert_eq!(parsed.choices[0].message.content, "Hello from Telnyx!");
    }
}