// llm_kit_azure/lib.rs
1//! # Azure OpenAI Provider for LLM Kit
2//!
3//! This crate provides an Azure OpenAI provider implementation for the LLM Kit.
4//! It allows you to use Azure OpenAI models for text generation, embeddings,
5//! and image generation.
6//!
7//! ## Features
8//!
9//! - **Chat Models**: GPT-4, GPT-3.5-turbo, and other chat models
10//! - **Completion Models**: GPT-3.5-turbo-instruct and other completion models
11//! - **Embedding Models**: text-embedding-ada-002 and other embedding models
12//! - **Image Models**: DALL-E 3 and other image generation models
13//! - **Azure-specific Authentication**: Uses `api-key` header
14//! - **Flexible URL Formats**: Supports both v1 API and deployment-based URLs
15//!
16//! ## Quick Start (Recommended: Builder Pattern)
17//!
18//! ```no_run
19//! use llm_kit_azure::AzureClient;
20//! use llm_kit_provider::LanguageModel;
21//!
22//! #[tokio::main]
23//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
24//! // Create Azure OpenAI provider using the builder
25//! let provider = AzureClient::new()
26//! .resource_name("my-azure-resource")
27//! .api_key("your-api-key")
28//! .build();
29//!
30//! // Get a chat model using your deployment name
31//! let model = provider.chat_model("gpt-4-deployment");
32//!
33//! println!("Model: {}", model.model_id());
34//! println!("Provider: {}", model.provider());
35//!
36//! Ok(())
37//! }
38//! ```
39//!
40//! ## Alternative: Direct Instantiation
41//!
42//! ```no_run
43//! use llm_kit_azure::{AzureOpenAIProvider, AzureOpenAIProviderSettings};
44//! use llm_kit_provider::LanguageModel;
45//!
46//! #[tokio::main]
47//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
48//! // Create provider with settings
49//! let provider = AzureOpenAIProvider::new(
50//! AzureOpenAIProviderSettings::new()
51//! .with_resource_name("my-azure-resource")
52//! .with_api_key("your-api-key")
53//! );
54//!
55//! let model = provider.chat_model("gpt-4-deployment");
56//!
57//! println!("Model: {}", model.model_id());
58//! Ok(())
59//! }
60//! ```
61//!
62//! ## Configuration Options
63//!
64//! ### Using Resource Name
65//!
66//! ```no_run
67//! use llm_kit_azure::AzureClient;
68//!
69//! let provider = AzureClient::new()
70//! .resource_name("my-resource")
71//! .api_key("key")
72//! .build();
73//! ```
74//!
75//! ### Using Custom Base URL
76//!
77//! ```no_run
78//! use llm_kit_azure::AzureClient;
79//!
80//! let provider = AzureClient::new()
81//! .base_url("https://my-resource.openai.azure.com/openai")
82//! .api_key("key")
83//! .build();
84//! ```
85//!
86//! ### With Custom API Version
87//!
88//! ```no_run
89//! use llm_kit_azure::AzureClient;
90//!
91//! let provider = AzureClient::new()
92//! .resource_name("my-resource")
93//! .api_key("key")
94//! .api_version("2024-02-15-preview")
95//! .build();
96//! ```
97//!
98//! ### With Custom Headers
99//!
100//! ```no_run
101//! use llm_kit_azure::AzureClient;
102//!
103//! let provider = AzureClient::new()
104//! .resource_name("my-resource")
105//! .api_key("key")
106//! .header("X-Custom-Header", "value")
107//! .build();
108//! ```
109//!
110//! ### With Deployment-Based URLs (Legacy Format)
111//!
112//! ```no_run
113//! use llm_kit_azure::AzureClient;
114//!
115//! let provider = AzureClient::new()
116//! .resource_name("my-resource")
117//! .api_key("key")
118//! .use_deployment_based_urls(true)
119//! .build();
120//! ```
121//!
122//! ## URL Formats
123//!
124//! Azure OpenAI supports two URL formats:
125//!
126//! ### V1 API Format (Default)
127//! ```text
128//! https://{resource}.openai.azure.com/openai/v1{path}?api-version={version}
129//! ```
130//!
131//! ### Deployment-Based Format (Legacy)
132//! ```text
133//! https://{resource}.openai.azure.com/openai/deployments/{deployment}{path}?api-version={version}
134//! ```
135//!
//! Use `.use_deployment_based_urls(true)` to enable the legacy format.
137//!
138//! ## Environment Variables
139//!
140//! The provider will read from these environment variables if not explicitly configured:
141//!
142//! - `AZURE_API_KEY` - API key for authentication
143//! - `AZURE_RESOURCE_NAME` - Azure OpenAI resource name
144//!
145//! ## Model Types
146//!
147//! ### Chat Models
148//! Use `.chat_model()` or `.model()` for conversational AI:
149//! ```no_run
150//! # use llm_kit_azure::AzureClient;
151//! # let provider = AzureClient::new().resource_name("test").api_key("key").build();
152//! let model = provider.chat_model("gpt-4-deployment");
153//! ```
154//!
155//! ### Completion Models
156//! Use `.completion_model()` for text completion:
157//! ```no_run
158//! # use llm_kit_azure::AzureClient;
159//! # let provider = AzureClient::new().resource_name("test").api_key("key").build();
160//! let model = provider.completion_model("gpt-35-turbo-instruct");
161//! ```
162//!
163//! ### Embedding Models
164//! Use `.text_embedding_model()` for embeddings:
165//! ```no_run
166//! # use llm_kit_azure::AzureClient;
167//! # let provider = AzureClient::new().resource_name("test").api_key("key").build();
168//! let model = provider.text_embedding_model("text-embedding-ada-002");
169//! ```
170//!
171//! ### Image Models
172//! Use `.image_model()` for image generation:
173//! ```no_run
174//! # use llm_kit_azure::AzureClient;
175//! # let provider = AzureClient::new().resource_name("test").api_key("key").build();
176//! let model = provider.image_model("dall-e-3");
177//! ```
178
179mod client;
180mod provider;
181mod settings;
182
183pub use client::AzureClient;
184pub use provider::AzureOpenAIProvider;
185pub use settings::AzureOpenAIProviderSettings;
186
#[cfg(test)]
mod tests {
    use super::*;
    // `provider()` and `model_id()` on the model handles are methods of the
    // `LanguageModel` trait (the crate-level doc examples import it for the
    // same calls), so the trait must be in scope for the assertions below.
    use llm_kit_provider::LanguageModel;

    /// Building a provider via the `AzureClient` builder yields a provider
    /// whose name is "azure".
    #[test]
    fn test_azure_client_builder() {
        let provider = AzureClient::new()
            .resource_name("test-resource")
            .api_key("test-key")
            .build();

        assert_eq!(provider.name(), "azure");
    }

    /// Constructing `AzureOpenAIProvider` directly from settings is
    /// equivalent to the builder path: the provider is still named "azure".
    #[test]
    fn test_provider_direct_instantiation() {
        let provider = AzureOpenAIProvider::new(
            AzureOpenAIProviderSettings::new()
                .with_resource_name("test-resource")
                .with_api_key("test-key"),
        );

        assert_eq!(provider.name(), "azure");
    }

    /// Each model factory method returns a handle carrying the requested
    /// model id and a provider string namespaced by model kind
    /// (`azure.chat`, `azure.completion`, `azure.embedding`, `azure.image`).
    #[test]
    fn test_provider_methods() {
        let provider = AzureClient::new()
            .resource_name("test-resource")
            .api_key("test-key")
            .build();

        // Chat model
        let chat_model = provider.chat_model("gpt-4");
        assert_eq!(chat_model.provider(), "azure.chat");
        assert_eq!(chat_model.model_id(), "gpt-4");

        // Completion model
        let completion_model = provider.completion_model("gpt-35-turbo-instruct");
        assert_eq!(completion_model.provider(), "azure.completion");
        assert_eq!(completion_model.model_id(), "gpt-35-turbo-instruct");

        // Embedding model
        let embedding_model = provider.text_embedding_model("text-embedding-ada-002");
        assert_eq!(embedding_model.provider(), "azure.embedding");
        assert_eq!(embedding_model.model_id(), "text-embedding-ada-002");

        // Image model
        let image_model = provider.image_model("dall-e-3");
        assert_eq!(image_model.provider(), "azure.image");
        assert_eq!(image_model.model_id(), "dall-e-3");
    }
}
239}