// openmodex 0.1.1
//
// Official Rust SDK for the OpenModex API.
// (Crate metadata header; full documentation in the `//!` module docs below.)
//! # OpenModex SDK for Rust
//!
//! Official Rust SDK for the [OpenModex API](https://docs.openmodex.com).
//!
//! OpenModex is a unified API gateway for accessing multiple LLM providers
//! through a single OpenAI-compatible interface.
//!
//! ## Quick Start
//!
//! ```no_run
//! use openmodex::{OpenModex, ChatCompletionRequest, ChatMessage, Error};
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Error> {
//!     let client = OpenModex::new("omx_sk_...")?;
//!
//!     let response = client.chat().completions().create(
//!         ChatCompletionRequest::new("gpt-4o")
//!             .message(ChatMessage::user("Hello!"))
//!             .temperature(0.7)
//!     ).await?;
//!
//!     println!("{}", response.choices[0].message.as_ref()
//!         .and_then(|m| m.content.as_deref())
//!         .unwrap_or(""));
//!
//!     Ok(())
//! }
//! ```
//!
//! ## Streaming
//!
//! ```no_run
//! use openmodex::{OpenModex, ChatCompletionRequest, ChatMessage, Error};
//! use futures::StreamExt;
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Error> {
//!     let client = OpenModex::new("omx_sk_...")?;
//!
//!     let mut stream = client.chat().completions().create_stream(
//!         ChatCompletionRequest::new("gpt-4o")
//!             .message(ChatMessage::user("Tell me a story"))
//!     ).await?;
//!
//!     while let Some(chunk) = stream.next().await {
//!         let chunk = chunk?;
//!         if let Some(content) = chunk.choices.first()
//!             .and_then(|c| c.delta.content.as_ref())
//!         {
//!             print!("{content}");
//!         }
//!     }
//!
//!     Ok(())
//! }
//! ```

// Internal module tree. All modules are private; the public API surface is
// assembled from them via the `pub use` re-exports below.
mod chat; // chat completions service (`ChatService`, `ChatCompletions`)
mod client; // HTTP client and builder (`OpenModex`, `ClientBuilder`)
mod completion; // legacy text-completions service (`CompletionService`)
mod embedding; // embeddings service (`EmbeddingService`)
mod error; // error types (`Error`, `ApiError`)
mod model; // model listing/comparison service (`ModelService`)
mod streaming; // SSE streaming support (`ChatCompletionStream`)
mod types; // request/response domain types shared across services

// Re-export the primary client type plus client-level constants, so callers
// can write `openmodex::OpenModex` without reaching into private modules.
pub use client::{ClientBuilder, OpenModex, DEFAULT_BASE_URL, VERSION};

// Re-export error types. `Error` is the crate-wide error returned by all
// fallible SDK calls; `ApiError` carries server-reported failure details.
pub use error::{ApiError, Error};

// Re-export all domain types (requests, responses, and their nested parts)
// at the crate root for a flat, discoverable public API.
pub use types::{
    CacheConfig, ChatChoice, ChatCompletionChunk, ChatCompletionRequest, ChatCompletionResponse,
    ChatMessage, ComparisonHighlights, ComparisonItem, ComparisonPerformance, ComparisonPricing,
    ComparisonQuality, CompletionChoice, CompletionRequest, CompletionResponse, EmbeddingData,
    EmbeddingRequest, EmbeddingResponse, Modalities, Model, ModelCompareResponse, ModelListResponse,
    ModelPerformance, ModelPricing, ModelUsageStats, OpenModexMetadata, QualityScores, RoutingConfig,
    StreamChoice, StreamDelta, Usage,
};

// Re-export the streaming type yielded by `create_stream` (see module docs).
pub use streaming::ChatCompletionStream;

// Re-export service types so they appear in rustdoc and can be named by
// callers (e.g. when storing a service handle in a struct field).
pub use chat::{ChatCompletions, ChatService};
pub use completion::CompletionService;
pub use embedding::EmbeddingService;
pub use model::ModelService;