// miyabi_llm_openai/lib.rs
1//! OpenAI GPT SDK for Miyabi LLM
2//!
3//! This crate provides OpenAI GPT API integration for the Miyabi LLM framework.
4//! It implements the `LlmClient` and `LlmStreamingClient` traits from `miyabi-llm-core`.
5//!
6//! # Features
7//!
8//! - GPT-4o, GPT-4o-mini, GPT-4 Turbo support
9//! - Tool/function calling
10//! - Streaming responses via SSE
11//! - Environment variable configuration
12//!
13//! # Example
14//!
15//! ```no_run
16//! use miyabi_llm_openai::OpenAIClient;
17//! use miyabi_llm_core::{LlmClient, Message};
18//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let client = OpenAIClient::from_env()?;
//!     let messages = vec![Message::user("Hello!")];
//!     let response = client.chat(messages).await?;
//!     println!("Response: {}", response);
//!     Ok(())
//! }
27//! ```
28
29mod client;
30mod types;
31
32pub use client::OpenAIClient;
33pub use types::{
34 OpenAIChoice, OpenAIFunction, OpenAIFunctionCall, OpenAIMessage, OpenAIResponse,
35 OpenAIResponseMessage, OpenAITool, OpenAIToolCall, OpenAIUsage,
36};
37
38// Re-export core types for convenience
39pub use miyabi_llm_core::{
40 LlmClient, LlmError, LlmStreamingClient, Message, Result, Role, StreamEvent, StreamResponse,
41 ToolCall, ToolCallResponse, ToolDefinition,
42};