Expand description
llmoxide is a small, provider-agnostic Rust SDK surface for LLM APIs.
This crate will provide a normalized interface across providers (starting with OpenAI and Anthropic), with robust streaming, retries, and tracing hooks.
Re-exports§
pub use crate::chat::Chat;
pub use crate::chat::ChatSession;
pub use crate::client::Client;
pub use crate::client::ClientConfig;
pub use crate::error::Error;
pub use crate::error::Result;
pub use crate::providers::ListModels;
pub use crate::types::ContentPart;
pub use crate::types::DEFAULT_ANTHROPIC_MODEL;
pub use crate::types::DEFAULT_GEMINI_MODEL;
pub use crate::types::DEFAULT_OLLAMA_MODEL;
pub use crate::types::DEFAULT_OPENAI_MODEL;
pub use crate::types::Event;
pub use crate::types::Message;
pub use crate::types::Model;
pub use crate::types::ModelInfo;
pub use crate::types::Provider;
pub use crate::types::Response;
pub use crate::types::ResponseRequest;
pub use crate::types::Role;
pub use crate::types::ToolCall;
pub use crate::types::ToolSpec;
Modules§
Structs§
- Prompt — Send a single user string per request, or call `Prompt::list_models`.