ai_types/lib.rs
//! # ai-types
//!
//! **Write AI applications that work with any provider**
//!
//! This crate provides unified trait abstractions for AI models, letting you write code once
//! and switch between providers (`OpenAI`, `Anthropic`, local models, etc.) without changing your application logic.
//!
//! ```text
//! ┌─────────────────┐      ┌───────────────────┐      ┌─────────────────┐
//! │    Your App     │─────▶│     ai-types      │◀─────│    Providers    │
//! │                 │      │   (this crate)    │      │                 │
//! │ - Chat bots     │      │                   │      │ - openai        │
//! │ - Search        │      │ - LanguageModel   │      │ - anthropic     │
//! │ - Content gen   │      │ - EmbeddingModel  │      │ - llama.cpp     │
//! │ - Voice apps    │      │ - ImageGenerator  │      │ - whisper       │
//! └─────────────────┘      └───────────────────┘      └─────────────────┘
//! ```
//!
//! ## Supported AI Capabilities
//!
//! | Capability | Trait | Description |
//! |------------|-------|-------------|
//! | **Language Models** | [`LanguageModel`] | Text generation, conversations, structured output |
//! | **Text Streaming** | [`TextStream`] | Unified interface for streaming text responses |
//! | **Embeddings** | [`EmbeddingModel`] | Convert text to vectors for semantic search |
//! | **Image Generation** | [`ImageGenerator`] | Create images with progressive quality improvement |
//! | **Text-to-Speech** | [`AudioGenerator`] | Generate speech audio from text |
//! | **Speech-to-Text** | [`AudioTranscriber`] | Transcribe audio to text |
//! | **Content Moderation** | [`Moderation`] | Detect policy violations with confidence scores |
//!
//! ## Examples
//!
//! ### Basic Chat Bot
//!
//! ```rust
//! use ai_types::{LanguageModel, llm::{Message, Request}};
//! use futures_lite::StreamExt;
//!
//! async fn chat_example(model: impl LanguageModel) -> ai_types::Result {
//!     let messages = [
//!         Message::system("You are a helpful assistant"),
//!         Message::user("What's the capital of France?")
//!     ];
//!
//!     let request = Request::new(messages);
//!     let mut response = model.respond(request);
//!
//!     Ok(response.await?)
//! }
//! ```
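//!
//! Responses can also be consumed incrementally. A hedged sketch (marked `ignore`), assuming the
//! [`TextStream`] returned by `respond` yields `ai_types::Result<String>` chunks and is `Unpin`;
//! check [`TextStream`] for the exact item type:
//!
//! ```rust,ignore
//! use ai_types::{LanguageModel, llm::{Message, Request}};
//! use futures_lite::StreamExt;
//!
//! async fn stream_reply(model: impl LanguageModel) -> ai_types::Result<()> {
//!     let request = Request::new([Message::user("Tell me a short story")]);
//!     let mut stream = model.respond(request);
//!
//!     // Print each chunk as soon as it arrives instead of waiting for the full reply.
//!     while let Some(chunk) = stream.next().await {
//!         print!("{}", chunk?);
//!     }
//!     Ok(())
//! }
//! ```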
//!
//! ### Structured Output with Tools
//!
//! ```rust
//! use ai_types::{LanguageModel, llm::{Message, Request, Tool}};
//! use serde::{Deserialize, Serialize};
//! use schemars::JsonSchema;
//!
//! #[derive(JsonSchema, Deserialize, Serialize)]
//! struct WeatherQuery {
//!     location: String,
//!     units: Option<String>,
//! }
//!
//! struct WeatherTool;
//!
//! impl Tool for WeatherTool {
//!     const NAME: &str = "get_weather";
//!     const DESCRIPTION: &str = "Get current weather for a location";
//!     type Arguments = WeatherQuery;
//!
//!     async fn call(&mut self, args: Self::Arguments) -> ai_types::Result {
//!         Ok(format!("Weather in {}: 22°C, sunny", args.location))
//!     }
//! }
//!
//! async fn weather_bot(model: impl LanguageModel) -> ai_types::Result {
//!     let request = Request::new(vec![
//!         Message::user("What's the weather like in Tokyo?")
//!     ]).with_tool(WeatherTool);
//!
//!     // Model can now call the weather tool automatically
//!     let response: String = model.generate(request).await?;
//!     Ok(response)
//! }
//! ```
//!
//! See [`llm::tool`] for more details on using tools with language models.
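//!
//! Because `Tool::call` is an ordinary async method, a tool can also be exercised directly, which
//! keeps tool logic easy to unit test. A sketch (marked `ignore`) reusing the `WeatherTool` and
//! `WeatherQuery` types defined above:
//!
//! ```rust,ignore
//! use ai_types::llm::Tool;
//!
//! async fn check_weather_tool() -> ai_types::Result<()> {
//!     let mut tool = WeatherTool;
//!     let args = WeatherQuery { location: "Tokyo".into(), units: None };
//!
//!     // Call the tool without involving a language model.
//!     let report = tool.call(args).await?;
//!     assert!(report.contains("Tokyo"));
//!     Ok(())
//! }
//! ```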
//!
//! ### Semantic Search with Embeddings
//!
//! ```rust
//! use ai_types::EmbeddingModel;
//!
//! async fn find_similar_docs(
//!     model: impl EmbeddingModel,
//!     query: &str,
//!     documents: &[&str]
//! ) -> ai_types::Result<Vec<f32>> {
//!     // Convert query to vector
//!     let query_embedding = model.embed(query).await?;
//!
//!     // In a real app, you'd compare with document embeddings
//!     // and find the most similar ones using cosine similarity
//!
//!     Ok(query_embedding)
//! }
//! ```
//!
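//! The similarity comparison mentioned above is plain vector math and needs nothing from this
//! crate. A minimal cosine-similarity helper over `f32` slices:
//!
//! ```rust
//! // Cosine similarity between two equal-length embedding vectors.
//! // Returns 0.0 if either vector has zero magnitude.
//! fn cosine_similarity(a: &[f32], b: &[f32]) -> f32 {
//!     let dot: f32 = a.iter().zip(b).map(|(x, y)| x * y).sum();
//!     let norm_a = a.iter().map(|x| x * x).sum::<f32>().sqrt();
//!     let norm_b = b.iter().map(|x| x * x).sum::<f32>().sqrt();
//!     if norm_a == 0.0 || norm_b == 0.0 { 0.0 } else { dot / (norm_a * norm_b) }
//! }
//!
//! assert!((cosine_similarity(&[1.0, 0.0], &[1.0, 0.0]) - 1.0).abs() < 1e-6);
//! ```
//!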
//! ### Progressive Image Generation
//!
//! ```rust
//! use ai_types::{ImageGenerator, image::{Prompt, Size}};
//! use futures_lite::StreamExt;
//!
//! async fn generate_image(generator: impl ImageGenerator) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
//!     let prompt = Prompt::new("A beautiful sunset over mountains");
//!     let size = Size::square(1024);
//!
//!     let mut image_stream = generator.create(prompt, size);
//!     let mut final_image = Vec::new();
//!
//!     // Each iteration gives us a complete image with progressively better quality
//!     while let Some(image_result) = image_stream.next().await {
//!         let current_image = image_result?;
//!         final_image = current_image; // Keep the latest (highest quality) version
//!
//!         // Optional: Display preview of current quality level
//!         println!("Received image update, {} bytes", final_image.len());
//!     }
//!
//!     Ok(final_image) // Return the final highest-quality image
//! }
//! ```
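//!
//! ### Composing Capabilities
//!
//! Because every capability is a plain trait, provider-agnostic helpers can combine them. A sketch
//! that reuses only the calls shown above (`respond` and `embed`); the helper itself is
//! illustrative and not part of this crate:
//!
//! ```rust
//! use ai_types::{EmbeddingModel, LanguageModel, llm::{Message, Request}};
//!
//! async fn summarize_and_embed(
//!     model: impl LanguageModel,
//!     embedder: impl EmbeddingModel,
//!     text: &str,
//! ) -> ai_types::Result<(String, Vec<f32>)> {
//!     let request = Request::new([
//!         Message::system("Summarize the user's text in one sentence."),
//!         Message::user(text),
//!     ]);
//!
//!     // Generate a summary, then embed it for later semantic search.
//!     let summary: String = model.respond(request).await?;
//!     let vector = embedder.embed(summary.as_str()).await?;
//!     Ok((summary, vector))
//! }
//! ```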
//!

#![doc(
    html_logo_url = "https://raw.githubusercontent.com/lexoliu/ai-types/main/logo.svg",
    html_favicon_url = "https://raw.githubusercontent.com/lexoliu/ai-types/main/logo.svg"
)]
#![no_std]
extern crate alloc;

/// Audio generation and transcription.
///
/// Contains [`AudioGenerator`] and [`AudioTranscriber`] traits.
pub mod audio;
/// Text embeddings.
///
/// Contains [`EmbeddingModel`] trait for converting text to vectors.
pub mod embedding;
/// Text-to-image generation.
///
/// Contains [`ImageGenerator`] trait for creating images from text.
pub mod image;
/// Language models and conversations.
///
/// Contains [`LanguageModel`] and [`TextStream`] traits, along with message, request, and tool types.
pub mod llm;

/// Content moderation utilities.
///
/// Contains traits and types for detecting and handling unsafe or inappropriate content.
pub mod moderation;

use alloc::string::String;

#[doc(inline)]
pub use audio::{AudioGenerator, AudioTranscriber};
#[doc(inline)]
pub use embedding::EmbeddingModel;
#[doc(inline)]
pub use image::ImageGenerator;
#[doc(inline)]
pub use llm::{LanguageModel, TextStream};
#[doc(inline)]
pub use moderation::Moderation;

/// Result type used throughout the crate.
///
/// Type alias for [`anyhow::Result<T>`](anyhow::Result) with [`String`] as the default success type.
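///
/// A minimal usage sketch, relying only on the default `T = String`:
///
/// ```rust
/// fn greeting() -> ai_types::Result {
///     Ok(String::from("hello"))
/// }
///
/// assert_eq!(greeting().unwrap(), "hello");
/// ```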
pub type Result<T = String> = anyhow::Result<T>;

pub use anyhow::Error;

// Re-export procedural macros
#[cfg(feature = "derive")]
pub use crate::llm::tool::tool;