1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
//! rstructor: A Rust library for structured outputs from LLMs
//!
//! # Overview
//!
//! rstructor simplifies getting validated, strongly-typed outputs from Large Language Models
//! (LLMs) like GPT-4 and Claude. It automatically generates JSON Schema from your Rust types,
//! sends the schema to LLMs, parses responses, and validates against the schema.
//!
//! Key features:
//! - Derive macro for automatic JSON Schema generation
//! - Built-in OpenAI and Anthropic API clients
//! - Validation of responses against schemas
//! - Type-safe conversion from LLM outputs to Rust structs and enums
//! - Customizable client configurations
//!
//! # Quick Start
//!
//! ```no_run
//! use rstructor::{LLMClient, OpenAIClient, Instructor};
//! use serde::{Serialize, Deserialize};
//!
//! #[derive(Instructor, Serialize, Deserialize, Debug)]
//! struct Person {
//!     name: String,
//!     age: u8,
//!     bio: String,
//! }
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     // Create a client
//!     let client = OpenAIClient::new("your-openai-api-key")?;
//!
//!     // Generate a structured response
//!     let person: Person = client.materialize("Describe a fictional person").await?;
//!
//!     println!("Name: {}", person.name);
//!     println!("Age: {}", person.age);
//!     println!("Bio: {}", person.bio);
//!
//!     Ok(())
//! }
//! ```
// Re-exports for convenience
//
// NOTE(review): these `use` declarations are corrupted — the source paths
// have been lost. `pub use ;` has no path at all and will not compile, and
// `pub use Instructor;` / `pub use LLMClient;` / `pub use ModelInfo;` /
// `pub use ThinkingLevel;` re-import a name as itself, which cannot resolve.
// Presumably each line originally re-exported an item from an internal
// module (e.g. something like `error`, `model`, `schema`, or a client
// backend module) and, for one of the two `Instructor` lines, the derive
// macro from a companion proc-macro crate (a trait and a derive macro may
// legitimately share the name `Instructor`, as they live in different
// namespaces). Recover the exact paths from version control — do not guess
// them, since the public API surface of the crate depends on them.
pub use ;
pub use Instructor;
pub use ;
pub use ;
pub use ;
pub use ;
pub use ;
pub use Instructor;
pub use LLMClient;
pub use ModelInfo;
pub use ThinkingLevel;
pub use ;