pub struct Client { /* private fields */ }
LLM client for structured data extraction.
Supports OpenAI (via structured output), Anthropic (via tool use), Google Gemini (via response_schema), and any compatible API.
Implementations§
impl Client
impl Client
pub fn openai(api_key: impl Into<String>) -> Self
pub fn openai(api_key: impl Into<String>) -> Self
Create a client for OpenAI models.
let client = instructors::Client::openai("sk-...");

pub fn anthropic(api_key: impl Into<String>) -> Self
pub fn anthropic(api_key: impl Into<String>) -> Self
Create a client for Anthropic models.
let client = instructors::Client::anthropic("sk-ant-...");

pub fn anthropic_compatible(
api_key: impl Into<String>,
base_url: impl Into<String>,
) -> Self
pub fn anthropic_compatible( api_key: impl Into<String>, base_url: impl Into<String>, ) -> Self
Create a client for any Anthropic-compatible API.
let client = instructors::Client::anthropic_compatible(
"sk-ant-...",
"https://custom-anthropic-proxy.example.com/v1",
);

pub fn openai_compatible(
api_key: impl Into<String>,
base_url: impl Into<String>,
) -> Self
pub fn openai_compatible( api_key: impl Into<String>, base_url: impl Into<String>, ) -> Self
Create a client for any OpenAI-compatible API (e.g. DeepSeek, Together, local).
let client = instructors::Client::openai_compatible(
"sk-...",
"https://api.deepseek.com/v1",
);

pub fn gemini(api_key: impl Into<String>) -> Self
pub fn gemini(api_key: impl Into<String>) -> Self
Create a client for Google Gemini models.
let client = instructors::Client::gemini("AIza...");

pub fn gemini_compatible(
api_key: impl Into<String>,
base_url: impl Into<String>,
) -> Self
pub fn gemini_compatible( api_key: impl Into<String>, base_url: impl Into<String>, ) -> Self
Create a client for any Gemini-compatible API.
let client = instructors::Client::gemini_compatible(
"AIza...",
"https://custom-gemini-proxy.example.com/v1beta",
);

pub fn with_model(self, model: impl Into<String>) -> Self
pub fn with_model(self, model: impl Into<String>) -> Self
Set the default model for all extractions.
pub fn with_system(self, system: impl Into<String>) -> Self
pub fn with_system(self, system: impl Into<String>) -> Self
Set the default system prompt.
pub fn with_max_retries(self, retries: u32) -> Self
pub fn with_max_retries(self, retries: u32) -> Self
Set the default max retries on parse/validation failure.
pub fn with_temperature(self, temp: f64) -> Self
pub fn with_temperature(self, temp: f64) -> Self
Set the default temperature.
pub fn with_fallback(self, fallback: Client) -> Self
pub fn with_fallback(self, fallback: Client) -> Self
Add a fallback client. If the primary provider fails after exhausting retries, the request is retried using the fallback client. Multiple fallbacks can be chained and are tried in order.
pub fn with_max_tokens(self, tokens: u32) -> Self
pub fn with_max_tokens(self, tokens: u32) -> Self
Set the default max output tokens.
pub fn with_retry_backoff(self, config: BackoffConfig) -> Self
pub fn with_retry_backoff(self, config: BackoffConfig) -> Self
Enable exponential backoff for retryable HTTP errors (429, 503).
When configured, HTTP 429 (Too Many Requests) and 503 (Service Unavailable) errors are retried with exponential backoff before being treated as failures.
pub fn with_timeout(self, timeout: Duration) -> Self
pub fn with_timeout(self, timeout: Duration) -> Self
Set the overall request timeout. Default: 60 seconds.
This timeout covers the entire extraction including all retries, backoff, and fallback attempts.
pub fn extract_batch<T>(&self, prompts: Vec<String>) -> BatchBuilder<'_, T>
pub fn extract_batch<T>(&self, prompts: Vec<String>) -> BatchBuilder<'_, T>
Begin a batch extraction over multiple prompts with configurable concurrency.
Returns a crate::BatchBuilder that processes prompts concurrently.
#[derive(Deserialize, JsonSchema)]
struct Contact { name: String }
let client = instructors::Client::openai("sk-...");
let prompts = vec!["John Doe".into(), "Jane Smith".into()];
let results = client.extract_batch::<Contact>(prompts)
.concurrency(5)
.run()
.await;
for result in results {
println!("{}", result?.value.name);
}

pub fn extract_many<T>(
&self,
prompt: impl Into<String>,
) -> ExtractBuilder<'_, Vec<T>>where
T: DeserializeOwned + JsonSchema,
pub fn extract_many<T>(
&self,
prompt: impl Into<String>,
) -> ExtractBuilder<'_, Vec<T>>where
T: DeserializeOwned + JsonSchema,
Extract a list of items from the prompt.
Internally wraps the target type in a Vec<T> for the LLM to populate.
#[derive(Deserialize, JsonSchema)]
struct Entity { name: String, entity_type: String }
let client = instructors::Client::openai("sk-...");
let entities: Vec<Entity> = client
.extract_many("Apple CEO Tim Cook met with Google CEO Sundar Pichai")
.await?.value;

pub fn extract<T>(&self, prompt: impl Into<String>) -> ExtractBuilder<'_, T>where
T: DeserializeOwned + JsonSchema,
pub fn extract<T>(&self, prompt: impl Into<String>) -> ExtractBuilder<'_, T>where
T: DeserializeOwned + JsonSchema,
Begin an extraction request. The return type T must implement
serde::Deserialize and schemars::JsonSchema.
#[derive(Deserialize, JsonSchema)]
struct Contact { name: String }
let client = instructors::Client::openai("sk-...");
let contact: Contact = client.extract("John Doe, john@example.com").await?.value;