pub struct Client { /* private fields */ }
LLM client for structured data extraction.
Supports OpenAI (via structured output), Anthropic (via tool use), and any OpenAI-compatible API.
Implementations
impl Client
pub fn openai(api_key: impl Into<String>) -> Self
Create a client for OpenAI models.
let client = instructors::Client::openai("sk-...");

pub fn anthropic(api_key: impl Into<String>) -> Self
Create a client for Anthropic models.
let client = instructors::Client::anthropic("sk-ant-...");

pub fn anthropic_compatible(
    api_key: impl Into<String>,
    base_url: impl Into<String>,
) -> Self
Create a client for any Anthropic-compatible API.
let client = instructors::Client::anthropic_compatible(
"sk-ant-...",
"https://custom-anthropic-proxy.example.com/v1",
);

pub fn openai_compatible(
    api_key: impl Into<String>,
    base_url: impl Into<String>,
) -> Self
Create a client for any OpenAI-compatible API (e.g. DeepSeek, Together, local).
let client = instructors::Client::openai_compatible(
"sk-...",
"https://api.deepseek.com/v1",
);

pub fn with_model(self, model: impl Into<String>) -> Self
Set the default model for all extractions.
pub fn with_system(self, system: impl Into<String>) -> Self
Set the default system prompt.
pub fn with_max_retries(self, retries: u32) -> Self
Set the default max retries on parse/validation failure.
pub fn with_temperature(self, temp: f64) -> Self
Set the default temperature.
pub fn with_max_tokens(self, tokens: u32) -> Self
Set the default max output tokens.
pub fn extract_batch<T>(&self, prompts: Vec<String>) -> BatchBuilder<'_, T>
Begin a batch extraction over multiple prompts with configurable concurrency.
Returns a [BatchBuilder] that processes prompts concurrently.
#[derive(Deserialize, JsonSchema)]
struct Contact { name: String }
let client = instructors::Client::openai("sk-...");
let prompts = vec!["John Doe".into(), "Jane Smith".into()];
let results = client.extract_batch::<Contact>(prompts)
.concurrency(5)
.run()
.await;
for result in results {
println!("{}", result?.value.name);
}

pub fn extract_many<T>(
    &self,
    prompt: impl Into<String>,
) -> ExtractBuilder<'_, Vec<T>>
where
    T: DeserializeOwned + JsonSchema,
Extract a list of items from the prompt.
Internally wraps the target type in a Vec<T> for the LLM to populate.
#[derive(Deserialize, JsonSchema)]
struct Entity { name: String, entity_type: String }
let client = instructors::Client::openai("sk-...");
let entities: Vec<Entity> = client
.extract_many("Apple CEO Tim Cook met with Google CEO Sundar Pichai")
.await?.value;

pub fn extract<T>(&self, prompt: impl Into<String>) -> ExtractBuilder<'_, T>
where
    T: DeserializeOwned + JsonSchema,
Begin an extraction request. The return type T must implement
serde::Deserialize and schemars::JsonSchema.
#[derive(Deserialize, JsonSchema)]
struct Contact { name: String }
let client = instructors::Client::openai("sk-...");
let contact: Contact = client.extract("John Doe, john@example.com").await?.value;