Extract structured, validated data from LLMs using native Rust types. Define your schema as structs/enums, and rstructor handles JSON Schema generation, API communication, parsing, and validation.
The Rust equivalent of Instructor for Python.
- Type-safe schemas — Define models as Rust structs/enums with derive macros
- Multi-provider — OpenAI, Anthropic, Grok (xAI), and Gemini with unified API
- Auto-validation — Type checking plus custom business rules with automatic retry
- Complex types — Nested objects, arrays, optionals, enums with associated data
- Extended thinking — Native support for reasoning models (GPT-5.2, Claude 4.5, Gemini 3)
[dependencies]
rstructor = "0.2"
serde = { version = "1.0", features = ["derive"] }
tokio = { version = "1.0", features = ["rt-multi-thread", "macros"] }

use rstructor::{Instructor, LLMClient, OpenAIClient};
use serde::{Deserialize, Serialize};
// Target schema for extraction: the Instructor derive generates the JSON
// Schema, and each #[llm(description = ...)] is surfaced to the model as
// documentation for that field.
#[derive(Instructor, Serialize, Deserialize, Debug)]
struct Movie {
#[llm(description = "Title of the movie")]
title: String,
#[llm(description = "Director of the movie")]
director: String,
// `example` supplies a concrete sample value for this field in the schema
#[llm(description = "Year released", example = 2010)]
year: u16,
}
// Minimal end-to-end example: build a client from the OPENAI_API_KEY env
// var and extract a typed `Movie` from a free-form prompt.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
// temperature 0.0 keeps extraction as deterministic as the model allows
let client = OpenAIClient::from_env()?
.temperature(0.0);
// materialize() sends the schema + prompt, then parses and validates the reply
let movie: Movie = client.materialize("Tell me about Inception").await?;
println!("{}: {} ({})", movie.title, movie.director, movie.year);
Ok(())
}use rstructor::{OpenAIClient, AnthropicClient, GrokClient, GeminiClient, LLMClient};
// OpenAI (reads OPENAI_API_KEY)
let client = OpenAIClient::from_env()?.model("gpt-5.2");
// Anthropic (reads ANTHROPIC_API_KEY)
let client = AnthropicClient::from_env()?.model("claude-sonnet-4-5-20250929");
// Grok/xAI (reads XAI_API_KEY)
let client = GrokClient::from_env()?.model("grok-4-1-fast-non-reasoning");
// Gemini (reads GEMINI_API_KEY)
let client = GeminiClient::from_env()?.model("gemini-3-flash-preview");
// Custom endpoint (local LLMs, proxies)
let client = OpenAIClient::new("key")?
.base_url("http://localhost:1234/v1")
.model("llama-3.1-70b");

Add custom validation with automatic retry on failure:
use rstructor::{Instructor, RStructorError, Result};
// Same Movie model, now with a business-rule validator attached: rstructor
// runs validate_movie() after parsing, and a ValidationError triggers an
// automatic retry with the error fed back to the model.
#[derive(Instructor, Serialize, Deserialize)]
#[llm(validate = "validate_movie")]
struct Movie {
title: String,
year: u16,
rating: f32,
}
/// Business-rule validator for `Movie`, wired up via
/// `#[llm(validate = "validate_movie")]`. Any `ValidationError` returned
/// here is reported back to the LLM so the retry can correct the output.
fn validate_movie(movie: &Movie) -> Result<()> {
    // Reject years outside a plausible window (first films date from ~1888).
    if !(1888..=2030).contains(&movie.year) {
        return Err(RStructorError::ValidationError(
            format!("Invalid year: {}", movie.year),
        ));
    }
    // Explicit comparisons (not a range check) so a NaN rating is not
    // rejected here — matches the original comparison semantics.
    if movie.rating < 0.0 || movie.rating > 10.0 {
        return Err(RStructorError::ValidationError(
            format!("Rating must be 0-10, got {}", movie.rating),
        ));
    }
    Ok(())
}
// Retries are enabled by default (3 attempts with error feedback)
// To increase retries:
let client = OpenAIClient::from_env()?.max_retries(5);
// To disable retries:
let client = OpenAIClient::from_env()?.no_retries();

#[derive(Instructor, Serialize, Deserialize)]
// Nested object: used as the element type of Recipe's `ingredients` vector.
struct Ingredient {
name: String,
amount: f32,
unit: String,
}
// Container model demonstrating nested objects via Vec<Ingredient>.
#[derive(Instructor, Serialize, Deserialize)]
struct Recipe {
name: String,
ingredients: Vec<Ingredient>,
prep_time_minutes: u16,
}#[derive(Instructor, Serialize, Deserialize)]
// Enum with associated data: struct-like, tuple, and unit variants are all
// representable in the generated schema.
enum PaymentMethod {
#[llm(description = "Credit card payment")]
Card { number: String, expiry: String },
#[llm(description = "PayPal account")]
PayPal(String),
#[llm(description = "Cash on delivery")]
CashOnDelivery,
}rstructor respects #[serde(rename)] and #[serde(rename_all)] attributes:
// Container-level #[serde(rename_all)] is honored: schema keys come out
// camelCase here.
#[derive(Instructor, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct UserProfile {
first_name: String, // becomes "firstName" in schema
last_name: String, // becomes "lastName" in schema
email_address: String, // becomes "emailAddress" in schema
}
// Per-field #[serde(rename)] lets the JSON key be a Rust keyword ("type")
// while the struct field keeps a legal Rust name.
#[derive(Instructor, Serialize, Deserialize)]
struct CommitMessage {
#[serde(rename = "type")] // use "type" as JSON key
commit_type: String,
description: String,
}
// rename_all applies to enum variant names in the schema as well.
#[derive(Instructor, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
enum CommitType {
Fix, // becomes "fix"
Feat, // becomes "feat"
Refactor, // becomes "refactor"
}Supported case conversions: lowercase, UPPERCASE, camelCase, PascalCase, snake_case, SCREAMING_SNAKE_CASE, kebab-case, SCREAMING-KEBAB-CASE.
use chrono::{DateTime, Utc};
use rstructor::schema::CustomTypeSchema;
// Map chrono's DateTime<Utc> to a JSON Schema "string" with format
// "date-time" so it can be used as a field type in Instructor models.
impl CustomTypeSchema for DateTime<Utc> {
fn schema_type() -> &'static str { "string" }
fn schema_format() -> Option<&'static str> { Some("date-time") }
}
// Model using the custom-schema DateTime<Utc> type as an ordinary field.
#[derive(Instructor, Serialize, Deserialize)]
struct Event {
name: String,
start_time: DateTime<Utc>,
}Configure reasoning depth for supported models:
use rstructor::ThinkingLevel;
// GPT-5.2, Claude 4.5 (Sonnet/Opus), Gemini 3
let client = OpenAIClient::from_env()?
.model("gpt-5.2")
.thinking_level(ThinkingLevel::High);
// Levels: Off, Minimal, Low, Medium, High

let result = client.materialize_with_metadata::<Movie>("...").await?;
println!("Movie: {}", result.data.title);
if let Some(usage) = result.usage {
println!("Tokens: {} in, {} out", usage.input_tokens, usage.output_tokens);
}

use rstructor::{ApiErrorKind, RStructorError};
match client.materialize::<Movie>("...").await {
Ok(movie) => println!("{:?}", movie),
Err(e) if e.is_retryable() => {
println!("Transient error: {}", e);
if let Some(delay) = e.retry_delay() {
tokio::time::sleep(delay).await;
}
}
Err(e) => match e.api_error_kind() {
Some(ApiErrorKind::RateLimited { retry_after }) => { /* ... */ }
Some(ApiErrorKind::AuthenticationFailed) => { /* ... */ }
_ => eprintln!("Error: {}", e),
}
}

[dependencies]
rstructor = { version = "0.2", features = ["openai", "anthropic", "grok", "gemini"] }

Available feature flags:
- openai, anthropic, grok, gemini — Provider backends
- derive — Derive macro (default)
- logging — Tracing integration
See examples/ for complete working examples:
export OPENAI_API_KEY=your_key
cargo run --example structured_movie_info
cargo run --example nested_objects_example
cargo run --example enum_with_data_example
cargo run --example serde_rename_example

If you're coming from Python and searching for:
- "pydantic rust" or "rust pydantic" — rstructor provides similar schema validation and type safety
- "instructor rust" or "rust instructor" — same structured LLM output extraction pattern
- "structured output rust" or "llm structured output" — exactly what rstructor does
- "type-safe llm rust" — ensures type safety from LLM responses to Rust structs
MIT — see LICENSE