//! llmoxide 0.1.0
//!
//! Provider-agnostic Rust SDK for OpenAI, Anthropic, Gemini, and Ollama (streaming + tools).
//!
//! Documentation
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize)]
#[non_exhaustive]
pub enum Provider {
    OpenAi,
    Anthropic,
    Gemini,
    Ollama,
}

/// Default model identifier for the OpenAI provider.
pub const DEFAULT_OPENAI_MODEL: &str = "gpt-4.1-mini";
/// Default model identifier for the Anthropic provider.
pub const DEFAULT_ANTHROPIC_MODEL: &str = "claude-sonnet-4-6";
/// Default model identifier for the Gemini provider.
pub const DEFAULT_GEMINI_MODEL: &str = "gemini-2.5-flash";
/// Default model identifier for the Ollama provider.
pub const DEFAULT_OLLAMA_MODEL: &str = "llama3.1";

/// A model identifier string (e.g. `"gpt-4.1-mini"`), newtyped so it cannot be
/// confused with other plain strings in the API.
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct Model(pub String);

impl Model {
    /// Wraps any string-like value as a [`Model`].
    pub fn new(model: impl Into<String>) -> Self {
        Self(model.into())
    }

    /// Borrows the identifier as a `&str` without allocating.
    pub fn as_str(&self) -> &str {
        &self.0
    }
}

// Idiomatic conversions so callers can use `.into()` / `?` ergonomics instead of
// always going through `Model::new`.
impl From<String> for Model {
    fn from(s: String) -> Self {
        Self(s)
    }
}

impl From<&str> for Model {
    fn from(s: &str) -> Self {
        Self(s.to_owned())
    }
}

// `Display` renders the bare identifier (user-facing text); `Debug` keeps the
// derived `Model("...")` form for diagnostics.
impl std::fmt::Display for Model {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.0)
    }
}

/// Conversation role of a [`Message`].
///
/// `#[non_exhaustive]` so additional roles can be introduced without a semver break.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[non_exhaustive]
pub enum Role {
    /// System / developer instructions.
    System,
    /// End-user input.
    User,
    /// Model output.
    Assistant,
    /// A tool result turn (see [`ContentPart::ToolResult`]).
    Tool,
}

/// One piece of multimodal message content.
///
/// `#[non_exhaustive]` so new part kinds can be added without breaking downstream matches.
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
#[non_exhaustive]
pub enum ContentPart {
    /// Plain text content.
    Text(String),
    /// Image referenced by URL (multimodal **user** / **system** turns). Provider support varies.
    ImageUrl {
        url: String,
    },
    /// Inline image as base64 (multimodal **user** / **system** turns).
    ImageBase64 {
        /// IANA media type, e.g. `image/png`.
        media_type: String,
        /// Base64-encoded image bytes.
        data: String,
    },
    /// Assistant “thinking” / reasoning text. Wire format differs by provider; adapters map this
    /// where supported (e.g. Anthropic `thinking` blocks). On providers without native support it
    /// may be degraded to plain text.
    Thinking {
        text: String,
        /// Provider-issued signature over the thinking block, when one is supplied.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        signature: Option<String>,
    },
    /// Opaque citation / reference payload (document URLs, cited spans, etc.). Prefer
    /// [`Response::metadata`](Response::metadata) for response-time citation metadata until shapes
    /// converge across providers.
    Citation {
        data: serde_json::Value,
    },
    /// A provider-agnostic tool call. Some providers require that tool calls are part of the
    /// conversation history so tool results can reference them.
    ToolCall {
        /// Call id that a later [`ContentPart::ToolResult`] can reference.
        id: String,
        /// Tool / function name being invoked.
        name: String,
        /// JSON arguments payload for the call.
        arguments: serde_json::Value,
    },
    /// A provider-agnostic tool result. Provider adapters will format this appropriately.
    ToolResult {
        /// Tool call id / tool use id (provider-specific meaning).
        id: String,
        /// Function name (e.g. Gemini `functionResponse.name`). Optional for OpenAI/Anthropic.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        function_name: Option<String>,
        /// JSON result payload produced by the tool.
        content: serde_json::Value,
    },
}

/// One turn in a conversation: a [`Role`] plus an ordered list of content parts.
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub struct Message {
    /// Who produced this turn.
    pub role: Role,
    /// Ordered content parts (text, images, tool calls / results, …).
    pub content: Vec<ContentPart>,
}

impl Message {
    /// Builds a single-part message containing only `text` under the given `role`.
    pub fn text(role: Role, text: impl Into<String>) -> Self {
        let content = vec![ContentPart::Text(text.into())];
        Self { role, content }
    }

    /// Builds a `Role::Tool` message carrying one tool result without a function name.
    pub fn tool_result(id: impl Into<String>, content: serde_json::Value) -> Self {
        let part = ContentPart::ToolResult {
            id: id.into(),
            function_name: None,
            content,
        };
        Self {
            role: Role::Tool,
            content: vec![part],
        }
    }

    /// Tool result including the function name (needed for providers like Gemini).
    pub fn tool_result_named(
        id: impl Into<String>,
        function_name: impl Into<String>,
        content: serde_json::Value,
    ) -> Self {
        let part = ContentPart::ToolResult {
            id: id.into(),
            function_name: Some(function_name.into()),
            content,
        };
        Self {
            role: Role::Tool,
            content: vec![part],
        }
    }

    /// Builds an assistant message recording a single tool invocation.
    pub fn tool_call(
        id: impl Into<String>,
        name: impl Into<String>,
        arguments: serde_json::Value,
    ) -> Self {
        let part = ContentPart::ToolCall {
            id: id.into(),
            name: name.into(),
            arguments,
        };
        Self {
            role: Role::Assistant,
            content: vec![part],
        }
    }

    /// Concatenates all `Text` and `Thinking` parts in order, skipping every other
    /// part kind. Returns `None` when the result is empty.
    pub fn text_content(&self) -> Option<String> {
        // Variants are listed exhaustively (no `_` arm) so adding a new
        // `ContentPart` forces this method to be revisited.
        let joined: String = self
            .content
            .iter()
            .filter_map(|part| match part {
                ContentPart::Text(t) => Some(t.as_str()),
                ContentPart::Thinking { text, .. } => Some(text.as_str()),
                ContentPart::ImageUrl { .. }
                | ContentPart::ImageBase64 { .. }
                | ContentPart::Citation { .. }
                | ContentPart::ToolCall { .. }
                | ContentPart::ToolResult { .. } => None,
            })
            .collect();
        (!joined.is_empty()).then_some(joined)
    }
}

/// Input to a single model invocation: target model, messages, output limit, and tools.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ResponseRequest {
    /// Target model; `None` defers model selection (see [`ResponseRequest::new_auto`]).
    pub model: Option<Model>,
    /// Conversation messages sent to the provider.
    pub messages: Vec<Message>,
    /// Cap on the number of output tokens; `None` leaves it unset.
    pub max_output_tokens: Option<u32>,
    /// Tools offered to the model for this request.
    pub tools: Vec<ToolSpec>,
}

impl ResponseRequest {
    /// Creates an empty request targeting an explicit model.
    pub fn new(model: impl Into<String>) -> Self {
        // Struct-update from `new_auto` so the empty-request defaults live in one place.
        Self {
            model: Some(Model::new(model)),
            ..Self::new_auto()
        }
    }

    /// Creates an empty request without an explicit model; model selection is
    /// deferred (the `new_auto` name suggests automatic selection downstream —
    /// see the `DEFAULT_*_MODEL` constants).
    pub fn new_auto() -> Self {
        Self {
            model: None,
            messages: Vec::new(),
            max_output_tokens: None,
            tools: Vec::new(),
        }
    }

    /// Sets (or replaces) the target model. Builder-style: consumes and returns `self`.
    #[must_use]
    pub fn model(mut self, model: impl Into<String>) -> Self {
        self.model = Some(Model::new(model));
        self
    }

    /// Appends one message to the request. Builder-style: consumes and returns `self`.
    #[must_use]
    pub fn push_message(mut self, message: Message) -> Self {
        self.messages.push(message);
        self
    }

    /// Caps the number of output tokens. Builder-style: consumes and returns `self`.
    #[must_use]
    pub fn max_output_tokens(mut self, max: u32) -> Self {
        self.max_output_tokens = Some(max);
        self
    }

    /// Replaces the full tool list. Builder-style: consumes and returns `self`.
    #[must_use]
    pub fn tools(mut self, tools: Vec<ToolSpec>) -> Self {
        self.tools = tools;
        self
    }
}

impl Default for ResponseRequest {
    /// Equivalent to [`ResponseRequest::new_auto`].
    fn default() -> Self {
        Self::new_auto()
    }
}

/// Output of a model invocation.
///
/// `#[non_exhaustive]` so fields can be added without a breaking change.
#[derive(Debug, Clone, PartialEq, Eq)]
#[non_exhaustive]
pub struct Response {
    /// Model that produced this response.
    pub model: Model,
    /// The message returned by the provider.
    pub message: Message,
    /// Tool calls requested by the model, if any.
    pub tool_calls: Vec<ToolCall>,
    /// Provider-agnostic metadata / extension point. Providers may attach extra data here over time
    /// (usage, citations, safety flags, etc.) without requiring a breaking change.
    pub metadata: serde_json::Value,
    /// Provider-native response payload (opt-in; may be large).
    #[cfg(feature = "raw-json")]
    pub raw_json: Option<serde_json::Value>,
}

impl Response {
    /// Concatenated text of the response message (see [`Message::text_content`]).
    pub fn text(&self) -> Option<String> {
        self.message.text_content()
    }

    /// Borrows the provider-agnostic metadata value.
    pub fn metadata(&self) -> &serde_json::Value {
        &self.metadata
    }

    /// Replaces the tool-call list. Builder-style: consumes and returns `self`;
    /// `#[must_use]` because dropping the returned value discards the update.
    #[must_use]
    pub fn with_tool_calls(mut self, tool_calls: Vec<ToolCall>) -> Self {
        self.tool_calls = tool_calls;
        self
    }

    /// Provider-native payload; only compiled in when the `raw-json` feature is enabled.
    #[cfg(feature = "raw-json")]
    pub fn raw_json(&self) -> Option<&serde_json::Value> {
        self.raw_json.as_ref()
    }
}

/// Metadata describing one model offered by a provider.
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize)]
pub struct ModelInfo {
    /// Provider-side model identifier.
    pub id: String,
    /// Human-readable name, when the provider supplies one.
    pub display_name: Option<String>,
    /// Provider this model belongs to.
    pub provider: Provider,
    /// Creation timestamp as a string; format is presumably provider-specific — confirm per adapter.
    pub created_at: Option<String>,
    /// Maximum input (context) tokens, when reported.
    pub max_input_tokens: Option<u32>,
    /// Maximum output tokens, when reported.
    pub max_output_tokens: Option<u32>,
}

/// Declaration of a tool the model may call.
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub struct ToolSpec {
    /// Tool / function name.
    pub name: String,
    /// Human-readable description shown to the model.
    // NOTE(review): unlike the Option fields in `ContentPart`, this has no
    // `skip_serializing_if`, so `None` serializes as `null` — confirm intended.
    #[serde(default)]
    pub description: Option<String>,
    /// JSON Schema (typically draft-07-ish). Provider adapters may transform this shape.
    pub parameters: serde_json::Value,
}

/// A tool / function invocation extracted from a model response.
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub struct ToolCall {
    /// Call id; optional, presumably because some providers omit it — confirm per adapter.
    pub id: Option<String>,
    /// Tool / function name to invoke.
    pub name: String,
    /// JSON arguments payload.
    pub arguments: serde_json::Value,
}

/// Streaming event emitted during a model invocation.
///
/// `#[non_exhaustive]` so new event kinds can be added without breaking matches.
#[derive(Debug, Clone, PartialEq, Eq)]
#[non_exhaustive]
pub enum Event {
    /// Incremental text chunk.
    TextDelta(String),
    /// A tool call surfaced during streaming.
    ToolCall(ToolCall),
    /// Final event carrying the complete [`Response`].
    Completed(Response),
}