//! mini_langchain/llm.rs — LLM provider abstraction: shared result types and provider submodules.

1pub mod traits;
2pub mod openai;
3pub mod anthropic;
4pub mod qwen;
5pub mod deepseek;
6pub mod ollama;
7pub mod tokens;
8pub mod error;
9
10
11use serde::{Serialize, Deserialize};
12use serde_json::Value as JsonValue;
13use tokens::TokenUsage;
14
/// Result of a text generation from an LLM.
///
/// Aggregates the model's textual output, token accounting, and any tool
/// calls the model requested during the generation.
#[derive(Debug, Serialize, Deserialize, Clone, Default)]
pub struct GenerateResult {
    // Token accounting for this generation; exact fields are defined in the
    // `tokens` module (presumably prompt/completion counts — confirm there).
    pub tokens: TokenUsage,
    // The generated text returned by the model.
    pub generation: String,
    /// Optional structured tool calls the LLM signaled during this generation.
    /// Each entry contains the tool name and the arguments object the LLM wants
    /// the agent to pass when invoking that tool.
    // `#[serde(default)]` makes this field optional on deserialization: when
    // absent it becomes an empty `Vec` instead of a parse error.
    #[serde(default)]
    pub call_tools: Vec<CallInfo>,
}
26
27/// Structured information about a single tool call requested by the LLM.
28#[derive(Debug, Serialize, Deserialize, Clone)]
29pub struct CallInfo {
30    pub name: String,
31    #[serde(default)]
32    pub args: JsonValue,
33}
34
/// Result type for LLM operations.
///
/// Shorthand for `std::result::Result` with the error type fixed to
/// `error::LLMError`, so provider implementations share one failure type.
pub type LLMResult<T> = std::result::Result<T, error::LLMError>;