// agentix — crate root (src/lib.rs)
/*!
agentix — Multi-provider LLM agent framework for Rust.

Supports DeepSeek, OpenAI, Anthropic, and Gemini out of the box.
Agents are actor-style handles; multiple agents wire together into a
[`Graph`] via typed [`Msg`] channels.

# Quickstart

```no_run
use agentix::Msg;

#[tokio::main]
async fn main() {
    let agent = agentix::deepseek(std::env::var("DEEPSEEK_API_KEY").unwrap())
        .system_prompt("You are helpful.")
        .max_tokens(1024);

    let mut rx = agent.subscribe();
    agent.send("Hello!").await;

    while let Ok(msg) = rx.recv().await {
        match msg {
            Msg::Token(t) => print!("{t}"),
            Msg::Done     => break,
            _             => {}
        }
    }
}
```

# Multi-agent pipeline

```no_run
use agentix::{Graph, Node, PromptTemplate, OutputParser, Msg};

#[tokio::main]
async fn main() {
    let prompt  = PromptTemplate::new("Score this review 0-10:\n{input}");
    let scorer  = agentix::deepseek(std::env::var("KEY").unwrap())
                    .system_prompt("Respond with only JSON: {\"score\": N}");
    let parser  = OutputParser::new(|s| {
        serde_json::from_str::<serde_json::Value>(&s)
            .ok()
            .and_then(|v| v["score"].as_i64().map(|n| n.to_string()))
            .unwrap_or_else(|| "0".into())
    });

    let _handle = Graph::new()
        .middleware(|msg| { eprintln!("[edge] {msg:?}"); Some(msg) })
        .edge(&prompt, &scorer)
        .edge(&scorer, &parser)
        .into_handle();

    prompt.input()
        .send(Msg::User(vec!["Great product, fast shipping!".into()]))
        .await
        .unwrap();
}
```

# Assembled vs streaming events

Every [`EventBus`] can be consumed two ways:

```no_run
# use agentix::Msg;
# use futures::StreamExt;
# #[tokio::main] async fn main() {
let agent = agentix::deepseek(std::env::var("KEY").unwrap());

// Raw — one Token per streaming chunk
let mut rx = agent.subscribe();

// Assembled — many Token chunks folded into one Token(full_text)
let mut stream = Box::pin(agent.event_bus().subscribe_assembled());
while let Some(msg) = stream.next().await {
    match msg {
        Msg::Token(full) => println!("{full}"),
        Msg::Done        => break,
        _                => {}
    }
}
# }
```
*/

88pub mod bus;
89pub mod client;
90pub mod config;
91pub mod error;
92pub mod markers;
93pub mod memory;
94pub mod msg;
95pub mod provider;
96pub mod raw;
97pub mod request;
98pub mod tool_trait;
99pub mod types;
100mod agent;
101pub mod context;
102pub mod node;
103
104#[cfg(feature = "mcp")]
105pub mod mcp;
106#[cfg(feature = "mcp-server")]
107pub mod mcp_server;
108
109// ── Public API ────────────────────────────────────────────────────────────────
110
111pub use agent::Agent;
112pub use bus::EventBus;
113pub use client::LlmClient;
114pub use config::AgentConfig;
115pub use error::ApiError;
116pub use memory::{InMemory, Memory, SlidingWindow};
117pub use msg::{CustomMsg, Msg};
118pub use provider::{AnthropicProvider, DeepSeekProvider, GeminiProvider, OpenAIProvider, Provider};
119pub use request::{ImageContent, ImageData, Message, Request, ResponseFormat, ToolChoice, UserContent};
120pub use context::SharedContext;
121pub use node::{Graph, GraphHandle, MiddlewareFn, Node, OutputParser, PromptTemplate};
122pub use tool_trait::{Tool, ToolBundle};
123
124pub use agentix_macros::tool;
125pub use schemars;
126pub use serde;
127pub use serde_json;
128pub use async_trait;
129
130#[cfg(feature = "mcp")]
131pub use mcp::McpTool;
132#[cfg(feature = "mcp-server")]
133pub use mcp_server::{McpServer, McpServerError};
134
// ── Free-function entry points ────────────────────────────────────────────────

137/// Create an agent backed by the DeepSeek API. Default model: `deepseek-chat`.
138pub fn deepseek(token: impl Into<String>) -> Agent {
139    Agent::new(LlmClient::deepseek(token))
140}
141
142/// Create an agent backed by the OpenAI API (or any compatible endpoint). Default model: `gpt-4o`.
143pub fn openai(token: impl Into<String>) -> Agent {
144    Agent::new(LlmClient::openai(token))
145}
146
147/// Create an agent backed by the Anthropic Messages API. Default model: `claude-opus-4-5`.
148pub fn anthropic(token: impl Into<String>) -> Agent {
149    Agent::new(LlmClient::anthropic(token))
150}
151
152/// Create an agent backed by the Google Gemini API. Default model: `gemini-2.0-flash`.
153pub fn gemini(token: impl Into<String>) -> Agent {
154    Agent::new(LlmClient::gemini(token))
155}