Aether Core
Aether Core is a Rust library for building AI agents (LLM + prompt + tools, running in a loop).
By default, agents have no system prompt and no tools — every token in the context window is yours to control. Tools come exclusively from MCP servers, so you can extend agents in any language.
Agents run in dedicated tokio tasks and communicate via async message passing. Hardware permitting, you can run hundreds of agents in a single process.
Table of Contents
- Installation
- Examples
- License

Installation
Add Aether to your Cargo.toml:
[dependencies]
aether-agent-core = "0.1"
Examples
Minimal Agent (No Tools)
use aether_core::core::{AgentMessage, Prompt, UserMessage, agent};
use llm::providers::openrouter::OpenRouterProvider;
use std::io::{self, Write};
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
let llm = OpenRouterProvider::default("z-ai/glm-4.6")?;
let (tx, mut rx, _handle) = agent(llm) .system_prompt(Prompt::text("You are a helpful assistant.")) .spawn() .await?;
tx.send(UserMessage::text("Explain async Rust in one paragraph"))
.await?;
loop {
match rx.recv().await {
Some(AgentMessage::Text { chunk, is_complete, .. }) => {
if !is_complete { print!("{chunk}"); io::stdout().flush()?; }
}
Some(AgentMessage::Done) => break,
Some(AgentMessage::Error { message }) => { eprintln!("Error: {message}"); break; }
_ => {}
}
}
Ok(())
}
Agent with Tools and an AGENTS.md System Prompt
Create an mcp.json file in the current working directory:
{
"servers": {
"filesystem": {
"command": "npx",
"args": ["-y", "@modelcontextprotocol/server-filesystem", "/path/to/allowed/directory"]
},
"playwright": {
"command": "npx",
"args": ["-y", "@executeautomation/playwright-mcp-server"]
}
}
}
And create an AGENTS.md file with a system prompt:
# BotBot
You are Mr. BotBot, a kickass coding agent equipped with SOTA filesystem and web browsing tools...
And bring Mr. BotBot to life!
use aether_core::core::{AgentMessage, UserMessage, Prompt, agent};
use aether_core::mcp::{mcp, McpSpawnResult};
use llm::providers::openrouter::OpenRouterProvider;
use std::io::{self, Write};
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let llm = OpenRouterProvider::default("z-ai/glm-4.6")?;

    // Spawn the MCP servers declared in mcp.json.
    // `mcp_tx` routes tool-call commands to them; unused fields are discarded.
    let McpSpawnResult {
        tool_definitions: tools,
        instructions: _,
        command_tx: mcp_tx,
        event_rx: _,
        handle: _mcp_handle,
        ..
    } = mcp()
        .from_json_files(&["mcp.json"])
        .await?
        .spawn()
        .await?;

    // System prompt is assembled from AGENTS.md in the current directory.
    let (tx, mut rx, _handle) = agent(llm)
        .system_prompt(Prompt::from_globs(vec!["AGENTS.md".into()], ".".into()))
        .tools(mcp_tx, tools)
        .spawn()
        .await?;

    tx.send(UserMessage::text(
        "Read the README.md file and summarize it",
    ))
    .await?;

    loop {
        use AgentMessage::*;
        match rx.recv().await {
            Some(Text { chunk, is_complete, .. }) => {
                if !is_complete {
                    // Stream partial text live; flush so it appears immediately.
                    print!("{chunk}");
                    io::stdout().flush()?;
                } else {
                    println!();
                }
            }
            Some(ToolCall { request, .. }) => {
                println!("\nCalling tool: {}", request.name);
            }
            Some(ToolResult { result, .. }) => {
                println!("Tool '{}' completed", result.name);
            }
            Some(ToolError { error, .. }) => {
                eprintln!("Tool '{}' failed: {}", error.name, error.error);
            }
            Some(ToolProgress { .. }) => {}
            Some(Done) => {
                println!("\nAgent finished");
                break;
            }
            Some(Error { message }) => {
                eprintln!("Error: {message}");
                break;
            }
            Some(Cancelled { .. }) => {
                eprintln!("Agent cancelled");
                break;
            }
            // Forward-compatible catch-all for message kinds not handled above.
            // NOTE: this must come before the `None` arm only as `Some(_)`;
            // a bare `_` here would make `None => break` unreachable.
            Some(_) => {}
            // Channel closed: the agent task is gone, exit the loop.
            None => break,
        }
    }
    Ok(())
}
License
MIT