//! llama-cpp-v3-agent-sdk 0.1.7
//!
//! Agentic tool-use loop on top of llama-cpp-v3 — local LLM agents with built-in tools.
//! Documentation:
#![doc = include_str!("../README.md")]
//!
//! ---
//!
//! # Multi-Agent Workflow Guide (In-Depth Walkthrough)
//!
//! Below is the complete technical walkthrough for building complex multi-agent pipelines.
//!

#![doc = include_str!("../DOCS_DRAMA_WORKFLOW.md")]

// Public module tree. Each module's primary types are re-exported at the
// crate root below; see that block for the flattened public API.
pub mod agent; // `Agent` and `AgentBuilder` (see re-exports below)
pub mod agent_loop; // agent event loop: `AgentEvent`, `AgentLoopConfig`, `CompletionReason`, `KvCacheState`
pub mod agents_md; // AGENTS.md file handling: `AgentsMdFile`, `AgentsMdRegistry`
pub mod conversation; // chat state: `Conversation`, `Message`, `Role`
pub mod error; // crate error type: `AgentError`
pub mod inference; // model inference: `InferenceConfig`, `InferenceEngine`, `InferenceScheduler`, `templates`
pub mod permission; // tool-use permissions: `PermissionDecision`, `PermissionMode`, `PermissionRequest`, `PermissionTracker`
pub mod skills; // skill registry: `Skill`, `SkillMeta`, `SkillRegistry`
pub mod tool; // tool abstraction: `Tool`, `ToolCall`, `ToolRegistry`, `ToolResult`
pub mod tools; // built-in tool implementations (not re-exported at the root — access via `tools::…`)
pub mod workflow; // multi-agent workflow support; entire contents re-exported below

// Re-export the primary public API so downstream code can write
// `use llama_cpp_v3_agent_sdk::{Agent, Conversation, …}` without naming
// the internal module paths.
pub use agent::{Agent, AgentBuilder};
pub use agent_loop::{AgentEvent, AgentLoopConfig, CompletionReason, KvCacheState};
pub use agents_md::{AgentsMdFile, AgentsMdRegistry};
pub use conversation::{Conversation, Message, Role};
pub use error::AgentError;
pub use inference::{templates, InferenceConfig, InferenceEngine, InferenceScheduler};
pub use permission::{PermissionDecision, PermissionMode, PermissionRequest, PermissionTracker};
pub use skills::{Skill, SkillMeta, SkillRegistry};
pub use tool::{Tool, ToolCall, ToolRegistry, ToolResult};
// NOTE(review): glob re-export flattens *everything* public in `workflow`
// into the crate root, so additions to `workflow` silently widen the root
// API. Consider an explicit item list (cannot be done here — `workflow`'s
// contents are not visible in this file).
pub use workflow::*;