Skip to main content

kaneru/
lib.rs

1// Copyright 2019-2026 Apilium Technologies OÜ. All rights reserved.
2// SPDX-License-Identifier: Apache-2.0 OR Commercial
3
4#![doc = include_str!("../README.md")]
5//! # Kaneru — Unified Multi-Agent Execution System
6//!
7//! Autonomous AI agents framework for AIngle semantic networks.
8//!
9//! ## Overview
10//!
11//! Kaneru provides a complete framework for building autonomous AI agents that can:
12//! - **Observe** their environment (IoT sensors, network events, user inputs)
13//! - **Decide** based on learned policies and hierarchical goals
14//! - **Execute** actions in the AIngle network
15//! - **Learn** and adapt over time using reinforcement learning
16//!
17//! This crate is designed for use cases ranging from simple reactive agents to complex
18//! multi-agent systems with learning capabilities.
19//!
20//! ## Architecture
21//!
22//! ```text
23//! ┌─────────────────────────────────────────────────────────────┐
24//! │                      Kaneru Agent                            │
25//! ├─────────────────────────────────────────────────────────────┤
26//! │                                                              │
27//! │  ┌──────────────┐  ┌──────────────┐  ┌──────────────────┐  │
28//! │  │   Sensors    │  │   Policy     │  │    Actuators     │  │
29//! │  │              │  │   Engine     │  │                  │  │
30//! │  │ • IoT data   │─►│              │─►│ • Network calls  │  │
31//! │  │ • Events     │  │ • Goals      │  │ • State changes  │  │
32//! │  │ • Messages   │  │ • Rules      │  │ • Messages       │  │
33//! │  └──────────────┘  │ • Learning   │  └──────────────────┘  │
34//! │                    └──────┬───────┘                         │
35//! │                           │                                 │
36//! │                    ┌──────▼───────┐                         │
37//! │                    │   Memory     │                         │
38//! │                    │ (Titans)     │                         │
39//! │                    │              │                         │
40//! │                    │ STM ◄──► LTM │                         │
41//! │                    └──────────────┘                         │
42//! │                                                              │
43//! └─────────────────────────────────────────────────────────────┘
44//! ```
45//!
46//! ## Quick Start
47//!
48//! ### Simple Reactive Agent
49//!
50//! ```rust,ignore
51//! use kaneru::{Agent, SimpleAgent, Goal, Observation, Rule, Condition, Action};
52//!
53//! // Create a simple reactive agent
54//! let mut agent = SimpleAgent::new("sensor_monitor");
55//!
56//! // Add a rule: if temperature > 30, alert
57//! let rule = Rule::new(
58//!     "high_temp",
59//!     Condition::above("temperature", 30.0),
60//!     Action::alert("Temperature too high!"),
61//! );
62//! agent.add_rule(rule);
63//!
64//! // Process observations
65//! let obs = Observation::sensor("temperature", 35.0);
66//! agent.observe(obs.clone());
67//! let action = agent.decide();
68//! let result = agent.execute(action.clone());
69//! agent.learn(&obs, &action, &result);
70//! ```
71//!
72//! ### Kaneru Agent with Learning
73//!
74//! ```rust,ignore
75//! use kaneru::{KaneruAgent, KaneruConfig, Observation, Goal, Priority, Outcome};
76//!
77//! // Create a Kaneru agent with learning, prediction, and hierarchical goals
78//! let mut agent = KaneruAgent::with_default_config();
79//!
80//! // Set a goal
81//! let goal = Goal::maintain("temperature", 20.0..25.0)
82//!     .with_priority(Priority::High);
83//! agent.set_goal(goal);
84//!
85//! // Agent loop with reinforcement learning
86//! for episode in 0..100 {
87//!     let obs = Observation::sensor("temperature", 22.0);
88//!     let action = agent.step(obs.clone());
89//!
90//!     // Execute the action in the environment to obtain a result and reward
91//!     let result = agent.execute(action.clone());
92//!     let reward = 1.0; // Example reward
93//!     let next_obs = Observation::sensor("temperature", 21.0);
94//!     let outcome = Outcome::new(action, result, reward, next_obs, false);
95//!     agent.learn(outcome);
96//! }
97//! ```
98//!
99//! ### Multi-Agent Coordination
100//!
101//! ```rust,ignore
102//! use kaneru::{AgentCoordinator, KaneruAgent, Message, Observation};
103//! use std::collections::HashMap;
104//!
105//! // Create coordinator
106//! let mut coordinator = AgentCoordinator::new();
107//!
108//! // Register agents
109//! let agent1 = KaneruAgent::with_default_config();
110//! let agent2 = KaneruAgent::with_default_config();
111//!
112//! let id1 = coordinator.register_agent(agent1);
113//! let id2 = coordinator.register_agent(agent2);
114//!
115//! // Broadcast message
116//! coordinator.broadcast(Message::new("update", "System status changed"));
117//!
118//! // Step all agents
119//! let mut observations = HashMap::new();
120//! observations.insert(id1, Observation::sensor("temp", 20.0));
121//! observations.insert(id2, Observation::sensor("humidity", 60.0));
122//!
123//! let actions = coordinator.step_all(observations);
124//! ```
125//!
126//! ### State Persistence
127//!
128//! ```rust,ignore
129//! use kaneru::{KaneruAgent, AgentPersistence};
130//! use std::path::Path;
131//!
132//! let mut agent = KaneruAgent::with_default_config();
133//!
134//! // Train the agent...
135//!
136//! // Save agent state
137//! agent.save_to_file(Path::new("agent_state.json")).unwrap();
138//!
139//! // Later, load agent state
140//! let loaded_agent = KaneruAgent::load_from_file(Path::new("agent_state.json")).unwrap();
141//! ```
142//!
143//! ## Agent Types
144//!
145//! - **ReactiveAgent**: Simple stimulus-response behavior
146//! - **GoalBasedAgent**: Works toward explicit goals
147//! - **LearningAgent**: Adapts behavior over time
148//! - **CooperativeAgent**: Coordinates with other agents
149
150pub mod action;
151pub mod agent;
152pub mod config;
153pub mod coordination;
154pub mod error;
155pub mod goal;
156pub mod hierarchical;
157pub mod kaneru_agent;
158pub mod learning;
159#[cfg(feature = "memory")]
160pub mod memory;
161pub mod observation;
162pub mod persistence;
163pub mod policy;
164pub mod predictive;
165pub mod types;
166
167pub use action::{Action, ActionResult, ActionType};
168pub use agent::{Agent, AgentId, AgentState, SimpleAgent};
169pub use config::AgentConfig;
170pub use coordination::{
171    AgentCoordinator, ConsensusResult, CoordinationError, Message, MessageBus, MessageId,
172    MessagePayload, MessagePriority, SharedMemory,
173};
174pub use error::{Error, Result};
175pub use goal::{Goal, GoalPriority, GoalStatus, GoalType};
176pub use hierarchical::{
177    default_decomposition_rules, ConflictResolution, ConflictType, DecompositionResult,
178    DecompositionRule, DecompositionStrategy, GoalConflict, GoalTree, GoalTypeFilter,
179    HierarchicalGoalSolver, ParallelStrategy, SequentialStrategy,
180};
181pub use kaneru_agent::{
182    AgentStats, GoalSelectionStrategy, KaneruAgent, KaneruConfig, OperationMode, Outcome,
183    SerializedState,
184};
185pub use learning::{
186    ActionId, Experience, LearningAlgorithm, LearningConfig, LearningEngine, QValue,
187    StateActionPair, StateId,
188};
189pub use observation::{Observation, ObservationType, Sensor};
190pub use persistence::{
191    AgentPersistence, CheckpointManager, LearningSnapshot, PersistenceError, PersistenceFormat,
192    PersistenceOptions,
193};
194pub use policy::{Condition, Policy, PolicyEngine, Rule};
195pub use predictive::{
196    AnomalyDetector, PredictedState, PredictiveConfig, PredictiveModel, StateEncoder,
197    StateSnapshot, Trajectory, TransitionModel,
198};
199pub use types::*;
200
201/// Kaneru framework version
202pub const VERSION: &str = env!("CARGO_PKG_VERSION");
203
204/// Creates a simple agent with default configuration.
205///
206/// This is a convenience function that creates a [`SimpleAgent`] with standard settings
207/// suitable for general-purpose use. The agent will have learning enabled, a maximum of
208/// 10 goals, and default policy engine settings.
209///
210/// # Arguments
211///
212/// * `name` - A unique identifier for the agent. This will be used in logging and coordination.
213///
214/// # Examples
215///
216/// ```
217/// use kaneru::{create_agent, Agent};
218///
219/// let agent = create_agent("my_agent");
220/// assert_eq!(agent.name(), "my_agent");
221/// ```
222///
223/// # See Also
224///
225/// - [`SimpleAgent::new`] for direct construction
226/// - [`SimpleAgent::with_config`] for custom configuration
227/// - [`create_iot_agent`] for IoT-optimized agents with reduced memory footprint
228pub fn create_agent(name: &str) -> SimpleAgent {
229    SimpleAgent::new(name)
230}
231
232/// Creates an IoT-optimized agent with reduced memory footprint.
233///
234/// This creates a [`SimpleAgent`] configured for resource-constrained environments
235/// with memory limits suitable for embedded devices. IoT agents trade some capabilities
236/// for reduced resource usage, making them ideal for edge computing scenarios.
237///
238/// # Arguments
239///
240/// * `name` - A unique identifier for the agent.
241///
242/// # Examples
243///
244/// ```
245/// use kaneru::{create_iot_agent, Agent};
246///
247/// let agent = create_iot_agent("sensor_agent");
248/// assert!(agent.config().max_memory_bytes <= 128 * 1024);
249/// ```
250///
251/// # Configuration
252///
253/// IoT agents have:
254/// - Maximum memory: 128KB
255/// - Learning disabled by default (can be re-enabled)
256/// - Reduced observation buffer size
257/// - Maximum of 5 concurrent goals (vs. 10 for standard agents)
258/// - Simplified policy engine with fewer rules
259///
260/// # See Also
261///
262/// - [`AgentConfig::iot_mode`] for manual configuration
263/// - [`create_agent`] for standard agents with full capabilities
264pub fn create_iot_agent(name: &str) -> SimpleAgent {
265    SimpleAgent::with_config(name, AgentConfig::iot_mode())
266}
267
#[cfg(test)]
mod tests {
    use super::*;

    /// A freshly created agent should report the name it was given.
    #[test]
    fn test_create_agent() {
        assert_eq!(create_agent("test_agent").name(), "test_agent");
    }

    /// IoT agents must respect the 128 KiB memory budget.
    #[test]
    fn test_create_iot_agent() {
        let budget = 128 * 1024;
        let agent = create_iot_agent("iot_agent");
        assert!(agent.config().max_memory_bytes <= budget);
    }
}