1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
//! Configuration for HOPE Agents.
use serde::{Deserialize, Serialize};
use std::time::Duration;
/// Defines the configuration for a HOPE agent.
///
/// Construct via [`Default`], [`AgentConfig::new`], or one of the preset
/// profiles ([`AgentConfig::iot_mode`], [`AgentConfig::ai_mode`]), then
/// refine with the `with_*` builder methods.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgentConfig {
/// The human-readable name of the agent.
pub name: String,
/// The maximum memory usage in bytes the agent should consume.
pub max_memory_bytes: usize,
/// The time interval between the agent's decision-making loops.
pub decision_interval: Duration,
/// The maximum number of concurrent goals the agent can manage.
pub max_goals: usize,
/// A flag to enable or disable the agent's learning capabilities.
pub learning_enabled: bool,
/// The learning rate (alpha) for reinforcement learning algorithms (typically 0.0 to 1.0).
pub learning_rate: f32,
/// The exploration rate (epsilon) for epsilon-greedy policies, determining the balance
/// between exploring new actions and exploiting known ones (0.0 to 1.0).
pub exploration_rate: f32,
/// The maximum number of rules the agent's policy engine can hold.
pub max_rules: usize,
/// The default interval for polling sensors for new observations.
pub sensor_interval: Duration,
/// The default timeout for actions executed by the agent.
pub action_timeout: Duration,
}
impl Default for AgentConfig {
/// Provides a default, balanced configuration for a standard agent.
fn default() -> Self {
Self {
name: "agent".to_string(),
max_memory_bytes: 512 * 1024, // 512KB
decision_interval: Duration::from_millis(100),
max_goals: 10,
learning_enabled: true,
learning_rate: 0.1,
exploration_rate: 0.1,
max_rules: 100,
sensor_interval: Duration::from_millis(50),
action_timeout: Duration::from_secs(5),
}
}
}
impl AgentConfig {
    /// Creates a new configuration with the specified name; every other
    /// field takes its [`Default`] value.
    ///
    /// Accepts anything convertible into a `String` (`&str`, `String`, ...),
    /// so existing `&str` callers keep working.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            ..Self::default()
        }
    }

    /// Returns a configuration optimized for IoT and resource-constrained
    /// environments. This mode has a smaller memory footprint and disables
    /// learning by default.
    pub fn iot_mode() -> Self {
        Self {
            name: "iot_agent".to_string(),
            max_memory_bytes: 64 * 1024, // 64KB
            decision_interval: Duration::from_millis(50),
            max_goals: 5,
            learning_enabled: false, // Save resources
            learning_rate: 0.0,
            exploration_rate: 0.0,
            max_rules: 20,
            sensor_interval: Duration::from_millis(100),
            action_timeout: Duration::from_secs(2),
        }
    }

    /// Returns a configuration suitable for a more capable AI agent with
    /// learning enabled. This mode allocates more resources for memory and
    /// learning processes.
    pub fn ai_mode() -> Self {
        Self {
            name: "ai_agent".to_string(),
            max_memory_bytes: 2 * 1024 * 1024, // 2MB
            decision_interval: Duration::from_millis(200),
            max_goals: 20,
            learning_enabled: true,
            learning_rate: 0.15,
            exploration_rate: 0.2,
            max_rules: 500,
            sensor_interval: Duration::from_millis(100),
            action_timeout: Duration::from_secs(10),
        }
    }

    /// Sets the name of the agent in the configuration (builder style).
    #[must_use]
    pub fn with_name(mut self, name: impl Into<String>) -> Self {
        self.name = name.into();
        self
    }

    /// Enables or disables the learning capability (builder style).
    #[must_use]
    pub fn with_learning(mut self, enabled: bool) -> Self {
        self.learning_enabled = enabled;
        self
    }

    /// Sets the maximum memory limit in bytes (builder style).
    #[must_use]
    pub fn with_memory_limit(mut self, bytes: usize) -> Self {
        self.max_memory_bytes = bytes;
        self
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // The default profile is named "agent" and learns out of the box.
    #[test]
    fn test_default_config() {
        let cfg = AgentConfig::default();
        assert_eq!(cfg.name, "agent");
        assert!(cfg.learning_enabled);
    }

    // The IoT profile stays within a small memory budget and skips learning.
    #[test]
    fn test_iot_config() {
        let cfg = AgentConfig::iot_mode();
        assert!(cfg.max_memory_bytes <= 128 * 1024);
        assert!(!cfg.learning_enabled);
    }

    // The AI profile enables learning and allows a larger rule set.
    #[test]
    fn test_ai_config() {
        let cfg = AgentConfig::ai_mode();
        assert!(cfg.learning_enabled);
        assert!(cfg.max_rules >= 100);
    }
}