pub struct Executor { /* private fields */ }
Implementations§
Source§impl Executor
impl Executor
Source
pub fn new() -> Self
pub fn new() -> Self
Examples found in repository?
examples/advanced_flow.rs (line 19)
6async fn main() -> anyhow::Result<()> {
7 // Initialize tracing
8 tracing_subscriber::fmt()
9 .with_max_level(tracing::Level::DEBUG)
10 .init();
11
12 println!("=== Rust Logic Graph - Advanced Flow Example ===");
13 println!("Scenario: User Analytics Report Generation with Permission Checks\n");
14
15 // Load graph definition
16 let def = GraphIO::load_from_file("examples/advanced_flow.json")?;
17
18 // Create custom executor with specific node configurations
19 let mut executor = Executor::new();
20
21 // Register nodes with custom logic
22 executor.register_node(Box::new(RuleNode::new(
23 "validate_input",
24 "user_id > 0"
25 )));
26
27 executor.register_node(Box::new(DBNode::new(
28 "fetch_user_data",
29 "SELECT * FROM users WHERE id = ?"
30 )));
31
32 executor.register_node(Box::new(RuleNode::new(
33 "check_permissions",
34 "user_role == \"admin\""
35 )));
36
37 executor.register_node(Box::new(DBNode::new(
38 "query_analytics",
39 "SELECT * FROM analytics WHERE user_id = ?"
40 )));
41
42 executor.register_node(Box::new(AINode::new(
43 "generate_report",
44 "Generate comprehensive analytics report from data"
45 )));
46
47 executor.register_node(Box::new(AINode::new(
48 "send_notification",
49 "Send notification to user about report status"
50 )));
51
52 // Create graph and set initial context
53 let mut graph = Graph::new(def);
54
55 // Simulate input data
56 graph.context.data.insert("user_id".to_string(), json!(1001));
57 graph.context.data.insert("user_role".to_string(), json!("admin"));
58
59 println!("Initial Context:");
60 println!(" - user_id: 1001");
61 println!(" - user_role: admin\n");
62
63 // Execute the graph
64 println!("Starting advanced flow execution...\n");
65 executor.execute(&mut graph).await?;
66
67 // Display results
68 println!("\n=== Execution Results ===");
69 println!("\nContext Data:");
70 for (key, value) in &graph.context.data {
71 if key.ends_with("_result") {
72 println!(" {}: {}", key, serde_json::to_string_pretty(&value)?);
73 }
74 }
75
76 println!("\n=== Flow Complete ===");
77 Ok(())
78}
More examples
examples/grl_graph_flow.rs (line 19)
6async fn main() -> anyhow::Result<()> {
7 // Initialize tracing
8 tracing_subscriber::fmt()
9 .with_max_level(tracing::Level::INFO)
10 .init();
11
12 println!("=== Rust Logic Graph - GRL Integration Example ===");
13 println!("Scenario: Loan Application with Advanced GRL Rules\n");
14
15 // Load graph definition
16 let def = GraphIO::load_from_file("examples/grl_graph_flow.json")?;
17
18 // Create custom executor with GRL-powered nodes
19 let mut executor = Executor::new();
20
21 // Input validation with GRL
22 executor.register_node(Box::new(RuleNode::new(
23 "input_validation",
24 "loan_amount > 0 && loan_amount <= 1000000"
25 )));
26
27 // Fetch customer data
28 executor.register_node(Box::new(DBNode::new(
29 "fetch_customer",
30 "SELECT * FROM customers WHERE id = ?"
31 )));
32
33 // Risk assessment with complex GRL rules
34 executor.register_node(Box::new(RuleNode::new(
35 "risk_assessment",
36 "credit_score >= 600 && income >= loan_amount * 3"
37 )));
38
39 // Fraud detection AI
40 executor.register_node(Box::new(AINode::new(
41 "fraud_detection",
42 "Analyze transaction patterns for fraud indicators"
43 )));
44
45 // Final approval decision
46 executor.register_node(Box::new(RuleNode::new(
47 "approval_decision",
48 "risk_score < 50 && fraud_score < 30"
49 )));
50
51 // Notification
52 executor.register_node(Box::new(AINode::new(
53 "notification",
54 "Generate approval/rejection notification email"
55 )));
56
57 // Create graph with initial context
58 let mut graph = Graph::new(def);
59
60 // Set application data
61 graph.context.data.insert("loan_amount".to_string(), json!(50000));
62 graph.context.data.insert("credit_score".to_string(), json!(720));
63 graph.context.data.insert("income".to_string(), json!(180000));
64 graph.context.data.insert("customer_id".to_string(), json!(12345));
65
66 println!("Application Data:");
67 println!(" Loan Amount: $50,000");
68 println!(" Credit Score: 720");
69 println!(" Annual Income: $180,000");
70 println!(" Customer ID: 12345\n");
71
72 // Execute the graph
73 println!("Processing loan application through GRL-powered workflow...\n");
74 executor.execute(&mut graph).await?;
75
76 // Display results
77 println!("\n=== Application Results ===\n");
78
79 if let Some(validation) = graph.context.data.get("input_validation_result") {
80 println!("✓ Input Validation: {}", validation);
81 }
82
83 if let Some(customer) = graph.context.data.get("fetch_customer_result") {
84 println!("✓ Customer Data Retrieved");
85 }
86
87 if let Some(risk) = graph.context.data.get("risk_assessment_result") {
88 println!("✓ Risk Assessment: {}", risk);
89 }
90
91 if let Some(fraud) = graph.context.data.get("fraud_detection_result") {
92 println!("✓ Fraud Detection Completed");
93 }
94
95 if let Some(decision) = graph.context.data.get("approval_decision_result") {
96 println!("✓ Approval Decision: {}", decision);
97 }
98
99 println!("\n=== GRL-Powered Workflow Complete ===");
100
101 // Demonstrate standalone GRL engine
102 println!("\n=== Bonus: Advanced GRL Rules ===\n");
103
104 let mut grl_engine = RuleEngine::new();
105
106 let advanced_rules = r#"
107rule "HighValueLoan" salience 100 {
108 when
109 loan_amount > 100000 && credit_score < 750
110 then
111 requires_manual_review = true;
112 approval_tier = "senior";
113}
114
115rule "StandardApproval" salience 50 {
116 when
117 loan_amount <= 100000 && credit_score >= 650
118 then
119 auto_approve = true;
120 approval_tier = "standard";
121}
122
123rule "RiskMitigation" salience 25 {
124 when
125 debt_to_income_ratio > 0.4
126 then
127 requires_collateral = true;
128 interest_rate_adjustment = 1.5;
129}
130"#;
131
132 grl_engine.add_grl_rule(advanced_rules)?;
133
134 println!("Advanced GRL rules loaded:");
135 println!(" - High Value Loan Review");
136 println!(" - Standard Approval Process");
137 println!(" - Risk Mitigation Measures");
138
139 println!("\n✅ All systems operational with rust-rule-engine integration!");
140
141 Ok(())
142}
Source
pub fn with_cache(cache: CacheManager) -> Self
pub fn with_cache(cache: CacheManager) -> Self
Create a new executor with caching enabled
Examples found in repository?
examples/cache_flow.rs (line 99)
84async fn demo_basic_caching() -> anyhow::Result<()> {
85 println!("\n=== Demo 1: Basic Caching ===\n");
86
87 // Create cache with default configuration
88 let cache_config = CacheConfig {
89 max_entries: 100,
90 max_memory_bytes: 10 * 1024 * 1024, // 10MB
91 default_ttl: None, // No expiration
92 eviction_policy: EvictionPolicy::LRU,
93 enable_background_cleanup: false,
94 };
95
96 let cache = CacheManager::new(cache_config).await?;
97
98 // Create executor with cache
99 let mut executor = Executor::with_cache(cache.clone());
100
101 // Register expensive computation node
102 executor.register_node(Box::new(ExpensiveComputeNode::new("compute", 1000)));
103
104 // Create simple graph
105 let graph_def = GraphDef {
106 nodes: vec![("compute".to_string(), NodeType::RuleNode)]
107 .into_iter()
108 .collect(),
109 edges: vec![],
110 };
111
112 // First execution - cache miss
113 info!("First execution (cache miss):");
114 let mut graph1 = Graph::new(graph_def.clone());
115 graph1.context.set("input", json!(5))?;
116
117 let start = std::time::Instant::now();
118 executor.execute(&mut graph1).await?;
119 let duration1 = start.elapsed();
120
121 println!("First execution time: {:?}", duration1);
122 println!("Cache stats: {:?}", cache.stats());
123
124 // Second execution - cache hit
125 info!("\nSecond execution (cache hit):");
126 let mut graph2 = Graph::new(graph_def.clone());
127 graph2.context.set("input", json!(5))?; // Same input
128
129 let start = std::time::Instant::now();
130 executor.execute(&mut graph2).await?;
131 let duration2 = start.elapsed();
132
133 println!("Second execution time: {:?}", duration2);
134 println!("Cache stats: {:?}", cache.stats());
135 println!("Speedup: {:.2}x", duration1.as_secs_f64() / duration2.as_secs_f64());
136
137 // Third execution with different input - cache miss
138 info!("\nThird execution with different input (cache miss):");
139 let mut graph3 = Graph::new(graph_def);
140 graph3.context.set("input", json!(10))?; // Different input
141
142 let start = std::time::Instant::now();
143 executor.execute(&mut graph3).await?;
144 let duration3 = start.elapsed();
145
146 println!("Third execution time: {:?}", duration3);
147 println!("Final cache stats: {:?}", cache.stats());
148 println!("Hit rate: {:.1}%", cache.stats().hit_rate());
149
150 Ok(())
151}
152
153async fn demo_ttl_expiration() -> anyhow::Result<()> {
154 println!("\n\n=== Demo 2: TTL Expiration ===\n");
155
156 // Create cache with short TTL
157 let cache_config = CacheConfig {
158 max_entries: 100,
159 max_memory_bytes: 10 * 1024 * 1024,
160 default_ttl: Some(Duration::from_secs(2)), // 2 second TTL
161 eviction_policy: EvictionPolicy::LRU,
162 enable_background_cleanup: true,
163 };
164
165 let cache = CacheManager::new(cache_config).await?;
166 let mut executor = Executor::with_cache(cache.clone());
167 executor.register_node(Box::new(ExpensiveComputeNode::new("compute", 500)));
168
169 let graph_def = GraphDef {
170 nodes: vec![("compute".to_string(), NodeType::RuleNode)]
171 .into_iter()
172 .collect(),
173 edges: vec![],
174 };
175
176 // First execution
177 info!("First execution:");
178 let mut graph1 = Graph::new(graph_def.clone());
179 graph1.context.set("input", json!(7))?;
180 executor.execute(&mut graph1).await?;
181 println!("Cache stats after first execution: {:?}", cache.stats());
182
183 // Immediate second execution - should hit cache
184 info!("\nImmediate second execution (within TTL):");
185 let mut graph2 = Graph::new(graph_def.clone());
186 graph2.context.set("input", json!(7))?;
187 executor.execute(&mut graph2).await?;
188 println!("Cache stats: {:?}", cache.stats());
189
190 // Wait for TTL to expire
191 println!("\nWaiting for TTL to expire (3 seconds)...");
192 tokio::time::sleep(Duration::from_secs(3)).await;
193
194 // Third execution after TTL - should miss cache
195 info!("\nThird execution (after TTL expiration):");
196 let mut graph3 = Graph::new(graph_def);
197 graph3.context.set("input", json!(7))?;
198 executor.execute(&mut graph3).await?;
199 println!("Cache stats after expiration: {:?}", cache.stats());
200
201 Ok(())
202}
203
204async fn demo_eviction_policies() -> anyhow::Result<()> {
205 println!("\n\n=== Demo 3: Eviction Policies ===\n");
206
207 for policy in [EvictionPolicy::LRU, EvictionPolicy::FIFO, EvictionPolicy::LFU] {
208 println!("\n--- Testing {:?} Policy ---", policy);
209
210 let cache_config = CacheConfig {
211 max_entries: 3, // Small limit to trigger eviction
212 max_memory_bytes: 10 * 1024 * 1024,
213 default_ttl: None,
214 eviction_policy: policy,
215 enable_background_cleanup: false,
216 };
217
218 let cache = CacheManager::new(cache_config).await?;
219 let mut executor = Executor::with_cache(cache.clone());
220 executor.register_node(Box::new(ExpensiveComputeNode::new("compute", 100)));
221
222 let graph_def = GraphDef {
223 nodes: vec![("compute".to_string(), NodeType::RuleNode)]
224 .into_iter()
225 .collect(),
226 edges: vec![],
227 };
228
229 // Add 4 entries (should evict 1)
230 for i in 1..=4 {
231 let mut graph = Graph::new(graph_def.clone());
232 graph.context.set("input", json!(i))?;
233 executor.execute(&mut graph).await?;
234 println!("Added entry {}, cache size: {}", i, cache.len());
235 }
236
237 let stats = cache.stats();
238 println!("Final stats: entries={}, evictions={}",
239 stats.current_entries, stats.evictions);
240 }
241
242 Ok(())
243}
244
245async fn demo_memory_limits() -> anyhow::Result<()> {
246 println!("\n\n=== Demo 4: Memory Limits ===\n");
247
248 // Create cache with small memory limit
249 let cache_config = CacheConfig {
250 max_entries: 1000,
251 max_memory_bytes: 1024, // Only 1KB
252 default_ttl: None,
253 eviction_policy: EvictionPolicy::LRU,
254 enable_background_cleanup: false,
255 };
256
257 let cache = CacheManager::new(cache_config).await?;
258 let mut executor = Executor::with_cache(cache.clone());
259 executor.register_node(Box::new(ExpensiveComputeNode::new("compute", 50)));
260
261 let graph_def = GraphDef {
262 nodes: vec![("compute".to_string(), NodeType::RuleNode)]
263 .into_iter()
264 .collect(),
265 edges: vec![],
266 };
267
268 // Add entries until memory limit is reached
269 for i in 1..=20 {
270 let mut graph = Graph::new(graph_def.clone());
271 graph.context.set("input", json!(i))?;
272 graph.context.set("large_data", json!(vec![i; 100]))?; // Add some bulk
273 executor.execute(&mut graph).await?;
274
275 let stats = cache.stats();
276 println!(
277 "Entry {}: size={} entries, memory={} bytes, evictions={}",
278 i, stats.current_entries, stats.current_memory_bytes, stats.evictions
279 );
280 }
281
282 let stats = cache.stats();
283 println!("\nFinal memory usage: {} bytes (limit: 1024 bytes)",
284 stats.current_memory_bytes);
285 println!("Total evictions: {}", stats.evictions);
286
287 Ok(())
288}
289
290async fn demo_cache_invalidation() -> anyhow::Result<()> {
291 println!("\n\n=== Demo 5: Cache Invalidation ===\n");
292
293 let cache = CacheManager::new(CacheConfig::default()).await?;
294 let mut executor = Executor::with_cache(cache.clone());
295 executor.register_node(Box::new(ExpensiveComputeNode::new("compute", 200)));
296
297 let graph_def = GraphDef {
298 nodes: vec![("compute".to_string(), NodeType::RuleNode)]
299 .into_iter()
300 .collect(),
301 edges: vec![],
302 };
303
304 // Execute multiple times with different inputs
305 for i in 1..=5 {
306 let mut graph = Graph::new(graph_def.clone());
307 graph.context.set("input", json!(i))?;
308 executor.execute(&mut graph).await?;
309 }
310
311 println!("Cache populated with {} entries", cache.len());
312 println!("Cache stats: {:?}", cache.stats());
313
314 // Invalidate specific node
315 println!("\nInvalidating all entries for node 'compute'...");
316 let invalidated = cache.invalidate_node("compute");
317 println!("Invalidated {} entries", invalidated);
318 println!("Cache size after invalidation: {}", cache.len());
319
320 // Re-execute - should be cache miss
321 info!("\nRe-executing after invalidation:");
322 let mut graph = Graph::new(graph_def);
323 graph.context.set("input", json!(1))?;
324 executor.execute(&mut graph).await?;
325 println!("Final cache stats: {:?}", cache.stats());
326
327 Ok(())
327}
Source
pub fn set_cache(&mut self, cache: CacheManager)
pub fn set_cache(&mut self, cache: CacheManager)
Enable caching for this executor
Source
pub fn cache(&self) -> Option<&CacheManager>
pub fn cache(&self) -> Option<&CacheManager>
Get the cache manager (if enabled)
Source
pub fn from_graph_def(def: &GraphDef) -> Result<Self>
pub fn from_graph_def(def: &GraphDef) -> Result<Self>
Build executor from graph definition
Source
pub fn register_node(&mut self, node: Box<dyn Node>)
pub fn register_node(&mut self, node: Box<dyn Node>)
Register a node with the executor
Examples found in repository?
examples/advanced_flow.rs (lines 22-25)
6async fn main() -> anyhow::Result<()> {
7 // Initialize tracing
8 tracing_subscriber::fmt()
9 .with_max_level(tracing::Level::DEBUG)
10 .init();
11
12 println!("=== Rust Logic Graph - Advanced Flow Example ===");
13 println!("Scenario: User Analytics Report Generation with Permission Checks\n");
14
15 // Load graph definition
16 let def = GraphIO::load_from_file("examples/advanced_flow.json")?;
17
18 // Create custom executor with specific node configurations
19 let mut executor = Executor::new();
20
21 // Register nodes with custom logic
22 executor.register_node(Box::new(RuleNode::new(
23 "validate_input",
24 "user_id > 0"
25 )));
26
27 executor.register_node(Box::new(DBNode::new(
28 "fetch_user_data",
29 "SELECT * FROM users WHERE id = ?"
30 )));
31
32 executor.register_node(Box::new(RuleNode::new(
33 "check_permissions",
34 "user_role == \"admin\""
35 )));
36
37 executor.register_node(Box::new(DBNode::new(
38 "query_analytics",
39 "SELECT * FROM analytics WHERE user_id = ?"
40 )));
41
42 executor.register_node(Box::new(AINode::new(
43 "generate_report",
44 "Generate comprehensive analytics report from data"
45 )));
46
47 executor.register_node(Box::new(AINode::new(
48 "send_notification",
49 "Send notification to user about report status"
50 )));
51
52 // Create graph and set initial context
53 let mut graph = Graph::new(def);
54
55 // Simulate input data
56 graph.context.data.insert("user_id".to_string(), json!(1001));
57 graph.context.data.insert("user_role".to_string(), json!("admin"));
58
59 println!("Initial Context:");
60 println!(" - user_id: 1001");
61 println!(" - user_role: admin\n");
62
63 // Execute the graph
64 println!("Starting advanced flow execution...\n");
65 executor.execute(&mut graph).await?;
66
67 // Display results
68 println!("\n=== Execution Results ===");
69 println!("\nContext Data:");
70 for (key, value) in &graph.context.data {
71 if key.ends_with("_result") {
72 println!(" {}: {}", key, serde_json::to_string_pretty(&value)?);
73 }
74 }
75
76 println!("\n=== Flow Complete ===");
77 Ok(())
78}
More examples
examples/cache_flow.rs (line 102)
84async fn demo_basic_caching() -> anyhow::Result<()> {
85 println!("\n=== Demo 1: Basic Caching ===\n");
86
87 // Create cache with default configuration
88 let cache_config = CacheConfig {
89 max_entries: 100,
90 max_memory_bytes: 10 * 1024 * 1024, // 10MB
91 default_ttl: None, // No expiration
92 eviction_policy: EvictionPolicy::LRU,
93 enable_background_cleanup: false,
94 };
95
96 let cache = CacheManager::new(cache_config).await?;
97
98 // Create executor with cache
99 let mut executor = Executor::with_cache(cache.clone());
100
101 // Register expensive computation node
102 executor.register_node(Box::new(ExpensiveComputeNode::new("compute", 1000)));
103
104 // Create simple graph
105 let graph_def = GraphDef {
106 nodes: vec![("compute".to_string(), NodeType::RuleNode)]
107 .into_iter()
108 .collect(),
109 edges: vec![],
110 };
111
112 // First execution - cache miss
113 info!("First execution (cache miss):");
114 let mut graph1 = Graph::new(graph_def.clone());
115 graph1.context.set("input", json!(5))?;
116
117 let start = std::time::Instant::now();
118 executor.execute(&mut graph1).await?;
119 let duration1 = start.elapsed();
120
121 println!("First execution time: {:?}", duration1);
122 println!("Cache stats: {:?}", cache.stats());
123
124 // Second execution - cache hit
125 info!("\nSecond execution (cache hit):");
126 let mut graph2 = Graph::new(graph_def.clone());
127 graph2.context.set("input", json!(5))?; // Same input
128
129 let start = std::time::Instant::now();
130 executor.execute(&mut graph2).await?;
131 let duration2 = start.elapsed();
132
133 println!("Second execution time: {:?}", duration2);
134 println!("Cache stats: {:?}", cache.stats());
135 println!("Speedup: {:.2}x", duration1.as_secs_f64() / duration2.as_secs_f64());
136
137 // Third execution with different input - cache miss
138 info!("\nThird execution with different input (cache miss):");
139 let mut graph3 = Graph::new(graph_def);
140 graph3.context.set("input", json!(10))?; // Different input
141
142 let start = std::time::Instant::now();
143 executor.execute(&mut graph3).await?;
144 let duration3 = start.elapsed();
145
146 println!("Third execution time: {:?}", duration3);
147 println!("Final cache stats: {:?}", cache.stats());
148 println!("Hit rate: {:.1}%", cache.stats().hit_rate());
149
150 Ok(())
151}
152
153async fn demo_ttl_expiration() -> anyhow::Result<()> {
154 println!("\n\n=== Demo 2: TTL Expiration ===\n");
155
156 // Create cache with short TTL
157 let cache_config = CacheConfig {
158 max_entries: 100,
159 max_memory_bytes: 10 * 1024 * 1024,
160 default_ttl: Some(Duration::from_secs(2)), // 2 second TTL
161 eviction_policy: EvictionPolicy::LRU,
162 enable_background_cleanup: true,
163 };
164
165 let cache = CacheManager::new(cache_config).await?;
166 let mut executor = Executor::with_cache(cache.clone());
167 executor.register_node(Box::new(ExpensiveComputeNode::new("compute", 500)));
168
169 let graph_def = GraphDef {
170 nodes: vec![("compute".to_string(), NodeType::RuleNode)]
171 .into_iter()
172 .collect(),
173 edges: vec![],
174 };
175
176 // First execution
177 info!("First execution:");
178 let mut graph1 = Graph::new(graph_def.clone());
179 graph1.context.set("input", json!(7))?;
180 executor.execute(&mut graph1).await?;
181 println!("Cache stats after first execution: {:?}", cache.stats());
182
183 // Immediate second execution - should hit cache
184 info!("\nImmediate second execution (within TTL):");
185 let mut graph2 = Graph::new(graph_def.clone());
186 graph2.context.set("input", json!(7))?;
187 executor.execute(&mut graph2).await?;
188 println!("Cache stats: {:?}", cache.stats());
189
190 // Wait for TTL to expire
191 println!("\nWaiting for TTL to expire (3 seconds)...");
192 tokio::time::sleep(Duration::from_secs(3)).await;
193
194 // Third execution after TTL - should miss cache
195 info!("\nThird execution (after TTL expiration):");
196 let mut graph3 = Graph::new(graph_def);
197 graph3.context.set("input", json!(7))?;
198 executor.execute(&mut graph3).await?;
199 println!("Cache stats after expiration: {:?}", cache.stats());
200
201 Ok(())
202}
203
204async fn demo_eviction_policies() -> anyhow::Result<()> {
205 println!("\n\n=== Demo 3: Eviction Policies ===\n");
206
207 for policy in [EvictionPolicy::LRU, EvictionPolicy::FIFO, EvictionPolicy::LFU] {
208 println!("\n--- Testing {:?} Policy ---", policy);
209
210 let cache_config = CacheConfig {
211 max_entries: 3, // Small limit to trigger eviction
212 max_memory_bytes: 10 * 1024 * 1024,
213 default_ttl: None,
214 eviction_policy: policy,
215 enable_background_cleanup: false,
216 };
217
218 let cache = CacheManager::new(cache_config).await?;
219 let mut executor = Executor::with_cache(cache.clone());
220 executor.register_node(Box::new(ExpensiveComputeNode::new("compute", 100)));
221
222 let graph_def = GraphDef {
223 nodes: vec![("compute".to_string(), NodeType::RuleNode)]
224 .into_iter()
225 .collect(),
226 edges: vec![],
227 };
228
229 // Add 4 entries (should evict 1)
230 for i in 1..=4 {
231 let mut graph = Graph::new(graph_def.clone());
232 graph.context.set("input", json!(i))?;
233 executor.execute(&mut graph).await?;
234 println!("Added entry {}, cache size: {}", i, cache.len());
235 }
236
237 let stats = cache.stats();
238 println!("Final stats: entries={}, evictions={}",
239 stats.current_entries, stats.evictions);
240 }
241
242 Ok(())
243}
244
245async fn demo_memory_limits() -> anyhow::Result<()> {
246 println!("\n\n=== Demo 4: Memory Limits ===\n");
247
248 // Create cache with small memory limit
249 let cache_config = CacheConfig {
250 max_entries: 1000,
251 max_memory_bytes: 1024, // Only 1KB
252 default_ttl: None,
253 eviction_policy: EvictionPolicy::LRU,
254 enable_background_cleanup: false,
255 };
256
257 let cache = CacheManager::new(cache_config).await?;
258 let mut executor = Executor::with_cache(cache.clone());
259 executor.register_node(Box::new(ExpensiveComputeNode::new("compute", 50)));
260
261 let graph_def = GraphDef {
262 nodes: vec![("compute".to_string(), NodeType::RuleNode)]
263 .into_iter()
264 .collect(),
265 edges: vec![],
266 };
267
268 // Add entries until memory limit is reached
269 for i in 1..=20 {
270 let mut graph = Graph::new(graph_def.clone());
271 graph.context.set("input", json!(i))?;
272 graph.context.set("large_data", json!(vec![i; 100]))?; // Add some bulk
273 executor.execute(&mut graph).await?;
274
275 let stats = cache.stats();
276 println!(
277 "Entry {}: size={} entries, memory={} bytes, evictions={}",
278 i, stats.current_entries, stats.current_memory_bytes, stats.evictions
279 );
280 }
281
282 let stats = cache.stats();
283 println!("\nFinal memory usage: {} bytes (limit: 1024 bytes)",
284 stats.current_memory_bytes);
285 println!("Total evictions: {}", stats.evictions);
286
287 Ok(())
288}
289
290async fn demo_cache_invalidation() -> anyhow::Result<()> {
291 println!("\n\n=== Demo 5: Cache Invalidation ===\n");
292
293 let cache = CacheManager::new(CacheConfig::default()).await?;
294 let mut executor = Executor::with_cache(cache.clone());
295 executor.register_node(Box::new(ExpensiveComputeNode::new("compute", 200)));
296
297 let graph_def = GraphDef {
298 nodes: vec![("compute".to_string(), NodeType::RuleNode)]
299 .into_iter()
300 .collect(),
301 edges: vec![],
302 };
303
304 // Execute multiple times with different inputs
305 for i in 1..=5 {
306 let mut graph = Graph::new(graph_def.clone());
307 graph.context.set("input", json!(i))?;
308 executor.execute(&mut graph).await?;
309 }
310
311 println!("Cache populated with {} entries", cache.len());
312 println!("Cache stats: {:?}", cache.stats());
313
314 // Invalidate specific node
315 println!("\nInvalidating all entries for node 'compute'...");
316 let invalidated = cache.invalidate_node("compute");
317 println!("Invalidated {} entries", invalidated);
318 println!("Cache size after invalidation: {}", cache.len());
319
320 // Re-execute - should be cache miss
321 info!("\nRe-executing after invalidation:");
322 let mut graph = Graph::new(graph_def);
323 graph.context.set("input", json!(1))?;
324 executor.execute(&mut graph).await?;
325 println!("Final cache stats: {:?}", cache.stats());
326
327 Ok(())
328}
examples/grl_graph_flow.rs (lines 22-25)
6async fn main() -> anyhow::Result<()> {
7 // Initialize tracing
8 tracing_subscriber::fmt()
9 .with_max_level(tracing::Level::INFO)
10 .init();
11
12 println!("=== Rust Logic Graph - GRL Integration Example ===");
13 println!("Scenario: Loan Application with Advanced GRL Rules\n");
14
15 // Load graph definition
16 let def = GraphIO::load_from_file("examples/grl_graph_flow.json")?;
17
18 // Create custom executor with GRL-powered nodes
19 let mut executor = Executor::new();
20
21 // Input validation with GRL
22 executor.register_node(Box::new(RuleNode::new(
23 "input_validation",
24 "loan_amount > 0 && loan_amount <= 1000000"
25 )));
26
27 // Fetch customer data
28 executor.register_node(Box::new(DBNode::new(
29 "fetch_customer",
30 "SELECT * FROM customers WHERE id = ?"
31 )));
32
33 // Risk assessment with complex GRL rules
34 executor.register_node(Box::new(RuleNode::new(
35 "risk_assessment",
36 "credit_score >= 600 && income >= loan_amount * 3"
37 )));
38
39 // Fraud detection AI
40 executor.register_node(Box::new(AINode::new(
41 "fraud_detection",
42 "Analyze transaction patterns for fraud indicators"
43 )));
44
45 // Final approval decision
46 executor.register_node(Box::new(RuleNode::new(
47 "approval_decision",
48 "risk_score < 50 && fraud_score < 30"
49 )));
50
51 // Notification
52 executor.register_node(Box::new(AINode::new(
53 "notification",
54 "Generate approval/rejection notification email"
55 )));
56
57 // Create graph with initial context
58 let mut graph = Graph::new(def);
59
60 // Set application data
61 graph.context.data.insert("loan_amount".to_string(), json!(50000));
62 graph.context.data.insert("credit_score".to_string(), json!(720));
63 graph.context.data.insert("income".to_string(), json!(180000));
64 graph.context.data.insert("customer_id".to_string(), json!(12345));
65
66 println!("Application Data:");
67 println!(" Loan Amount: $50,000");
68 println!(" Credit Score: 720");
69 println!(" Annual Income: $180,000");
70 println!(" Customer ID: 12345\n");
71
72 // Execute the graph
73 println!("Processing loan application through GRL-powered workflow...\n");
74 executor.execute(&mut graph).await?;
75
76 // Display results
77 println!("\n=== Application Results ===\n");
78
79 if let Some(validation) = graph.context.data.get("input_validation_result") {
80 println!("✓ Input Validation: {}", validation);
81 }
82
83 if let Some(customer) = graph.context.data.get("fetch_customer_result") {
84 println!("✓ Customer Data Retrieved");
85 }
86
87 if let Some(risk) = graph.context.data.get("risk_assessment_result") {
88 println!("✓ Risk Assessment: {}", risk);
89 }
90
91 if let Some(fraud) = graph.context.data.get("fraud_detection_result") {
92 println!("✓ Fraud Detection Completed");
93 }
94
95 if let Some(decision) = graph.context.data.get("approval_decision_result") {
96 println!("✓ Approval Decision: {}", decision);
97 }
98
99 println!("\n=== GRL-Powered Workflow Complete ===");
100
101 // Demonstrate standalone GRL engine
102 println!("\n=== Bonus: Advanced GRL Rules ===\n");
103
104 let mut grl_engine = RuleEngine::new();
105
106 let advanced_rules = r#"
107rule "HighValueLoan" salience 100 {
108 when
109 loan_amount > 100000 && credit_score < 750
110 then
111 requires_manual_review = true;
112 approval_tier = "senior";
113}
114
115rule "StandardApproval" salience 50 {
116 when
117 loan_amount <= 100000 && credit_score >= 650
118 then
119 auto_approve = true;
120 approval_tier = "standard";
121}
122
123rule "RiskMitigation" salience 25 {
124 when
125 debt_to_income_ratio > 0.4
126 then
127 requires_collateral = true;
128 interest_rate_adjustment = 1.5;
129}
130"#;
131
132 grl_engine.add_grl_rule(advanced_rules)?;
133
134 println!("Advanced GRL rules loaded:");
135 println!(" - High Value Loan Review");
136 println!(" - Standard Approval Process");
137 println!(" - Risk Mitigation Measures");
138
139 println!("\n✅ All systems operational with rust-rule-engine integration!");
140
141 Ok(())
142}
Source
pub async fn execute(&self, graph: &mut Graph) -> Result<()>
pub async fn execute(&self, graph: &mut Graph) -> Result<()>
Execute the graph in topological order
Examples found in repository?
examples/advanced_flow.rs (line 65)
6async fn main() -> anyhow::Result<()> {
7 // Initialize tracing
8 tracing_subscriber::fmt()
9 .with_max_level(tracing::Level::DEBUG)
10 .init();
11
12 println!("=== Rust Logic Graph - Advanced Flow Example ===");
13 println!("Scenario: User Analytics Report Generation with Permission Checks\n");
14
15 // Load graph definition
16 let def = GraphIO::load_from_file("examples/advanced_flow.json")?;
17
18 // Create custom executor with specific node configurations
19 let mut executor = Executor::new();
20
21 // Register nodes with custom logic
22 executor.register_node(Box::new(RuleNode::new(
23 "validate_input",
24 "user_id > 0"
25 )));
26
27 executor.register_node(Box::new(DBNode::new(
28 "fetch_user_data",
29 "SELECT * FROM users WHERE id = ?"
30 )));
31
32 executor.register_node(Box::new(RuleNode::new(
33 "check_permissions",
34 "user_role == \"admin\""
35 )));
36
37 executor.register_node(Box::new(DBNode::new(
38 "query_analytics",
39 "SELECT * FROM analytics WHERE user_id = ?"
40 )));
41
42 executor.register_node(Box::new(AINode::new(
43 "generate_report",
44 "Generate comprehensive analytics report from data"
45 )));
46
47 executor.register_node(Box::new(AINode::new(
48 "send_notification",
49 "Send notification to user about report status"
50 )));
51
52 // Create graph and set initial context
53 let mut graph = Graph::new(def);
54
55 // Simulate input data
56 graph.context.data.insert("user_id".to_string(), json!(1001));
57 graph.context.data.insert("user_role".to_string(), json!("admin"));
58
59 println!("Initial Context:");
60 println!(" - user_id: 1001");
61 println!(" - user_role: admin\n");
62
63 // Execute the graph
64 println!("Starting advanced flow execution...\n");
65 executor.execute(&mut graph).await?;
66
67 // Display results
68 println!("\n=== Execution Results ===");
69 println!("\nContext Data:");
70 for (key, value) in &graph.context.data {
71 if key.ends_with("_result") {
72 println!(" {}: {}", key, serde_json::to_string_pretty(&value)?);
73 }
74 }
75
76 println!("\n=== Flow Complete ===");
77 Ok(())
78}

More examples
examples/cache_flow.rs (line 118)
84async fn demo_basic_caching() -> anyhow::Result<()> {
85 println!("\n=== Demo 1: Basic Caching ===\n");
86
87 // Create cache with default configuration
88 let cache_config = CacheConfig {
89 max_entries: 100,
90 max_memory_bytes: 10 * 1024 * 1024, // 10MB
91 default_ttl: None, // No expiration
92 eviction_policy: EvictionPolicy::LRU,
93 enable_background_cleanup: false,
94 };
95
96 let cache = CacheManager::new(cache_config).await?;
97
98 // Create executor with cache
99 let mut executor = Executor::with_cache(cache.clone());
100
101 // Register expensive computation node
102 executor.register_node(Box::new(ExpensiveComputeNode::new("compute", 1000)));
103
104 // Create simple graph
105 let graph_def = GraphDef {
106 nodes: vec![("compute".to_string(), NodeType::RuleNode)]
107 .into_iter()
108 .collect(),
109 edges: vec![],
110 };
111
112 // First execution - cache miss
113 info!("First execution (cache miss):");
114 let mut graph1 = Graph::new(graph_def.clone());
115 graph1.context.set("input", json!(5))?;
116
117 let start = std::time::Instant::now();
118 executor.execute(&mut graph1).await?;
119 let duration1 = start.elapsed();
120
121 println!("First execution time: {:?}", duration1);
122 println!("Cache stats: {:?}", cache.stats());
123
124 // Second execution - cache hit
125 info!("\nSecond execution (cache hit):");
126 let mut graph2 = Graph::new(graph_def.clone());
127 graph2.context.set("input", json!(5))?; // Same input
128
129 let start = std::time::Instant::now();
130 executor.execute(&mut graph2).await?;
131 let duration2 = start.elapsed();
132
133 println!("Second execution time: {:?}", duration2);
134 println!("Cache stats: {:?}", cache.stats());
135 println!("Speedup: {:.2}x", duration1.as_secs_f64() / duration2.as_secs_f64());
136
137 // Third execution with different input - cache miss
138 info!("\nThird execution with different input (cache miss):");
139 let mut graph3 = Graph::new(graph_def);
140 graph3.context.set("input", json!(10))?; // Different input
141
142 let start = std::time::Instant::now();
143 executor.execute(&mut graph3).await?;
144 let duration3 = start.elapsed();
145
146 println!("Third execution time: {:?}", duration3);
147 println!("Final cache stats: {:?}", cache.stats());
148 println!("Hit rate: {:.1}%", cache.stats().hit_rate());
149
150 Ok(())
151}
152
153async fn demo_ttl_expiration() -> anyhow::Result<()> {
154 println!("\n\n=== Demo 2: TTL Expiration ===\n");
155
156 // Create cache with short TTL
157 let cache_config = CacheConfig {
158 max_entries: 100,
159 max_memory_bytes: 10 * 1024 * 1024,
160 default_ttl: Some(Duration::from_secs(2)), // 2 second TTL
161 eviction_policy: EvictionPolicy::LRU,
162 enable_background_cleanup: true,
163 };
164
165 let cache = CacheManager::new(cache_config).await?;
166 let mut executor = Executor::with_cache(cache.clone());
167 executor.register_node(Box::new(ExpensiveComputeNode::new("compute", 500)));
168
169 let graph_def = GraphDef {
170 nodes: vec![("compute".to_string(), NodeType::RuleNode)]
171 .into_iter()
172 .collect(),
173 edges: vec![],
174 };
175
176 // First execution
177 info!("First execution:");
178 let mut graph1 = Graph::new(graph_def.clone());
179 graph1.context.set("input", json!(7))?;
180 executor.execute(&mut graph1).await?;
181 println!("Cache stats after first execution: {:?}", cache.stats());
182
183 // Immediate second execution - should hit cache
184 info!("\nImmediate second execution (within TTL):");
185 let mut graph2 = Graph::new(graph_def.clone());
186 graph2.context.set("input", json!(7))?;
187 executor.execute(&mut graph2).await?;
188 println!("Cache stats: {:?}", cache.stats());
189
190 // Wait for TTL to expire
191 println!("\nWaiting for TTL to expire (3 seconds)...");
192 tokio::time::sleep(Duration::from_secs(3)).await;
193
194 // Third execution after TTL - should miss cache
195 info!("\nThird execution (after TTL expiration):");
196 let mut graph3 = Graph::new(graph_def);
197 graph3.context.set("input", json!(7))?;
198 executor.execute(&mut graph3).await?;
199 println!("Cache stats after expiration: {:?}", cache.stats());
200
201 Ok(())
202}
203
204async fn demo_eviction_policies() -> anyhow::Result<()> {
205 println!("\n\n=== Demo 3: Eviction Policies ===\n");
206
207 for policy in [EvictionPolicy::LRU, EvictionPolicy::FIFO, EvictionPolicy::LFU] {
208 println!("\n--- Testing {:?} Policy ---", policy);
209
210 let cache_config = CacheConfig {
211 max_entries: 3, // Small limit to trigger eviction
212 max_memory_bytes: 10 * 1024 * 1024,
213 default_ttl: None,
214 eviction_policy: policy,
215 enable_background_cleanup: false,
216 };
217
218 let cache = CacheManager::new(cache_config).await?;
219 let mut executor = Executor::with_cache(cache.clone());
220 executor.register_node(Box::new(ExpensiveComputeNode::new("compute", 100)));
221
222 let graph_def = GraphDef {
223 nodes: vec![("compute".to_string(), NodeType::RuleNode)]
224 .into_iter()
225 .collect(),
226 edges: vec![],
227 };
228
229 // Add 4 entries (should evict 1)
230 for i in 1..=4 {
231 let mut graph = Graph::new(graph_def.clone());
232 graph.context.set("input", json!(i))?;
233 executor.execute(&mut graph).await?;
234 println!("Added entry {}, cache size: {}", i, cache.len());
235 }
236
237 let stats = cache.stats();
238 println!("Final stats: entries={}, evictions={}",
239 stats.current_entries, stats.evictions);
240 }
241
242 Ok(())
243}
244
245async fn demo_memory_limits() -> anyhow::Result<()> {
246 println!("\n\n=== Demo 4: Memory Limits ===\n");
247
248 // Create cache with small memory limit
249 let cache_config = CacheConfig {
250 max_entries: 1000,
251 max_memory_bytes: 1024, // Only 1KB
252 default_ttl: None,
253 eviction_policy: EvictionPolicy::LRU,
254 enable_background_cleanup: false,
255 };
256
257 let cache = CacheManager::new(cache_config).await?;
258 let mut executor = Executor::with_cache(cache.clone());
259 executor.register_node(Box::new(ExpensiveComputeNode::new("compute", 50)));
260
261 let graph_def = GraphDef {
262 nodes: vec![("compute".to_string(), NodeType::RuleNode)]
263 .into_iter()
264 .collect(),
265 edges: vec![],
266 };
267
268 // Add entries until memory limit is reached
269 for i in 1..=20 {
270 let mut graph = Graph::new(graph_def.clone());
271 graph.context.set("input", json!(i))?;
272 graph.context.set("large_data", json!(vec![i; 100]))?; // Add some bulk
273 executor.execute(&mut graph).await?;
274
275 let stats = cache.stats();
276 println!(
277 "Entry {}: size={} entries, memory={} bytes, evictions={}",
278 i, stats.current_entries, stats.current_memory_bytes, stats.evictions
279 );
280 }
281
282 let stats = cache.stats();
283 println!("\nFinal memory usage: {} bytes (limit: 1024 bytes)",
284 stats.current_memory_bytes);
285 println!("Total evictions: {}", stats.evictions);
286
287 Ok(())
288}
289
290async fn demo_cache_invalidation() -> anyhow::Result<()> {
291 println!("\n\n=== Demo 5: Cache Invalidation ===\n");
292
293 let cache = CacheManager::new(CacheConfig::default()).await?;
294 let mut executor = Executor::with_cache(cache.clone());
295 executor.register_node(Box::new(ExpensiveComputeNode::new("compute", 200)));
296
297 let graph_def = GraphDef {
298 nodes: vec![("compute".to_string(), NodeType::RuleNode)]
299 .into_iter()
300 .collect(),
301 edges: vec![],
302 };
303
304 // Execute multiple times with different inputs
305 for i in 1..=5 {
306 let mut graph = Graph::new(graph_def.clone());
307 graph.context.set("input", json!(i))?;
308 executor.execute(&mut graph).await?;
309 }
310
311 println!("Cache populated with {} entries", cache.len());
312 println!("Cache stats: {:?}", cache.stats());
313
314 // Invalidate specific node
315 println!("\nInvalidating all entries for node 'compute'...");
316 let invalidated = cache.invalidate_node("compute");
317 println!("Invalidated {} entries", invalidated);
318 println!("Cache size after invalidation: {}", cache.len());
319
320 // Re-execute - should be cache miss
321 info!("\nRe-executing after invalidation:");
322 let mut graph = Graph::new(graph_def);
323 graph.context.set("input", json!(1))?;
324 executor.execute(&mut graph).await?;
325 println!("Final cache stats: {:?}", cache.stats());
326
327 Ok(())
328}

examples/grl_graph_flow.rs (line 74)
6async fn main() -> anyhow::Result<()> {
7 // Initialize tracing
8 tracing_subscriber::fmt()
9 .with_max_level(tracing::Level::INFO)
10 .init();
11
12 println!("=== Rust Logic Graph - GRL Integration Example ===");
13 println!("Scenario: Loan Application with Advanced GRL Rules\n");
14
15 // Load graph definition
16 let def = GraphIO::load_from_file("examples/grl_graph_flow.json")?;
17
18 // Create custom executor with GRL-powered nodes
19 let mut executor = Executor::new();
20
21 // Input validation with GRL
22 executor.register_node(Box::new(RuleNode::new(
23 "input_validation",
24 "loan_amount > 0 && loan_amount <= 1000000"
25 )));
26
27 // Fetch customer data
28 executor.register_node(Box::new(DBNode::new(
29 "fetch_customer",
30 "SELECT * FROM customers WHERE id = ?"
31 )));
32
33 // Risk assessment with complex GRL rules
34 executor.register_node(Box::new(RuleNode::new(
35 "risk_assessment",
36 "credit_score >= 600 && income >= loan_amount * 3"
37 )));
38
39 // Fraud detection AI
40 executor.register_node(Box::new(AINode::new(
41 "fraud_detection",
42 "Analyze transaction patterns for fraud indicators"
43 )));
44
45 // Final approval decision
46 executor.register_node(Box::new(RuleNode::new(
47 "approval_decision",
48 "risk_score < 50 && fraud_score < 30"
49 )));
50
51 // Notification
52 executor.register_node(Box::new(AINode::new(
53 "notification",
54 "Generate approval/rejection notification email"
55 )));
56
57 // Create graph with initial context
58 let mut graph = Graph::new(def);
59
60 // Set application data
61 graph.context.data.insert("loan_amount".to_string(), json!(50000));
62 graph.context.data.insert("credit_score".to_string(), json!(720));
63 graph.context.data.insert("income".to_string(), json!(180000));
64 graph.context.data.insert("customer_id".to_string(), json!(12345));
65
66 println!("Application Data:");
67 println!(" Loan Amount: $50,000");
68 println!(" Credit Score: 720");
69 println!(" Annual Income: $180,000");
70 println!(" Customer ID: 12345\n");
71
72 // Execute the graph
73 println!("Processing loan application through GRL-powered workflow...\n");
74 executor.execute(&mut graph).await?;
75
76 // Display results
77 println!("\n=== Application Results ===\n");
78
79 if let Some(validation) = graph.context.data.get("input_validation_result") {
80 println!("✓ Input Validation: {}", validation);
81 }
82
83 if let Some(customer) = graph.context.data.get("fetch_customer_result") {
84 println!("✓ Customer Data Retrieved");
85 }
86
87 if let Some(risk) = graph.context.data.get("risk_assessment_result") {
88 println!("✓ Risk Assessment: {}", risk);
89 }
90
91 if let Some(fraud) = graph.context.data.get("fraud_detection_result") {
92 println!("✓ Fraud Detection Completed");
93 }
94
95 if let Some(decision) = graph.context.data.get("approval_decision_result") {
96 println!("✓ Approval Decision: {}", decision);
97 }
98
99 println!("\n=== GRL-Powered Workflow Complete ===");
100
101 // Demonstrate standalone GRL engine
102 println!("\n=== Bonus: Advanced GRL Rules ===\n");
103
104 let mut grl_engine = RuleEngine::new();
105
106 let advanced_rules = r#"
107rule "HighValueLoan" salience 100 {
108 when
109 loan_amount > 100000 && credit_score < 750
110 then
111 requires_manual_review = true;
112 approval_tier = "senior";
113}
114
115rule "StandardApproval" salience 50 {
116 when
117 loan_amount <= 100000 && credit_score >= 650
118 then
119 auto_approve = true;
120 approval_tier = "standard";
121}
122
123rule "RiskMitigation" salience 25 {
124 when
125 debt_to_income_ratio > 0.4
126 then
127 requires_collateral = true;
128 interest_rate_adjustment = 1.5;
129}
130"#;
131
132 grl_engine.add_grl_rule(advanced_rules)?;
133
134 println!("Advanced GRL rules loaded:");
135 println!(" - High Value Loan Review");
136 println!(" - Standard Approval Process");
137 println!(" - Risk Mitigation Measures");
138
139 println!("\n✅ All systems operational with rust-rule-engine integration!");
140
141 Ok(())
142}

Trait Implementations§
Auto Trait Implementations§
impl Freeze for Executor
impl !RefUnwindSafe for Executor
impl Send for Executor
impl Sync for Executor
impl Unpin for Executor
impl !UnwindSafe for Executor
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more