dataflow_rs/engine/
mod.rs

/*!
# Engine Module

This module implements the core workflow engine for dataflow-rs. The engine processes
messages through workflows composed of tasks, providing a flexible and extensible
data processing pipeline.

## Key Components

- **Engine**: The main engine that processes messages through workflows
- **Workflow**: A collection of tasks with conditions that determine when they should be applied
- **Task**: An individual processing unit that performs a specific function on a message
- **AsyncFunctionHandler**: A trait implemented by task handlers to define custom async processing logic
- **Message**: The data structure that flows through the engine, with data, metadata, and processing results
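
## Example

A minimal end-to-end sketch. `Workflow::from_json` and `Message::new` are
assumptions based on the crate's examples; adjust to the actual constructors
if they differ:

```ignore
use dataflow_rs::{Engine, Workflow};
use dataflow_rs::engine::message::Message;
use serde_json::json;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut engine = Engine::new();

    // A workflow definition: an id, an optional JSONLogic condition, and tasks.
    let workflow = Workflow::from_json(r#"{"id": "example", "tasks": []}"#)?;
    engine.add_workflow(&workflow);

    // Messages carry data, metadata, and an audit trail through the engine.
    let mut message = Message::new(&json!({"hello": "world"}));
    engine.process_message(&mut message).await?;
    Ok(())
}
```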
*/

pub mod error;
pub mod functions;
pub mod message;
pub mod task;
pub mod workflow;

// Re-export key types for easier access
pub use error::{DataflowError, ErrorInfo, Result};
pub use functions::AsyncFunctionHandler;
pub use message::Message;
pub use task::Task;
pub use workflow::Workflow;

// Re-export the jsonlogic library under our namespace
pub use datalogic_rs as jsonlogic;

use chrono::Utc;
use datalogic_rs::DataLogic;
use log::{debug, error, info, warn};
use message::AuditTrail;
use serde_json::{json, Map, Number, Value};
use std::{cell::RefCell, collections::HashMap};
use tokio::time::sleep;

// Thread-local DataLogic instance to avoid mutex contention
thread_local! {
    static THREAD_LOCAL_DATA_LOGIC: RefCell<DataLogic> = RefCell::new(DataLogic::new());
}

/// Configuration for retry behavior
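///
/// With the default settings (`retry_delay_ms = 1000`, `use_backoff = true`),
/// the sleep before retry `n` (counting from 0) is `1000 * 2^n` ms:
/// 1000, 2000, 4000, ... matching the computation in `execute_task_static`.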
#[derive(Debug, Clone)]
pub struct RetryConfig {
    /// Maximum number of retries
    pub max_retries: u32,
    /// Delay between retries in milliseconds
    pub retry_delay_ms: u64,
    /// Whether to use exponential backoff
    pub use_backoff: bool,
}

impl Default for RetryConfig {
    fn default() -> Self {
        Self {
            max_retries: 3,
            retry_delay_ms: 1000,
            use_backoff: true,
        }
    }
}

/// Engine that processes messages through workflows using non-blocking async IO.
///
/// This engine is optimized for IO-bound workloads like HTTP requests, database access,
/// and file operations. It uses Tokio for efficient async task execution.
///
/// Workflows are processed sequentially to ensure that later workflows can depend
/// on the results of earlier workflows.
pub struct Engine {
    /// Registry of available workflows
    workflows: HashMap<String, Workflow>,
    /// Registry of function handlers that can be executed by tasks
    task_functions: HashMap<String, Box<dyn AsyncFunctionHandler + Send + Sync>>,
    /// Configuration for retry behavior
    retry_config: RetryConfig,
}

impl Default for Engine {
    fn default() -> Self {
        Self::new()
    }
}

impl Engine {
    /// Creates a new Engine instance with built-in function handlers pre-registered.
    ///
    /// # Example
    ///
    /// ```
    /// use dataflow_rs::Engine;
    ///
    /// let engine = Engine::new();
    /// ```
    pub fn new() -> Self {
        let mut engine = Self {
            workflows: HashMap::new(),
            task_functions: HashMap::new(),
            retry_config: RetryConfig::default(),
        };

        // Register built-in function handlers
        for (name, handler) in functions::builtins::get_all_functions() {
            engine.register_task_function(name, handler);
        }

        engine
    }

    /// Creates a new engine instance without any pre-registered function handlers.
    pub fn new_empty() -> Self {
        Self {
            task_functions: HashMap::new(),
            workflows: HashMap::new(),
            retry_config: RetryConfig::default(),
        }
    }

    /// Configures retry behavior, consuming and returning the engine for
    /// builder-style chaining.
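    ///
    /// # Example
    ///
    /// A sketch of builder-style configuration (the `dataflow_rs::engine`
    /// path to `RetryConfig` is an assumption about how the crate exposes
    /// this module):
    ///
    /// ```ignore
    /// use dataflow_rs::Engine;
    /// use dataflow_rs::engine::RetryConfig;
    ///
    /// let engine = Engine::new().with_retry_config(RetryConfig {
    ///     max_retries: 5,
    ///     retry_delay_ms: 250,
    ///     use_backoff: false,
    /// });
    /// ```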
    pub fn with_retry_config(mut self, config: RetryConfig) -> Self {
        self.retry_config = config;
        self
    }

    /// Adds a workflow to the engine.
    ///
    /// Invalid workflows are logged and skipped rather than returned as errors.
    ///
    /// # Arguments
    ///
    /// * `workflow` - The workflow to add
    pub fn add_workflow(&mut self, workflow: &Workflow) {
        if let Err(e) = workflow.validate() {
            error!("Invalid workflow {}: {:?}", workflow.id, e);
        } else {
            self.workflows.insert(workflow.id.clone(), workflow.clone());
        }
    }

    /// Registers a custom function handler with the engine.
    ///
    /// # Arguments
    ///
    /// * `name` - The name of the function handler
    /// * `handler` - The function handler implementation
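    ///
    /// # Example
    ///
    /// A minimal custom handler sketch. It assumes the `async_trait` crate
    /// and the `(status_code, changes)` return shape used by this engine's
    /// task loop; check the `functions` module for the exact trait
    /// definition and the `Change` type:
    ///
    /// ```ignore
    /// use async_trait::async_trait;
    /// use dataflow_rs::{AsyncFunctionHandler, Engine};
    /// use dataflow_rs::engine::error::Result;
    /// use dataflow_rs::engine::message::{Change, Message};
    /// use serde_json::{json, Value};
    ///
    /// struct Uppercase;
    ///
    /// #[async_trait]
    /// impl AsyncFunctionHandler for Uppercase {
    ///     async fn execute(&self, message: &mut Message, _input: &Value) -> Result<(usize, Vec<Change>)> {
    ///         // Mutate the message, then report an HTTP-like status code
    ///         // and the list of changes for the audit trail.
    ///         message.data["greeting"] = json!("HELLO");
    ///         Ok((200, vec![]))
    ///     }
    /// }
    ///
    /// let mut engine = Engine::new();
    /// engine.register_task_function("uppercase".to_string(), Box::new(Uppercase));
    /// ```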
    pub fn register_task_function(
        &mut self,
        name: String,
        handler: Box<dyn AsyncFunctionHandler + Send + Sync>,
    ) {
        self.task_functions.insert(name, handler);
    }

    /// Checks whether a function handler with the given name is registered.
    pub fn has_function(&self, name: &str) -> bool {
        self.task_functions.contains_key(name)
    }

    /// Processes a message through workflows that match their conditions.
    ///
    /// This async method:
    /// 1. Iterates through workflows sequentially in deterministic order (sorted by priority, then ID)
    /// 2. Evaluates conditions for each workflow right before execution
    /// 3. Executes matching workflows one after another (not concurrently)
    /// 4. Updates the message with processing results and audit trail
    ///
    /// Workflows are executed sequentially because later workflows may depend
    /// on the results of earlier workflows, and their conditions may change
    /// based on modifications made by previous workflows.
    ///
    /// # Arguments
    ///
    /// * `message` - The message to process
    ///
    /// # Returns
    ///
    /// * `Result<()>` - Success or an error if processing failed
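    ///
    /// # Example
    ///
    /// A sketch of driving the engine from an async context. `Message::new`
    /// taking initial JSON data is an assumption based on the crate's
    /// examples:
    ///
    /// ```ignore
    /// use dataflow_rs::Engine;
    /// use dataflow_rs::engine::message::Message;
    /// use serde_json::json;
    ///
    /// # async fn run() -> dataflow_rs::engine::error::Result<()> {
    /// let engine = Engine::new();
    /// let mut message = Message::new(&json!({"customer_id": 42}));
    /// engine.process_message(&mut message).await?;
    /// println!("audit entries: {}", message.audit_trail.len());
    /// # Ok(())
    /// # }
    /// ```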
    pub async fn process_message(&self, message: &mut Message) -> Result<()> {
        debug!(
            "Processing message {} sequentially through workflows",
            message.id
        );

        // Collect and sort workflows by priority, then ID, to ensure deterministic execution order
        // This prevents non-deterministic behavior caused by HashMap iteration order
        let mut sorted_workflows: Vec<_> = self.workflows.iter().collect();
        sorted_workflows.sort_by_key(|(id, workflow)| (workflow.priority, id.as_str()));

        // Process workflows sequentially in sorted order, evaluating conditions just before execution
        for (_, workflow) in sorted_workflows {
            // Evaluate workflow condition using current message state
            let condition = workflow.condition.clone().unwrap_or(Value::Bool(true));

            if !self
                .evaluate_condition(&condition, &message.metadata)
                .await?
            {
                debug!("Workflow {} skipped - condition not met", workflow.id);
                continue;
            }

            info!("Processing workflow {}", workflow.id);

            Self::process_workflow(workflow, message, &self.task_functions, &self.retry_config)
                .await;

            info!("Completed processing workflow {}", workflow.id);

            // Errors inside a workflow are recorded on the message; for now we
            // continue processing the remaining workflows even if one fails.
        }

        debug!(
            "Completed processing all workflows for message {}",
            message.id
        );
        Ok(())
    }

    /// Processes a single workflow, executing its tasks sequentially.
    async fn process_workflow(
        workflow: &Workflow,
        message: &mut Message,
        task_functions: &HashMap<String, Box<dyn AsyncFunctionHandler + Send + Sync>>,
        retry_config: &RetryConfig,
    ) {
        let workflow_id = workflow.id.clone();
        let mut workflow_errors = Vec::new();

        // Process tasks SEQUENTIALLY within this workflow.
        // IMPORTANT: Task order matters! Results from previous tasks are used by subsequent
        // tasks, so we intentionally process tasks one after another rather than concurrently.
        for task in &workflow.tasks {
            let task_condition = task.condition.clone().unwrap_or(Value::Bool(true));

            // Evaluate task condition using thread-local DataLogic
            let should_execute = THREAD_LOCAL_DATA_LOGIC.with(|data_logic_cell| {
                let mut data_logic = data_logic_cell.borrow_mut();
                data_logic.reset_arena();
                data_logic
                    .evaluate_json(&task_condition, &message.metadata, None)
                    .map_err(|e| {
                        DataflowError::LogicEvaluation(format!("Error evaluating condition: {}", e))
                    })
                    .map(|result| result.as_bool().unwrap_or(false))
            });

            // Handle condition evaluation result
            let should_execute = match should_execute {
                Ok(result) => result,
                Err(e) => {
                    workflow_errors.push(ErrorInfo::new(
                        Some(workflow_id.clone()),
                        Some(task.id.clone()),
                        e.clone(),
                    ));
                    false
                }
            };

            if !should_execute {
                debug!("Task {} skipped - condition not met", task.id);
                continue;
            }

            // Execute task if we have a handler
            if let Some(function) = task_functions.get(&task.function.name) {
                let task_id = task.id.clone();
                let function_input = task.function.input.clone();

                // Execute this task (with retries)
                match Self::execute_task_static(
                    &task_id,
                    &workflow_id,
                    message,
                    &function_input,
                    function.as_ref(),
                    retry_config,
                )
                .await
                {
                    Ok(_) => {
                        debug!("Task {} completed successfully", task_id);
                    }
                    Err(error) => {
                        workflow_errors.push(ErrorInfo::new(
                            Some(workflow_id.clone()),
                            Some(task_id.clone()),
                            error.clone(),
                        ));

                        // Break the task sequence if a task fails
                        break;
                    }
                }
            } else {
                let error =
                    DataflowError::Workflow(format!("Function '{}' not found", task.function.name));

                workflow_errors.push(ErrorInfo::new(
                    Some(workflow_id.clone()),
                    Some(task.id.clone()),
                    error,
                ));

                // Break the task sequence if a function is not found
                break;
            }
        }

        // Add any errors encountered to the message
        message.errors.extend(workflow_errors);
    }

    /// Executes a single task, retrying on failure according to `retry_config`,
    /// and records an audit trail entry on success.
    async fn execute_task_static(
        task_id: &str,
        workflow_id: &str,
        message: &mut Message,
        input_json: &Value,
        function: &dyn AsyncFunctionHandler,
        retry_config: &RetryConfig,
    ) -> Result<()> {
        info!("Executing task {} in workflow {}", task_id, workflow_id);

        let mut last_error = None;
        let mut retry_count = 0;

        // Try executing the task with retries
        while retry_count <= retry_config.max_retries {
            match function.execute(message, input_json).await {
                Ok((status_code, changes)) => {
                    // Success! Record audit trail and return
                    message.audit_trail.push(AuditTrail {
                        workflow_id: workflow_id.to_string(),
                        task_id: task_id.to_string(),
                        timestamp: Utc::now().to_rfc3339(),
                        changes,
                        status_code,
                    });

                    info!("Task {} completed with status {}", task_id, status_code);

                    // Add progress metadata
                    let mut progress = Map::new();
                    progress.insert("task_id".to_string(), Value::String(task_id.to_string()));
                    progress.insert(
                        "workflow_id".to_string(),
                        Value::String(workflow_id.to_string()),
                    );
                    progress.insert(
                        "status_code".to_string(),
                        Value::Number(Number::from(status_code)),
                    );
                    progress.insert(
                        "timestamp".to_string(),
                        Value::String(Utc::now().to_rfc3339()),
                    );

                    if retry_count > 0 {
                        progress.insert(
                            "retries".to_string(),
                            Value::Number(Number::from(retry_count)),
                        );
                    }

                    message.metadata["progress"] = json!(progress);

                    return Ok(());
                }
                Err(e) => {
                    last_error = Some(e.clone());

                    if retry_count < retry_config.max_retries {
                        warn!(
                            "Task {} execution failed, retry {}/{}: {:?}",
                            task_id,
                            retry_count + 1,
                            retry_config.max_retries,
                            e
                        );

                        // Calculate delay with optional exponential backoff
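                        // e.g. with defaults: 1000ms * 2^0, 2^1, 2^2 -> 1s, 2s, 4s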
                        let delay = if retry_config.use_backoff {
                            retry_config.retry_delay_ms * (2_u64.pow(retry_count))
                        } else {
                            retry_config.retry_delay_ms
                        };

                        // Use tokio's non-blocking sleep
                        sleep(std::time::Duration::from_millis(delay)).await;

                        retry_count += 1;
                    } else {
                        break;
                    }
                }
            }
        }

        // If we're here, all retries failed
        let error = last_error.unwrap_or_else(|| {
            DataflowError::Unknown("Unknown error during task execution".to_string())
        });

        error!(
            "Task {} in workflow {} failed after {} retries: {:?}",
            task_id, workflow_id, retry_count, error
        );

        Err(error)
    }

    /// Evaluates a condition against message data using DataLogic (JSONLogic).
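    ///
    /// Boolean literals short-circuit; anything else is treated as a JSONLogic
    /// expression. For example, a condition like
    /// `{"==": [{"var": "type"}, "order"]}` (a hypothetical rule) would be
    /// evaluated against the supplied data and must yield a boolean.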
    async fn evaluate_condition(&self, condition: &Value, data: &Value) -> Result<bool> {
        // For simple boolean conditions, short-circuit
        if let Value::Bool(b) = condition {
            return Ok(*b);
        }

        // Use the thread-local DataLogic instance instead of a mutex-protected one
        THREAD_LOCAL_DATA_LOGIC.with(|data_logic_cell| {
            let mut data_logic = data_logic_cell.borrow_mut();
            data_logic.reset_arena();
            data_logic
                .evaluate_json(condition, data, None)
                .map_err(|e| {
                    DataflowError::LogicEvaluation(format!("Error evaluating condition: {}", e))
                })
                .map(|result| result.as_bool().unwrap_or(false))
        })
    }
}