// adaptive_pipeline_domain/value_objects/pipeline_requirements.rs
// /////////////////////////////////////////////////////////////////////////////
// Adaptive Pipeline
// Copyright (c) 2025 Michael Gardner, A Bit of Help, Inc.
// SPDX-License-Identifier: BSD-3-Clause
// See LICENSE file in the project root.
// /////////////////////////////////////////////////////////////////////////////

//! # Pipeline Requirements Value Object
//!
//! This module provides the [`PipelineRequirements`] value object for
//! specifying performance, security, and processing requirements for adaptive
//! pipeline operations. It enables configuration-driven optimization and
//! ensures consistent requirement enforcement across the entire pipeline
//! system.
//!
//! ## Features
//!
//! - **Performance Configuration**: Throughput, memory, and processing
//!   requirements
//! - **Security Settings**: Encryption and compression requirement
//!   specification
//! - **Resource Constraints**: Memory limits and chunk size optimization
//! - **Adaptive Processing**: Parallel processing and optimization flags
//! - **Serialization Support**: Full serde compatibility for configuration
//!   persistence
//!
//! ## Architecture
//!
//! The `PipelineRequirements` follows Domain-Driven Design principles as a
//! value object, representing immutable configuration requirements that guide
//! pipeline behavior. It integrates with the pipeline's optimization engine to
//! ensure operations meet specified performance and security criteria.

use serde::{Deserialize, Serialize};

38/// Pipeline requirements for optimization, security, and performance
39/// configuration.
40///
41/// `PipelineRequirements` is a value object that encapsulates all configuration
42/// requirements for pipeline operations, enabling adaptive optimization based
43/// on performance targets, security needs, and resource constraints.
44///
45/// ## Key Features
46///
47/// - **Security Configuration**: Encryption and compression requirement flags
48/// - **Performance Tuning**: Throughput targets and memory constraints
49/// - **Resource Management**: Chunk size and memory limit specification
50/// - **Processing Mode**: Parallel vs sequential processing configuration
51/// - **Adaptive Optimization**: Requirements guide automatic optimization
52/// decisions
53///
54/// ## Configuration Categories
55///
56/// ### Security Requirements
57/// - `compression_enabled`: Whether to apply compression to reduce
58/// storage/bandwidth
59/// - `encryption_enabled`: Whether to encrypt data for security compliance
60///
61/// ### Performance Requirements
62/// - `parallel_processing`: Enable multi-threaded processing for performance
63/// - `target_throughput_mbps`: Target processing speed in megabytes per second
64///
65/// ### Resource Requirements
66/// - `chunk_size_mb`: Processing chunk size for memory and I/O optimization
67/// - `max_memory_mb`: Maximum memory usage limit for resource-constrained
68/// environments
69///
70/// ## Usage Patterns
71///
72///
73/// ## Integration with Pipeline System
74///
75/// The requirements integrate with various pipeline components:
76///
77/// - **Optimization Engine**: Uses requirements to select optimal algorithms
78/// - **Resource Manager**: Enforces memory and processing constraints
79/// - **Security Layer**: Applies encryption and compression based on flags
80/// - **Performance Monitor**: Validates actual performance against targets
81///
82/// ## Thread Safety
83///
84/// `PipelineRequirements` is thread-safe through immutability. All fields are
85/// read-only after construction, making it safe to share across threads without
86/// synchronization.
87///
88/// ## Cross-Language Compatibility
89///
90/// The requirements structure maps well to other languages:
91///
92/// - **JSON**: Direct serialization for configuration files and APIs
93/// - **Go**: Struct with similar field types and JSON tags
94/// - **Python**: Dataclass with type hints for configuration management
95/// - **YAML**: Configuration file format for deployment settings
96///
97/// ## Performance Considerations
98///
99/// - Lightweight value object with minimal memory overhead
100/// - Efficient serialization through serde derive macros
101/// - Immutable design eliminates defensive copying
102/// - Optional fields reduce memory usage when constraints are not specified
103#[derive(Debug, Clone, Serialize, Deserialize)]
104pub struct PipelineRequirements {
105 /// Whether compression should be applied to reduce storage and bandwidth
106 /// usage
107 pub compression_enabled: bool,
108
109 /// Whether encryption should be applied for data security and compliance
110 pub encryption_enabled: bool,
111
112 /// Whether parallel processing should be used to improve performance
113 pub parallel_processing: bool,
114
115 /// Processing chunk size in megabytes for memory and I/O optimization
116 pub chunk_size_mb: usize,
117
118 /// Maximum memory usage limit in megabytes (None = no limit)
119 pub max_memory_mb: Option<usize>,
120
121 /// Target throughput in megabytes per second (None = no target)
122 pub target_throughput_mbps: Option<f64>,
123}
125impl Default for PipelineRequirements {
126 /// Creates default pipeline requirements optimized for security and
127 /// performance.
128 ///
129 /// The default configuration provides a balanced approach suitable for most
130 /// production environments, emphasizing security while maintaining good
131 /// performance.
132 ///
133 /// # Default Values
134 ///
135 /// - `compression_enabled`: `true` - Reduces storage and bandwidth usage
136 /// - `encryption_enabled`: `true` - Ensures data security by default
137 /// - `parallel_processing`: `true` - Leverages multi-core systems
138 /// - `chunk_size_mb`: `1` - Conservative chunk size for memory efficiency
139 /// - `max_memory_mb`: `None` - No memory limit (system-dependent)
140 /// - `target_throughput_mbps`: `None` - No specific throughput target
141 ///
142 /// # Examples
143 ///
144 ///
145 /// # Security by Default
146 ///
147 /// The default configuration follows security best practices by enabling
148 /// both compression and encryption. This ensures that data is protected
149 /// and storage is optimized unless explicitly configured otherwise.
150 ///
151 /// # Performance Considerations
152 ///
153 /// While security is prioritized, the defaults also enable parallel
154 /// processing to maintain good performance. The conservative 1MB chunk
155 /// size balances memory usage with processing efficiency.
156 fn default() -> Self {
157 Self {
158 compression_enabled: true,
159 encryption_enabled: true,
160 parallel_processing: true,
161 chunk_size_mb: 1,
162 max_memory_mb: None,
163 target_throughput_mbps: None,
164 }
165 }
166}