pub fn create_adaptive_engine_with_config(
_config: AdaptiveStreamConfig,
) -> AdaptiveStreamingEngine
Convenience function to create an adaptive streaming engine with a custom configuration.
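A minimal usage sketch based on the repository example below (assumed: the configuration and engine types are importable from this crate; the concrete fields of AdaptiveStreamConfig are not shown on this page, so the default configuration is used):

    // Build a configuration, adjust it as needed, and create the engine.
    let config = AdaptiveStreamConfig::default();
    let mut engine = create_adaptive_engine_with_config(config);
    // The engine then consumes StreamChunk values via engine.process_stream(chunk),
    // as demonstrated in the scraped example below.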
Examples found in repository
examples/advanced_showcase.rs (line 208)
200 fn demonstrate_adaptive_streaming(dataset: &Dataset) -> Result<(), Box<dyn std::error::Error>> {
201     println!("\nAdaptive Streaming Demonstration");
202     println!("===================================");
203
204     // Configure streaming engine
205     let config = AdaptiveStreamConfig::default();
206
207     println!("Initializing adaptive streaming engine...");
208     let mut engine = create_adaptive_engine_with_config(config);
209
210     // Simulate streaming data
211     println!("Simulating data stream...");
212     let data = &dataset.data;
213     let chunksize = 20;
214     let num_chunks = (data.nrows() / chunksize).min(10); // Limit for demo
215
216     let mut total_processed = 0;
217     let start_time = Instant::now();
218
219     for i in 0..num_chunks {
220         let start_row = i * chunksize;
221         let end_row = (start_row + chunksize).min(data.nrows());
222
223         // Create chunk from dataset slice
224         let chunkdata = data
225             .slice(scirs2_core::ndarray::s![start_row..end_row, ..])
226             .to_owned();
227
228         let chunk = StreamChunk {
229             data: chunkdata,
230             timestamp: Instant::now(),
231             metadata: ChunkMetadata {
232                 source_id: format!("demo_source_{i}"),
233                 sequence_number: i as u64,
234                 characteristics: DataCharacteristics {
235                     moments: StatisticalMoments {
236                         mean: 0.0,
237                         variance: 1.0,
238                         skewness: 0.0,
239                         kurtosis: 0.0,
240                     },
241                     entropy: 1.0,
242                     trend: TrendIndicators {
243                         linear_slope: 0.1,
244                         trend_strength: 0.5,
245                         direction: TrendDirection::Increasing,
246                         seasonality: 0.2,
247                     },
248                     anomaly_score: 0.1,
249                 },
250             },
251             quality_score: 0.9,
252         };
253
254         // Process chunk
255         let results = engine.process_stream(chunk)?;
256         total_processed += results.len();
257
258         if !results.is_empty() {
259             println!(
260                 " Processed batch {}: {} datasets generated",
261                 i + 1,
262                 results.len()
263             );
264         }
265     }
266
267     let streaming_time = start_time.elapsed();
268
269     println!(" Streaming completed in: {streaming_time:?}");
270     println!(" Total datasets processed: {total_processed}");
271
272     // Get performance metrics
273     println!("Getting performance metrics...");
274     let perf_metrics = engine.get_performance_metrics()?;
275     println!(" Processing Latency: {:?}", perf_metrics.latency);
276     println!(" Throughput: {:.1} chunks/sec", perf_metrics.throughput);
277     println!(
278         " Memory Efficiency: {:.1}%",
279         perf_metrics.memory_efficiency * 100.0
280     );
281
282     // Get quality metrics
283     let quality_metrics = engine.get_quality_metrics()?;
284     println!(" Quality Metrics:");
285     println!(
286         " Integrity: {:.1}%",
287         quality_metrics.integrity_score * 100.0
288     );
289     println!(
290         " Completeness: {:.1}%",
291         quality_metrics.completeness_score * 100.0
292     );
293     println!(
294         " Overall Quality: {:.1}%",
295         quality_metrics.overall_score * 100.0
296     );
297
298     // Get buffer statistics
299     let buffer_stats = engine.get_buffer_statistics()?;
300     println!(" Buffer Statistics:");
301     println!(" Utilization: {:.1}%", buffer_stats.utilization * 100.0);
302     println!(" Memory Usage: {} bytes", buffer_stats.memory_usage);
303
304     Ok(())
305 }