Struct AdaptiveStreamingEngine 

Source
pub struct AdaptiveStreamingEngine { /* private fields */ }

Advanced adaptive streaming processor

Implementations§

Source§

impl AdaptiveStreamingEngine

Source

pub fn new(config: AdaptiveStreamConfig) -> Self

Create a new adaptive streaming engine
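
A minimal construction sketch, assuming AdaptiveStreamConfig and AdaptiveStreamingEngine are imported from this crate:

// Build an engine from the default streaming configuration.
// A customized AdaptiveStreamConfig can be passed instead of the default.
let config = AdaptiveStreamConfig::default();
let mut engine = AdaptiveStreamingEngine::new(config);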

Source

pub fn process_stream(&mut self, chunk: StreamChunk) -> Result<Vec<Dataset>>

Process incoming data stream

Examples found in repository
examples/advanced_showcase.rs (line 255)
200 fn demonstrate_adaptive_streaming(dataset: &Dataset) -> Result<(), Box<dyn std::error::Error>> {
201    println!("\n🌊 Adaptive Streaming Demonstration");
202    println!("===================================");
203
204    // Configure streaming engine
205    let config = AdaptiveStreamConfig::default();
206
207    println!("🔧 Initializing adaptive streaming engine...");
208    let mut engine = create_adaptive_engine_with_config(config);
209
210    // Simulate streaming data
211    println!("📡 Simulating data stream...");
212    let data = &dataset.data;
213    let chunksize = 20;
214    let num_chunks = (data.nrows() / chunksize).min(10); // Limit for demo
215
216    let mut total_processed = 0;
217    let start_time = Instant::now();
218
219    for i in 0..num_chunks {
220        let start_row = i * chunksize;
221        let end_row = (start_row + chunksize).min(data.nrows());
222
223        // Create chunk from dataset slice
224        let chunkdata = data
225            .slice(scirs2_core::ndarray::s![start_row..end_row, ..])
226            .to_owned();
227
228        let chunk = StreamChunk {
229            data: chunkdata,
230            timestamp: Instant::now(),
231            metadata: ChunkMetadata {
232                source_id: format!("demo_source_{i}"),
233                sequence_number: i as u64,
234                characteristics: DataCharacteristics {
235                    moments: StatisticalMoments {
236                        mean: 0.0,
237                        variance: 1.0,
238                        skewness: 0.0,
239                        kurtosis: 0.0,
240                    },
241                    entropy: 1.0,
242                    trend: TrendIndicators {
243                        linear_slope: 0.1,
244                        trend_strength: 0.5,
245                        direction: TrendDirection::Increasing,
246                        seasonality: 0.2,
247                    },
248                    anomaly_score: 0.1,
249                },
250            },
251            quality_score: 0.9,
252        };
253
254        // Process chunk
255        let results = engine.process_stream(chunk)?;
256        total_processed += results.len();
257
258        if !results.is_empty() {
259            println!(
260                "   Processed batch {}: {} datasets generated",
261                i + 1,
262                results.len()
263            );
264        }
265    }
266
267    let streaming_time = start_time.elapsed();
268
269    println!("   Streaming completed in: {streaming_time:?}");
270    println!("   Total datasets processed: {total_processed}");
271
272    // Get performance metrics
273    println!("📈 Getting performance metrics...");
274    let perf_metrics = engine.get_performance_metrics()?;
275    println!("   Processing Latency: {:?}", perf_metrics.latency);
276    println!("   Throughput: {:.1} chunks/sec", perf_metrics.throughput);
277    println!(
278        "   Memory Efficiency: {:.1}%",
279        perf_metrics.memory_efficiency * 100.0
280    );
281
282    // Get quality metrics
283    let quality_metrics = engine.get_quality_metrics()?;
284    println!("   Quality Metrics:");
285    println!(
286        "     Integrity: {:.1}%",
287        quality_metrics.integrity_score * 100.0
288    );
289    println!(
290        "     Completeness: {:.1}%",
291        quality_metrics.completeness_score * 100.0
292    );
293    println!(
294        "     Overall Quality: {:.1}%",
295        quality_metrics.overall_score * 100.0
296    );
297
298    // Get buffer statistics
299    let buffer_stats = engine.get_buffer_statistics()?;
300    println!("   Buffer Statistics:");
301    println!("     Utilization: {:.1}%", buffer_stats.utilization * 100.0);
302    println!("     Memory Usage: {} bytes", buffer_stats.memory_usage);
303
304    Ok(())
305 }
Source

pub fn get_performance_metrics(&self) -> Result<PerformanceMetrics>

Get current performance metrics
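
A short usage sketch; the metric field names follow the repository example, and the ? operator assumes a caller that returns a compatible Result:

// Inspect the engine's current performance counters.
let perf_metrics = engine.get_performance_metrics()?;
println!("latency: {:?}", perf_metrics.latency);
println!("throughput: {:.1} chunks/sec", perf_metrics.throughput);
println!("memory efficiency: {:.1}%", perf_metrics.memory_efficiency * 100.0);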

Examples found in repository
examples/advanced_showcase.rs (line 274); see the full demonstrate_adaptive_streaming example listed above under process_stream.
Source

pub fn get_quality_metrics(&self) -> Result<QualityMetrics>

Get current quality metrics
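
A short usage sketch, with field names taken from the repository example:

// Summarize stream quality as percentages.
let quality_metrics = engine.get_quality_metrics()?;
println!("integrity: {:.1}%", quality_metrics.integrity_score * 100.0);
println!("completeness: {:.1}%", quality_metrics.completeness_score * 100.0);
println!("overall: {:.1}%", quality_metrics.overall_score * 100.0);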

Examples found in repository
examples/advanced_showcase.rs (line 283); see the full demonstrate_adaptive_streaming example listed above under process_stream.
Source

pub fn get_buffer_statistics(&self) -> Result<BufferStatistics>

Get buffer statistics
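
A short usage sketch, with field names taken from the repository example:

// Check how full the internal buffer is and how much memory it holds.
let buffer_stats = engine.get_buffer_statistics()?;
println!("utilization: {:.1}%", buffer_stats.utilization * 100.0);
println!("memory usage: {} bytes", buffer_stats.memory_usage);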

Examples found in repository
examples/advanced_showcase.rs (line 299); see the full demonstrate_adaptive_streaming example listed above under process_stream.
Source§

impl AdaptiveStreamingEngine

Enhanced Adaptive Streaming Engine with Quantum and Neural Optimization

Source

pub fn with_quantum_neural_optimization(config: AdaptiveStreamConfig) -> Self

Create an advanced streaming engine with quantum and neural optimization
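
A minimal construction sketch, under the same import assumptions as new:

// Build an engine with the quantum and neural optimization paths enabled.
let config = AdaptiveStreamConfig::default();
let mut engine = AdaptiveStreamingEngine::with_quantum_neural_optimization(config);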

Source

pub fn quantum_optimize(&mut self, performance_metrics: &PerformanceMetrics) -> Result<OptimizationConfig>

Optimize using quantum-inspired algorithms
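
A usage sketch that feeds the engine's own metrics back into the optimizer; the ? operator assumes a caller that returns a compatible Result:

// Run one quantum-inspired optimization step based on current metrics.
let perf_metrics = engine.get_performance_metrics()?;
let optimized_config: OptimizationConfig = engine.quantum_optimize(&perf_metrics)?;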

Source

pub fn neural_adapt(&mut self, features: &Array1<f64>, targets: &Array1<f64>) -> Result<LearningStatistics>

Learn and adapt using the neural system
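
A usage sketch with toy feature and target vectors; Array1 is assumed to be the scirs2_core::ndarray re-export used in the repository example:

use scirs2_core::ndarray::Array1;

// Adapt the engine from a small batch of observed feature/target pairs.
let features = Array1::from_vec(vec![0.1, 0.5, 0.9]);
let targets = Array1::from_vec(vec![0.2, 0.4, 0.8]);
let learning_stats: LearningStatistics = engine.neural_adapt(&features, &targets)?;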

Source

pub fn predict_future_performance(&self, horizon: Duration) -> Result<PerformancePredictionPoint>

Predict future performance using advanced models
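
A usage sketch requesting a prediction over a 30-second horizon:

use std::time::Duration;

// Forecast performance half a minute ahead; the result is a
// PerformancePredictionPoint describing the predicted state.
let prediction = engine.predict_future_performance(Duration::from_secs(30))?;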

Auto Trait Implementations§

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T> IntoEither for T

Source§

fn into_either(self, into_left: bool) -> Either<Self, Self>

Converts self into a Left variant of Either<Self, Self> if into_left is true. Converts self into a Right variant of Either<Self, Self> otherwise. Read more
Source§

fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
where F: FnOnce(&Self) -> bool,

Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true. Converts self into a Right variant of Either<Self, Self> otherwise. Read more
Source§

impl<T> Pointable for T

Source§

const ALIGN: usize

The alignment of the pointer.
Source§

type Init = T

The type for initializers.
Source§

unsafe fn init(init: <T as Pointable>::Init) -> usize

Initializes a value with the given initializer. Read more
Source§

unsafe fn deref<'a>(ptr: usize) -> &'a T

Dereferences the given pointer. Read more
Source§

unsafe fn deref_mut<'a>(ptr: usize) -> &'a mut T

Mutably dereferences the given pointer. Read more
Source§

unsafe fn drop(ptr: usize)

Drops the object pointed to by the given pointer. Read more
Source§

impl<T> Same for T

Source§

type Output = T

Should always be Self
Source§

impl<SS, SP> SupersetOf<SS> for SP
where SS: SubsetOf<SP>,

Source§

fn to_subset(&self) -> Option<SS>

The inverse inclusion map: attempts to construct self from the equivalent element of its superset. Read more
Source§

fn is_in_subset(&self) -> bool

Checks if self is actually part of its subset T (and can be converted to it).
Source§

fn to_subset_unchecked(&self) -> SS

Use with care! Same as self.to_subset but without any property checks. Always succeeds.
Source§

fn from_subset(element: &SS) -> SP

The inclusion map: converts self to the equivalent element of its superset.
Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
Source§

impl<V, T> VZip<V> for T
where V: MultiLane<T>,

Source§

fn vzip(self) -> V