pub struct AdaptiveStreamingEngine { /* private fields */ }
Expand description
Advanced adaptive streaming processor
Implementations§
Source§impl AdaptiveStreamingEngine
impl AdaptiveStreamingEngine
Source§ pub fn new(config: AdaptiveStreamConfig) -> Self
pub fn new(config: AdaptiveStreamConfig) -> Self
Create a new adaptive streaming engine
Source§ pub fn process_stream(&mut self, chunk: StreamChunk) -> Result<Vec<Dataset>>
pub fn process_stream(&mut self, chunk: StreamChunk) -> Result<Vec<Dataset>>
Process incoming data stream
Examples found in repository?
examples/advanced_showcase.rs (line 255)
200fn demonstrate_adaptive_streaming(dataset: &Dataset) -> Result<(), Box<dyn std::error::Error>> {
201 println!("\n🌊 Adaptive Streaming Demonstration");
202 println!("===================================");
203
204 // Configure streaming engine
205 let config = AdaptiveStreamConfig::default();
206
207 println!("🔧 Initializing adaptive streaming engine...");
208 let mut engine = create_adaptive_engine_with_config(config);
209
210 // Simulate streaming data
211 println!("📡 Simulating data stream...");
212 let data = &dataset.data;
213 let chunksize = 20;
214 let num_chunks = (data.nrows() / chunksize).min(10); // Limit for demo
215
216 let mut total_processed = 0;
217 let start_time = Instant::now();
218
219 for i in 0..num_chunks {
220 let start_row = i * chunksize;
221 let end_row = (start_row + chunksize).min(data.nrows());
222
223 // Create chunk from dataset slice
224 let chunkdata = data
225 .slice(scirs2_core::ndarray::s![start_row..end_row, ..])
226 .to_owned();
227
228 let chunk = StreamChunk {
229 data: chunkdata,
230 timestamp: Instant::now(),
231 metadata: ChunkMetadata {
232 source_id: format!("demo_source_{i}"),
233 sequence_number: i as u64,
234 characteristics: DataCharacteristics {
235 moments: StatisticalMoments {
236 mean: 0.0,
237 variance: 1.0,
238 skewness: 0.0,
239 kurtosis: 0.0,
240 },
241 entropy: 1.0,
242 trend: TrendIndicators {
243 linear_slope: 0.1,
244 trend_strength: 0.5,
245 direction: TrendDirection::Increasing,
246 seasonality: 0.2,
247 },
248 anomaly_score: 0.1,
249 },
250 },
251 quality_score: 0.9,
252 };
253
254 // Process chunk
255 let results = engine.process_stream(chunk)?;
256 total_processed += results.len();
257
258 if !results.is_empty() {
259 println!(
260 " Processed batch {}: {} datasets generated",
261 i + 1,
262 results.len()
263 );
264 }
265 }
266
267 let streaming_time = start_time.elapsed();
268
269 println!(" Streaming completed in: {streaming_time:?}");
270 println!(" Total datasets processed: {total_processed}");
271
272 // Get performance metrics
273 println!("📈 Getting performance metrics...");
274 let perf_metrics = engine.get_performance_metrics()?;
275 println!(" Processing Latency: {:?}", perf_metrics.latency);
276 println!(" Throughput: {:.1} chunks/sec", perf_metrics.throughput);
277 println!(
278 " Memory Efficiency: {:.1}%",
279 perf_metrics.memory_efficiency * 100.0
280 );
281
282 // Get quality metrics
283 let quality_metrics = engine.get_quality_metrics()?;
284 println!(" Quality Metrics:");
285 println!(
286 " Integrity: {:.1}%",
287 quality_metrics.integrity_score * 100.0
288 );
289 println!(
290 " Completeness: {:.1}%",
291 quality_metrics.completeness_score * 100.0
292 );
293 println!(
294 " Overall Quality: {:.1}%",
295 quality_metrics.overall_score * 100.0
296 );
297
298 // Get buffer statistics
299 let buffer_stats = engine.get_buffer_statistics()?;
300 println!(" Buffer Statistics:");
301 println!(" Utilization: {:.1}%", buffer_stats.utilization * 100.0);
302 println!(" Memory Usage: {} bytes", buffer_stats.memory_usage);
303
304 Ok(())
305}
Source§ pub fn get_performance_metrics(&self) -> Result<PerformanceMetrics>
pub fn get_performance_metrics(&self) -> Result<PerformanceMetrics>
Get current performance metrics
Examples found in repository?
examples/advanced_showcase.rs (line 274)
200fn demonstrate_adaptive_streaming(dataset: &Dataset) -> Result<(), Box<dyn std::error::Error>> {
201 println!("\n🌊 Adaptive Streaming Demonstration");
202 println!("===================================");
203
204 // Configure streaming engine
205 let config = AdaptiveStreamConfig::default();
206
207 println!("🔧 Initializing adaptive streaming engine...");
208 let mut engine = create_adaptive_engine_with_config(config);
209
210 // Simulate streaming data
211 println!("📡 Simulating data stream...");
212 let data = &dataset.data;
213 let chunksize = 20;
214 let num_chunks = (data.nrows() / chunksize).min(10); // Limit for demo
215
216 let mut total_processed = 0;
217 let start_time = Instant::now();
218
219 for i in 0..num_chunks {
220 let start_row = i * chunksize;
221 let end_row = (start_row + chunksize).min(data.nrows());
222
223 // Create chunk from dataset slice
224 let chunkdata = data
225 .slice(scirs2_core::ndarray::s![start_row..end_row, ..])
226 .to_owned();
227
228 let chunk = StreamChunk {
229 data: chunkdata,
230 timestamp: Instant::now(),
231 metadata: ChunkMetadata {
232 source_id: format!("demo_source_{i}"),
233 sequence_number: i as u64,
234 characteristics: DataCharacteristics {
235 moments: StatisticalMoments {
236 mean: 0.0,
237 variance: 1.0,
238 skewness: 0.0,
239 kurtosis: 0.0,
240 },
241 entropy: 1.0,
242 trend: TrendIndicators {
243 linear_slope: 0.1,
244 trend_strength: 0.5,
245 direction: TrendDirection::Increasing,
246 seasonality: 0.2,
247 },
248 anomaly_score: 0.1,
249 },
250 },
251 quality_score: 0.9,
252 };
253
254 // Process chunk
255 let results = engine.process_stream(chunk)?;
256 total_processed += results.len();
257
258 if !results.is_empty() {
259 println!(
260 " Processed batch {}: {} datasets generated",
261 i + 1,
262 results.len()
263 );
264 }
265 }
266
267 let streaming_time = start_time.elapsed();
268
269 println!(" Streaming completed in: {streaming_time:?}");
270 println!(" Total datasets processed: {total_processed}");
271
272 // Get performance metrics
273 println!("📈 Getting performance metrics...");
274 let perf_metrics = engine.get_performance_metrics()?;
275 println!(" Processing Latency: {:?}", perf_metrics.latency);
276 println!(" Throughput: {:.1} chunks/sec", perf_metrics.throughput);
277 println!(
278 " Memory Efficiency: {:.1}%",
279 perf_metrics.memory_efficiency * 100.0
280 );
281
282 // Get quality metrics
283 let quality_metrics = engine.get_quality_metrics()?;
284 println!(" Quality Metrics:");
285 println!(
286 " Integrity: {:.1}%",
287 quality_metrics.integrity_score * 100.0
288 );
289 println!(
290 " Completeness: {:.1}%",
291 quality_metrics.completeness_score * 100.0
292 );
293 println!(
294 " Overall Quality: {:.1}%",
295 quality_metrics.overall_score * 100.0
296 );
297
298 // Get buffer statistics
299 let buffer_stats = engine.get_buffer_statistics()?;
300 println!(" Buffer Statistics:");
301 println!(" Utilization: {:.1}%", buffer_stats.utilization * 100.0);
302 println!(" Memory Usage: {} bytes", buffer_stats.memory_usage);
303
304 Ok(())
305}
Source§ pub fn get_quality_metrics(&self) -> Result<QualityMetrics>
pub fn get_quality_metrics(&self) -> Result<QualityMetrics>
Get current quality metrics
Examples found in repository?
examples/advanced_showcase.rs (line 283)
200fn demonstrate_adaptive_streaming(dataset: &Dataset) -> Result<(), Box<dyn std::error::Error>> {
201 println!("\n🌊 Adaptive Streaming Demonstration");
202 println!("===================================");
203
204 // Configure streaming engine
205 let config = AdaptiveStreamConfig::default();
206
207 println!("🔧 Initializing adaptive streaming engine...");
208 let mut engine = create_adaptive_engine_with_config(config);
209
210 // Simulate streaming data
211 println!("📡 Simulating data stream...");
212 let data = &dataset.data;
213 let chunksize = 20;
214 let num_chunks = (data.nrows() / chunksize).min(10); // Limit for demo
215
216 let mut total_processed = 0;
217 let start_time = Instant::now();
218
219 for i in 0..num_chunks {
220 let start_row = i * chunksize;
221 let end_row = (start_row + chunksize).min(data.nrows());
222
223 // Create chunk from dataset slice
224 let chunkdata = data
225 .slice(scirs2_core::ndarray::s![start_row..end_row, ..])
226 .to_owned();
227
228 let chunk = StreamChunk {
229 data: chunkdata,
230 timestamp: Instant::now(),
231 metadata: ChunkMetadata {
232 source_id: format!("demo_source_{i}"),
233 sequence_number: i as u64,
234 characteristics: DataCharacteristics {
235 moments: StatisticalMoments {
236 mean: 0.0,
237 variance: 1.0,
238 skewness: 0.0,
239 kurtosis: 0.0,
240 },
241 entropy: 1.0,
242 trend: TrendIndicators {
243 linear_slope: 0.1,
244 trend_strength: 0.5,
245 direction: TrendDirection::Increasing,
246 seasonality: 0.2,
247 },
248 anomaly_score: 0.1,
249 },
250 },
251 quality_score: 0.9,
252 };
253
254 // Process chunk
255 let results = engine.process_stream(chunk)?;
256 total_processed += results.len();
257
258 if !results.is_empty() {
259 println!(
260 " Processed batch {}: {} datasets generated",
261 i + 1,
262 results.len()
263 );
264 }
265 }
266
267 let streaming_time = start_time.elapsed();
268
269 println!(" Streaming completed in: {streaming_time:?}");
270 println!(" Total datasets processed: {total_processed}");
271
272 // Get performance metrics
273 println!("📈 Getting performance metrics...");
274 let perf_metrics = engine.get_performance_metrics()?;
275 println!(" Processing Latency: {:?}", perf_metrics.latency);
276 println!(" Throughput: {:.1} chunks/sec", perf_metrics.throughput);
277 println!(
278 " Memory Efficiency: {:.1}%",
279 perf_metrics.memory_efficiency * 100.0
280 );
281
282 // Get quality metrics
283 let quality_metrics = engine.get_quality_metrics()?;
284 println!(" Quality Metrics:");
285 println!(
286 " Integrity: {:.1}%",
287 quality_metrics.integrity_score * 100.0
288 );
289 println!(
290 " Completeness: {:.1}%",
291 quality_metrics.completeness_score * 100.0
292 );
293 println!(
294 " Overall Quality: {:.1}%",
295 quality_metrics.overall_score * 100.0
296 );
297
298 // Get buffer statistics
299 let buffer_stats = engine.get_buffer_statistics()?;
300 println!(" Buffer Statistics:");
301 println!(" Utilization: {:.1}%", buffer_stats.utilization * 100.0);
302 println!(" Memory Usage: {} bytes", buffer_stats.memory_usage);
303
304 Ok(())
305}
Source§ pub fn get_buffer_statistics(&self) -> Result<BufferStatistics>
pub fn get_buffer_statistics(&self) -> Result<BufferStatistics>
Get buffer statistics
Examples found in repository?
examples/advanced_showcase.rs (line 299)
200fn demonstrate_adaptive_streaming(dataset: &Dataset) -> Result<(), Box<dyn std::error::Error>> {
201 println!("\n🌊 Adaptive Streaming Demonstration");
202 println!("===================================");
203
204 // Configure streaming engine
205 let config = AdaptiveStreamConfig::default();
206
207 println!("🔧 Initializing adaptive streaming engine...");
208 let mut engine = create_adaptive_engine_with_config(config);
209
210 // Simulate streaming data
211 println!("📡 Simulating data stream...");
212 let data = &dataset.data;
213 let chunksize = 20;
214 let num_chunks = (data.nrows() / chunksize).min(10); // Limit for demo
215
216 let mut total_processed = 0;
217 let start_time = Instant::now();
218
219 for i in 0..num_chunks {
220 let start_row = i * chunksize;
221 let end_row = (start_row + chunksize).min(data.nrows());
222
223 // Create chunk from dataset slice
224 let chunkdata = data
225 .slice(scirs2_core::ndarray::s![start_row..end_row, ..])
226 .to_owned();
227
228 let chunk = StreamChunk {
229 data: chunkdata,
230 timestamp: Instant::now(),
231 metadata: ChunkMetadata {
232 source_id: format!("demo_source_{i}"),
233 sequence_number: i as u64,
234 characteristics: DataCharacteristics {
235 moments: StatisticalMoments {
236 mean: 0.0,
237 variance: 1.0,
238 skewness: 0.0,
239 kurtosis: 0.0,
240 },
241 entropy: 1.0,
242 trend: TrendIndicators {
243 linear_slope: 0.1,
244 trend_strength: 0.5,
245 direction: TrendDirection::Increasing,
246 seasonality: 0.2,
247 },
248 anomaly_score: 0.1,
249 },
250 },
251 quality_score: 0.9,
252 };
253
254 // Process chunk
255 let results = engine.process_stream(chunk)?;
256 total_processed += results.len();
257
258 if !results.is_empty() {
259 println!(
260 " Processed batch {}: {} datasets generated",
261 i + 1,
262 results.len()
263 );
264 }
265 }
266
267 let streaming_time = start_time.elapsed();
268
269 println!(" Streaming completed in: {streaming_time:?}");
270 println!(" Total datasets processed: {total_processed}");
271
272 // Get performance metrics
273 println!("📈 Getting performance metrics...");
274 let perf_metrics = engine.get_performance_metrics()?;
275 println!(" Processing Latency: {:?}", perf_metrics.latency);
276 println!(" Throughput: {:.1} chunks/sec", perf_metrics.throughput);
277 println!(
278 " Memory Efficiency: {:.1}%",
279 perf_metrics.memory_efficiency * 100.0
280 );
281
282 // Get quality metrics
283 let quality_metrics = engine.get_quality_metrics()?;
284 println!(" Quality Metrics:");
285 println!(
286 " Integrity: {:.1}%",
287 quality_metrics.integrity_score * 100.0
288 );
289 println!(
290 " Completeness: {:.1}%",
291 quality_metrics.completeness_score * 100.0
292 );
293 println!(
294 " Overall Quality: {:.1}%",
295 quality_metrics.overall_score * 100.0
296 );
297
298 // Get buffer statistics
299 let buffer_stats = engine.get_buffer_statistics()?;
300 println!(" Buffer Statistics:");
301 println!(" Utilization: {:.1}%", buffer_stats.utilization * 100.0);
302 println!(" Memory Usage: {} bytes", buffer_stats.memory_usage);
303
304 Ok(())
305}
Source§impl AdaptiveStreamingEngine
Enhanced Adaptive Streaming Engine with Quantum and Neural Optimization
impl AdaptiveStreamingEngine
Enhanced Adaptive Streaming Engine with Quantum and Neural Optimization
Source§ pub fn with_quantum_neural_optimization(config: AdaptiveStreamConfig) -> Self
pub fn with_quantum_neural_optimization(config: AdaptiveStreamConfig) -> Self
Create advanced streaming engine with quantum and neural optimization
Source§ pub fn quantum_optimize(
&mut self,
performance_metrics: &PerformanceMetrics,
) -> Result<OptimizationConfig>
pub fn quantum_optimize( &mut self, performance_metrics: &PerformanceMetrics, ) -> Result<OptimizationConfig>
Optimize using quantum-inspired algorithms
Source§ pub fn neural_adapt(
&mut self,
features: &Array1<f64>,
targets: &Array1<f64>,
) -> Result<LearningStatistics>
pub fn neural_adapt( &mut self, features: &Array1<f64>, targets: &Array1<f64>, ) -> Result<LearningStatistics>
Learn and adapt using neural system
Source§ pub fn predict_future_performance(
&self,
horizon: Duration,
) -> Result<PerformancePredictionPoint>
pub fn predict_future_performance( &self, horizon: Duration, ) -> Result<PerformancePredictionPoint>
Predict future performance using advanced models
Auto Trait Implementations§
impl Freeze for AdaptiveStreamingEngine
impl RefUnwindSafe for AdaptiveStreamingEngine
impl Send for AdaptiveStreamingEngine
impl Sync for AdaptiveStreamingEngine
impl Unpin for AdaptiveStreamingEngine
impl UnwindSafe for AdaptiveStreamingEngine
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T where
T: ?Sized,
impl<T> BorrowMut<T> for T where
T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more
Source§impl<T> IntoEither for T
impl<T> IntoEither for T
Source§fn into_either(self, into_left: bool) -> Either<Self, Self>
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts
self
into a Left
variant of Either<Self, Self>
if into_left
is true
.
Converts self
into a Right
variant of Either<Self, Self>
otherwise. Read more
Source§fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts
self
into a Left
variant of Either<Self, Self>
if into_left(&self)
returns true
.
Converts self
into a Right
variant of Either<Self, Self>
otherwise. Read more
Source§impl<T> Pointable for T
impl<T> Pointable for T
Source§impl<SS, SP> SupersetOf<SS> for SP where
SS: SubsetOf<SP>,
impl<SS, SP> SupersetOf<SS> for SP where
SS: SubsetOf<SP>,
Source§fn to_subset(&self) -> Option<SS>
fn to_subset(&self) -> Option<SS>
The inverse inclusion map: attempts to construct
self
from the equivalent element of its
superset. Read more
Source§fn is_in_subset(&self) -> bool
fn is_in_subset(&self) -> bool
Checks if
self
is actually part of its subset T
(and can be converted to it). Read more
Source§fn to_subset_unchecked(&self) -> SS
fn to_subset_unchecked(&self) -> SS
Use with care! Same as
self.to_subset
but without any property checks. Always succeeds.
Source§fn from_subset(element: &SS) -> SP
fn from_subset(element: &SS) -> SP
The inclusion map: converts
self
to the equivalent element of its superset.