scirs2_cluster/
lib.rs

#![allow(deprecated)]
#![allow(clippy::all)]
#![allow(unreachable_code)]
#![allow(unused_mut)]
#![allow(missing_docs)]
#![allow(for_loops_over_fallibles)]
#![allow(unused_parens)]
#![allow(unexpected_cfgs)]
#![allow(unused_attributes)]
#![allow(dead_code)]
//! # SciRS2 Cluster - Clustering Algorithms
//!
//! **scirs2-cluster** provides comprehensive clustering algorithms for unsupervised learning,
//! offering k-means, hierarchical clustering, DBSCAN, spectral clustering, and advanced methods
//! with parallel processing, SIMD acceleration, and evaluation metrics.
//!
//! ## 🎯 Key Features
//!
//! - **SciPy/scikit-learn Compatibility**: Similar APIs to `scipy.cluster` and `sklearn.cluster`
//! - **Partitional Clustering**: K-means, K-means++, mini-batch K-means
//! - **Hierarchical Clustering**: Agglomerative with various linkage methods
//! - **Density-based**: DBSCAN, OPTICS, HDBSCAN for arbitrary-shaped clusters
//! - **Graph-based**: Spectral clustering, affinity propagation
//! - **Evaluation Metrics**: Silhouette, Davies-Bouldin, Calinski-Harabasz
//! - **Performance**: Parallel execution, SIMD distance computation
//!
//! ## 📦 Module Overview
//!
//! | SciRS2 Module | Python Equivalent | Description |
//! |---------------|-------------------|-------------|
//! | `vq` | `scipy.cluster.vq` | K-means and vector quantization |
//! | `hierarchy` | `scipy.cluster.hierarchy` | Hierarchical/agglomerative clustering |
//! | `density` | `sklearn.cluster.DBSCAN` | Density-based spatial clustering |
//! | `spectral` | `sklearn.cluster.SpectralClustering` | Graph-based spectral clustering |
//! | `metrics` | `sklearn.metrics` | Clustering evaluation metrics |
//!
//! ## 🚀 Quick Start
//!
//! ```toml
//! [dependencies]
//! scirs2-cluster = "0.1.0-rc.2"
//! ```
//!
//! ```rust
//! use scirs2_cluster::vq::kmeans;
//! use scirs2_core::ndarray::Array2;
//!
//! // K-means clustering
//! let data = Array2::from_shape_vec((6, 2), vec![
//!     1.0, 2.0, 1.2, 1.8, 0.8, 1.9,
//!     3.7, 4.2, 3.9, 3.9, 4.2, 4.1,
//! ]).unwrap();
//!
//! let (centroids, labels) = kmeans(data.view(), 2, None, None, None, None).unwrap();
//! ```
//!
//! ## 🔒 Version: 0.1.0-rc.2 (October 03, 2025)
//!
//! ## Features
//!
//! * **Vector Quantization**: K-means and K-means++ for partitioning data
//! * **Hierarchical Clustering**: Agglomerative clustering with various linkage methods
//! * **Density-based Clustering**: DBSCAN and OPTICS for finding clusters of arbitrary shape
//! * **Mean Shift**: Non-parametric clustering based on density estimation
//! * **Spectral Clustering**: Graph-based clustering using eigenvectors of the graph Laplacian
//! * **Affinity Propagation**: Message-passing based clustering that identifies exemplars
//! * **Evaluation Metrics**: Silhouette coefficient, Davies-Bouldin index, and other measures of clustering quality
//! * **Data Preprocessing**: Utilities for normalizing, standardizing, and whitening data before clustering
//!
//! ## Examples
//!
//! ```
//! use scirs2_core::ndarray::{Array2, ArrayView2};
//! use scirs2_cluster::vq::kmeans;
//! use scirs2_cluster::preprocess::standardize;
//!
//! // Example data with two clusters
//! let data = Array2::from_shape_vec((6, 2), vec![
//!     1.0, 2.0,
//!     1.2, 1.8,
//!     0.8, 1.9,
//!     3.7, 4.2,
//!     3.9, 3.9,
//!     4.2, 4.1,
//! ]).unwrap();
//!
//! // Standardize the data
//! let standardized = standardize(data.view(), true).unwrap();
//!
//! // Run k-means with k=2
//! let (centroids, labels) = kmeans(standardized.view(), 2, None, None, None, None).unwrap();
//!
//! // Print the results
//! println!("Centroids: {:?}", centroids);
//! println!("Cluster assignments: {:?}", labels);
//! ```
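//!
//! A clustering can also be scored with the evaluation metrics. The sketch below is
//! marked `ignore` because the exact argument types accepted by `silhouette_score`
//! are shown here only for illustration:
//!
//! ```ignore
//! use scirs2_cluster::vq::kmeans;
//! use scirs2_cluster::metrics::silhouette_score;
//! use scirs2_core::ndarray::Array2;
//!
//! let data = Array2::from_shape_vec((6, 2), vec![
//!     1.0, 2.0, 1.2, 1.8, 0.8, 1.9,
//!     3.7, 4.2, 3.9, 3.9, 4.2, 4.1,
//! ]).unwrap();
//!
//! // Cluster the data, then score the assignment (higher silhouette is better).
//! let (_centroids, labels) = kmeans(data.view(), 2, None, None, None, None).unwrap();
//! let score = silhouette_score(data.view(), labels.view()).unwrap();
//! println!("Silhouette score: {}", score);
//! ```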

/// Cutting-edge clustering algorithms including quantum-inspired methods and advanced online learning.
///
/// This module provides state-of-the-art clustering algorithms that push the boundaries
/// of traditional clustering methods. It includes quantum-inspired algorithms that leverage
/// quantum computing principles and advanced online learning variants with concept drift detection.
///
/// # Features
///
/// * **Quantum K-means**: Uses quantum superposition principles for potentially better optimization
/// * **Adaptive Online Clustering**: Automatically adapts to changing data distributions
/// * **Concept Drift Detection**: Detects and adapts to changes in streaming data
/// * **Dynamic Cluster Management**: Creates, merges, and removes clusters automatically
/// * **Quantum Annealing**: Simulated quantum annealing for global optimization
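///
/// # Example
///
/// A minimal sketch of quantum-inspired K-means (marked `ignore`; the exact
/// signatures of `quantum_kmeans` and `QuantumConfig` are assumed for illustration):
///
/// ```ignore
/// use scirs2_cluster::advanced::{quantum_kmeans, QuantumConfig};
/// use scirs2_core::ndarray::Array2;
///
/// let data = Array2::from_shape_vec((6, 2), vec![
///     1.0, 2.0, 1.2, 1.8, 0.8, 1.9,
///     3.7, 4.2, 3.9, 3.9, 4.2, 4.1,
/// ]).unwrap();
///
/// // Quantum-inspired K-means with k = 2 and default quantum parameters.
/// let result = quantum_kmeans(data.view(), 2, &QuantumConfig::default()).unwrap();
/// ```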
pub mod advanced;
/// Advanced benchmarking and performance profiling system.
///
/// This module provides cutting-edge benchmarking capabilities for clustering algorithms,
/// including comprehensive performance analysis, memory profiling, scalability analysis,
/// performance regression detection, and AI-powered optimization suggestions.
///
/// # Features
///
/// * **Statistical Performance Analysis**: Comprehensive timing statistics with confidence intervals
/// * **Memory Usage Profiling**: Real-time memory consumption tracking and leak detection
/// * **Scalability Analysis**: Algorithm complexity estimation and performance predictions
/// * **Regression Detection**: Automated detection of performance degradation
/// * **Optimization Suggestions**: AI-powered recommendations for performance improvements
/// * **Interactive Reporting**: Rich HTML reports with detailed analytics
/// * **Cross-Platform Benchmarking**: Performance comparisons across different systems
/// * **GPU vs CPU Analysis**: Comprehensive acceleration analysis
pub mod advanced_benchmarking;
/// Advanced clustering: AI-driven and quantum-neuromorphic methods.
///
/// This module combines AI-driven algorithm selection with quantum-neuromorphic
/// fusion algorithms, leveraging meta-learning, neural architecture search, and
/// bio-quantum computing paradigms to improve clustering quality on difficult datasets.
///
/// # Features
///
/// * **AI-Driven Algorithm Selection** - Automatically selects a suitable clustering algorithm
/// * **Quantum-Neuromorphic Clustering** - Fusion of quantum-inspired and spiking neural network methods
/// * **Meta-Learning Optimization** - Learns hyperparameters from previous runs
/// * **Adaptive Resource Allocation** - Dynamic GPU/CPU/QPU resource management
/// * **Multi-Objective Clustering** - Optimizes for accuracy, speed, and interpretability
/// * **Continual Learning** - Adapts to changing data distributions in real time
/// * **Bio-Quantum Clustering** - Nature-inspired quantum clustering algorithms
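///
/// # Example
///
/// A minimal sketch of the high-level entry point (marked `ignore`; the constructor,
/// `cluster` method, and result fields shown are assumed for illustration):
///
/// ```ignore
/// use scirs2_cluster::advanced_clustering::{AdvancedClusterer, AdvancedConfig};
/// use scirs2_core::ndarray::Array2;
///
/// let data = Array2::from_shape_vec((4, 2), vec![
///     0.0, 0.1, 0.2, 0.0,
///     5.0, 5.1, 5.2, 4.9,
/// ]).unwrap();
///
/// // Let the AI-driven selector choose and tune an algorithm for this dataset.
/// let clusterer = AdvancedClusterer::new(AdvancedConfig::default());
/// let result = clusterer.cluster(data.view()).unwrap();
/// println!("labels: {:?}", result.labels);
/// ```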
pub mod advanced_clustering;
/// Enhanced visualization specifically for advanced clustering results.
///
/// This module provides specialized visualization capabilities for advanced clustering,
/// including quantum state visualization, neuromorphic adaptation plots, and AI algorithm
/// selection insights with real-time interactive capabilities.
///
/// # Features
///
/// * **Quantum State Visualization**: Real-time coherence and entanglement plots
/// * **Neuromorphic Adaptation**: Spiking neuron activity and plasticity visualization
/// * **AI Algorithm Selection**: Performance predictions and selection insights
/// * **Performance Dashboard**: Comprehensive metrics comparison with classical methods
/// * **Export Capabilities**: Multiple formats including interactive HTML and JSON
pub mod advanced_visualization;
pub mod affinity;
pub mod birch;
pub mod density;
/// Distributed clustering algorithms for large-scale datasets.
///
/// This module provides distributed implementations of clustering algorithms that can
/// handle datasets too large to fit in memory on a single machine. It supports
/// distributed K-means, hierarchical clustering, and various data partitioning strategies.
///
/// # Features
///
/// * **Distributed K-means**: Multi-node K-means with coordination rounds
/// * **Distributed Hierarchical Clustering**: Large-scale hierarchical clustering
/// * **Data Partitioning**: Multiple strategies for distributing data across workers
/// * **Load Balancing**: Dynamic and static load balancing strategies
/// * **Memory Management**: Configurable memory limits and optimization
/// * **Fault Tolerance**: Worker failure detection and recovery mechanisms
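///
/// # Example
///
/// A minimal sketch of distributed K-means on an in-memory array (marked `ignore`;
/// the configuration fields and the `new`/`fit` methods shown are assumed for illustration):
///
/// ```ignore
/// use scirs2_cluster::distributed::{DistributedKMeans, DistributedKMeansConfig};
/// use scirs2_core::ndarray::Array2;
///
/// // 1000 samples with 8 features, split across 4 workers.
/// let data = Array2::from_shape_vec((1000, 8), vec![0.0; 8000]).unwrap();
/// let config = DistributedKMeansConfig {
///     n_workers: 4,
///     ..Default::default()
/// };
/// let model = DistributedKMeans::new(3, config).fit(data.view()).unwrap();
/// ```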
pub mod distributed;
/// Enhanced clustering features: deep-learning extensions of the advanced clustering pipeline.
///
/// This module extends the advanced clustering capabilities with deep learning
/// integration, quantum-inspired algorithms, and advanced ensemble methods to
/// improve clustering robustness and accuracy.
///
/// # Features
///
/// * **Transformer-Based Embeddings** - Deep representations using attention mechanisms
/// * **Graph Neural Networks** - Complex relationship modeling through graph convolutions
/// * **Reinforcement Learning** - Adaptive clustering strategy optimization
/// * **Neural Architecture Search** - Automatic design of optimal clustering networks
/// * **Deep Ensemble Methods** - Robust clustering through uncertainty quantification
/// * **Advanced Uncertainty Estimation** - Confidence intervals and reliability metrics
pub mod enhanced_clustering_features;
/// Ensemble clustering methods for improved robustness.
///
/// This module provides ensemble clustering techniques that combine multiple
/// clustering algorithms or multiple runs of the same algorithm to achieve
/// more robust and stable clustering results.
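///
/// # Example
///
/// A minimal sketch of consensus clustering over several base runs (marked `ignore`;
/// the exact signature of `ensemble_clustering` is assumed for illustration):
///
/// ```ignore
/// use scirs2_cluster::{ensemble_clustering, EnsembleConfig};
/// use scirs2_core::ndarray::Array2;
///
/// let data = Array2::from_shape_vec((6, 2), vec![
///     1.0, 2.0, 1.2, 1.8, 0.8, 1.9,
///     3.7, 4.2, 3.9, 3.9, 4.2, 4.1,
/// ]).unwrap();
///
/// // Combine multiple base clusterings into a single consensus labeling.
/// let result = ensemble_clustering(data.view(), &EnsembleConfig::default()).unwrap();
/// ```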
pub mod ensemble;
pub mod error;
pub mod gmm;
/// GPU acceleration module for clustering algorithms.
///
/// This module provides GPU acceleration interfaces and implementations for clustering
/// algorithms. It supports multiple GPU backends including CUDA, OpenCL, ROCm, and others.
/// When GPU acceleration is not available or disabled, algorithms automatically fall back
/// to optimized CPU implementations.
///
/// # Features
///
/// * **Multiple GPU Backends**: Support for CUDA, OpenCL, ROCm, Intel OneAPI, and Metal
/// * **Automatic Fallback**: Seamless fallback to CPU when GPU is not available
/// * **Memory Management**: Efficient GPU memory allocation and pooling
/// * **Performance Monitoring**: Built-in benchmarking and performance statistics
/// * **Device Selection**: Automatic or manual GPU device selection strategies
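///
/// # Example
///
/// A minimal sketch of creating a GPU context with automatic device selection
/// (marked `ignore`; the constructor and configuration fields shown are assumed):
///
/// ```ignore
/// use scirs2_cluster::gpu::{DeviceSelection, GpuConfig, GpuContext};
///
/// // Prefer the best available device; fall back to the CPU if none is found.
/// let config = GpuConfig {
///     device_selection: DeviceSelection::Automatic,
///     ..Default::default()
/// };
/// let context = GpuContext::new(config).unwrap();
/// ```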
#[cfg(feature = "gpu")]
pub mod gpu;
/// Advanced GPU and distributed computing extensions.
///
/// This module provides GPU acceleration and distributed computing capabilities
/// for advanced clustering, improving scalability and throughput on
/// large-scale clustering tasks.
///
/// # High-Performance Computing Features
///
/// * **GPU Acceleration** - CUDA/OpenCL/ROCm GPU acceleration with automatic fallback
/// * **Distributed Computing** - Multi-node clustering with fault tolerance
/// * **Hybrid GPU-Distributed** - Combined GPU and distributed processing
/// * **Advanced Memory Management** - Optimized GPU memory allocation and transfer
/// * **Load Balancing** - Dynamic workload distribution across nodes
/// * **Fault Tolerance** - Automatic recovery from worker node failures
pub mod gpu_distributed_clustering;
/// Graph clustering and community detection algorithms.
///
/// This module provides implementations of various graph clustering algorithms for
/// detecting communities and clusters in network data. These algorithms work with
/// graph representations where nodes represent data points and edges represent
/// similarities or connections between them.
///
/// # Features
///
/// * **Community Detection**: Louvain algorithm for modularity optimization
/// * **Label Propagation**: Fast algorithm for community detection
/// * **Hierarchical Methods**: Girvan-Newman algorithm for hierarchical communities
/// * **Graph Construction**: k-NN graphs, adjacency matrix support
/// * **Quality Metrics**: Modularity calculation and community evaluation
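///
/// # Example
///
/// A minimal sketch of Louvain community detection (marked `ignore`; the
/// `Graph::from_edges` constructor and `louvain` signature are assumed for illustration):
///
/// ```ignore
/// use scirs2_cluster::graph::{louvain, Graph};
///
/// // Two triangles joined by a single weak edge: expect two communities.
/// let edges = vec![
///     (0, 1, 1.0), (1, 2, 1.0), (0, 2, 1.0),
///     (3, 4, 1.0), (4, 5, 1.0), (3, 5, 1.0),
///     (2, 3, 0.1),
/// ];
/// let graph = Graph::from_edges(6, &edges);
/// let communities = louvain(&graph, None).unwrap();
/// ```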
pub mod graph;
pub mod hierarchy;
pub mod input_validation;
pub mod leader;
/// Mean Shift clustering implementation.
///
/// This module provides the Mean Shift clustering algorithm, which is a centroid-based
/// algorithm that works by updating candidates for centroids to be the mean of the points
/// within a given region. These candidates are then filtered in a post-processing stage to
/// eliminate near-duplicates, forming the final set of centroids.
///
/// Mean Shift is a non-parametric clustering technique that doesn't require specifying the
/// number of clusters in advance and can find clusters of arbitrary shapes.
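///
/// # Example
///
/// A minimal sketch of Mean Shift with an estimated bandwidth (marked `ignore`;
/// the exact signatures of `estimate_bandwidth` and `mean_shift` are assumed):
///
/// ```ignore
/// use scirs2_cluster::meanshift::{estimate_bandwidth, mean_shift, MeanShiftOptions};
/// use scirs2_core::ndarray::Array2;
///
/// let data = Array2::from_shape_vec((6, 2), vec![
///     1.0, 2.0, 1.2, 1.8, 0.8, 1.9,
///     3.7, 4.2, 3.9, 3.9, 4.2, 4.1,
/// ]).unwrap();
///
/// // Estimate a bandwidth from the data, then run Mean Shift with default options.
/// let bandwidth = estimate_bandwidth(data.view(), None, None).unwrap();
/// let options = MeanShiftOptions { bandwidth: Some(bandwidth), ..Default::default() };
/// let (centers, labels) = mean_shift(data.view(), &options).unwrap();
/// ```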
pub mod meanshift;
pub mod metrics;
pub mod neighbor_search;
/// Native plotting capabilities for clustering results.
///
/// This module provides native plotting implementations using popular Rust visualization
/// libraries like plotters and egui. It bridges the visualization data structures with
/// actual plotting backends to create publication-ready plots.
///
/// # Features
///
/// * **Static Plots**: PNG, SVG, PDF output using plotters
/// * **Interactive Plots**: Real-time visualization using egui
/// * **Publication Ready**: High-quality plots with customizable styling
/// * **Multiple Backends**: Support for different rendering backends
/// * **Performance Optimized**: Efficient rendering for large datasets
#[cfg(any(feature = "plotters", feature = "egui"))]
pub mod plotting;
pub mod preprocess;
/// Python bindings for scirs2-cluster using PyO3.
///
/// This module provides Python bindings that make scirs2-cluster algorithms
/// accessible from Python with scikit-learn compatible APIs. The bindings
/// include all major clustering algorithms and evaluation metrics.
///
/// # Features
///
/// * **Scikit-learn Compatible**: Drop-in replacements for scikit-learn clustering algorithms
/// * **K-means**: Python binding for K-means clustering with multiple initialization methods
/// * **DBSCAN**: Python binding for DBSCAN density-based clustering
/// * **Hierarchical**: Python binding for agglomerative clustering with various linkage methods
/// * **Evaluation Metrics**: Silhouette score, Calinski-Harabasz score, and Davies-Bouldin score
/// * **NumPy Integration**: Seamless integration with NumPy arrays
#[cfg(feature = "pyo3")]
pub mod python_bindings;
pub mod quantum_clustering;
pub mod serialization;
pub mod sparse;
pub mod spectral;
pub mod stability;
pub mod streaming;
/// Text clustering algorithms with semantic similarity support.
///
/// This module provides specialized clustering algorithms for text data that leverage
/// semantic similarity measures rather than traditional distance metrics. It includes
/// algorithms optimized for document clustering, sentence clustering, and topic modeling.
///
/// # Features
///
/// * **Semantic K-means**: K-means clustering with semantic similarity metrics
/// * **Hierarchical Text Clustering**: Agglomerative clustering for text data
/// * **Topic-based Clustering**: Clustering based on topic modeling approaches
/// * **Multiple Text Representations**: Support for TF-IDF, word embeddings, contextualized embeddings
/// * **Semantic Similarity Metrics**: Cosine, Jaccard, Jensen-Shannon, and other text-specific metrics
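///
/// # Example
///
/// A minimal sketch of semantic K-means over TF-IDF-style document vectors
/// (marked `ignore`; the exact signature of `semantic_kmeans` is assumed for illustration):
///
/// ```ignore
/// use scirs2_cluster::text_clustering::{semantic_kmeans, SemanticClusteringConfig};
/// use scirs2_core::ndarray::Array2;
///
/// // Each row is a document vector (e.g. TF-IDF features).
/// let docs = Array2::from_shape_vec((4, 3), vec![
///     1.0, 0.0, 0.0,
///     0.9, 0.1, 0.0,
///     0.0, 0.0, 1.0,
///     0.0, 0.1, 0.9,
/// ]).unwrap();
///
/// // Cluster the documents into two groups using a semantic similarity measure.
/// let labels = semantic_kmeans(docs.view(), 2, &SemanticClusteringConfig::default()).unwrap();
/// ```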
pub mod text_clustering;
/// Time series clustering algorithms with specialized distance metrics.
///
/// This module provides clustering algorithms specifically designed for time series data,
/// including dynamic time warping (DTW) distance and other temporal similarity measures.
/// These algorithms can handle time series of different lengths and temporal alignments.
///
/// # Features
///
/// * **Dynamic Time Warping**: DTW distance with optional constraints
/// * **Soft DTW**: Differentiable variant for gradient-based optimization
/// * **Time Series K-means**: Clustering with DTW barycenter averaging
/// * **Time Series K-medoids**: Robust clustering using actual time series as centers
/// * **Hierarchical Clustering**: Agglomerative clustering with DTW distance
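///
/// # Example
///
/// A minimal sketch of a DTW distance between two series of different lengths
/// (marked `ignore`; the exact signature of `dtw_distance` is assumed for illustration):
///
/// ```ignore
/// use scirs2_cluster::time_series::dtw_distance;
/// use scirs2_core::ndarray::Array1;
///
/// let a = Array1::from_vec(vec![0.0, 1.0, 2.0, 1.0, 0.0]);
/// let b = Array1::from_vec(vec![0.0, 0.5, 1.0, 2.0, 1.5, 1.0, 0.0]);
///
/// // DTW aligns the two series before measuring distance, so differing lengths are fine.
/// let d = dtw_distance(a.view(), b.view(), None).unwrap();
/// ```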
pub mod time_series;
/// Automatic hyperparameter tuning for clustering algorithms.
///
/// This module provides comprehensive hyperparameter optimization capabilities
/// for all clustering algorithms in the scirs2-cluster crate. It supports
/// grid search, random search, Bayesian optimization, and adaptive strategies.
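///
/// # Example
///
/// A minimal sketch of automatic hyperparameter search (marked `ignore`; the
/// `AutoTuner` methods, config fields, and result fields shown are assumed for illustration):
///
/// ```ignore
/// use scirs2_cluster::tuning::{AutoTuner, SearchStrategy, TuningConfig};
/// use scirs2_core::ndarray::Array2;
///
/// let data = Array2::from_shape_vec((100, 4), vec![0.0; 400]).unwrap();
///
/// // Random search over a standard K-means search space.
/// let config = TuningConfig {
///     strategy: SearchStrategy::RandomSearch,
///     ..Default::default()
/// };
/// let result = AutoTuner::new(config).tune_kmeans(data.view()).unwrap();
/// println!("best parameters: {:?}", result.best_parameters);
/// ```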
pub mod tuning;
/// Utility modules for clustering algorithms
///
/// This module contains various utility functions and helpers used throughout
/// the clustering library, including contingency matrix calculations and
/// other shared functionality.
pub mod utils;
/// Enhanced visualization capabilities for clustering results.
///
/// This module provides comprehensive visualization tools for clustering algorithms,
/// including scatter plots, 3D visualizations, dimensionality reduction plots,
/// and interactive exploration tools for high-dimensional data.
///
/// # Features
///
/// * **2D/3D Scatter Plots**: Create scatter plot visualizations of clustering results
/// * **Dimensionality Reduction**: Support for PCA, t-SNE, UMAP, and MDS for high-dimensional data
/// * **Color Schemes**: Multiple color palettes including colorblind-friendly options
/// * **Interactive Features**: Zoom, pan, and selection capabilities
/// * **Animation Support**: Animate iterative algorithms and streaming data
/// * **Export Capabilities**: Export to various formats (JSON, HTML, images)
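///
/// # Example
///
/// A minimal sketch of building a 2D scatter-plot description of a clustering
/// (marked `ignore`; the exact signature of `create_scatter_plot_2d` is assumed):
///
/// ```ignore
/// use scirs2_cluster::visualization::{create_scatter_plot_2d, VisualizationConfig};
/// use scirs2_core::ndarray::{Array1, Array2};
///
/// let data = Array2::from_shape_vec((6, 2), vec![
///     1.0, 2.0, 1.2, 1.8, 0.8, 1.9,
///     3.7, 4.2, 3.9, 3.9, 4.2, 4.1,
/// ]).unwrap();
/// let labels = Array1::from_vec(vec![0, 0, 0, 1, 1, 1]);
///
/// // Produce a plot description that can later be exported to JSON or HTML.
/// let plot = create_scatter_plot_2d(
///     data.view(),
///     labels.view(),
///     &VisualizationConfig::default(),
/// ).unwrap();
/// ```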
pub mod visualization;
pub mod vq;

// Re-exports
pub use advanced::{
    adaptive_online_clustering, deep_embedded_clustering, qaoa_clustering, quantum_kmeans,
    rl_clustering, transfer_learning_clustering, variational_deep_embedding, vqe_clustering,
    AdaptiveOnlineClustering, AdaptiveOnlineConfig, DeepClusteringConfig, DeepEmbeddedClustering,
    FeatureAlignment, QAOAClustering, QAOAConfig, QAOACostFunction, QuantumConfig, QuantumKMeans,
    RLClustering, RLClusteringConfig, RewardFunction, TransferLearningClustering,
    TransferLearningConfig, VQEAnsatz, VQEClustering, VQEConfig, VariationalDeepEmbedding,
};

// Re-export quantum clustering from quantum_clustering module
pub use quantum_clustering::{
    quantum_annealing_clustering, CoolingSchedule, QuantumAnnealingClustering,
    QuantumAnnealingConfig,
};

// Re-export advanced clustering capabilities
pub use advanced_clustering::{
    AdvancedClusterer, AdvancedClusteringResult, AdvancedConfig, AdvancedPerformanceMetrics,
};

// Re-export advanced visualization capabilities
pub use advanced_visualization::{
    create_advanced_visualization_report, visualize_advanced_results, AISelectionPlot,
    AdvancedVisualizationConfig, AdvancedVisualizationOutput, AdvancedVisualizer, ClusterPlot,
    NeuromorphicAdaptationPlot, PerformanceDashboard, QuantumCoherencePlot, QuantumColorScheme,
    VisualizationExportFormat,
};

// Re-export enhanced advanced features
pub use enhanced_clustering_features::{
    DeepAdvancedClusterer, DeepAdvancedResult, DeepEnsembleCoordinator, EnsembleConsensus,
    GraphNeuralNetworkProcessor, GraphStructureInsights, NeuralArchitectureSearchEngine,
    OptimalArchitecture, ReinforcementLearningAgent, SpectralProperties,
    TransformerClusterEmbedder,
};

// Re-export GPU and distributed advanced features
pub use gpu_distributed_clustering::{
    CommunicationOverhead, CoordinationStrategy, DistributedAdvancedClusterer,
    DistributedAdvancedResult, DistributedProcessingMetrics, GpuAccelerationConfig,
    GpuAccelerationMetrics, GpuAdvancedClusterer, GpuAdvancedResult, GpuDeviceSelection,
    GpuMemoryStrategy, GpuOptimizationLevel, HybridGpuDistributedClusterer,
    HybridGpuDistributedResult, LoadBalancingStats, WorkerNodeConfig, WorkerPerformanceStats,
};

// Re-export advanced benchmarking capabilities
pub use advanced_benchmarking::{
    create_comprehensive_report, AdvancedBenchmark, AlgorithmBenchmark, AlgorithmComparison,
    BenchmarkConfig, BenchmarkResults, ComplexityClass, GpuVsCpuComparison, MemoryProfile,
    OptimizationCategory, OptimizationPriority, OptimizationSuggestion, PerformanceStatistics,
    QualityMetrics, RegressionAlert, RegressionSeverity, ScalabilityAnalysis, SystemInfo,
};

pub use affinity::{affinity_propagation, AffinityPropagationOptions};
pub use birch::{birch, Birch, BirchOptions, BirchStatistics};
pub use density::hdbscan::{
    dbscan_clustering, hdbscan, ClusterSelectionMethod, HDBSCANOptions, HDBSCANResult, StoreCenter,
};
pub use density::optics::{extract_dbscan_clustering, extract_xi_clusters, OPTICSResult};
pub use density::*;
pub use ensemble::convenience::{
    bootstrap_ensemble, ensemble_clustering, multi_algorithm_ensemble,
};
pub use ensemble::{
    ClusteringAlgorithm, ClusteringResult, ConsensusMethod, ConsensusStatistics, DiversityMetrics,
    DiversityStrategy, EnsembleClusterer, EnsembleConfig, EnsembleResult, NoiseType,
    ParameterRange, SamplingStrategy,
};
pub use gmm::{gaussian_mixture, CovarianceType, GMMInit, GMMOptions, GaussianMixture};
pub use graph::{
    girvan_newman, graph_clustering, label_propagation, louvain, Graph, GraphClusteringAlgorithm,
    GraphClusteringConfig,
};
pub use hierarchy::*;
pub use input_validation::{
    check_duplicate_points, suggest_clustering_algorithm, validate_clustering_data,
    validate_convergence_parameters, validate_distance_parameter, validate_integer_parameter,
    validate_n_clusters, validate_sample_weights, ValidationConfig,
};
pub use leader::{
    euclidean_distance, leader_clustering, manhattan_distance, LeaderClustering, LeaderNode,
    LeaderTree,
};
pub use meanshift::{estimate_bandwidth, get_bin_seeds, mean_shift, MeanShift, MeanShiftOptions};
pub use metrics::{
    adjusted_rand_index, calinski_harabasz_score, davies_bouldin_score,
    homogeneity_completeness_v_measure, normalized_mutual_info, silhouette_samples,
    silhouette_score,
};

// Re-export ensemble validation methods (from core stability module)
pub use metrics::bootstrap_confidence_interval;

// Re-export information-theoretic methods
pub use metrics::information_theoretic::normalized_variation_of_information;
pub use metrics::jensen_shannon_divergence;

// Re-export advanced metrics
pub use metrics::advanced::{bic_score, dunn_index};
pub use neighbor_search::{
    create_neighbor_searcher, BallTree, BruteForceSearch, KDTree, NeighborResult,
    NeighborSearchAlgorithm, NeighborSearchConfig, NeighborSearcher,
};
pub use preprocess::{min_max_scale, normalize, standardize, whiten, NormType};
pub use serialization::{
    affinity_propagation_to_model,
    birch_to_model,
    compatibility,
    dbscan_to_model,
    gmm_to_model,
    hierarchy_to_model,
    kmeans_to_model,
    leader_to_model,
    leadertree_to_model,
    meanshift_to_model,
    save_affinity_propagation,
    save_birch,
    save_gmm,
    save_hierarchy,
    save_kmeans,
    save_leader,
    save_leadertree,
    save_spectral_clustering,
    spectral_clustering_to_model,
    AdvancedExport,
    AffinityPropagationModel,
    AlgorithmState,
    AutoSaveConfig,
    BirchModel,
    // Unified workflow management
    ClusteringWorkflow,
    ClusteringWorkflowManager,
    DBSCANModel,
    DataCharacteristics,
    // Enhanced serialization with metadata and versioning
    EnhancedModel,
    EnhancedModelMetadata,
    ExportFormat,
    GMMModel,
    HierarchicalModel,
    KMeansModel,
    LeaderModel,
    LeaderTreeModel,
    MeanShiftModel,
    ModelMetadata,
    PlatformInfo,
    SerializableModel,
    SpectralClusteringModel,
    TrainingMetrics,
    TrainingStep,
    WorkflowConfig,
};

// Re-export compatibility utilities for scikit-learn and SciPy integration
pub use serialization::compatibility::{
    create_sklearn_param_grid,
    // TODO: Fix these function imports (they may be methods, not functions)
    // export_to_scipy_json,
    // export_to_sklearn_json,
    from_joblib_format,
    from_numpy_format,
    from_sklearn_format,
    generate_sklearn_model_summary,
    // import_scipy_hierarchy,
    // Import functions for external model formats
    // import_sklearn_kmeans,
    to_arrow_schema,
    to_huggingface_card,
    to_joblib_format,
    to_mlflow_format,
    to_numpy_format,
    to_onnx_metadata,
    to_pandas_clustering_report,
    to_pandas_format,
    to_pickle_like_format,
    to_pytorch_checkpoint,
    to_r_format,
    to_scipy_dendrogram_format,
    to_scipy_linkage_format,
    to_sklearn_clustering_result,
    to_sklearn_format,
};
pub use sparse::{
    sparse_epsilon_graph, sparse_knn_graph, SparseDistanceMatrix, SparseHierarchicalClustering,
};
pub use spectral::{
    spectral_bipartition, spectral_clustering, AffinityMode, SpectralClusteringOptions,
};
pub use stability::{
    BootstrapValidator, ConsensusClusterer, OptimalKSelector, StabilityConfig, StabilityResult,
};
pub use streaming::{
    ChunkedDistanceMatrix, ProgressiveHierarchical, StreamingConfig, StreamingKMeans,
};
pub use text_clustering::{
    semantic_hierarchical, semantic_kmeans, topic_clustering, SemanticClusteringConfig,
    SemanticHierarchical, SemanticKMeans, SemanticSimilarity, TextPreprocessing,
    TextRepresentation, TopicBasedClustering,
};
pub use time_series::{
    dtw_barycenter_averaging, dtw_distance, dtw_distance_custom, dtw_hierarchical_clustering,
    dtw_k_means, dtw_k_medoids, soft_dtw_distance, time_series_clustering, TimeSeriesAlgorithm,
    TimeSeriesClusteringConfig,
};
pub use tuning::{
    AcquisitionFunction, AutoTuner, BayesianState, CVStrategy, ConvergenceInfo,
    CrossValidationConfig, EarlyStoppingConfig, EnsembleResults, EvaluationMetric,
    EvaluationResult, ExplorationStats, HyperParameter, KernelType, LoadBalancingStrategy,
    ParallelConfig, ResourceConstraints, SearchSpace, SearchStrategy, StandardSearchSpaces,
    StoppingReason, SurrogateModel, TuningConfig, TuningResult,
};

// Re-export visualization and animation capabilities
pub use visualization::{
    create_scatter_plot_2d, create_scatter_plot_3d, AnimationConfig, BoundaryType, ClusterBoundary,
    ColorScheme, DimensionalityReduction, EasingFunction, LegendEntry, ScatterPlot2D,
    ScatterPlot3D, VisualizationConfig,
};

// Re-export animation features
pub use visualization::animation::{
    AnimationFrame, IterativeAnimationConfig, IterativeAnimationRecorder, StreamingVisualizer,
};

// Re-export interactive visualization features
pub use visualization::interactive::{
    ClusterStats, InteractiveConfig, InteractiveState, InteractiveVisualizer,
};

// Re-export export capabilities
pub use visualization::export::{
    export_scatter_2d_to_html, export_scatter_2d_to_json, export_scatter_3d_to_html,
    export_scatter_3d_to_json, save_visualization_to_file,
};

// Re-export native plotting capabilities (when plotting features are enabled)
#[cfg(feature = "plotters")]
pub use plotting::{
    plot_dendrogram, plot_scatter_2d, save_clustering_plot, save_dendrogram_plot, PlotFormat,
    PlotOutput,
};

#[cfg(feature = "egui")]
pub use plotting::{launch_interactive_visualization, InteractiveClusteringApp};

// Re-export distributed clustering capabilities
pub use distributed::{
    DataPartition, DistributedKMeans, DistributedKMeansConfig, PartitioningStrategy, WorkerStatus,
};

// Re-export distributed utilities - not available in current implementation
// pub use distributed::utils::{estimate_optimal_workers, generate_large_dataset};
pub use vq::*;

// GPU acceleration re-exports (when GPU feature is enabled)
#[cfg(feature = "gpu")]
pub use gpu::{
    DeviceSelection, DistanceMetric as GpuDistanceMetric, GpuBackend, GpuConfig, GpuContext,
    GpuDevice, GpuDistanceMatrix, GpuMemoryManager, MemoryStats, MemoryStrategy,
};

#[cfg(feature = "gpu")]
/// GPU acceleration benchmark utilities
pub mod gpu_benchmark {
    // Note: benchmark module may not exist yet
}

#[cfg(feature = "gpu")]
/// High-level GPU-accelerated clustering with automatic fallback
pub mod accelerated {
    // Note: accelerated module may not exist yet
}

// GPU acceleration interface (when GPU feature is enabled)
#[cfg(feature = "gpu")]
/// GPU-accelerated clustering with automatic CPU fallback
///
/// This module provides high-level clustering algorithms that automatically
/// use GPU acceleration when available, falling back to CPU implementations
/// when GPU is not available or optimal.
pub mod gpu_accelerated {
    // Note: accelerated module may not exist yet
}

#[cfg(test)]
mod tests;

#[cfg(test)]
mod property_tests;