use crate::error::OptimizeError;
use crate::unconstrained::{minimize, Method, Options};
use scirs2_core::ndarray::{Array1, Array2, ArrayView1};
use std::collections::HashMap;
/// Tunable parameters controlling how local minima are grouped into clusters.
#[derive(Debug, Clone)]
pub struct ClusteringOptions {
    /// Maximum distance (in the combined coordinate/function metric) at which
    /// two minima are considered to belong to the same cluster.
    pub distance_threshold: f64,
    /// Tolerance on function-value differences.
    /// NOTE(review): not read by any clustering routine in this file —
    /// confirm whether it is used elsewhere or is dead configuration.
    pub function_tolerance: f64,
    /// Optional upper bound on the number of clusters.
    pub max_clusters: Option<usize>,
    /// Which clustering algorithm to run.
    pub algorithm: ClusteringAlgorithm,
    /// Min-max normalize coordinate features before clustering
    /// (only affects the feature matrix built by `extract_features`).
    pub normalize_coordinates: bool,
    /// Include the objective value as an extra distance term / feature.
    pub use_function_values: bool,
    /// Weight applied to the function-value term when `use_function_values`
    /// is set.
    pub function_weight: f64,
}
impl Default for ClusteringOptions {
    /// Conservative defaults: single-linkage hierarchical clustering on
    /// normalized coordinates, with a small (0.1) weight on function-value
    /// differences.
    fn default() -> Self {
        Self {
            distance_threshold: 1e-3,
            function_tolerance: 1e-6,
            max_clusters: None,
            algorithm: ClusteringAlgorithm::Hierarchical,
            normalize_coordinates: true,
            use_function_values: true,
            function_weight: 0.1,
        }
    }
}
/// Algorithm used to group local minima into clusters.
#[derive(Debug, Clone, Copy)]
pub enum ClusteringAlgorithm {
    /// Agglomerative single-linkage clustering (see `hierarchical_clustering`).
    Hierarchical,
    /// Lloyd-iteration k-means with deterministic farthest-point seeding.
    KMeans,
    /// DBSCAN-style density clustering; unreachable points stay unlabelled.
    Density,
    /// Greedy pass: each unassigned point seeds a cluster and absorbs all
    /// later points within the distance threshold.
    Threshold,
}
/// A single local minimum produced by one optimization run.
#[derive(Debug, Clone)]
pub struct LocalMinimum<S> {
    /// Location of the minimum.
    pub x: Array1<f64>,
    /// Objective value at `x`, converted to `f64`.
    pub f: f64,
    /// Objective value in its original (possibly richer) type `S`.
    pub fun_value: S,
    /// Number of iterations the local optimizer took.
    pub nit: usize,
    /// Number of function evaluations.
    pub func_evals: usize,
    /// Whether the optimizer reported success.
    pub success: bool,
    /// The start point that led to this minimum.
    pub start_point: Array1<f64>,
    /// Cluster assignment (`None` = unassigned / noise).
    pub cluster_id: Option<usize>,
    /// Distance to the cluster centroid.
    /// NOTE(review): never written by the routines in this file — confirm
    /// whether it is populated elsewhere.
    pub cluster_distance: Option<f64>,
}
/// Output of `multi_start_with_clustering`: the labelled minima plus
/// per-cluster summaries and clustering-quality metrics.
#[derive(Debug, Clone)]
pub struct ClusteringResult<S> {
    /// All successful local minima, with cluster ids filled in.
    pub minima: Vec<LocalMinimum<S>>,
    /// One centroid summary per discovered cluster.
    pub centroids: Vec<ClusterCentroid>,
    /// Number of clusters found (`centroids.len()`).
    pub num_clusters: usize,
    /// Mean silhouette coefficient, when computable.
    pub silhouette_score: Option<f64>,
    /// Within-cluster sum of squares.
    pub wcss: f64,
    /// The successful minimum with the lowest objective value, if any.
    pub best_minimum: Option<LocalMinimum<S>>,
}
/// Summary statistics of one cluster of local minima.
#[derive(Debug, Clone)]
pub struct ClusterCentroid {
    /// Mean coordinate of the cluster members.
    pub x: Array1<f64>,
    /// Average objective value over the cluster.
    pub f_avg: f64,
    /// Best (lowest) objective value in the cluster.
    pub f_min: f64,
    /// Number of minima in the cluster.
    pub size: usize,
    /// Distance from the centroid to the farthest member.
    pub radius: f64,
}
#[allow(dead_code)]
/// Runs a local optimization from each start point, then clusters the
/// resulting minima to identify distinct basins of attraction.
///
/// Start points whose optimization fails are skipped; an error is returned
/// only when *no* start point produced a result.
///
/// # Errors
/// Returns `OptimizeError::ComputationError` if every local optimization
/// failed, and propagates errors from the clustering/centroid stages.
pub fn multi_start_with_clustering<F, S>(
    fun: F,
    start_points: &[Array1<f64>],
    method: Method,
    options: Option<Options>,
    clustering_options: Option<ClusteringOptions>,
) -> Result<ClusteringResult<S>, OptimizeError>
where
    F: FnMut(&ArrayView1<f64>) -> S + Clone,
    S: Into<f64> + Clone + From<f64>,
{
    let clustering_opts = clustering_options.unwrap_or_default();
    // Run an independent local minimization from every start point.
    let mut minima = Vec::new();
    for start_point in start_points {
        let fun_clone = fun.clone();
        match minimize(
            fun_clone,
            start_point
                .as_slice()
                .expect("start point array must be contiguous"),
            method,
            options.clone(),
        ) {
            Ok(result) => {
                minima.push(LocalMinimum {
                    x: result.x.clone(),
                    f: result.fun.clone().into(),
                    fun_value: result.fun,
                    nit: result.nit,
                    func_evals: result.func_evals,
                    success: result.success,
                    start_point: start_point.clone(),
                    cluster_id: None,
                    cluster_distance: None,
                });
            }
            // Failed starts are tolerated; multi-start is best-effort per run.
            Err(_) => continue,
        }
    }
    if minima.is_empty() {
        return Err(OptimizeError::ComputationError(
            "No successful optimizations found".to_string(),
        ));
    }
    // Label the minima with cluster ids, then summarize each cluster.
    cluster_minima(&mut minima, &clustering_opts)?;
    let centroids = compute_cluster_centroids(&minima)?;
    let num_clusters = centroids.len();
    // (fixed: this argument was mojibake-corrupted to `¢roids`)
    let wcss = compute_wcss(&minima, &centroids);
    let silhouette_score = compute_silhouette_score(&minima);
    // Best minimum = lowest objective value among successful runs.
    let best_minimum = minima
        .iter()
        .filter(|m| m.success)
        .min_by(|a, b| {
            a.f.partial_cmp(&b.f)
                .expect("objective values must not be NaN")
        })
        .cloned();
    Ok(ClusteringResult {
        minima,
        centroids,
        num_clusters,
        silhouette_score,
        wcss,
        best_minimum,
    })
}
#[allow(dead_code)]
fn cluster_minima<S>(
minima: &mut [LocalMinimum<S>],
options: &ClusteringOptions,
) -> Result<(), OptimizeError>
where
S: Clone,
{
if minima.is_empty() {
return Ok(());
}
match options.algorithm {
ClusteringAlgorithm::Hierarchical => hierarchical_clustering(minima, options),
ClusteringAlgorithm::KMeans => kmeans_clustering(minima, options),
ClusteringAlgorithm::Density => density_clustering(minima, options),
ClusteringAlgorithm::Threshold => threshold_clustering(minima, options),
}
}
#[allow(dead_code)]
/// Agglomerative (single-linkage) clustering of local minima.
///
/// Every minimum starts in its own cluster; the two closest clusters (by
/// minimum pairwise member distance) are merged repeatedly until the closest
/// pair is farther apart than `options.distance_threshold`, or the cluster
/// count has dropped to `options.max_clusters`. Final ids are renumbered to a
/// dense `0..k` range.
fn hierarchical_clustering<S>(
    minima: &mut [LocalMinimum<S>],
    options: &ClusteringOptions,
) -> Result<(), OptimizeError>
where
    S: Clone,
{
    let n = minima.len();
    // Trivial sizes need no linkage work.
    if n <= 1 {
        if n == 1 {
            minima[0].cluster_id = Some(0);
        }
        return Ok(());
    }
    let distances = compute_distance_matrix(minima, options);
    // Provisional ids: each point is its own cluster (ids 0..n); every merge
    // mints a fresh id starting from n.
    let mut cluster_assignments = vec![None; n];
    let mut next_cluster_id = n;
    for (i, assignment) in cluster_assignments.iter_mut().enumerate().take(n) {
        *assignment = Some(i);
    }
    let mut active_clusters: Vec<Vec<usize>> = (0..n).map(|i| vec![i]).collect();
    while active_clusters.len() > 1 {
        // Find the closest pair of active clusters (single linkage).
        let mut min_dist = f64::INFINITY;
        let mut merge_pair = (0, 1);
        for i in 0..active_clusters.len() {
            for j in (i + 1)..active_clusters.len() {
                let cluster_dist =
                    compute_cluster_distance(&active_clusters[i], &active_clusters[j], &distances);
                if cluster_dist < min_dist {
                    min_dist = cluster_dist;
                    merge_pair = (i, j);
                }
            }
        }
        // Stop once even the closest clusters are too far apart to merge.
        if min_dist > options.distance_threshold {
            break;
        }
        // NOTE(review): this check runs after the threshold break above, so
        // the threshold can leave more than `max_clusters` clusters behind —
        // confirm that precedence is intended.
        if let Some(max_clusters) = options.max_clusters {
            if active_clusters.len() <= max_clusters {
                break;
            }
        }
        // Merge the chosen pair under a fresh cluster id.
        let (i, j) = merge_pair;
        let mut merged_cluster = active_clusters[i].clone();
        merged_cluster.extend(&active_clusters[j]);
        for &point_idx in &merged_cluster {
            cluster_assignments[point_idx] = Some(next_cluster_id);
        }
        // Rebuild the active list without the two merged clusters.
        let mut new_clusters = Vec::new();
        for (k, cluster) in active_clusters.iter().enumerate() {
            if k != i && k != j {
                new_clusters.push(cluster.clone());
            }
        }
        new_clusters.push(merged_cluster);
        active_clusters = new_clusters;
        next_cluster_id += 1;
    }
    // Renumber surviving cluster ids densely (0..k), in first-seen point order.
    let mut cluster_map = HashMap::new();
    let mut final_cluster_id = 0;
    for cluster_id in cluster_assignments.iter().flatten() {
        if !cluster_map.contains_key(cluster_id) {
            cluster_map.insert(*cluster_id, final_cluster_id);
            final_cluster_id += 1;
        }
    }
    for (i, minimum) in minima.iter_mut().enumerate() {
        if let Some(cluster_id) = cluster_assignments[i] {
            minimum.cluster_id = cluster_map.get(&cluster_id).copied();
        }
    }
    Ok(())
}
#[allow(dead_code)]
/// Lloyd-style k-means over feature vectors extracted from the minima.
///
/// `k` is `options.max_clusters` (capped at n) or a `sqrt(n)+1` heuristic.
/// Seeding is the deterministic farthest-point variant in
/// `initialize_centroids_plus_plus`. Iterates assignment/update steps until
/// assignments stop changing, centroids move less than a tolerance (L1 norm),
/// or 100 iterations elapse.
fn kmeans_clustering<S>(
    minima: &mut [LocalMinimum<S>],
    options: &ClusteringOptions,
) -> Result<(), OptimizeError>
where
    S: Clone,
{
    let n = minima.len();
    // Trivial cases: zero or one minimum.
    if n <= 1 {
        if n == 1 {
            minima[0].cluster_id = Some(0);
        }
        return Ok(());
    }
    // Choose k: the configured maximum, or a sqrt(n)+1 heuristic, capped at n.
    let k = if let Some(max_k) = options.max_clusters {
        std::cmp::min(max_k, n)
    } else {
        std::cmp::min((n as f64).sqrt() as usize + 1, n)
    };
    if k >= n {
        // One cluster per point; nothing to iterate.
        for (i, minimum) in minima.iter_mut().enumerate() {
            minimum.cluster_id = Some(i);
        }
        return Ok(());
    }
    let features = extract_features(minima, options);
    let dim = features.ncols();
    let mut centroids = initialize_centroids_plus_plus(&features, k);
    let mut assignments = vec![0; n];
    let max_iter = 100;
    let tolerance = 1e-6;
    for _iter in 0..max_iter {
        // Assignment step: attach each point to its nearest centroid.
        let mut changed = false;
        for (i, assignment) in assignments.iter_mut().enumerate() {
            let mut min_dist = f64::INFINITY;
            let mut best_cluster = 0;
            for j in 0..k {
                // (fixed: this argument was mojibake-corrupted to `¢roids`)
                let dist = euclidean_distance(&features.row(i), &centroids.row(j));
                if dist < min_dist {
                    min_dist = dist;
                    best_cluster = j;
                }
            }
            if *assignment != best_cluster {
                *assignment = best_cluster;
                changed = true;
            }
        }
        if !changed {
            break;
        }
        // Update step: recompute each centroid as its cluster's mean.
        let mut new_centroids = Array2::zeros((k, dim));
        let mut cluster_sizes = vec![0; k];
        for i in 0..n {
            let cluster = assignments[i];
            cluster_sizes[cluster] += 1;
            for d in 0..dim {
                new_centroids[[cluster, d]] += features[[i, d]];
            }
        }
        for j in 0..k {
            if cluster_sizes[j] > 0 {
                for d in 0..dim {
                    new_centroids[[j, d]] /= cluster_sizes[j] as f64;
                }
            } else {
                // Empty cluster: keep its previous centroid instead of
                // collapsing it to the origin (which would attract distant
                // points spuriously on the next assignment step).
                for d in 0..dim {
                    new_centroids[[j, d]] = centroids[[j, d]];
                }
            }
        }
        // Converged when the total centroid movement (L1) is below tolerance.
        let centroid_change = (&centroids - &new_centroids).mapv(|x| x.abs()).sum();
        centroids = new_centroids;
        if centroid_change < tolerance {
            break;
        }
    }
    for (i, minimum) in minima.iter_mut().enumerate() {
        minimum.cluster_id = Some(assignments[i]);
    }
    Ok(())
}
#[allow(dead_code)]
/// DBSCAN-style density clustering of local minima.
///
/// Points within `options.distance_threshold` (eps) of each other are
/// neighbors; a point with at least `min_pts` neighbors (itself included,
/// since its self-distance of 0 always passes) seeds a cluster, which is then
/// expanded through density-connected core points. Points never reached this
/// way keep `cluster_id == None` (noise).
fn density_clustering<S>(
    minima: &mut [LocalMinimum<S>],
    options: &ClusteringOptions,
) -> Result<(), OptimizeError>
where
    S: Clone,
{
    let n = minima.len();
    // Trivial cases: zero or one minimum.
    if n <= 1 {
        if n == 1 {
            minima[0].cluster_id = Some(0);
        }
        return Ok(());
    }
    let eps = options.distance_threshold;
    let min_pts = 2;
    let distances = compute_distance_matrix(minima, options);
    let mut cluster_assignments = vec![None; n];
    let mut visited = vec![false; n];
    let mut cluster_id = 0;
    for i in 0..n {
        if visited[i] {
            continue;
        }
        visited[i] = true;
        // Neighborhood of i (note: includes i itself).
        let neighbors: Vec<usize> = (0..n).filter(|&j| distances[[i, j]] <= eps).collect();
        if neighbors.len() < min_pts {
            // Not a core point; may still be claimed later as a border point.
            continue;
        }
        let mut to_visit = neighbors.clone();
        cluster_assignments[i] = Some(cluster_id);
        // Breadth-first expansion: `to_visit` doubles as the BFS queue.
        let mut idx = 0;
        while idx < to_visit.len() {
            let point = to_visit[idx];
            if !visited[point] {
                visited[point] = true;
                let point_neighbors: Vec<usize> =
                    (0..n).filter(|&j| distances[[point, j]] <= eps).collect();
                if point_neighbors.len() >= min_pts {
                    // `point` is itself a core point: enqueue its neighborhood.
                    for &neighbor in &point_neighbors {
                        if !to_visit.contains(&neighbor) {
                            to_visit.push(neighbor);
                        }
                    }
                }
            }
            // Claim the point for this cluster unless an earlier cluster did.
            if cluster_assignments[point].is_none() {
                cluster_assignments[point] = Some(cluster_id);
            }
            idx += 1;
        }
        cluster_id += 1;
    }
    // Copy labels back onto the minima (noise stays None).
    for (i, minimum) in minima.iter_mut().enumerate() {
        minimum.cluster_id = cluster_assignments[i];
    }
    Ok(())
}
#[allow(dead_code)]
/// Greedy threshold clustering: each still-unlabelled point (in index order)
/// seeds a new cluster and absorbs every later unlabelled point within
/// `options.distance_threshold` of the seed.
fn threshold_clustering<S>(
    minima: &mut [LocalMinimum<S>],
    options: &ClusteringOptions,
) -> Result<(), OptimizeError>
where
    S: Clone,
{
    let n = minima.len();
    if n <= 1 {
        if n == 1 {
            minima[0].cluster_id = Some(0);
        }
        return Ok(());
    }
    let mut labels: Vec<Option<usize>> = vec![None; n];
    let mut next_id = 0;
    for seed in 0..n {
        if labels[seed].is_some() {
            continue;
        }
        // This point starts a new cluster.
        labels[seed] = Some(next_id);
        for other in (seed + 1)..n {
            if labels[other].is_some() {
                continue;
            }
            // Absorb later points close enough to the seed.
            let separation = compute_distance(&minima[seed], &minima[other], options);
            if separation <= options.distance_threshold {
                labels[other] = Some(next_id);
            }
        }
        next_id += 1;
    }
    for (minimum, label) in minima.iter_mut().zip(labels) {
        minimum.cluster_id = label;
    }
    Ok(())
}
#[allow(dead_code)]
/// Builds the symmetric n×n pairwise distance matrix between minima
/// (diagonal is zero) using `compute_distance`.
fn compute_distance_matrix<S>(
    minima: &[LocalMinimum<S>],
    options: &ClusteringOptions,
) -> Array2<f64>
where
    S: Clone,
{
    let n = minima.len();
    let mut matrix = Array2::zeros((n, n));
    for (i, a) in minima.iter().enumerate() {
        // Only the upper triangle is computed; mirror it for symmetry.
        for (j, b) in minima.iter().enumerate().skip(i + 1) {
            let d = compute_distance(a, b, options);
            matrix[[i, j]] = d;
            matrix[[j, i]] = d;
        }
    }
    matrix
}
#[allow(dead_code)]
/// Distance between two minima: Euclidean distance in coordinate space, plus
/// (when `use_function_values` is enabled) a weighted absolute difference of
/// their objective values.
fn compute_distance<S>(
    min1: &LocalMinimum<S>,
    min2: &LocalMinimum<S>,
    options: &ClusteringOptions,
) -> f64
where
    S: Clone,
{
    let coord_dist = (&min1.x - &min2.x).mapv(|d| d * d).sum().sqrt();
    match options.use_function_values {
        false => coord_dist,
        true => coord_dist + options.function_weight * (min1.f - min2.f).abs(),
    }
}
#[allow(dead_code)]
/// Single-linkage distance between two clusters: the smallest pairwise
/// distance between any member of `cluster1` and any member of `cluster2`,
/// looked up in the precomputed matrix.
fn compute_cluster_distance(
    cluster1: &[usize],
    cluster2: &[usize],
    distances: &Array2<f64>,
) -> f64 {
    cluster1
        .iter()
        .flat_map(|&i| cluster2.iter().map(move |&j| distances[[i, j]]))
        .fold(f64::INFINITY, f64::min)
}
#[allow(dead_code)]
/// Builds the n×d feature matrix used by k-means: the coordinates of each
/// minimum, optionally followed by its weighted objective value, with the
/// coordinate columns optionally min-max normalized.
fn extract_features<S>(minima: &[LocalMinimum<S>], options: &ClusteringOptions) -> Array2<f64>
where
    S: Clone,
{
    if minima.is_empty() {
        return Array2::zeros((0, 0));
    }
    let n = minima.len();
    let coord_dim = minima[0].x.len();
    let extra = if options.use_function_values { 1 } else { 0 };
    let mut features = Array2::zeros((n, coord_dim + extra));
    for (row, minimum) in minima.iter().enumerate() {
        // Coordinate features come first.
        for col in 0..coord_dim {
            features[[row, col]] = minimum.x[col];
        }
        // Optional trailing feature: the weighted objective value.
        if options.use_function_values {
            features[[row, coord_dim]] = minimum.f * options.function_weight;
        }
    }
    if options.normalize_coordinates {
        // Only coordinate columns are normalized, not the function column.
        normalize_features(&mut features, coord_dim);
    }
    features
}
#[allow(dead_code)]
/// Min-max normalizes the first `coord_dim` columns of `features` to [0, 1]
/// in place. Columns with (near-)zero range are left untouched to avoid
/// division by zero.
fn normalize_features(features: &mut Array2<f64>, coord_dim: usize) {
    let rows = features.nrows();
    if rows == 0 {
        return;
    }
    for col in 0..coord_dim {
        // Find the column's range.
        let mut lo = f64::INFINITY;
        let mut hi = f64::NEG_INFINITY;
        for &value in features.column(col).iter() {
            lo = f64::min(lo, value);
            hi = f64::max(hi, value);
        }
        let span = hi - lo;
        if span.abs() > 1e-10 {
            for row in 0..rows {
                features[[row, col]] = (features[[row, col]] - lo) / span;
            }
        }
    }
}
#[allow(dead_code)]
/// Deterministic k-means++-style seeding: start from the first data point,
/// then repeatedly pick the point farthest from all centroids chosen so far
/// (the farthest-point variant of k-means++'s distance-weighted sampling).
fn initialize_centroids_plus_plus(features: &Array2<f64>, k: usize) -> Array2<f64> {
    let (n, dim) = features.dim();
    let mut centroids = Array2::zeros((k, dim));
    if n == 0 || k == 0 {
        return centroids;
    }
    // Seed deterministically with the first data point.
    centroids.row_mut(0).assign(&features.row(0));
    for c in 1..k {
        // distances[i] = distance from point i to its nearest chosen centroid.
        let mut distances = vec![f64::INFINITY; n];
        for (i, distance) in distances.iter_mut().enumerate() {
            let point = features.row(i);
            for j in 0..c {
                let centroid = centroids.row(j);
                // (fixed: this argument was mojibake-corrupted to `¢roid`)
                let dist = euclidean_distance(&point, &centroid);
                *distance = distance.min(dist);
            }
        }
        // Next centroid = the point farthest from all existing centroids.
        let next_idx = distances
            .iter()
            .enumerate()
            .max_by(|a, b| a.1.partial_cmp(b.1).expect("distances must not be NaN"))
            .map(|(i, _)| i)
            .unwrap_or(0);
        centroids.row_mut(c).assign(&features.row(next_idx));
    }
    centroids
}
#[allow(dead_code)]
/// Euclidean (L2) distance between two equal-length vectors.
fn euclidean_distance(a: &ArrayView1<f64>, b: &ArrayView1<f64>) -> f64 {
    let diff = a - b;
    diff.mapv(|d| d * d).sum().sqrt()
}
#[allow(dead_code)]
/// Aggregates labelled minima into per-cluster centroid summaries.
///
/// Minima with `cluster_id == None` (e.g. density-clustering noise) are
/// ignored. Clusters are emitted in ascending `cluster_id` order so the
/// output is deterministic (raw `HashMap` iteration order is not), and so
/// that, for contiguous ids starting at 0, `result[i]` corresponds to cluster
/// id `i` — the correspondence `compute_wcss` relies on.
fn compute_cluster_centroids<S>(
    minima: &[LocalMinimum<S>],
) -> Result<Vec<ClusterCentroid>, OptimizeError>
where
    S: Clone,
{
    if minima.is_empty() {
        return Ok(Vec::new());
    }
    // Group minima by assigned cluster id; groups are non-empty by construction.
    let mut clusters: HashMap<usize, Vec<&LocalMinimum<S>>> = HashMap::new();
    for minimum in minima {
        if let Some(cluster_id) = minimum.cluster_id {
            clusters.entry(cluster_id).or_default().push(minimum);
        }
    }
    // Iterate ids in sorted order for a deterministic, id-aligned output.
    let mut cluster_ids: Vec<usize> = clusters.keys().copied().collect();
    cluster_ids.sort_unstable();
    let mut centroids = Vec::with_capacity(cluster_ids.len());
    for id in cluster_ids {
        let cluster_minima = &clusters[&id];
        let dim = cluster_minima[0].x.len();
        // Mean coordinate plus average/best objective value over the cluster.
        let mut centroid_x = Array1::zeros(dim);
        let mut f_sum = 0.0;
        let mut f_min = f64::INFINITY;
        for minimum in cluster_minima.iter() {
            // (fixed: this operand was mojibake-corrupted to `¢roid_x`)
            centroid_x = &centroid_x + &minimum.x;
            f_sum += minimum.f;
            f_min = f_min.min(minimum.f);
        }
        let size = cluster_minima.len();
        centroid_x /= size as f64;
        let f_avg = f_sum / size as f64;
        // Radius = distance from the centroid to the farthest member.
        let mut max_radius = 0.0;
        for minimum in cluster_minima.iter() {
            let dist = (&minimum.x - &centroid_x).mapv(|d| d * d).sum().sqrt();
            max_radius = f64::max(max_radius, dist);
        }
        centroids.push(ClusterCentroid {
            x: centroid_x,
            f_avg,
            f_min,
            size,
            radius: max_radius,
        });
    }
    Ok(centroids)
}
#[allow(dead_code)]
fn compute_wcss<S>(minima: &[LocalMinimum<S>], centroids: &[ClusterCentroid]) -> f64
where
S: Clone,
{
let mut wcss = 0.0;
for minimum in minima {
if let Some(cluster_id) = minimum.cluster_id {
if cluster_id < centroids.len() {
let centroid = ¢roids[cluster_id];
let dist = (&minimum.x - ¢roid.x).mapv(|x| x.powi(2)).sum();
wcss += dist;
}
}
}
wcss
}
#[allow(dead_code)]
/// Mean silhouette coefficient over all clustered minima.
///
/// For each point: `a` = mean coordinate-space distance to the other members
/// of its own cluster, `b` = smallest mean distance to any other cluster;
/// silhouette = (b - a) / max(a, b). Points that are alone in their cluster,
/// unassigned, or have no other cluster to compare against are skipped.
/// Returns `None` when fewer than two minima exist or no point yields a
/// valid score.
fn compute_silhouette_score<S>(minima: &[LocalMinimum<S>]) -> Option<f64>
where
    S: Clone,
{
    if minima.len() < 2 {
        return None;
    }
    let mut silhouette_sum = 0.0;
    let mut valid_points = 0;
    for (i, minimum) in minima.iter().enumerate() {
        if let Some(cluster_id) = minimum.cluster_id {
            let mut intra_sum = 0.0;
            let mut intra_count = 0;
            let mut min_inter = f64::INFINITY;
            // Per-foreign-cluster running sums/counts of distances.
            let mut cluster_inter_sums: HashMap<usize, f64> = HashMap::new();
            let mut cluster_inter_counts: HashMap<usize, usize> = HashMap::new();
            for (j, other) in minima.iter().enumerate() {
                if i == j {
                    continue;
                }
                if let Some(other_cluster_id) = other.cluster_id {
                    // Euclidean distance in coordinate space only (no
                    // function-value term here).
                    let dist = (&minimum.x - &other.x).mapv(|x| x.powi(2)).sum().sqrt();
                    if other_cluster_id == cluster_id {
                        intra_sum += dist;
                        intra_count += 1;
                    } else {
                        *cluster_inter_sums.entry(other_cluster_id).or_insert(0.0) += dist;
                        *cluster_inter_counts.entry(other_cluster_id).or_insert(0) += 1;
                    }
                }
            }
            // b = minimum over other clusters of the mean distance to them.
            for (other_cluster_id, sum) in cluster_inter_sums {
                let count = cluster_inter_counts[&other_cluster_id];
                if count > 0 {
                    let avg_inter = sum / count as f64;
                    min_inter = min_inter.min(avg_inter);
                }
            }
            // Only points with at least one intra-cluster peer and at least
            // one other cluster contribute.
            if intra_count > 0 && min_inter < f64::INFINITY {
                let a = intra_sum / intra_count as f64;
                let b = min_inter;
                let silhouette = (b - a) / f64::max(a, b);
                silhouette_sum += silhouette;
                valid_points += 1;
            }
        }
    }
    if valid_points > 0 {
        Some(silhouette_sum / valid_points as f64)
    } else {
        None
    }
}
#[allow(dead_code)]
pub fn generate_diverse_start_points(
bounds: &[(f64, f64)],
num_points: usize,
strategy: StartPointStrategy,
) -> Vec<Array1<f64>> {
match strategy {
StartPointStrategy::Random => generate_random_points(bounds, num_points),
StartPointStrategy::LatinHypercube => generate_latin_hypercube_points(bounds, num_points),
StartPointStrategy::Grid => generate_grid_points(bounds, num_points),
StartPointStrategy::Sobol => generate_sobol_points(bounds, num_points),
}
}
/// Strategy for generating diverse optimization start points within bounds.
#[derive(Debug, Clone, Copy)]
pub enum StartPointStrategy {
    /// Deterministic pseudo-random scatter (no RNG; see `generate_random_points`).
    Random,
    /// Stratified sampling, one point per equal stratum.
    LatinHypercube,
    /// Regular grid covering the bounds.
    Grid,
    /// Low-discrepancy radical-inverse sequence.
    Sobol,
}
#[allow(dead_code)]
/// Generates `num_points` deterministic pseudo-random points inside `bounds`.
///
/// No RNG is used: each coordinate is derived from a fractional-multiple
/// scramble of the (dimension, point-index) pair, so results are repeatable.
fn generate_random_points(bounds: &[(f64, f64)], num_points: usize) -> Vec<Array1<f64>> {
    let dim = bounds.len();
    let denom = (num_points * dim) as f64;
    (0..num_points)
        .map(|p| {
            let mut point = Array1::zeros(dim);
            for (i, &(low, high)) in bounds.iter().enumerate() {
                // Scramble the (dimension, point) index pair into [0, 1).
                let t = (i * num_points + p) as f64 / denom;
                let scrambled = (t * 17.0).fract();
                point[i] = low + scrambled * (high - low);
            }
            point
        })
        .collect()
}
#[allow(dead_code)]
/// Generates `num_points` stratified points: point i sits at the midpoint of
/// the i-th of `num_points` equal strata in every dimension.
///
/// NOTE(review): because every dimension uses the same stratum index, the
/// points lie on the main diagonal of the box — a degenerate (maximally
/// correlated) Latin hypercube. Confirm whether per-dimension permutation
/// was intended.
fn generate_latin_hypercube_points(bounds: &[(f64, f64)], num_points: usize) -> Vec<Array1<f64>> {
    let dim = bounds.len();
    let mut points = Vec::with_capacity(num_points);
    for i in 0..num_points {
        // Midpoint of stratum i, shared by all dimensions.
        let fraction = (i as f64 + 0.5) / num_points as f64;
        let mut point = Array1::zeros(dim);
        for (j, &(low, high)) in bounds.iter().enumerate() {
            point[j] = low + fraction * (high - low);
        }
        points.push(point);
    }
    points
}
#[allow(dead_code)]
/// Generates start points on a regular grid covering `bounds`.
///
/// The grid uses ceil(num_points^(1/dim)) levels per dimension; the full
/// Cartesian product is enumerated depth-first and then truncated to exactly
/// `num_points` points.
fn generate_grid_points(bounds: &[(f64, f64)], num_points: usize) -> Vec<Array1<f64>> {
    let dim = bounds.len();
    if dim == 0 {
        return Vec::new();
    }
    let points_per_dim = ((num_points as f64).powf(1.0 / dim as f64)).ceil() as usize;
    // Depth-first enumeration of the per-dimension level product.
    fn fill_dimension(
        bounds: &[(f64, f64)],
        points_per_dim: usize,
        partial: &mut Array1<f64>,
        axis: usize,
        out: &mut Vec<Array1<f64>>,
    ) {
        if axis >= bounds.len() {
            out.push(partial.clone());
            return;
        }
        let (low, high) = bounds[axis];
        for level in 0..points_per_dim {
            // A single level sits at the interval midpoint; otherwise the
            // levels span [low, high] inclusively.
            let t = if points_per_dim == 1 {
                0.5
            } else {
                level as f64 / (points_per_dim - 1) as f64
            };
            partial[axis] = low + t * (high - low);
            fill_dimension(bounds, points_per_dim, partial, axis + 1, out);
        }
    }
    let mut scratch = Array1::zeros(dim);
    let mut grid = Vec::new();
    fill_dimension(bounds, points_per_dim, &mut scratch, 0, &mut grid);
    grid.truncate(num_points);
    grid
}
#[allow(dead_code)]
/// Generates low-discrepancy points via radical-inverse (van der Corput)
/// sequences, using base `2 + j` for dimension `j`.
///
/// NOTE(review): despite the name this is not a true Sobol sequence, and the
/// bases are consecutive integers (some composite) rather than primes —
/// confirm whether that is intended.
fn generate_sobol_points(bounds: &[(f64, f64)], num_points: usize) -> Vec<Array1<f64>> {
    let dim = bounds.len();
    // Radical inverse of `n` in the given base: reflect the base-`base`
    // digits of n about the radix point.
    let radical_inverse = |mut n: usize, base: usize| -> f64 {
        let mut value = 0.0;
        let mut digit_weight = 1.0 / base as f64;
        while n > 0 {
            value += (n % base) as f64 * digit_weight;
            n /= base;
            digit_weight /= base as f64;
        }
        value
    };
    (1..=num_points)
        .map(|index| {
            let mut point = Array1::zeros(dim);
            for (j, &(low, high)) in bounds.iter().enumerate() {
                point[j] = low + radical_inverse(index, 2 + j) * (high - low);
            }
            point
        })
        .collect()
}
#[cfg(test)]
mod tests {
    //! Unit tests for threshold clustering, the combined distance metric,
    //! and start-point generation.
    use super::*;
    use approx::assert_abs_diff_eq;
    /// Two nearby minima must share a cluster; a distant one must not.
    #[test]
    fn test_simple_clustering() {
        let mut minima = vec![
            LocalMinimum {
                x: Array1::from_vec(vec![0.0, 0.0]),
                f: 1.0,
                fun_value: 1.0,
                nit: 10,
                func_evals: 20,
                success: true,
                start_point: Array1::from_vec(vec![1.0, 1.0]),
                cluster_id: None,
                cluster_distance: None,
            },
            LocalMinimum {
                x: Array1::from_vec(vec![0.1, 0.1]),
                f: 1.1,
                fun_value: 1.1,
                nit: 12,
                func_evals: 24,
                success: true,
                start_point: Array1::from_vec(vec![1.1, 1.1]),
                cluster_id: None,
                cluster_distance: None,
            },
            LocalMinimum {
                x: Array1::from_vec(vec![5.0, 5.0]),
                f: 2.0,
                fun_value: 2.0,
                nit: 15,
                func_evals: 30,
                success: true,
                start_point: Array1::from_vec(vec![5.5, 5.5]),
                cluster_id: None,
                cluster_distance: None,
            },
        ];
        let options = ClusteringOptions {
            distance_threshold: 1.0,
            algorithm: ClusteringAlgorithm::Threshold,
            ..Default::default()
        };
        threshold_clustering(&mut minima, &options).expect("Operation failed");
        // First two are ~0.15 apart (< 1.0 threshold); third is far away.
        assert_eq!(minima[0].cluster_id, minima[1].cluster_id);
        assert_ne!(minima[0].cluster_id, minima[2].cluster_id);
    }
    /// Combined metric with default options: coordinate distance plus the
    /// 0.1-weighted function-value difference.
    #[test]
    fn test_distance_computation() {
        let min1 = LocalMinimum {
            x: Array1::from_vec(vec![0.0, 0.0]),
            f: 1.0,
            fun_value: 1.0,
            nit: 10,
            func_evals: 20,
            success: true,
            start_point: Array1::from_vec(vec![1.0, 1.0]),
            cluster_id: None,
            cluster_distance: None,
        };
        let min2 = LocalMinimum {
            x: Array1::from_vec(vec![3.0, 4.0]),
            f: 2.0,
            fun_value: 2.0,
            nit: 12,
            func_evals: 24,
            success: true,
            start_point: Array1::from_vec(vec![3.5, 4.5]),
            cluster_id: None,
            cluster_distance: None,
        };
        let options = ClusteringOptions::default();
        let distance = compute_distance(&min1, &min2, &options);
        // 3-4-5 triangle gives coordinate distance 5.0; function term adds
        // 0.1 * |2.0 - 1.0| = 0.1.
        assert_abs_diff_eq!(distance, 5.1, epsilon = 1e-10);
    }
    /// Generated start points must honor the per-dimension bounds.
    #[test]
    fn test_start_point_generation() {
        let bounds = vec![(0.0, 10.0), (-5.0, 5.0)];
        let num_points = 5;
        let random_points =
            generate_diverse_start_points(&bounds, num_points, StartPointStrategy::Random);
        assert_eq!(random_points.len(), num_points);
        for point in &random_points {
            assert!(point[0] >= 0.0 && point[0] <= 10.0);
            assert!(point[1] >= -5.0 && point[1] <= 5.0);
        }
        let grid_points =
            generate_diverse_start_points(&bounds, num_points, StartPointStrategy::Grid);
        assert_eq!(grid_points.len(), num_points);
        for point in &grid_points {
            assert!(point[0] >= 0.0 && point[0] <= 10.0);
            assert!(point[1] >= -5.0 && point[1] <= 5.0);
        }
    }
}