//! Particle initialisation for the sailing-route PSO
//! (`swarmkit_sailing/init.rs`): baseline selection, shape-family
//! allocation, and random velocity/time seeding.
use crate::dynamics::get_travel_time_range;
use crate::path_baseline::PathBaseline;
use crate::route_bounds::RouteBounds;
use crate::spherical::Segment;
use crate::units::Path;
use crate::{LandmassSource, Sailboat, SailboatFitData, SeaPathBias, WindSource};
use rand::{Rng, RngExt as _};
use std::f64::consts::PI;
use std::ops::Range;
use swarmkit::{FitCalc, Group, Particle, ParticleInit};

/// Allocation shares across init shape families. Weights are
/// normalised at allocation; negative is clamped to 0; all-zero falls
/// back to `sin_k1`-only. The largest-share family absorbs rounding
/// drift.
///
/// Consumed by `allocate_families`, which converts the weights into
/// per-particle `(family, index, count)` slots.
#[derive(Clone, Copy, Debug)]
pub struct InitShares {
    /// Smooth arc `sin(π·t)`; curvature spread across `[-1, 1]`.
    pub sin_k1: f64,
    /// S-curve `sin(2π·t)`. One inflection.
    pub sin_k2: f64,
    /// 1-tack zigzag `sin(3π·t)`. Two inflections.
    pub sin_k3: f64,
    /// 1.5-tack zigzag `sin(4π·t)`. Three inflections.
    pub sin_k4: f64,
    /// Random anchor with triangular falloff to both endpoints —
    /// asymmetric one-feature paths.
    pub anchor: f64,
    /// Independent per-waypoint Gaussian perpendicular noise.
    pub gaussian: f64,
}
32
33impl Default for InitShares {
34    fn default() -> Self {
35        Self {
36            sin_k1: 0.30,
37            sin_k2: 0.20,
38            sin_k3: 0.15,
39            sin_k4: 0.10,
40            anchor: 0.15,
41            gaussian: 0.10,
42        }
43    }
44}

/// Allocation shares across baseline reference paths.
///
/// The shape families ([`InitShares`]) give *shape* diversity around
/// one baseline; multiple baselines give *topology* diversity (e.g.
/// around-north vs around-south) that no shape family can interpolate
/// between. Same normalisation rules as [`InitShares`].
///
/// Consumed by `compute_baselines` (which skips zero-share or failed
/// cohorts) and `allocate_particles_across_baselines`.
#[derive(Clone, Copy, Debug)]
pub struct BaselineShares {
    /// Straight-line lerp. Insurance for short routes where
    /// "straight line + big kick" is competitive with detouring.
    pub straight_line: f64,
    /// A* polyline biased north (south of the straight line is blocked).
    pub polyline_north: f64,
    /// Mirror of `polyline_north`.
    pub polyline_south: f64,
}
62
63impl Default for BaselineShares {
64    fn default() -> Self {
65        // 80% polyline (40/40 north/south) + 20% straight-line.
66        Self {
67            straight_line: 0.2,
68            polyline_north: 0.4,
69            polyline_south: 0.4,
70        }
71    }
72}

/// Shape-family tag for one init particle; dispatched in
/// `init_diverse_particle` to the matching `offsets_*` generator.
#[derive(Copy, Clone, Debug)]
enum Family {
    /// `sin(1·π·t)` smooth arc (deterministic per slot).
    SinK1,
    /// `sin(2·π·t)` S-curve (deterministic per slot).
    SinK2,
    /// `sin(3·π·t)` zigzag (deterministic per slot).
    SinK3,
    /// `sin(4·π·t)` zigzag (deterministic per slot).
    SinK4,
    /// Random single-anchor tent offsets (`offsets_anchor`).
    Anchor,
    /// Random per-waypoint Gaussian offsets (`offsets_gaussian`).
    Gaussian,
}
83
84/// Per-particle `(family, within_family_index, family_count)`. Falls
85/// back to all-`SinK1` for degenerate input.
86fn allocate_families(particle_count: usize, shares: &InitShares) -> Vec<(Family, usize, usize)> {
87    let raw = [
88        (Family::SinK1, shares.sin_k1.max(0.0)),
89        (Family::SinK2, shares.sin_k2.max(0.0)),
90        (Family::SinK3, shares.sin_k3.max(0.0)),
91        (Family::SinK4, shares.sin_k4.max(0.0)),
92        (Family::Anchor, shares.anchor.max(0.0)),
93        (Family::Gaussian, shares.gaussian.max(0.0)),
94    ];
95    let total: f64 = raw.iter().map(|(_, s)| s).sum();
96    if total <= 0.0 || particle_count == 0 {
97        return (0..particle_count)
98            .map(|i| (Family::SinK1, i, particle_count))
99            .collect();
100    }
101    let mut counts: Vec<usize> = raw
102        .iter()
103        .map(|(_, s)| (particle_count as f64 * s / total).round() as usize)
104        .collect();
105    let sum: usize = counts.iter().sum();
106    if sum != particle_count {
107        // `total_cmp` is total on f64 (NaN sorts somewhere), eliminating
108        // the `partial_cmp().unwrap()` pair. The outer `unwrap_or(0)` is
109        // defensive: `raw` is non-empty in every observed call path
110        // (the function has 6 families, and the `total <= 0` guard
111        // above returns before we get here), but the fallback to 0
112        // keeps `counts[largest] += …` well-defined if a future caller
113        // ever passes an empty `raw`.
114        let largest = raw
115            .iter()
116            .enumerate()
117            .max_by(|a, b| a.1.1.total_cmp(&b.1.1))
118            .map_or(0, |(i, _)| i);
119        if sum < particle_count {
120            counts[largest] += particle_count - sum;
121        } else {
122            counts[largest] = counts[largest].saturating_sub(sum - particle_count);
123        }
124    }
125    let mut out = Vec::with_capacity(particle_count);
126    for (i, (family, _)) in raw.iter().enumerate() {
127        let n = counts[i];
128        for j in 0..n {
129            out.push((*family, j, n));
130        }
131    }
132    out
133}

/// Particle initialiser for `Path<N>` swarms: positions come from
/// baseline-plus-shape-family offsets, velocities from random
/// tangent-frame vectors.
pub struct PathInit<'a, const N: usize, SB, WS, TFit>
where
    SB: Sailboat,
    WS: WindSource,
    TFit: FitCalc<T = Path<N>> + SailboatFitData,
{
    // Route endpoints/bbox; also used for clamping and time seeding.
    bounds: &'a RouteBounds,
    // Boat model consulted for per-segment travel-time ranges.
    boat: &'a SB,
    // Wind lookup consulted for per-segment travel-time ranges.
    wind_source: &'a WS,
    // Fitness function; also supplies the landmass used for A* baselines.
    fit_calc: &'a TFit,
    // Total particles to create across all baselines and families.
    particle_count: usize,
    // Shape-family mix (see [`InitShares`]).
    init_shares: InitShares,
    // Baseline mix (see [`BaselineShares`]).
    baseline_shares: BaselineShares,
}

impl<'a, const N: usize, SB, WS, TFit> PathInit<'a, N, SB, WS, TFit>
where
    SB: Sailboat,
    WS: WindSource,
    TFit: FitCalc<T = Path<N>> + SailboatFitData,
{
    /// Plain constructor; every input is borrowed for the lifetime of
    /// the init pass.
    pub fn new(
        bounds: &'a RouteBounds,
        boat: &'a SB,
        wind_source: &'a WS,
        fit_calc: &'a TFit,
        particle_count: usize,
        init_shares: InitShares,
        baseline_shares: BaselineShares,
    ) -> Self {
        PathInit {
            bounds,
            boat,
            wind_source,
            fit_calc,
            particle_count,
            init_shares,
            baseline_shares,
        }
    }

    /// Same as [`ParticleInit::init`] plus a per-baseline partition
    /// for the niched topology — one `Range` per cohort in the order
    /// the internal baseline-computation routine emits, empty cohorts
    /// absent. RNG draws match `init` so the same seed produces the
    /// identical layout.
    pub fn init_with_partition<R: Rng>(&self, rng: &mut R) -> (Group<Path<N>>, Vec<Range<usize>>) {
        let baselines =
            compute_baselines::<N, _>(self.bounds, self.fit_calc.landmass(), &self.baseline_shares);
        let counts = allocate_particles_across_baselines(self.particle_count, &baselines);

        // Positions are drawn first, then velocities, mirroring the
        // `init_pos` → `init_vel` order so the RNG stream lines up with
        // the default `init` as documented above.
        let mut positions: Vec<Path<N>> = Vec::with_capacity(self.particle_count);
        let mut partition: Vec<Range<usize>> = Vec::with_capacity(counts.len());
        let mut cursor = 0usize;
        for ((baseline, _share), count) in baselines.iter().zip(counts.iter()) {
            let ctx = InitContext {
                baseline,
                bounds: self.bounds,
                boat: self.boat,
                wind_source: self.wind_source,
            };
            let mut sub = init_diverse_per_baseline(&ctx, *count, &self.init_shares, rng);
            positions.append(&mut sub);
            // NOTE(review): a cohort with `count == 0` still pushes an
            // empty `Range` here, although the doc comment above says
            // empty cohorts are absent — confirm which is intended.
            partition.push(cursor..cursor + count);
            cursor += count;
        }

        let velocities = self.init_vel(rng);
        debug_assert_eq!(
            positions.len(),
            velocities.len(),
            "init_vel must produce one velocity per position",
        );

        // Evaluate fitness once per particle; personal best starts at
        // the initial position/fitness.
        let mut group = Group::<Path<N>>::with_capacity(self.particle_count);
        for (pos, vel) in positions.into_iter().zip(velocities) {
            let fit = self.fit_calc.calculate_fit(pos);
            group.push(Particle {
                pos,
                vel,
                fit,
                best_pos: pos,
                best_fit: fit,
            });
        }
        (group, partition)
    }
}

impl<const N: usize, SB, WS, TFit> ParticleInit for PathInit<'_, N, SB, WS, TFit>
where
    SB: Sailboat,
    WS: WindSource,
    TFit: FitCalc<T = Path<N>> + SailboatFitData,
{
    type T = Path<N>;

    /// Position half of the init: baselines → per-baseline counts →
    /// shape-family particles. This deliberately duplicates the
    /// position loop of `init_with_partition` (minus the partition
    /// bookkeeping) — the two must consume the RNG identically, so
    /// keep them in sync when editing either.
    fn init_pos<R: Rng>(&self, rng: &mut R) -> Vec<Self::T> {
        let baselines =
            compute_baselines::<N, _>(self.bounds, self.fit_calc.landmass(), &self.baseline_shares);
        let counts = allocate_particles_across_baselines(self.particle_count, &baselines);
        let mut out: Vec<Self::T> = Vec::with_capacity(self.particle_count);
        for ((baseline, _share), count) in baselines.iter().zip(counts.iter()) {
            let ctx = InitContext {
                baseline,
                bounds: self.bounds,
                boat: self.boat,
                wind_source: self.wind_source,
            };
            let mut sub = init_diverse_per_baseline(&ctx, *count, &self.init_shares, rng);
            out.append(&mut sub);
        }
        out
    }

    /// One random tangent-frame velocity per particle. The 0.3 factor
    /// scales the fixed magnitude to 30% of the bbox diagonal —
    /// presumably tuned empirically; see `init_random_vel_particle`.
    fn init_vel<R: Rng>(&self, rng: &mut R) -> Vec<Self::T> {
        (0..self.particle_count)
            .map(|_| init_random_vel_particle(self.bounds, rng, 0.3))
            .collect()
    }
}
256
257/// Active baselines for the next init pass. Falls back to a single
258/// straight-line baseline if every share is zero or every A* fails.
259fn compute_baselines<const N: usize, LS: LandmassSource>(
260    bounds: &RouteBounds,
261    landmass: &LS,
262    shares: &BaselineShares,
263) -> Vec<(PathBaseline<N>, f64)> {
264    let mut out: Vec<(PathBaseline<N>, f64)> = Vec::new();
265    if shares.straight_line > 0.0 {
266        out.push((
267            PathBaseline::straight_line(bounds),
268            shares.straight_line.max(0.0),
269        ));
270    }
271    if shares.polyline_north > 0.0
272        && let Some(poly) = landmass.find_sea_path(
273            bounds.origin,
274            bounds.destination,
275            bounds,
276            SeaPathBias::North,
277        )
278    {
279        out.push((
280            PathBaseline::from_polyline(&poly, bounds, landmass),
281            shares.polyline_north.max(0.0),
282        ));
283    }
284    if shares.polyline_south > 0.0
285        && let Some(poly) = landmass.find_sea_path(
286            bounds.origin,
287            bounds.destination,
288            bounds,
289            SeaPathBias::South,
290        )
291    {
292        out.push((
293            PathBaseline::from_polyline(&poly, bounds, landmass),
294            shares.polyline_south.max(0.0),
295        ));
296    }
297    if out.is_empty() {
298        out.push((PathBaseline::straight_line(bounds), 1.0));
299    }
300    out
301}
302
303/// Split `particle_count` across baselines proportional to their share,
304/// rounding to integers and assigning the rounding drift to the
305/// largest-share baseline. Returns one count per input baseline.
306fn allocate_particles_across_baselines<const N: usize>(
307    particle_count: usize,
308    baselines: &[(PathBaseline<N>, f64)],
309) -> Vec<usize> {
310    if baselines.is_empty() || particle_count == 0 {
311        return vec![0; baselines.len()];
312    }
313    let total: f64 = baselines.iter().map(|(_, s)| s).sum();
314    if total <= 0.0 {
315        // Degenerate — assign everything to the first baseline.
316        let mut counts = vec![0; baselines.len()];
317        counts[0] = particle_count;
318        return counts;
319    }
320    let mut counts: Vec<usize> = baselines
321        .iter()
322        .map(|(_, s)| (particle_count as f64 * s / total).round() as usize)
323        .collect();
324    let sum: usize = counts.iter().sum();
325    if sum != particle_count {
326        let largest = baselines
327            .iter()
328            .enumerate()
329            .max_by(|a, b| {
330                a.1.1
331                    .partial_cmp(&b.1.1)
332                    .unwrap_or(std::cmp::Ordering::Equal)
333            })
334            .map(|(i, _)| i)
335            .unwrap_or(0);
336        if sum < particle_count {
337            counts[largest] += particle_count - sum;
338        } else {
339            counts[largest] = counts[largest].saturating_sub(sum - particle_count);
340        }
341    }
342    counts
343}

// Inner-PSO `Time<N>` init lives in
// `time.rs::build_inner_group_from_cache` now — see
// `bywind/profiling/stage2-counters.md` for why.

// Free functions: the path-building machinery the inits dispatch into.

/// Borrows shared by the init helpers.
pub(crate) struct InitContext<'a, const N: usize, SB, WS> {
    /// Reference path the unitless offsets perturb.
    pub baseline: &'a PathBaseline<N>,
    /// Endpoint pinning, bbox clamping, and time-seeding parameters.
    pub bounds: &'a RouteBounds,
    /// Boat model for per-segment travel-time seeding.
    pub boat: &'a SB,
    /// Wind lookup for per-segment travel-time seeding.
    pub wind_source: &'a WS,
}

/// One slot from [`allocate_families`]: family + `(index, count)` for
/// positioning within the family's curvature-parameter row.
#[derive(Copy, Clone)]
struct FamilyAllocation {
    // Which shape generator this particle uses.
    family: Family,
    // 0-based index of the particle within its family.
    within_index: usize,
    // Total particles allocated to the family.
    family_count: usize,
}
367
368/// Multi-baseline allocation entry point used by `PathInit`.
369pub(crate) fn init_diverse_per_baseline<const N: usize, SB, WS, R>(
370    ctx: &InitContext<'_, N, SB, WS>,
371    particle_count: usize,
372    shares: &InitShares,
373    rng: &mut R,
374) -> Vec<Path<N>>
375where
376    SB: Sailboat,
377    WS: WindSource,
378    R: Rng,
379{
380    let allocation = allocate_families(particle_count, shares);
381    allocation
382        .into_iter()
383        .map(|(family, within_index, family_count)| {
384            init_diverse_particle(
385                ctx,
386                rng,
387                FamilyAllocation {
388                    family,
389                    within_index,
390                    family_count,
391                },
392            )
393        })
394        .collect()
395}
396
397fn init_diverse_particle<const N: usize, SB, WS, R>(
398    ctx: &InitContext<'_, N, SB, WS>,
399    rng: &mut R,
400    alloc: FamilyAllocation,
401) -> Path<N>
402where
403    SB: Sailboat,
404    WS: WindSource,
405    R: Rng,
406{
407    let FamilyAllocation {
408        family,
409        within_index,
410        family_count,
411    } = alloc;
412    let offsets: [f64; N] = match family {
413        Family::SinK1 => offsets_sinusoidal::<N>(1, within_index, family_count),
414        Family::SinK2 => offsets_sinusoidal::<N>(2, within_index, family_count),
415        Family::SinK3 => offsets_sinusoidal::<N>(3, within_index, family_count),
416        Family::SinK4 => offsets_sinusoidal::<N>(4, within_index, family_count),
417        Family::Anchor => offsets_anchor::<N, _>(rng),
418        Family::Gaussian => offsets_gaussian::<N, _>(rng),
419    };
420
421    build_path_with_offsets(ctx, rng, &offsets)
422}

/// Build a `Path<N>` by perturbing `baseline` with unitless `offsets`.
/// Each interior waypoint is placed at `baseline.positions[i] +
/// (perpendicular metres in tangent frame, converted back to
/// (lon°, lat°))`. Endpoints are pinned to the bounds.
///
/// `bounds` is consulted only for endpoint pinning and per-segment
/// random time seeding; the *position* of interior waypoints comes
/// entirely from the baseline + offsets.
fn build_path_with_offsets<const N: usize, SB, WS, R>(
    ctx: &InitContext<'_, N, SB, WS>,
    rng: &mut R,
    offsets: &[f64; N],
) -> Path<N>
where
    SB: Sailboat,
    WS: WindSource,
    R: Rng,
{
    let InitContext {
        baseline,
        bounds,
        boat,
        wind_source,
    } = *ctx;
    let mut p: Path<N> = Path::default();
    bounds.constrain_endpoints_xyt(&mut p);
    let mut accumulated_time = 0.0;

    // skip(1): waypoint 0 is the pinned origin and gets no offset and
    // no segment time. The last index (i == N-1) is also pinned, so it
    // only receives a segment time below.
    for (i, &offset) in offsets.iter().enumerate().skip(1) {
        if i != N - 1 {
            let here = baseline.positions[i];
            // Positive and negative offsets can have different reach
            // (asymmetric perp scales per waypoint).
            let scale = if offset >= 0.0 {
                baseline.perp_scale_pos[i]
            } else {
                baseline.perp_scale_neg[i]
            };
            let amplitude_m = offset * scale;
            let perp_metres = baseline.perpendiculars[i] * amplitude_m;
            // Bbox-clamp: without it, a large `perp_scale` on a long
            // baseline can push past ±90° lat and crash spherical math
            // downstream with NaN / infinite travel times.
            p.xy.set_lat_lon(i, bounds.clamp(here.offset_by(perp_metres)));
        }
        // Seed the travel time for segment i-1 → i; must run after the
        // position write above since it reads both segment endpoints.
        init_random_time(
            bounds,
            boat,
            wind_source,
            rng,
            &mut p,
            &mut accumulated_time,
            i,
        );
    }

    p
}
480
481fn init_random_vel_particle<const N: usize, R: Rng>(
482    bounds: &RouteBounds,
483    rng: &mut R,
484    magnitude_coefficient_01: f64,
485) -> Path<N> {
486    // Endpoint velocities stay zero (positions constrained); the rest
487    // are tangent-frame metres scaled to the bbox diagonal. First-iter
488    // cognitive/social pulls dominate this anyway.
489    let mut p: Path<N> = Path::default();
490    let scale_m = bounds.bbox.diagonal_m() * magnitude_coefficient_01;
491
492    for i in 1..N - 1 {
493        let dx = rng.random::<f64>() - 0.5;
494        let dy = rng.random::<f64>() - 0.5;
495        let mag = dx.hypot(dy).max(1e-12);
496        p.xy.0[i] = (dx / mag) * scale_m;
497        p.xy.1[i] = (dy / mag) * scale_m;
498    }
499
500    p
501}

/// Seed `p.t[i]` with a random feasible travel time for the segment
/// `i-1 → i`, then advance `accumulated_time` by the drawn value so
/// the next segment's wind lookup starts at the right origin time.
fn init_random_time<const N: usize, SB, WS, R>(
    bounds: &RouteBounds,
    boat: &SB,
    wind_source: &WS,
    rng: &mut R,
    p: &mut Path<N>,
    accumulated_time: &mut f64,
    i: usize,
) where
    SB: Sailboat,
    WS: WindSource,
    R: Rng,
{
    // Feasible [min, max] travel time for this segment given boat and
    // wind; reads the segment endpoints already written into `p`.
    let (t_min, t_max) = get_travel_time_range(
        boat,
        wind_source,
        Segment {
            origin: p.xy.lat_lon(i - 1),
            destination: p.xy.lat_lon(i),
            origin_time: *accumulated_time,
            step_distance_max: bounds.step_distance_max,
        },
    );

    // Degenerate range (t_min >= t_max) would panic in `random_range`,
    // so fall back to the lower bound.
    p.t[i] = if t_min < t_max {
        rng.random_range(t_min..t_max)
    } else {
        t_min
    };
    *accumulated_time += p.t[i];
}

/// Unitless `sin(k·π·t)` offsets in `[-1/k, 1/k]`. Curvature factor
/// `c` spreads across `[-1, 1]` over the family; amplitude divided by
/// `k` keeps total path length comparable across frequencies.
fn offsets_sinusoidal<const N: usize>(
    k: u32,
    within_index: usize,
    family_count: usize,
) -> [f64; N] {
    // Spread the curvature factor evenly over [-1, 1]; a lone family
    // member sits exactly on the baseline (c = 0).
    let curvature = if family_count > 1 {
        let mid = (family_count as f64 - 1.0) / 2.0;
        (within_index as f64 - mid) / mid
    } else {
        0.0
    };
    let amplitude = curvature / f64::from(k);

    // Interior waypoints only; endpoints stay pinned at 0.
    let mut offsets = [0.0; N];
    for (i, slot) in offsets.iter_mut().enumerate().skip(1).take(N.saturating_sub(2)) {
        let t = i as f64 / N as f64;
        *slot = (f64::from(k) * PI * t).sin() * amplitude;
    }
    offsets
}
562
563/// Unitless offsets: one anchor displacement in `[-1, 1]` with linear
564/// falloff to both endpoints. Anchor `t ∈ [0.2, 0.8]` avoids degenerate
565/// near-endpoint falloffs.
566fn offsets_anchor<const N: usize, R: Rng>(rng: &mut R) -> [f64; N] {
567    let mut offsets = [0.0; N];
568    if N < 3 {
569        return offsets;
570    }
571    let anchor_t = rng.random_range(0.2_f64..0.8);
572    let displacement = rng.random_range(-1.0_f64..1.0);
573    let falloff_width = anchor_t.max(1.0 - anchor_t);
574    for (i, slot) in offsets
575        .iter_mut()
576        .enumerate()
577        .skip(1)
578        .take(N.saturating_sub(2))
579    {
580        let t = (i as f64) / (N as f64);
581        let dist = (t - anchor_t).abs();
582        let factor = if dist >= falloff_width {
583            0.0
584        } else {
585            1.0 - dist / falloff_width
586        };
587        *slot = displacement * factor;
588    }
589    offsets
590}
591
592/// Independent per-waypoint Box–Muller Gaussian offsets, σ = 0.2.
593fn offsets_gaussian<const N: usize, R: Rng>(rng: &mut R) -> [f64; N] {
594    const SIGMA: f64 = 0.2;
595    let mut offsets = [0.0; N];
596    for slot in offsets.iter_mut().skip(1).take(N.saturating_sub(2)) {
597        let u1 = rng.random::<f64>().max(f64::MIN_POSITIVE);
598        let u2 = rng.random::<f64>();
599        let z = (-2.0 * u1.ln()).sqrt() * (2.0 * PI * u2).cos();
600        *slot = z * SIGMA;
601    }
602    offsets
603}