// solverforge_scoring/stream/uni_stream.rs
//! Zero-erasure uni-constraint stream for single-entity constraint patterns.
//!
//! A `UniConstraintStream` operates on a single entity type and supports
//! filtering, weighting, and constraint finalization. All type information
//! is preserved at compile time - no Arc, no dyn, fully monomorphized.
use std::hash::Hash;
use std::marker::PhantomData;

use solverforge_core::score::Score;
use solverforge_core::{ConstraintRef, ImpactType};

use crate::constraint::if_exists::ExistenceMode;
use crate::constraint::incremental::IncrementalUniConstraint;

use super::balance_stream::BalanceConstraintStream;
use super::bi_stream::BiConstraintStream;
use super::collector::UniCollector;
use super::cross_bi_stream::CrossBiConstraintStream;
use super::filter::{
    AndBiFilter, AndUniFilter, FnBiFilter, FnUniFilter, TrueFilter, UniFilter, UniLeftBiFilter,
};
use super::grouped_stream::GroupedConstraintStream;
use super::if_exists_stream::IfExistsStream;
use super::joiner::EqualJoiner;
/// Zero-erasure constraint stream over a single entity type.
///
/// `UniConstraintStream` accumulates filters and can be finalized into
/// an `IncrementalUniConstraint` via `penalize()` or `reward()`.
///
/// All type parameters are concrete - no trait objects, no Arc allocations
/// in the hot path.
///
/// # Type Parameters
///
/// - `S` - Solution type
/// - `A` - Entity type
/// - `E` - Extractor function type
/// - `F` - Combined filter type
/// - `Sc` - Score type
pub struct UniConstraintStream<S, A, E, F, Sc>
where
    Sc: Score,
{
    /// Extracts the `&[A]` entity slice from a solution.
    extractor: E,
    /// Compile-time composed filter chain (AND semantics).
    filter: F,
    /// `fn() -> T` pointers mark the type parameters as used without owning
    /// values of those types, so no `Send`/`Sync`/drop obligations are imposed.
    _phantom: PhantomData<(fn() -> S, fn() -> A, fn() -> Sc)>,
}
51
impl<S, A, E, Sc> UniConstraintStream<S, A, E, TrueFilter, Sc>
where
    S: Send + Sync + 'static,
    A: Clone + Send + Sync + 'static,
    E: Fn(&S) -> &[A] + Send + Sync,
    Sc: Score + 'static,
{
    /// Creates a new uni-constraint stream with the given extractor.
    ///
    /// The stream starts with `TrueFilter`, which accepts every entity;
    /// call `filter` to narrow it.
    pub fn new(extractor: E) -> Self {
        Self {
            extractor,
            filter: TrueFilter,
            _phantom: PhantomData,
        }
    }
}
68
impl<S, A, E, F, Sc> UniConstraintStream<S, A, E, F, Sc>
where
    S: Send + Sync + 'static,
    A: Clone + Send + Sync + 'static,
    E: Fn(&S) -> &[A] + Send + Sync,
    F: UniFilter<S, A>,
    Sc: Score + 'static,
{
    /// Adds a filter predicate to the stream.
    ///
    /// Multiple filters are combined with AND semantics at compile time.
    /// Each filter adds a new type layer, preserving zero-erasure.
    ///
    /// To access related entities, use shadow variables on your entity type
    /// (e.g., `#[inverse_relation_shadow_variable]`) rather than solution traversal.
    pub fn filter<P>(
        self,
        predicate: P,
    ) -> UniConstraintStream<
        S,
        A,
        E,
        AndUniFilter<F, FnUniFilter<impl Fn(&S, &A) -> bool + Send + Sync>>,
        Sc,
    >
    where
        P: Fn(&A) -> bool + Send + Sync + 'static,
    {
        UniConstraintStream {
            extractor: self.extractor,
            // Adapt the entity-only predicate to the (solution, entity)
            // signature expected by `FnUniFilter`; the solution is ignored.
            filter: AndUniFilter::new(
                self.filter,
                FnUniFilter::new(move |_s: &S, a: &A| predicate(a)),
            ),
            _phantom: PhantomData,
        }
    }

    /// Joins this stream with itself to create pairs (zero-erasure).
    ///
    /// Requires an `EqualJoiner` to enable key-based indexing for O(k) lookups.
    /// For self-joins, pairs are ordered (i < j) to avoid duplicates.
    ///
    /// Any filters accumulated on this stream are applied to both entities
    /// individually before the join.
    ///
    /// NOTE(review): `UniLeftBiFilter` nominally tests only the left entity;
    /// confirm `BiConstraintStream`'s self-join indeed applies it to both
    /// sides as documented above.
    pub fn join_self<K, KA, KB>(
        self,
        joiner: EqualJoiner<KA, KB, K>,
    ) -> BiConstraintStream<
        S,
        A,
        K,
        E,
        impl Fn(&S, &A, usize) -> K + Send + Sync,
        UniLeftBiFilter<F, A>,
        Sc,
    >
    where
        A: Hash + PartialEq,
        K: Eq + Hash + Clone + Send + Sync,
        KA: Fn(&A) -> K + Send + Sync,
        KB: Fn(&A) -> K + Send + Sync,
    {
        // For a self-join both joiner keys operate on `A`; only the left key
        // extractor is needed.
        let (key_extractor, _) = joiner.into_keys();

        // Wrap key_extractor to match the new KE: Fn(&S, &A, usize) -> K signature.
        // The static stream API doesn't need solution/index, so ignore them.
        let wrapped_ke = move |_s: &S, a: &A, _idx: usize| key_extractor(a);

        // Convert uni-filter to bi-filter that applies to left entity
        let bi_filter = UniLeftBiFilter::new(self.filter);

        BiConstraintStream::new_self_join_with_filter(self.extractor, wrapped_ke, bi_filter)
    }

    /// Joins this stream with another collection to create cross-entity pairs (zero-erasure).
    ///
    /// Requires an `EqualJoiner` to enable key-based indexing for O(1) lookups.
    /// Unlike `join_self` which pairs entities within the same collection,
    /// `join_keyed` creates pairs from two different collections (e.g., Shift joined
    /// with Employee).
    ///
    /// Any filters accumulated on this stream are applied to the A entity
    /// before the join.
    pub fn join_keyed<B, EB, K, KA, KB>(
        self,
        extractor_b: EB,
        joiner: EqualJoiner<KA, KB, K>,
    ) -> CrossBiConstraintStream<S, A, B, K, E, EB, KA, KB, UniLeftBiFilter<F, B>, Sc>
    where
        B: Clone + Send + Sync + 'static,
        EB: Fn(&S) -> &[B] + Send + Sync,
        K: Eq + Hash + Clone + Send + Sync,
        KA: Fn(&A) -> K + Send + Sync,
        KB: Fn(&B) -> K + Send + Sync,
    {
        let (key_a, key_b) = joiner.into_keys();

        // Convert uni-filter to bi-filter that applies to left entity only
        let bi_filter = UniLeftBiFilter::new(self.filter);

        CrossBiConstraintStream::new_with_filter(
            self.extractor,
            extractor_b,
            key_a,
            key_b,
            bi_filter,
        )
    }

    /// Joins this stream with another stream using a predicate (O(n*m) nested loop).
    ///
    /// This is the ergonomic join API. Use `join_keyed` for performance-critical joins.
    ///
    /// NOTE(review): any filters accumulated on `other` are discarded here
    /// (`_filter_b` below), so B-side filters do not apply to the join.
    /// Confirm whether this is intended or the B filter should be composed in.
    pub fn join<B, EB, FB, P>(
        self,
        other: UniConstraintStream<S, B, EB, FB, Sc>,
        predicate: P,
    ) -> CrossBiConstraintStream<
        S,
        A,
        B,
        u8,
        E,
        EB,
        fn(&A) -> u8,
        fn(&B) -> u8,
        AndBiFilter<UniLeftBiFilter<F, B>, FnBiFilter<impl Fn(&S, &A, &B) -> bool + Send + Sync>>,
        Sc,
    >
    where
        B: Clone + Send + Sync + 'static,
        EB: Fn(&S) -> &[B] + Send + Sync,
        FB: UniFilter<S, B>,
        P: Fn(&A, &B) -> bool + Send + Sync + 'static,
    {
        let (extractor_b, _filter_b) = other.into_parts();
        let bi_filter = UniLeftBiFilter::new(self.filter);
        let pred_filter = FnBiFilter::new(move |_s: &S, a: &A, b: &B| predicate(a, b));
        // Constant `0u8` keys put every pair in one bucket, degrading the
        // keyed join to the documented O(n*m) nested-loop behavior.
        CrossBiConstraintStream::new_with_filter(
            self.extractor,
            extractor_b,
            (|_: &A| 0u8) as fn(&A) -> u8,
            (|_: &B| 0u8) as fn(&B) -> u8,
            AndBiFilter::new(bi_filter, pred_filter),
        )
    }

    /// Groups entities by key and aggregates with a collector.
    ///
    /// Returns a zero-erasure `GroupedConstraintStream` that can be penalized
    /// or rewarded based on the aggregated result for each group.
    pub fn group_by<K, KF, C>(
        self,
        key_fn: KF,
        collector: C,
    ) -> GroupedConstraintStream<S, A, K, E, F, KF, C, Sc>
    where
        K: Clone + Eq + Hash + Send + Sync + 'static,
        KF: Fn(&A) -> K + Send + Sync,
        C: UniCollector<A> + Send + Sync + 'static,
        C::Accumulator: Send + Sync,
        C::Result: Clone + Send + Sync,
    {
        GroupedConstraintStream::new(self.extractor, self.filter, key_fn, collector)
    }

    /// Creates a balance constraint that penalizes uneven distribution across groups.
    ///
    /// Unlike `group_by` which scores each group independently, `balance` computes
    /// a GLOBAL standard deviation across all group counts and produces a single score.
    ///
    /// The `key_fn` returns `Option<K>` to allow skipping entities (e.g., unassigned shifts).
    /// Any filters accumulated on this stream are also applied.
    ///
    /// # Example
    ///
    /// ```
    /// use solverforge_scoring::stream::ConstraintFactory;
    /// use solverforge_scoring::api::constraint_set::IncrementalConstraint;
    /// use solverforge_core::score::SoftScore;
    ///
    /// #[derive(Clone)]
    /// struct Shift { employee_id: Option<usize> }
    ///
    /// #[derive(Clone)]
    /// struct Solution { shifts: Vec<Shift> }
    ///
    /// let constraint = ConstraintFactory::<Solution, SoftScore>::new()
    ///     .for_each(|s: &Solution| &s.shifts)
    ///     .balance(|shift: &Shift| shift.employee_id)
    ///     .penalize(SoftScore::of(1000))
    ///     .as_constraint("Balance workload");
    ///
    /// let solution = Solution {
    ///     shifts: vec![
    ///         Shift { employee_id: Some(0) },
    ///         Shift { employee_id: Some(0) },
    ///         Shift { employee_id: Some(0) },
    ///         Shift { employee_id: Some(1) },
    ///     ],
    /// };
    ///
    /// // Employee 0: 3 shifts, Employee 1: 1 shift
    /// // std_dev = 1.0, penalty = -1000
    /// assert_eq!(constraint.evaluate(&solution), SoftScore::of(-1000));
    /// ```
    pub fn balance<K, KF>(self, key_fn: KF) -> BalanceConstraintStream<S, A, K, E, F, KF, Sc>
    where
        K: Clone + Eq + Hash + Send + Sync + 'static,
        KF: Fn(&A) -> Option<K> + Send + Sync,
    {
        BalanceConstraintStream::new(self.extractor, self.filter, key_fn)
    }

    /// Filters A entities based on whether a matching B entity exists.
    ///
    /// Use this when the B collection needs filtering (e.g., only vacationing employees).
    /// The `extractor_b` returns a `Vec<B>` to allow for filtering.
    ///
    /// Any filters accumulated on this stream are applied to A entities.
    ///
    /// # Example
    ///
    /// ```
    /// use solverforge_scoring::stream::ConstraintFactory;
    /// use solverforge_scoring::stream::joiner::equal_bi;
    /// use solverforge_scoring::api::constraint_set::IncrementalConstraint;
    /// use solverforge_core::score::SoftScore;
    ///
    /// #[derive(Clone)]
    /// struct Shift { id: usize, employee_idx: Option<usize> }
    ///
    /// #[derive(Clone)]
    /// struct Employee { id: usize, on_vacation: bool }
    ///
    /// #[derive(Clone)]
    /// struct Schedule { shifts: Vec<Shift>, employees: Vec<Employee> }
    ///
    /// // Penalize shifts assigned to employees who are on vacation
    /// let constraint = ConstraintFactory::<Schedule, SoftScore>::new()
    ///     .for_each(|s: &Schedule| s.shifts.as_slice())
    ///     .filter(|shift: &Shift| shift.employee_idx.is_some())
    ///     .if_exists_filtered(
    ///         |s: &Schedule| s.employees.iter().filter(|e| e.on_vacation).cloned().collect(),
    ///         equal_bi(
    ///             |shift: &Shift| shift.employee_idx,
    ///             |emp: &Employee| Some(emp.id),
    ///         ),
    ///     )
    ///     .penalize(SoftScore::of(1))
    ///     .as_constraint("Vacation conflict");
    ///
    /// let schedule = Schedule {
    ///     shifts: vec![
    ///         Shift { id: 0, employee_idx: Some(0) },  // assigned to vacationing emp
    ///         Shift { id: 1, employee_idx: Some(1) },  // assigned to working emp
    ///         Shift { id: 2, employee_idx: None },     // unassigned (filtered out)
    ///     ],
    ///     employees: vec![
    ///         Employee { id: 0, on_vacation: true },
    ///         Employee { id: 1, on_vacation: false },
    ///     ],
    /// };
    ///
    /// // Only shift 0 matches (assigned to employee 0 who is on vacation)
    /// assert_eq!(constraint.evaluate(&schedule), SoftScore::of(-1));
    /// ```
    pub fn if_exists_filtered<B, EB, K, KA, KB>(
        self,
        extractor_b: EB,
        joiner: EqualJoiner<KA, KB, K>,
    ) -> IfExistsStream<S, A, B, K, E, EB, KA, KB, F, Sc>
    where
        B: Clone + Send + Sync + 'static,
        EB: Fn(&S) -> Vec<B> + Send + Sync,
        K: Eq + Hash + Clone + Send + Sync,
        KA: Fn(&A) -> K + Send + Sync,
        KB: Fn(&B) -> K + Send + Sync,
    {
        let (key_a, key_b) = joiner.into_keys();
        IfExistsStream::new(
            ExistenceMode::Exists,
            self.extractor,
            extractor_b,
            key_a,
            key_b,
            self.filter,
        )
    }

    /// Filters A entities based on whether NO matching B entity exists.
    ///
    /// Use this when the B collection needs filtering.
    /// The `extractor_b` returns a `Vec<B>` to allow for filtering.
    ///
    /// Any filters accumulated on this stream are applied to A entities.
    ///
    /// # Example
    ///
    /// ```
    /// use solverforge_scoring::stream::ConstraintFactory;
    /// use solverforge_scoring::stream::joiner::equal_bi;
    /// use solverforge_scoring::api::constraint_set::IncrementalConstraint;
    /// use solverforge_core::score::SoftScore;
    ///
    /// #[derive(Clone)]
    /// struct Task { id: usize, assignee: Option<usize> }
    ///
    /// #[derive(Clone)]
    /// struct Worker { id: usize, available: bool }
    ///
    /// #[derive(Clone)]
    /// struct Schedule { tasks: Vec<Task>, workers: Vec<Worker> }
    ///
    /// // Penalize tasks assigned to workers who are not available
    /// let constraint = ConstraintFactory::<Schedule, SoftScore>::new()
    ///     .for_each(|s: &Schedule| s.tasks.as_slice())
    ///     .filter(|task: &Task| task.assignee.is_some())
    ///     .if_not_exists_filtered(
    ///         |s: &Schedule| s.workers.iter().filter(|w| w.available).cloned().collect(),
    ///         equal_bi(
    ///             |task: &Task| task.assignee,
    ///             |worker: &Worker| Some(worker.id),
    ///         ),
    ///     )
    ///     .penalize(SoftScore::of(1))
    ///     .as_constraint("Unavailable worker");
    ///
    /// let schedule = Schedule {
    ///     tasks: vec![
    ///         Task { id: 0, assignee: Some(0) },  // worker 0 is unavailable
    ///         Task { id: 1, assignee: Some(1) },  // worker 1 is available
    ///         Task { id: 2, assignee: None },     // unassigned (filtered out)
    ///     ],
    ///     workers: vec![
    ///         Worker { id: 0, available: false },
    ///         Worker { id: 1, available: true },
    ///     ],
    /// };
    ///
    /// // Task 0's worker (id=0) is NOT in the available workers list
    /// assert_eq!(constraint.evaluate(&schedule), SoftScore::of(-1));
    /// ```
    pub fn if_not_exists_filtered<B, EB, K, KA, KB>(
        self,
        extractor_b: EB,
        joiner: EqualJoiner<KA, KB, K>,
    ) -> IfExistsStream<S, A, B, K, E, EB, KA, KB, F, Sc>
    where
        B: Clone + Send + Sync + 'static,
        EB: Fn(&S) -> Vec<B> + Send + Sync,
        K: Eq + Hash + Clone + Send + Sync,
        KA: Fn(&A) -> K + Send + Sync,
        KB: Fn(&B) -> K + Send + Sync,
    {
        let (key_a, key_b) = joiner.into_keys();
        IfExistsStream::new(
            ExistenceMode::NotExists,
            self.extractor,
            extractor_b,
            key_a,
            key_b,
            self.filter,
        )
    }

    /// Penalizes each matching entity with a fixed weight.
    pub fn penalize(
        self,
        weight: Sc,
    ) -> UniConstraintBuilder<S, A, E, F, impl Fn(&A) -> Sc + Send + Sync, Sc>
    where
        Sc: Copy,
    {
        // Detect if this is a hard constraint by checking if hard level is non-zero
        let is_hard = weight
            .to_level_numbers()
            .first()
            .map(|&h| h != 0)
            .unwrap_or(false);
        UniConstraintBuilder {
            extractor: self.extractor,
            filter: self.filter,
            impact_type: ImpactType::Penalty,
            weight: move |_: &A| weight,
            is_hard,
            expected_descriptor: None,
            _phantom: PhantomData,
        }
    }

    /// Penalizes each matching entity with a dynamic weight.
    ///
    /// Note: For dynamic weights, use `penalize_hard_with` to explicitly mark as a hard constraint,
    /// since the weight function cannot be evaluated at build time.
    pub fn penalize_with<W>(self, weight_fn: W) -> UniConstraintBuilder<S, A, E, F, W, Sc>
    where
        W: Fn(&A) -> Sc + Send + Sync,
    {
        UniConstraintBuilder {
            extractor: self.extractor,
            filter: self.filter,
            impact_type: ImpactType::Penalty,
            weight: weight_fn,
            is_hard: false, // Can't detect at build time; use penalize_hard_with for hard constraints
            expected_descriptor: None,
            _phantom: PhantomData,
        }
    }

    /// Penalizes each matching entity with a dynamic weight, explicitly marked as a hard constraint.
    pub fn penalize_hard_with<W>(self, weight_fn: W) -> UniConstraintBuilder<S, A, E, F, W, Sc>
    where
        W: Fn(&A) -> Sc + Send + Sync,
    {
        UniConstraintBuilder {
            extractor: self.extractor,
            filter: self.filter,
            impact_type: ImpactType::Penalty,
            weight: weight_fn,
            is_hard: true,
            expected_descriptor: None,
            _phantom: PhantomData,
        }
    }

    /// Rewards each matching entity with a fixed weight.
    pub fn reward(
        self,
        weight: Sc,
    ) -> UniConstraintBuilder<S, A, E, F, impl Fn(&A) -> Sc + Send + Sync, Sc>
    where
        Sc: Copy,
    {
        // Detect if this is a hard constraint by checking if hard level is non-zero
        let is_hard = weight
            .to_level_numbers()
            .first()
            .map(|&h| h != 0)
            .unwrap_or(false);
        UniConstraintBuilder {
            extractor: self.extractor,
            filter: self.filter,
            impact_type: ImpactType::Reward,
            weight: move |_: &A| weight,
            is_hard,
            expected_descriptor: None,
            _phantom: PhantomData,
        }
    }

    /// Rewards each matching entity with a dynamic weight.
    ///
    /// Note: For dynamic weights, use `reward_hard_with` to explicitly mark as a hard constraint,
    /// since the weight function cannot be evaluated at build time.
    pub fn reward_with<W>(self, weight_fn: W) -> UniConstraintBuilder<S, A, E, F, W, Sc>
    where
        W: Fn(&A) -> Sc + Send + Sync,
    {
        UniConstraintBuilder {
            extractor: self.extractor,
            filter: self.filter,
            impact_type: ImpactType::Reward,
            weight: weight_fn,
            is_hard: false, // Can't detect at build time; use reward_hard_with for hard constraints
            expected_descriptor: None,
            _phantom: PhantomData,
        }
    }

    /// Rewards each matching entity with a dynamic weight, explicitly marked as a hard constraint.
    pub fn reward_hard_with<W>(self, weight_fn: W) -> UniConstraintBuilder<S, A, E, F, W, Sc>
    where
        W: Fn(&A) -> Sc + Send + Sync,
    {
        UniConstraintBuilder {
            extractor: self.extractor,
            filter: self.filter,
            impact_type: ImpactType::Reward,
            weight: weight_fn,
            is_hard: true,
            expected_descriptor: None,
            _phantom: PhantomData,
        }
    }

    /// Penalizes each matching entity with one hard score unit.
    pub fn penalize_hard(
        self,
    ) -> UniConstraintBuilder<S, A, E, F, impl Fn(&A) -> Sc + Send + Sync, Sc>
    where
        Sc: Copy,
    {
        self.penalize(Sc::one_hard())
    }

    /// Penalizes each matching entity with one soft score unit.
    pub fn penalize_soft(
        self,
    ) -> UniConstraintBuilder<S, A, E, F, impl Fn(&A) -> Sc + Send + Sync, Sc>
    where
        Sc: Copy,
    {
        self.penalize(Sc::one_soft())
    }

    /// Rewards each matching entity with one hard score unit.
    pub fn reward_hard(
        self,
    ) -> UniConstraintBuilder<S, A, E, F, impl Fn(&A) -> Sc + Send + Sync, Sc>
    where
        Sc: Copy,
    {
        self.reward(Sc::one_hard())
    }

    /// Rewards each matching entity with one soft score unit.
    pub fn reward_soft(
        self,
    ) -> UniConstraintBuilder<S, A, E, F, impl Fn(&A) -> Sc + Send + Sync, Sc>
    where
        Sc: Copy,
    {
        self.reward(Sc::one_soft())
    }
}
595
impl<S, A, E, F, Sc: Score> UniConstraintStream<S, A, E, F, Sc> {
    /// Returns a reference to the entity extractor.
    #[doc(hidden)]
    pub fn extractor(&self) -> &E {
        &self.extractor
    }

    /// Decomposes the stream into its extractor and accumulated filter.
    #[doc(hidden)]
    pub fn into_parts(self) -> (E, F) {
        (self.extractor, self.filter)
    }

    /// Reassembles a stream from an extractor and a filter.
    #[doc(hidden)]
    pub fn from_parts(extractor: E, filter: F) -> Self {
        Self {
            extractor,
            filter,
            _phantom: PhantomData,
        }
    }
}
616
617impl<S, A, E, F, Sc: Score> std::fmt::Debug for UniConstraintStream<S, A, E, F, Sc> {
618    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
619        f.debug_struct("UniConstraintStream").finish()
620    }
621}
622
/// Zero-erasure builder for finalizing a uni-constraint.
pub struct UniConstraintBuilder<S, A, E, F, W, Sc>
where
    Sc: Score,
{
    /// Extracts the `&[A]` entity slice from a solution.
    extractor: E,
    /// Composed filter accumulated on the originating stream.
    filter: F,
    /// Whether matching entities penalize or reward the score.
    impact_type: ImpactType,
    /// Per-entity weight function.
    weight: W,
    /// True when the weight is known (or explicitly declared) to be hard.
    is_hard: bool,
    /// When set, the constraint only fires for this descriptor index.
    expected_descriptor: Option<usize>,
    /// `fn() -> T` pointers mark the type parameters as used without owning
    /// values of those types.
    _phantom: PhantomData<(fn() -> S, fn() -> A, fn() -> Sc)>,
}
636
impl<S, A, E, F, W, Sc> UniConstraintBuilder<S, A, E, F, W, Sc>
where
    S: Send + Sync + 'static,
    A: Clone + Send + Sync + 'static,
    E: Fn(&S) -> &[A] + Send + Sync,
    F: UniFilter<S, A>,
    W: Fn(&A) -> Sc + Send + Sync,
    Sc: Score + 'static,
{
    /// Restricts this constraint to only fire for the given descriptor index.
    ///
    /// Required when multiple entity classes exist (e.g., FurnaceAssignment at 0,
    /// ShiftAssignment at 1). Without this, on_insert/on_retract fire for all entity
    /// classes using the constraint's entity_index, which indexes into the wrong slice.
    pub fn for_descriptor(mut self, descriptor_index: usize) -> Self {
        self.expected_descriptor = Some(descriptor_index);
        self
    }

    /// Alias for `as_constraint`.
    pub fn named(
        self,
        name: &str,
    ) -> IncrementalUniConstraint<S, A, E, impl Fn(&S, &A) -> bool + Send + Sync, W, Sc> {
        self.as_constraint(name)
    }

    /// Finalizes the builder into a zero-erasure `IncrementalUniConstraint`.
    pub fn as_constraint(
        self,
        name: &str,
    ) -> IncrementalUniConstraint<S, A, E, impl Fn(&S, &A) -> bool + Send + Sync, W, Sc> {
        // Close over the composed filter so the constraint stores a plain
        // `Fn(&S, &A) -> bool` closure rather than the filter type itself.
        let filter = self.filter;
        let combined_filter = move |s: &S, a: &A| filter.test(s, a);

        // NOTE(review): the constraint package is the empty string here;
        // confirm whether a package/group id should be threaded through.
        let mut constraint = IncrementalUniConstraint::new(
            ConstraintRef::new("", name),
            self.impact_type,
            self.extractor,
            combined_filter,
            self.weight,
            self.is_hard,
        );
        if let Some(d) = self.expected_descriptor {
            constraint = constraint.with_descriptor(d);
        }
        constraint
    }
}
686
687impl<S, A, E, F, W, Sc: Score> std::fmt::Debug for UniConstraintBuilder<S, A, E, F, W, Sc> {
688    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
689        f.debug_struct("UniConstraintBuilder")
690            .field("impact_type", &self.impact_type)
691            .finish()
692    }
693}