GroupedConstraintStream

Struct GroupedConstraintStream 

Source
pub struct GroupedConstraintStream<S, A, K, E, KF, C, Sc>
where Sc: Score,
{ /* private fields */ }
Expand description

Zero-erasure constraint stream over grouped entities.

GroupedConstraintStream is created by UniConstraintStream::group_by() and operates on (key, collector_result) tuples.

All type parameters are concrete - no trait objects, no Arc allocations.

§Type Parameters

  • S - Solution type
  • A - Entity type
  • K - Group key type
  • E - Extractor function for entities
  • KF - Key function
  • C - Collector type
  • Sc - Score type

§Example

use solverforge_scoring::stream::ConstraintFactory;
use solverforge_scoring::stream::collector::count;
use solverforge_scoring::api::constraint_set::IncrementalConstraint;
use solverforge_core::score::SimpleScore;

#[derive(Clone, Hash, PartialEq, Eq)]
struct Shift { employee_id: usize }

#[derive(Clone)]
struct Solution { shifts: Vec<Shift> }

let constraint = ConstraintFactory::<Solution, SimpleScore>::new()
    .for_each(|s: &Solution| &s.shifts)
    .group_by(|shift: &Shift| shift.employee_id, count())
    .penalize_with(|count: &usize| SimpleScore::of((*count * *count) as i64))
    .as_constraint("Balanced workload");

let solution = Solution {
    shifts: vec![
        Shift { employee_id: 1 },
        Shift { employee_id: 1 },
        Shift { employee_id: 1 },
        Shift { employee_id: 2 },
    ],
};

// Employee 1: 3² = 9, Employee 2: 1² = 1, Total: -10
assert_eq!(constraint.evaluate(&solution), SimpleScore::of(-10));

Implementations§

Source§

impl<S, A, K, E, KF, C, Sc> GroupedConstraintStream<S, A, K, E, KF, C, Sc>
where S: Send + Sync + 'static, A: Clone + Send + Sync + 'static, K: Clone + Eq + Hash + Send + Sync + 'static, E: Fn(&S) -> &[A] + Send + Sync, KF: Fn(&A) -> K + Send + Sync, C: UniCollector<A> + Send + Sync + 'static, C::Accumulator: Send + Sync, C::Result: Clone + Send + Sync, Sc: Score + 'static,

Source

pub fn penalize_with<W>( self, weight_fn: W, ) -> GroupedConstraintBuilder<S, A, K, E, KF, C, W, Sc>
where W: Fn(&C::Result) -> Sc + Send + Sync,

Penalizes each group with a weight based on the collector result.

§Example
use solverforge_scoring::stream::ConstraintFactory;
use solverforge_scoring::stream::collector::count;
use solverforge_scoring::api::constraint_set::IncrementalConstraint;
use solverforge_core::score::SimpleScore;

#[derive(Clone, Hash, PartialEq, Eq)]
struct Task { priority: u32 }

#[derive(Clone)]
struct Solution { tasks: Vec<Task> }

let constraint = ConstraintFactory::<Solution, SimpleScore>::new()
    .for_each(|s: &Solution| &s.tasks)
    .group_by(|t: &Task| t.priority, count())
    .penalize_with(|count: &usize| SimpleScore::of(*count as i64))
    .as_constraint("Priority distribution");

let solution = Solution {
    tasks: vec![
        Task { priority: 1 },
        Task { priority: 1 },
        Task { priority: 2 },
    ],
};

// Priority 1: 2 tasks, Priority 2: 1 task, Total: -3
assert_eq!(constraint.evaluate(&solution), SimpleScore::of(-3));
Source

pub fn penalize_hard_with<W>( self, weight_fn: W, ) -> GroupedConstraintBuilder<S, A, K, E, KF, C, W, Sc>
where W: Fn(&C::Result) -> Sc + Send + Sync,

Penalizes each group with a weight, explicitly marked as hard constraint.

Source

pub fn reward_with<W>( self, weight_fn: W, ) -> GroupedConstraintBuilder<S, A, K, E, KF, C, W, Sc>
where W: Fn(&C::Result) -> Sc + Send + Sync,

Rewards each group with a weight based on the collector result.

Source

pub fn reward_hard_with<W>( self, weight_fn: W, ) -> GroupedConstraintBuilder<S, A, K, E, KF, C, W, Sc>
where W: Fn(&C::Result) -> Sc + Send + Sync,

Rewards each group with a weight, explicitly marked as hard constraint.

Source

pub fn complement<B, EB, KB, D>( self, extractor_b: EB, key_b: KB, default_fn: D, ) -> ComplementedConstraintStream<S, A, B, K, E, EB, impl Fn(&A) -> Option<K> + Send + Sync, KB, C, D, Sc>
where B: Clone + Send + Sync + 'static, EB: Fn(&S) -> &[B] + Send + Sync, KB: Fn(&B) -> K + Send + Sync, D: Fn(&B) -> C::Result + Send + Sync,

Adds complement entities with default values for missing keys.

This ensures all keys from the complement source are represented, using the grouped value if present, or the default value otherwise.

Note: The key function for A entities wraps the original key to return Some(K), so every A entity participates. For filtering (skipping entities without valid keys), use complement_with_key instead, which accepts a key function returning Option<K>.

§Example
use solverforge_scoring::stream::ConstraintFactory;
use solverforge_scoring::stream::collector::count;
use solverforge_scoring::api::constraint_set::IncrementalConstraint;
use solverforge_core::score::SimpleScore;

#[derive(Clone, Hash, PartialEq, Eq)]
struct Employee { id: usize }

#[derive(Clone, Hash, PartialEq, Eq)]
struct Shift { employee_id: usize }

#[derive(Clone)]
struct Schedule {
    employees: Vec<Employee>,
    shifts: Vec<Shift>,
}

// Count shifts per employee, including employees with 0 shifts
let constraint = ConstraintFactory::<Schedule, SimpleScore>::new()
    .for_each(|s: &Schedule| &s.shifts)
    .group_by(|shift: &Shift| shift.employee_id, count())
    .complement(
        |s: &Schedule| s.employees.as_slice(),
        |emp: &Employee| emp.id,
        |_emp: &Employee| 0usize,
    )
    .penalize_with(|count: &usize| SimpleScore::of(*count as i64))
    .as_constraint("Shift count");

let schedule = Schedule {
    employees: vec![Employee { id: 0 }, Employee { id: 1 }],
    shifts: vec![
        Shift { employee_id: 0 },
        Shift { employee_id: 0 },
    ],
};

// Employee 0: 2, Employee 1: 0 → Total: -2
assert_eq!(constraint.evaluate(&schedule), SimpleScore::of(-2));
Source

pub fn complement_with_key<B, EB, KA2, KB, D>( self, extractor_b: EB, key_a: KA2, key_b: KB, default_fn: D, ) -> ComplementedConstraintStream<S, A, B, K, E, EB, KA2, KB, C, D, Sc>
where B: Clone + Send + Sync + 'static, EB: Fn(&S) -> &[B] + Send + Sync, KA2: Fn(&A) -> Option<K> + Send + Sync, KB: Fn(&B) -> K + Send + Sync, D: Fn(&B) -> C::Result + Send + Sync,

Adds complement entities with a custom key function for filtering.

Like complement, but allows providing a custom key function for A entities that returns Option<K>. Entities returning None are skipped.

§Example
use solverforge_scoring::stream::ConstraintFactory;
use solverforge_scoring::stream::collector::count;
use solverforge_scoring::api::constraint_set::IncrementalConstraint;
use solverforge_core::score::SimpleScore;

#[derive(Clone, Hash, PartialEq, Eq)]
struct Employee { id: usize }

#[derive(Clone, Hash, PartialEq, Eq)]
struct Shift { employee_id: Option<usize> }

#[derive(Clone)]
struct Schedule {
    employees: Vec<Employee>,
    shifts: Vec<Shift>,
}

// Count shifts per employee, skipping unassigned shifts
// The group_by key is ignored; complement_with_key provides its own
let constraint = ConstraintFactory::<Schedule, SimpleScore>::new()
    .for_each(|s: &Schedule| &s.shifts)
    .group_by(|_shift: &Shift| 0usize, count())  // Placeholder key, will be overridden
    .complement_with_key(
        |s: &Schedule| s.employees.as_slice(),
        |shift: &Shift| shift.employee_id,  // Option<usize>
        |emp: &Employee| emp.id,            // usize
        |_emp: &Employee| 0usize,
    )
    .penalize_with(|count: &usize| SimpleScore::of(*count as i64))
    .as_constraint("Shift count");

let schedule = Schedule {
    employees: vec![Employee { id: 0 }, Employee { id: 1 }],
    shifts: vec![
        Shift { employee_id: Some(0) },
        Shift { employee_id: Some(0) },
        Shift { employee_id: None },  // Skipped
    ],
};

// Employee 0: 2, Employee 1: 0 → Total: -2
assert_eq!(constraint.evaluate(&schedule), SimpleScore::of(-2));

Trait Implementations§

Source§

impl<S, A, K, E, KF, C, Sc: Score> Debug for GroupedConstraintStream<S, A, K, E, KF, C, Sc>

Source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

§

impl<S, A, K, E, KF, C, Sc> Freeze for GroupedConstraintStream<S, A, K, E, KF, C, Sc>
where E: Freeze, KF: Freeze, C: Freeze,

§

impl<S, A, K, E, KF, C, Sc> RefUnwindSafe for GroupedConstraintStream<S, A, K, E, KF, C, Sc>

§

impl<S, A, K, E, KF, C, Sc> Send for GroupedConstraintStream<S, A, K, E, KF, C, Sc>
where E: Send, KF: Send, C: Send, S: Send, A: Send, K: Send,

§

impl<S, A, K, E, KF, C, Sc> Sync for GroupedConstraintStream<S, A, K, E, KF, C, Sc>
where E: Sync, KF: Sync, C: Sync, S: Sync, A: Sync, K: Sync,

§

impl<S, A, K, E, KF, C, Sc> Unpin for GroupedConstraintStream<S, A, K, E, KF, C, Sc>
where E: Unpin, KF: Unpin, C: Unpin, S: Unpin, A: Unpin, K: Unpin, Sc: Unpin,

§

impl<S, A, K, E, KF, C, Sc> UnwindSafe for GroupedConstraintStream<S, A, K, E, KF, C, Sc>

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.