//! Items related to validating `Solution`s.

use crate::{
    constraint_vm::{
        self,
        error::{CheckError, ConstraintErrors, ConstraintsUnsatisfied},
    },
    state_read_vm::{
        self, asm::FromBytesError, error::StateReadError, Access, BytecodeMapped, Gas, GasLimit,
        SolutionAccess, StateRead, StateSlotSlice, StateSlots,
    },
    types::{
        predicate::{Directive, Predicate},
        solution::{Solution, SolutionData, SolutionDataIndex},
        Key, PredicateAddress, Word,
    },
};
use constraint_vm::TransientData;
#[cfg(feature = "tracing")]
use essential_hash::content_addr;
use std::{collections::HashSet, fmt, sync::Arc};
use thiserror::Error;
use tokio::task::JoinSet;
#[cfg(feature = "tracing")]
use tracing::Instrument;

/// Configuration options passed to [`check_predicate`].
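///
/// A minimal sketch of opting into collecting all failures rather than
/// short-circuiting on the first (the default, via `CheckPredicateConfig::default()`,
/// leaves this disabled):
///
/// ```ignore
/// let config = CheckPredicateConfig {
///     collect_all_failures: true,
/// };
/// ```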
#[derive(Clone, Debug, Default, Eq, Hash, PartialEq)]
pub struct CheckPredicateConfig {
    /// Whether or not to wait and collect all failures after a single state
    /// read or constraint fails.
    ///
    /// Potentially useful for debugging or testing tools.
    ///
    /// Default: `false`
    pub collect_all_failures: bool,
}

/// [`check`] error.
#[derive(Debug, Error)]
pub enum InvalidSolution {
    /// Invalid solution data.
    #[error("invalid solution data: {0}")]
    Data(#[from] InvalidSolutionData),
    /// State mutations validation failed.
    #[error("state mutations validation failed: {0}")]
    StateMutations(#[from] InvalidStateMutations),
    /// Transient data validation failed.
    #[error("transient data validation failed: {0}")]
    TransientData(#[from] InvalidTransientData),
}

/// [`check_data`] error.
#[derive(Debug, Error)]
pub enum InvalidSolutionData {
    /// There must be at least one solution data.
    #[error("must be at least one solution data")]
    Empty,
    /// The number of solution data exceeds the limit.
    #[error("the number of solution data ({0}) exceeds the limit ({MAX_SOLUTION_DATA})")]
    TooMany(usize),
    /// A solution data expects too many decision variables.
    #[error("data {0} expects too many decision vars {1} (limit: {MAX_DECISION_VARIABLES})")]
    TooManyDecisionVariables(usize, usize),
    /// State mutation entry error.
    #[error("Invalid state mutation entry: {0}")]
    StateMutationEntry(KvError),
    /// Transient data entry error.
    #[error("Invalid transient data entry: {0}")]
    TransientDataEntry(KvError),
    /// Decision variable value too large.
    #[error("Decision variable value len {0} exceeds limit {MAX_VALUE_SIZE}")]
    DecVarValueTooLarge(usize),
}

/// Error with a slot key or value.
#[derive(Debug, Error)]
pub enum KvError {
    /// The key is too large.
    #[error("key with length {0} exceeds limit {MAX_KEY_SIZE}")]
    KeyTooLarge(usize),
    /// The value is too large.
    #[error("value with length {0} exceeds limit {MAX_VALUE_SIZE}")]
    ValueTooLarge(usize),
}

/// [`check_state_mutations`] error.
#[derive(Debug, Error)]
pub enum InvalidStateMutations {
    /// The number of state mutations exceeds the limit.
    #[error("the number of state mutations ({0}) exceeds the limit ({MAX_STATE_MUTATIONS})")]
    TooMany(usize),
    /// State mutation pathway at the given index is out of range of solution data.
    #[error("state mutation pathway {0} out of range of solution data")]
    PathwayOutOfRangeOfSolutionData(u16),
    /// Discovered multiple mutations to the same slot.
    #[error("attempt to apply multiple mutations to the same slot: {0:?} {1:?}")]
    MultipleMutationsForSlot(PredicateAddress, Key),
}

/// [`check_transient_data`] error.
#[derive(Debug, Error)]
pub enum InvalidTransientData {
    /// The number of transient data exceeds the limit.
    #[error("the number of transient data ({0}) exceeds the limit ({MAX_TRANSIENT_DATA})")]
    TooMany(usize),
}

/// [`check_predicates`] error.
#[derive(Debug, Error)]
pub enum PredicatesError<E> {
    /// One or more solution data failed their associated predicate checks.
    #[error("{0}")]
    Failed(#[from] PredicateErrors<E>),
    /// One or more tasks failed to join.
    #[error("one or more spawned tasks failed to join: {0}")]
    Join(#[from] tokio::task::JoinError),
    /// Summing solution data utility resulted in overflow.
    #[error("summing solution data utility overflowed")]
    UtilityOverflowed,
    /// Summing solution data gas resulted in overflow.
    #[error("summing solution data gas overflowed")]
    GasOverflowed,
}

/// Predicate checking failed for the solution data at the given indices.
#[derive(Debug, Error)]
pub struct PredicateErrors<E>(pub Vec<(SolutionDataIndex, PredicateError<E>)>);

/// [`check_predicate`] error.
#[derive(Debug, Error)]
pub enum PredicateError<E> {
    /// Failed to parse ops from bytecode during bytecode mapping.
    #[error("failed to parse an op during bytecode mapping: {0}")]
    OpsFromBytesError(#[from] FromBytesError),
    /// Failed to read state.
    #[error("state read execution error: {0}")]
    StateRead(#[from] StateReadError<E>),
    /// Constraint checking failed.
    #[error("constraint checking failed: {0}")]
    Constraints(#[from] PredicateConstraintsError),
}

/// The number of decision variables provided by the solution data differs from
/// the number expected by the predicate.
#[derive(Debug, Error)]
#[error("number of solution data decision variables ({data}) differs from predicate ({predicate})")]
pub struct InvalidDecisionVariablesLength {
    /// Number of decision variables provided by solution data.
    pub data: usize,
    /// Number of decision variables expected by the solution data's associated predicate.
    pub predicate: u32,
}

/// [`check_predicate_constraints`] error.
#[derive(Debug, Error)]
pub enum PredicateConstraintsError {
    /// Constraint checking failed.
    #[error("check failed: {0}")]
    Check(#[from] constraint_vm::error::CheckError),
    /// Failed to receive result from spawned task.
    #[error("failed to recv: {0}")]
    Recv(#[from] tokio::sync::oneshot::error::RecvError),
    /// Failed to calculate the utility.
    #[error("failed to calculate utility: {0}")]
    Utility(#[from] UtilityError),
}

/// The utility score of a solution.
pub type Utility = f64;

/// `calculate_utility` error.
#[derive(Debug, Error)]
pub enum UtilityError {
    /// The range specified by the predicate's directive is invalid.
    #[error("the range specified by the directive [{0}..{1}] is invalid")]
    InvalidDirectiveRange(Word, Word),
    /// The stack returned from directive execution is invalid.
    #[error("invalid stack result after directive execution: {0}")]
    InvalidStack(#[from] constraint_vm::error::StackError),
    /// Failed to execute the directive using the constraint VM.
    #[error("directive execution with constraint VM failed: {0}")]
    Execution(#[from] constraint_vm::error::ConstraintError),
    /// Failed to receive result from spawned task.
    #[error("failed to recv: {0}")]
    Recv(#[from] tokio::sync::oneshot::error::RecvError),
}

/// Maximum number of decision variables of a single solution data.
pub const MAX_DECISION_VARIABLES: u32 = 100;
/// Maximum number of solution data of a solution.
pub const MAX_SOLUTION_DATA: usize = 100;
/// Maximum number of state mutations of a solution.
pub const MAX_STATE_MUTATIONS: usize = 1000;
/// Maximum number of transient data of a solution.
pub const MAX_TRANSIENT_DATA: usize = 1000;
/// Maximum number of words in a slot value.
pub const MAX_VALUE_SIZE: usize = 10_000;
/// Maximum number of words in a slot key.
pub const MAX_KEY_SIZE: usize = 1000;

impl<E: fmt::Display> fmt::Display for PredicateErrors<E> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("predicate checking failed for one or more solution data:\n")?;
        for (ix, err) in &self.0 {
            f.write_str(&format!("  {ix}: {err}\n"))?;
        }
        Ok(())
    }
}

/// Validate a solution, to the extent it can be validated without reference to
/// its associated predicates.
///
/// This includes solution data, state mutations and transient data.
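///
/// A minimal usage sketch (not compiled), assuming `solution` was deserialized or
/// constructed elsewhere; predicate-aware checks are performed separately via
/// [`check_predicates`]:
///
/// ```ignore
/// check(&solution)?;
/// ```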
#[cfg_attr(feature = "tracing", tracing::instrument(skip_all, fields(solution = %content_addr(&solution.data)), err))]
pub fn check(solution: &Solution) -> Result<(), InvalidSolution> {
    check_data(&solution.data)?;
    check_state_mutations(solution)?;
    check_transient_data(solution)?;
    Ok(())
}

fn check_value_size(value: &[Word]) -> Result<(), KvError> {
    if value.len() > MAX_VALUE_SIZE {
        Err(KvError::ValueTooLarge(value.len()))
    } else {
        Ok(())
    }
}

fn check_key_size(key: &[Word]) -> Result<(), KvError> {
    if key.len() > MAX_KEY_SIZE {
        Err(KvError::KeyTooLarge(key.len()))
    } else {
        Ok(())
    }
}

/// Validate the solution's slice of [`SolutionData`].
pub fn check_data(data_slice: &[SolutionData]) -> Result<(), InvalidSolutionData> {
    // Validate solution data.
    // Ensure that the solution has at least one solution data.
    if data_slice.is_empty() {
        return Err(InvalidSolutionData::Empty);
    }
    // Ensure that the number of solution data does not exceed the limit.
    if data_slice.len() > MAX_SOLUTION_DATA {
        return Err(InvalidSolutionData::TooMany(data_slice.len()));
    }

    // Check the decision variables of each solution data.
    for (data_ix, data) in data_slice.iter().enumerate() {
        // Ensure the length limit is not exceeded.
        if data.decision_variables.len() > MAX_DECISION_VARIABLES as usize {
            return Err(InvalidSolutionData::TooManyDecisionVariables(
                data_ix,
                data.decision_variables.len(),
            ));
        }
        for v in &data.decision_variables {
            check_value_size(v).map_err(|_| InvalidSolutionData::DecVarValueTooLarge(v.len()))?;
        }
    }
    Ok(())
}

/// Validate the solution's state mutations.
pub fn check_state_mutations(solution: &Solution) -> Result<(), InvalidSolution> {
    // Validate state mutations.
    // Ensure that the number of state mutations does not exceed the limit.
    if solution.state_mutations_len() > MAX_STATE_MUTATIONS {
        return Err(InvalidStateMutations::TooMany(solution.state_mutations_len()).into());
    }

    // Ensure that no more than one mutation per slot is proposed.
    for data in &solution.data {
        let mut mut_keys = HashSet::new();
        for mutation in &data.state_mutations {
            if !mut_keys.insert(&mutation.key) {
                return Err(InvalidStateMutations::MultipleMutationsForSlot(
                    data.predicate_to_solve.clone(),
                    mutation.key.clone(),
                )
                .into());
            }
            // Check key length.
            check_key_size(&mutation.key).map_err(InvalidSolutionData::StateMutationEntry)?;
            // Check value length.
            check_value_size(&mutation.value).map_err(InvalidSolutionData::StateMutationEntry)?;
        }
    }

    Ok(())
}

/// Validate the solution's transient data.
pub fn check_transient_data(solution: &Solution) -> Result<(), InvalidSolution> {
    // Validate transient data.
    // Ensure that the number of transient data entries does not exceed the limit.
    if solution.transient_data_len() > MAX_TRANSIENT_DATA {
        return Err(InvalidTransientData::TooMany(solution.transient_data_len()).into());
    }

    // Ensure the lengths of keys and values are within limits.
    for data in &solution.data {
        for entry in &data.transient_data {
            // Check key length.
            check_key_size(&entry.key).map_err(InvalidSolutionData::TransientDataEntry)?;
            // Check value length.
            check_value_size(&entry.value).map_err(InvalidSolutionData::TransientDataEntry)?;
        }
    }

    Ok(())
}

/// Checks all of a solution's `SolutionData` against its associated predicates.
///
/// For each of the solution's `data` elements, a single task is spawned that
/// reads the pre and post state slots for the associated predicate with access to
/// the given `pre_state` and `post_state`, then checks all constraints over the
/// resulting pre and post state slots.
///
/// **NOTE:** This assumes that the given `Solution` and all `Predicate`s
/// have already been independently validated using
/// [`solution::check`][crate::solution::check] and
/// [`predicate::check`][crate::predicate::check] respectively.
///
/// ## Arguments
///
/// - `pre_state` must provide access to state *prior to* mutations being applied.
/// - `post_state` must provide access to state *post* mutations being applied.
/// - `get_predicate` provides immediate access to the predicate associated with the given
///   address. Calls to `get_predicate` must complete immediately. All necessary
///   predicates are assumed to have been read from storage and validated ahead of time.
///
/// Returns the utility score of the solution alongside the total gas spent.
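///
/// A minimal sketch (not compiled), assuming `pre_state` and `post_state` are
/// `StateRead` implementations, `solution` is an `Arc<Solution>`, and `predicates`
/// is a pre-fetched, pre-validated map from `PredicateAddress` to `Arc<Predicate>`:
///
/// ```ignore
/// let config = Arc::new(CheckPredicateConfig::default());
/// let get_predicate = |addr: &PredicateAddress| predicates[addr].clone();
/// let (utility, gas) = check_predicates(
///     &pre_state,
///     &post_state,
///     solution.clone(),
///     get_predicate,
///     config,
/// )
/// .await?;
/// ```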
#[cfg_attr(feature = "tracing", tracing::instrument(skip_all))]
pub async fn check_predicates<SA, SB>(
    pre_state: &SA,
    post_state: &SB,
    solution: Arc<Solution>,
    get_predicate: impl Fn(&PredicateAddress) -> Arc<Predicate>,
    config: Arc<CheckPredicateConfig>,
) -> Result<(Utility, Gas), PredicatesError<SA::Error>>
where
    SA: Clone + StateRead + Send + Sync + 'static,
    SB: Clone + StateRead<Error = SA::Error> + Send + Sync + 'static,
    SA::Future: Send,
    SB::Future: Send,
    SA::Error: Send,
{
    #[cfg(feature = "tracing")]
    tracing::trace!("{}", essential_hash::content_addr(&*solution));

    let transient_data: Arc<TransientData> =
        Arc::new(essential_constraint_vm::transient_data(&solution));
    // Read pre and post states then check constraints.
    let mut set: JoinSet<(_, Result<_, PredicateError<SA::Error>>)> = JoinSet::new();
    for (solution_data_index, data) in solution.data.iter().enumerate() {
        let solution_data_index: SolutionDataIndex = solution_data_index
            .try_into()
            .expect("solution data index already validated");
        let predicate = get_predicate(&data.predicate_to_solve);
        let solution = solution.clone();
        let transient_data = transient_data.clone();
        let pre_state: SA = pre_state.clone();
        let post_state: SB = post_state.clone();
        let config = config.clone();

        let future = async move {
            let pre_state = pre_state;
            let post_state = post_state;
            let res = check_predicate(
                &pre_state,
                &post_state,
                solution,
                predicate,
                solution_data_index,
                &config,
                transient_data,
            )
            .await;
            (solution_data_index, res)
        };

        #[cfg(feature = "tracing")]
        let future = future.in_current_span();

        set.spawn(future);
    }

    // Calculate total utility and gas used.
    // TODO: Gas is only calculated for state reads.
    // Add gas tracking for constraint checking.
    let mut total_gas: u64 = 0;
    let mut utility: f64 = 0.0;
    let mut failed = vec![];
    while let Some(res) = set.join_next().await {
        let (solution_data_ix, res) = res?;
        let (u, g) = match res {
            Ok(ok) => ok,
            Err(e) => {
                failed.push((solution_data_ix, e));
                if config.collect_all_failures {
                    continue;
                } else {
                    return Err(PredicateErrors(failed).into());
                }
            }
        };
        utility += u;

        if utility == f64::INFINITY {
            return Err(PredicatesError::UtilityOverflowed);
        }

        total_gas = total_gas
            .checked_add(g)
            .ok_or(PredicatesError::GasOverflowed)?;
    }

    // If any predicates failed, return an error.
    if !failed.is_empty() {
        return Err(PredicateErrors(failed).into());
    }

    Ok((utility, total_gas))
}

/// Checks a solution against a single predicate using the solution data at the given index.
///
/// Reads all pre and post state slots into memory, then checks all constraints.
///
/// **NOTE:** This assumes that the given `Solution` and `Predicate` have been
/// independently validated using [`solution::check`][crate::solution::check]
/// and [`predicate::check`][crate::predicate::check] respectively.
///
/// ## Arguments
///
/// - `pre_state` must provide access to state *prior to* mutations being applied.
/// - `post_state` must provide access to state *post* mutations being applied.
/// - `solution_data_index` represents the data within `solution.data` that claims
///   to solve this predicate.
///
/// Returns the utility score of the solution alongside the total gas spent.
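///
/// A minimal sketch (not compiled), assuming `pre_state`, `post_state`, an
/// `Arc<Solution>` in `solution`, and a pre-validated `predicate` are available,
/// and that the data at index `0` of `solution.data` claims to solve it:
///
/// ```ignore
/// let transient_data = Arc::new(essential_constraint_vm::transient_data(&solution));
/// let (utility, gas) = check_predicate(
///     &pre_state,
///     &post_state,
///     solution.clone(),
///     predicate,
///     0,
///     &CheckPredicateConfig::default(),
///     transient_data,
/// )
/// .await?;
/// ```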
#[cfg_attr(
    feature = "tracing",
    tracing::instrument(
        skip_all,
        fields(
            solution = %format!("{}", content_addr(&*solution))[0..8],
            data={solution_data_index},
        ),
    ),
)]
pub async fn check_predicate<SA, SB>(
    pre_state: &SA,
    post_state: &SB,
    solution: Arc<Solution>,
    predicate: Arc<Predicate>,
    solution_data_index: SolutionDataIndex,
    config: &CheckPredicateConfig,
    transient_data: Arc<TransientData>,
) -> Result<(Utility, Gas), PredicateError<SA::Error>>
where
    SA: StateRead + Sync,
    SB: StateRead<Error = SA::Error> + Sync,
{
    // Track the total gas spent over all execution.
    let mut total_gas = 0;

    // Initialize pre and post slots. These will contain all state slots for all state reads.
    let mut pre_slots: Vec<Vec<Word>> = Vec::new();
    let mut post_slots: Vec<Vec<Word>> = Vec::new();
    let mutable_keys = constraint_vm::mut_keys_set(&solution, solution_data_index);
    let solution_access = SolutionAccess::new(
        &solution,
        solution_data_index,
        &mutable_keys,
        &transient_data,
    );

    // Read pre and post states.
    for (state_read_index, state_read) in predicate.state_read.iter().enumerate() {
        #[cfg(not(feature = "tracing"))]
        let _ = state_read_index;

        // Map the bytecode ops ahead of execution to share the mapping
        // between both pre and post state slot reads.
        let state_read_mapped = BytecodeMapped::try_from(&state_read[..])?;

        // Read pre state slots and append them to `pre_slots`.
        let future = read_state_slots(
            &state_read_mapped,
            Access {
                solution: solution_access,
                state_slots: StateSlots {
                    pre: &pre_slots,
                    post: &post_slots,
                },
            },
            pre_state,
        );
        #[cfg(feature = "tracing")]
        let (gas, new_pre_slots) = future
            .instrument(tracing::info_span!("pre", ix = state_read_index))
            .await?;
        #[cfg(not(feature = "tracing"))]
        let (gas, new_pre_slots) = future.await?;

        total_gas += gas;
        pre_slots.extend(new_pre_slots);

        // Read post state slots and append them to `post_slots`.
        let future = read_state_slots(
            &state_read_mapped,
            Access {
                solution: solution_access,
                state_slots: StateSlots {
                    pre: &pre_slots,
                    post: &post_slots,
                },
            },
            post_state,
        );
        #[cfg(feature = "tracing")]
        let (gas, new_post_slots) = future
            .instrument(tracing::info_span!("post", ix = state_read_index))
            .await?;
        #[cfg(not(feature = "tracing"))]
        let (gas, new_post_slots) = future.await?;

        total_gas += gas;
        post_slots.extend(new_post_slots);
    }

    // Check constraints.
    let utility = check_predicate_constraints(
        solution,
        solution_data_index,
        predicate.clone(),
        Arc::from(pre_slots.into_boxed_slice()),
        Arc::from(post_slots.into_boxed_slice()),
        config,
        transient_data,
    )
    .await?;

    Ok((utility, total_gas))
}

/// Reads state slots from storage using the given bytecode.
///
/// The result is written to the VM's memory.
///
/// Returns the gas spent alongside the state slots consumed from the VM's memory.
async fn read_state_slots<S>(
    bytecode_mapped: &BytecodeMapped<&[u8]>,
    access: Access<'_>,
    state_read: &S,
) -> Result<(Gas, Vec<Vec<Word>>), state_read_vm::error::StateReadError<S::Error>>
where
    S: StateRead,
{
    // Create a new state read VM.
    let mut vm = state_read_vm::Vm::default();

    // Read the state into the VM's memory.
    let gas_spent = vm
        .exec_bytecode(
            bytecode_mapped,
            access,
            state_read,
            &|_: &state_read_vm::asm::Op| 1,
            GasLimit::UNLIMITED,
        )
        .await?;

    Ok((gas_spent, vm.into_state_slots()))
}

/// Checks if the given solution data at the given index satisfies the
/// constraints of the given predicate.
///
/// Returns the utility of the solution for the given predicate.
#[cfg_attr(feature = "tracing", tracing::instrument(skip_all, "check"))]
pub async fn check_predicate_constraints(
    solution: Arc<Solution>,
    solution_data_index: SolutionDataIndex,
    predicate: Arc<Predicate>,
    pre_slots: Arc<StateSlotSlice>,
    post_slots: Arc<StateSlotSlice>,
    config: &CheckPredicateConfig,
    transient_data: Arc<TransientData>,
) -> Result<Utility, PredicateConstraintsError> {
    match check_predicate_constraints_parallel(
        solution.clone(),
        solution_data_index,
        predicate.clone(),
        pre_slots.clone(),
        post_slots.clone(),
        config,
        transient_data.clone(),
    )
    .await
    {
        Ok(()) => {
            #[cfg(feature = "tracing")]
            tracing::trace!("constraint check complete");

            match calculate_utility(
                solution,
                solution_data_index,
                predicate.clone(),
                pre_slots,
                post_slots,
                transient_data,
            )
            .await
            {
                Ok(util) => {
                    #[cfg(feature = "tracing")]
                    tracing::trace!("utility: {}", util);
                    Ok(util)
                }
                Err(err) => {
                    #[cfg(feature = "tracing")]
                    tracing::trace!("error calculating utility: {}", err);
                    Err(err.into())
                }
            }
        }
        Err(err) => {
            #[cfg(feature = "tracing")]
            tracing::trace!("error checking constraints: {}", err);
            Err(err)
        }
    }
}

/// Check the predicate's constraints in parallel without blocking the async executor.
async fn check_predicate_constraints_parallel(
    solution: Arc<Solution>,
    solution_data_index: SolutionDataIndex,
    predicate: Arc<Predicate>,
    pre_slots: Arc<StateSlotSlice>,
    post_slots: Arc<StateSlotSlice>,
    config: &CheckPredicateConfig,
    transient_data: Arc<TransientData>,
) -> Result<(), PredicateConstraintsError> {
    let mut handles = Vec::with_capacity(predicate.constraints.len());

    // Spawn each constraint onto a rayon thread and
    // check them in parallel.
    for ix in 0..predicate.constraints.len() {
        // Spawn this sync code onto a rayon thread.
        // This is a non-blocking operation.
        let (tx, rx) = tokio::sync::oneshot::channel();
        handles.push(rx);

        // These are all cheap Arc clones.
        let solution = solution.clone();
        let transient_data = transient_data.clone();
        let pre_slots = pre_slots.clone();
        let post_slots = post_slots.clone();
        let predicate = predicate.clone();

        #[cfg(feature = "tracing")]
        let span = tracing::Span::current();

        rayon::spawn(move || {
            #[cfg(feature = "tracing")]
            let span = tracing::trace_span!(parent: &span, "constraint", ix = ix as u32);
            #[cfg(feature = "tracing")]
            let guard = span.enter();

            let mutable_keys = constraint_vm::mut_keys_set(&solution, solution_data_index);
            let solution_access = SolutionAccess::new(
                &solution,
                solution_data_index,
                &mutable_keys,
                &transient_data,
            );
            let access = Access {
                solution: solution_access,
                state_slots: StateSlots {
                    pre: &pre_slots,
                    post: &post_slots,
                },
            };
            let res = constraint_vm::eval_bytecode_iter(
                predicate
                    .constraints
                    .get(ix)
                    .expect("Safe due to above len check")
                    .iter()
                    .copied(),
                access,
            );
            // Send the result back to the main thread.
            // Send errors are ignored: if the receiver is gone, there is no one to send to.
            let _ = tx.send((ix, res));

            #[cfg(feature = "tracing")]
            drop(guard)
        })
    }

    // The number of failed or unsatisfied constraints isn't known up front.
    let mut failed = Vec::new();
    let mut unsatisfied = Vec::new();

    // Wait for all constraints to finish.
    // The order of waiting on handles is not important as all
    // constraints make progress independently.
    for handle in handles {
        // Get the index and result from the handle.
        let (ix, res): (usize, Result<bool, _>) = handle.await?;
        match res {
            // If the constraint failed, add it to the failed list.
            Err(err) => {
                failed.push((ix, err));
                if !config.collect_all_failures {
                    break;
                }
            }
            // If the constraint was unsatisfied, add it to the unsatisfied list.
            Ok(b) if !b => unsatisfied.push(ix),
            // Otherwise, the constraint was satisfied.
            _ => (),
        }
    }

    // If there are any failed constraints, return an error.
    if !failed.is_empty() {
        return Err(CheckError::from(ConstraintErrors(failed)).into());
    }

    // If there are any unsatisfied constraints, return an error.
    if !unsatisfied.is_empty() {
        return Err(CheckError::from(ConstraintsUnsatisfied(unsatisfied)).into());
    }
    Ok(())
}

/// Calculates the utility of the solution for the given predicate.
///
/// Returns `1.0` for a `Satisfy` directive, otherwise the normalized result of
/// executing the `Maximize`/`Minimize` directive code.
async fn calculate_utility(
    solution: Arc<Solution>,
    solution_data_index: SolutionDataIndex,
    predicate: Arc<Predicate>,
    pre_slots: Arc<StateSlotSlice>,
    post_slots: Arc<StateSlotSlice>,
    transient_data: Arc<TransientData>,
) -> Result<Utility, UtilityError> {
    match &predicate.directive {
        Directive::Satisfy => return Ok(1.0),
        Directive::Maximize(_) | Directive::Minimize(_) => (),
    }

    // Spawn this sync code onto a rayon thread.
    let (tx, rx) = tokio::sync::oneshot::channel();

    #[cfg(feature = "tracing")]
    let span = tracing::Span::current();

    rayon::spawn(move || {
        #[cfg(feature = "tracing")]
        let span = tracing::trace_span!(parent: &span, "utility");
        #[cfg(feature = "tracing")]
        let guard = span.enter();

        let mutable_keys = constraint_vm::mut_keys_set(&solution, solution_data_index);
        let solution_access = SolutionAccess::new(
            &solution,
            solution_data_index,
            &mutable_keys,
            &transient_data,
        );
        let access = Access {
            solution: solution_access,
            state_slots: StateSlots {
                pre: &pre_slots,
                post: &post_slots,
            },
        };
        // Extract the directive code.
        let code = match predicate.directive {
            Directive::Maximize(ref code) | Directive::Minimize(ref code) => code,
            _ => unreachable!("As this is already checked above"),
        };

        // Execute the directive code.
        let res = constraint_vm::exec_bytecode_iter(code.iter().copied(), access)
            .map_err(UtilityError::from)
            .and_then(|mut stack| {
                let [start, end, value] = stack.pop3()?;
                let util = normalize_utility(value, start, end)?;
                Ok(util)
            });

        // Send errors are ignored, as they can only occur if the receiver was dropped.
        let _ = tx.send(res);

        #[cfg(feature = "tracing")]
        drop(guard)
    });

    // Await the result of the utility calculation.
    rx.await?
}

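/// Linearly normalizes `value` from the directive's `[start, end]` range into
/// `[0.0, 1.0]`, i.e. `(value - start) / (end - start)` clamped to `[0.0, 1.0]`.
///
/// For example, `value = 5` over the range `[0, 10]` normalizes to `0.5`.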
fn normalize_utility(value: Word, start: Word, end: Word) -> Result<Utility, UtilityError> {
    if start >= end {
        return Err(UtilityError::InvalidDirectiveRange(start, end));
    }
    let normalized = (value - start) as f64 / (end - start) as f64;
    Ok(normalized.clamp(0.0, 1.0))
}