use std::{
    collections::HashMap,
    fmt::{Debug, Display},
    sync::Arc,
};

use accurate::{sum::Klein, traits::*};
use auto_ops::*;
use dyn_clone::DynClone;
use fastrand::Rng;
use ganesh::{
    traits::AbortSignal, Ensemble, Function, Minimizer, Sampler, Status, Swarm, SwarmMinimizer,
};
use laddu_core::{
    amplitudes::{central_difference, AmplitudeValues, Evaluator, GradientValues, Model},
    data::Dataset,
    resources::Parameters,
    Complex, DVector, Float, LadduError,
};

#[cfg(feature = "mpi")]
use laddu_core::mpi::LadduMPI;

#[cfg(feature = "mpi")]
use mpi::{datatype::PartitionMut, topology::SimpleCommunicator, traits::*};

#[cfg(feature = "python")]
use crate::ganesh_ext::py_ganesh::{
    py_parse_mcmc_options, py_parse_minimizer_options, py_parse_swarm_options, PyEnsemble,
    PyStatus, PySwarm,
};
#[cfg(feature = "python")]
use laddu_python::{
    amplitudes::{PyEvaluator, PyModel},
    data::PyDataset,
};
#[cfg(feature = "python")]
use numpy::PyArray1;
#[cfg(feature = "python")]
use pyo3::{exceptions::PyTypeError, prelude::*, types::PyList};
#[cfg(feature = "rayon")]
use rayon::{prelude::*, ThreadPool, ThreadPoolBuilder};

use crate::ganesh_ext::{MCMCOptions, MinimizerOptions, SwarmOptions};

/// A trait representing a term in a likelihood function (e.g. an [`NLL`]).
pub trait LikelihoodTerm: DynClone + Send + Sync {
    /// Evaluate the term at the given parameter values.
    fn evaluate(&self, parameters: &[Float]) -> Float;
    /// Evaluate the gradient of the term (defaults to a central finite difference).
    fn evaluate_gradient(&self, parameters: &[Float]) -> DVector<Float> {
        central_difference(parameters, |parameters: &[Float]| self.evaluate(parameters))
    }
    /// The names of the free parameters used by this term.
    fn parameters(&self) -> Vec<String>;
}

dyn_clone::clone_trait_object!(LikelihoodTerm);
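
// As a quick illustration (not part of this crate's API), a custom term can be as
// small as a Gaussian constraint on a single named parameter. The type below is
// hypothetical; only the `LikelihoodTerm` trait itself is defined here.
//
// #[derive(Clone)]
// struct GaussianConstraint {
//     parameter: String,
//     mean: Float,
//     sigma: Float,
// }
//
// impl LikelihoodTerm for GaussianConstraint {
//     fn evaluate(&self, parameters: &[Float]) -> Float {
//         // -2 ln L for a Gaussian prior (up to an additive constant)
//         ((parameters[0] - self.mean) / self.sigma).powi(2)
//     }
//     fn parameters(&self) -> Vec<String> {
//         vec![self.parameter.clone()]
//     }
// }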

#[cfg(feature = "python")]
#[pyclass(name = "LikelihoodTerm", module = "laddu")]
#[derive(Clone)]
pub struct PyLikelihoodTerm(pub Box<dyn LikelihoodTerm>);

/// An extended, unbinned negative log-likelihood term built from a data [`Dataset`] and an
/// accepted Monte Carlo [`Dataset`].
#[derive(Clone)]
pub struct NLL {
    /// The [`Evaluator`] loaded with the data [`Dataset`].
    pub data_evaluator: Evaluator,
    /// The [`Evaluator`] loaded with the accepted Monte Carlo [`Dataset`].
    pub accmc_evaluator: Evaluator,
}

impl NLL {
    /// Construct an [`NLL`] from a [`Model`] and two [`Dataset`]s (data and accepted Monte Carlo).
    pub fn new(model: &Model, ds_data: &Arc<Dataset>, ds_accmc: &Arc<Dataset>) -> Box<Self> {
        Self {
            data_evaluator: model.load(ds_data),
            accmc_evaluator: model.load(ds_accmc),
        }
        .into()
    }
    /// Activate an amplitude by name in both evaluators.
    pub fn activate<T: AsRef<str>>(&self, name: T) -> Result<(), LadduError> {
        self.data_evaluator.activate(&name)?;
        self.accmc_evaluator.activate(&name)
    }
    /// Activate several amplitudes by name in both evaluators.
    pub fn activate_many<T: AsRef<str>>(&self, names: &[T]) -> Result<(), LadduError> {
        self.data_evaluator.activate_many(names)?;
        self.accmc_evaluator.activate_many(names)
    }
    /// Activate all registered amplitudes.
    pub fn activate_all(&self) {
        self.data_evaluator.activate_all();
        self.accmc_evaluator.activate_all();
    }
    /// Deactivate an amplitude by name in both evaluators.
    pub fn deactivate<T: AsRef<str>>(&self, name: T) -> Result<(), LadduError> {
        self.data_evaluator.deactivate(&name)?;
        self.accmc_evaluator.deactivate(&name)
    }
    /// Deactivate several amplitudes by name in both evaluators.
    pub fn deactivate_many<T: AsRef<str>>(&self, names: &[T]) -> Result<(), LadduError> {
        self.data_evaluator.deactivate_many(names)?;
        self.accmc_evaluator.deactivate_many(names)
    }
    /// Deactivate all registered amplitudes.
    pub fn deactivate_all(&self) {
        self.data_evaluator.deactivate_all();
        self.accmc_evaluator.deactivate_all();
    }
    /// Isolate an amplitude by name (deactivating all others) in both evaluators.
    pub fn isolate<T: AsRef<str>>(&self, name: T) -> Result<(), LadduError> {
        self.data_evaluator.isolate(&name)?;
        self.accmc_evaluator.isolate(&name)
    }
    /// Isolate several amplitudes by name (deactivating all others) in both evaluators.
    pub fn isolate_many<T: AsRef<str>>(&self, names: &[T]) -> Result<(), LadduError> {
        self.data_evaluator.isolate_many(names)?;
        self.accmc_evaluator.isolate_many(names)
    }

    /// Project the model over the Monte Carlo dataset with the given parameters, returning a
    /// weight for each Monte Carlo event (local to this process). If `mc_evaluator` is given, it
    /// is used in place of the stored accepted Monte Carlo evaluator.
    pub fn project_local(
        &self,
        parameters: &[Float],
        mc_evaluator: Option<Evaluator>,
    ) -> Vec<Float> {
        let (mc_dataset, result) = if let Some(mc_evaluator) = mc_evaluator {
            (
                mc_evaluator.dataset.clone(),
                mc_evaluator.evaluate_local(parameters),
            )
        } else {
            (
                self.accmc_evaluator.dataset.clone(),
                self.accmc_evaluator.evaluate_local(parameters),
            )
        };
        let n_mc = self.accmc_evaluator.dataset.n_events();
        #[cfg(feature = "rayon")]
        let output: Vec<Float> = result
            .par_iter()
            .zip(mc_dataset.events.par_iter())
            .map(|(l, e)| e.weight * l.re / n_mc as Float)
            .collect();

        #[cfg(not(feature = "rayon"))]
        let output: Vec<Float> = result
            .iter()
            .zip(mc_dataset.events.iter())
            .map(|(l, e)| e.weight * l.re / n_mc as Float)
            .collect();
        output
    }

    #[cfg(feature = "mpi")]
    pub fn project_mpi(
        &self,
        parameters: &[Float],
        mc_evaluator: Option<Evaluator>,
        world: &SimpleCommunicator,
    ) -> Vec<Float> {
        let n_events = mc_evaluator
            .as_ref()
            .unwrap_or(&self.accmc_evaluator)
            .dataset
            .n_events();
        let local_projection = self.project_local(parameters, mc_evaluator);
        let mut buffer: Vec<Float> = vec![0.0; n_events];
        let (counts, displs) = world.get_counts_displs(n_events);
        {
            let mut partitioned_buffer = PartitionMut::new(&mut buffer, counts, displs);
            world.all_gather_varcount_into(&local_projection, &mut partitioned_buffer);
        }
        buffer
    }

    /// Project the model over the Monte Carlo dataset, dispatching to the MPI implementation when
    /// an MPI world is available and falling back to the local implementation otherwise.
    pub fn project(&self, parameters: &[Float], mc_evaluator: Option<Evaluator>) -> Vec<Float> {
        #[cfg(feature = "mpi")]
        {
            if let Some(world) = laddu_core::mpi::get_world() {
                return self.project_mpi(parameters, mc_evaluator, &world);
            }
        }
        self.project_local(parameters, mc_evaluator)
    }

    pub fn project_gradient_local(
        &self,
        parameters: &[Float],
        mc_evaluator: Option<Evaluator>,
    ) -> (Vec<Float>, Vec<DVector<Float>>) {
        let (mc_dataset, result, result_gradient) = if let Some(mc_evaluator) = mc_evaluator {
            (
                mc_evaluator.dataset.clone(),
                mc_evaluator.evaluate_local(parameters),
                mc_evaluator.evaluate_gradient_local(parameters),
            )
        } else {
            (
                self.accmc_evaluator.dataset.clone(),
                self.accmc_evaluator.evaluate_local(parameters),
                self.accmc_evaluator.evaluate_gradient_local(parameters),
            )
        };
        let n_mc = self.accmc_evaluator.dataset.n_events() as Float;
        #[cfg(feature = "rayon")]
        {
            (
                result
                    .par_iter()
                    .zip(mc_dataset.events.par_iter())
                    .map(|(l, e)| e.weight * l.re / n_mc)
                    .collect(),
                result_gradient
                    .par_iter()
                    .zip(mc_dataset.events.par_iter())
                    .map(|(grad_l, e)| grad_l.map(|g| g.re).scale(e.weight / n_mc))
                    .collect(),
            )
        }
        #[cfg(not(feature = "rayon"))]
        {
            (
                result
                    .iter()
                    .zip(mc_dataset.events.iter())
                    .map(|(l, e)| e.weight * l.re / n_mc)
                    .collect(),
                result_gradient
                    .iter()
                    .zip(mc_dataset.events.iter())
                    .map(|(grad_l, e)| grad_l.map(|g| g.re).scale(e.weight / n_mc))
                    .collect(),
            )
        }
    }

    #[cfg(feature = "mpi")]
    pub fn project_gradient_mpi(
        &self,
        parameters: &[Float],
        mc_evaluator: Option<Evaluator>,
        world: &SimpleCommunicator,
    ) -> (Vec<Float>, Vec<DVector<Float>>) {
        let n_events = mc_evaluator
            .as_ref()
            .unwrap_or(&self.accmc_evaluator)
            .dataset
            .n_events();
        let (local_projection, local_gradient_projection) =
            self.project_gradient_local(parameters, mc_evaluator);
        let mut projection_result: Vec<Float> = vec![0.0; n_events];
        let (counts, displs) = world.get_counts_displs(n_events);
        {
            let mut partitioned_buffer = PartitionMut::new(&mut projection_result, counts, displs);
            world.all_gather_varcount_into(&local_projection, &mut partitioned_buffer);
        }

        let flattened_local_gradient_projection = local_gradient_projection
            .iter()
            .flat_map(|g| g.data.as_vec().to_vec())
            .collect::<Vec<Float>>();
        let (counts, displs) = world.get_flattened_counts_displs(n_events, parameters.len());
        let mut flattened_result_buffer = vec![0.0; n_events * parameters.len()];
        let mut partitioned_flattened_result_buffer =
            PartitionMut::new(&mut flattened_result_buffer, counts, displs);
        world.all_gather_varcount_into(
            &flattened_local_gradient_projection,
            &mut partitioned_flattened_result_buffer,
        );
        let gradient_projection_result = flattened_result_buffer
            .chunks(parameters.len())
            .map(DVector::from_row_slice)
            .collect();
        (projection_result, gradient_projection_result)
    }
    pub fn project_gradient(
        &self,
        parameters: &[Float],
        mc_evaluator: Option<Evaluator>,
    ) -> (Vec<Float>, Vec<DVector<Float>>) {
        #[cfg(feature = "mpi")]
        {
            if let Some(world) = laddu_core::mpi::get_world() {
                return self.project_gradient_mpi(parameters, mc_evaluator, &world);
            }
        }
        self.project_gradient_local(parameters, mc_evaluator)
    }

    pub fn project_with_local<T: AsRef<str>>(
        &self,
        parameters: &[Float],
        names: &[T],
        mc_evaluator: Option<Evaluator>,
    ) -> Result<Vec<Float>, LadduError> {
        if let Some(mc_evaluator) = &mc_evaluator {
            let current_active_mc = mc_evaluator.resources.read().active.clone();
            mc_evaluator.isolate_many(names)?;
            let mc_dataset = mc_evaluator.dataset.clone();
            let result = mc_evaluator.evaluate_local(parameters);
            let n_mc = self.accmc_evaluator.dataset.n_events();
            #[cfg(feature = "rayon")]
            let output: Vec<Float> = result
                .par_iter()
                .zip(mc_dataset.events.par_iter())
                .map(|(l, e)| e.weight * l.re / n_mc as Float)
                .collect();
            #[cfg(not(feature = "rayon"))]
            let output: Vec<Float> = result
                .iter()
                .zip(mc_dataset.events.iter())
                .map(|(l, e)| e.weight * l.re / n_mc as Float)
                .collect();
            mc_evaluator.resources.write().active = current_active_mc;
            Ok(output)
        } else {
            let current_active_data = self.data_evaluator.resources.read().active.clone();
            let current_active_accmc = self.accmc_evaluator.resources.read().active.clone();
            self.isolate_many(names)?;
            let mc_dataset = &self.accmc_evaluator.dataset;
            let result = self.accmc_evaluator.evaluate_local(parameters);
            let n_mc = self.accmc_evaluator.dataset.n_events();
            #[cfg(feature = "rayon")]
            let output: Vec<Float> = result
                .par_iter()
                .zip(mc_dataset.events.par_iter())
                .map(|(l, e)| e.weight * l.re / n_mc as Float)
                .collect();
            #[cfg(not(feature = "rayon"))]
            let output: Vec<Float> = result
                .iter()
                .zip(mc_dataset.events.iter())
                .map(|(l, e)| e.weight * l.re / n_mc as Float)
                .collect();
            self.data_evaluator.resources.write().active = current_active_data;
            self.accmc_evaluator.resources.write().active = current_active_accmc;
            Ok(output)
        }
    }

    #[cfg(feature = "mpi")]
    pub fn project_with_mpi<T: AsRef<str>>(
        &self,
        parameters: &[Float],
        names: &[T],
        mc_evaluator: Option<Evaluator>,
        world: &SimpleCommunicator,
    ) -> Result<Vec<Float>, LadduError> {
        let n_events = mc_evaluator
            .as_ref()
            .unwrap_or(&self.accmc_evaluator)
            .dataset
            .n_events();
        let local_projection = self.project_with_local(parameters, names, mc_evaluator)?;
        let mut buffer: Vec<Float> = vec![0.0; n_events];
        let (counts, displs) = world.get_counts_displs(n_events);
        {
            let mut partitioned_buffer = PartitionMut::new(&mut buffer, counts, displs);
            world.all_gather_varcount_into(&local_projection, &mut partitioned_buffer);
        }
        Ok(buffer)
    }

    pub fn project_with<T: AsRef<str>>(
        &self,
        parameters: &[Float],
        names: &[T],
        mc_evaluator: Option<Evaluator>,
    ) -> Result<Vec<Float>, LadduError> {
        #[cfg(feature = "mpi")]
        {
            if let Some(world) = laddu_core::mpi::get_world() {
                return self.project_with_mpi(parameters, names, mc_evaluator, &world);
            }
        }
        self.project_with_local(parameters, names, mc_evaluator)
    }

    pub fn project_gradient_with_local<T: AsRef<str>>(
        &self,
        parameters: &[Float],
        names: &[T],
        mc_evaluator: Option<Evaluator>,
    ) -> Result<(Vec<Float>, Vec<DVector<Float>>), LadduError> {
        if let Some(mc_evaluator) = &mc_evaluator {
            let current_active_mc = mc_evaluator.resources.read().active.clone();
            mc_evaluator.isolate_many(names)?;
            let mc_dataset = mc_evaluator.dataset.clone();
            let result = mc_evaluator.evaluate_local(parameters);
            let result_gradient = mc_evaluator.evaluate_gradient(parameters);
            let n_mc = self.accmc_evaluator.dataset.n_events() as Float;
            #[cfg(feature = "rayon")]
            let (res, res_gradient) = {
                (
                    result
                        .par_iter()
                        .zip(mc_dataset.events.par_iter())
                        .map(|(l, e)| e.weight * l.re / n_mc)
                        .collect(),
                    result_gradient
                        .par_iter()
                        .zip(mc_dataset.events.par_iter())
                        .map(|(grad_l, e)| grad_l.map(|g| g.re).scale(e.weight / n_mc))
                        .collect(),
                )
            };
            #[cfg(not(feature = "rayon"))]
            let (res, res_gradient) = {
                (
                    result
                        .iter()
                        .zip(mc_dataset.events.iter())
                        .map(|(l, e)| e.weight * l.re / n_mc)
                        .collect(),
                    result_gradient
                        .iter()
                        .zip(mc_dataset.events.iter())
                        .map(|(grad_l, e)| grad_l.map(|g| g.re).scale(e.weight / n_mc))
                        .collect(),
                )
            };
            mc_evaluator.resources.write().active = current_active_mc;
            Ok((res, res_gradient))
        } else {
            let current_active_data = self.data_evaluator.resources.read().active.clone();
            let current_active_accmc = self.accmc_evaluator.resources.read().active.clone();
            self.isolate_many(names)?;
            let mc_dataset = &self.accmc_evaluator.dataset;
            let result = self.accmc_evaluator.evaluate_local(parameters);
            let result_gradient = self.accmc_evaluator.evaluate_gradient(parameters);
            let n_mc = self.accmc_evaluator.dataset.n_events() as Float;
            #[cfg(feature = "rayon")]
            let (res, res_gradient) = {
                (
                    result
                        .par_iter()
                        .zip(mc_dataset.events.par_iter())
                        .map(|(l, e)| e.weight * l.re / n_mc)
                        .collect(),
                    result_gradient
                        .par_iter()
                        .zip(mc_dataset.events.par_iter())
                        .map(|(grad_l, e)| grad_l.map(|g| g.re).scale(e.weight / n_mc))
                        .collect(),
                )
            };
            #[cfg(not(feature = "rayon"))]
            let (res, res_gradient) = {
                (
                    result
                        .iter()
                        .zip(mc_dataset.events.iter())
                        .map(|(l, e)| e.weight * l.re / n_mc)
                        .collect(),
                    result_gradient
                        .iter()
                        .zip(mc_dataset.events.iter())
                        .map(|(grad_l, e)| grad_l.map(|g| g.re).scale(e.weight / n_mc))
                        .collect(),
                )
            };
            self.data_evaluator.resources.write().active = current_active_data;
            self.accmc_evaluator.resources.write().active = current_active_accmc;
            Ok((res, res_gradient))
        }
    }

    #[cfg(feature = "mpi")]
    pub fn project_gradient_with_mpi<T: AsRef<str>>(
        &self,
        parameters: &[Float],
        names: &[T],
        mc_evaluator: Option<Evaluator>,
        world: &SimpleCommunicator,
    ) -> Result<(Vec<Float>, Vec<DVector<Float>>), LadduError> {
        let n_events = mc_evaluator
            .as_ref()
            .unwrap_or(&self.accmc_evaluator)
            .dataset
            .n_events();
        let (local_projection, local_gradient_projection) =
            self.project_gradient_with_local(parameters, names, mc_evaluator)?;
        let mut projection_result: Vec<Float> = vec![0.0; n_events];
        let (counts, displs) = world.get_counts_displs(n_events);
        {
            let mut partitioned_buffer = PartitionMut::new(&mut projection_result, counts, displs);
            world.all_gather_varcount_into(&local_projection, &mut partitioned_buffer);
        }

        let flattened_local_gradient_projection = local_gradient_projection
            .iter()
            .flat_map(|g| g.data.as_vec().to_vec())
            .collect::<Vec<Float>>();
        let (counts, displs) = world.get_flattened_counts_displs(n_events, parameters.len());
        let mut flattened_result_buffer = vec![0.0; n_events * parameters.len()];
        let mut partitioned_flattened_result_buffer =
            PartitionMut::new(&mut flattened_result_buffer, counts, displs);
        world.all_gather_varcount_into(
            &flattened_local_gradient_projection,
            &mut partitioned_flattened_result_buffer,
        );
        let gradient_projection_result = flattened_result_buffer
            .chunks(parameters.len())
            .map(DVector::from_row_slice)
            .collect();
        Ok((projection_result, gradient_projection_result))
    }
    pub fn project_gradient_with<T: AsRef<str>>(
        &self,
        parameters: &[Float],
        names: &[T],
        mc_evaluator: Option<Evaluator>,
    ) -> Result<(Vec<Float>, Vec<DVector<Float>>), LadduError> {
        #[cfg(feature = "mpi")]
        {
            if let Some(world) = laddu_core::mpi::get_world() {
                return self.project_gradient_with_mpi(parameters, names, mc_evaluator, &world);
            }
        }
        self.project_gradient_with_local(parameters, names, mc_evaluator)
    }

    /// Evaluate the extended negative log-likelihood over the local events,
    /// NLL = -2 [ Σ_data w ln L(θ) - (1/N_MC) Σ_accmc w L(θ) ],
    /// using Klein summation to reduce floating-point accumulation error.
    fn evaluate_local(&self, parameters: &[Float]) -> Float {
        let data_result = self.data_evaluator.evaluate_local(parameters);
        let mc_result = self.accmc_evaluator.evaluate_local(parameters);
        let n_mc = self.accmc_evaluator.dataset.n_events() as Float;
        #[cfg(feature = "rayon")]
        let data_term: Float = data_result
            .par_iter()
            .zip(self.data_evaluator.dataset.events.par_iter())
            .map(|(l, e)| e.weight * Float::ln(l.re))
            .parallel_sum_with_accumulator::<Klein<Float>>();
        #[cfg(feature = "rayon")]
        let mc_term: Float = mc_result
            .par_iter()
            .zip(self.accmc_evaluator.dataset.events.par_iter())
            .map(|(l, e)| e.weight * l.re)
            .parallel_sum_with_accumulator::<Klein<Float>>();
        #[cfg(not(feature = "rayon"))]
        let data_term: Float = data_result
            .iter()
            .zip(self.data_evaluator.dataset.events.iter())
            .map(|(l, e)| e.weight * Float::ln(l.re))
            .sum_with_accumulator::<Klein<Float>>();
        #[cfg(not(feature = "rayon"))]
        let mc_term: Float = mc_result
            .iter()
            .zip(self.accmc_evaluator.dataset.events.iter())
            .map(|(l, e)| e.weight * l.re)
            .sum_with_accumulator::<Klein<Float>>();
        -2.0 * (data_term - mc_term / n_mc)
    }

    #[cfg(feature = "mpi")]
    fn evaluate_mpi(&self, parameters: &[Float], world: &SimpleCommunicator) -> Float {
        let local_evaluation = self.evaluate_local(parameters);
        let mut buffer: Vec<Float> = vec![0.0; world.size() as usize];
        world.all_gather_into(&local_evaluation, &mut buffer);
        buffer.iter().sum()
    }

    fn evaluate_gradient_local(&self, parameters: &[Float]) -> DVector<Float> {
        let data_resources = self.data_evaluator.resources.read();
        let data_parameters = Parameters::new(parameters, &data_resources.constants);
        let mc_resources = self.accmc_evaluator.resources.read();
        let mc_parameters = Parameters::new(parameters, &mc_resources.constants);
        let n_mc = self.accmc_evaluator.dataset.n_events() as Float;
        #[cfg(feature = "rayon")]
        let data_term: DVector<Float> = self
            .data_evaluator
            .dataset
            .events
            .par_iter()
            .zip(data_resources.caches.par_iter())
            .map(|(event, cache)| {
                let mut gradient_values =
                    vec![DVector::zeros(parameters.len()); self.data_evaluator.amplitudes.len()];
                self.data_evaluator
                    .amplitudes
                    .iter()
                    .zip(data_resources.active.iter())
                    .zip(gradient_values.iter_mut())
                    .for_each(|((amp, active), grad)| {
                        if *active {
                            amp.compute_gradient(&data_parameters, event, cache, grad)
                        }
                    });
                (
                    event.weight,
                    AmplitudeValues(
                        self.data_evaluator
                            .amplitudes
                            .iter()
                            .zip(data_resources.active.iter())
                            .map(|(amp, active)| {
                                if *active {
                                    amp.compute(&data_parameters, event, cache)
                                } else {
                                    Complex::ZERO
                                }
                            })
                            .collect(),
                    ),
                    GradientValues(parameters.len(), gradient_values),
                )
            })
            .map(|(weight, amp_vals, grad_vals)| {
                (
                    weight,
                    self.data_evaluator.expression.evaluate(&amp_vals),
                    self.data_evaluator
                        .expression
                        .evaluate_gradient(&amp_vals, &grad_vals),
                )
            })
            .map(|(w, l, g)| g.map(|gi| gi.re * w / l.re))
            .collect::<Vec<DVector<Float>>>()
            .iter()
            .sum();
        #[cfg(feature = "rayon")]
        let mc_term: DVector<Float> = self
            .accmc_evaluator
            .dataset
            .events
            .par_iter()
            .zip(mc_resources.caches.par_iter())
            .map(|(event, cache)| {
                let mut gradient_values =
                    vec![DVector::zeros(parameters.len()); self.accmc_evaluator.amplitudes.len()];
                self.accmc_evaluator
                    .amplitudes
                    .iter()
                    .zip(mc_resources.active.iter())
                    .zip(gradient_values.iter_mut())
                    .for_each(|((amp, active), grad)| {
                        if *active {
                            amp.compute_gradient(&mc_parameters, event, cache, grad)
                        }
                    });
                (
                    event.weight,
                    AmplitudeValues(
                        self.accmc_evaluator
                            .amplitudes
                            .iter()
                            .zip(mc_resources.active.iter())
                            .map(|(amp, active)| {
                                if *active {
                                    amp.compute(&mc_parameters, event, cache)
                                } else {
                                    Complex::ZERO
                                }
                            })
                            .collect(),
                    ),
                    GradientValues(parameters.len(), gradient_values),
                )
            })
            .map(|(weight, amp_vals, grad_vals)| {
                (
                    weight,
                    self.accmc_evaluator
                        .expression
                        .evaluate_gradient(&amp_vals, &grad_vals),
                )
            })
            .map(|(w, g)| w * g.map(|gi| gi.re))
            .collect::<Vec<DVector<Float>>>()
            .iter()
            .sum();
        #[cfg(not(feature = "rayon"))]
        let data_term: DVector<Float> = self
            .data_evaluator
            .dataset
            .events
            .iter()
            .zip(data_resources.caches.iter())
            .map(|(event, cache)| {
                let mut gradient_values =
                    vec![DVector::zeros(parameters.len()); self.data_evaluator.amplitudes.len()];
                self.data_evaluator
                    .amplitudes
                    .iter()
                    .zip(data_resources.active.iter())
                    .zip(gradient_values.iter_mut())
                    .for_each(|((amp, active), grad)| {
                        if *active {
                            amp.compute_gradient(&data_parameters, event, cache, grad)
                        }
                    });
                (
                    event.weight,
                    AmplitudeValues(
                        self.data_evaluator
                            .amplitudes
                            .iter()
                            .zip(data_resources.active.iter())
                            .map(|(amp, active)| {
                                if *active {
                                    amp.compute(&data_parameters, event, cache)
                                } else {
                                    Complex::ZERO
                                }
                            })
                            .collect(),
                    ),
                    GradientValues(parameters.len(), gradient_values),
                )
            })
            .map(|(weight, amp_vals, grad_vals)| {
                (
                    weight,
                    self.data_evaluator.expression.evaluate(&amp_vals),
                    self.data_evaluator
                        .expression
                        .evaluate_gradient(&amp_vals, &grad_vals),
                )
            })
            .map(|(w, l, g)| g.map(|gi| gi.re * w / l.re))
            .sum();
        #[cfg(not(feature = "rayon"))]
        let mc_term: DVector<Float> = self
            .accmc_evaluator
            .dataset
            .events
            .iter()
            .zip(mc_resources.caches.iter())
            .map(|(event, cache)| {
                let mut gradient_values =
                    vec![DVector::zeros(parameters.len()); self.accmc_evaluator.amplitudes.len()];
                self.accmc_evaluator
                    .amplitudes
                    .iter()
                    .zip(mc_resources.active.iter())
                    .zip(gradient_values.iter_mut())
                    .for_each(|((amp, active), grad)| {
                        if *active {
                            amp.compute_gradient(&mc_parameters, event, cache, grad)
                        }
                    });
                (
                    event.weight,
                    AmplitudeValues(
                        self.accmc_evaluator
                            .amplitudes
                            .iter()
                            .zip(mc_resources.active.iter())
                            .map(|(amp, active)| {
                                if *active {
                                    amp.compute(&mc_parameters, event, cache)
                                } else {
                                    Complex::ZERO
                                }
                            })
                            .collect(),
                    ),
                    GradientValues(parameters.len(), gradient_values),
                )
            })
            .map(|(weight, amp_vals, grad_vals)| {
                (
                    weight,
                    self.accmc_evaluator
                        .expression
                        .evaluate_gradient(&amp_vals, &grad_vals),
                )
            })
            .map(|(w, g)| w * g.map(|gi| gi.re))
            .sum();
        -2.0 * (data_term - mc_term / n_mc)
    }

    #[cfg(feature = "mpi")]
    fn evaluate_gradient_mpi(
        &self,
        parameters: &[Float],
        world: &SimpleCommunicator,
    ) -> DVector<Float> {
        let local_evaluation_vec = self
            .evaluate_gradient_local(parameters)
            .data
            .as_vec()
            .to_vec();
        let mut flattened_result_buffer = vec![0.0; world.size() as usize * parameters.len()];
        world.all_gather_into(&local_evaluation_vec, &mut flattened_result_buffer);
        flattened_result_buffer
            .chunks(parameters.len())
            .map(DVector::from_row_slice)
            .sum::<DVector<Float>>()
    }
}
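
// A hedged usage sketch (not part of this file): after a fit, `NLL::project` can
// produce per-event Monte Carlo weights for plotting the fit result. The names
// `model`, `ds_data`, `ds_accmc`, and `best_fit` below are hypothetical.
//
// let nll = NLL::new(&model, &ds_data, &ds_accmc);
// let weights: Vec<Float> = nll.project(&best_fit, None);
// assert_eq!(weights.len(), ds_accmc.n_events());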

impl LikelihoodTerm for NLL {
    fn parameters(&self) -> Vec<String> {
        self.data_evaluator
            .resources
            .read()
            .parameters
            .iter()
            .cloned()
            .collect()
    }

    fn evaluate(&self, parameters: &[Float]) -> Float {
        #[cfg(feature = "mpi")]
        {
            if let Some(world) = laddu_core::mpi::get_world() {
                return self.evaluate_mpi(parameters, &world);
            }
        }
        self.evaluate_local(parameters)
    }

    fn evaluate_gradient(&self, parameters: &[Float]) -> DVector<Float> {
        #[cfg(feature = "mpi")]
        {
            if let Some(world) = laddu_core::mpi::get_world() {
                return self.evaluate_gradient_mpi(parameters, &world);
            }
        }
        self.evaluate_gradient_local(parameters)
    }
}

#[cfg(feature = "rayon")]
impl Function<ThreadPool, LadduError> for NLL {
    fn evaluate(
        &self,
        parameters: &[Float],
        thread_pool: &mut ThreadPool,
    ) -> Result<Float, LadduError> {
        Ok(thread_pool.install(|| LikelihoodTerm::evaluate(self, parameters)))
    }
    fn gradient(
        &self,
        parameters: &[Float],
        thread_pool: &mut ThreadPool,
    ) -> Result<DVector<Float>, LadduError> {
        Ok(thread_pool.install(|| LikelihoodTerm::evaluate_gradient(self, parameters)))
    }
}

#[cfg(not(feature = "rayon"))]
impl Function<(), LadduError> for NLL {
    fn evaluate(&self, parameters: &[Float], _user_data: &mut ()) -> Result<Float, LadduError> {
        Ok(LikelihoodTerm::evaluate(self, parameters))
    }
    fn gradient(
        &self,
        parameters: &[Float],
        _user_data: &mut (),
    ) -> Result<DVector<Float>, LadduError> {
        Ok(LikelihoodTerm::evaluate_gradient(self, parameters))
    }
}

/// A wrapper which negates an [`NLL`], turning the negative log-likelihood back into a
/// log-likelihood for samplers that maximize rather than minimize.
pub(crate) struct LogLikelihood<'a>(&'a NLL);

#[cfg(feature = "rayon")]
impl Function<ThreadPool, LadduError> for LogLikelihood<'_> {
    fn evaluate(
        &self,
        parameters: &[Float],
        thread_pool: &mut ThreadPool,
    ) -> Result<Float, LadduError> {
        Function::evaluate(self.0, parameters, thread_pool).map(|res| -res)
    }
    fn gradient(
        &self,
        parameters: &[Float],
        thread_pool: &mut ThreadPool,
    ) -> Result<DVector<Float>, LadduError> {
        Function::gradient(self.0, parameters, thread_pool).map(|res| -res)
    }
}

#[cfg(not(feature = "rayon"))]
impl Function<(), LadduError> for LogLikelihood<'_> {
    fn evaluate(&self, parameters: &[Float], user_data: &mut ()) -> Result<Float, LadduError> {
        Function::evaluate(self.0, parameters, user_data).map(|res| -res)
    }
    fn gradient(
        &self,
        parameters: &[Float],
        user_data: &mut (),
    ) -> Result<DVector<Float>, LadduError> {
        Function::gradient(self.0, parameters, user_data).map(|res| -res)
    }
}

impl NLL {
    /// Minimize the negative log-likelihood from the starting point `p0`, optionally with bounds
    /// and [`MinimizerOptions`], returning the final [`Status`].
    pub fn minimize(
        &self,
        p0: &[Float],
        bounds: Option<Vec<(Float, Float)>>,
        options: Option<MinimizerOptions>,
    ) -> Result<Status, LadduError> {
        let options = options.unwrap_or_default();
        let mut m = Minimizer::new(options.algorithm, self.parameters().len())
            .with_max_steps(options.max_steps);
        if let Some(bounds) = bounds {
            m = m.with_bounds(bounds);
        }
        for observer in options.observers {
            m = m.with_observer(observer);
        }
        #[cfg(feature = "rayon")]
        {
            m.minimize(
                self,
                p0,
                &mut ThreadPoolBuilder::new()
                    .num_threads(options.threads)
                    .build()
                    .map_err(LadduError::from)?,
                ganesh::abort_signal::CtrlCAbortSignal::new().boxed(),
            )?;
        }
        #[cfg(not(feature = "rayon"))]
        {
            m.minimize(
                self,
                p0,
                &mut (),
                ganesh::abort_signal::CtrlCAbortSignal::new().boxed(),
            )?;
        }
        Ok(m.status)
    }
    /// Sample the log-likelihood with an MCMC sampler for `n_steps` steps, starting each walker
    /// from the positions in `p0`, and return the resulting [`Ensemble`].
    pub fn mcmc<T: AsRef<[DVector<Float>]>>(
        &self,
        p0: T,
        n_steps: usize,
        options: Option<MCMCOptions>,
        rng: Rng,
    ) -> Result<Ensemble, LadduError> {
        let options = options.unwrap_or(MCMCOptions::default_with_rng(rng));
        let mut m = Sampler::new(options.algorithm, p0.as_ref().to_vec());
        for observer in options.observers {
            m = m.with_observer(observer);
        }
        let func = LogLikelihood(self);
        #[cfg(feature = "rayon")]
        {
            m.sample(
                &func,
                &mut ThreadPoolBuilder::new()
                    .num_threads(options.threads)
                    .build()
                    .map_err(LadduError::from)?,
                n_steps,
                ganesh::abort_signal::CtrlCAbortSignal::new().boxed(),
            )?;
        }
        #[cfg(not(feature = "rayon"))]
        {
            m.sample(
                &func,
                &mut (),
                n_steps,
                ganesh::abort_signal::CtrlCAbortSignal::new().boxed(),
            )?;
        }
        Ok(m.ensemble)
    }
    /// Minimize the negative log-likelihood with a particle swarm, returning the final [`Swarm`].
    pub fn swarm(
        &self,
        swarm: Swarm,
        options: Option<SwarmOptions>,
        rng: Rng,
    ) -> Result<Swarm, LadduError> {
        let options = options.unwrap_or(SwarmOptions::default_with_rng(rng));
        let mut m = SwarmMinimizer::new(options.algorithm, swarm);
        for observer in options.observers {
            m = m.with_observer(observer);
        }
        #[cfg(feature = "rayon")]
        {
            m.minimize(
                self,
                &mut ThreadPoolBuilder::new()
                    .num_threads(options.threads)
                    .build()
                    .map_err(LadduError::from)?,
                ganesh::abort_signal::CtrlCAbortSignal::new().boxed(),
            )?;
        }
        #[cfg(not(feature = "rayon"))]
        {
            m.minimize(
                self,
                &mut (),
                ganesh::abort_signal::CtrlCAbortSignal::new().boxed(),
            )?;
        }
        Ok(m.swarm)
    }
}
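
// A hedged end-to-end sketch (hypothetical names: `model`, `ds_data`, `ds_accmc`;
// the `Status` fields shown follow ganesh's conventions and are illustrative):
//
// let nll = NLL::new(&model, &ds_data, &ds_accmc);
// let p0 = vec![0.0; nll.parameters().len()];
// let status = nll.minimize(&p0, None, None)?;
// if status.converged {
//     println!("best fit: {}", status.x);
// }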

#[cfg(feature = "python")]
#[pyclass(name = "NLL", module = "laddu")]
#[derive(Clone)]
pub struct PyNLL(pub Box<NLL>);

#[cfg(feature = "python")]
#[pymethods]
impl PyNLL {
    #[new]
    #[pyo3(signature = (model, ds_data, ds_accmc))]
    fn new(model: &PyModel, ds_data: &PyDataset, ds_accmc: &PyDataset) -> Self {
        Self(NLL::new(&model.0, &ds_data.0, &ds_accmc.0))
    }
    #[getter]
    fn data(&self) -> PyDataset {
        PyDataset(self.0.data_evaluator.dataset.clone())
    }
    #[getter]
    fn accmc(&self) -> PyDataset {
        PyDataset(self.0.accmc_evaluator.dataset.clone())
    }
    fn as_term(&self) -> PyLikelihoodTerm {
        PyLikelihoodTerm(self.0.clone())
    }
    #[getter]
    fn parameters(&self) -> Vec<String> {
        self.0.parameters()
    }
    fn activate(&self, arg: &Bound<'_, PyAny>) -> PyResult<()> {
        if let Ok(string_arg) = arg.extract::<String>() {
            self.0.activate(&string_arg)?;
        } else if let Ok(list_arg) = arg.downcast::<PyList>() {
            let vec: Vec<String> = list_arg.extract()?;
            self.0.activate_many(&vec)?;
        } else {
            return Err(PyTypeError::new_err(
                "Argument must be either a string or a list of strings",
            ));
        }
        Ok(())
    }
    fn activate_all(&self) {
        self.0.activate_all();
    }
    fn deactivate(&self, arg: &Bound<'_, PyAny>) -> PyResult<()> {
        if let Ok(string_arg) = arg.extract::<String>() {
            self.0.deactivate(&string_arg)?;
        } else if let Ok(list_arg) = arg.downcast::<PyList>() {
            let vec: Vec<String> = list_arg.extract()?;
            self.0.deactivate_many(&vec)?;
        } else {
            return Err(PyTypeError::new_err(
                "Argument must be either a string or a list of strings",
            ));
        }
        Ok(())
    }
    fn deactivate_all(&self) {
        self.0.deactivate_all();
    }
    fn isolate(&self, arg: &Bound<'_, PyAny>) -> PyResult<()> {
        if let Ok(string_arg) = arg.extract::<String>() {
            self.0.isolate(&string_arg)?;
        } else if let Ok(list_arg) = arg.downcast::<PyList>() {
            let vec: Vec<String> = list_arg.extract()?;
            self.0.isolate_many(&vec)?;
        } else {
            return Err(PyTypeError::new_err(
                "Argument must be either a string or a list of strings",
            ));
        }
        Ok(())
    }
    #[pyo3(signature = (parameters, *, threads=None))]
    fn evaluate(&self, parameters: Vec<Float>, threads: Option<usize>) -> PyResult<Float> {
        #[cfg(feature = "rayon")]
        {
            Ok(ThreadPoolBuilder::new()
                .num_threads(threads.unwrap_or_else(num_cpus::get))
                .build()
                .map_err(LadduError::from)?
                .install(|| LikelihoodTerm::evaluate(self.0.as_ref(), &parameters)))
        }
        #[cfg(not(feature = "rayon"))]
        {
            Ok(LikelihoodTerm::evaluate(self.0.as_ref(), &parameters))
        }
    }
    #[pyo3(signature = (parameters, *, threads=None))]
    fn evaluate_gradient<'py>(
        &self,
        py: Python<'py>,
        parameters: Vec<Float>,
        threads: Option<usize>,
    ) -> PyResult<Bound<'py, PyArray1<Float>>> {
        #[cfg(feature = "rayon")]
        {
            Ok(PyArray1::from_slice(
                py,
                ThreadPoolBuilder::new()
                    .num_threads(threads.unwrap_or_else(num_cpus::get))
                    .build()
                    .map_err(LadduError::from)?
                    .install(|| self.0.evaluate_gradient(&parameters))
                    .as_slice(),
            ))
        }
        #[cfg(not(feature = "rayon"))]
        {
            Ok(PyArray1::from_slice(
                py,
                self.0.evaluate_gradient(&parameters).as_slice(),
            ))
        }
    }
    #[pyo3(signature = (parameters, *, mc_evaluator = None, threads=None))]
    fn project<'py>(
        &self,
        py: Python<'py>,
        parameters: Vec<Float>,
        mc_evaluator: Option<PyEvaluator>,
        threads: Option<usize>,
    ) -> PyResult<Bound<'py, PyArray1<Float>>> {
        #[cfg(feature = "rayon")]
        {
            Ok(PyArray1::from_slice(
                py,
                ThreadPoolBuilder::new()
                    .num_threads(threads.unwrap_or_else(num_cpus::get))
                    .build()
                    .map_err(LadduError::from)?
                    .install(|| {
                        self.0
                            .project(&parameters, mc_evaluator.map(|pyeval| pyeval.0.clone()))
                    })
                    .as_slice(),
            ))
        }
        #[cfg(not(feature = "rayon"))]
        {
            Ok(PyArray1::from_slice(
                py,
                self.0
                    .project(&parameters, mc_evaluator.map(|pyeval| pyeval.0.clone()))
                    .as_slice(),
            ))
        }
    }

    #[pyo3(signature = (parameters, arg, *, mc_evaluator = None, threads=None))]
    fn project_with<'py>(
        &self,
        py: Python<'py>,
        parameters: Vec<Float>,
        arg: &Bound<'_, PyAny>,
        mc_evaluator: Option<PyEvaluator>,
        threads: Option<usize>,
    ) -> PyResult<Bound<'py, PyArray1<Float>>> {
        let names = if let Ok(string_arg) = arg.extract::<String>() {
            vec![string_arg]
        } else if let Ok(list_arg) = arg.downcast::<PyList>() {
            let vec: Vec<String> = list_arg.extract()?;
            vec
        } else {
            return Err(PyTypeError::new_err(
                "Argument must be either a string or a list of strings",
            ));
        };
        #[cfg(feature = "rayon")]
        {
            Ok(PyArray1::from_slice(
                py,
                ThreadPoolBuilder::new()
                    .num_threads(threads.unwrap_or_else(num_cpus::get))
                    .build()
                    .map_err(LadduError::from)?
                    .install(|| {
                        self.0.project_with(
                            &parameters,
                            &names,
                            mc_evaluator.map(|pyeval| pyeval.0.clone()),
                        )
                    })?
                    .as_slice(),
            ))
        }
        #[cfg(not(feature = "rayon"))]
        {
            Ok(PyArray1::from_slice(
                py,
                self.0
                    .project_with(
                        &parameters,
                        &names,
                        mc_evaluator.map(|pyeval| pyeval.0.clone()),
                    )?
                    .as_slice(),
            ))
        }
    }

    #[pyo3(signature = (p0, *, bounds=None, method=None, observers=None, max_steps=4000, debug=false, verbose=false, show_step=true, show_x=true, show_fx=true, skip_hessian=false, threads=None))]
    #[allow(clippy::too_many_arguments)]
    fn minimize(
        &self,
        p0: Vec<Float>,
        bounds: Option<Vec<(Option<Float>, Option<Float>)>>,
        method: Option<Bound<'_, PyAny>>,
        observers: Option<Bound<'_, PyAny>>,
        max_steps: usize,
        debug: bool,
        verbose: bool,
        show_step: bool,
        show_x: bool,
        show_fx: bool,
        skip_hessian: bool,
        threads: Option<usize>,
    ) -> PyResult<PyStatus> {
        let bounds = bounds.map(|bounds_vec| {
            bounds_vec
                .iter()
                .map(|(opt_lb, opt_ub)| {
                    (
                        opt_lb.unwrap_or(Float::NEG_INFINITY),
                        opt_ub.unwrap_or(Float::INFINITY),
                    )
                })
                .collect()
        });
        let options = py_parse_minimizer_options(
            method,
            observers,
            max_steps,
            debug,
            verbose,
            show_step,
            show_x,
            show_fx,
            skip_hessian,
            threads,
        )?;
        let status = self.0.minimize(&p0, bounds, Some(options))?;
        Ok(PyStatus(status))
    }
    #[pyo3(signature = (p0, n_steps, *, method=None, observers=None, debug=false, verbose=false, seed=0, threads=None))]
    #[allow(clippy::too_many_arguments)]
    fn mcmc(
        &self,
        p0: Vec<Vec<Float>>,
        n_steps: usize,
        method: Option<Bound<'_, PyAny>>,
        observers: Option<Bound<'_, PyAny>>,
        debug: bool,
        verbose: bool,
        seed: u64,
        threads: Option<usize>,
    ) -> PyResult<PyEnsemble> {
        let p0 = p0.into_iter().map(DVector::from_vec).collect::<Vec<_>>();
        let mut rng = Rng::new();
        rng.seed(seed);
        let options =
            py_parse_mcmc_options(method, observers, debug, verbose, threads, rng.clone())?;
        let ensemble = self.0.mcmc(&p0, n_steps, Some(options), rng)?;
        Ok(PyEnsemble(ensemble))
    }

    #[pyo3(signature = (swarm, *, method=None, observers=None, debug=false, verbose=false, seed=0, threads=None))]
    #[allow(clippy::too_many_arguments)]
    fn swarm(
        &self,
        swarm: PySwarm,
        method: Option<Bound<'_, PyAny>>,
        observers: Option<Bound<'_, PyAny>>,
        debug: bool,
        verbose: bool,
        seed: u64,
        threads: Option<usize>,
    ) -> PyResult<PySwarm> {
        let mut rng = Rng::new();
        rng.seed(seed);
        let options =
            py_parse_swarm_options(method, observers, debug, verbose, threads, rng.clone())?;
        let swarm = self.0.swarm(swarm.0, Some(options), rng)?;
        Ok(PySwarm(swarm))
    }
}

/// An identifier for a [`LikelihoodTerm`] registered with a [`LikelihoodManager`], optionally
/// carrying a user-supplied name.
#[derive(Clone, Debug)]
pub struct LikelihoodID(usize, Option<String>);

impl Display for LikelihoodID {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if let Some(name) = &self.1 {
            write!(f, "{}({})", name, self.0)
        } else {
            write!(f, "({})", self.0)
        }
    }
}

#[cfg(feature = "python")]
#[pyclass(name = "LikelihoodID", module = "laddu")]
#[derive(Clone)]
pub struct PyLikelihoodID(LikelihoodID);

#[cfg(feature = "python")]
#[pymethods]
impl PyLikelihoodID {
    fn __add__(&self, other: &Bound<'_, PyAny>) -> PyResult<PyLikelihoodExpression> {
        if let Ok(other_aid) = other.extract::<PyRef<PyLikelihoodID>>() {
            Ok(PyLikelihoodExpression(self.0.clone() + other_aid.0.clone()))
        } else if let Ok(other_expr) = other.extract::<PyLikelihoodExpression>() {
            Ok(PyLikelihoodExpression(
                self.0.clone() + other_expr.0.clone(),
            ))
        } else if let Ok(int) = other.extract::<usize>() {
            if int == 0 {
                Ok(PyLikelihoodExpression(LikelihoodExpression::Term(
                    self.0.clone(),
                )))
            } else {
                Err(PyTypeError::new_err(
                    "Addition with an integer for this type is only defined for 0",
                ))
            }
        } else {
            Err(PyTypeError::new_err("Unsupported operand type for +"))
        }
    }
    fn __radd__(&self, other: &Bound<'_, PyAny>) -> PyResult<PyLikelihoodExpression> {
        if let Ok(other_aid) = other.extract::<PyRef<PyLikelihoodID>>() {
            Ok(PyLikelihoodExpression(other_aid.0.clone() + self.0.clone()))
        } else if let Ok(other_expr) = other.extract::<PyLikelihoodExpression>() {
            Ok(PyLikelihoodExpression(
                other_expr.0.clone() + self.0.clone(),
            ))
        } else if let Ok(int) = other.extract::<usize>() {
            if int == 0 {
                Ok(PyLikelihoodExpression(LikelihoodExpression::Term(
                    self.0.clone(),
                )))
            } else {
                Err(PyTypeError::new_err(
                    "Addition with an integer for this type is only defined for 0",
                ))
            }
        } else {
            Err(PyTypeError::new_err("Unsupported operand type for +"))
        }
    }
    fn __mul__(&self, other: &Bound<'_, PyAny>) -> PyResult<PyLikelihoodExpression> {
        if let Ok(other_aid) = other.extract::<PyRef<PyLikelihoodID>>() {
            Ok(PyLikelihoodExpression(self.0.clone() * other_aid.0.clone()))
        } else if let Ok(other_expr) = other.extract::<PyLikelihoodExpression>() {
            Ok(PyLikelihoodExpression(
                self.0.clone() * other_expr.0.clone(),
            ))
        } else {
            Err(PyTypeError::new_err("Unsupported operand type for *"))
        }
    }
    fn __rmul__(&self, other: &Bound<'_, PyAny>) -> PyResult<PyLikelihoodExpression> {
        if let Ok(other_aid) = other.extract::<PyRef<PyLikelihoodID>>() {
            Ok(PyLikelihoodExpression(other_aid.0.clone() * self.0.clone()))
        } else if let Ok(other_expr) = other.extract::<PyLikelihoodExpression>() {
            Ok(PyLikelihoodExpression(
                other_expr.0.clone() * self.0.clone(),
            ))
        } else {
            Err(PyTypeError::new_err("Unsupported operand type for *"))
        }
    }
    fn __str__(&self) -> String {
        format!("{}", self.0)
    }
    fn __repr__(&self) -> String {
        format!("{:?}", self.0)
    }
}

/// A manager which stores [`LikelihoodTerm`]s and maps each term's parameter list onto a single
/// combined parameter vector.
#[derive(Default, Clone)]
pub struct LikelihoodManager {
    terms: Vec<Box<dyn LikelihoodTerm>>,
    param_name_to_index: HashMap<String, usize>,
    param_names: Vec<String>,
    param_layouts: Vec<Vec<usize>>,
    param_counts: Vec<usize>,
}

impl LikelihoodManager {
    pub fn register(&mut self, term: Box<dyn LikelihoodTerm>) -> LikelihoodID {
        let term_idx = self.terms.len();
        for param_name in term.parameters() {
            if !self.param_name_to_index.contains_key(&param_name) {
                self.param_name_to_index
                    .insert(param_name.clone(), self.param_name_to_index.len());
                self.param_names.push(param_name.clone());
            }
        }
        let param_layout: Vec<usize> = term
            .parameters()
            .iter()
            .map(|name| self.param_name_to_index[name])
            .collect();
        let param_count = term.parameters().len();
        self.param_layouts.push(param_layout);
        self.param_counts.push(param_count);
        self.terms.push(term.clone());

        LikelihoodID(term_idx, None)
    }

    pub fn register_with_name<T: AsRef<str>>(
        &mut self,
        term: Box<dyn LikelihoodTerm>,
        name: T,
    ) -> LikelihoodID {
        let term_idx = self.terms.len();
        for param_name in term.parameters() {
            if !self.param_name_to_index.contains_key(&param_name) {
                self.param_name_to_index
                    .insert(param_name.clone(), self.param_name_to_index.len());
                self.param_names.push(param_name.clone());
            }
        }
        let param_layout: Vec<usize> = term
            .parameters()
            .iter()
            .map(|name| self.param_name_to_index[name])
            .collect();
        let param_count = term.parameters().len();
        self.param_layouts.push(param_layout);
        self.param_counts.push(param_count);
        self.terms.push(term.clone());

        LikelihoodID(term_idx, Some(name.as_ref().to_string()))
    }

    pub fn parameters(&self) -> Vec<String> {
        self.param_names.clone()
    }

    pub fn load(&self, likelihood_expression: &LikelihoodExpression) -> LikelihoodEvaluator {
        LikelihoodEvaluator {
            likelihood_manager: self.clone(),
            likelihood_expression: likelihood_expression.clone(),
        }
    }
}
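
// A hedged sketch of combining terms (hypothetical `nll_a` and `nll_b`, each a
// `Box<NLL>` built via `NLL::new`, which coerces to `Box<dyn LikelihoodTerm>`):
//
// let mut manager = LikelihoodManager::default();
// let a = manager.register(nll_a);
// let b = manager.register_with_name(nll_b, "channel_b");
// let evaluator = manager.load(&(&a + &b));
// let value = evaluator.evaluate(&vec![0.0; manager.parameters().len()])?;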

#[cfg(feature = "python")]
#[pyclass(name = "LikelihoodManager", module = "laddu")]
#[derive(Clone)]
pub struct PyLikelihoodManager(LikelihoodManager);

#[cfg(feature = "python")]
#[pymethods]
impl PyLikelihoodManager {
    #[new]
    fn new() -> Self {
        Self(LikelihoodManager::default())
    }
    #[pyo3(signature = (likelihood_term, *, name = None))]
    fn register(
        &mut self,
        likelihood_term: &PyLikelihoodTerm,
        name: Option<String>,
    ) -> PyLikelihoodID {
        if let Some(name) = name {
            PyLikelihoodID(self.0.register_with_name(likelihood_term.0.clone(), name))
        } else {
            PyLikelihoodID(self.0.register(likelihood_term.0.clone()))
        }
    }
    fn parameters(&self) -> Vec<String> {
        self.0.parameters()
    }
    fn load(&self, likelihood_expression: &PyLikelihoodExpression) -> PyLikelihoodEvaluator {
        PyLikelihoodEvaluator(self.0.load(&likelihood_expression.0))
    }
}

#[derive(Debug)]
struct LikelihoodValues(Vec<Float>);

#[derive(Debug)]
struct LikelihoodGradients(Vec<DVector<Float>>);

/// An expression tree over registered [`LikelihoodTerm`]s.
#[derive(Clone, Default)]
pub enum LikelihoodExpression {
    /// The additive identity (evaluates to zero).
    #[default]
    Zero,
    /// The multiplicative identity (evaluates to one).
    One,
    /// A single registered term.
    Term(LikelihoodID),
    /// The sum of two expressions.
    Add(Box<LikelihoodExpression>, Box<LikelihoodExpression>),
    /// The product of two expressions.
    Mul(Box<LikelihoodExpression>, Box<LikelihoodExpression>),
}

impl Debug for LikelihoodExpression {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.write_tree(f, "", "", "")
    }
}

impl Display for LikelihoodExpression {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{:?}", self)
    }
}
2051impl LikelihoodExpression {
2052 fn evaluate(&self, likelihood_values: &LikelihoodValues) -> Float {
2053 match self {
2054 LikelihoodExpression::Zero => 0.0,
2055 LikelihoodExpression::One => 1.0,
2056 LikelihoodExpression::Term(lid) => likelihood_values.0[lid.0],
2057 LikelihoodExpression::Add(a, b) => {
2058 a.evaluate(likelihood_values) + b.evaluate(likelihood_values)
2059 }
2060 LikelihoodExpression::Mul(a, b) => {
2061 a.evaluate(likelihood_values) * b.evaluate(likelihood_values)
2062 }
2063 }
2064 }
2065 fn evaluate_gradient(
2066 &self,
2067 likelihood_values: &LikelihoodValues,
2068 likelihood_gradients: &LikelihoodGradients,
2069 ) -> DVector<Float> {
2070 match self {
2071 LikelihoodExpression::Zero => DVector::zeros(0),
2072 LikelihoodExpression::One => DVector::zeros(0),
2073 LikelihoodExpression::Term(lid) => likelihood_gradients.0[lid.0].clone(),
2074 LikelihoodExpression::Add(a, b) => {
2075 a.evaluate_gradient(likelihood_values, likelihood_gradients)
2076 + b.evaluate_gradient(likelihood_values, likelihood_gradients)
2077 }
2078 LikelihoodExpression::Mul(a, b) => {
2079 a.evaluate_gradient(likelihood_values, likelihood_gradients)
2080 * b.evaluate(likelihood_values)
2081 + b.evaluate_gradient(likelihood_values, likelihood_gradients)
2082 * a.evaluate(likelihood_values)
2083 }
2084 }
2085 }
2086 fn write_tree(
2088 &self,
2089 f: &mut std::fmt::Formatter<'_>,
2090 parent_prefix: &str,
2091 immediate_prefix: &str,
2092 parent_suffix: &str,
2093 ) -> std::fmt::Result {
2094 let display_string = match self {
2095 Self::Zero => "0".to_string(),
2096 Self::One => "1".to_string(),
2097 Self::Term(lid) => {
2098 format!("{}", lid.0)
2099 }
2100 Self::Add(_, _) => "+".to_string(),
2101 Self::Mul(_, _) => "*".to_string(),
2102 };
2103 writeln!(f, "{}{}{}", parent_prefix, immediate_prefix, display_string)?;
2104 match self {
2105 Self::Term(_) | Self::Zero | Self::One => {}
2106 Self::Add(a, b) | Self::Mul(a, b) => {
2107 let terms = [a, b];
2108 let mut it = terms.iter().peekable();
2109 let child_prefix = format!("{}{}", parent_prefix, parent_suffix);
2110 while let Some(child) = it.next() {
2111 match it.peek() {
2112 Some(_) => child.write_tree(f, &child_prefix, "├─ ", "│ "),
2113 None => child.write_tree(f, &child_prefix, "└─ ", " "),
2114 }?;
2115 }
2116 }
2117 }
2118 Ok(())
2119 }
2120}

impl_op_ex!(+ |a: &LikelihoodExpression, b: &LikelihoodExpression| -> LikelihoodExpression { LikelihoodExpression::Add(Box::new(a.clone()), Box::new(b.clone()))});
impl_op_ex!(
    *|a: &LikelihoodExpression, b: &LikelihoodExpression| -> LikelihoodExpression {
        LikelihoodExpression::Mul(Box::new(a.clone()), Box::new(b.clone()))
    }
);
impl_op_ex_commutative!(+ |a: &LikelihoodID, b: &LikelihoodExpression| -> LikelihoodExpression { LikelihoodExpression::Add(Box::new(LikelihoodExpression::Term(a.clone())), Box::new(b.clone()))});
impl_op_ex_commutative!(
    *|a: &LikelihoodID, b: &LikelihoodExpression| -> LikelihoodExpression {
        LikelihoodExpression::Mul(
            Box::new(LikelihoodExpression::Term(a.clone())),
            Box::new(b.clone()),
        )
    }
);
impl_op_ex!(+ |a: &LikelihoodID, b: &LikelihoodID| -> LikelihoodExpression { LikelihoodExpression::Add(Box::new(LikelihoodExpression::Term(a.clone())), Box::new(LikelihoodExpression::Term(b.clone())))});
impl_op_ex!(
    *|a: &LikelihoodID, b: &LikelihoodID| -> LikelihoodExpression {
        LikelihoodExpression::Mul(
            Box::new(LikelihoodExpression::Term(a.clone())),
            Box::new(LikelihoodExpression::Term(b.clone())),
        )
    }
);
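
// These operator overloads let expressions be written arithmetically. A hedged
// example (with `a` and `b` as `LikelihoodID`s returned by a `LikelihoodManager`):
//
// let expr: LikelihoodExpression = &a + &b;     // sum of two terms
// let joint: LikelihoodExpression = &a * &expr; // product with a sub-expression
// println!("{}", joint);                        // prints the expression tree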

#[cfg(feature = "python")]
#[pyclass(name = "LikelihoodExpression", module = "laddu")]
#[derive(Clone)]
pub struct PyLikelihoodExpression(LikelihoodExpression);

#[cfg(feature = "python")]
#[pyfunction(name = "likelihood_sum")]
pub fn py_likelihood_sum(terms: Vec<Bound<'_, PyAny>>) -> PyResult<PyLikelihoodExpression> {
    if terms.is_empty() {
        return Ok(PyLikelihoodExpression(LikelihoodExpression::Zero));
    }
    if terms.len() == 1 {
        let term = &terms[0];
        if let Ok(expression) = term.extract::<PyLikelihoodExpression>() {
            return Ok(expression);
        }
        if let Ok(py_likelihood_id) = term.extract::<PyLikelihoodID>() {
            return Ok(PyLikelihoodExpression(LikelihoodExpression::Term(
                py_likelihood_id.0,
            )));
        }
        return Err(PyTypeError::new_err(
            "Item is neither a PyLikelihoodExpression nor a PyLikelihoodID",
        ));
    }
    let mut iter = terms.iter();
    let Some(first_term) = iter.next() else {
        return Ok(PyLikelihoodExpression(LikelihoodExpression::Zero));
    };
    if let Ok(first_expression) = first_term.extract::<PyLikelihoodExpression>() {
        let mut summation = first_expression.clone();
        for term in iter {
            summation = summation.__add__(term)?;
        }
        return Ok(summation);
    }
    if let Ok(first_likelihood_id) = first_term.extract::<PyLikelihoodID>() {
        let mut summation =
            PyLikelihoodExpression(LikelihoodExpression::Term(first_likelihood_id.0));
        for term in iter {
            summation = summation.__add__(term)?;
        }
        return Ok(summation);
    }
    Err(PyTypeError::new_err(
        "Elements must be PyLikelihoodExpression or PyLikelihoodID",
    ))
}

#[cfg(feature = "python")]
#[pyfunction(name = "likelihood_product")]
pub fn py_likelihood_product(terms: Vec<Bound<'_, PyAny>>) -> PyResult<PyLikelihoodExpression> {
    if terms.is_empty() {
        return Ok(PyLikelihoodExpression(LikelihoodExpression::One));
    }
    if terms.len() == 1 {
        let term = &terms[0];
        if let Ok(expression) = term.extract::<PyLikelihoodExpression>() {
            return Ok(expression);
        }
        if let Ok(py_likelihood_id) = term.extract::<PyLikelihoodID>() {
            return Ok(PyLikelihoodExpression(LikelihoodExpression::Term(
                py_likelihood_id.0,
            )));
        }
        return Err(PyTypeError::new_err(
            "Item is neither a PyLikelihoodExpression nor a PyLikelihoodID",
        ));
    }
    let mut iter = terms.iter();
    let Some(first_term) = iter.next() else {
        return Ok(PyLikelihoodExpression(LikelihoodExpression::One));
    };
    if let Ok(first_expression) = first_term.extract::<PyLikelihoodExpression>() {
        let mut product = first_expression.clone();
        for term in iter {
            product = product.__mul__(term)?;
        }
        return Ok(product);
    }
    if let Ok(first_likelihood_id) = first_term.extract::<PyLikelihoodID>() {
        let mut product = PyLikelihoodExpression(LikelihoodExpression::Term(first_likelihood_id.0));
        for term in iter {
            product = product.__mul__(term)?;
        }
        return Ok(product);
    }
    Err(PyTypeError::new_err(
        "Elements must be PyLikelihoodExpression or PyLikelihoodID",
    ))
}

#[cfg(feature = "python")]
#[pyfunction(name = "LikelihoodZero")]
pub fn py_likelihood_zero() -> PyLikelihoodExpression {
    PyLikelihoodExpression(LikelihoodExpression::Zero)
}

#[cfg(feature = "python")]
#[pyfunction(name = "LikelihoodOne")]
pub fn py_likelihood_one() -> PyLikelihoodExpression {
    PyLikelihoodExpression(LikelihoodExpression::One)
}

#[cfg(feature = "python")]
#[pymethods]
impl PyLikelihoodExpression {
    fn __add__(&self, other: &Bound<'_, PyAny>) -> PyResult<PyLikelihoodExpression> {
        if let Ok(other_aid) = other.extract::<PyRef<PyLikelihoodID>>() {
            Ok(PyLikelihoodExpression(self.0.clone() + other_aid.0.clone()))
        } else if let Ok(other_expr) = other.extract::<PyLikelihoodExpression>() {
            Ok(PyLikelihoodExpression(
                self.0.clone() + other_expr.0.clone(),
            ))
        } else if let Ok(int) = other.extract::<usize>() {
            if int == 0 {
                Ok(PyLikelihoodExpression(self.0.clone()))
            } else {
                Err(PyTypeError::new_err(
                    "Addition with an integer for this type is only defined for 0",
                ))
            }
        } else {
            Err(PyTypeError::new_err("Unsupported operand type for +"))
        }
    }
    fn __radd__(&self, other: &Bound<'_, PyAny>) -> PyResult<PyLikelihoodExpression> {
        if let Ok(other_aid) = other.extract::<PyRef<PyLikelihoodID>>() {
            Ok(PyLikelihoodExpression(other_aid.0.clone() + self.0.clone()))
        } else if let Ok(other_expr) = other.extract::<PyLikelihoodExpression>() {
            Ok(PyLikelihoodExpression(
                other_expr.0.clone() + self.0.clone(),
            ))
        } else if let Ok(int) = other.extract::<usize>() {
            if int == 0 {
                Ok(PyLikelihoodExpression(self.0.clone()))
            } else {
                Err(PyTypeError::new_err(
                    "Addition with an integer for this type is only defined for 0",
                ))
            }
        } else {
            Err(PyTypeError::new_err("Unsupported operand type for +"))
        }
    }
    fn __mul__(&self, other: &Bound<'_, PyAny>) -> PyResult<PyLikelihoodExpression> {
        if let Ok(other_aid) = other.extract::<PyRef<PyLikelihoodID>>() {
            Ok(PyLikelihoodExpression(self.0.clone() * other_aid.0.clone()))
        } else if let Ok(other_expr) = other.extract::<PyLikelihoodExpression>() {
            Ok(PyLikelihoodExpression(
                self.0.clone() * other_expr.0.clone(),
            ))
        } else {
            Err(PyTypeError::new_err("Unsupported operand type for *"))
        }
    }
    fn __rmul__(&self, other: &Bound<'_, PyAny>) -> PyResult<PyLikelihoodExpression> {
        if let Ok(other_aid) = other.extract::<PyRef<PyLikelihoodID>>() {
            Ok(PyLikelihoodExpression(self.0.clone() * other_aid.0.clone()))
        } else if let Ok(other_expr) = other.extract::<PyLikelihoodExpression>() {
            Ok(PyLikelihoodExpression(
                self.0.clone() * other_expr.0.clone(),
            ))
        } else {
            Err(PyTypeError::new_err("Unsupported operand type for *"))
        }
    }
    fn __str__(&self) -> String {
        format!("{}", self.0)
    }
    fn __repr__(&self) -> String {
        format!("{:?}", self.0)
    }
}

/// An object which evaluates a [`LikelihoodExpression`] over the terms stored in a
/// [`LikelihoodManager`].
pub struct LikelihoodEvaluator {
    likelihood_manager: LikelihoodManager,
    likelihood_expression: LikelihoodExpression,
}

#[cfg(feature = "rayon")]
impl Function<ThreadPool, LadduError> for LikelihoodEvaluator {
    fn evaluate(
        &self,
        parameters: &[Float],
        thread_pool: &mut ThreadPool,
    ) -> Result<Float, LadduError> {
        thread_pool.install(|| self.evaluate(parameters))
    }
    fn gradient(
        &self,
        parameters: &[Float],
        thread_pool: &mut ThreadPool,
    ) -> Result<DVector<Float>, LadduError> {
        thread_pool.install(|| self.evaluate_gradient(parameters))
    }
}

#[cfg(not(feature = "rayon"))]
impl Function<(), LadduError> for LikelihoodEvaluator {
    fn evaluate(&self, parameters: &[Float], _user_data: &mut ()) -> Result<Float, LadduError> {
        self.evaluate(parameters)
    }
    fn gradient(
        &self,
        parameters: &[Float],
        _user_data: &mut (),
    ) -> Result<DVector<Float>, LadduError> {
        self.evaluate_gradient(parameters)
    }
}

pub(crate) struct NegativeLikelihoodEvaluator<'a>(&'a LikelihoodEvaluator);
#[cfg(feature = "rayon")]
impl Function<ThreadPool, LadduError> for NegativeLikelihoodEvaluator<'_> {
    fn evaluate(
        &self,
        parameters: &[Float],
        thread_pool: &mut ThreadPool,
    ) -> Result<Float, LadduError> {
        Function::evaluate(self.0, parameters, thread_pool).map(|res| -res)
    }
    fn gradient(
        &self,
        parameters: &[Float],
        thread_pool: &mut ThreadPool,
    ) -> Result<DVector<Float>, LadduError> {
        Function::gradient(self.0, parameters, thread_pool).map(|res| -res)
    }
}

#[cfg(not(feature = "rayon"))]
impl Function<(), LadduError> for NegativeLikelihoodEvaluator<'_> {
    fn evaluate(&self, parameters: &[Float], user_data: &mut ()) -> Result<Float, LadduError> {
        Function::evaluate(self.0, parameters, user_data).map(|res| -res)
    }
    fn gradient(
        &self,
        parameters: &[Float],
        user_data: &mut (),
    ) -> Result<DVector<Float>, LadduError> {
        Function::gradient(self.0, parameters, user_data).map(|res| -res)
    }
}
2402
2403impl LikelihoodEvaluator {
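    /// The names of the free parameters used by this evaluator, in the order expected
    /// by [`LikelihoodEvaluator::evaluate`].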
    pub fn parameters(&self) -> Vec<String> {
        self.likelihood_manager.parameters()
    }
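    /// Evaluate the stored [`LikelihoodExpression`] over all registered terms.
    ///
    /// Each term sees only its own parameters: the flat `parameters` slice is
    /// scattered into per-term buffers according to the layouts recorded by the
    /// [`LikelihoodManager`].
    ///
    /// A minimal usage sketch (the `evaluator` binding and parameter values are
    /// hypothetical):
    ///
    /// ```ignore
    /// // `evaluator` is a `LikelihoodEvaluator` with two free parameters
    /// let value: Float = evaluator.evaluate(&[1.0, 2.0])?;
    /// ```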
    pub fn evaluate(&self, parameters: &[Float]) -> Result<Float, LadduError> {
        let mut param_buffers: Vec<Vec<Float>> = self
            .likelihood_manager
            .param_counts
            .iter()
            .map(|&count| vec![0.0; count])
            .collect();
        for (layout, buffer) in self
            .likelihood_manager
            .param_layouts
            .iter()
            .zip(param_buffers.iter_mut())
        {
            for (buffer_idx, &param_idx) in layout.iter().enumerate() {
                buffer[buffer_idx] = parameters[param_idx];
            }
        }
        let likelihood_values = LikelihoodValues(
            self.likelihood_manager
                .terms
                .iter()
                .zip(param_buffers.iter())
                .map(|(term, buffer)| term.evaluate(buffer))
                .collect(),
        );
        Ok(self.likelihood_expression.evaluate(&likelihood_values))
    }

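    /// Evaluate the gradient of the stored [`LikelihoodExpression`] over all
    /// registered terms.
    ///
    /// Term gradients are computed in each term's local parameter space and then
    /// gathered back into the global parameter ordering before the expression's
    /// gradient is evaluated.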
    pub fn evaluate_gradient(&self, parameters: &[Float]) -> Result<DVector<Float>, LadduError> {
        let mut param_buffers: Vec<Vec<Float>> = self
            .likelihood_manager
            .param_counts
            .iter()
            .map(|&count| vec![0.0; count])
            .collect();
        for (layout, buffer) in self
            .likelihood_manager
            .param_layouts
            .iter()
            .zip(param_buffers.iter_mut())
        {
            for (buffer_idx, &param_idx) in layout.iter().enumerate() {
                buffer[buffer_idx] = parameters[param_idx];
            }
        }
        let likelihood_values = LikelihoodValues(
            self.likelihood_manager
                .terms
                .iter()
                .zip(param_buffers.iter())
                .map(|(term, buffer)| term.evaluate(buffer))
                .collect(),
        );
        let mut gradient_buffers: Vec<DVector<Float>> = (0..self.likelihood_manager.terms.len())
            .map(|_| DVector::zeros(self.likelihood_manager.param_names.len()))
            .collect();
        for (((term, param_buffer), gradient_buffer), layout) in self
            .likelihood_manager
            .terms
            .iter()
            .zip(param_buffers.iter())
            .zip(gradient_buffers.iter_mut())
            .zip(self.likelihood_manager.param_layouts.iter())
        {
            let term_gradient = term.evaluate_gradient(param_buffer);
            for (term_idx, &param_idx) in layout.iter().enumerate() {
                gradient_buffer[param_idx] = term_gradient[term_idx];
            }
        }
        let likelihood_gradients = LikelihoodGradients(gradient_buffers);
        Ok(self
            .likelihood_expression
            .evaluate_gradient(&likelihood_values, &likelihood_gradients))
    }

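    /// Minimize the likelihood over the free parameters, starting from `p0`, using the
    /// algorithm and settings in `options` (falling back to
    /// [`MinimizerOptions::default`] when `None` is given).
    ///
    /// A minimal usage sketch (the `evaluator` binding, starting point, and bounds are
    /// hypothetical):
    ///
    /// ```ignore
    /// // `evaluator` is a `LikelihoodEvaluator` with two free parameters
    /// let status = evaluator.minimize(
    ///     &[1.0, 2.0],                          // starting parameter values
    ///     Some(vec![(0.0, 10.0), (0.0, 10.0)]), // optional box bounds
    ///     None,                                 // default MinimizerOptions
    /// )?;
    /// ```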
    pub fn minimize(
        &self,
        p0: &[Float],
        bounds: Option<Vec<(Float, Float)>>,
        options: Option<MinimizerOptions>,
    ) -> Result<Status, LadduError> {
        let options = options.unwrap_or_default();
        let mut m = Minimizer::new(options.algorithm, self.parameters().len())
            .with_max_steps(options.max_steps);
        if let Some(bounds) = bounds {
            m = m.with_bounds(bounds);
        }
        for observer in options.observers {
            m = m.with_observer(observer);
        }
        #[cfg(feature = "rayon")]
        {
            m.minimize(
                self,
                p0,
                &mut ThreadPoolBuilder::new()
                    .num_threads(options.threads)
                    .build()
                    .map_err(LadduError::from)?,
                ganesh::abort_signal::CtrlCAbortSignal::new().boxed(),
            )?;
        }
        #[cfg(not(feature = "rayon"))]
        {
            m.minimize(
                self,
                p0,
                &mut (),
                ganesh::abort_signal::CtrlCAbortSignal::new().boxed(),
            )?;
        }
        Ok(m.status)
    }

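    /// Run an MCMC sampler for `n_steps` steps from the initial walker positions `p0`,
    /// returning the resulting [`Ensemble`]. The NLL is negated internally so that the
    /// sampler maximizes the log-probability.
    ///
    /// A minimal usage sketch (the `evaluator` binding, walker positions, and seed are
    /// hypothetical):
    ///
    /// ```ignore
    /// let mut rng = Rng::new();
    /// rng.seed(0);
    /// let p0 = vec![
    ///     DVector::from_vec(vec![1.0, 2.0]),
    ///     DVector::from_vec(vec![1.1, 2.1]),
    /// ];
    /// let ensemble = evaluator.mcmc(&p0, 1000, None, rng)?;
    /// ```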
    pub fn mcmc<T: AsRef<[DVector<Float>]>>(
        &self,
        p0: T,
        n_steps: usize,
        options: Option<MCMCOptions>,
        rng: Rng,
    ) -> Result<Ensemble, LadduError> {
        let options = options.unwrap_or(MCMCOptions::default_with_rng(rng));
        let mut m = Sampler::new(options.algorithm, p0.as_ref().to_vec());
        for observer in options.observers {
            m = m.with_observer(observer);
        }
        let func = NegativeLikelihoodEvaluator(self);
        #[cfg(feature = "rayon")]
        {
            m.sample(
                &func,
                &mut ThreadPoolBuilder::new()
                    .num_threads(options.threads)
                    .build()
                    .map_err(LadduError::from)?,
                n_steps,
                ganesh::abort_signal::CtrlCAbortSignal::new().boxed(),
            )?;
        }
        #[cfg(not(feature = "rayon"))]
        {
            m.sample(
                &func,
                &mut (),
                n_steps,
                ganesh::abort_signal::CtrlCAbortSignal::new().boxed(),
            )?;
        }
        Ok(m.ensemble)
    }
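    /// Run a particle-swarm optimization over the likelihood, starting from the given
    /// [`Swarm`] and returning the optimized swarm. The `rng` is only used when
    /// `options` is `None`, to build [`SwarmOptions::default_with_rng`].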
    pub fn swarm(
        &self,
        swarm: Swarm,
        options: Option<SwarmOptions>,
        rng: Rng,
    ) -> Result<Swarm, LadduError> {
        let options = options.unwrap_or(SwarmOptions::default_with_rng(rng));
        let mut m = SwarmMinimizer::new(options.algorithm, swarm);
        for observer in options.observers {
            m = m.with_observer(observer);
        }
        #[cfg(feature = "rayon")]
        {
            m.minimize(
                self,
                &mut ThreadPoolBuilder::new()
                    .num_threads(options.threads)
                    .build()
                    .map_err(LadduError::from)?,
                ganesh::abort_signal::CtrlCAbortSignal::new().boxed(),
            )?;
        }
        #[cfg(not(feature = "rayon"))]
        {
            m.minimize(
                self,
                &mut (),
                ganesh::abort_signal::CtrlCAbortSignal::new().boxed(),
            )?;
        }
        Ok(m.swarm)
    }
}

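/// Python-facing wrapper around [`LikelihoodEvaluator`].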
#[cfg(feature = "python")]
#[pyclass(name = "LikelihoodEvaluator", module = "laddu")]
pub struct PyLikelihoodEvaluator(LikelihoodEvaluator);

#[cfg(feature = "python")]
#[pymethods]
impl PyLikelihoodEvaluator {
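    /// The names of the free parameters used by this evaluator.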
    #[getter]
    fn parameters(&self) -> Vec<String> {
        self.0.parameters()
    }
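    /// Evaluate the likelihood at the given `parameters`, optionally limiting the
    /// number of `threads` used (all available cores by default when built with
    /// the `rayon` feature).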
    #[pyo3(signature = (parameters, *, threads=None))]
    fn evaluate(&self, parameters: Vec<Float>, threads: Option<usize>) -> PyResult<Float> {
        #[cfg(feature = "rayon")]
        {
            Ok(ThreadPoolBuilder::new()
                .num_threads(threads.unwrap_or_else(num_cpus::get))
                .build()
                .map_err(LadduError::from)?
                .install(|| self.0.evaluate(&parameters))?)
        }
        #[cfg(not(feature = "rayon"))]
        {
            Ok(self.0.evaluate(&parameters)?)
        }
    }
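    /// Evaluate the gradient of the likelihood at the given `parameters`, returned as
    /// a NumPy array; `threads` behaves as in `evaluate`.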
    #[pyo3(signature = (parameters, *, threads=None))]
    fn evaluate_gradient<'py>(
        &self,
        py: Python<'py>,
        parameters: Vec<Float>,
        threads: Option<usize>,
    ) -> PyResult<Bound<'py, PyArray1<Float>>> {
        #[cfg(feature = "rayon")]
        {
            Ok(PyArray1::from_slice(
                py,
                ThreadPoolBuilder::new()
                    .num_threads(threads.unwrap_or_else(num_cpus::get))
                    .build()
                    .map_err(LadduError::from)?
                    .install(|| self.0.evaluate_gradient(&parameters))?
                    .as_slice(),
            ))
        }
        #[cfg(not(feature = "rayon"))]
        {
            Ok(PyArray1::from_slice(
                py,
                self.0.evaluate_gradient(&parameters)?.as_slice(),
            ))
        }
    }

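    /// Minimize the likelihood starting from `p0`. Bounds may be given per parameter
    /// as `(lower, upper)` tuples, with `None` standing in for an unbounded side; the
    /// remaining keyword arguments configure the minimizer and its output.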
    #[pyo3(signature = (p0, *, bounds=None, method=None, observers=None, max_steps=4000, debug=false, verbose=false, show_step=true, show_x=true, show_fx=true, skip_hessian=false, threads=None))]
    #[allow(clippy::too_many_arguments)]
    fn minimize(
        &self,
        p0: Vec<Float>,
        bounds: Option<Vec<(Option<Float>, Option<Float>)>>,
        method: Option<Bound<'_, PyAny>>,
        observers: Option<Bound<'_, PyAny>>,
        max_steps: usize,
        debug: bool,
        verbose: bool,
        show_step: bool,
        show_x: bool,
        show_fx: bool,
        skip_hessian: bool,
        threads: Option<usize>,
    ) -> PyResult<PyStatus> {
        let bounds = bounds.map(|bounds_vec| {
            bounds_vec
                .iter()
                .map(|(opt_lb, opt_ub)| {
                    (
                        opt_lb.unwrap_or(Float::NEG_INFINITY),
                        opt_ub.unwrap_or(Float::INFINITY),
                    )
                })
                .collect()
        });
        let options = py_parse_minimizer_options(
            method,
            observers,
            max_steps,
            debug,
            verbose,
            show_step,
            show_x,
            show_fx,
            skip_hessian,
            threads,
        )?;
        let status = self.0.minimize(&p0, bounds, Some(options))?;
        Ok(PyStatus(status))
    }

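    /// Run an MCMC sampler for `n_steps` steps from the initial walker positions `p0`,
    /// seeding the random number generator with `seed`.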
    #[pyo3(signature = (p0, n_steps, *, method=None, observers=None, debug=false, verbose=false, seed=0, threads=None))]
    #[allow(clippy::too_many_arguments)]
    fn mcmc(
        &self,
        p0: Vec<Vec<Float>>,
        n_steps: usize,
        method: Option<Bound<'_, PyAny>>,
        observers: Option<Bound<'_, PyAny>>,
        debug: bool,
        verbose: bool,
        seed: u64,
        threads: Option<usize>,
    ) -> PyResult<PyEnsemble> {
        let p0 = p0.into_iter().map(DVector::from_vec).collect::<Vec<_>>();
        let mut rng = Rng::new();
        rng.seed(seed);
        let options =
            py_parse_mcmc_options(method, observers, debug, verbose, threads, rng.clone())?;
        let ensemble = self.0.mcmc(&p0, n_steps, Some(options), rng)?;
        Ok(PyEnsemble(ensemble))
    }

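    /// Run a particle-swarm optimization over the likelihood starting from the given
    /// `swarm`, seeding the random number generator with `seed`.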
    #[pyo3(signature = (swarm, *, method=None, observers=None, debug=false, verbose=false, seed=0, threads=None))]
    #[allow(clippy::too_many_arguments)]
    fn swarm(
        &self,
        swarm: PySwarm,
        method: Option<Bound<'_, PyAny>>,
        observers: Option<Bound<'_, PyAny>>,
        debug: bool,
        verbose: bool,
        seed: u64,
        threads: Option<usize>,
    ) -> PyResult<PySwarm> {
        let mut rng = Rng::new();
        rng.seed(seed);
        let options =
            py_parse_swarm_options(method, observers, debug, verbose, threads, rng.clone())?;
        let swarm = self.0.swarm(swarm.0, Some(options), rng)?;
        Ok(PySwarm(swarm))
    }
}

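/// A [`LikelihoodTerm`] which evaluates to a single free parameter, e.g. a scale
/// factor that other terms in a likelihood expression can be multiplied by.
///
/// A minimal sketch of the term's behavior (the parameter value here is arbitrary):
///
/// ```ignore
/// let scale = LikelihoodScalar::new("scale");
/// assert_eq!(scale.evaluate(&[2.5]), 2.5);
/// assert_eq!(scale.parameters(), vec!["scale".to_string()]);
/// ```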
#[derive(Clone)]
pub struct LikelihoodScalar(String);

impl LikelihoodScalar {
    pub fn new<T: AsRef<str>>(name: T) -> Box<Self> {
        Self(name.as_ref().into()).into()
    }
}

impl LikelihoodTerm for LikelihoodScalar {
    fn evaluate(&self, parameters: &[Float]) -> Float {
        parameters[0]
    }

    fn evaluate_gradient(&self, _parameters: &[Float]) -> DVector<Float> {
        DVector::from_vec(vec![1.0])
    }

    fn parameters(&self) -> Vec<String> {
        vec![self.0.clone()]
    }
}

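/// Python-facing constructor for [`LikelihoodScalar`].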
#[cfg(feature = "python")]
#[pyfunction(name = "LikelihoodScalar")]
pub fn py_likelihood_scalar(name: String) -> PyLikelihoodTerm {
    PyLikelihoodTerm(LikelihoodScalar::new(name))
}