quantrs2_ml/tensorflow_compatibility/finitedifferencedifferentiator_traits.rs

//! # FiniteDifferenceDifferentiator - Trait Implementations
//!
//! This module contains trait implementations for `FiniteDifferenceDifferentiator`.
//!
//! ## Implemented Traits
//!
//! - `Default`
//! - `Differentiator`
//!
//! 🤖 Generated with [SplitRS](https://github.com/cool-japan/splitrs)
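//!
//! ## Example
//!
//! A minimal sketch of driving the differentiator. The construction of
//! `circuit`, `observable`, and `backend` is elided and assumed to use the
//! crate's own builders, so the block is marked `ignore`:
//!
//! ```ignore
//! let diff = FiniteDifferenceDifferentiator::default();
//! let grads = diff.differentiate(&circuit, &[0.1, 0.2], &observable, &backend)?;
//! assert_eq!(grads.len(), 2);
//! ```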

use crate::error::{MLError, Result};
use crate::simulator_backends::{DynamicCircuit, Observable, SimulationResult, SimulatorBackend};
use quantrs2_circuit::prelude::*;
use quantrs2_core::prelude::*;

use super::functions::Differentiator;
use super::types::FiniteDifferenceDifferentiator;

impl Default for FiniteDifferenceDifferentiator {
    fn default() -> Self {
        Self::new()
    }
}

impl Differentiator for FiniteDifferenceDifferentiator {
    fn differentiate(
        &self,
        circuit: &DynamicCircuit,
        parameters: &[f64],
        observable: &Observable,
        backend: &dyn SimulatorBackend,
    ) -> Result<Vec<f64>> {
        let mut gradients = Vec::with_capacity(parameters.len());
        for i in 0..parameters.len() {
            // Shift the i-th parameter up by epsilon and evaluate <O>.
            let mut params_plus = parameters.to_vec();
            params_plus[i] += self.epsilon;
            let exp_plus = backend.expectation_value(circuit, &params_plus, observable)?;

            // Shift the i-th parameter down by epsilon and evaluate <O>.
            let mut params_minus = parameters.to_vec();
            params_minus[i] -= self.epsilon;
            let exp_minus = backend.expectation_value(circuit, &params_minus, observable)?;

            // Central difference: d<O>/dθ_i ≈ (<O>_+ - <O>_-) / (2ε).
            gradients.push((exp_plus - exp_minus) / (2.0 * self.epsilon));
        }
        Ok(gradients)
    }

    fn name(&self) -> &str {
        "FiniteDifference"
    }
}
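
// A minimal sanity check of the central-difference rule used in
// `differentiate`, applied to a plain closure so no `SimulatorBackend` is
// required; an illustrative sketch rather than part of the generated module.
#[cfg(test)]
mod tests {
    #[test]
    fn central_difference_approximates_derivative() {
        let f = |x: f64| x.sin();
        let eps = 1e-5_f64;
        let x0 = 0.3_f64;
        // (f(x + ε) - f(x - ε)) / (2ε) ≈ f'(x) = cos(x)
        let grad = (f(x0 + eps) - f(x0 - eps)) / (2.0 * eps);
        assert!((grad - x0.cos()).abs() < 1e-8);
    }
}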