pub fn minimize_adam<F>(
    grad_func: F,
    x: Array1<f64>,
    data_provider: Box<dyn DataProvider>,
    options: AdamOptions,
) -> Result<OptimizeResult<f64>, OptimizeError>
where
    F: StochasticGradientFunction,
ADAM optimizer implementation: minimizes an objective by following stochastic gradients from grad_func, computed on mini-batches drawn from data_provider, starting from the initial parameter vector x and configured by options.
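
A minimal usage sketch follows. It is not taken from the crate's documentation: MyGrad and InMemoryData are hypothetical implementors of StochasticGradientFunction and DataProvider, the use of AdamOptions::default() assumes the options type implements Default, and access to the solution via result.x assumes a SciPy-style OptimizeResult; consult the trait and struct docs for the real definitions.

use ndarray::Array1;

// Hypothetical: a type implementing `StochasticGradientFunction`,
// e.g. one that returns the gradient of f(x) = ||x||^2 on each mini-batch.
let grad_func = MyGrad::new();

// Hypothetical: a `DataProvider` backed by an in-memory dataset `samples`.
let data_provider: Box<dyn DataProvider> = Box::new(InMemoryData::new(samples));

// Initial parameter vector and default Adam hyperparameters
// (assuming `AdamOptions` implements `Default`).
let x0: Array1<f64> = Array1::zeros(10);
let options = AdamOptions::default();

// Run the optimizer; assumes `OptimizeResult` exposes the solution as `x`.
let result = minimize_adam(grad_func, x0, data_provider, options)?;
println!("solution: {:?}", result.x);

Taking the data source as Box<dyn DataProvider> lets the same optimizer loop draw mini-batches from different backends (in-memory arrays, streamed data, and so on) without changing the gradient function.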