// Auto-generated rustdoc search index for the `optimization` crate.
// `items` rows are [kind, name, parent-path, doc-string, path-index, signature];
// `paths` maps the numeric path indices back to [kind, name] pairs.
// NOTE(review): this file is normally regenerated by rustdoc — the typo fixes
// below ("defined", "number") should also be applied to the Rust doc comments
// they were generated from, or the next `cargo doc` run will reintroduce them.
var searchIndex = {}; searchIndex["optimization"] = {"doc":"Collection of various optimization algorithms and strategies.","items":[[3,"NumericalDifferentiation","optimization","Wraps a function for which to provide numeric differentiation.",null,null],[3,"FixedStepWidth","","Uses a fixed step width `γ` in each iteration instead of performing an actual line search.",null,null],[3,"ExactLineSearch","","Brute-force line search minimizing the objective function over a set of\nstep width candidates, also known as exact line search.",null,null],[3,"ArmijoLineSearch","","Backtracking line search evaluating the Armijo rule at each step width.",null,null],[3,"GradientDescent","","A simple Gradient Descent optimizer.",null,null],[0,"problems","","Common optimization problems for testing purposes.",null,null],[3,"Sphere","optimization::problems","n-dimensional Sphere function.",null,null],[3,"Rosenbrock","","Two-dimensional Rosenbrock function.",null,null],[8,"Problem","","Specifies a well known optimization problem.",null,null],[10,"dimensions","","Returns the dimensionality of the input domain.",0,null],[10,"domain","","Returns the input domain of the function in terms of upper and lower,\nrespectively, for each input dimension.",0,null],[10,"minimum","","Returns the position as well as the value of the global minimum.",0,null],[10,"random_start","","Generates a random and **feasible** position to start a minimization.",0,null],[11,"is_legal_position","","Tests whether the supplied position is legal for this function.",0,null],[11,"clone","","",1,null],[11,"fmt","","",1,null],[11,"new","","",1,{"inputs":[{"name":"usize"}],"output":{"name":"sphere"}}],[11,"default","","",1,{"inputs":[],"output":{"name":"self"}}],[11,"value","","",1,null],[11,"gradient","","",1,null],[11,"dimensions","","",1,null],[11,"domain","","",1,null],[11,"minimum","","",1,null],[11,"random_start","","",1,null],[11,"clone","","",2,null],[11,"fmt","","",2,null],[11,"new","","Creates a new `Rosenbrock` function given `a` and `b`, commonly defined\nwith 1 and 100, respectively, which also corresponds to the `default`.",2,{"inputs":[{"name":"f64"},{"name":"f64"}],"output":{"name":"rosenbrock"}}],[11,"default","","",2,{"inputs":[],"output":{"name":"self"}}],[11,"value","","",2,null],[11,"gradient","","",2,null],[11,"dimensions","","",2,null],[11,"domain","","",2,null],[11,"minimum","","",2,null],[11,"random_start","","",2,null],[11,"new","optimization","Creates a new differentiable function by using the supplied `function` in\ncombination with numeric differentiation to find the derivatives.",3,{"inputs":[{"name":"f"}],"output":{"name":"self"}}],[11,"value","","",3,null],[11,"gradient","","",3,null],[11,"default","","",3,{"inputs":[],"output":{"name":"self"}}],[11,"dimensions","","",3,null],[11,"domain","","",3,null],[11,"minimum","","",3,null],[11,"random_start","","",3,null],[11,"clone","","",4,null],[11,"fmt","","",4,null],[11,"new","","Creates a new `FixedStepWidth` given the static step width.",4,{"inputs":[{"name":"f64"}],"output":{"name":"fixedstepwidth"}}],[11,"search","","",4,null],[11,"clone","","",5,null],[11,"fmt","","",5,null],[11,"new","","Creates a new `ExactLineSearch` given the `start_step_width`, the `stop_step_width`\nand the `increase_factor`. The set of evaluated step widths `γ` is specified as\n`{ γ | γ = start_step_width · increase_factorⁱ, i ∈ N, γ <= stop_step_width }`,\nassuming that `start_step_width` < `stop_step_width` and `increase_factor` > 1.",5,{"inputs":[{"name":"f64"},{"name":"f64"},{"name":"f64"}],"output":{"name":"exactlinesearch"}}],[11,"search","","",5,null],[11,"clone","","",6,null],[11,"fmt","","",6,null],[11,"new","","Creates a new `ArmijoLineSearch` given the `control_parameter` ∈ (0, 1), the\n`initial_step_width` > 0 and the `decay_factor` ∈ (0, 1).",6,{"inputs":[{"name":"f64"},{"name":"f64"},{"name":"f64"}],"output":{"name":"armijolinesearch"}}],[11,"search","","",6,null],[11,"new","","Creates a new `GradientDescent` optimizer using the following defaults:",7,{"inputs":[],"output":{"name":"gradientdescent"}}],[11,"line_search","","Specifies the line search method to use.",7,null],[11,"gradient_tolerance","","Adjusts the gradient tolerance which is used as abort criterion to decide\nwhether we reached a plateau.",7,null],[11,"max_iterations","","Adjusts the number of maximally run iterations. A value of `None` instructs the\noptimizer to ignore the number of iterations.",7,null],[11,"minimize","","",7,null],[8,"Function","","Defines an objective function `f` that is subject to minimization.",null,null],[10,"value","","Computes the objective function at a given `position` `x`, i.e., `f(x) = y`.",8,null],[8,"Derivative1","","Defines an objective function `f` that is able to compute the first derivative\n`f'(x)` analytically.",null,null],[10,"gradient","","Computes the gradient of the objective function at a given `position` `x`,\ni.e., `∀ᵢ ∂/∂xᵢ f(x) = ∇f(x)`.",9,null],[8,"Minimizer","","Defines an optimizer that is able to minimize a given objective function `F`.",null,null],[16,"Solution","","",10,null],[10,"minimize","","Performs the actual minimization and returns a solution that\nmight be better than the initially provided one.",10,null],[8,"Evaluation","","Captures the essence of a function evaluation.",null,null],[10,"position","","Position `x` with the lowest corresponding value `f(x)`.",11,null],[10,"value","","The actual value `f(x)`.",11,null],[8,"LineSearch","","Define a line search method, i.e., choosing an appropriate step width.",null,null],[10,"search","","Performs the actual line search given the current `position` `x` and a `direction` to go to.\nReturns the new position.",12,null]],"paths":[[8,"Problem"],[3,"Sphere"],[3,"Rosenbrock"],[3,"NumericalDifferentiation"],[3,"FixedStepWidth"],[3,"ExactLineSearch"],[3,"ArmijoLineSearch"],[3,"GradientDescent"],[8,"Function"],[8,"Derivative1"],[8,"Minimizer"],[8,"Evaluation"],[8,"LineSearch"]]}; initSearch(searchIndex);