Trait opencv::ml::SVM

pub trait SVM: StatModel {
    pub fn as_raw_SVM(&self) -> *const c_void;
    pub fn as_raw_mut_SVM(&mut self) -> *mut c_void;

    pub fn get_type(&self) -> Result<i32> { ... }
    pub fn set_type(&mut self, val: i32) -> Result<()> { ... }
    pub fn get_gamma(&self) -> Result<f64> { ... }
    pub fn set_gamma(&mut self, val: f64) -> Result<()> { ... }
    pub fn get_coef0(&self) -> Result<f64> { ... }
    pub fn set_coef0(&mut self, val: f64) -> Result<()> { ... }
    pub fn get_degree(&self) -> Result<f64> { ... }
    pub fn set_degree(&mut self, val: f64) -> Result<()> { ... }
    pub fn get_c(&self) -> Result<f64> { ... }
    pub fn set_c(&mut self, val: f64) -> Result<()> { ... }
    pub fn get_nu(&self) -> Result<f64> { ... }
    pub fn set_nu(&mut self, val: f64) -> Result<()> { ... }
    pub fn get_p(&self) -> Result<f64> { ... }
    pub fn set_p(&mut self, val: f64) -> Result<()> { ... }
    pub fn get_class_weights(&self) -> Result<Mat> { ... }
    pub fn set_class_weights(&mut self, val: &Mat) -> Result<()> { ... }
    pub fn get_term_criteria(&self) -> Result<TermCriteria> { ... }
    pub fn set_term_criteria(&mut self, val: TermCriteria) -> Result<()> { ... }
    pub fn get_kernel_type(&self) -> Result<i32> { ... }
    pub fn set_kernel(&mut self, kernel_type: i32) -> Result<()> { ... }
    pub fn set_custom_kernel(&mut self, _kernel: &Ptr<dyn SVM_Kernel>) -> Result<()> { ... }
    pub fn train_auto(
        &mut self,
        data: &Ptr<dyn TrainData>,
        k_fold: i32,
        cgrid: ParamGrid,
        gamma_grid: ParamGrid,
        p_grid: ParamGrid,
        nu_grid: ParamGrid,
        coeff_grid: ParamGrid,
        degree_grid: ParamGrid,
        balanced: bool
    ) -> Result<bool> { ... }
    pub fn train_auto_with_data(
        &mut self,
        samples: &dyn ToInputArray,
        layout: i32,
        responses: &dyn ToInputArray,
        k_fold: i32,
        cgrid: Ptr<ParamGrid>,
        gamma_grid: Ptr<ParamGrid>,
        p_grid: Ptr<ParamGrid>,
        nu_grid: Ptr<ParamGrid>,
        coeff_grid: Ptr<ParamGrid>,
        degree_grid: Ptr<ParamGrid>,
        balanced: bool
    ) -> Result<bool> { ... }
    pub fn get_support_vectors(&self) -> Result<Mat> { ... }
    pub fn get_uncompressed_support_vectors(&self) -> Result<Mat> { ... }
    pub fn get_decision_function(
        &self,
        i: i32,
        alpha: &mut dyn ToOutputArray,
        svidx: &mut dyn ToOutputArray
    ) -> Result<f64> { ... }
}

Support Vector Machines.

See also

the ml_intro_svm section of the OpenCV machine learning documentation
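A minimal configuration sketch using this trait from the opencv crate (not part of the upstream documentation), assuming the usual opencv::prelude::* imports. The setters take plain i32 values, so the SVM::Types and SVM::KernelTypes constants are written as their numeric values with the names in comments; training and automatic parameter search are sketched under the trainAuto methods further down.

use opencv::{ml, prelude::*};

fn main() -> opencv::Result<()> {
    // Create an empty model; the documented defaults are C_SVC with an RBF kernel.
    let mut svm = <dyn ml::SVM>::create()?;

    svm.set_type(100)?;  // SVM::C_SVC
    svm.set_kernel(2)?;  // SVM::RBF
    svm.set_c(2.5)?;
    svm.set_gamma(0.5)?;

    println!(
        "type={} kernel={} C={} gamma={}",
        svm.get_type()?,
        svm.get_kernel_type()?,
        svm.get_c()?,
        svm.get_gamma()?
    );
    Ok(())
}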

Required methods

pub fn as_raw_SVM(&self) -> *const c_void

pub fn as_raw_mut_SVM(&mut self) -> *mut c_void


Provided methods

pub fn get_type(&self) -> Result<i32>

Type of a SVM formulation. See SVM::Types. Default value is SVM::C_SVC.

See also

setType

pub fn set_type(&mut self, val: i32) -> Result<()>

Type of a SVM formulation. See SVM::Types. Default value is SVM::C_SVC.

See also

setType getType
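A hedged sketch of switching the formulation: the numbers below are the values behind OpenCV's SVM::Types (C_SVC = 100, NU_SVC = 101, ONE_CLASS = 102, EPS_SVR = 103, NU_SVR = 104), written as plain i32 because that is what set_type accepts.

use opencv::{ml, prelude::*};

const EPS_SVR: i32 = 103; // SVM::EPS_SVR, epsilon support vector regression

fn main() -> opencv::Result<()> {
    let mut svm = <dyn ml::SVM>::create()?;
    svm.set_type(EPS_SVR)?; // switch from the default C_SVC to epsilon-SVR
    svm.set_p(0.1)?;        // epsilon-SVR additionally needs the P (epsilon) parameter
    assert_eq!(svm.get_type()?, EPS_SVR);
    Ok(())
}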

pub fn get_gamma(&self) -> Result<f64>

Parameter gamma of a kernel function. For SVM::POLY, SVM::RBF, SVM::SIGMOID or SVM::CHI2. Default value is 1.

See also

setGamma

pub fn set_gamma(&mut self, val: f64) -> Result<()>

Parameter gamma of a kernel function. For SVM::POLY, SVM::RBF, SVM::SIGMOID or SVM::CHI2. Default value is 1.

See also

setGamma getGamma

pub fn get_coef0(&self) -> Result<f64>

Parameter coef0 of a kernel function. For SVM::POLY or SVM::SIGMOID. Default value is 0.

See also

setCoef0

pub fn set_coef0(&mut self, val: f64) -> Result<()>

Parameter coef0 of a kernel function. For SVM::POLY or SVM::SIGMOID. Default value is 0.

See also

setCoef0 getCoef0

pub fn get_degree(&self) -> Result<f64>

Parameter degree of a kernel function. For SVM::POLY. Default value is 0.

See also

setDegree

pub fn set_degree(&mut self, val: f64) -> Result<()>

Parameter degree of a kernel function. For SVM::POLY. Default value is 0.

See also

setDegree getDegree

pub fn get_c(&self) -> Result<f64>

Parameter C of a SVM optimization problem. For SVM::C_SVC, SVM::EPS_SVR or SVM::NU_SVR. Default value is 0.

See also

setC

pub fn set_c(&mut self, val: f64) -> Result<()>

Parameter C of a SVM optimization problem. For SVM::C_SVC, SVM::EPS_SVR or SVM::NU_SVR. Default value is 0.

See also

setC getC

pub fn get_nu(&self) -> Result<f64>

Parameter nu of a SVM optimization problem. For SVM::NU_SVC, SVM::ONE_CLASS or SVM::NU_SVR. Default value is 0.

See also

setNu

pub fn set_nu(&mut self, val: f64) -> Result<()>

Parameter nu of a SVM optimization problem. For SVM::NU_SVC, SVM::ONE_CLASS or SVM::NU_SVR. Default value is 0.

See also

setNu getNu

pub fn get_p(&self) -> Result<f64>

Parameter epsilon of a SVM optimization problem. For SVM::EPS_SVR. Default value is 0.

See also

setP

pub fn set_p(&mut self, val: f64) -> Result<()>

Parameter epsilon of a SVM optimization problem. For SVM::EPS_SVR. Default value is 0.

See also

setP getP

pub fn get_class_weights(&self) -> Result<Mat>

Optional weights in the SVM::C_SVC problem, assigned to particular classes. They are multiplied by C so the parameter C of class i becomes classWeights(i) * C. Thus these weights affect the misclassification penalty for different classes. The larger the weight, the larger the penalty on misclassification of data from the corresponding class. Default value is empty Mat.

See also

setClassWeights

pub fn set_class_weights(&mut self, val: &Mat) -> Result<()>

Optional weights in the SVM::C_SVC problem, assigned to particular classes. They are multiplied by C so the parameter C of class i becomes classWeights(i) * C. Thus these weights affect the misclassification penalty for different classes. The larger the weight, the larger the penalty on misclassification of data from the corresponding class. Default value is empty Mat.

See also

setClassWeights getClassWeights
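A sketch of building the weight matrix, assuming the Mat::from_slice constructor from the opencv crate (exact Mat constructors vary slightly between crate versions).

use opencv::{core::Mat, ml, prelude::*};

fn main() -> opencv::Result<()> {
    let mut svm = <dyn ml::SVM>::create()?;
    svm.set_type(100)?; // SVM::C_SVC -- class weights only apply to this formulation

    // One weight per class: mistakes on class 1 become five times as expensive as
    // mistakes on class 0, because the effective penalty for class i is classWeights(i) * C.
    let weights = Mat::from_slice(&[1.0f32, 5.0])?;
    svm.set_class_weights(&weights)?;

    let stored = svm.get_class_weights()?;
    println!("stored a {}x{} weight matrix", stored.rows(), stored.cols());
    Ok(())
}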

pub fn get_term_criteria(&self) -> Result<TermCriteria>

Termination criteria of the iterative SVM training procedure, which solves a partial case of a constrained quadratic optimization problem. You can specify the tolerance and/or the maximum number of iterations. Default value is TermCriteria( TermCriteria::MAX_ITER + TermCriteria::EPS, 1000, FLT_EPSILON ).

See also

setTermCriteria

pub fn set_term_criteria(&mut self, val: TermCriteria) -> Result<()>

Termination criteria of the iterative SVM training procedure, which solves a partial case of a constrained quadratic optimization problem. You can specify the tolerance and/or the maximum number of iterations. Default value is TermCriteria( TermCriteria::MAX_ITER + TermCriteria::EPS, 1000, FLT_EPSILON ).

See also

setTermCriteria getTermCriteria
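A sketch of tightening the stopping rule. It assumes core::TermCriteria::new(typ, max_count, epsilon) and public max_count/epsilon fields as generated by the opencv crate; the type flags are written numerically (COUNT/MAX_ITER = 1, EPS = 2, so 3 combines both).

use opencv::{core, ml, prelude::*};

fn main() -> opencv::Result<()> {
    let mut svm = <dyn ml::SVM>::create()?;

    // Stop after 100 iterations or once the tolerance drops below 1e-6, whichever comes first.
    let crit = core::TermCriteria::new(3, 100, 1e-6)?; // 3 == MAX_ITER + EPS
    svm.set_term_criteria(crit)?;

    let back = svm.get_term_criteria()?;
    println!("max_count={} epsilon={}", back.max_count, back.epsilon);
    Ok(())
}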

pub fn get_kernel_type(&self) -> Result<i32>

Type of a SVM kernel. See SVM::KernelTypes. Default value is SVM::RBF.

pub fn set_kernel(&mut self, kernel_type: i32) -> Result<()>

Initialize with one of the predefined kernels. See SVM::KernelTypes.
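A sketch of selecting a kernel; the number used below is the value behind SVM::KernelTypes (LINEAR = 0, POLY = 1, RBF = 2, SIGMOID = 3, CHI2 = 4, INTER = 5), passed as plain i32 because that is what set_kernel takes.

use opencv::{ml, prelude::*};

const POLY: i32 = 1; // SVM::POLY

fn main() -> opencv::Result<()> {
    let mut svm = <dyn ml::SVM>::create()?;
    svm.set_kernel(POLY)?;
    svm.set_degree(3.0)?; // the polynomial kernel also uses degree, gamma and coef0
    svm.set_gamma(1.0)?;
    svm.set_coef0(0.0)?;
    assert_eq!(svm.get_kernel_type()?, POLY);
    Ok(())
}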

pub fn set_custom_kernel(&mut self, _kernel: &Ptr<dyn SVM_Kernel>) -> Result<()>

Initialize with a custom kernel. See the SVM::Kernel class for implementation details.

pub fn train_auto(
    &mut self,
    data: &Ptr<dyn TrainData>,
    k_fold: i32,
    cgrid: ParamGrid,
    gamma_grid: ParamGrid,
    p_grid: ParamGrid,
    nu_grid: ParamGrid,
    coeff_grid: ParamGrid,
    degree_grid: ParamGrid,
    balanced: bool
) -> Result<bool>

Trains an SVM with optimal parameters.

Parameters

  • data: the training data that can be constructed using TrainData::create or TrainData::loadFromCSV.
  • kFold: Cross-validation parameter. The training set is divided into kFold subsets. One subset is used to test the model, the others form the train set. So, the SVM algorithm is executed kFold times.
  • Cgrid: grid for C
  • gammaGrid: grid for gamma
  • pGrid: grid for p
  • nuGrid: grid for nu
  • coeffGrid: grid for coeff
  • degreeGrid: grid for degree
  • balanced: If true and the problem is a 2-class classification, the method creates more balanced cross-validation subsets, that is, the class proportions in the subsets are close to those in the whole training dataset.

The method trains the SVM model automatically by choosing the optimal parameters C, gamma, p, nu, coef0, degree. Parameters are considered optimal when the cross-validation estimate of the test set error is minimal.

If there is no need to optimize a parameter, the corresponding grid step should be set to any value less than or equal to 1. For example, to avoid optimization in gamma, set gammaGrid.step = 0 and set gammaGrid.minVal and gammaGrid.maxVal to arbitrary numbers. In this case, the value set for gamma (see setGamma) is used.

And, finally, if the optimization in a parameter is required but the corresponding grid is unknown, you may call the function SVM::getDefaultGrid. To generate a grid, for example, for gamma, call SVM::getDefaultGrid(SVM::GAMMA).

This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the regression (SVM::EPS_SVR or SVM::NU_SVR) types. If the type is SVM::ONE_CLASS, no optimization is made and the usual SVM with the currently set parameters is executed.

C++ default parameters

  • k_fold: 10
  • cgrid: getDefaultGrid(C)
  • gamma_grid: getDefaultGrid(GAMMA)
  • p_grid: getDefaultGrid(P)
  • nu_grid: getDefaultGrid(NU)
  • coeff_grid: getDefaultGrid(COEF)
  • degree_grid: getDefaultGrid(DEGREE)
  • balanced: false
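A sketch of driving train_auto from Rust for training data that has already been assembled elsewhere. The grid IDs are the numeric values of SVM::ParamTypes (C = 0, GAMMA = 1, P = 2, NU = 3, COEF = 4, DEGREE = 5); Ptr is assumed to be opencv::core::Ptr, and get_default_grid is documented under Implementations below.

use opencv::{core::Ptr, ml, prelude::*};

/// Tune the model on prepared training data with 10-fold cross-validation,
/// searching every parameter over its default grid.
fn tune(svm: &mut Ptr<dyn ml::SVM>, data: &Ptr<dyn ml::TrainData>) -> opencv::Result<bool> {
    svm.train_auto(
        data,
        10,                                  // k_fold
        <dyn ml::SVM>::get_default_grid(0)?, // SVM::C
        <dyn ml::SVM>::get_default_grid(1)?, // SVM::GAMMA
        <dyn ml::SVM>::get_default_grid(2)?, // SVM::P
        <dyn ml::SVM>::get_default_grid(3)?, // SVM::NU
        <dyn ml::SVM>::get_default_grid(4)?, // SVM::COEF
        <dyn ml::SVM>::get_default_grid(5)?, // SVM::DEGREE
        true,                                // balanced folds for 2-class problems
    )
}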

pub fn train_auto_with_data(
    &mut self,
    samples: &dyn ToInputArray,
    layout: i32,
    responses: &dyn ToInputArray,
    k_fold: i32,
    cgrid: Ptr<ParamGrid>,
    gamma_grid: Ptr<ParamGrid>,
    p_grid: Ptr<ParamGrid>,
    nu_grid: Ptr<ParamGrid>,
    coeff_grid: Ptr<ParamGrid>,
    degree_grid: Ptr<ParamGrid>,
    balanced: bool
) -> Result<bool>

Trains an SVM with optimal parameters.

Parameters

  • samples: training samples
  • layout: See ml::SampleTypes.
  • responses: vector of responses associated with the training samples.
  • kFold: Cross-validation parameter. The training set is divided into kFold subsets. One subset is used to test the model, the others form the train set. So, the SVM algorithm is executed kFold times.
  • Cgrid: grid for C
  • gammaGrid: grid for gamma
  • pGrid: grid for p
  • nuGrid: grid for nu
  • coeffGrid: grid for coeff
  • degreeGrid: grid for degree
  • balanced: If true and the problem is a 2-class classification, the method creates more balanced cross-validation subsets, that is, the class proportions in the subsets are close to those in the whole training dataset.

The method trains the SVM model automatically by choosing the optimal parameters C, gamma, p, nu, coef0, degree. Parameters are considered optimal when the cross-validation estimate of the test set error is minimal.

This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only offers rudimentary parameter options.

This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the regression (SVM::EPS_SVR or SVM::NU_SVR) types. If the type is SVM::ONE_CLASS, no optimization is made and the usual SVM with the currently set parameters is executed.

C++ default parameters

  • k_fold: 10
  • cgrid: SVM::getDefaultGridPtr(SVM::C)
  • gamma_grid: SVM::getDefaultGridPtr(SVM::GAMMA)
  • p_grid: SVM::getDefaultGridPtr(SVM::P)
  • nu_grid: SVM::getDefaultGridPtr(SVM::NU)
  • coeff_grid: SVM::getDefaultGridPtr(SVM::COEF)
  • degree_grid: SVM::getDefaultGridPtr(SVM::DEGREE)
  • balanced: false
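A compact end-to-end sketch: toy samples are built with Mat::from_slice_2d (one sample per row, so layout 0 = ml::ROW_SAMPLE), all six grids come from get_default_grid_ptr (documented below), and the final prediction assumes the StatModel::predict(samples, results, flags) method generated by the crate; Mat::default() returns a Result in some older crate versions.

use opencv::{core::Mat, ml, prelude::*};

fn main() -> opencv::Result<()> {
    // Eight 2-D training samples, one per row, and their class labels.
    let samples = Mat::from_slice_2d(&[
        [501.0f32, 10.0], [508.0, 15.0], [495.0, 80.0], [510.0, 60.0], // class  1
        [10.0, 501.0],    [30.0, 480.0], [80.0, 495.0], [25.0, 510.0], // class -1
    ])?;
    let labels = Mat::from_slice(&[1i32, 1, 1, 1, -1, -1, -1, -1])?;

    let mut svm = <dyn ml::SVM>::create()?;
    svm.set_type(100)?; // SVM::C_SVC

    // Search C, gamma, p, nu, coef0 and degree over their default grids.
    svm.train_auto_with_data(
        &samples,
        0, // ml::ROW_SAMPLE: each sample is a row of `samples`
        &labels,
        2, // k_fold kept small because the toy set only has 8 samples
        <dyn ml::SVM>::get_default_grid_ptr(0)?, // SVM::C
        <dyn ml::SVM>::get_default_grid_ptr(1)?, // SVM::GAMMA
        <dyn ml::SVM>::get_default_grid_ptr(2)?, // SVM::P
        <dyn ml::SVM>::get_default_grid_ptr(3)?, // SVM::NU
        <dyn ml::SVM>::get_default_grid_ptr(4)?, // SVM::COEF
        <dyn ml::SVM>::get_default_grid_ptr(5)?, // SVM::DEGREE
        true,
    )?;

    // Classify a new point; predict comes from the StatModel super-trait.
    let query = Mat::from_slice(&[400.0f32, 20.0])?;
    let mut result = Mat::default();
    svm.predict(&query, &mut result, 0)?;
    println!("predicted label: {}", *result.at::<f32>(0)?);
    Ok(())
}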

pub fn get_support_vectors(&self) -> Result<Mat>

Retrieves all the support vectors.

The method returns all the support vectors as a floating-point matrix, where support vectors are stored as matrix rows.
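A sketch of inspecting a trained model (the element type of the returned matrix is f32, as stated above); svm is assumed to be a model trained beforehand, for example by the trainAuto example above.

use opencv::{ml, prelude::*};

fn dump_support_vectors(svm: &opencv::core::Ptr<dyn ml::SVM>) -> opencv::Result<()> {
    let sv = svm.get_support_vectors()?; // one support vector per row, CV_32F
    for r in 0..sv.rows() {
        for c in 0..sv.cols() {
            print!("{} ", sv.at_2d::<f32>(r, c)?);
        }
        println!();
    }
    Ok(())
}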

pub fn get_uncompressed_support_vectors(&self) -> Result<Mat>

Retrieves all the uncompressed support vectors of a linear SVM.

The method returns all the uncompressed support vectors of a linear SVM that the compressed support vector, used for prediction, was derived from. They are returned in a floating-point matrix, where the support vectors are stored as matrix rows.

pub fn get_decision_function(
    &self,
    i: i32,
    alpha: &mut dyn ToOutputArray,
    svidx: &mut dyn ToOutputArray
) -> Result<f64>

Retrieves the decision function.

Parameters

  • i: the index of the decision function. If the problem solved is regression, 1-class or 2-class classification, then there will be just one decision function and the index should always be 0. Otherwise, in the case of N-class classification, there will be N*(N-1)/2 decision functions.
  • alpha: the optional output vector for weights, corresponding to different support vectors. In the case of linear SVM all the alpha's will be 1's.
  • svidx: the optional output vector of indices of support vectors within the matrix of support vectors (which can be retrieved by SVM::getSupportVectors). In the case of linear SVM each decision function consists of a single "compressed" support vector.

The method returns the rho parameter of the decision function, a scalar subtracted from the weighted sum of kernel responses.
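A sketch of reading the decision function back from a trained two-class model, where index 0 is the only decision function; plain Mats serve as the output arrays (Mat::default() returns a Result in some older crate versions).

use opencv::{core::Mat, ml, prelude::*};

fn dump_decision_function(svm: &opencv::core::Ptr<dyn ml::SVM>) -> opencv::Result<()> {
    let mut alpha = Mat::default(); // weights of the contributing support vectors
    let mut svidx = Mat::default(); // their row indices within get_support_vectors()
    let rho = svm.get_decision_function(0, &mut alpha, &mut svidx)?;
    println!("rho = {}, alpha is {}x{}", rho, alpha.rows(), alpha.cols());
    Ok(())
}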


Implementations

impl<'_> dyn SVM + '_

pub fn get_default_grid(param_id: i32) -> Result<ParamGrid>

Generates a grid for SVM parameters.

Parameters

  • param_id: SVM parameter ID that must be one of the SVM::ParamTypes. The grid is generated for the parameter with this ID.

The function generates a grid for the specified parameter of the SVM algorithm. The grid may be passed to the function SVM::trainAuto.

pub fn get_default_grid_ptr(param_id: i32) -> Result<Ptr<ParamGrid>>

Generates a grid for SVM parameters.

Parameters

  • param_id: SVM parameter ID that must be one of the SVM::ParamTypes. The grid is generated for the parameter with this ID.

The function generates a grid pointer for the specified parameter of the SVM algorithm. The grid may be passed to the function SVM::trainAuto.

pub fn create() -> Result<Ptr<dyn SVM>>

Creates an empty model. Use StatModel::train to train the model. Since SVM has several parameters, you may want to find the best parameters for your problem; this can be done with SVM::trainAuto.

pub fn load(filepath: &str) -> Result<Ptr<dyn SVM>>

Loads and creates a serialized SVM from a file.

Use SVM::save to serialize and store an SVM to disk. Load the SVM from this file again by calling this function with the path to the file.

Parameters

  • filepath: path to serialized svm
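A round-trip sketch: save comes from the Algorithm super-trait (assumed to take a &str path) and requires an already trained model, while load is the function documented above.

use opencv::{ml, prelude::*};

fn roundtrip(trained: &opencv::core::Ptr<dyn ml::SVM>) -> opencv::Result<()> {
    trained.save("svm_params.yml")?; // Algorithm::save; fails if the model is untrained

    let restored = <dyn ml::SVM>::load("svm_params.yml")?;
    assert_eq!(restored.get_kernel_type()?, trained.get_kernel_type()?);
    Ok(())
}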

Implementors

impl SVM for PtrOfSVM
