use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
use crate::booster::config::MissingNodeTreatment;
use crate::booster::config::*;
use crate::constraints::ConstraintMap;
use crate::errors::PerpetualError;
use crate::objective::Objective;
use crate::{ColumnarMatrix, Matrix, PerpetualBooster};
/// Multi-output gradient booster: trains one [`PerpetualBooster`] per target
/// column, all sharing a single [`BoosterConfig`].
#[derive(Clone, Serialize, Deserialize)]
pub struct MultiOutputBooster {
    // Number of output targets (== number of inner boosters).
    pub n_boosters: usize,
    // Shared configuration, mirrored into every inner booster by the setters.
    pub cfg: BoosterConfig,
    // One fitted/unfitted booster per output column.
    pub boosters: Vec<PerpetualBooster>,
    // Free-form user metadata; `#[serde(default)]` keeps older serialized
    // dumps (without this field) deserializable.
    #[serde(default)]
    pub metadata: HashMap<String, String>,
}
impl Default for MultiOutputBooster {
fn default() -> Self {
let cfg = BoosterConfig::default();
let n_boosters = 1;
let boosters = vec![{
PerpetualBooster {
cfg: cfg.clone(),
..Default::default()
}
}];
Self {
n_boosters,
cfg,
boosters,
metadata: HashMap::new(),
}
}
}
impl MultiOutputBooster {
/// Builds a `MultiOutputBooster` holding `n_boosters` identically configured
/// boosters (at least one, even when `n_boosters == 0`).
///
/// The supplied `timeout` covers the whole multi-output fit, so each inner
/// booster receives an equal share of it.
///
/// # Errors
/// Returns an error if the configuration fails `validate_parameters`.
#[allow(clippy::too_many_arguments)]
pub fn new(
    n_boosters: usize,
    objective: Objective,
    budget: f32,
    max_bin: u16,
    num_threads: Option<usize>,
    monotone_constraints: Option<ConstraintMap>,
    interaction_constraints: Option<Vec<Vec<usize>>>,
    force_children_to_bound_parent: bool,
    missing: f64,
    allow_missing_splits: bool,
    create_missing_branch: bool,
    terminate_missing_features: HashSet<usize>,
    missing_node_treatment: MissingNodeTreatment,
    log_iterations: usize,
    seed: u64,
    reset: Option<bool>,
    categorical_features: Option<HashSet<usize>>,
    timeout: Option<f32>,
    iteration_limit: Option<usize>,
    memory_limit: Option<f32>,
    stopping_rounds: Option<usize>,
    save_node_stats: bool,
) -> Result<Self, PerpetualError> {
    // Parameters are owned, so move them into the config directly; the
    // previous implementation cloned several of them needlessly.
    let cfg = BoosterConfig {
        objective,
        budget,
        max_bin,
        num_threads,
        monotone_constraints,
        interaction_constraints,
        force_children_to_bound_parent,
        missing,
        allow_missing_splits,
        create_missing_branch,
        terminate_missing_features,
        missing_node_treatment,
        log_iterations,
        seed,
        reset,
        categorical_features,
        // Split the global time budget evenly across boosters; `max(1)`
        // guards against division by zero.
        timeout: timeout.map(|t| t / n_boosters.max(1) as f32),
        iteration_limit,
        memory_limit,
        stopping_rounds,
        save_node_stats,
        calibration_method: CalibrationMethod::default(),
    };
    let template_booster = PerpetualBooster {
        cfg: cfg.clone(),
        ..Default::default()
    };
    // Validate once on the template; all boosters share the same config.
    template_booster.validate_parameters()?;
    let boosters = vec![template_booster; n_boosters.max(1)];
    Ok(MultiOutputBooster {
        n_boosters: n_boosters.max(1),
        cfg,
        boosters,
        metadata: HashMap::new(),
    })
}
/// Fits one booster per target column of `y`.
///
/// # Errors
/// Propagates the first failure from any inner booster. (Previously each
/// booster's `Result` was discarded with `let _ =`, so this method always
/// returned `Ok(())` even when training failed.)
pub fn fit(
    &mut self,
    data: &Matrix<f64>,
    y: &Matrix<f64>,
    sample_weight: Option<&[f64]>,
    group: Option<&[u64]>,
) -> Result<(), PerpetualError> {
    for i in 0..self.n_boosters {
        // Booster `i` is trained against target column `i`.
        self.boosters[i].fit(data, y.get_col(i), sample_weight, group)?;
    }
    Ok(())
}
/// Columnar-data counterpart of [`fit`]: trains one booster per target column.
///
/// # Errors
/// Propagates the first failure from any inner booster (errors were
/// previously swallowed with `let _ =`).
pub fn fit_columnar(
    &mut self,
    data: &ColumnarMatrix<f64>,
    y: &Matrix<f64>,
    sample_weight: Option<&[f64]>,
    group: Option<&[u64]>,
) -> Result<(), PerpetualError> {
    for i in 0..self.n_boosters {
        self.boosters[i].fit_columnar(data, y.get_col(i), sample_weight, group)?;
    }
    Ok(())
}
/// Prunes every inner booster against its own target column of `y`.
///
/// # Errors
/// Propagates the first failure from any inner booster (errors were
/// previously swallowed with `let _ =`).
pub fn prune(
    &mut self,
    data: &Matrix<f64>,
    y: &Matrix<f64>,
    sample_weight: Option<&[f64]>,
    group: Option<&[u64]>,
) -> Result<(), PerpetualError> {
    for i in 0..self.n_boosters {
        self.boosters[i].prune(data, y.get_col(i), sample_weight, group)?;
    }
    Ok(())
}
/// Calibrates every inner booster with `method`, feeding booster `i` the
/// i-th column of the calibration targets.
///
/// # Errors
/// Fails fast when `save_node_stats` was not enabled (calibration relies on
/// per-node statistics), or when any inner calibration fails.
pub fn calibrate(
    &mut self,
    method: CalibrationMethod,
    data_cal: (&Matrix<f64>, &Matrix<f64>, &[f64]),
) -> Result<(), PerpetualError> {
    if !self.cfg.save_node_stats {
        return Err(PerpetualError::InvalidParameter(
            "save_node_stats".to_string(),
            "true".to_string(),
            "false".to_string(),
        ));
    }
    self.cfg.calibration_method = method;
    let (x_cal, ys_cal, alpha) = data_cal;
    for idx in 0..self.n_boosters {
        self.boosters[idx].calibrate(method, (x_cal, ys_cal.get_col(idx), alpha))?;
    }
    Ok(())
}
/// Conformal calibration for every inner booster: booster `i` is calibrated
/// on training column `i` of `y` and calibration column `i` of the
/// calibration targets.
pub fn calibrate_conformal(
    &mut self,
    data: &Matrix<f64>,
    y: &Matrix<f64>,
    sample_weight: Option<&[f64]>,
    group: Option<&[u64]>,
    data_cal: (&Matrix<f64>, &Matrix<f64>, &[f64]),
) -> Result<(), PerpetualError> {
    self.cfg.calibration_method = CalibrationMethod::Conformal;
    let (x_cal, ys_cal, alpha) = data_cal;
    for idx in 0..self.n_boosters {
        let cal_target = ys_cal.get_col(idx);
        self.boosters[idx].calibrate_conformal(
            data,
            y.get_col(idx),
            sample_weight,
            group,
            (x_cal, cal_target, alpha),
        )?;
    }
    Ok(())
}
/// Columnar-data counterpart of [`calibrate`].
///
/// # Errors
/// Fails fast when `save_node_stats` was not enabled, or when any inner
/// calibration fails.
pub fn calibrate_columnar(
    &mut self,
    method: CalibrationMethod,
    data_cal: (&ColumnarMatrix<f64>, &Matrix<f64>, &[f64]),
) -> Result<(), PerpetualError> {
    if !self.cfg.save_node_stats {
        return Err(PerpetualError::InvalidParameter(
            "save_node_stats".to_string(),
            "true".to_string(),
            "false".to_string(),
        ));
    }
    self.cfg.calibration_method = method;
    let (x_cal, ys_cal, alpha) = data_cal;
    for idx in 0..self.n_boosters {
        self.boosters[idx].calibrate_columnar(method, (x_cal, ys_cal.get_col(idx), alpha))?;
    }
    Ok(())
}
/// Columnar-data counterpart of [`calibrate_conformal`].
pub fn calibrate_conformal_columnar(
    &mut self,
    data: &ColumnarMatrix<f64>,
    y: &Matrix<f64>,
    sample_weight: Option<&[f64]>,
    group: Option<&[u64]>,
    data_cal: (&ColumnarMatrix<f64>, &Matrix<f64>, &[f64]),
) -> Result<(), PerpetualError> {
    self.cfg.calibration_method = CalibrationMethod::Conformal;
    let (x_cal, ys_cal, alpha) = data_cal;
    for idx in 0..self.n_boosters {
        let cal_target = ys_cal.get_col(idx);
        self.boosters[idx].calibrate_conformal_columnar(
            data,
            y.get_col(idx),
            sample_weight,
            group,
            (x_cal, cal_target, alpha),
        )?;
    }
    Ok(())
}
/// Read-only view of the per-target boosters.
pub fn get_boosters(&self) -> &[PerpetualBooster] {
    self.boosters.as_slice()
}
/// Builder-style setter: resizes the ensemble to `n_boosters` copies of the
/// first booster (which therefore acts as the template).
pub fn set_n_boosters(mut self, n_boosters: usize) -> Self {
    self.n_boosters = n_boosters;
    let template = self.boosters[0].clone();
    self.boosters = vec![template; n_boosters];
    self
}
/// Builder-style setter: updates the shared config and pushes the objective
/// into every inner booster.
pub fn set_objective(mut self, objective: Objective) -> Self {
    self.cfg.objective = objective.clone();
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned
        .into_iter()
        .map(|b| b.set_objective(objective.clone()))
        .collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the budget
/// into every inner booster.
pub fn set_budget(mut self, budget: f32) -> Self {
    self.cfg.budget = budget;
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned.into_iter().map(|b| b.set_budget(budget)).collect();
    self
}
/// Builder-style setter: updates the shared config and pushes `max_bin`
/// into every inner booster.
pub fn set_max_bin(mut self, max_bin: u16) -> Self {
    self.cfg.max_bin = max_bin;
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned.into_iter().map(|b| b.set_max_bin(max_bin)).collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the thread
/// count into every inner booster.
pub fn set_num_threads(mut self, num_threads: Option<usize>) -> Self {
    self.cfg.num_threads = num_threads;
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned
        .into_iter()
        .map(|b| b.set_num_threads(num_threads))
        .collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the monotone
/// constraints into every inner booster.
pub fn set_monotone_constraints(mut self, monotone_constraints: Option<ConstraintMap>) -> Self {
    self.cfg.monotone_constraints = monotone_constraints.clone();
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned
        .into_iter()
        .map(|b| b.set_monotone_constraints(monotone_constraints.clone()))
        .collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the
/// interaction constraints into every inner booster.
pub fn set_interaction_constraints(mut self, interaction_constraints: Option<Vec<Vec<usize>>>) -> Self {
    self.cfg.interaction_constraints = interaction_constraints.clone();
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned
        .into_iter()
        .map(|b| b.set_interaction_constraints(interaction_constraints.clone()))
        .collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the flag
/// into every inner booster.
pub fn set_force_children_to_bound_parent(mut self, force_children_to_bound_parent: bool) -> Self {
    self.cfg.force_children_to_bound_parent = force_children_to_bound_parent;
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned
        .into_iter()
        .map(|b| b.set_force_children_to_bound_parent(force_children_to_bound_parent))
        .collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the missing
/// sentinel value into every inner booster.
pub fn set_missing(mut self, missing: f64) -> Self {
    self.cfg.missing = missing;
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned.into_iter().map(|b| b.set_missing(missing)).collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the flag
/// into every inner booster.
///
/// BUG FIX: this previously wrote the value into
/// `self.cfg.create_missing_branch` instead of
/// `self.cfg.allow_missing_splits` (compare the field assignment in `new`),
/// silently corrupting the unrelated `create_missing_branch` option while
/// leaving `allow_missing_splits` stale in the shared config.
pub fn set_allow_missing_splits(mut self, allow_missing_splits: bool) -> Self {
    self.cfg.allow_missing_splits = allow_missing_splits;
    self.boosters = self
        .boosters
        .iter()
        .map(|b| b.clone().set_allow_missing_splits(allow_missing_splits))
        .collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the flag
/// into every inner booster.
pub fn set_create_missing_branch(mut self, create_missing_branch: bool) -> Self {
    self.cfg.create_missing_branch = create_missing_branch;
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned
        .into_iter()
        .map(|b| b.set_create_missing_branch(create_missing_branch))
        .collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the feature
/// set into every inner booster.
pub fn set_terminate_missing_features(mut self, terminate_missing_features: HashSet<usize>) -> Self {
    self.cfg.terminate_missing_features = terminate_missing_features.clone();
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned
        .into_iter()
        .map(|b| b.set_terminate_missing_features(terminate_missing_features.clone()))
        .collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the treatment
/// into every inner booster.
pub fn set_missing_node_treatment(mut self, missing_node_treatment: MissingNodeTreatment) -> Self {
    self.cfg.missing_node_treatment = missing_node_treatment;
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned
        .into_iter()
        .map(|b| b.set_missing_node_treatment(missing_node_treatment))
        .collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the logging
/// interval into every inner booster.
pub fn set_log_iterations(mut self, log_iterations: usize) -> Self {
    self.cfg.log_iterations = log_iterations;
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned
        .into_iter()
        .map(|b| b.set_log_iterations(log_iterations))
        .collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the RNG seed
/// into every inner booster.
pub fn set_seed(mut self, seed: u64) -> Self {
    self.cfg.seed = seed;
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned.into_iter().map(|b| b.set_seed(seed)).collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the reset
/// flag into every inner booster.
pub fn set_reset(mut self, reset: Option<bool>) -> Self {
    self.cfg.reset = reset;
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned.into_iter().map(|b| b.set_reset(reset)).collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the
/// categorical feature set into every inner booster.
pub fn set_categorical_features(mut self, categorical_features: Option<HashSet<usize>>) -> Self {
    self.cfg.categorical_features = categorical_features.clone();
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned
        .into_iter()
        .map(|b| b.set_categorical_features(categorical_features.clone()))
        .collect();
    self
}
/// Builder-style setter: the given timeout covers the whole multi-output
/// fit, so each inner booster receives an equal share of it.
pub fn set_timeout(mut self, timeout: Option<f32>) -> Self {
    let per_booster = timeout.map(|t| t / self.n_boosters.max(1) as f32);
    self.cfg.timeout = per_booster;
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned
        .into_iter()
        .map(|b| b.set_timeout(per_booster))
        .collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the iteration
/// limit into every inner booster.
pub fn set_iteration_limit(mut self, iteration_limit: Option<usize>) -> Self {
    self.cfg.iteration_limit = iteration_limit;
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned
        .into_iter()
        .map(|b| b.set_iteration_limit(iteration_limit))
        .collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the memory
/// limit into every inner booster.
pub fn set_memory_limit(mut self, memory_limit: Option<f32>) -> Self {
    self.cfg.memory_limit = memory_limit;
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned
        .into_iter()
        .map(|b| b.set_memory_limit(memory_limit))
        .collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the stopping
/// rounds into every inner booster.
pub fn set_stopping_rounds(mut self, stopping_rounds: Option<usize>) -> Self {
    self.cfg.stopping_rounds = stopping_rounds;
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned
        .into_iter()
        .map(|b| b.set_stopping_rounds(stopping_rounds))
        .collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the flag
/// into every inner booster. Required `true` for `calibrate`.
pub fn set_save_node_stats(mut self, save_node_stats: bool) -> Self {
    self.cfg.save_node_stats = save_node_stats;
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned
        .into_iter()
        .map(|b| b.set_save_node_stats(save_node_stats))
        .collect();
    self
}
/// Builder-style setter: updates the shared config and pushes the
/// calibration method into every inner booster.
pub fn set_calibration_method(mut self, calibration_method: CalibrationMethod) -> Self {
    self.cfg.calibration_method = calibration_method;
    let owned = std::mem::take(&mut self.boosters);
    self.boosters = owned
        .into_iter()
        .map(|b| b.set_calibration_method(calibration_method))
        .collect();
    self
}
/// Stores (or overwrites) a user-defined metadata entry.
pub fn insert_metadata(&mut self, key: String, value: String) {
    self.metadata.insert(key, value);
}
/// Looks up a metadata value by key, returning an owned copy.
///
/// Takes `&str` instead of the previous `&String` (idiomatic Rust);
/// existing call sites passing `&String` still compile via deref coercion.
pub fn get_metadata(&self, key: &str) -> Option<String> {
    self.metadata.get(key).cloned()
}
/// Mean partial-dependence value for `feature` at `value`, averaged across
/// all inner boosters.
pub fn value_partial_dependence(&self, feature: usize, value: f64) -> f64 {
    let total: f64 = self
        .boosters
        .iter()
        .map(|b| b.value_partial_dependence(feature, value))
        .sum();
    total / self.n_boosters as f64
}
/// Per-feature importance, averaged across all inner boosters.
pub fn calculate_feature_importance(&self, method: ImportanceMethod, normalize: bool) -> HashMap<usize, f32> {
    // Accumulate each booster's importance per feature, then divide by the
    // booster count to obtain the mean.
    let mut totals: HashMap<usize, f32> = HashMap::new();
    for booster in &self.boosters {
        for (feature, value) in booster.calculate_feature_importance(method.clone(), normalize) {
            *totals.entry(feature).or_insert(0.0) += value;
        }
    }
    let divisor = self.n_boosters as f32;
    totals.into_iter().map(|(k, v)| (k, v / divisor)).collect()
}
}
impl BoosterIO for MultiOutputBooster {
    /// Deserializes a booster from JSON, migrating legacy field layouts in
    /// place before decoding.
    fn from_json(json_str: &str) -> Result<Self, PerpetualError> {
        let mut raw: serde_json::Value = serde_json::from_str(json_str)
            .map_err(|e| PerpetualError::UnableToRead(e.to_string()))?;
        // Older dumps used a different schema; patch the raw value first.
        crate::booster::core::fix_legacy_value(&mut raw);
        serde_json::from_value::<Self>(raw).map_err(|e| PerpetualError::UnableToRead(e.to_string()))
    }
}
#[cfg(test)]
mod multi_output_booster_test {
    //! Integration-style test: one-vs-rest multi-class setup on the forest
    //! cover-type dataset (one booster per class).
    use crate::Matrix;
    use crate::objective::Objective;
    use crate::{MultiOutputBooster, utils::between};
    use std::error::Error;
    use std::fs::File;
    use std::io::BufReader;

    /// Reads the named feature columns plus the `Cover_Type` target from a
    /// CSV file. Returns (column-major flattened feature data, target vector);
    /// empty or unparsable cells become `f64::NAN`.
    fn read_data(path: &str, feature_names: &[&str]) -> Result<(Vec<f64>, Vec<f64>), Box<dyn Error>> {
        let target_name = "Cover_Type";
        let file = File::open(path)?;
        let reader = BufReader::new(file);
        let mut csv_reader = csv::ReaderBuilder::new().has_headers(true).from_reader(reader);
        let headers = csv_reader.headers()?.clone();
        // Map each requested feature name to its column index in the header.
        let feature_indices: Vec<usize> = feature_names
            .iter()
            .map(|&name| headers.iter().position(|h| h == name).unwrap())
            .collect();
        let target_index = headers.iter().position(|h| h == target_name).unwrap();
        let mut data_columns: Vec<Vec<f64>> = vec![Vec::new(); feature_names.len()];
        let mut y = Vec::new();
        for result in csv_reader.records() {
            let record = result?;
            let target_str = &record[target_index];
            let target_val = if target_str.is_empty() {
                f64::NAN
            } else {
                target_str.parse::<f64>().unwrap_or(f64::NAN)
            };
            y.push(target_val);
            for (i, &idx) in feature_indices.iter().enumerate() {
                let val_str = &record[idx];
                let val = if val_str.is_empty() {
                    f64::NAN
                } else {
                    val_str.parse::<f64>().unwrap_or(f64::NAN)
                };
                data_columns[i].push(val);
            }
        }
        // Flatten column-by-column: the result is column-major.
        let data: Vec<f64> = data_columns.into_iter().flatten().collect();
        Ok((data, y))
    }

    #[test]
    fn test_multi_output_booster() -> Result<(), Box<dyn Error>> {
        let n_classes = 7;
        let n_columns = 54;
        let n_rows = 500;
        let max_bin = 5;
        let mut features: Vec<&str> = [
            "Elevation",
            "Aspect",
            "Slope",
            "Horizontal_Distance_To_Hydrology",
            "Vertical_Distance_To_Hydrology",
            "Horizontal_Distance_To_Roadways",
            "Hillshade_9am",
            "Hillshade_Noon",
            "Hillshade_3pm",
            "Horizontal_Distance_To_Fire_Points",
            "Wilderness_Area_0",
            "Wilderness_Area_1",
            "Wilderness_Area_2",
            "Wilderness_Area_3",
        ]
        .to_vec();
        // The 40 one-hot soil-type columns are generated by name.
        let soil_types = (0..40).map(|i| format!("{}_{}", "Soil_Type", i)).collect::<Vec<_>>();
        let s_types = soil_types.iter().map(|s| s.as_str()).collect::<Vec<_>>();
        features.extend(s_types);
        let (data_full, y_full) = read_data("resources/cover_types_test.csv", &features)?;
        let rows_full = y_full.len();
        let limit = n_rows.min(rows_full);
        // Keep only the first `limit` rows of every (column-major) column.
        let mut data = Vec::new();
        for c in 0..n_columns {
            let col_start = c * rows_full;
            data.extend_from_slice(&data_full[col_start..col_start + limit]);
        }
        let y_test = y_full[0..limit].to_vec();
        let data_matrix = Matrix::new(&data, y_test.len(), n_columns);
        // One-vs-rest targets: column i is 1.0 where the label equals class
        // i + 1 (Cover_Type labels appear to be 1-based — confirm with data).
        let mut y_vec: Vec<Vec<f64>> = Vec::new();
        for i in 0..n_classes {
            y_vec.push(
                y_test
                    .iter()
                    .map(|y| if (*y as usize) == (i + 1) { 1.0 } else { 0.0 })
                    .collect(),
            );
        }
        let y_data = y_vec.into_iter().flatten().collect::<Vec<f64>>();
        let y = Matrix::new(&y_data, y_test.len(), n_classes);
        let mut booster = MultiOutputBooster::default()
            .set_objective(Objective::LogLoss)
            .set_max_bin(max_bin)
            .set_n_boosters(n_classes)
            .set_budget(0.1)
            .set_iteration_limit(Some(5))
            .set_memory_limit(Some(0.001));
        println!("The number of boosters: {:?}", booster.get_boosters().len());
        assert!(booster.get_boosters().len() == n_classes);
        booster.fit(&data_matrix, &y, None, None).unwrap();
        let probas = booster.predict_proba(&data_matrix, true);
        // The class probabilities of the first row should sum to ~1.
        assert!(between(0.999, 1.001, probas[0..n_classes].iter().sum::<f64>() as f32));
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    //! Unit tests for `MultiOutputBooster` construction, setters,
    //! serialization, fitting, pruning, calibration, and reporting helpers.
    use super::*;
    use crate::Matrix;
    use crate::PerpetualBooster;
    use crate::booster::config::BoosterIO;
    use crate::objective::Objective;

    // `new` with explicit arguments yields the requested booster count.
    #[test]
    fn test_multi_output_new() {
        let booster = MultiOutputBooster::new(
            2,
            Objective::SquaredLoss,
            0.5,
            256,
            None,
            None,
            None,
            false,
            f64::NAN,
            true,
            true,
            std::collections::HashSet::new(),
            crate::booster::config::MissingNodeTreatment::AverageNodeWeight,
            10,
            42,
            None,
            None,
            None,
            None,
            None,
            None,
            false,
        )
        .unwrap();
        assert_eq!(booster.n_boosters, 2);
        assert_eq!(booster.boosters.len(), 2);
    }

    // Builder setters must propagate into every inner booster's config.
    #[test]
    fn test_multi_output_setters() {
        let mut booster = MultiOutputBooster::default();
        booster = booster.set_n_boosters(3);
        assert_eq!(booster.n_boosters, 3);
        assert_eq!(booster.boosters.len(), 3);
        booster = booster.set_objective(Objective::LogLoss);
        for b in &booster.boosters {
            match b.cfg.objective {
                Objective::LogLoss => {}
                _ => panic!("Objective is not LogLoss"),
            }
        }
        booster = booster.set_budget(1.0);
        for b in &booster.boosters {
            assert_eq!(b.cfg.budget, 1.0);
        }
        booster = booster.set_max_bin(128);
        for b in &booster.boosters {
            assert_eq!(b.cfg.max_bin, 128);
        }
    }

    // JSON round-trip through `json_dump` / `from_json`.
    #[test]
    fn test_multi_output_serialization() {
        let booster = MultiOutputBooster {
            n_boosters: 1,
            boosters: vec![PerpetualBooster::default()],
            ..Default::default()
        };
        let json = booster.json_dump().unwrap();
        let booster2 = MultiOutputBooster::from_json(&json).unwrap();
        assert_eq!(booster2.n_boosters, 1);
    }

    // Fitting on a tiny 2x2 problem grows trees in every booster.
    #[test]
    fn test_multi_output_fit() {
        let mut booster = MultiOutputBooster::default();
        booster = booster.set_n_boosters(2);
        booster = booster.set_budget(0.1);
        let data = Matrix::new(&[1.0, 2.0, 3.0, 4.0], 2, 2);
        let y = Matrix::new(&[1.0, 0.0, 0.0, 1.0], 2, 2);
        booster.fit(&data, &y, None, None).unwrap();
        assert_eq!(booster.boosters.len(), 2);
        assert!(!booster.boosters[0].trees.is_empty());
        assert!(!booster.boosters[1].trees.is_empty());
    }

    // Calibration requires `save_node_stats` and fills `cal_params`.
    #[test]
    fn test_multi_output_calibrate() {
        let mut booster = MultiOutputBooster::default();
        booster = booster.set_n_boosters(2);
        booster = booster.set_save_node_stats(true);
        booster = booster.set_budget(0.1);
        let data = Matrix::new(&[1.0, 2.0, 3.0, 4.0], 2, 2);
        let y = Matrix::new(&[1.0, 0.0, 0.0, 1.0], 2, 2);
        booster.fit(&data, &y, None, None).unwrap();
        let alpha = vec![0.05, 0.95];
        let data_cal = (&data, &y, alpha.as_slice());
        booster
            .calibrate(crate::booster::config::CalibrationMethod::WeightVariance, data_cal)
            .unwrap();
        for b in &booster.boosters {
            assert!(!b.cal_params.is_empty());
        }
    }

    // Conformal calibration fills `cal_models` (no node stats required).
    #[test]
    fn test_multi_output_calibrate_conformal() {
        let mut booster = MultiOutputBooster::default();
        booster = booster.set_n_boosters(2);
        booster = booster.set_budget(0.1);
        let data = Matrix::new(&[1.0, 2.0, 3.0, 4.0], 2, 2);
        let y = Matrix::new(&[1.0, 0.0, 0.0, 1.0], 2, 2);
        booster.fit(&data, &y, None, None).unwrap();
        let alpha = vec![0.05, 0.95];
        let data_cal = (&data, &y, alpha.as_slice());
        booster.calibrate_conformal(&data, &y, None, None, data_cal).unwrap();
        for b in &booster.boosters {
            assert!(!b.cal_models.is_empty());
        }
    }

    // Columnar fitting path mirrors `fit`.
    #[test]
    fn test_multi_output_fit_columnar() {
        let mut booster = MultiOutputBooster::default();
        booster = booster.set_n_boosters(2);
        booster = booster.set_budget(0.1);
        let data_vec = [1.0, 2.0, 3.0, 4.0];
        let col0 = &data_vec[0..2];
        let col1 = &data_vec[2..4];
        let data = ColumnarMatrix::new(vec![col0, col1], None, 2);
        let y = Matrix::new(&[1.0, 0.0, 0.0, 1.0], 2, 2);
        booster.fit_columnar(&data, &y, None, None).unwrap();
        assert!(!booster.boosters[0].trees.is_empty());
    }

    // Pruning after a fit should succeed without panicking.
    #[test]
    fn test_multi_output_prune() {
        let mut booster = MultiOutputBooster::default();
        booster = booster.set_n_boosters(2);
        booster = booster.set_budget(0.1);
        let data = Matrix::new(&[1.0, 2.0, 3.0, 4.0], 2, 2);
        let y = Matrix::new(&[1.0, 0.0, 0.0, 1.0], 2, 2);
        booster.fit(&data, &y, None, None).unwrap();
        booster.prune(&data, &y, None, None).unwrap();
    }

    // Columnar calibration path mirrors `calibrate`.
    #[test]
    fn test_multi_output_calibrate_columnar() {
        let mut booster = MultiOutputBooster::default();
        booster = booster.set_n_boosters(2);
        booster = booster.set_save_node_stats(true);
        booster = booster.set_budget(0.1);
        let data_vec = [1.0, 2.0, 3.0, 4.0];
        let col0 = &data_vec[0..2];
        let col1 = &data_vec[2..4];
        let data = ColumnarMatrix::new(vec![col0, col1], None, 2);
        let y = Matrix::new(&[1.0, 0.0, 0.0, 1.0], 2, 2);
        booster.fit_columnar(&data, &y, None, None).unwrap();
        let alpha = vec![0.05, 0.95];
        let data_cal = (&data, &y, alpha.as_slice());
        booster
            .calibrate_columnar(crate::booster::config::CalibrationMethod::WeightVariance, data_cal)
            .unwrap();
    }

    // Metadata insert/get round-trip.
    #[test]
    fn test_multi_output_metadata() {
        let mut booster = MultiOutputBooster::default();
        booster.insert_metadata("key".to_string(), "value".to_string());
        assert_eq!(booster.get_metadata(&"key".to_string()), Some("value".to_string()));
    }

    // Partial dependence of a fitted model is non-zero.
    #[test]
    fn test_multi_output_partial_dependence() {
        let mut booster = MultiOutputBooster::default();
        booster = booster.set_n_boosters(2);
        booster = booster.set_budget(0.1);
        let data = Matrix::new(&[1.0, 2.0, 3.0, 4.0], 2, 2);
        let y = Matrix::new(&[1.0, 0.0, 0.0, 1.0], 2, 2);
        booster.fit(&data, &y, None, None).unwrap();
        let pd = booster.value_partial_dependence(0, 1.5);
        assert!(pd != 0.0);
    }

    // Feature importance of a fitted model is non-empty.
    #[test]
    fn test_multi_output_feature_importance() {
        let mut booster = MultiOutputBooster::default();
        booster = booster.set_n_boosters(2);
        booster = booster.set_budget(0.1);
        let data = Matrix::new(&[1.0, 2.0, 3.0, 4.0], 2, 2);
        let y = Matrix::new(&[1.0, 0.0, 0.0, 1.0], 2, 2);
        booster.fit(&data, &y, None, None).unwrap();
        let importance = booster.calculate_feature_importance(ImportanceMethod::Weight, true);
        assert!(!importance.is_empty());
    }

    // Smoke test: every builder setter chains without panicking.
    #[test]
    fn test_multi_output_all_setters() {
        let booster = MultiOutputBooster::default()
            .set_num_threads(Some(2))
            .set_monotone_constraints(None)
            .set_interaction_constraints(None)
            .set_force_children_to_bound_parent(true)
            .set_missing(f64::NAN)
            .set_allow_missing_splits(true)
            .set_create_missing_branch(true)
            .set_terminate_missing_features(HashSet::new())
            .set_missing_node_treatment(MissingNodeTreatment::None)
            .set_log_iterations(0)
            .set_seed(123)
            .set_reset(None)
            .set_categorical_features(None)
            .set_timeout(None)
            .set_iteration_limit(None)
            .set_memory_limit(None)
            .set_stopping_rounds(None)
            .set_save_node_stats(false)
            .set_calibration_method(CalibrationMethod::WeightVariance);
        assert_eq!(booster.cfg.seed, 123);
    }
}