gbrt_rs/core/mod.rs

//! Core configurations and loss functions for gradient boosting.
//!
//! This module defines the fundamental building blocks of the gradient boosting
//! library, including configuration structures for models and trees, loss function
//! abstractions, and the central error handling types used throughout the crate.
//!
//! # Submodules
//!
//! - [`config`]: Configuration structures for gradient boosters and decision trees
//! - [`loss`]: Loss function trait and implementations for optimization
//!
//! # Key Components
//!
//! The core module provides:
//!
//! - [`GBRTConfig`]: Main configuration for gradient boosting models
//! - [`LossFunction`]: Enumeration of supported loss functions
//! - [`TreeConfig`]: Configuration for individual decision trees
//! - [`Loss`] and [`GradientLoss`]: Loss function abstractions
//! - [`CoreError`]: Central error type for the library
//! - [`Result<T>`]: Convenient result type alias
//!
//! # Error Handling
//!
//! The [`CoreError`] enum serves as the primary error type for all operations
//! that can fail in the core library. It provides three main error categories,
//! each shown in the example below:
//!
//! - Configuration errors
//! - Training errors
//! - Prediction errors
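//!
//! # Examples
//!
//! A minimal sketch of handling each category; the import path assumes the
//! crate is named `gbrt_rs` and exposes this module as `gbrt_rs::core`:
//!
//! ```
//! use gbrt_rs::core::CoreError;
//!
//! // Hypothetical error value, used only to demonstrate matching on the
//! // three categories; real errors come from training or prediction calls.
//! let err = CoreError::TrainingError("empty training set".to_string());
//!
//! match err {
//!     CoreError::ConfigError(msg) => eprintln!("bad configuration: {msg}"),
//!     CoreError::TrainingError(msg) => eprintln!("training failed: {msg}"),
//!     CoreError::PredictionError(msg) => eprintln!("prediction failed: {msg}"),
//! }
//! ```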
mod config;
mod loss;

/// Re-exported configuration types for gradient boosting models.
///
/// These configurations control model behavior, tree structure, and optimization
/// parameters.
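///
/// # Examples
///
/// A hypothetical sketch of wiring the three types together; the actual
/// fields and constructors are defined in the `config` module and may differ:
///
/// ```ignore
/// // Hypothetical usage only: assumes `Default` implementations, public
/// // `loss`/`tree` fields, and a `SquaredError` variant; consult the
/// // `config` module for the real API.
/// let config = GBRTConfig {
///     loss: LossFunction::SquaredError,
///     tree: TreeConfig::default(),
///     ..GBRTConfig::default()
/// };
/// ```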
pub use config::{
    // Main configuration for gradient boosting models.
    GBRTConfig,
    // Enumeration of supported loss functions (MSE, Log Loss, etc.).
    LossFunction,
    // Configuration for individual decision trees (depth, leaf size, etc.).
    TreeConfig,
};

/// Re-exported loss function abstractions and factory.
///
/// Loss functions define the objective that the gradient booster optimizes.
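///
/// # Examples
///
/// A hypothetical sketch of the factory; the actual signature of `create_loss`
/// is defined in the `loss` module and may differ:
///
/// ```ignore
/// // Hypothetical usage only: assumes `create_loss` takes a loss name and
/// // returns a boxed object implementing `Loss`.
/// let mse = create_loss("mse")?;
/// ```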
pub use loss::{
    // Core trait for loss functions.
    Loss,
    // Trait for loss functions that provide gradients and Hessians.
    GradientLoss,
    // Factory function to create loss functions by name.
    create_loss,
};

/// Central error type for the gradient boosting library.
///
/// `CoreError` represents all possible failure modes in core library operations,
/// including configuration issues, training failures, and prediction errors.
///
/// # Variants
///
/// - [`ConfigError`](CoreError::ConfigError): Invalid model or tree configuration
/// - [`TrainingError`](CoreError::TrainingError): Failure during model training
/// - [`PredictionError`](CoreError::PredictionError): Failure during prediction
///
/// # Note
///
/// The error type derives `Display` and `std::error::Error` via `thiserror`.
/// Because the derive uses the fully-qualified `thiserror::Error` path, the
/// crate only needs `thiserror` as a dependency; no `use` import is required.
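///
/// # Examples
///
/// A minimal sketch of constructing an error and reading its message; the
/// import path assumes the crate is named `gbrt_rs` with this module exposed
/// at `gbrt_rs::core`:
///
/// ```
/// use gbrt_rs::core::CoreError;
///
/// // Build a configuration error; `thiserror` generates the `Display`
/// // output checked below from the `#[error(...)]` attribute.
/// let err = CoreError::ConfigError("learning rate must be positive".to_string());
/// assert_eq!(
///     err.to_string(),
///     "Invalid configuration: learning rate must be positive"
/// );
/// ```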
#[derive(thiserror::Error, Debug)]
pub enum CoreError {
    /// Invalid configuration parameter or incompatible settings.
    #[error("Invalid configuration: {0}")]
    ConfigError(String),

    /// Error during model training (e.g., insufficient data, numerical instability).
    #[error("Training error: {0}")]
    TrainingError(String),

    /// Error during prediction (e.g., mismatched feature dimensions, missing data).
    #[error("Prediction error: {0}")]
    PredictionError(String),
}

/// Convenient type alias for `Result<T, CoreError>`.
///
/// This alias is used throughout the library for operations that can fail
/// and return a `CoreError`. It provides a consistent error handling pattern
/// across the codebase.
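///
/// # Examples
///
/// A minimal sketch of a fallible helper using the alias; the import path
/// assumes the crate is named `gbrt_rs` with this module exposed at
/// `gbrt_rs::core`:
///
/// ```
/// use gbrt_rs::core::{CoreError, Result};
///
/// // Hypothetical validation helper, shown only to illustrate the alias.
/// fn check_learning_rate(rate: f64) -> Result<f64> {
///     if rate > 0.0 {
///         Ok(rate)
///     } else {
///         Err(CoreError::ConfigError(format!(
///             "learning rate must be positive, got {rate}"
///         )))
///     }
/// }
///
/// assert!(check_learning_rate(0.1).is_ok());
/// assert!(check_learning_rate(-1.0).is_err());
/// ```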
pub type Result<T> = std::result::Result<T, CoreError>;