use scirs2_core::CoreError;
use thiserror::Error;

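/// Error type for linear algebra operations.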
#[derive(Error, Debug, Clone)]
pub enum LinalgError {
    #[error("Computation error: {0}")]
    ComputationError(String),

    #[error("Domain error: {0}")]
    DomainError(String),

    #[error("Convergence error: {0}")]
    ConvergenceError(String),

    #[error("Dimension mismatch error: {0}")]
    DimensionError(String),

    #[error("Shape error: {0}")]
    ShapeError(String),

    #[error("Index out of bounds: {0}")]
    IndexError(String),

    #[error("Singular matrix error: {0}")]
    SingularMatrixError(String),

    #[error("Non-positive definite matrix error: {0}")]
    NonPositiveDefiniteError(String),

    #[error("Not implemented: {0}")]
    NotImplementedError(String),

    #[error("Implementation error: {0}")]
    ImplementationError(String),

    #[error("Value error: {0}")]
    ValueError(String),

    #[error("Invalid input error: {0}")]
    InvalidInputError(String),

    #[error("Invalid input: {0}")]
    InvalidInput(String),

    #[error("Numerical error: {0}")]
    NumericalError(String),
}

impl LinalgError {
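    /// Builds a [`LinalgError::SingularMatrixError`] for `operation` on a matrix of
    /// `matrix_shape`, appending regularization suggestions to the message. If
    /// `condition_number` is provided it is reported as well, with an extra hint when
    /// it exceeds 1e12.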
    pub fn singularmatrix_with_suggestions(
        operation: &str,
        matrix_shape: (usize, usize),
        condition_number: Option<f64>,
    ) -> Self {
        let base_msg = format!("Matrix is singular during {operation} operation");
        let rows = matrix_shape.0;
        let cols = matrix_shape.1;
        let shape_info = format!("Matrix shape: {rows}×{cols}");

        let mut suggestions = vec![
            "Consider the following regularization approaches:".to_string(),
            "1. Ridge regularization: Add λI to the matrix (small positive λ)".to_string(),
            "2. Pseudo-inverse: Use SVD-based pseudo-inverse for rank-deficient matrices"
                .to_string(),
            "3. Truncated SVD: Remove small singular values below a threshold".to_string(),
        ];

        if let Some(cond) = condition_number {
            suggestions.push(format!(
                "4. Condition number: {cond:.2e} (>1e12 indicates ill-conditioning)"
            ));
            if cond > 1e12 {
                suggestions.push(
                    "5. Use extended precision arithmetic for better numerical stability"
                        .to_string(),
                );
            }
        }

        suggestions.extend_from_slice(&[
            "6. Check input data for linear dependencies or scaling issues".to_string(),
            "7. Use iterative refinement for improved accuracy".to_string(),
        ]);

        let suggestions_str = suggestions.join("\n");
        let full_msg = format!("{base_msg}\n{shape_info}\n{suggestions_str}");
        LinalgError::SingularMatrixError(full_msg)
    }

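    /// Builds a [`LinalgError::NonPositiveDefiniteError`] for `operation` on a matrix of
    /// `matrix_shape`, appending suggestions for handling indefinite matrices. If the
    /// number of `negative_eigenvalues` is known, it is included in the message.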
    pub fn non_positive_definite_with_suggestions(
        operation: &str,
        matrix_shape: (usize, usize),
        negative_eigenvalues: Option<usize>,
    ) -> Self {
        let base_msg = format!("Matrix is not positive definite during {operation} operation");
        let rows = matrix_shape.0;
        let cols = matrix_shape.1;
        let shape_info = format!("Matrix shape: {rows}×{cols}");

        let mut suggestions = vec![
            "Consider the following regularization approaches:".to_string(),
            "1. Diagonal regularization: Add λI where λ > |most negative eigenvalue|".to_string(),
            "2. Modified Cholesky: Use algorithms that ensure positive definiteness".to_string(),
            "3. Eigenvalue clipping: Replace negative eigenvalues with small positive values"
                .to_string(),
            "4. Use LDL decomposition instead of Cholesky for indefinite matrices".to_string(),
        ];

        if let Some(neg_count) = negative_eigenvalues {
            suggestions.push(format!(
                "5. Found {neg_count} negative eigenvalue(s) - consider spectral regularization"
            ));
        }

        suggestions.extend_from_slice(&[
            "6. Check if matrix is symmetric (required for Cholesky)".to_string(),
            "7. Use pivoted Cholesky for rank-deficient positive semidefinite matrices".to_string(),
            "8. Consider using QR or LU decomposition for non-symmetric matrices".to_string(),
        ]);

        let suggestions_str = suggestions.join("\n");
        let full_msg = format!("{base_msg}\n{shape_info}\n{suggestions_str}");
        LinalgError::NonPositiveDefiniteError(full_msg)
    }

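    /// Builds a [`LinalgError::ConvergenceError`] for `algorithm` after `iterations`
    /// iterations at the given `tolerance`, appending suggestions for improving
    /// convergence. If `current_residual` is provided, the message also reports how far
    /// it is from the target tolerance.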
    pub fn convergence_with_suggestions(
        algorithm: &str,
        iterations: usize,
        tolerance: f64,
        current_residual: Option<f64>,
    ) -> Self {
        let base_msg = format!("{algorithm} failed to converge after {iterations} iterations");
        let tolerance_info = format!("Target tolerance: {tolerance:.2e}");

        let mut suggestions = vec![
            "Consider the following approaches to improve convergence:".to_string(),
            "1. Increase maximum iterations limit".to_string(),
            "2. Relax convergence tolerance".to_string(),
            "3. Use preconditioning to improve condition number".to_string(),
            "4. Try different initial guess or starting point".to_string(),
        ];

        if let Some(residual) = current_residual {
            suggestions.push(format!(
                "5. Current residual: {residual:.2e} (target: {tolerance:.2e})"
            ));
            if residual / tolerance < 10.0 {
                suggestions.push(
                    "6. Close to convergence - try increasing iterations slightly".to_string(),
                );
            } else {
                suggestions
                    .push("6. Far from convergence - consider algorithm changes".to_string());
            }
        }

        suggestions.extend_from_slice(&[
            "7. Use mixed precision arithmetic for better numerical stability".to_string(),
            "8. Check matrix conditioning - use regularization if poorly conditioned".to_string(),
            "9. Consider switching to direct methods for smaller problems".to_string(),
        ]);

        let suggestions_str = suggestions.join("\n");
        let full_msg = format!("{base_msg}\n{tolerance_info}\n{suggestions_str}");
        LinalgError::ConvergenceError(full_msg)
    }
}

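/// Convenience alias for `Result` with [`LinalgError`] as the error type.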
pub type LinalgResult<T> = Result<T, LinalgError>;

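// Map `CoreError` values from `scirs2_core` onto the closest `LinalgError` variant so
// core-level failures can be propagated with `?` inside linalg routines.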
impl From<CoreError> for LinalgError {
    fn from(error: CoreError) -> Self {
        match error {
            CoreError::ShapeError(msg) => LinalgError::ShapeError(msg.to_string()),
            CoreError::DimensionError(msg) => LinalgError::DimensionError(msg.to_string()),
            CoreError::IndexError(msg) => LinalgError::IndexError(msg.to_string()),
            CoreError::ValueError(msg) => LinalgError::ValueError(msg.to_string()),
            CoreError::InvalidInput(msg) => LinalgError::InvalidInput(msg.to_string()),
            CoreError::ComputationError(msg) => LinalgError::ComputationError(msg.to_string()),
            CoreError::NotImplementedError(msg) => {
                LinalgError::NotImplementedError(msg.to_string())
            }
            CoreError::ImplementationError(msg) => {
                LinalgError::ImplementationError(msg.to_string())
            }
            _ => LinalgError::ComputationError(format!("Core error: {error}")),
        }
    }
}

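/// Returns `Ok(())` when `condition` is true; otherwise returns a
/// [`LinalgError::DomainError`] carrying `message`.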
#[allow(dead_code)]
pub fn check_domain<S: AsRef<str>>(condition: bool, message: S) -> LinalgResult<()> {
    if condition {
        Ok(())
    } else {
        Err(LinalgError::DomainError(message.as_ref().to_string()))
    }
}

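/// Returns `Ok(())` when `condition` is true; otherwise returns a
/// [`LinalgError::DimensionError`] carrying `message`.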
#[allow(dead_code)]
pub fn check_dimensions<S: AsRef<str>>(condition: bool, message: S) -> LinalgResult<()> {
    if condition {
        Ok(())
    } else {
        Err(LinalgError::DimensionError(message.as_ref().to_string()))
    }
}

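/// Returns `Ok(())` when `condition` is true; otherwise returns a
/// [`LinalgError::ValueError`] carrying `message`.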
#[allow(dead_code)]
pub fn check_value<S: AsRef<str>>(condition: bool, message: S) -> LinalgResult<()> {
    if condition {
        Ok(())
    } else {
        Err(LinalgError::ValueError(message.as_ref().to_string()))
    }
}
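
// A minimal usage sketch, added for illustration: these tests exercise only the
// constructors and helpers defined in this file (via `super::*`) and assert on message
// fragments that the code above visibly produces. They are not exhaustive.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn singular_matrix_suggestions_mention_shape_and_conditioning() {
        let err = LinalgError::singularmatrix_with_suggestions("inverse", (3, 3), Some(1e14));
        let msg = err.to_string();
        assert!(msg.contains("singular during inverse"));
        assert!(msg.contains("3×3"));
        // A condition number above 1e12 triggers the extended-precision hint.
        assert!(msg.contains("extended precision"));
    }

    #[test]
    fn convergence_suggestions_compare_residual_to_tolerance() {
        let err = LinalgError::convergence_with_suggestions("CG", 1000, 1e-10, Some(2e-10));
        let msg = err.to_string();
        assert!(msg.contains("failed to converge after 1000 iterations"));
        // A residual within 10x of the tolerance yields the "close to convergence" hint.
        assert!(msg.contains("Close to convergence"));
    }

    #[test]
    fn check_helpers_return_the_expected_variants() {
        assert!(check_domain(true, "unused").is_ok());
        assert!(matches!(
            check_dimensions(false, "rows must match"),
            Err(LinalgError::DimensionError(_))
        ));
        assert!(matches!(
            check_value(false, "value must be finite"),
            Err(LinalgError::ValueError(_))
        ));
    }
}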