1use faer::{
2 Mat, Side,
3 linalg::solvers::Solve,
4 sparse::linalg::solvers::{Llt, SymbolicLlt},
5 sparse::{SparseColMat, Triplet},
6};
7use std::ops::Mul;
8
9use crate::linalg::{LinAlgError, LinAlgResult, SparseLinearSolver};
10
/// Sparse linear solver backed by a sparse Cholesky (LLᵀ) factorization from `faer`.
///
/// Caches intermediate results between calls: the symbolic (pattern-only)
/// analysis is reused across solves, and the covariance matrix / standard
/// errors are computed lazily from the last numeric factorization.
#[derive(Debug, Clone)]
pub struct SparseCholeskySolver {
    // Most recent numeric LLᵀ factorization; needed for covariance computation.
    factorizer: Option<Llt<usize, f64>>,

    // Cached symbolic factorization, reused unconditionally on later solves.
    // NOTE(review): not invalidated when the sparsity pattern changes — confirm
    // callers always pass structurally identical systems.
    symbolic_factorization: Option<SymbolicLlt<usize>>,

    // Normal-equation matrix JᵀJ from the most recent solve.
    hessian: Option<SparseColMat<usize, f64>>,

    // Gradient Jᵀr from the most recent solve.
    gradient: Option<Mat<f64>>,

    // Lazily computed covariance matrix (inverse of the factored matrix).
    covariance_matrix: Option<Mat<f64>>,
    // Lazily computed square roots of the covariance diagonal.
    standard_errors: Option<Mat<f64>>,
}
42
43impl SparseCholeskySolver {
44 pub fn new() -> Self {
45 SparseCholeskySolver {
46 factorizer: None,
47 symbolic_factorization: None,
48 hessian: None,
49 gradient: None,
50 covariance_matrix: None,
51 standard_errors: None,
52 }
53 }
54
55 pub fn hessian(&self) -> Option<&SparseColMat<usize, f64>> {
56 self.hessian.as_ref()
57 }
58
59 pub fn gradient(&self) -> Option<&Mat<f64>> {
60 self.gradient.as_ref()
61 }
62
63 pub fn compute_standard_errors(&mut self) -> Option<&Mat<f64>> {
64 if self.covariance_matrix.is_none() {
66 self.compute_covariance_matrix();
67 }
68
69 let hessian = self.hessian.as_ref()?;
71 let n = hessian.ncols();
72 if let Some(cov) = &self.covariance_matrix {
74 let mut std_errors = Mat::zeros(n, 1);
75 for i in 0..n {
76 let diag_val = cov[(i, i)];
77 if diag_val >= 0.0 {
78 std_errors[(i, 0)] = diag_val.sqrt();
79 } else {
80 return None;
82 }
83 }
84 self.standard_errors = Some(std_errors);
85 }
86 self.standard_errors.as_ref()
87 }
88
89 pub fn reset_covariance(&mut self) {
91 self.covariance_matrix = None;
92 self.standard_errors = None;
93 }
94}
95
96impl Default for SparseCholeskySolver {
97 fn default() -> Self {
98 Self::new()
99 }
100}
101impl SparseLinearSolver for SparseCholeskySolver {
102 fn solve_normal_equation(
103 &mut self,
104 residuals: &Mat<f64>,
105 jacobians: &SparseColMat<usize, f64>,
106 ) -> LinAlgResult<Mat<f64>> {
107 let jt = jacobians.as_ref().transpose();
109 let hessian = jt
110 .to_col_major()
111 .map_err(|e| {
112 LinAlgError::MatrixConversion(
113 "Failed to convert transposed Jacobian to column-major format".to_string(),
114 )
115 .log_with_source(e)
116 })?
117 .mul(jacobians.as_ref());
118
119 let gradient = jacobians.as_ref().transpose().mul(residuals);
121
122 let sym = if let Some(ref cached_sym) = self.symbolic_factorization {
123 cached_sym.clone()
128 } else {
129 let new_sym = SymbolicLlt::try_new(hessian.symbolic(), Side::Lower).map_err(|e| {
131 LinAlgError::FactorizationFailed(
132 "Symbolic Cholesky decomposition failed".to_string(),
133 )
134 .log_with_source(e)
135 })?;
136 self.symbolic_factorization = Some(new_sym.clone());
138 new_sym
139 };
140
141 let cholesky =
143 Llt::try_new_with_symbolic(sym, hessian.as_ref(), Side::Lower).map_err(|e| {
144 LinAlgError::SingularMatrix(
145 "Cholesky factorization failed (matrix may be singular)".to_string(),
146 )
147 .log_with_source(e)
148 })?;
149
150 let dx = cholesky.solve(-&gradient);
151 self.hessian = Some(hessian);
152 self.gradient = Some(gradient);
153 self.factorizer = Some(cholesky);
154
155 Ok(dx)
156 }
157
158 fn solve_augmented_equation(
159 &mut self,
160 residuals: &Mat<f64>,
161 jacobians: &SparseColMat<usize, f64>,
162 lambda: f64,
163 ) -> LinAlgResult<Mat<f64>> {
164 let n = jacobians.ncols();
165
166 let jt = jacobians.as_ref().transpose();
168 let hessian = jt
169 .to_col_major()
170 .map_err(|e| {
171 LinAlgError::MatrixConversion(
172 "Failed to convert transposed Jacobian to column-major format".to_string(),
173 )
174 .log_with_source(e)
175 })?
176 .mul(jacobians.as_ref());
177
178 let gradient = jacobians.as_ref().transpose().mul(residuals);
180
181 let mut lambda_i_triplets = Vec::with_capacity(n);
183 for i in 0..n {
184 lambda_i_triplets.push(Triplet::new(i, i, lambda));
185 }
186 let lambda_i =
187 SparseColMat::try_new_from_triplets(n, n, &lambda_i_triplets).map_err(|e| {
188 LinAlgError::SparseMatrixCreation("Failed to create lambda*I matrix".to_string())
189 .log_with_source(e)
190 })?;
191
192 let augmented_hessian = &hessian + lambda_i;
193
194 let sym = if let Some(ref cached_sym) = self.symbolic_factorization {
195 cached_sym.clone()
200 } else {
201 let new_sym =
203 SymbolicLlt::try_new(augmented_hessian.symbolic(), Side::Lower).map_err(|e| {
204 LinAlgError::FactorizationFailed(
205 "Symbolic Cholesky decomposition failed for augmented system".to_string(),
206 )
207 .log_with_source(e)
208 })?;
209 self.symbolic_factorization = Some(new_sym.clone());
211 new_sym
212 };
213
214 let cholesky = Llt::try_new_with_symbolic(sym, augmented_hessian.as_ref(), Side::Lower)
216 .map_err(|e| {
217 LinAlgError::SingularMatrix(
218 "Cholesky factorization failed (matrix may be singular)".to_string(),
219 )
220 .log_with_source(e)
221 })?;
222
223 let dx = cholesky.solve(-&gradient);
224 self.hessian = Some(hessian);
225 self.gradient = Some(gradient);
226 self.factorizer = Some(cholesky);
227
228 Ok(dx)
229 }
230
231 fn get_hessian(&self) -> Option<&SparseColMat<usize, f64>> {
232 self.hessian.as_ref()
233 }
234
235 fn get_gradient(&self) -> Option<&Mat<f64>> {
236 self.gradient.as_ref()
237 }
238
239 fn compute_covariance_matrix(&mut self) -> Option<&Mat<f64>> {
240 if self.factorizer.is_some()
242 && self.hessian.is_some()
243 && self.covariance_matrix.is_none()
244 && let (Some(factorizer), Some(hessian)) = (&self.factorizer, &self.hessian)
245 {
246 let n = hessian.ncols();
247 let identity = Mat::identity(n, n);
249
250 let cov_matrix = factorizer.solve(&identity);
252 self.covariance_matrix = Some(cov_matrix);
253 }
254 self.covariance_matrix.as_ref()
255 }
256
257 fn get_covariance_matrix(&self) -> Option<&Mat<f64>> {
258 self.covariance_matrix.as_ref()
259 }
260}
261
#[cfg(test)]
mod tests {
    use super::*;

    /// Absolute tolerance for floating-point comparisons.
    const TOLERANCE: f64 = 1e-10;

    type TestResult = Result<(), Box<dyn std::error::Error>>;

    /// Builds a well-conditioned 4x3 sparse Jacobian together with a 4x1
    /// residual vector; shared by most of the tests below.
    fn create_test_data()
    -> Result<(SparseColMat<usize, f64>, Mat<f64>), faer::sparse::CreationError> {
        let entries = [
            (0usize, 0usize, 2.0),
            (0, 1, 1.0),
            (1, 0, 1.0),
            (1, 1, 3.0),
            (1, 2, 1.0),
            (2, 1, 1.0),
            (2, 2, 2.0),
            (3, 0, 1.5),
            (3, 2, 0.5),
        ];
        let triplets: Vec<_> = entries
            .iter()
            .map(|&(row, col, val)| Triplet::new(row, col, val))
            .collect();
        let jacobian = SparseColMat::try_new_from_triplets(4, 3, &triplets)?;

        let residual_values = [1.0, -2.0, 0.5, 1.2];
        let residuals = Mat::from_fn(4, 1, |i, _| residual_values[i]);

        Ok((jacobian, residuals))
    }

    /// Fresh solvers (via `new` or `Default`) start without a factorization.
    #[test]
    fn test_solver_creation() {
        assert!(SparseCholeskySolver::new().factorizer.is_none());
        assert!(SparseCholeskySolver::default().factorizer.is_none());
    }

    #[test]
    fn test_solve_normal_equation_well_conditioned() -> TestResult {
        let mut solver = SparseCholeskySolver::new();
        let (jacobian, residuals) = create_test_data()?;

        let dx = solver.solve_normal_equation(&residuals, &jacobian)?;
        assert_eq!(dx.nrows(), 3);
        assert_eq!(dx.ncols(), 1);

        // The numeric factorization must be retained after solving.
        assert!(solver.factorizer.is_some());
        Ok(())
    }

    /// Two identical solves (the second reusing the cached symbolic pattern)
    /// must yield the same solution.
    #[test]
    fn test_symbolic_pattern_caching() -> TestResult {
        let mut solver = SparseCholeskySolver::new();
        let (jacobian, residuals) = create_test_data()?;

        let first = solver.solve_normal_equation(&residuals, &jacobian)?;
        assert!(solver.factorizer.is_some());

        let second = solver.solve_normal_equation(&residuals, &jacobian)?;

        for row in 0..first.nrows() {
            assert!((first[(row, 0)] - second[(row, 0)]).abs() < TOLERANCE);
        }
        Ok(())
    }

    #[test]
    fn test_solve_augmented_equation() -> TestResult {
        let mut solver = SparseCholeskySolver::new();
        let (jacobian, residuals) = create_test_data()?;

        let dx = solver.solve_augmented_equation(&residuals, &jacobian, 0.1)?;
        assert_eq!(dx.nrows(), 3);
        assert_eq!(dx.ncols(), 1);
        Ok(())
    }

    /// Different damping values must move the solution.
    #[test]
    fn test_augmented_equation_different_lambdas() -> TestResult {
        let mut solver = SparseCholeskySolver::new();
        let (jacobian, residuals) = create_test_data()?;

        let sol_small = solver.solve_augmented_equation(&residuals, &jacobian, 0.01)?;
        let sol_large = solver.solve_augmented_equation(&residuals, &jacobian, 1.0)?;

        let different = (0..sol_small.nrows())
            .any(|i| (sol_small[(i, 0)] - sol_large[(i, 0)]).abs() > TOLERANCE);
        assert!(
            different,
            "Solutions should differ with different lambda values"
        );
        Ok(())
    }

    /// A rank-deficient Jacobian (second row = 2 x first row) must make the
    /// factorization fail.
    #[test]
    fn test_singular_matrix() -> TestResult {
        let mut solver = SparseCholeskySolver::new();

        let triplets = [
            Triplet::new(0, 0, 1.0),
            Triplet::new(0, 1, 2.0),
            Triplet::new(1, 0, 2.0),
            Triplet::new(1, 1, 4.0),
        ];
        let singular_jacobian = SparseColMat::try_new_from_triplets(2, 2, &triplets)?;
        let residuals = Mat::from_fn(2, 1, |i, _| i as f64);

        let result = solver.solve_normal_equation(&residuals, &singular_jacobian);
        assert!(result.is_err(), "Singular matrix should return Err");
        Ok(())
    }

    /// A 0x0 system should either error out or yield an empty solution.
    #[test]
    fn test_empty_matrix() -> TestResult {
        let mut solver = SparseCholeskySolver::new();

        let empty_jacobian = SparseColMat::try_new_from_triplets(0, 0, &[])?;
        let empty_residuals = Mat::zeros(0, 1);

        if let Ok(dx) = solver.solve_normal_equation(&empty_residuals, &empty_jacobian) {
            assert_eq!(dx.nrows(), 0);
        }
        Ok(())
    }

    /// With J = I the step reduces to dx = -r, so residuals of -1 and -2
    /// must give a solution of exactly [1, 2].
    #[test]
    fn test_numerical_accuracy() -> TestResult {
        let mut solver = SparseCholeskySolver::new();

        let triplets = [
            Triplet::new(0, 0, 1.0),
            Triplet::new(0, 1, 0.0),
            Triplet::new(1, 0, 0.0),
            Triplet::new(1, 1, 1.0),
        ];
        let jacobian = SparseColMat::try_new_from_triplets(2, 2, &triplets)?;
        let residuals = Mat::from_fn(2, 1, |i, _| -((i + 1) as f64));

        let solution = solver.solve_normal_equation(&residuals, &jacobian)?;
        assert!((solution[(0, 0)] - 1.0).abs() < TOLERANCE);
        assert!((solution[(1, 0)] - 2.0).abs() < TOLERANCE);
        Ok(())
    }

    #[test]
    fn test_solver_clone() {
        let original = SparseCholeskySolver::new();
        let copy = original.clone();

        assert!(original.factorizer.is_none());
        assert!(copy.factorizer.is_none());
    }

    #[test]
    fn test_cholesky_covariance_computation() -> TestResult {
        let mut solver = SparseCholeskySolver::new();
        let (jacobian, residuals) = create_test_data()?;

        solver.solve_normal_equation(&residuals, &jacobian)?;

        let cov_matrix = solver.compute_covariance_matrix();
        assert!(cov_matrix.is_some());

        if let Some(cov) = cov_matrix {
            assert_eq!(cov.nrows(), 3);
            assert_eq!(cov.ncols(), 3);

            // The inverse of a symmetric matrix must itself be symmetric …
            for i in 0..3 {
                for j in 0..3 {
                    assert!(
                        (cov[(i, j)] - cov[(j, i)]).abs() < TOLERANCE,
                        "Covariance matrix should be symmetric"
                    );
                }
            }

            // … with strictly positive variances on the diagonal.
            for i in 0..3 {
                assert!(
                    cov[(i, i)] > 0.0,
                    "Diagonal elements (variances) should be positive"
                );
            }
        }
        Ok(())
    }

    #[test]
    fn test_cholesky_standard_errors_computation() -> TestResult {
        let mut solver = SparseCholeskySolver::new();
        let (jacobian, residuals) = create_test_data()?;

        solver.solve_normal_equation(&residuals, &jacobian)?;

        solver.compute_standard_errors();

        // Computing standard errors must also populate the covariance cache.
        assert!(solver.covariance_matrix.is_some());
        assert!(solver.standard_errors.is_some());

        if let (Some(cov), Some(errors)) = (&solver.covariance_matrix, &solver.standard_errors) {
            assert_eq!(errors.nrows(), 3);
            assert_eq!(errors.ncols(), 1);

            for i in 0..3 {
                assert!(errors[(i, 0)] > 0.0, "Standard errors should be positive");
            }

            // Each standard error is the sqrt of the matching variance.
            for i in 0..3 {
                let expected_std_error = cov[(i, i)].sqrt();
                assert!(
                    (errors[(i, 0)] - expected_std_error).abs() < TOLERANCE,
                    "Standard error should equal sqrt of covariance diagonal"
                );
            }
        }
        Ok(())
    }

    /// For J = [[3, 1], [1, 2]] we get H = JᵀJ = [[10, 5], [5, 5]], whose
    /// inverse is [[0.2, -0.2], [-0.2, 0.4]].
    #[test]
    fn test_cholesky_covariance_positive_definite() -> TestResult {
        let mut solver = SparseCholeskySolver::new();

        let triplets = [
            Triplet::new(0, 0, 3.0),
            Triplet::new(0, 1, 1.0),
            Triplet::new(1, 0, 1.0),
            Triplet::new(1, 1, 2.0),
        ];
        let jacobian = SparseColMat::try_new_from_triplets(2, 2, &triplets)?;
        let residuals = Mat::from_fn(2, 1, |i, _| (i + 1) as f64);

        solver.solve_normal_equation(&residuals, &jacobian)?;

        let cov_matrix = solver.compute_covariance_matrix();
        assert!(cov_matrix.is_some());

        if let Some(cov) = cov_matrix {
            assert!((cov[(0, 0)] - 0.2).abs() < TOLERANCE);
            assert!((cov[(1, 1)] - 0.4).abs() < TOLERANCE);
            assert!((cov[(0, 1)] - (-0.2)).abs() < TOLERANCE);
            assert!((cov[(1, 0)] - (-0.2)).abs() < TOLERANCE);
        }
        Ok(())
    }

    /// A second call to `compute_covariance_matrix` must return the cached
    /// allocation, not a fresh one.
    #[test]
    fn test_cholesky_covariance_caching() -> TestResult {
        let mut solver = SparseCholeskySolver::new();
        let (jacobian, residuals) = create_test_data()?;

        solver.solve_normal_equation(&residuals, &jacobian)?;

        solver.compute_covariance_matrix();
        assert!(solver.covariance_matrix.is_some());

        if let Some(cov_first) = &solver.covariance_matrix {
            let first_ptr = cov_first.as_ptr();

            solver.compute_covariance_matrix();
            assert!(solver.covariance_matrix.is_some());

            if let Some(cov_second) = &solver.covariance_matrix {
                assert_eq!(
                    first_ptr,
                    cov_second.as_ptr(),
                    "Covariance matrix should be cached"
                );
            }
        }
        Ok(())
    }

    #[test]
    fn test_cholesky_decomposition_properties() -> TestResult {
        let mut solver = SparseCholeskySolver::new();

        // Diagonal Jacobian -> diagonal (trivially positive-definite) Hessian.
        let triplets = [Triplet::new(0, 0, 2.0), Triplet::new(1, 1, 3.0)];
        let jacobian = SparseColMat::try_new_from_triplets(2, 2, &triplets)?;
        let residuals = Mat::from_fn(2, 1, |i, _| (i + 1) as f64);

        solver.solve_normal_equation(&residuals, &jacobian)?;

        assert!(solver.factorizer.is_some());
        assert!(solver.hessian.is_some());

        if let Some(hessian) = &solver.hessian {
            assert_eq!(hessian.nrows(), 2);
            assert_eq!(hessian.ncols(), 2);
        }
        Ok(())
    }

    /// Identity Jacobian: solution is -r componentwise and the covariance
    /// is exactly the identity matrix.
    #[test]
    fn test_cholesky_numerical_stability() -> TestResult {
        let mut solver = SparseCholeskySolver::new();

        let triplets = [
            Triplet::new(0, 0, 1.0),
            Triplet::new(1, 1, 1.0),
            Triplet::new(2, 2, 1.0),
        ];
        let jacobian = SparseColMat::try_new_from_triplets(3, 3, &triplets)?;
        let residuals = Mat::from_fn(3, 1, |i, _| -((i + 1) as f64));

        let solution = solver.solve_normal_equation(&residuals, &jacobian)?;
        for i in 0..3 {
            let expected = (i + 1) as f64;
            assert!(
                (solution[(i, 0)] - expected).abs() < TOLERANCE,
                "Expected {}, got {}",
                expected,
                solution[(i, 0)]
            );
        }

        let cov_matrix = solver.compute_covariance_matrix();
        assert!(cov_matrix.is_some());
        if let Some(cov) = cov_matrix {
            for i in 0..3 {
                for j in 0..3 {
                    let expected = if i == j { 1.0 } else { 0.0 };
                    assert!(
                        (cov[(i, j)] - expected).abs() < TOLERANCE,
                        "Covariance[{}, {}] expected {}, got {}",
                        i,
                        j,
                        expected,
                        cov[(i, j)]
                    );
                }
            }
        }
        Ok(())
    }
}