use faer::{linalg::solvers::Solve, sparse, sparse::linalg::solvers};
use std::ops::Mul;

use crate::linalg::{LinAlgError, LinAlgResult, SparseLinearSolver};

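/// Sparse Cholesky (LLT) solver for nonlinear least-squares normal equations.
///
/// Builds the Gauss-Newton system `J^T J dx = -J^T r` (optionally augmented with
/// `lambda * I` for Levenberg-Marquardt steps), factorizes it with a sparse LLT,
/// and caches the symbolic factorization so repeated solves with the same sparsity
/// pattern skip the symbolic analysis. The cached factorization can also be reused
/// to derive a covariance matrix and per-parameter standard errors.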
#[derive(Debug, Clone)]
pub struct SparseCholeskySolver {
    /// Cached numeric Cholesky (LLT) factorization of the most recently solved system.
    factorizer: Option<solvers::Llt<usize, f64>>,

    /// Cached symbolic factorization, reused while the sparsity pattern stays the same.
    symbolic_factorization: Option<solvers::SymbolicLlt<usize>>,

    /// Gauss-Newton Hessian approximation `J^T J` from the last solve.
    hessian: Option<sparse::SparseColMat<usize, f64>>,

    /// Gradient `J^T r` from the last solve.
    gradient: Option<faer::Mat<f64>>,

    /// Lazily computed covariance matrix (inverse of the factorized Hessian).
    covariance_matrix: Option<faer::Mat<f64>>,
    /// Lazily computed standard errors (square roots of the covariance diagonal).
    standard_errors: Option<faer::Mat<f64>>,
}

impl SparseCholeskySolver {
    /// Creates an empty solver with no cached factorization or results.
    pub fn new() -> Self {
        SparseCholeskySolver {
            factorizer: None,
            symbolic_factorization: None,
            hessian: None,
            gradient: None,
            covariance_matrix: None,
            standard_errors: None,
        }
    }

    /// Returns the Hessian approximation `J^T J` from the last solve, if any.
    pub fn hessian(&self) -> Option<&sparse::SparseColMat<usize, f64>> {
        self.hessian.as_ref()
    }

    /// Returns the gradient `J^T r` from the last solve, if any.
    pub fn gradient(&self) -> Option<&faer::Mat<f64>> {
        self.gradient.as_ref()
    }

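    /// Computes (and caches) per-parameter standard errors as the square roots of the
    /// covariance diagonal. Returns `None` if no system has been solved yet or if any
    /// diagonal entry of the covariance matrix is negative.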
    pub fn compute_standard_errors(&mut self) -> Option<&faer::Mat<f64>> {
        if self.covariance_matrix.is_none() {
            self.compute_covariance_matrix();
        }

        let n = self.hessian.as_ref()?.ncols();
        if let Some(cov) = &self.covariance_matrix {
            let mut std_errors = faer::Mat::zeros(n, 1);
            for i in 0..n {
                let diag_val = cov[(i, i)];
                if diag_val >= 0.0 {
                    std_errors[(i, 0)] = diag_val.sqrt();
                } else {
                    // A negative variance signals a numerically invalid covariance matrix.
                    return None;
                }
            }
            self.standard_errors = Some(std_errors);
        }
        self.standard_errors.as_ref()
    }

    /// Clears the cached covariance matrix and standard errors so they are
    /// recomputed after the next solve.
    pub fn reset_covariance(&mut self) {
        self.covariance_matrix = None;
        self.standard_errors = None;
    }
}

impl Default for SparseCholeskySolver {
    fn default() -> Self {
        Self::new()
    }
}

impl SparseLinearSolver for SparseCholeskySolver {
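    /// Solves the Gauss-Newton normal equations `(J^T J) dx = -J^T r` with a sparse
    /// Cholesky factorization, caching the Hessian, gradient, and factorization.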
    fn solve_normal_equation(
        &mut self,
        residuals: &faer::Mat<f64>,
        jacobians: &sparse::SparseColMat<usize, f64>,
    ) -> LinAlgResult<faer::Mat<f64>> {
        // Hessian approximation H = J^T * J.
        let jt = jacobians.as_ref().transpose();
        let hessian = jt
            .to_col_major()
            .map_err(|_| {
                LinAlgError::MatrixConversion(
                    "Failed to convert transposed Jacobian to column-major format".to_string(),
                )
            })?
            .mul(jacobians.as_ref());

        // Gradient g = J^T * r.
        let gradient = jacobians.as_ref().transpose().mul(residuals);

        // Reuse the cached symbolic factorization when the sparsity pattern was analyzed before.
        let sym = if let Some(ref cached_sym) = self.symbolic_factorization {
            cached_sym.clone()
        } else {
            let new_sym = solvers::SymbolicLlt::try_new(hessian.symbolic(), faer::Side::Lower)
                .map_err(|_| {
                    LinAlgError::FactorizationFailed(
                        "Symbolic Cholesky decomposition failed".to_string(),
                    )
                })?;
            self.symbolic_factorization = Some(new_sym.clone());
            new_sym
        };

        let cholesky =
            solvers::Llt::try_new_with_symbolic(sym, hessian.as_ref(), faer::Side::Lower)
                .map_err(|_| LinAlgError::SingularMatrix)?;

        // Solve H * dx = -g and cache the intermediate results.
        let dx = cholesky.solve(-&gradient);
        self.hessian = Some(hessian);
        self.gradient = Some(gradient);
        self.factorizer = Some(cholesky);

        Ok(dx)
    }

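    /// Solves the Levenberg-Marquardt augmented system `(J^T J + lambda * I) dx = -J^T r`.
    /// The un-augmented Hessian `J^T J` and the gradient are cached for later inspection.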
    fn solve_augmented_equation(
        &mut self,
        residuals: &faer::Mat<f64>,
        jacobians: &sparse::SparseColMat<usize, f64>,
        lambda: f64,
    ) -> LinAlgResult<faer::Mat<f64>> {
        let n = jacobians.ncols();

        // Hessian approximation H = J^T * J.
        let jt = jacobians.as_ref().transpose();
        let hessian = jt
            .to_col_major()
            .map_err(|_| {
                LinAlgError::MatrixConversion(
                    "Failed to convert transposed Jacobian to column-major format".to_string(),
                )
            })?
            .mul(jacobians.as_ref());

        // Gradient g = J^T * r.
        let gradient = jacobians.as_ref().transpose().mul(residuals);

        // Build the damping term lambda * I as a sparse diagonal matrix.
        let mut lambda_i_triplets = Vec::with_capacity(n);
        for i in 0..n {
            lambda_i_triplets.push(faer::sparse::Triplet::new(i, i, lambda));
        }
        let lambda_i = sparse::SparseColMat::try_new_from_triplets(n, n, &lambda_i_triplets)
            .map_err(|e| {
                LinAlgError::SparseMatrixCreation(format!(
                    "Failed to create lambda*I matrix: {:?}",
                    e
                ))
            })?;

        let augmented_hessian = &hessian + lambda_i;

        // Reuse the cached symbolic factorization when the sparsity pattern was analyzed before.
        let sym = if let Some(ref cached_sym) = self.symbolic_factorization {
            cached_sym.clone()
        } else {
            let new_sym =
                solvers::SymbolicLlt::try_new(augmented_hessian.symbolic(), faer::Side::Lower)
                    .map_err(|_| {
                        LinAlgError::FactorizationFailed(
                            "Symbolic Cholesky decomposition failed for augmented system"
                                .to_string(),
                        )
                    })?;
            self.symbolic_factorization = Some(new_sym.clone());
            new_sym
        };

        let cholesky =
            solvers::Llt::try_new_with_symbolic(sym, augmented_hessian.as_ref(), faer::Side::Lower)
                .map_err(|_| LinAlgError::SingularMatrix)?;

        // Solve (H + lambda * I) * dx = -g and cache the intermediate results.
        let dx = cholesky.solve(-&gradient);
        self.hessian = Some(hessian);
        self.gradient = Some(gradient);
        self.factorizer = Some(cholesky);

        Ok(dx)
    }

    fn get_hessian(&self) -> Option<&sparse::SparseColMat<usize, f64>> {
        self.hessian.as_ref()
    }

    fn get_gradient(&self) -> Option<&faer::Mat<f64>> {
        self.gradient.as_ref()
    }

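    /// Computes (and caches) the covariance matrix as the inverse of the factorized
    /// Hessian, obtained by solving `H * X = I` with the cached Cholesky factorization.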
    fn compute_covariance_matrix(&mut self) -> Option<&faer::Mat<f64>> {
        if self.covariance_matrix.is_none()
            && let (Some(factorizer), Some(hessian)) = (&self.factorizer, &self.hessian)
        {
            let n = hessian.ncols();
            let identity = faer::Mat::identity(n, n);

            // Covariance = H^{-1}: solve H * X = I using the cached factorization.
            let cov_matrix = factorizer.solve(&identity);
            self.covariance_matrix = Some(cov_matrix);
        }
        self.covariance_matrix.as_ref()
    }

    fn get_covariance_matrix(&self) -> Option<&faer::Mat<f64>> {
        self.covariance_matrix.as_ref()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use faer::Mat;
    use faer::sparse::SparseColMat;

    const TOLERANCE: f64 = 1e-10;

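    /// Builds a small, well-conditioned 4x3 sparse Jacobian and a matching 4x1 residual
    /// vector shared by the tests below.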
    fn create_test_data() -> (SparseColMat<usize, f64>, Mat<f64>) {
        let triplets = vec![
            faer::sparse::Triplet::new(0, 0, 2.0),
            faer::sparse::Triplet::new(0, 1, 1.0),
            faer::sparse::Triplet::new(1, 0, 1.0),
            faer::sparse::Triplet::new(1, 1, 3.0),
            faer::sparse::Triplet::new(1, 2, 1.0),
            faer::sparse::Triplet::new(2, 1, 1.0),
            faer::sparse::Triplet::new(2, 2, 2.0),
            faer::sparse::Triplet::new(3, 0, 1.5),
            faer::sparse::Triplet::new(3, 2, 0.5),
        ];
        let jacobian = SparseColMat::try_new_from_triplets(4, 3, &triplets).unwrap();

        let residuals = Mat::from_fn(4, 1, |i, _| match i {
            0 => 1.0,
            1 => -2.0,
            2 => 0.5,
            3 => 1.2,
            _ => 0.0,
        });

        (jacobian, residuals)
    }

    #[test]
    fn test_solver_creation() {
        let solver = SparseCholeskySolver::new();
        assert!(solver.factorizer.is_none());

        let default_solver = SparseCholeskySolver::default();
        assert!(default_solver.factorizer.is_none());
    }

    #[test]
    fn test_solve_normal_equation_well_conditioned() {
        let mut solver = SparseCholeskySolver::new();
        let (jacobian, residuals) = create_test_data();

        let result = solver.solve_normal_equation(&residuals, &jacobian);
        assert!(result.is_ok());

        let solution = result.unwrap();
        assert_eq!(solution.nrows(), 3);
        assert_eq!(solution.ncols(), 1);

        assert!(solver.factorizer.is_some());
    }

    #[test]
    fn test_symbolic_pattern_caching() {
        let mut solver = SparseCholeskySolver::new();
        let (jacobian, residuals) = create_test_data();

        let result1 = solver.solve_normal_equation(&residuals, &jacobian);
        assert!(result1.is_ok());
        assert!(solver.factorizer.is_some());

        // The second solve reuses the cached symbolic factorization.
        let result2 = solver.solve_normal_equation(&residuals, &jacobian);
        assert!(result2.is_ok());

        // Both solves must produce the same solution.
        let sol1 = result1.unwrap();
        let sol2 = result2.unwrap();
        for i in 0..sol1.nrows() {
            assert!((sol1[(i, 0)] - sol2[(i, 0)]).abs() < TOLERANCE);
        }
    }

    #[test]
    fn test_solve_augmented_equation() {
        let mut solver = SparseCholeskySolver::new();
        let (jacobian, residuals) = create_test_data();
        let lambda = 0.1;

        let result = solver.solve_augmented_equation(&residuals, &jacobian, lambda);
        assert!(result.is_ok());

        let solution = result.unwrap();
        assert_eq!(solution.nrows(), 3);
        assert_eq!(solution.ncols(), 1);
    }

    #[test]
    fn test_augmented_equation_different_lambdas() {
        let mut solver = SparseCholeskySolver::new();
        let (jacobian, residuals) = create_test_data();

        let lambda1 = 0.01;
        let lambda2 = 1.0;

        let result1 = solver.solve_augmented_equation(&residuals, &jacobian, lambda1);
        let result2 = solver.solve_augmented_equation(&residuals, &jacobian, lambda2);

        assert!(result1.is_ok());
        assert!(result2.is_ok());

        let sol1 = result1.unwrap();
        let sol2 = result2.unwrap();
        let mut different = false;
        for i in 0..sol1.nrows() {
            if (sol1[(i, 0)] - sol2[(i, 0)]).abs() > TOLERANCE {
                different = true;
                break;
            }
        }
        assert!(
            different,
            "Solutions should differ with different lambda values"
        );
    }

    #[test]
    fn test_singular_matrix() {
        let mut solver = SparseCholeskySolver::new();

        // The second row is twice the first, so the Jacobian (and J^T J) is rank deficient.
        let triplets = vec![
            faer::sparse::Triplet::new(0, 0, 1.0),
            faer::sparse::Triplet::new(0, 1, 2.0),
            faer::sparse::Triplet::new(1, 0, 2.0),
            faer::sparse::Triplet::new(1, 1, 4.0),
        ];
        let singular_jacobian = SparseColMat::try_new_from_triplets(2, 2, &triplets).unwrap();
        let residuals = Mat::from_fn(2, 1, |i, _| i as f64);

        let result = solver.solve_normal_equation(&residuals, &singular_jacobian);
        assert!(result.is_err(), "Singular matrix should return Err");
    }

    #[test]
    fn test_empty_matrix() {
        let mut solver = SparseCholeskySolver::new();

        let empty_jacobian = SparseColMat::try_new_from_triplets(0, 0, &[]).unwrap();
        let empty_residuals = Mat::zeros(0, 1);

        let result = solver.solve_normal_equation(&empty_residuals, &empty_jacobian);
        if let Ok(solution) = result {
            assert_eq!(solution.nrows(), 0);
        }
    }

    #[test]
    fn test_numerical_accuracy() {
        let mut solver = SparseCholeskySolver::new();

        // Identity Jacobian: the normal equations reduce to dx = -r.
        let triplets = vec![
            faer::sparse::Triplet::new(0, 0, 1.0),
            faer::sparse::Triplet::new(0, 1, 0.0),
            faer::sparse::Triplet::new(1, 0, 0.0),
            faer::sparse::Triplet::new(1, 1, 1.0),
        ];
        let jacobian = SparseColMat::try_new_from_triplets(2, 2, &triplets).unwrap();
        let residuals = Mat::from_fn(2, 1, |i, _| -((i + 1) as f64));

        let result = solver.solve_normal_equation(&residuals, &jacobian);
        assert!(result.is_ok());

        // Expected solution: (1.0, 2.0).
        let solution = result.unwrap();
        assert!((solution[(0, 0)] - 1.0).abs() < TOLERANCE);
        assert!((solution[(1, 0)] - 2.0).abs() < TOLERANCE);
    }

    #[test]
    fn test_solver_clone() {
        let solver1 = SparseCholeskySolver::new();
        let solver2 = solver1.clone();

        assert!(solver1.factorizer.is_none());
        assert!(solver2.factorizer.is_none());
    }

    #[test]
    fn test_cholesky_covariance_computation() {
        let mut solver = SparseCholeskySolver::new();
        let (jacobian, residuals) = create_test_data();

        let result = solver.solve_normal_equation(&residuals, &jacobian);
        assert!(result.is_ok());

        let cov_matrix = solver.compute_covariance_matrix();
        assert!(cov_matrix.is_some());

        let cov = cov_matrix.unwrap();
        assert_eq!(cov.nrows(), 3);
        assert_eq!(cov.ncols(), 3);

        for i in 0..3 {
            for j in 0..3 {
                assert!(
                    (cov[(i, j)] - cov[(j, i)]).abs() < TOLERANCE,
                    "Covariance matrix should be symmetric"
                );
            }
        }

        for i in 0..3 {
            assert!(
                cov[(i, i)] > 0.0,
                "Diagonal elements (variances) should be positive"
            );
        }
    }

    #[test]
    fn test_cholesky_standard_errors_computation() {
        let mut solver = SparseCholeskySolver::new();
        let (jacobian, residuals) = create_test_data();

        let result = solver.solve_normal_equation(&residuals, &jacobian);
        assert!(result.is_ok());

        solver.compute_standard_errors();

        assert!(solver.covariance_matrix.is_some());
        assert!(solver.standard_errors.is_some());

        let cov = solver.covariance_matrix.as_ref().unwrap();
        let errors = solver.standard_errors.as_ref().unwrap();

        assert_eq!(errors.nrows(), 3);
        assert_eq!(errors.ncols(), 1);

        for i in 0..3 {
            assert!(errors[(i, 0)] > 0.0, "Standard errors should be positive");
        }

        for i in 0..3 {
            let expected_std_error = cov[(i, i)].sqrt();
            assert!(
                (errors[(i, 0)] - expected_std_error).abs() < TOLERANCE,
                "Standard error should equal sqrt of covariance diagonal"
            );
        }
    }

    #[test]
    fn test_cholesky_covariance_positive_definite() {
        let mut solver = SparseCholeskySolver::new();

        let triplets = vec![
            faer::sparse::Triplet::new(0, 0, 3.0),
            faer::sparse::Triplet::new(0, 1, 1.0),
            faer::sparse::Triplet::new(1, 0, 1.0),
            faer::sparse::Triplet::new(1, 1, 2.0),
        ];
        let jacobian = SparseColMat::try_new_from_triplets(2, 2, &triplets).unwrap();
        let residuals = Mat::from_fn(2, 1, |i, _| (i + 1) as f64);

        let result = solver.solve_normal_equation(&residuals, &jacobian);
        assert!(result.is_ok());

        let cov_matrix = solver.compute_covariance_matrix();
        assert!(cov_matrix.is_some());

        // H = J^T J = [[10, 5], [5, 5]], so the covariance H^{-1} = [[0.2, -0.2], [-0.2, 0.4]].
        let cov = cov_matrix.unwrap();
        assert!((cov[(0, 0)] - 0.2).abs() < TOLERANCE);
        assert!((cov[(1, 1)] - 0.4).abs() < TOLERANCE);
        assert!((cov[(0, 1)] - (-0.2)).abs() < TOLERANCE);
        assert!((cov[(1, 0)] - (-0.2)).abs() < TOLERANCE);
    }

    #[test]
    fn test_cholesky_covariance_caching() {
        let mut solver = SparseCholeskySolver::new();
        let (jacobian, residuals) = create_test_data();

        let result = solver.solve_normal_equation(&residuals, &jacobian);
        assert!(result.is_ok());

        solver.compute_covariance_matrix();
        assert!(solver.covariance_matrix.is_some());

        let cov1_ptr = solver.covariance_matrix.as_ref().unwrap().as_ptr();

        solver.compute_covariance_matrix();
        assert!(solver.covariance_matrix.is_some());

        let cov2_ptr = solver.covariance_matrix.as_ref().unwrap().as_ptr();

        assert_eq!(cov1_ptr, cov2_ptr, "Covariance matrix should be cached");
    }

    #[test]
    fn test_cholesky_decomposition_properties() {
        let mut solver = SparseCholeskySolver::new();

        let triplets = vec![
            faer::sparse::Triplet::new(0, 0, 2.0),
            faer::sparse::Triplet::new(1, 1, 3.0),
        ];
        let jacobian = SparseColMat::try_new_from_triplets(2, 2, &triplets).unwrap();
        let residuals = Mat::from_fn(2, 1, |i, _| (i + 1) as f64);

        let result = solver.solve_normal_equation(&residuals, &jacobian);
        assert!(result.is_ok());

        assert!(solver.factorizer.is_some());
        assert!(solver.hessian.is_some());

        let hessian = solver.hessian.as_ref().unwrap();
        assert_eq!(hessian.nrows(), 2);
        assert_eq!(hessian.ncols(), 2);
    }

    #[test]
    fn test_cholesky_numerical_stability() {
        let mut solver = SparseCholeskySolver::new();

        // Identity Jacobian: dx = -r = (1, 2, 3) and the covariance is the identity.
        let triplets = vec![
            faer::sparse::Triplet::new(0, 0, 1.0),
            faer::sparse::Triplet::new(1, 1, 1.0),
            faer::sparse::Triplet::new(2, 2, 1.0),
        ];
        let jacobian = SparseColMat::try_new_from_triplets(3, 3, &triplets).unwrap();
        let residuals = Mat::from_fn(3, 1, |i, _| -((i + 1) as f64));

        let result = solver.solve_normal_equation(&residuals, &jacobian);
        assert!(result.is_ok());

        let solution = result.unwrap();
        for i in 0..3 {
            let expected = (i + 1) as f64;
            assert!(
                (solution[(i, 0)] - expected).abs() < TOLERANCE,
                "Expected {}, got {}",
                expected,
                solution[(i, 0)]
            );
        }

        let cov_matrix = solver.compute_covariance_matrix();
        assert!(cov_matrix.is_some());
        let cov = cov_matrix.unwrap();

        for i in 0..3 {
            for j in 0..3 {
                let expected = if i == j { 1.0 } else { 0.0 };
                assert!(
                    (cov[(i, j)] - expected).abs() < TOLERANCE,
                    "Covariance[{}, {}] expected {}, got {}",
                    i,
                    j,
                    expected,
                    cov[(i, j)]
                );
            }
        }
    }
}