1use std::alloc::Layout;
2use std::cell::Cell;
3use std::marker::PhantomData;
4use std::mem;
5use std::ops::Index;
6use std::ptr::NonNull;
7use std::rc::Rc;
8
9pub use raw::{KluData, KluIndex};
10
11mod raw;
12#[cfg(test)]
13mod test;
14
/// Owned wrapper around KLU's "common" settings/status object for index type `I`.
///
/// The underlying `I::KluCommon` value is heap-allocated and initialized via
/// `I::klu_defaults` (see [`KluSettings::new`]); the KLU C API reads and
/// writes it through the raw pointer handed out by [`KluSettings::as_ffi`].
#[derive(Debug)]
pub struct KluSettings<I: KluIndex> {
    // Boxed so the address handed to the C library stays stable for the
    // lifetime of this object.
    data: Box<I::KluCommon>,
}
19
20impl<I: KluIndex> KluSettings<I> {
21 pub fn new() -> Self {
22 unsafe {
23 let raw = std::alloc::alloc(Layout::new::<I::KluCommon>()) as *mut I::KluCommon;
24 I::klu_defaults(raw);
25 Self {
26 data: Box::from_raw(raw),
27 }
28 }
29 }
30
31 pub fn as_ffi(&self) -> *mut I::KluCommon {
32 self.data.as_ref() as *const I::KluCommon as *mut I::KluCommon
33 }
34
35 pub fn check_status(&self) {
36 I::check_status(&self.data)
37 }
38
39 pub fn is_singular(&self) -> bool {
40 I::is_singular(&self.data)
41 }
42
43 pub fn get_rcond(&self) -> f64 {
44 I::get_rcond(&self.data)
45 }
46}
47
48impl<I: KluIndex> Default for KluSettings<I> {
49 fn default() -> Self {
50 Self::new()
51 }
52}
53
/// A sparse matrix with a fixed sparsity pattern (described by `spec`) whose
/// non-zero values can be written, LU-factorized, and used to solve linear
/// systems through KLU.
pub struct FixedKluMatrix<I: KluIndex, D: KluData> {
    // Shared sparsity pattern, symbolic analysis, and KLU settings.
    spec: Rc<KluMatrixSpec<I>>,
    // Leaked boxed slice holding the non-zero values in pattern order;
    // `None` only after `into_alloc` or `drop` has reclaimed it.
    data: Option<NonNull<[D]>>,
    // Numeric factorization produced by `lu_factorize`; freed on drop.
    klu_numeric: Option<NonNull<I::KluNumeric>>,
}
60
61impl<I: KluIndex, D: KluData> FixedKluMatrix<I, D> {
62 pub fn into_alloc(mut self) -> Vec<D> {
70 let klu_numeric = self.klu_numeric.take();
71 self.free_numeric(klu_numeric);
72 unsafe {
74 let data = self.data.take().unwrap().as_mut();
75 Box::from_raw(data).into()
76 }
77 }
78
79 pub fn new_with_alloc(spec: Rc<KluMatrixSpec<I>>, mut alloc: Vec<D>) -> Option<Self> {
90 if spec.row_indices.is_empty() {
91 return None;
92 }
93
94 alloc.fill(D::default());
95 alloc.resize(spec.row_indices.len(), D::default());
96 let data = Box::leak(alloc.into_boxed_slice());
101
102 Some(Self {
103 spec,
104 data: Some(data.into()),
105 klu_numeric: None,
106 })
107 }
108
109 pub fn new(spec: Rc<KluMatrixSpec<I>>) -> Option<Self> {
119 Self::new_with_alloc(spec, Vec::new())
120 }
121
122 pub fn data(&self) -> &[Cell<D>] {
123 unsafe { &*(self.data.unwrap().as_ptr() as *const [Cell<D>]) }
125 }
126
127 pub fn data_ptr(&self) -> *mut D {
137 self.data.unwrap().as_ptr() as *mut D
138 }
139
140 pub fn write_all(&self, val: D) {
141 for entry in self.data() {
142 entry.set(val)
143 }
144 }
145
146 pub fn write_zero(&self) {
147 unsafe { self.data_ptr().write_bytes(0, self.data().len()) }
149 }
150
151 pub fn lu_factorize(&mut self, refactor_threshold: Option<f64>) -> bool {
158 match (self.klu_numeric, refactor_threshold) {
159 (Some(klu_numeric), None) => unsafe {
160 D::klu_free_numeric::<I>(&mut klu_numeric.as_ptr(), self.spec.settings.as_ffi())
161 },
162 (Some(klu_numeric), Some(rcond_threshold)) => {
163 let res = unsafe {
164 D::klu_refactor(
165 self.spec.column_offsets.as_ptr(),
167 self.spec.row_indices.as_ptr(),
168 self.data_ptr(),
169 self.spec.klu_symbolic.as_ptr(),
170 klu_numeric.as_ptr(),
171 self.spec.settings.as_ffi(),
172 ) && D::klu_rcond::<I>(
173 self.spec.klu_symbolic.as_ptr(),
174 klu_numeric.as_ptr(),
175 self.spec.settings.as_ffi(),
176 )
177 };
178 self.spec.settings.check_status();
179 if !self.spec.settings.is_singular()
180 && self.spec.settings.get_rcond() <= rcond_threshold
181 {
182 assert!(res, "KLU produced unkown error");
184 return false;
185 }
186
187 unsafe {
188 D::klu_free_numeric::<I>(&mut klu_numeric.as_ptr(), self.spec.settings.as_ffi())
189 }
190 self.klu_numeric = None;
191 }
192
193 _ => (),
194 };
195
196 let klu_numeric = unsafe {
197 D::klu_factor(
198 self.spec.column_offsets.as_ptr(),
200 self.spec.row_indices.as_ptr(),
201 self.data_ptr(),
202 self.spec.klu_symbolic.as_ptr(),
203 self.spec.settings.as_ffi(),
204 )
205 };
206 self.spec.settings.check_status();
207 if self.spec.settings.is_singular() {
208 return true;
209 }
210
211 let klu_numeric = NonNull::new(klu_numeric).expect("KLU retruned a valid numeric object");
212 self.klu_numeric = Some(klu_numeric);
213 false
214 }
215
216 pub fn solve_linear_system(&self, rhs: &mut [D]) {
222 let klu_numeric = self
225 .klu_numeric
226 .expect("factorize must be called before solve");
227 let res = unsafe {
228 D::klu_solve::<I>(
229 self.spec.klu_symbolic.as_ptr(),
230 klu_numeric.as_ptr(),
231 I::from_usize(rhs.len()),
232 I::from_usize(1),
233 rhs.as_mut_ptr(),
234 self.spec.settings.as_ffi(),
235 )
236 };
237
238 self.spec.settings.check_status();
239
240 assert!(res, "KLU produced unkown error");
241 }
242
243 pub fn solve_linear_tranose_system(&self, rhs: &mut [D]) {
249 let klu_numeric = self
252 .klu_numeric
253 .expect("factorize must be called before solve");
254 let res = unsafe {
255 D::klu_tsolve::<I>(
256 self.spec.klu_symbolic.as_ptr(),
257 klu_numeric.as_ptr(),
258 I::from_usize(rhs.len()),
259 I::from_usize(1),
260 rhs.as_mut_ptr(),
261 self.spec.settings.as_ffi(),
262 )
263 };
264
265 self.spec.settings.check_status();
266
267 assert!(res, "KLU produced unkown error");
268 }
269 fn free_numeric(&self, klu_numeric: Option<NonNull<I::KluNumeric>>) {
270 if let Some(klu_numeric) = klu_numeric {
271 unsafe {
272 D::klu_free_numeric::<I>(&mut klu_numeric.as_ptr(), self.spec.settings.as_ffi())
273 }
274 }
275 }
276 pub fn get(&self, column: I, row: I) -> Option<&Cell<D>> {
277 let offset = self.spec.offset(column, row)?;
278 Some(&self[offset])
279 }
280}
281
282impl<I: KluIndex, D: KluData> Index<usize> for FixedKluMatrix<I, D> {
283 type Output = Cell<D>;
284
285 fn index(&self, i: usize) -> &Self::Output {
286 &self.data()[i]
287 }
288}
289
290impl<I: KluIndex, D: KluData> Index<(I, I)> for FixedKluMatrix<I, D> {
291 type Output = Cell<D>;
292
293 fn index(&self, (column, row): (I, I)) -> &Self::Output {
294 self.get(column, row).unwrap()
295 }
296}
297
298impl<I: KluIndex, D: KluData> Drop for FixedKluMatrix<I, D> {
299 fn drop(&mut self) {
300 let klu_numeric = self.klu_numeric.take();
301 self.free_numeric(klu_numeric);
302
303 if let Some(data) = self.data.take() {
304 unsafe {
305 Box::from_raw(data.as_ptr());
306 }
307 }
308 }
309}
310
/// Sparsity pattern (compressed-sparse-column layout) together with the KLU
/// symbolic analysis computed for it.
#[derive(Debug)]
pub struct KluMatrixSpec<I: KluIndex> {
    // CSC column start offsets; length = number of columns + 1.
    column_offsets: Box<[I]>,
    // Row index of every non-zero, stored column by column.
    row_indices: Box<[I]>,
    settings: KluSettings<I>,
    // Result of `klu_analyze`; owned by this struct and freed on drop.
    // NOTE(review): this is `NonNull::dangling()` between construction in
    // `new` and a successful `init` — confirm nothing can free it in that
    // window.
    klu_symbolic: NonNull<I::KluSymbolic>,
    // Marks ownership of the pointed-to symbolic object.
    pd: PhantomData<I::KluSymbolic>,
}
319
320impl<I: KluIndex> KluMatrixSpec<I> {
321 pub fn entry_cnt(&self) -> usize {
322 self.row_indices.len()
323 }
324
325 pub fn reinit(&mut self, columns: &[Vec<I>]) {
328 self.free_symbolic();
329 self.init(columns)
330 }
331
332 fn init(&mut self, columns: &[Vec<I>]) {
333 let mut column_offsets: Vec<_> =
334 mem::replace(&mut self.column_offsets, Box::new([])).into();
335 column_offsets.clear();
336
337 column_offsets.reserve(columns.len() + 1);
338 column_offsets.push(I::from_usize(0));
339 let mut num_entries = 0;
340 column_offsets.extend(columns.iter().map(|colum| {
341 num_entries += colum.len();
342 I::from_usize(num_entries)
343 }));
344
345 let num_cols = I::from_usize(columns.len());
346 let mut row_indices: Vec<_> = mem::replace(&mut self.row_indices, Box::new([])).into();
347 row_indices.clear();
348 row_indices.reserve(num_entries);
349
350 for colmun in columns {
351 row_indices.extend_from_slice(colmun)
352 }
353
354 let klu_symbolic = unsafe {
355 I::klu_analyze(
356 num_cols,
357 column_offsets.as_mut_ptr(),
358 row_indices.as_mut_ptr(),
359 self.settings.as_ffi(),
360 )
361 };
362
363 self.settings.check_status();
364 self.klu_symbolic = NonNull::new(klu_symbolic)
365 .expect("klu_analyze returns a non null pointer if the status is ok");
366 self.column_offsets = column_offsets.into_boxed_slice();
367 self.row_indices = row_indices.into_boxed_slice();
368 }
369
370 pub fn new(columns: &[Vec<I>], klu_settings: KluSettings<I>) -> Rc<Self> {
372 let mut res = Self {
373 column_offsets: Box::new([]),
374 row_indices: Box::new([]),
375 klu_symbolic: NonNull::dangling(),
376 settings: klu_settings,
377 pd: PhantomData,
378 };
379 res.init(columns);
380 Rc::new(res)
381 }
382
383 pub fn create_matrix<D: KluData>(self: Rc<Self>) -> Option<FixedKluMatrix<I, D>> {
384 FixedKluMatrix::new(self)
385 }
386
387 pub fn offset(&self, column: I, row: I) -> Option<usize> {
388 let column = column.into_usize();
389 let end = self.column_offsets[column + 1].into_usize();
390
391 let column_offset = self.column_offsets[column].into_usize();
392
393 let pos = self.row_indices[column_offset..end]
394 .iter()
395 .position(|&r| r == row)?;
396 Some(column_offset + pos)
397 }
398
399 fn free_symbolic(&self) {
400 unsafe { I::klu_free_symbolic(&mut self.klu_symbolic.as_ptr(), self.settings.as_ffi()) }
401 }
402}
403
impl<I: KluIndex> Drop for KluMatrixSpec<I> {
    // Frees the KLU symbolic object.
    // NOTE(review): if construction panicked before `klu_symbolic` was
    // assigned, this would free a `NonNull::dangling()` placeholder —
    // confirm whether that panic path is reachable.
    fn drop(&mut self) {
        self.free_symbolic()
    }
}
409
/// Incrementally collects the sparsity pattern of a `dim`×`dim` matrix as a
/// sorted, duplicate-free row-index list per column.
pub struct KluMatrixBuilder<I: KluIndex> {
    // One row-index list per column; may hold more than `dim` columns after
    // a `reset` to a smaller dimension.
    columns: Vec<Vec<I>>,
    // Current matrix dimension; only the first `dim` columns are exposed.
    dim: I,
}
414
415impl<I: KluIndex> KluMatrixBuilder<I> {
416 pub fn reset(&mut self, dim: I) {
417 self.ensure_dim(dim);
418 for column in &mut self.columns {
419 column.clear();
420 }
421 }
422
423 pub fn new(dim: I) -> Self {
424 Self {
425 columns: vec![Vec::with_capacity(64); dim.into_usize()],
426 dim,
427 }
428 }
429
430 fn ensure_dim(&mut self, dim: I) {
431 if self.dim < dim {
432 self.columns
433 .resize_with(dim.into_usize(), || Vec::with_capacity(64));
434 self.dim = dim;
435 }
436 }
437
438 pub fn add_entry(&mut self, column: I, row: I) {
439 self.ensure_dim(column + I::from_usize(1));
440 let column = &mut self.columns[column.into_usize()];
441 let dst = column.partition_point(|it| *it < row);
443 if column.get(dst).map_or(true, |&it| it != row) {
444 column.insert(dst, row)
445 }
446 }
447
448 pub fn columns(&self) -> &[Vec<I>] {
449 &self.columns[..self.dim.into_usize()]
450 }
451
452 pub fn finish(&self, klu_settings: KluSettings<I>) -> Rc<KluMatrixSpec<I>> {
453 KluMatrixSpec::new(self.columns(), klu_settings)
454 }
455
456 pub fn reinit(&self, spec: &mut KluMatrixSpec<I>) {
457 spec.reinit(self.columns())
458 }
459}