#![cfg_attr(not(feature = "std"), no_std)]
#![allow(non_snake_case)]
#![warn(missing_docs)]
#![warn(rustdoc::broken_intra_doc_links)]

extern crate alloc;
#[cfg(feature = "std")]
extern crate std;

#[macro_export]
macro_rules! make_guard {
    ($($name:ident),* $(,)?) => {$(
        #[allow(unused_unsafe)]
        let $name = unsafe { extern crate generativity; ::generativity::Id::new() };
        #[allow(unused, unused_unsafe)]
        let lifetime_brand = unsafe { extern crate generativity; ::generativity::LifetimeBrand::new(&$name) };
        #[allow(unused_unsafe)]
        let $name = unsafe { extern crate generativity; ::generativity::Guard::new($name) };
    )*};
}
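
// A minimal usage sketch for `make_guard!` (test-only, so it has no effect on the
// public API): each guard brands a unique invariant lifetime, which `Dim::new`
// then ties to a runtime dimension. The concrete value and the `Unbind` import
// are illustrative assumptions, not part of the macro itself.
#[cfg(test)]
mod make_guard_example {
    use crate::Unbind;

    #[test]
    fn brand_a_dimension() {
        crate::make_guard!(N);
        // `N` is a `generativity::Guard` whose lifetime is unique to this scope.
        let n = crate::utils::bound::Dim::new(3, N);
        assert_eq!(n.unbound(), 3);
    }
}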

macro_rules! repeat_n {
    ($e: expr, $n: expr) => {
        iter::repeat_n($e, $n)
    };
}

macro_rules! try_const {
    ($e: expr) => {
        ::pulp::try_const! { $e }
    };
}

use core::num::NonZeroUsize;
use core::sync::atomic::AtomicUsize;
use equator::{assert, debug_assert};
use faer_traits::*;

#[macro_export]
macro_rules! auto {
    ($ty: ty $(,)?) => {
        $crate::Auto::<$ty>::auto()
    };
}

macro_rules! dispatch {
    ($imp: expr, $ty: ident, $T: ty $(,)?) => {
        if try_const! { <$T>::IS_NATIVE_C32 } {
            unsafe { transmute(<ComplexImpl<f32> as ComplexField>::Arch::default().dispatch(transmute::<_, $ty<ComplexImpl<f32>>>($imp))) }
        } else if try_const! { <$T>::IS_NATIVE_C64 } {
            unsafe { transmute(<ComplexImpl<f64> as ComplexField>::Arch::default().dispatch(transmute::<_, $ty<ComplexImpl<f64>>>($imp))) }
        } else {
            <$T>::Arch::default().dispatch($imp)
        }
    };
}

macro_rules! stack_mat {
    ($name: ident, $m: expr, $n: expr, $A: expr, $N: expr, $T: ty $(,)?) => {
        let mut __tmp = {
            #[repr(align(64))]
            struct __Col<T, const A: usize>([T; A]);
            struct __Mat<T, const A: usize, const N: usize>([__Col<T, A>; N]);

            core::mem::MaybeUninit::<__Mat<$T, $A, $N>>::uninit()
        };
        let __stack = MemStack::new_any(core::slice::from_mut(&mut __tmp));
        let mut $name = $crate::linalg::temp_mat_zeroed::<$T, _, _>($m, $n, __stack).0;
        let mut $name = $name.as_mat_mut();
    };

    ($name: ident, $m: expr, $n: expr, $T: ty $(,)?) => {
        stack_mat!($name, $m, $n, $m, $n, $T)
    };
}

#[macro_export]
#[doc(hidden)]
macro_rules! __dbg {
    () => {
        std::eprintln!("[{}:{}:{}]", std::file!(), std::line!(), std::column!())
    };
    ($val:expr $(,)?) => {
        match $val {
            tmp => {
                std::eprintln!("[{}:{}:{}] {} = {:16.12?}",
                    std::file!(), std::line!(), std::column!(), std::stringify!($val), &tmp);
                tmp
            }
        }
    };
    ($($val:expr),+ $(,)?) => {
        ($($crate::__dbg!($val)),+,)
    };
}

#[cfg(feature = "perf-warn")]
#[macro_export]
#[doc(hidden)]
macro_rules! __perf_warn {
    ($name: ident) => {{
        #[inline(always)]
        #[allow(non_snake_case)]
        fn $name() -> &'static ::core::sync::atomic::AtomicBool {
            static $name: ::core::sync::atomic::AtomicBool = ::core::sync::atomic::AtomicBool::new(false);
            &$name
        }
        ::core::matches!(
            $name().compare_exchange(
                false,
                true,
                ::core::sync::atomic::Ordering::Relaxed,
                ::core::sync::atomic::Ordering::Relaxed,
            ),
            Ok(_)
        )
    }};
}

#[doc(hidden)]
#[macro_export]
macro_rules! with_dim {
    ($name: ident, $value: expr $(,)?) => {
        let __val__ = $value;
        $crate::make_guard!($name);
        let $name = $crate::utils::bound::Dim::new(__val__, $name);
    };

    ({$(let $name: ident = $value: expr;)*}) => {$(
        let __val__ = $value;
        $crate::make_guard!($name);
        let $name = $crate::utils::bound::Dim::new(__val__, $name);
    )*};
}

#[macro_export]
macro_rules! zip {
    ($head: expr $(,)?) => {
        $crate::linalg::zip::LastEq($crate::linalg::zip::IntoView::into_view($head), ::core::marker::PhantomData)
    };

    ($head: expr, $($tail: expr),* $(,)?) => {
        $crate::linalg::zip::ZipEq::new($crate::linalg::zip::IntoView::into_view($head), $crate::zip!($($tail,)*))
    };
}

#[macro_export]
macro_rules! Zip {
    ($head: ty $(,)?) => {
        $crate::linalg::zip::Last::<$head>
    };

    ($head: ty, $($tail: ty),* $(,)?) => {
        $crate::linalg::zip::Zip::<$head, $crate::Zip!($($tail,)*)>
    };
}

#[macro_export]
macro_rules! unzip {
    ($head: pat $(,)?) => {
        $crate::linalg::zip::Last($head)
    };

    ($head: pat, $($tail: pat),* $(,)?) => {
        $crate::linalg::zip::Zip($head, $crate::unzip!($($tail,)*))
    };
}
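
// Illustrative sketch (test-only) of the `zip!`/`unzip!` pair: iterate over several
// views in lockstep, destructuring each item with the matching `unzip!` pattern.
// The element access in the assertion assumes `Mat: Index<(usize, usize)>`.
#[cfg(test)]
mod zip_example {
    #[test]
    fn scale_into() {
        let a = crate::mat![[1.0, 2.0], [3.0, 4.0]];
        let mut b = crate::Mat::<f64>::zeros(2, 2);
        // Write `2 * a` into `b`, element by element.
        crate::zip!(&mut b, &a).for_each(|crate::unzip!(b, a)| *b = 2.0 * *a);
        assert_eq!(b[(1, 0)], 6.0);
    }
}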

#[macro_export]
#[doc(hidden)]
macro_rules! __transpose_impl {
    ([$([$($col:expr),*])*] $($v:expr;)* ) => {
        [$([$($col,)*],)* [$($v,)*]]
    };
    ([$([$($col:expr),*])*] $($v0:expr, $($v:expr),* ;)*) => {
        $crate::__transpose_impl!([$([$($col),*])* [$($v0),*]] $($($v),* ;)*)
    };
}

#[macro_export]
macro_rules! mat {
    () => {
        {
            compile_error!("number of columns in the matrix is ambiguous");
        }
    };

    ($([$($v:expr),* $(,)?] ),* $(,)?) => {
        {
            let __data = ::core::mem::ManuallyDrop::new($crate::__transpose_impl!([] $($($v),* ;)*));
            let __data = &*__data;
            let __ncols = __data.len();
            let __nrows = (*__data.get(0).unwrap()).len();

            #[allow(unused_unsafe)]
            unsafe {
                $crate::mat::Mat::from_fn(__nrows, __ncols, |i, j| ::core::ptr::from_ref(&__data[j][i]).read())
            }
        }
    };
}

#[macro_export]
macro_rules! col {
    ($($v: expr),* $(,)?) => {
        {
            let __data = ::core::mem::ManuallyDrop::new([$($v,)*]);
            let __data = &*__data;
            let __len = __data.len();

            #[allow(unused_unsafe)]
            unsafe {
                $crate::col::Col::from_fn(__len, |i| ::core::ptr::from_ref(&__data[i]).read())
            }
        }
    };
}

#[macro_export]
macro_rules! row {
    ($($v: expr),* $(,)?) => {
        {
            let __data = ::core::mem::ManuallyDrop::new([$($v,)*]);
            let __data = &*__data;
            let __len = __data.len();

            #[allow(unused_unsafe)]
            unsafe {
                $crate::row::Row::from_fn(__len, |i| ::core::ptr::from_ref(&__data[i]).read())
            }
        }
    };
}
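
// Quick shape check (test-only) for the `mat!`, `col!`, and `row!` constructors
// above; `mat!` takes its arguments row by row, so a 2-row literal with 3 entries
// per row yields a 2×3 matrix.
#[cfg(test)]
mod literal_constructor_example {
    #[test]
    fn shapes() {
        let m = crate::mat![[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]];
        let c = crate::col![1.0, 2.0, 3.0];
        let r = crate::row![1.0, 2.0, 3.0];
        assert_eq!((m.nrows(), m.ncols()), (2, 3));
        assert_eq!(c.nrows(), 3);
        assert_eq!(r.ncols(), 3);
    }
}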

#[doc(hidden)]
#[track_caller]
pub fn concat_impl<T: ComplexField>(blocks: &[&[(mat::MatRef<'_, T>, Conj)]]) -> mat::Mat<T> {
    #[inline(always)]
    fn count_total_columns<T: ComplexField>(block_row: &[(mat::MatRef<'_, T>, Conj)]) -> usize {
        let mut out: usize = 0;
        for (elem, _) in block_row.iter() {
            out += elem.ncols();
        }
        out
    }

    #[inline(always)]
    #[track_caller]
    fn count_rows<T: ComplexField>(block_row: &[(mat::MatRef<'_, T>, Conj)]) -> usize {
        let mut out: usize = 0;
        for (i, (e, _)) in block_row.iter().enumerate() {
            if i == 0 {
                out = e.nrows();
            } else {
                assert!(e.nrows() == out);
            }
        }
        out
    }

    let mut n: usize = 0;
    let mut m: usize = 0;
    for row in blocks.iter() {
        n += count_rows(row);
    }
    for (i, row) in blocks.iter().enumerate() {
        let cols = count_total_columns(row);
        if i == 0 {
            m = cols;
        } else {
            assert!(cols == m);
        }
    }

    let mut mat = mat::Mat::<T>::zeros(n, m);
    let mut ni: usize = 0;
    let mut mj: usize;
    for row in blocks.iter() {
        mj = 0;

        for (elem, conj) in row.iter() {
            let mut dst = mat.as_mut().submatrix_mut(ni, mj, elem.nrows(), elem.ncols());
            if *conj == Conj::No {
                dst.copy_from(elem);
            } else {
                dst.copy_from(elem.conjugate());
            }
            mj += elem.ncols();
        }
        ni += row[0].0.nrows();
    }

    mat
}

#[macro_export]
macro_rules! concat {
    () => {
        {
            compile_error!("number of columns in the matrix is ambiguous");
        }
    };

    ($([$($v:expr),* $(,)?] ),* $(,)?) => {
        {
            $crate::concat_impl(&[$(&[$(($v).as_ref().__canonicalize(),)*],)*])
        }
    };
}
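
// Block-matrix sketch (test-only) for `concat!`: each inner bracket is one block
// row, and `concat_impl` checks that row heights and total widths agree. The
// shapes below are chosen so that a 2×2, a 2×1, and a 1×3 block tile a 3×3 matrix.
#[cfg(test)]
mod concat_example {
    #[test]
    fn block_matrix() {
        let a = crate::mat![[1.0, 2.0], [3.0, 4.0]];
        let b = crate::mat![[5.0], [6.0]];
        let c = crate::mat![[7.0, 8.0, 9.0]];
        let m = crate::concat![[a, b], [c]];
        assert_eq!((m.nrows(), m.ncols()), (3, 3));
    }
}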

pub mod utils;

pub mod diag;
pub mod mat;
pub mod perm;

pub mod col;
pub mod row;

pub mod linalg;
#[path = "./operator/mod.rs"]
pub mod matrix_free;
pub mod sparse;

#[cfg(feature = "std")]
pub mod io;

#[cfg(feature = "serde")]
mod serde;

pub trait Index: traits::IndexCore + traits::Index + seal::Seal {}
impl<T: faer_traits::Index<Signed: seal::Seal> + seal::Seal> Index for T {}

mod seal {
    pub trait Seal {}
    impl<T: faer_traits::Seal> Seal for T {}
    impl Seal for crate::utils::bound::Dim<'_> {}
    impl<I: crate::Index> Seal for crate::utils::bound::Idx<'_, I> {}
    impl<I: crate::Index> Seal for crate::utils::bound::IdxInc<'_, I> {}
    impl<I: crate::Index> Seal for crate::utils::bound::MaybeIdx<'_, I> {}
    impl<I: crate::Index> Seal for crate::utils::bound::IdxIncOne<I> {}
    impl<I: crate::Index> Seal for crate::utils::bound::MaybeIdxOne<I> {}
    impl Seal for crate::utils::bound::One {}
    impl Seal for crate::utils::bound::Zero {}
    impl Seal for crate::ContiguousFwd {}
    impl Seal for crate::ContiguousBwd {}
}

pub trait Unbind<I = usize>: Send + Sync + Copy + core::fmt::Debug + seal::Seal {
    unsafe fn new_unbound(idx: I) -> Self;

    fn unbound(self) -> I;
}

pub type Idx<Dim, I = usize> = <Dim as ShapeIdx>::Idx<I>;
pub type IdxInc<Dim, I = usize> = <Dim as ShapeIdx>::IdxInc<I>;
pub type MaybeIdx<Dim, I = usize> = <Dim as ShapeIdx>::MaybeIdx<I>;

pub trait ShapeIdx {
    type Idx<I: Index>: Unbind<I> + Ord + Eq;
    type IdxInc<I: Index>: Unbind<I> + Ord + Eq + From<Idx<Self, I>>;
    type MaybeIdx<I: Index>: Unbind<I::Signed> + Ord + Eq;
}

pub trait Shape: Unbind + Ord + ShapeIdx<Idx<usize>: Ord + Eq + PartialOrd<Self>, IdxInc<usize>: Ord + Eq + PartialOrd<Self>> {
    const IS_BOUND: bool = true;

    #[inline]
    fn bind<'n>(self, guard: generativity::Guard<'n>) -> utils::bound::Dim<'n> {
        utils::bound::Dim::new(self.unbound(), guard)
    }

    #[inline]
    fn cast_idx_slice<I: Index>(slice: &[Idx<Self, I>]) -> &[I] {
        unsafe { core::slice::from_raw_parts(slice.as_ptr() as _, slice.len()) }
    }

    #[inline]
    fn cast_idx_inc_slice<I: Index>(slice: &[IdxInc<Self, I>]) -> &[I] {
        unsafe { core::slice::from_raw_parts(slice.as_ptr() as _, slice.len()) }
    }

    #[inline(always)]
    fn start() -> IdxInc<Self> {
        unsafe { IdxInc::<Self>::new_unbound(0) }
    }

    #[inline(always)]
    fn next(idx: Idx<Self>) -> IdxInc<Self> {
        unsafe { IdxInc::<Self>::new_unbound(idx.unbound() + 1) }
    }

    #[inline(always)]
    fn end(self) -> IdxInc<Self> {
        unsafe { IdxInc::<Self>::new_unbound(self.unbound()) }
    }

    #[inline(always)]
    fn idx(self, idx: usize) -> Option<Idx<Self>> {
        if idx < self.unbound() {
            Some(unsafe { Idx::<Self>::new_unbound(idx) })
        } else {
            None
        }
    }

    #[inline(always)]
    fn idx_inc(self, idx: usize) -> Option<IdxInc<Self>> {
        if idx <= self.unbound() {
            Some(unsafe { IdxInc::<Self>::new_unbound(idx) })
        } else {
            None
        }
    }

    #[inline(always)]
    fn checked_idx(self, idx: usize) -> Idx<Self> {
        equator::assert!(idx < self.unbound());
        unsafe { Idx::<Self>::new_unbound(idx) }
    }

    #[inline(always)]
    fn checked_idx_inc(self, idx: usize) -> IdxInc<Self> {
        equator::assert!(idx <= self.unbound());
        unsafe { IdxInc::<Self>::new_unbound(idx) }
    }

    #[inline(always)]
    unsafe fn unchecked_idx(self, idx: usize) -> Idx<Self> {
        equator::debug_assert!(idx < self.unbound());
        unsafe { Idx::<Self>::new_unbound(idx) }
    }

    #[inline(always)]
    unsafe fn unchecked_idx_inc(self, idx: usize) -> IdxInc<Self> {
        equator::debug_assert!(idx <= self.unbound());
        unsafe { IdxInc::<Self>::new_unbound(idx) }
    }

    #[inline(always)]
    fn indices(from: IdxInc<Self>, to: IdxInc<Self>) -> impl Clone + ExactSizeIterator + DoubleEndedIterator<Item = Idx<Self>> {
        (from.unbound()..to.unbound()).map(
            #[inline(always)]
            |i| unsafe { Idx::<Self>::new_unbound(i) },
        )
    }
}

impl<T: Send + Sync + Copy + core::fmt::Debug + faer_traits::Seal> Unbind<T> for T {
    #[inline(always)]
    unsafe fn new_unbound(idx: T) -> Self {
        idx
    }

    #[inline(always)]
    fn unbound(self) -> T {
        self
    }
}

impl ShapeIdx for usize {
    type Idx<I: Index> = I;
    type IdxInc<I: Index> = I;
    type MaybeIdx<I: Index> = I::Signed;
}
impl Shape for usize {
    const IS_BOUND: bool = false;
}
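
// Small sketch (test-only) of the unbound `Shape` impl for `usize`: indices are
// plain `usize` values, and `idx`/`idx_inc` perform the usual bound checks.
#[cfg(test)]
mod shape_example {
    use crate::Shape;

    #[test]
    fn unbound_indices() {
        let n: usize = 4;
        assert_eq!(n.idx(3), Some(3));
        assert_eq!(n.idx(4), None);
        assert_eq!(n.idx_inc(4), Some(4));
        assert_eq!(n.end(), 4);
    }
}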

pub trait Stride: seal::Seal + core::fmt::Debug + Copy + Send + Sync + 'static {
    type Rev: Stride<Rev = Self>;
    fn rev(self) -> Self::Rev;

    fn element_stride(self) -> isize;
}

impl Stride for isize {
    type Rev = Self;

    #[inline(always)]
    fn rev(self) -> Self::Rev {
        -self
    }

    #[inline(always)]
    fn element_stride(self) -> isize {
        self
    }
}

#[derive(Copy, Clone, Debug)]
pub struct ContiguousFwd;
#[derive(Copy, Clone, Debug)]
pub struct ContiguousBwd;

impl Stride for ContiguousFwd {
    type Rev = ContiguousBwd;

    #[inline(always)]
    fn rev(self) -> Self::Rev {
        ContiguousBwd
    }

    #[inline(always)]
    fn element_stride(self) -> isize {
        1
    }
}

impl Stride for ContiguousBwd {
    type Rev = ContiguousFwd;

    #[inline(always)]
    fn rev(self) -> Self::Rev {
        ContiguousFwd
    }

    #[inline(always)]
    fn element_stride(self) -> isize {
        -1
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum TryReserveError {
    CapacityOverflow,
    AllocError {
        layout: core::alloc::Layout,
    },
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum Conj {
    No,
    Yes,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub(crate) enum DiagStatus {
    Unit,
    Generic,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum Accum {
    Replace,
    Add,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum Side {
    Lower,
    Upper,
}

impl Conj {
    #[inline]
    pub const fn is_conj(self) -> bool {
        matches!(self, Conj::Yes)
    }

    #[inline]
    pub const fn compose(self, other: Self) -> Self {
        match (self, other) {
            (Conj::No, Conj::No) => Conj::No,
            (Conj::Yes, Conj::Yes) => Conj::No,
            (Conj::No, Conj::Yes) => Conj::Yes,
            (Conj::Yes, Conj::No) => Conj::Yes,
        }
    }

    #[inline]
    pub const fn get<T: Conjugate>() -> Self {
        if T::IS_CANONICAL { Self::No } else { Self::Yes }
    }

    #[inline]
    pub(crate) fn apply<T: Conjugate>(value: &T) -> T::Canonical {
        let value = unsafe { &*(value as *const T as *const T::Canonical) };

        if try_const! { matches!(Self::get::<T>(), Conj::Yes) } {
            T::Canonical::conj_impl(value)
        } else {
            T::Canonical::copy_impl(value)
        }
    }

    #[inline]
    pub(crate) fn apply_rt<T: ComplexField>(self, value: &T) -> T {
        if self.is_conj() { T::conj_impl(value) } else { T::copy_impl(value) }
    }
}
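
// Tiny check (test-only) of `Conj::compose`: composing two conjugations cancels
// them out, while mixing a conjugation with the identity keeps it.
#[cfg(test)]
mod conj_example {
    use crate::Conj;

    #[test]
    fn compose() {
        assert_eq!(Conj::Yes.compose(Conj::Yes), Conj::No);
        assert_eq!(Conj::Yes.compose(Conj::No), Conj::Yes);
        assert_eq!(Conj::No.compose(Conj::No), Conj::No);
    }
}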

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Par {
    Seq,
    #[cfg(feature = "rayon")]
    Rayon(NonZeroUsize),
}

impl Par {
    #[inline]
    #[cfg(feature = "rayon")]
    pub fn rayon(nthreads: usize) -> Self {
        if nthreads == 0 {
            Self::Rayon(NonZeroUsize::new(rayon::current_num_threads()).unwrap())
        } else {
            Self::Rayon(NonZeroUsize::new(nthreads).unwrap())
        }
    }

    #[inline]
    pub fn degree(&self) -> usize {
        utils::thread::parallelism_degree(*self)
    }
}
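
// Sketch (test-only) of `Par::degree`. The exact value asserted here assumes that
// `utils::thread::parallelism_degree` maps `Par::Seq` to a single thread, which is
// how the rest of the crate treats sequential execution.
#[cfg(test)]
mod par_example {
    #[test]
    fn seq_degree() {
        assert_eq!(crate::Par::Seq.degree(), 1);
    }
}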
988
989#[allow(non_camel_case_types)]
990pub type c32 = traits::c32;
992#[allow(non_camel_case_types)]
993pub type c64 = traits::c64;
995#[allow(non_camel_case_types)]
996pub type cx128 = traits::cx128;
998#[allow(non_camel_case_types)]
999pub type fx128 = traits::fx128;
1001
1002pub use col::{Col, ColMut, ColRef};
1003pub use mat::{Mat, MatMut, MatRef};
1004pub use row::{Row, RowMut, RowRef};

#[allow(unused_imports, dead_code)]
mod internal_prelude {
    pub trait DivCeil: Sized {
        fn msrv_div_ceil(self, rhs: Self) -> Self;
        fn msrv_next_multiple_of(self, rhs: Self) -> Self;
        fn msrv_checked_next_multiple_of(self, rhs: Self) -> Option<Self>;
    }

    impl DivCeil for usize {
        #[inline]
        fn msrv_div_ceil(self, rhs: Self) -> Self {
            let d = self / rhs;
            let r = self % rhs;
            if r > 0 { d + 1 } else { d }
        }

        #[inline]
        fn msrv_next_multiple_of(self, rhs: Self) -> Self {
            match self % rhs {
                0 => self,
                r => self + (rhs - r),
            }
        }

        #[inline]
        fn msrv_checked_next_multiple_of(self, rhs: Self) -> Option<Self> {
            match self.checked_rem(rhs)? {
                0 => Some(self),
                r => self.checked_add(rhs - r),
            }
        }
    }

    #[cfg(test)]
    pub(crate) use std::dbg;
    #[cfg(test)]
    pub(crate) use {alloc::boxed::Box, alloc::vec, alloc::vec::Vec};

    pub use faer_traits::{ComplexImpl, ComplexImplConj, Symbolic};

    pub(crate) use crate::col::{Col, ColMut, ColRef};
    pub(crate) use crate::diag::{Diag, DiagMut, DiagRef};
    pub(crate) use crate::hacks::transmute;
    pub(crate) use crate::linalg::{self, temp_mat_scratch, temp_mat_uninit, temp_mat_zeroed};
    pub(crate) use crate::mat::{AsMat, AsMatMut, AsMatRef, Mat, MatMut, MatRef};
    pub(crate) use crate::perm::{Perm, PermRef};
    pub(crate) use crate::prelude::*;
    pub(crate) use crate::row::{AsRowMut, AsRowRef, Row, RowMut, RowRef};
    pub(crate) use crate::utils::bound::{Array, Dim, Idx, IdxInc, MaybeIdx};
    pub(crate) use crate::utils::simd::SimdCtx;
    pub(crate) use crate::{Auto, NonExhaustive, Side, Spec};

    pub use num_complex::Complex;

    pub use faer_macros::math;
    pub use faer_traits::math_utils::*;
    pub use faer_traits::{ComplexField, Conjugate, Index, IndexCore, Real, RealField, SignedIndex, SimdArch};

    #[inline]
    pub fn simd_align(i: usize) -> usize {
        i.wrapping_neg()
    }

    pub(crate) use crate::{Accum, Conj, ContiguousBwd, ContiguousFwd, DiagStatus, Par, Shape, Stride, Unbind, unzip, zip};

    pub use {unzip as uz, zip as z};

    pub use crate::make_guard;
    pub use dyn_stack::{MemStack, StackReq};
    pub use equator::{assert, assert as Assert, debug_assert, debug_assert as DebugAssert};
    pub use reborrow::*;
}
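
// Quick check (test-only) of the MSRV polyfills in `internal_prelude`:
// `msrv_div_ceil` rounds up, `msrv_next_multiple_of` rounds up to a multiple of
// `rhs`, and the checked variant reports overflow or a zero divisor as `None`.
#[cfg(test)]
mod div_ceil_example {
    use crate::internal_prelude::DivCeil;

    #[test]
    fn rounding() {
        assert_eq!(7usize.msrv_div_ceil(2), 4);
        assert_eq!(8usize.msrv_div_ceil(2), 4);
        assert_eq!(7usize.msrv_next_multiple_of(4), 8);
        assert_eq!(7usize.msrv_checked_next_multiple_of(0), None);
    }
}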

#[allow(unused_imports)]
pub(crate) mod internal_prelude_sp {
    pub(crate) use crate::internal_prelude::*;
    pub(crate) use crate::sparse::{
        FaerError, NONE, Pair, SparseColMat, SparseColMatMut, SparseColMatRef, SparseRowMat, SparseRowMatMut, SparseRowMatRef, SymbolicSparseColMat,
        SymbolicSparseColMatRef, SymbolicSparseRowMat, SymbolicSparseRowMatRef, Triplet, csc_numeric, csc_symbolic, csr_numeric, csr_symbolic,
        linalg as linalg_sp, try_collect, try_zeroed, windows2,
    };
    pub(crate) use core::cell::Cell;
    pub(crate) use core::iter;
    pub(crate) use dyn_stack::MemBuffer;
}

pub mod prelude {
    pub use reborrow::{IntoConst, Reborrow, ReborrowMut};

    pub use super::{Par, Scale, c32, c64, col, mat, row, unzip, zip};
    pub use super::col::{Col, ColMut, ColRef};
    pub use super::mat::{Mat, MatMut, MatRef};
    pub use super::row::{Row, RowMut, RowRef};

    #[cfg(feature = "linalg")]
    pub use super::linalg::solvers::{DenseSolve, Solve, SolveLstsq};

    #[cfg(feature = "sparse")]
    pub use super::prelude_sp::*;

    #[inline]
    pub fn default<T: Default>() -> T {
        Default::default()
    }
}

#[cfg(feature = "sparse")]
mod prelude_sp {
    pub use crate::sparse::{SparseColMat, SparseColMatMut, SparseColMatRef, SparseRowMat, SparseRowMatMut, SparseRowMatRef};
}

#[derive(Copy, Clone, Debug)]
#[repr(transparent)]
pub struct Scale<T>(pub T);
impl<T> Scale<T> {
    #[inline(always)]
    pub fn from_ref(value: &T) -> &Self {
        unsafe { &*(value as *const T as *const Self) }
    }

    #[inline(always)]
    pub fn from_mut(value: &mut T) -> &mut Self {
        unsafe { &mut *(value as *mut T as *mut Self) }
    }
}

static GLOBAL_PARALLELISM: AtomicUsize = {
    #[cfg(all(not(miri), feature = "rayon"))]
    {
        AtomicUsize::new(2)
    }
    #[cfg(not(all(not(miri), feature = "rayon")))]
    {
        AtomicUsize::new(1)
    }
};

pub fn disable_global_parallelism() {
    GLOBAL_PARALLELISM.store(0, core::sync::atomic::Ordering::Relaxed);
}

pub fn set_global_parallelism(par: Par) {
    let value = match par {
        Par::Seq => 1,
        #[cfg(feature = "rayon")]
        Par::Rayon(n) => n.get().saturating_add(2),
    };
    GLOBAL_PARALLELISM.store(value, core::sync::atomic::Ordering::Relaxed);
}

#[track_caller]
pub fn get_global_parallelism() -> Par {
    let value = GLOBAL_PARALLELISM.load(core::sync::atomic::Ordering::Relaxed);
    match value {
        0 => panic!("Global parallelism is disabled."),
        1 => Par::Seq,
        #[cfg(feature = "rayon")]
        n => Par::rayon(n - 2),
        #[cfg(not(feature = "rayon"))]
        _ => unreachable!(),
    }
}
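
// Round-trip sketch (test-only) for the global parallelism setting: the value is
// stored in a process-wide atomic, so a `set_global_parallelism` call is visible
// to the next `get_global_parallelism`. The previous value is restored afterwards
// since other tests in the same process share the global.
#[cfg(test)]
mod global_parallelism_example {
    #[test]
    fn round_trip() {
        let old = crate::get_global_parallelism();
        crate::set_global_parallelism(crate::Par::Seq);
        assert_eq!(crate::get_global_parallelism(), crate::Par::Seq);
        crate::set_global_parallelism(old);
    }
}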

#[doc(hidden)]
pub mod hacks;

pub mod stats;

mod non_exhaustive {
    #[doc(hidden)]
    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
    #[repr(transparent)]
    pub struct NonExhaustive(pub(crate) ());
}
pub(crate) use crate::non_exhaustive::NonExhaustive;

pub trait Auto<T> {
    fn auto() -> Self;
}

pub struct Spec<Config, T> {
    pub config: Config,
    __marker: core::marker::PhantomData<fn() -> T>,
}

impl<Config, T> core::ops::Deref for Spec<Config, T> {
    type Target = Config;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.config
    }
}

impl<Config, T> core::ops::DerefMut for Spec<Config, T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.config
    }
}

impl<Config: Copy, T> Copy for Spec<Config, T> {}
impl<Config: Clone, T> Clone for Spec<Config, T> {
    #[inline]
    fn clone(&self) -> Self {
        Self::new(self.config.clone())
    }
}
impl<Config: core::fmt::Debug, T> core::fmt::Debug for Spec<Config, T> {
    #[inline]
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        self.config.fmt(f)
    }
}

impl<Config, T> Spec<Config, T> {
    #[inline]
    pub fn new(config: Config) -> Self {
        Spec {
            config,
            __marker: core::marker::PhantomData,
        }
    }
}

impl<T, Config> From<Config> for Spec<Config, T> {
    #[inline]
    fn from(config: Config) -> Self {
        Spec {
            config,
            __marker: core::marker::PhantomData,
        }
    }
}

impl<T, Config: Auto<T>> Default for Spec<Config, T> {
    #[inline]
    fn default() -> Self {
        Spec {
            config: Auto::<T>::auto(),
            __marker: core::marker::PhantomData,
        }
    }
}
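
// Illustrative sketch (test-only) tying `Auto`, the `auto!` macro, and `Spec`
// together. `DemoConfig` and its field are hypothetical, defined only for this
// example; real algorithm configs elsewhere in the crate play the same role.
#[cfg(test)]
mod spec_example {
    use crate::{Auto, Spec};

    #[derive(Copy, Clone, Debug, PartialEq, Eq)]
    struct DemoConfig {
        block_size: usize,
    }

    impl Auto<f64> for DemoConfig {
        fn auto() -> Self {
            DemoConfig { block_size: 64 }
        }
    }

    #[test]
    fn default_spec_uses_auto() {
        // `Spec::default` pulls its config from the `Auto<T>` impl...
        let spec: Spec<DemoConfig, f64> = Spec::default();
        // ...and `Deref<Target = Config>` exposes the config's fields directly.
        assert_eq!(spec.block_size, 64);
        // `auto!(T)` is shorthand for `Auto::<T>::auto()`, with `Self` inferred.
        let cfg: DemoConfig = crate::auto!(f64);
        assert_eq!(cfg, *spec);
    }
}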

mod into_range {
    use super::*;

    pub trait IntoRange<I> {
        type Len<N: Shape>: Shape;

        fn into_range(self, min: I, max: I) -> core::ops::Range<I>;
    }

    impl<I> IntoRange<I> for core::ops::Range<I> {
        type Len<N: Shape> = usize;

        #[inline]
        fn into_range(self, _: I, _: I) -> core::ops::Range<I> {
            self
        }
    }
    impl<I> IntoRange<I> for core::ops::RangeFrom<I> {
        type Len<N: Shape> = usize;

        #[inline]
        fn into_range(self, _: I, max: I) -> core::ops::Range<I> {
            self.start..max
        }
    }
    impl<I> IntoRange<I> for core::ops::RangeTo<I> {
        type Len<N: Shape> = usize;

        #[inline]
        fn into_range(self, min: I, _: I) -> core::ops::Range<I> {
            min..self.end
        }
    }
    impl<I> IntoRange<I> for core::ops::RangeFull {
        type Len<N: Shape> = N;

        #[inline]
        fn into_range(self, min: I, max: I) -> core::ops::Range<I> {
            min..max
        }
    }
}

mod sort;

pub extern crate dyn_stack;
pub extern crate faer_traits as traits;
pub extern crate num_complex as complex;
pub extern crate reborrow;

#[cfg(feature = "rand")]
#[cfg_attr(docs_rs, doc(cfg(feature = "rand")))]
pub extern crate rand;