nalgebra_sparse/pattern.rs
//! Sparsity patterns for CSR and CSC matrices.

#[cfg(feature = "serde-serialize")]
mod pattern_serde;

use crate::SparseFormatError;
use crate::cs::transpose_cs;
use std::error::Error;
use std::fmt;

/// A representation of the sparsity pattern of a CSR or CSC matrix.
///
/// CSR and CSC matrices store matrices in a very similar fashion. In fact, in a certain sense,
/// they are transposed. More precisely, when reinterpreting the three data arrays of a CSR
/// matrix as a CSC matrix, we obtain the CSC representation of its transpose.
///
/// [`SparsityPattern`] is an abstraction built on this observation. Whereas a CSR matrix
/// stores a matrix row-by-row and a CSC matrix stores a matrix column-by-column, a
/// `SparsityPattern` represents only the index data structure of a matrix *lane-by-lane*.
/// Here, a *lane* is a generalization of rows and columns. We further define *major lanes*
/// and *minor lanes*. The sparsity pattern of a CSR matrix is then obtained by interpreting
/// major/minor as row/column. Conversely, we obtain the sparsity pattern of a CSC matrix by
/// interpreting major/minor as column/row.
///
/// This allows us to use a common abstraction to talk about sparsity patterns of CSR and CSC
/// matrices. This is convenient, because at the abstract level, the invariants of the formats
/// are the same. Hence we may encode the invariants of the index data structure separately from
/// the scalar values of the matrix. This is especially useful in applications where the
/// sparsity pattern is built ahead of the matrix values, or the same sparsity pattern is re-used
/// between different matrices. Finally, we can use `SparsityPattern` to encode adjacency
/// information in graphs.
///
/// # Format
///
/// The format is exactly the same as for the index data structures of CSR and CSC matrices.
/// This means that the sparsity pattern of an `m x n` sparse matrix with `nnz` non-zeros,
/// where in this case `m x n` does *not* mean `rows x columns`, but rather `majors x minors`,
/// is represented by the following two arrays:
///
/// - `major_offsets`, an array of integers with length `m + 1`.
/// - `minor_indices`, an array of integers with length `nnz`.
///
/// The invariants and relationship between `major_offsets` and `minor_indices` remain the same
/// as for `row_offsets` and `col_indices` in the [CSR](`crate::csr::CsrMatrix`) format
/// specification.
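///
/// # Example
///
/// A minimal sketch: the pattern of a `3 x 4` CSR matrix with explicit entries at
/// `(0, 1)`, `(0, 3)` and `(2, 0)`, interpreting major/minor as row/column.
///
/// ```
/// # use nalgebra_sparse::pattern::SparsityPattern;
/// let pattern = SparsityPattern::try_from_offsets_and_indices(
///     3,
///     4,
///     vec![0, 2, 2, 3],
///     vec![1, 3, 0])
///     .unwrap();
/// assert_eq!(pattern.major_dim(), 3);
/// assert_eq!(pattern.minor_dim(), 4);
/// assert_eq!(pattern.nnz(), 3);
/// assert_eq!(pattern.lane(0), &[1, 3]);
/// assert!(pattern.lane(1).is_empty());
/// ```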
#[derive(Debug, Clone, PartialEq, Eq)]
// TODO: Make SparsityPattern parametrized by index type
// (need a solid abstraction for index types though)
pub struct SparsityPattern {
    major_offsets: Vec<usize>,
    minor_indices: Vec<usize>,
    minor_dim: usize,
}

impl SparsityPattern {
    /// Create a sparsity pattern of the given dimensions without explicitly stored entries.
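    ///
    /// A minimal sketch of typical usage:
    ///
    /// ```
    /// # use nalgebra_sparse::pattern::SparsityPattern;
    /// let pattern = SparsityPattern::zeros(3, 4);
    /// assert_eq!(pattern.major_dim(), 3);
    /// assert_eq!(pattern.minor_dim(), 4);
    /// assert_eq!(pattern.nnz(), 0);
    /// ```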
    pub fn zeros(major_dim: usize, minor_dim: usize) -> Self {
        Self {
            major_offsets: vec![0; major_dim + 1],
            minor_indices: vec![],
            minor_dim,
        }
    }

    /// The offsets for the major dimension.
    #[inline]
    #[must_use]
    pub fn major_offsets(&self) -> &[usize] {
        &self.major_offsets
    }

    /// The indices for the minor dimension.
    #[inline]
    #[must_use]
    pub fn minor_indices(&self) -> &[usize] {
        &self.minor_indices
    }

    /// The number of major lanes in the pattern.
    #[inline]
    #[must_use]
    pub fn major_dim(&self) -> usize {
        assert!(!self.major_offsets.is_empty());
        self.major_offsets.len() - 1
    }

    /// The number of minor lanes in the pattern.
    #[inline]
    #[must_use]
    pub fn minor_dim(&self) -> usize {
        self.minor_dim
    }

    /// The number of "non-zeros", i.e. explicitly stored entries in the pattern.
    #[inline]
    #[must_use]
    pub fn nnz(&self) -> usize {
        self.minor_indices.len()
    }

    /// Get the lane at the given index.
    ///
    /// # Panics
    ///
    /// Panics if `major_index` is out of bounds.
    #[inline]
    #[must_use]
    pub fn lane(&self, major_index: usize) -> &[usize] {
        self.get_lane(major_index).unwrap()
    }

    /// Get the lane at the given index, or `None` if out of bounds.
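    ///
    /// A minimal sketch of the bounds-checked variant:
    ///
    /// ```
    /// # use nalgebra_sparse::pattern::SparsityPattern;
    /// let pattern = SparsityPattern::try_from_offsets_and_indices(
    ///     2, 3, vec![0, 1, 2], vec![2, 0])
    ///     .unwrap();
    /// assert_eq!(pattern.get_lane(1), Some(&[0][..]));
    /// assert_eq!(pattern.get_lane(2), None);
    /// ```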
    #[inline]
    #[must_use]
    pub fn get_lane(&self, major_index: usize) -> Option<&[usize]> {
        let offset_begin = *self.major_offsets().get(major_index)?;
        let offset_end = *self.major_offsets().get(major_index + 1)?;
        Some(&self.minor_indices()[offset_begin..offset_end])
    }

    /// Try to construct a sparsity pattern from the given dimensions, major offsets
    /// and minor indices.
    ///
    /// Returns an error if the data does not conform to the requirements.
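    ///
    /// A small sketch of the success and failure paths:
    ///
    /// ```
    /// # use nalgebra_sparse::pattern::{SparsityPattern, SparsityPatternFormatError};
    /// // Valid: offsets have length major_dim + 1 and each lane is sorted and in bounds
    /// assert!(SparsityPattern::try_from_offsets_and_indices(
    ///     2, 3, vec![0, 1, 2], vec![2, 0]).is_ok());
    ///
    /// // Invalid: minor index 3 is out of bounds for minor_dim == 3
    /// assert_eq!(
    ///     SparsityPattern::try_from_offsets_and_indices(2, 3, vec![0, 1, 2], vec![2, 3]),
    ///     Err(SparsityPatternFormatError::MinorIndexOutOfBounds));
    /// ```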
    pub fn try_from_offsets_and_indices(
        major_dim: usize,
        minor_dim: usize,
        major_offsets: Vec<usize>,
        minor_indices: Vec<usize>,
    ) -> Result<Self, SparsityPatternFormatError> {
        use SparsityPatternFormatError::*;

        if major_offsets.len() != major_dim + 1 {
            return Err(InvalidOffsetArrayLength);
        }

        // Check that the first and last offsets conform to the specification
        {
            let first_offset_ok = *major_offsets.first().unwrap() == 0;
            let last_offset_ok = *major_offsets.last().unwrap() == minor_indices.len();
            if !first_offset_ok || !last_offset_ok {
                return Err(InvalidOffsetFirstLast);
            }
        }
        // Test that each lane has strictly monotonically increasing minor indices, i.e.
        // that minor indices within a lane are sorted and unique. In addition, each minor
        // index must be in bounds with respect to the minor dimension.
        {
            for lane_idx in 0..major_dim {
                let range_start = major_offsets[lane_idx];
                let range_end = major_offsets[lane_idx + 1];

                // Test that major offsets are monotonically increasing
                if range_start > range_end {
                    return Err(NonmonotonicOffsets);
                }

                let minor_indices = &minor_indices[range_start..range_end];

                // We test for in-bounds, uniqueness and monotonicity at the same time
                // to ensure that we only visit each minor index once
                let mut iter = minor_indices.iter();
                let mut prev: Option<usize> = None;

                while let Some(next) = iter.next().copied() {
                    if next >= minor_dim {
                        return Err(MinorIndexOutOfBounds);
                    }

                    if let Some(prev) = prev {
                        match prev.cmp(&next) {
                            std::cmp::Ordering::Greater => return Err(NonmonotonicMinorIndices),
                            std::cmp::Ordering::Equal => return Err(DuplicateEntry),
                            std::cmp::Ordering::Less => {}
                        }
                    }
                    prev = Some(next);
                }
            }
        }

        Ok(Self {
            major_offsets,
            minor_indices,
            minor_dim,
        })
    }

    /// Try to construct a sparsity pattern from the given dimensions, major offsets
    /// and minor indices.
    ///
    /// # Panics
    ///
    /// Panics if the number of major offsets is not exactly one greater than the major dimension,
    /// or if the major offsets do not start with 0 and end with the number of minor indices.
    ///
    /// # Safety
    ///
    /// Assumes that the major offsets and indices adhere to the requirements of being a valid
    /// sparsity pattern.
    /// Specifically, that `major_offsets` is monotonically increasing, that
    /// `major_offsets[i]..major_offsets[i+1]` refers to major lane `i` of the sparsity pattern,
    /// and that `minor_indices[major_offsets[i]..major_offsets[i+1]]` is strictly monotonically
    /// increasing.
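    ///
    /// A minimal sketch, assuming the caller has already validated the input:
    ///
    /// ```
    /// # use nalgebra_sparse::pattern::SparsityPattern;
    /// // These arrays uphold all of the invariants above, so skipping the checks is sound.
    /// let pattern = unsafe {
    ///     SparsityPattern::from_offset_and_indices_unchecked(2, 3, vec![0, 1, 2], vec![2, 0])
    /// };
    /// assert_eq!(pattern.nnz(), 2);
    /// ```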
    pub unsafe fn from_offset_and_indices_unchecked(
        major_dim: usize,
        minor_dim: usize,
        major_offsets: Vec<usize>,
        minor_indices: Vec<usize>,
    ) -> Self {
        assert_eq!(major_offsets.len(), major_dim + 1);

        // Check that the first and last offsets conform to the specification
        {
            let first_offset_ok = *major_offsets.first().unwrap() == 0;
            let last_offset_ok = *major_offsets.last().unwrap() == minor_indices.len();
            assert!(first_offset_ok && last_offset_ok);
        }

        Self {
            major_offsets,
            minor_indices,
            minor_dim,
        }
    }

    /// An iterator over the explicitly stored "non-zero" entries `(i, j)`.
    ///
    /// The iteration happens in a lane-major fashion, meaning that the lane index `i`
    /// increases monotonically, and the minor index `j` increases monotonically within each
    /// lane `i`.
    ///
    /// # Examples
    ///
    /// ```
    /// # use nalgebra_sparse::pattern::SparsityPattern;
    /// let offsets = vec![0, 2, 3, 4];
    /// let minor_indices = vec![0, 2, 1, 0];
    /// let pattern = SparsityPattern::try_from_offsets_and_indices(3, 4, offsets, minor_indices)
    ///     .unwrap();
    ///
    /// let entries: Vec<_> = pattern.entries().collect();
    /// assert_eq!(entries, vec![(0, 0), (0, 2), (1, 1), (2, 0)]);
    /// ```
    #[must_use]
    pub fn entries(&self) -> SparsityPatternIter<'_> {
        SparsityPatternIter::from_pattern(self)
    }

    /// Returns the raw offset and index data for the sparsity pattern.
    ///
    /// # Examples
    ///
    /// ```
    /// # use nalgebra_sparse::pattern::SparsityPattern;
    /// let offsets = vec![0, 2, 3, 4];
    /// let minor_indices = vec![0, 2, 1, 0];
    /// let pattern = SparsityPattern::try_from_offsets_and_indices(
    ///     3,
    ///     4,
    ///     offsets.clone(),
    ///     minor_indices.clone())
    ///     .unwrap();
    /// let (offsets2, minor_indices2) = pattern.disassemble();
    /// assert_eq!(offsets2, offsets);
    /// assert_eq!(minor_indices2, minor_indices);
    /// ```
    pub fn disassemble(self) -> (Vec<usize>, Vec<usize>) {
        (self.major_offsets, self.minor_indices)
    }

    /// Computes the transpose of the sparsity pattern.
    ///
    /// This is analogous to matrix transposition, i.e. an entry `(i, j)` becomes `(j, i)` in the
    /// new pattern.
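    ///
    /// A small sketch: entry `(0, 2)` of a `2 x 3` pattern becomes entry `(2, 0)` of the
    /// `3 x 2` transpose.
    ///
    /// ```
    /// # use nalgebra_sparse::pattern::SparsityPattern;
    /// let pattern = SparsityPattern::try_from_offsets_and_indices(
    ///     2, 3, vec![0, 1, 2], vec![2, 0])
    ///     .unwrap();
    /// let transposed = pattern.transpose();
    /// assert_eq!(transposed.major_dim(), 3);
    /// assert_eq!(transposed.minor_dim(), 2);
    /// let entries: Vec<_> = transposed.entries().collect();
    /// assert_eq!(entries, vec![(0, 1), (2, 0)]);
    /// ```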
    #[must_use]
    pub fn transpose(&self) -> Self {
        // By using unit () values, we can use the same routines as for CSR/CSC matrices
        let values = vec![(); self.nnz()];
        let (new_offsets, new_indices, _) = transpose_cs(
            self.major_dim(),
            self.minor_dim(),
            self.major_offsets(),
            self.minor_indices(),
            &values,
        );
        // TODO: Skip checks
        Self::try_from_offsets_and_indices(
            self.minor_dim(),
            self.major_dim(),
            new_offsets,
            new_indices,
        )
        .expect("Internal error: Transpose should never fail.")
    }
}

impl Default for SparsityPattern {
    fn default() -> Self {
        Self {
            major_offsets: vec![0],
            minor_indices: vec![],
            minor_dim: 0,
        }
    }
}

/// Error type for `SparsityPattern` format errors.
#[non_exhaustive]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum SparsityPatternFormatError {
    /// Indicates an invalid number of offsets.
    ///
    /// The number of offsets must be equal to (major_dim + 1).
    InvalidOffsetArrayLength,
    /// Indicates that the first or last entry in the offset array did not conform to
    /// specifications.
    ///
    /// The first entry must be 0, and the last entry must be equal to the number of minor
    /// indices.
    InvalidOffsetFirstLast,
    /// Indicates that the major offsets are not monotonically increasing.
    NonmonotonicOffsets,
    /// One or more minor indices are out of bounds.
    MinorIndexOutOfBounds,
    /// One or more duplicate entries were detected.
    ///
    /// Two entries are considered duplicates if they are part of the same major lane and have
    /// the same minor index.
    DuplicateEntry,
    /// Indicates that minor indices are not monotonically increasing within each lane.
    NonmonotonicMinorIndices,
}

impl From<SparsityPatternFormatError> for SparseFormatError {
    fn from(err: SparsityPatternFormatError) -> Self {
        use crate::SparseFormatErrorKind;
        use crate::SparseFormatErrorKind::*;
        use SparsityPatternFormatError::DuplicateEntry as PatternDuplicateEntry;
        use SparsityPatternFormatError::*;
        match err {
            InvalidOffsetArrayLength
            | InvalidOffsetFirstLast
            | NonmonotonicOffsets
            | NonmonotonicMinorIndices => {
                SparseFormatError::from_kind_and_error(InvalidStructure, Box::from(err))
            }
            MinorIndexOutOfBounds => {
                SparseFormatError::from_kind_and_error(IndexOutOfBounds, Box::from(err))
            }
            PatternDuplicateEntry => SparseFormatError::from_kind_and_error(
                #[allow(unused_qualifications)]
                SparseFormatErrorKind::DuplicateEntry,
                Box::from(err),
            ),
        }
    }
}

impl fmt::Display for SparsityPatternFormatError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            SparsityPatternFormatError::InvalidOffsetArrayLength => {
                write!(f, "Length of offset array is not equal to (major_dim + 1).")
            }
            SparsityPatternFormatError::InvalidOffsetFirstLast => {
                write!(f, "First or last offset is incompatible with format.")
            }
            SparsityPatternFormatError::NonmonotonicOffsets => {
                write!(f, "Offsets are not monotonically increasing.")
            }
            SparsityPatternFormatError::MinorIndexOutOfBounds => {
                write!(f, "A minor index is out of bounds.")
            }
            SparsityPatternFormatError::DuplicateEntry => {
                write!(f, "Input data contains duplicate entries.")
            }
            SparsityPatternFormatError::NonmonotonicMinorIndices => {
                write!(
                    f,
                    "Minor indices are not monotonically increasing within each lane."
                )
            }
        }
    }
}

impl Error for SparsityPatternFormatError {}

/// Iterator type for iterating over entries in a sparsity pattern.
#[derive(Debug, Clone)]
pub struct SparsityPatternIter<'a> {
    // See implementation of Iterator::next for an explanation of how these members are used
    major_offsets: &'a [usize],
    minor_indices: &'a [usize],
    current_lane_idx: usize,
    remaining_minors_in_lane: &'a [usize],
}

impl<'a> SparsityPatternIter<'a> {
    fn from_pattern(pattern: &'a SparsityPattern) -> Self {
        let first_lane_end = pattern.major_offsets().get(1).unwrap_or(&0);
        let minors_in_first_lane = &pattern.minor_indices()[0..*first_lane_end];
        Self {
            major_offsets: pattern.major_offsets(),
            minor_indices: pattern.minor_indices(),
            current_lane_idx: 0,
            remaining_minors_in_lane: minors_in_first_lane,
        }
    }
}

impl<'a> Iterator for SparsityPatternIter<'a> {
    type Item = (usize, usize);

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // We ensure fast iteration across each lane by iteratively "draining" a slice
        // corresponding to the remaining column indices in the particular lane.
        // When we reach the end of this slice, we are at the end of a lane,
        // and we must do some bookkeeping for preparing the iteration of the next lane
        // (or stop iteration if we're through all lanes).
        // This way we can avoid doing unnecessary bookkeeping on every iteration,
        // instead paying a small price whenever we jump to a new lane.
        if let Some(minor_idx) = self.remaining_minors_in_lane.first() {
            let item = Some((self.current_lane_idx, *minor_idx));
            self.remaining_minors_in_lane = &self.remaining_minors_in_lane[1..];
            item
        } else {
            loop {
                // Keep skipping lanes until we find a non-empty lane or there are no more lanes
                if self.current_lane_idx + 2 >= self.major_offsets.len() {
                    // We've processed all lanes, so we're at the end of the iterator
                    // (note: keep in mind that offsets.len() == major_dim() + 1, hence we need +2)
                    return None;
                } else {
                    // Bump lane index and check if the lane is non-empty
                    self.current_lane_idx += 1;
                    let lower = self.major_offsets[self.current_lane_idx];
                    let upper = self.major_offsets[self.current_lane_idx + 1];
                    if upper > lower {
                        self.remaining_minors_in_lane = &self.minor_indices[(lower + 1)..upper];
                        return Some((self.current_lane_idx, self.minor_indices[lower]));
                    }
                }
            }
        }
    }
}