oxscape/sflow/
order.rs

1use crate::{Bazooka, GridMeta, NOT_A_DONOR};
2use anyhow::{Result, anyhow};
3use num_traits::Zero;
4use rayon::prelude::*;
5
/// Receiver value for a cell that passes flow to no neighbour (e.g. grid
/// edges). Such cells are skipped when building donor arrays and seed the
/// first traversal level in `generate_order`.
pub const NO_FLOW: u8 = 9;
7
8pub fn generate_boring_terrain(dem: &mut [f64], start: f64, delta: f64) {
9    dem.into_par_iter().enumerate().for_each(|(v, a)| {
10        *a = start + v as f64 * delta;
11    })
12}
13
/// Flow metric.
///
/// Implementations fill `receivers` with one flow direction per cell of
/// `dem`, using the eight-neighbour encoding shown below (`x` marks the
/// focal cell), or `oxscape::sflow::NO_FLOW` for cells that drain nowhere.
/// Anything else will give unpredictable results. Edge cells should also be
/// given `oxscape::sflow::NO_FLOW`. For an example implementation, see the
/// D8 `compute_receivers` function.
///
/// ```
/// # let x = 4;
/// let arr = [
/// 1,2,3,
/// 0,x,4,
/// 7,6,5,
/// ];
///
/// ```
pub trait FlowMetric {
    /// Write one receiver direction per cell of `dem` into `receivers`.
    fn metric(&self, meta: &GridMeta, dem: &[f64], receivers: &mut [u8]) -> Result<()>;
}
33
/// Test-only reference implementation of donor computation.
///
/// Each cell that flows somewhere is recorded in the receiving cell's donor
/// array. Note: the donor is stored at the slot of its *own* flow direction,
/// whereas `compute_donors_par` stores it at the reversed direction's slot.
/// `generate_order` scans all eight slots regardless, so both conventions
/// yield the same traversal order.
#[cfg(test)]
fn compute_donors(meta: &GridMeta, rec: &[u8], donor: &mut [[usize; 8]]) {
    donor.fill([NOT_A_DONOR; 8]);
    (0..meta.size).for_each(|c| {
        let dir = rec[c];
        if dir != NO_FLOW {
            // Note the donating cell in its downhill receiver's donor array.
            donor[meta.shift(c, dir)][dir as usize] = c;
        }
    });
}
48
49fn compute_donors_par(meta: &GridMeta, rec: &[u8], donors: &mut [[usize; 8]]) {
50    assert_eq!(donors.len(), meta.size);
51    donors.fill([NOT_A_DONOR; 8]);
52    // cast &mut [[usize;8]] to &mut [usize] so we can access cell's directions
53    // in parallel without aliasing problems
54    let d_usize: &mut [usize] = bytemuck::cast_slice_mut(donors);
55    let b = Bazooka(d_usize.as_mut_ptr());
56    let r = &b;
57    rec.iter().enumerate().for_each(|(idx, dir)| {
58        if *dir == NO_FLOW {
59            return;
60        }
61        //If this cell passes flow to a downhill cell, make a note of it in that
62        //downhill cell's donor array at the direction's index
63        let n: usize = meta.shift(idx, *dir);
64        // bounds check
65        assert!(n < meta.size);
66        // SAFETY: we are the only cell from this direction
67        unsafe {
68            *r.0.add(n * 8 + GridMeta::rev(usize::from(*dir))) = idx;
69        }
70    });
71}
72
73///Cells must be ordered so that they can be traversed such that higher cells
74///are processed before their lower neighbouring cells. This method creates
75///such an order. It also produces a list of "levels": cells which are,
76///topologically, neither higher nor lower than each other. Cells in the same
77///level can all be processed simultaneously without having to worry about
78///race conditions.
79pub fn generate_order(
80    rec: &[u8],
81    donor: &[[usize; 8]],
82    stack: &mut Vec<usize>,
83    levels: &mut Vec<usize>,
84) {
85    levels.clear();
86    stack.clear();
87
88    //Since each value of the `levels` array is later used as the starting value
89    //of a for-loop, we include a zero at the beginning of the array.
90    levels.push(0);
91
92    // outer edge can be added immediately and is a single level
93    for (idx, c) in rec.iter().enumerate() {
94        if *c == NO_FLOW {
95            stack.push(idx);
96        }
97    }
98    levels.push(stack.len());
99
100    let mut level_bottom = 0; // first cell of current level
101    let mut level_top = stack.len(); // last cell of current level
102
103    // full BFS search, but we fill an array, so later it can be done in parallel
104    while level_bottom < level_top {
105        for si in level_bottom..level_top {
106            let c = stack[si];
107            // load donating cells of focal cell into the stack
108            for k in donor[c] {
109                if k != NOT_A_DONOR {
110                    stack.push(k);
111                }
112            }
113        }
114        level_bottom = level_top; // start at the previous level
115        level_top = stack.len(); // and process all cells that were added
116
117        levels.push(stack.len());
118    }
119    levels.pop();
120}
121
/// Per-cell view handed to the closures of [`Order::for_lvls_bottom_up`] and
/// [`Order::for_lvls_top_down`]: mutable access to the focal cell's value
/// plus read access to the values of its receiver and donor cells (which
/// live on other levels).
pub struct LevelAccessor<'a, T: Send + Sync> {
    // pointer wrapper over the caller's data array
    arr: &'a Bazooka<T>,
    // index of the focal cell
    idx: usize,
    meta: &'a GridMeta,
    donors: &'a [[usize; 8]],
    receivers: &'a [u8],
}
129
130impl<'a, T: Zero + Copy + Send + Sync> LevelAccessor<'a, T> {
131    /// SAFETY: It is your responsibility to ensure:
132    ///
133    /// For the lifetime of Self:
134    /// - nothing reads arr[idx]
135    /// - nothing writes donors or receivers of this cell
136    ///   - donors are defined as neighbouring cells that have flow pointing to this cell
137    ///   - receivers are neighbouring cells that this cell flows into
138    unsafe fn new(
139        arr: &'a Bazooka<T>,
140        idx: usize,
141        meta: &'a GridMeta,
142        donors: &'a [[usize; 8]],
143        receivers: &'a [u8],
144    ) -> Self {
145        Self {
146            arr,
147            idx,
148            meta,
149            donors,
150            receivers,
151        }
152    }
153
154    /// Get mutable access to the current cell
155    #[inline]
156    pub fn cell(&mut self) -> &mut T {
157        // SAFETY: nothing reads arr[idx]
158        unsafe { self.arr.0.add(self.idx).as_mut().unwrap() }
159    }
160
161    #[inline]
162    pub fn idx(&self) -> usize {
163        self.idx
164    }
165
166    #[inline]
167    pub fn recv_dir(&self) -> u8 {
168        self.receivers[self.idx]
169    }
170
171    #[inline]
172    pub fn receiver(&self) -> T {
173        let n = self.meta.shift(self.idx, self.receivers[self.idx]);
174        assert!(n < self.meta.size);
175        // SAFETY: topological sorting is based on this flow metric.
176        // Therefore, any direction that is NOT NO_FLOW_GEN is in a
177        // different level and can be safely accessed.
178        unsafe { self.arr.0.add(n).read() }
179    }
180
181    #[inline]
182    pub fn donors(&self) -> [T; 8] {
183        let mut res = [T::zero(); 8];
184        for n in 0..8 {
185            let don_idx = self.donors[self.idx][n];
186            if don_idx == NOT_A_DONOR {
187                continue;
188            }
189            // SAFETY: topological sorting is based on this flow metric.
190            // Therefore, any direction that is NOT NO_FLOW_GEN is in a
191            // different level and can be safely accessed.
192            unsafe { res[n] = self.arr.0.add(self.donors[self.idx][n]).read() }
193        }
194        res
195    }
196}
197
/// Topologically sorted flow graph over a grid, driving the level-parallel
/// traversals `for_lvls_bottom_up` / `for_lvls_top_down`.
pub struct Order {
    meta: GridMeta,
    /// Per cell: indices of neighbouring cells that flow into it, one slot
    /// per direction; `NOT_A_DONOR` marks empty slots.
    donors: Vec<[usize; 8]>,
    /// Per cell: direction of the cell it flows into, or `NO_FLOW`.
    receivers: Vec<u8>,
    /// Cell indices in traversal order, grouped into levels.
    stack: Vec<usize>,
    /// Level boundaries as indices into `stack` (leading 0 included).
    levels: Vec<usize>,
}
205
206impl Order {
207    pub fn n_levels(&self) -> usize {
208        self.levels.len() - 1
209    }
210
211    pub fn meta(&self) -> &GridMeta {
212        &self.meta
213    }
214
215    /// Create an uninitialized order
216    pub fn empty(meta: GridMeta) -> Self {
217        let receivers = vec![0; meta.size];
218        let donors = vec![[0; 8]; meta.size];
219        // SAFETY: stack and levels need to be empty so that no traversal is done
220        let stack = Vec::with_capacity(meta.size);
221        let levels = Vec::with_capacity(2 * meta.width + 2 * meta.height);
222        Self {
223            meta,
224            donors,
225            receivers,
226            stack,
227            levels,
228        }
229    }
230
231    pub fn from_dem_metric<M: FlowMetric>(meta: GridMeta, dem: &[f64], metric: M) -> Result<Self> {
232        if meta.size != dem.len() {
233            return Err(anyhow!("meta dem mismatch"));
234        }
235        let mut s = Self::empty(meta);
236        s.reorder(dem, metric)?;
237        Ok(s)
238    }
239
240    /// re-calculate order with the given flow metric
241    pub fn reorder<M: FlowMetric>(&mut self, dem: &[f64], metric: M) -> Result<()> {
242        metric.metric(&self.meta, dem, &mut self.receivers)?;
243        compute_donors_par(&self.meta, &self.receivers, &mut self.donors);
244        generate_order(
245            &self.receivers,
246            &self.donors,
247            &mut self.stack,
248            &mut self.levels,
249        );
250        Ok(())
251    }
252
253    pub fn for_lvls_bottom_up<T: Zero + Copy + Send + Sync, F: Fn(&mut LevelAccessor<T>) + Sync>(
254        &self,
255        data: &mut [T],
256        f: F,
257    ) {
258        // SAFETY: if data.len < self.meta.size, the LevelAccessor would access
259        // out-of-bounds data.
260        assert!(data.len() == self.meta.size);
261        let b = Bazooka(data.as_mut_ptr());
262        for level in self.levels.windows(2).map(|w| &self.stack[w[0]..w[1]]) {
263            level.into_par_iter().for_each(|v| {
264                if self.receivers[*v] == NO_FLOW {
265                    return;
266                }
267                // SAFETY: we have a sound topological sorting. The LevelAccessor
268                // only accesses donors and receivers, and those are on
269                // different levels
270                unsafe {
271                    f(&mut LevelAccessor::new(
272                        &b,
273                        *v,
274                        &self.meta,
275                        &self.donors,
276                        &self.receivers,
277                    ))
278                }
279            });
280        }
281    }
282
283    pub fn for_lvls_top_down<T: Zero + Copy + Send + Sync, F: Fn(&mut LevelAccessor<T>) + Sync>(
284        &self,
285        data: &mut [T],
286        f: F,
287    ) {
288        let b = Bazooka(data.as_mut_ptr());
289        for level in self
290            .levels
291            .windows(2)
292            .rev()
293            .map(|w| &self.stack[w[0]..w[1]])
294        {
295            level.into_par_iter().for_each(|v| {
296                // SAFETY: we have a sound topological sorting. The LevelAccessor
297                // only accesses donors and receivers, and those are on
298                // different levels
299                unsafe {
300                    f(&mut LevelAccessor::new(
301                        &b,
302                        *v,
303                        &self.meta,
304                        &self.donors,
305                        &self.receivers,
306                    ))
307                }
308            });
309        }
310    }
311}
312
#[cfg(test)]
mod test {
    use std::panic;
    use std::{
        collections::HashSet,
        panic::{AssertUnwindSafe, catch_unwind},
    };

    use super::*;

    const META: GridMeta = GridMeta::new(6, 6);
    #[rustfmt::skip]
    mod consts {
        use crate::NOT_A_DONOR;

    pub const H_INIT: [f64;36] = [
        //     0    1    2    3    4    5
        /*0*/ 0.5, 1.0, 1.5, 2.0, 2.5, 3.0,
        /*1*/ 3.5, 4.0, 4.5, 5.0, 5.5, 6.0,
        /*2*/ 6.5, 7.0, 7.5, 8.0, 8.5, 9.0,
        /*3*/ 9.5,10.0,10.5,11.0,11.5,12.0,
        /*4*/12.5,13.0,13.5,14.0,14.5,15.0,
        /*5*/15.5,16.0,16.5,17.0,17.5,18.0,
    ];
    //1 2 3
    //0   4
    //7 6 5
    pub const REC: [u8;36] = [
    //  0   1   2   3   4   5
        9,  9,  9,  9,  9,  9,// 0
        9,  2,  2,  2,  2,  9,// 1
        9,  2,  2,  2,  2,  9,// 2
        9,  2,  2,  2,  2,  9,// 3
        9,  2,  2,  2,  2,  9,// 4
        9,  9,  9,  9,  9,  9,// 5
    ];
    const ND: usize = NOT_A_DONOR;
    pub const DONOR: [[usize;8];36] = [
        //     0     1                        2                         3                         4                         5                         6                         7                         8                         9
        //    [1   ] [ 1  2  3  4  5  6  7  8] [ 1  2  3  4  5  6  7  8] [ 1  2  3  4  5  6  7  8] [ 1  2  3  4  5  6  7  8] [ 1  2  3  4  5  6  7  8] [ 1  2  3  4  5  6  7  8] [ 1  2  3  4  5  6  7  8] [ 1  2  3  4  5  6  7  8] [ 1  2  3  4  5  6  7  8]
        /* 0*/[ND;8],[ND,ND,ND,ND,ND,ND, 7,ND], [ND,ND,ND,ND,ND,ND, 8,ND],[ND,ND,ND,ND,ND,ND, 9,ND],[ND,ND,ND,ND,ND,ND,10,ND], [ND;8],
              [ND;8],[ND,ND,ND,ND,ND,ND,13,ND], [ND,ND,ND,ND,ND,ND,14,ND],[ND,ND,ND,ND,ND,ND,15,ND],[ND,ND,ND,ND,ND,ND,16,ND], [ND;8],
              [ND;8],[ND,ND,ND,ND,ND,ND,19,ND], [ND,ND,ND,ND,ND,ND,20,ND],[ND,ND,ND,ND,ND,ND,21,ND],[ND,ND,ND,ND,ND,ND,22,ND], [ND;8],
              [ND;8],[ND,ND,ND,ND,ND,ND,25,ND], [ND,ND,ND,ND,ND,ND,26,ND],[ND,ND,ND,ND,ND,ND,27,ND],[ND,ND,ND,ND,ND,ND,28,ND], [ND;8],
              [ND;8],[ND;8], [ND;8], [ND;8], [ND;8], [ND;8],
              [ND;8],[ND;8], [ND;8], [ND;8], [ND;8], [ND;8]
    ];
    pub const LEVELS: [usize;6] = [0, 20, 24, 28, 32, 36];
    pub const STACK: [usize;36] = [
    //  0  1  2  3  4  5  6   7   8   9  10  11  12  13  14  15  16  17  18  19
        0, 1, 2, 3, 4, 5, 6, 11, 12, 17, 18, 23, 24, 29, 30, 31, 32, 33, 34, 35,
    // 20 21 22  23
        7, 8, 9, 10,
    //  24  25  26  27
        13, 14, 15, 16,
    //  28  29  30  31
        19, 20, 21, 22,
    //  32  33  34  35
        25, 26, 27, 28,
    ];
    }

    #[test]
    fn test_compute_donors() {
        let mut donor = vec![[0; 8]; META.size];
        compute_donors_par(&META, &consts::REC, &mut donor);
        assert_eq!(donor, consts::DONOR);
    }

    #[test]
    fn test_generate_order() {
        let mut levels = Vec::with_capacity(META.width * 2 + META.height * 2);
        let mut stack = Vec::with_capacity(META.size);
        generate_order(&consts::REC, &consts::DONOR, &mut stack, &mut levels);
        assert_eq!(stack, consts::STACK);
        assert_eq!(levels, consts::LEVELS);
    }

    #[test]
    #[ignore = "Hour long test"]
    fn test_generate_order_exhaustive() {
        // cargo test --release -- test_generate_order_exhaustive --ignored --nocapture
        // silence panic output
        let default_hook = panic::take_hook();
        panic::set_hook(Box::new(|_| {}));

        let meta = GridMeta::new(3, 3);
        let mut donors = vec![[NOT_A_DONOR; 8]; meta.size];
        let mut stack = Vec::with_capacity(meta.size);
        let mut levels = Vec::with_capacity(meta.size);
        let mut receivers: Vec<u8> = vec![9; meta.size];
        let mut set = HashSet::with_capacity(meta.size);

        // every possible 3x3 receiver grid, one per base-9 number
        let total = meta.size.pow(9);

        for mut n in 0..total {
            if (n % 9usize.pow(6)) == 0 {
                println!("{n} out of {total} iterations");
            }
            // decode n into base-9 digits
            for i in 0..9 {
                receivers[i] = (n % 9) as u8;
                n /= 9;
            }
            // inconsistent receiver grids are allowed to panic; skip them
            if catch_unwind(AssertUnwindSafe(|| {
                compute_donors(&meta, &receivers, &mut donors);
                generate_order(&receivers, &donors, &mut stack, &mut levels);
            }))
            .is_err()
            {
                continue;
            }
            for level in levels.windows(2).map(|w| &stack[w[0]..w[1]]) {
                // create a set to check duplicates
                set.clear();
                // level indices don't overlap
                for idx in level {
                    assert!(set.insert(*idx));
                }
                // receiver and donor indices point OUTSIDE the level
                for idx in level {
                    assert!(!set.contains(&meta.shift(*idx, receivers[*idx])));
                    for n in 0..8 {
                        let don_idx = donors[*idx][n];
                        if don_idx != NOT_A_DONOR {
                            assert!(!set.contains(&don_idx))
                        }
                    }
                }
            }
        }

        // restore hook so other tests behave normally
        panic::set_hook(default_hook);
    }

    #[test]
    fn test_compute_donors_loop() {
        let meta = &GridMeta::new(2, 1);
        let rec = [4, 0];
        let mut donors = vec![[0; 8]; 2];
        compute_donors_par(meta, &rec, &mut donors);
        const N: usize = NOT_A_DONOR;
        assert_eq!(donors, [[N, N, N, N, 1, N, N, N], [0, N, N, N, N, N, N, N]]);
    }

    #[test]
    fn test_generate_order_loop() {
        let mut levels = Vec::with_capacity(100);
        let mut stack = Vec::with_capacity(100);
        let rec = [4, 0];
        const N: usize = NOT_A_DONOR;
        let donors = vec![[N, N, N, N, 1, N, N, N], [0, N, N, N, N, N, N, N]];
        generate_order(&rec, &donors, &mut stack, &mut levels);
        assert_eq!(stack, &[]);
        assert_eq!(levels, &[0]);
    }

    #[test]
    #[rustfmt::skip]
    fn test_compute_donors_xwrap() {
        let meta = &GridMeta::new(2, 2);
        let rec = [
            NO_FLOW,0,
            0,0,
        ];
        let mut donors = vec![[0;8];4];
        compute_donors(&meta, &rec, &mut donors);
        const N: usize = NOT_A_DONOR;
        assert_eq!(donors, [
            [1,N,N,N,N,N,N,N],[2,N,N,N,N,N,N,N],
            [3,N,N,N,N,N,N,N],[N,N,N,N,N,N,N,N]
        ]);
    }

    #[test]
    #[rustfmt::skip]
    fn test_generate_order_xwrap() {
        let rec = [
            NO_FLOW,0,
            0,0,
        ];
        const N: usize = NOT_A_DONOR;
        let donors = [
            [1,N,N,N,N,N,N,N],[2,N,N,N,N,N,N,N],
            [3,N,N,N,N,N,N,N],[N,N,N,N,N,N,N,N]
        ];
        let mut levels = Vec::with_capacity(4);
        let mut stack = Vec::with_capacity(4);
        generate_order(&rec, &donors, &mut stack, &mut levels);
        assert_eq!(stack, &[0,1,2,3]);
        assert_eq!(levels, &[0,1,2,3,4]);
    }

    #[test]
    #[rustfmt::skip]
    fn test_compute_donors_xwrap_double() {
        let meta = &GridMeta::new(2, 2);
        let rec = [
            NO_FLOW,0,
            0,2,
        ];
        let mut donors = vec![[0;8];4];
        compute_donors(&meta, &rec, &mut donors);
        const N: usize = NOT_A_DONOR;
        assert_eq!(donors, [
            [1,N,N,N,N,N,N,N],[2,N,3,N,N,N,N,N],
            [N,N,N,N,N,N,N,N],[N,N,N,N,N,N,N,N]
        ]);
        let mut stack = Vec::new();
        let mut levels = Vec::new();
        generate_order(&rec, &donors, &mut stack, &mut levels);
        assert_eq!(stack, &[0,1,2,3]);
        assert_eq!(levels, &[0,1,2,4]);
    }

    #[test]
    #[should_panic(expected = "n < meta.size")]
    fn test_par_donors_oob() {
        let meta = &GridMeta::new(3, 1);
        // these say that there are out-of-bounds receivers
        let rec = [6, 6, 6];
        let mut donors = [[NOT_A_DONOR; 8]; 3];
        // this will access out-of-bounds memory addresses, which we catch
        compute_donors_par(meta, &rec, &mut donors);
    }

    #[test]
    fn test_order_empty_run() {
        let order = Order::empty(GridMeta::new(2, 2));
        order.for_lvls_bottom_up(&mut [0.0; 4], |_| panic!("I shouldn't run!"));
        order.for_lvls_top_down(&mut [0.0; 4], |_| panic!("I shouldn't run!"));
    }
}