//! Radix-sort building blocks: traits for viewing keys as byte strings
//! ([`KeyBytes`], [`RadixSortKey`]) and for delegating to a fallback
//! comparison sort ([`TrySorter`]), plus ready-made strategy adapters.
//!
//! The crate is `#![no_std]`; `std` is only linked when the `std` feature
//! is enabled.
#![forbid(unsafe_code, missing_docs, missing_debug_implementations)]
#![no_std]
// `std` is only available behind the `std` feature; everything else must use
// `core` paths.
#[cfg(feature="std")]
#[macro_use]
extern crate std;
/// Ska-sort implementation.
pub mod ska_sort;
/// American-flag-sort implementation.
pub mod american_flag_sort;
/// Shared utilities for the sort implementations.
pub mod util;
/// Implementations that require `std` (enabled by the `std` feature).
#[cfg(feature="std")]
pub mod std_impls;
/// Floating-point key support.
pub mod float;
// Capacity of the sorters' explicit depth stack — assumed used by the
// ska_sort/american_flag_sort modules; TODO confirm there.
const DEPTH_STACK_SIZE: usize = 128;
// Default value returned by `RadixSortKey::std_sort_size_threshold()`.
const STANDARD_STD_SORT_THRESHOLD: usize = 128;
// Default value returned by `RadixSortKey::depth_threshold()`.
const DEPTH_THRESHOLD: usize = 128;
/// Default recursion depth past which [`ComparatorSort`] delegates to a
/// comparison sort.
pub const DEFAULT_MAX_DEPTH_THRESHOLD: usize = 128;
/// Default slice length at or below which sorting is delegated to a
/// comparison sort (see [`RadixSortKey::DELEGATION_SIZE`]).
pub const DEFAULT_DELEGATION_SIZE: usize = 128;
/// Combined strategy bound: a type that can both extract key bytes
/// ([`KeyBytes`]) and decide when to delegate to a fallback sort
/// ([`TrySorter`]).
pub trait RadixSortStrategy<K>: KeyBytes<K> + TrySorter<K> {}
// Blanket impl: anything implementing both halves is automatically a strategy.
impl<T, K> RadixSortStrategy<K> for T where T: KeyBytes<K> + TrySorter<K> {}
/// A [`RadixSortStrategy`] assembled from two independent parts: `bytes`
/// supplies the [`KeyBytes`] half and `sort` the [`TrySorter`] half.
#[derive(Debug, Copy, Clone)]
pub struct RadixSortStrategyWith<B, S> {
/// Key-byte extraction half of the strategy.
pub bytes: B,
/// Fallback-sort half of the strategy.
pub sort: S
}
/// All key-byte queries are forwarded to the `bytes` component.
impl<B, S, K> KeyBytes<K> for RadixSortStrategyWith<B, S>
where
    B: KeyBytes<K>,
{
    const HAS_CONST_KEY_LEN: bool = B::HAS_CONST_KEY_LEN;
    const MAX_KEY_LEN: Option<usize> = B::MAX_KEY_LEN;

    #[inline(always)]
    fn has_const_key_len(&self) -> bool {
        self.bytes.has_const_key_len()
    }

    #[inline(always)]
    fn max_key_len(&self) -> Option<usize> {
        self.bytes.max_key_len()
    }

    #[inline(always)]
    fn key_len(&self, k: &K) -> usize {
        self.bytes.key_len(k)
    }

    #[inline(always)]
    fn key_byte(&self, k: &K, idx: usize) -> u8 {
        self.bytes.key_byte(k, idx)
    }
}
/// Delegation decisions are forwarded to the `sort` component.
impl<B, S, K> TrySorter<K> for RadixSortStrategyWith<B, S>
where
    S: TrySorter<K>,
{
    #[inline(always)]
    fn try_sort(&mut self, arr: &mut [K], depth: isize, iteration: usize, force: bool) -> bool {
        let sorter = &mut self.sort;
        sorter.try_sort(arr, depth, iteration, force)
    }
}
/// Fallback sorting hook consulted by the radix sorters.
pub trait TrySorter<K> {
/// Offers `arr` to the fallback sorter; returns `true` if it was sorted.
///
/// `depth` is the current radix recursion depth and `iteration` the current
/// iteration count. When `force` is `true` the implementations in this file
/// always sort and return `true`; returning `false` declines, letting radix
/// sorting continue.
fn try_sort(&mut self, arr: &mut [K], depth: isize, iteration: usize, force: bool) -> bool;
}
/// `TrySorter` passes through mutable references.
impl<K, T> TrySorter<K> for &mut T
where
    T: TrySorter<K>,
{
    #[inline(always)]
    fn try_sort(&mut self, arr: &mut [K], depth: isize, iteration: usize, force: bool) -> bool {
        // Double deref reaches the underlying `T` impl directly.
        (**self).try_sort(arr, depth, iteration, force)
    }
}
/// Adapts a closure `FnMut(&mut [K], isize, usize, bool) -> bool` into a
/// [`TrySorter`].
#[derive(Debug, Copy, Clone)]
pub struct TrySortWith<S>(pub S);
impl<S, K> TrySorter<K> for TrySortWith<S>
where
    S: FnMut(&mut [K], isize, usize, bool) -> bool,
{
    /// Invokes the wrapped closure with exactly the supplied arguments.
    fn try_sort(&mut self, arr: &mut [K], depth: isize, iteration: usize, force: bool) -> bool {
        (self.0)(arr, depth, iteration, force)
    }
}
/// The built-in strategy: keys describe their own bytes via [`RadixSortKey`],
/// and small or deep slices fall back to `sort_unstable`.
#[derive(Debug, Copy, Clone)]
pub struct DefaultStrategy;
impl<K: RadixSortKey> TrySorter<K> for DefaultStrategy {
    /// Runs `sort_unstable` when forced, when the slice is at most
    /// `K::std_sort_size_threshold()` long, or when recursion has passed
    /// `K::depth_threshold()`; otherwise declines.
    fn try_sort(&mut self, arr: &mut [K], depth: isize, _iteration: usize, force: bool) -> bool {
        let delegate = force
            || arr.len() <= K::std_sort_size_threshold()
            || depth > K::depth_threshold() as isize;
        if delegate {
            arr.sort_unstable();
        }
        delegate
    }
}
/// Adapts a comparator closure `FnMut(&T, &T) -> Ordering` into a
/// [`TrySorter`] that delegates via `sort_unstable_by`.
///
/// The wrapped comparator is now public for consistency with the sibling
/// adapters [`TrySortWith`] and [`KeyBytesOf`]; previously the field was
/// private with no constructor in this module, making the type unusable
/// from outside the crate.
#[derive(Debug, Copy, Clone)]
pub struct ComparatorSort<C>(pub C);
impl<C, T> TrySorter<T> for ComparatorSort<C>
where
    C: FnMut(&T, &T) -> core::cmp::Ordering,
{
    /// Sorts with the wrapped comparator when forced, when the slice has at
    /// most [`DEFAULT_DELEGATION_SIZE`] elements, or when recursion has
    /// passed [`DEFAULT_MAX_DEPTH_THRESHOLD`]; otherwise declines.
    fn try_sort(&mut self, arr: &mut [T], depth: isize, _iteration: usize, force: bool) -> bool {
        let delegate = force
            || arr.len() <= DEFAULT_DELEGATION_SIZE
            || depth > DEFAULT_MAX_DEPTH_THRESHOLD as isize;
        if delegate {
            arr.sort_unstable_by(&mut self.0);
        }
        delegate
    }
}
/// Strategy that sorts values of type `T` by a key of type `K` computed by
/// the `key` function.
///
/// Fix: the phantom fields now use `core::marker::PhantomData`. The previous
/// `std::marker::PhantomData` paths only compile when the `std` feature is
/// enabled, but this is a `#![no_std]` crate, so the type must be available
/// without `std`.
#[derive(Debug, Copy, Clone)]
pub struct KeyStrategy<F, T, K> {
    /// Function mapping a value to its sort key.
    pub key: F,
    input: core::marker::PhantomData<T>,
    output: core::marker::PhantomData<K>,
}
/// The key bytes of a `T` are the bytes of its derived key `K`.
impl<F, T, K> KeyBytes<T> for KeyStrategy<F, T, K>
where
    K: RadixSortKey,
    F: Fn(&T) -> K,
{
    const HAS_CONST_KEY_LEN: bool = K::HAS_CONST_KEY_LEN;
    const MAX_KEY_LEN: Option<usize> = K::MAX_KEY_LEN;

    #[inline(always)]
    fn has_const_key_len(&self) -> bool {
        K::HAS_CONST_KEY_LEN
    }

    #[inline(always)]
    fn max_key_len(&self) -> Option<usize> {
        K::MAX_KEY_LEN
    }

    #[inline(always)]
    fn key_len(&self, value: &T) -> usize {
        let derived = (self.key)(value);
        derived.key_len()
    }

    #[inline(always)]
    fn key_byte(&self, value: &T, idx: usize) -> u8 {
        let derived = (self.key)(value);
        derived.key_byte(idx)
    }
}
/// Builds a [`KeyStrategy`] from a key-extraction function.
///
/// Fix: uses `core::marker::PhantomData`; the previous `std::` path only
/// compiles with the `std` feature enabled, but this is a `#![no_std]` crate.
pub fn key_strategy<F, T, K>(key: F) -> KeyStrategy<F, T, K> where F: FnMut(&T) -> K {
    KeyStrategy {
        key,
        input: core::marker::PhantomData,
        output: core::marker::PhantomData,
    }
}
impl<F, T, K> TrySorter<T> for KeyStrategy<F, T, K>
where
    F: FnMut(&T) -> K,
    K: RadixSortKey,
{
    /// Runs `sort_unstable_by_key` with the key function when forced, when
    /// the slice is at most `K::std_sort_size_threshold()` long, or when
    /// recursion has passed `K::depth_threshold()`; otherwise declines.
    fn try_sort(&mut self, arr: &mut [T], depth: isize, _iteration: usize, force: bool) -> bool {
        let delegate = force
            || arr.len() <= K::std_sort_size_threshold()
            || depth > K::depth_threshold() as isize;
        if delegate {
            arr.sort_unstable_by_key(&mut self.key);
        }
        delegate
    }
}
/// Describes how to view keys of type `K` as byte strings for radix sorting.
///
/// NOTE(review): the default methods return the *default* const values
/// (`false` / `None`), not any overridden consts — implementations that
/// override the consts must also override the methods, as every impl in
/// this file does.
pub trait KeyBytes<K> {
/// Compile-time hint: `true` if every key has the same byte length.
const HAS_CONST_KEY_LEN: bool = false;
/// Compile-time upper bound on key length, if one is known.
const MAX_KEY_LEN: Option<usize> = None;
/// Runtime counterpart of [`Self::HAS_CONST_KEY_LEN`].
#[inline(always)] fn has_const_key_len(&self) -> bool { false }
/// Runtime counterpart of [`Self::MAX_KEY_LEN`].
#[inline(always)] fn max_key_len(&self) -> Option<usize> { None }
/// Number of bytes in `key`.
fn key_len(&self, key: &K) -> usize;
/// The byte of `key` at index `byte`.
fn key_byte(&self, key: &K, byte: usize) -> u8;
}
/// `KeyBytes` passes through shared references.
impl<K, T> KeyBytes<K> for &T
where
    T: KeyBytes<K>,
{
    const HAS_CONST_KEY_LEN: bool = T::HAS_CONST_KEY_LEN;
    const MAX_KEY_LEN: Option<usize> = T::MAX_KEY_LEN;

    #[inline(always)]
    fn has_const_key_len(&self) -> bool {
        T::has_const_key_len(&**self)
    }

    #[inline(always)]
    fn max_key_len(&self) -> Option<usize> {
        T::max_key_len(&**self)
    }

    #[inline(always)]
    fn key_len(&self, key: &K) -> usize {
        T::key_len(&**self, key)
    }

    #[inline(always)]
    fn key_byte(&self, key: &K, byte: usize) -> u8 {
        T::key_byte(&**self, key, byte)
    }
}
/// `KeyBytes` passes through mutable references.
impl<K, T> KeyBytes<K> for &mut T
where
    T: KeyBytes<K>,
{
    const HAS_CONST_KEY_LEN: bool = T::HAS_CONST_KEY_LEN;
    const MAX_KEY_LEN: Option<usize> = T::MAX_KEY_LEN;

    #[inline(always)]
    fn has_const_key_len(&self) -> bool {
        T::has_const_key_len(&**self)
    }

    #[inline(always)]
    fn max_key_len(&self) -> Option<usize> {
        T::max_key_len(&**self)
    }

    #[inline(always)]
    fn key_len(&self, key: &K) -> usize {
        T::key_len(&**self, key)
    }

    #[inline(always)]
    fn key_byte(&self, key: &K, byte: usize) -> u8 {
        T::key_byte(&**self, key, byte)
    }
}
impl<K: RadixSortKey> KeyBytes<K> for DefaultStrategy {
const HAS_CONST_KEY_LEN: bool = K::HAS_CONST_KEY_LEN;
const MAX_KEY_LEN: Option<usize> = K::MAX_KEY_LEN;
#[inline(always)] fn has_const_key_len(&self) -> bool { K::HAS_CONST_KEY_LEN }
#[inline(always)] fn max_key_len(&self) -> Option<usize> { K::MAX_KEY_LEN }
#[inline(always)] fn key_len(&self, key: &K) -> usize { key.key_len() }
#[inline(always)] fn key_byte(&self, key: &K, byte: usize) -> u8 { key.key_byte(byte) }
}
/// A [`KeyBytes`] assembled from a byte-extraction closure and a
/// [`KeyLength`] provider.
#[derive(Debug, Copy, Clone)]
pub struct KeyBytesWith<B, L> {
/// Closure returning the byte of a key at a given index.
pub bytes: B,
/// Length provider for keys.
pub len: L
}
/// Length queries go to `len`; byte extraction goes to the `bytes` closure.
impl<B, L, K> KeyBytes<K> for KeyBytesWith<B, L>
where
    L: KeyLength<K>,
    B: Fn(&K, usize) -> u8,
{
    const HAS_CONST_KEY_LEN: bool = L::HAS_CONST_KEY_LEN;
    const MAX_KEY_LEN: Option<usize> = L::MAX_KEY_LEN;

    #[inline(always)]
    fn has_const_key_len(&self) -> bool {
        self.len.has_const_key_len()
    }

    #[inline(always)]
    fn max_key_len(&self) -> Option<usize> {
        self.len.max_key_len()
    }

    #[inline(always)]
    fn key_len(&self, k: &K) -> usize {
        self.len.key_len(k)
    }

    #[inline(always)]
    fn key_byte(&self, k: &K, idx: usize) -> u8 {
        (self.bytes)(k, idx)
    }
}
/// Provides the length, in bytes, of keys of type `K`.
pub trait KeyLength<K> {
/// Compile-time hint: `true` if every key has the same length.
const HAS_CONST_KEY_LEN: bool = false;
/// Compile-time upper bound on key length, if one is known.
const MAX_KEY_LEN: Option<usize> = None;
/// Runtime counterpart of [`Self::HAS_CONST_KEY_LEN`].
#[inline(always)] fn has_const_key_len(&self) -> bool { false }
/// Runtime counterpart of [`Self::MAX_KEY_LEN`].
#[inline(always)] fn max_key_len(&self) -> Option<usize> { None }
/// Length of `key` in bytes.
fn key_len(&self, key: &K) -> usize;
}
/// Any `Fn(&K) -> usize` closure acts as a length provider.
impl<K, F> KeyLength<K> for F
where
    F: Fn(&K) -> usize,
{
    fn key_len(&self, key: &K) -> usize {
        (self)(key)
    }
}
/// A bare `usize` acts as a constant length for every key.
impl<K> KeyLength<K> for usize {
const HAS_CONST_KEY_LEN: bool = true;
// NOTE(review): the associated const cannot depend on the runtime value, so
// it stays `None` even though `max_key_len()` reports `Some(*self)`; callers
// wanting the actual bound must use the method.
const MAX_KEY_LEN: Option<usize> = None;
#[inline(always)] fn has_const_key_len(&self) -> bool { true }
#[inline(always)] fn max_key_len(&self) -> Option<usize> { Some(*self) }
#[inline(always)] fn key_len(&self, _key: &K) -> usize { *self }
}
/// Derives [`KeyBytes`] for `T` from a function mapping each value to a
/// [`RadixSortKey`].
#[derive(Debug, Copy, Clone)]
pub struct KeyBytesOf<F>(pub F);
/// The key bytes of a `T` are the bytes of the key the wrapped function
/// derives from it.
impl<F, T, K> KeyBytes<T> for KeyBytesOf<F>
where
    K: RadixSortKey,
    F: Fn(&T) -> K,
{
    const HAS_CONST_KEY_LEN: bool = K::HAS_CONST_KEY_LEN;
    const MAX_KEY_LEN: Option<usize> = K::MAX_KEY_LEN;

    #[inline(always)]
    fn has_const_key_len(&self) -> bool {
        K::HAS_CONST_KEY_LEN
    }

    #[inline(always)]
    fn max_key_len(&self) -> Option<usize> {
        K::MAX_KEY_LEN
    }

    #[inline(always)]
    fn key_len(&self, value: &T) -> usize {
        let derived = (self.0)(value);
        derived.key_len()
    }

    #[inline(always)]
    fn key_byte(&self, value: &T, idx: usize) -> u8 {
        let derived = (self.0)(value);
        derived.key_byte(idx)
    }
}
/// A key type that can be radix sorted byte by byte.
///
/// Byte-wise ordering is expected to agree with the type's [`Ord`] impl (the
/// signed-integer impls below arrange this by biasing the sign bit).
pub trait RadixSortKey: Ord {
/// Slice length at or below which sorting should be delegated to a
/// comparison sort.
const DELEGATION_SIZE: usize = DEFAULT_DELEGATION_SIZE;
/// `true` if every key has the same byte length.
const HAS_CONST_KEY_LEN: bool = false;
/// Upper bound on key length, if known at compile time.
const MAX_KEY_LEN: Option<usize> = None;
/// Number of bytes in this key.
fn key_len(&self) -> usize;
/// The byte of this key at index `byte`.
fn key_byte(&self, byte: usize) -> u8;
/// Slice length at or below which [`DefaultStrategy`] uses `sort_unstable`.
#[inline(always)] fn std_sort_size_threshold() -> usize { STANDARD_STD_SORT_THRESHOLD }
/// Recursion depth past which [`DefaultStrategy`] uses `sort_unstable`.
#[inline(always)] fn depth_threshold() -> usize { DEPTH_THRESHOLD }
}
/// A radix-sortable key whose byte length is the same for every value and
/// known at compile time; a blanket impl lifts this into [`RadixSortKey`].
pub trait ConstRadixSortKey: Ord {
/// Delegation size forwarded to [`RadixSortKey::DELEGATION_SIZE`].
const CONST_DELEGATION_SIZE: usize = DEFAULT_DELEGATION_SIZE;
/// Exact key length in bytes.
const CONST_KEY_LEN: usize;
/// The byte of this key at index `byte`; see [`RadixSortKey::key_byte`].
fn const_key_byte(&self, byte: usize) -> u8;
}
/// Every constant-length key is a [`RadixSortKey`] with a known length.
impl<C: ConstRadixSortKey> RadixSortKey for C {
    const DELEGATION_SIZE: usize = C::CONST_DELEGATION_SIZE;
    const HAS_CONST_KEY_LEN: bool = true;
    const MAX_KEY_LEN: Option<usize> = Some(C::CONST_KEY_LEN);

    #[inline(always)]
    fn key_len(&self) -> usize {
        C::CONST_KEY_LEN
    }

    #[inline(always)]
    fn key_byte(&self, byte: usize) -> u8 {
        C::const_key_byte(self, byte)
    }
}
/// `bool`: a single byte, `false` (0) ordered before `true` (1).
impl ConstRadixSortKey for bool {
    const CONST_KEY_LEN: usize = 1;
    #[inline(always)]
    fn const_key_byte(&self, _byte: usize) -> u8 {
        if *self { 1 } else { 0 }
    }
}
/// `u8`: the key is the byte itself.
impl ConstRadixSortKey for u8 {
    const CONST_KEY_LEN: usize = 1;
    #[inline(always)]
    fn const_key_byte(&self, _byte: usize) -> u8 {
        *self
    }
}
/// `i8`: flipping the sign bit makes unsigned byte order match signed order.
impl ConstRadixSortKey for i8 {
    const CONST_KEY_LEN: usize = 1;
    #[inline(always)]
    fn const_key_byte(&self, _byte: usize) -> u8 {
        // Equivalent to `wrapping_add(i8::MIN) as u8`: only the top bit flips.
        (*self as u8) ^ 0x80
    }
}
/// `u16`: two big-endian bytes.
impl ConstRadixSortKey for u16 {
    const CONST_KEY_LEN: usize = 2;
    #[inline(always)]
    fn const_key_byte(&self, byte: usize) -> u8 {
        let be = self.to_be_bytes();
        be[byte]
    }
}
/// `i16`: flip the sign bit, then take big-endian bytes.
impl ConstRadixSortKey for i16 {
    const CONST_KEY_LEN: usize = 2;
    #[inline(always)]
    fn const_key_byte(&self, byte: usize) -> u8 {
        // Equivalent to `wrapping_add(i16::MIN) as u16`.
        ((*self as u16) ^ 0x8000).const_key_byte(byte)
    }
}
/// `u32`: four big-endian bytes.
impl ConstRadixSortKey for u32 {
    const CONST_KEY_LEN: usize = 4;
    #[inline(always)]
    fn const_key_byte(&self, byte: usize) -> u8 {
        let be = self.to_be_bytes();
        be[byte]
    }
}
/// `i32`: flip the sign bit, then take big-endian bytes.
impl ConstRadixSortKey for i32 {
    const CONST_KEY_LEN: usize = 4;
    #[inline(always)]
    fn const_key_byte(&self, byte: usize) -> u8 {
        // Equivalent to `wrapping_add(i32::MIN) as u32`.
        ((*self as u32) ^ 0x8000_0000).const_key_byte(byte)
    }
}
/// `u64`: eight big-endian bytes.
impl ConstRadixSortKey for u64 {
    const CONST_KEY_LEN: usize = 8;
    #[inline(always)]
    fn const_key_byte(&self, byte: usize) -> u8 {
        let be = self.to_be_bytes();
        be[byte]
    }
}
/// `i64`: flip the sign bit, then take big-endian bytes.
impl ConstRadixSortKey for i64 {
    const CONST_KEY_LEN: usize = 8;
    #[inline(always)]
    fn const_key_byte(&self, byte: usize) -> u8 {
        // Equivalent to `wrapping_add(i64::MIN) as u64`.
        ((*self as u64) ^ 0x8000_0000_0000_0000).const_key_byte(byte)
    }
}
/// `usize`: platform-width big-endian bytes.
impl ConstRadixSortKey for usize {
    const CONST_KEY_LEN: usize = core::mem::size_of::<usize>();
    #[inline(always)]
    fn const_key_byte(&self, byte: usize) -> u8 {
        let be = self.to_be_bytes();
        be[byte]
    }
}
/// `isize`: flip the sign bit, then take big-endian bytes.
impl ConstRadixSortKey for isize {
    const CONST_KEY_LEN: usize = core::mem::size_of::<isize>();
    #[inline(always)]
    fn const_key_byte(&self, byte: usize) -> u8 {
        // Equivalent to `wrapping_add(isize::MIN) as usize`.
        ((*self as usize) ^ (core::isize::MIN as usize)).const_key_byte(byte)
    }
}
/// A slice of constant-length keys: the element keys concatenated in order.
impl<K> RadixSortKey for &'_ [K] where K: ConstRadixSortKey {
    #[inline(always)]
    fn key_len(&self) -> usize {
        K::CONST_KEY_LEN * self.len()
    }

    #[inline(always)]
    fn key_byte(&self, byte: usize) -> u8 {
        let elem = byte / K::CONST_KEY_LEN;
        let offset = byte % K::CONST_KEY_LEN;
        self[elem].key_byte(offset)
    }
}
/// A string key: its UTF-8 bytes in order.
impl RadixSortKey for &'_ str {
    #[inline(always)]
    fn key_len(&self) -> usize {
        // `str::len` is already the byte length of the UTF-8 contents.
        self.len()
    }

    #[inline(always)]
    fn key_byte(&self, byte: usize) -> u8 {
        self.as_bytes()[byte]
    }
}