#![cfg_attr(not(feature = "std"), no_std)]
#![cfg_attr(feature = "missing_mpl", feature(plugin))]
#![cfg_attr(feature = "missing_mpl", plugin(missing_mpl))]
#![cfg_attr(feature = "missing_mpl", deny(missing_mpl))]
#![warn(missing_docs)]
#[cfg(not(feature = "std"))]
extern crate core as std;
#[cfg(not(feature = "std"))]
#[macro_use]
extern crate std;
#[cfg(test)]
#[macro_use(expect)]
extern crate expectest;
extern crate num_traits;
extern crate ordered_iter;
extern crate arrayvec;
/// Dense vector types (presumably contiguous storage — see module for details).
pub mod dense;
/// Sparse vector types (presumably index/value pairs — see module for details).
pub mod sparse;
use std::ops::{Add, AddAssign, Sub, SubAssign, Mul, MulAssign, Div, DivAssign};
use num_traits::{MulAdd, MulAddAssign, real::Real};
pub mod prelude {
pub use super::{
VectorOps, VectorAssignOps,
Vector, VectorRef,
VectorAssign, VectorAssignRef,
Dot, Distance
};
}
/// The by-value arithmetic operations a vector type must support.
///
/// `Vector` is the right-hand-side vector operand type (typically `Self` or
/// `&Self`); `Scalar` is the element type. Required operations:
/// vector addition and subtraction, scalar multiplication and division, and
/// fused multiply-add (`MulAdd`, i.e. `(self * scalar) + vector` per the
/// `num_traits` contract).
pub trait VectorOps<Vector, Scalar>: Sized
+ Add<Vector, Output = Self>
+ Sub<Vector, Output = Self>
+ Mul<Scalar, Output = Self>
+ Div<Scalar, Output = Self>
+ MulAdd<Scalar, Vector, Output = Self>
{}
/// The compound-assignment counterparts of [`VectorOps`].
///
/// `Vector` is the right-hand-side vector operand type (typically `Self` or
/// `&Self`); `Scalar` is the element type. Required operations:
/// `+=`, `-=` with a vector, `*=`, `/=` with a scalar, and in-place fused
/// multiply-add (`MulAddAssign`).
pub trait VectorAssignOps<Vector, Scalar>: Sized
+ AddAssign<Vector>
+ SubAssign<Vector>
+ MulAssign<Scalar>
+ DivAssign<Scalar>
+ MulAddAssign<Scalar, Vector>
{}
/// A mathematical vector: comparable for equality and supporting the
/// by-value arithmetic of [`VectorOps`] with `Self` operands.
///
/// NOTE(review): the trait takes a generic `Scalar` parameter *and* declares
/// an associated `type Scalar`; the relationship between the two is not
/// constrained here — confirm implementors keep them in sync.
pub trait Vector<Scalar>: PartialEq + VectorOps<Self, Scalar> {
/// The scalar (element) type of this vector.
type Scalar;
}
pub trait VectorRef<Scalar>: Vector<Scalar> + for<'a> VectorOps<&'a Self, Scalar> { }
// Blanket impl: any `Vector` whose ops also accept `&T` right-hand sides
// automatically is a `VectorRef` — implementors never opt in by hand.
impl<T, S> VectorRef<S> for T
where
T: Vector<S> + for<'a> VectorOps<&'a T, S>
{}
pub trait VectorAssign<Scalar>: Vector<Scalar> + VectorAssignOps<Self, Scalar> {}
// Blanket impl: a `Vector` with the assignment ops is a `VectorAssign`.
impl<T, S> VectorAssign<S> for T
where
T: Vector<S> + VectorAssignOps<Self, S>
{}
pub trait VectorAssignRef<Scalar>: VectorAssign<Scalar> + for<'a> VectorAssignOps<&'a Self, Scalar> { }
// Blanket impl: a `VectorAssign` whose assignment ops also accept `&T`
// right-hand sides automatically is a `VectorAssignRef`.
impl<T, S> VectorAssignRef<S> for T
where
T: VectorAssign<S> + for<'a> VectorAssignOps<&'a T, S>
{}
/// Inner (dot) product of two vectors.
pub trait Dot: Sized {
/// The scalar type produced by the dot product.
type Scalar;
/// Returns the dot product of `self` and `rhs`.
fn dot(&self, rhs: &Self) -> Self::Scalar;
}
/// Distance between two vectors, in squared and true form.
pub trait Distance: Sized {
    /// The scalar type in which distances are measured.
    type Scalar;

    /// Returns the squared distance between `self` and `rhs`.
    ///
    /// Avoids the square root of [`Distance::distance`]; prefer it when only
    /// comparing magnitudes.
    fn squared_distance(&self, rhs: &Self) -> Self::Scalar;

    /// Returns the distance between `self` and `rhs`.
    ///
    /// Provided in terms of [`Distance::squared_distance`]; only available
    /// when the scalar type is a `Real` number, since it needs `sqrt`.
    fn distance(&self, rhs: &Self) -> Self::Scalar
    where
        Self::Scalar: Real,
    {
        let squared = self.squared_distance(rhs);
        squared.sqrt()
    }
}