#![crate_name="ndarray"]
#![cfg_attr(has_deprecated, feature(deprecated))]
#![doc(html_root_url = "http://bluss.github.io/rust-ndarray/doc/")]
#![cfg_attr(feature = "assign_ops", feature(augmented_assignments,
op_assign_traits))]
#[cfg(feature = "serde")]
extern crate serde;
#[cfg(feature = "rustc-serialize")]
extern crate rustc_serialize as serialize;
extern crate itertools as it;
#[cfg(not(nocomplex))]
extern crate num as libnum;
use libnum::Float;
use std::cmp;
use std::mem;
use std::ops::{Add, Sub, Mul, Div, Rem, Neg, Not, Shr, Shl,
BitAnd,
BitOr,
BitXor,
};
use std::rc::Rc;
use std::slice::{self, Iter, IterMut};
use it::ZipSlices;
pub use dimension::{Dimension, RemoveAxis};
pub use indexes::Indexes;
pub use shape_error::ShapeError;
pub use si::{Si, S};
use dimension::stride_offset;
use iterators::Baseiter;
pub use iterators::{
InnerIter,
InnerIterMut,
};
#[allow(deprecated)]
use linalg::{Field, Ring};
pub mod linalg;
mod arraytraits;
#[cfg(feature = "serde")]
mod arrayserialize;
mod arrayformat;
#[cfg(feature = "rblas")]
pub mod blas;
mod dimension;
mod indexes;
mod iterators;
mod si;
mod shape_error;
/// Array index type (one element of a shape or index tuple).
pub type Ix = u32;
/// Array index type (signed); used for strides, which may be negative.
pub type Ixs = i32;
/// An N-dimensional array, generic over its data storage `S` and
/// dimensionality `D`.
///
/// The storage determines ownership semantics: owned (`Vec`), shared
/// copy-on-write (`Rc<Vec>`), or borrowed views (`&[A]`, `&mut [A]`).
pub struct ArrayBase<S, D> where S: Data {
    /// The backing storage (owned, shared, or borrowed).
    data: S,
    /// Pointer to the first logical element; points *into* `data`'s buffer
    /// but may be offset from its start (e.g. after slicing).
    ptr: *mut S::Elem,
    /// The lengths of each axis.
    dim: D,
    /// Per-axis element strides, stored unsigned but interpreted as `Ixs`.
    strides: D,
}
/// Array storage that can be read as a contiguous slice.
///
/// Unsafe because implementors must return a slice that stays valid for
/// the lifetime of the storage and covers the whole buffer.
pub unsafe trait Data {
    type Elem;
    /// Borrow the entire backing buffer.
    fn slice(&self) -> &[Self::Elem];
}
/// Array storage that can be mutated in place.
pub unsafe trait DataMut : Data {
    /// Mutably borrow the entire backing buffer.
    fn slice_mut(&mut self) -> &mut [Self::Elem];
    /// Hook used by copy-on-write storage to guarantee sole ownership
    /// before mutation; the default (for uniquely owned storage) is a no-op.
    fn ensure_unique<D>(&mut ArrayBase<Self, D>)
        where Self: Sized, D: Dimension
    {
    }
}
/// Array storage that can be cloned together with the array's element
/// pointer.
pub unsafe trait DataClone : Data {
    /// Clone the storage and return it along with `ptr` translated so it
    /// points at the corresponding element of the *new* storage.
    ///
    /// Unsafe: `ptr` must point into this storage's buffer.
    unsafe fn clone_with_ptr(&self, ptr: *mut Self::Elem) -> (Self, *mut Self::Elem);
}
/// Shared storage: read access goes straight through the `Rc`.
unsafe impl<A> Data for Rc<Vec<A>> {
    type Elem = A;
    fn slice(&self) -> &[A] { self }
}
/// Copy-on-write behaviour for shared (`Rc`) storage.
unsafe impl<A> DataMut for Rc<Vec<A>> where A: Clone {
    fn slice_mut(&mut self) -> &mut [A] { &mut Rc::make_mut(self)[..] }
    /// Make `self_` the sole owner of its data, cloning the storage if it
    /// is currently shared.
    fn ensure_unique<D>(self_: &mut ArrayBase<Self, D>)
        where Self: Sized, D: Dimension
    {
        // Already uniquely owned: nothing to do.
        if Rc::get_mut(&mut self_.data).is_some() {
            return
        }
        // If the visible elements cover at most half the backing buffer,
        // shrink: copy just the visible elements into a fresh array.
        if self_.dim.size() <= self_.data.len() / 2 {
            unsafe {
                *self_ = Array::from_vec_dim(self_.dim.clone(),
                    self_.iter().map(|x| x.clone()).collect());
            }
            return;
        }
        // Otherwise clone the whole buffer and retarget `ptr` so it keeps
        // the same element offset inside the new allocation.
        // NOTE(review): divides by size_of::<A>() — a zero-sized A would
        // divide by zero here; confirm callers exclude ZSTs.
        let our_off = (self_.ptr as isize - self_.data.as_ptr() as isize)
            / mem::size_of::<A>() as isize;
        let rvec = Rc::make_mut(&mut self_.data);
        unsafe {
            self_.ptr = rvec.as_mut_ptr().offset(our_off);
        }
    }
}
/// Cloning shared storage is just an `Rc` refcount bump; the buffer (and
/// therefore `ptr`) does not move.
unsafe impl<A> DataClone for Rc<Vec<A>> {
    unsafe fn clone_with_ptr(&self, ptr: *mut Self::Elem)
        -> (Self, *mut Self::Elem)
    {
        (self.clone(), ptr)
    }
}
/// Uniquely owned storage.
unsafe impl<A> Data for Vec<A> {
    type Elem = A;
    fn slice(&self) -> &[A] { self }
}
/// Owned storage is always unique, so the default no-op `ensure_unique`
/// applies.
unsafe impl<A> DataMut for Vec<A> {
    fn slice_mut(&mut self) -> &mut [A] { self }
}
/// Cloning owned storage allocates a new buffer, so the array's element
/// pointer must be translated into the clone's allocation.
unsafe impl<A> DataClone for Vec<A> where A: Clone {
    unsafe fn clone_with_ptr(&self, ptr: *mut Self::Elem)
        -> (Self, *mut Self::Elem)
    {
        let mut u = self.clone();
        let mut new_ptr = u.as_mut_ptr();
        // Guard zero-sized element types: size_of::<A>() == 0 would divide
        // by zero, and no pointer translation is needed for ZSTs anyway.
        if mem::size_of::<A>() != 0 {
            // Element offset of `ptr` within the original buffer.
            // Order matters: `ptr - base`. The previous code computed
            // `base - ptr` (negated offset), which made the clone's pointer
            // land *before* its buffer whenever the array was offset into
            // its storage (e.g. after slicing); compare the correct order
            // used by `ensure_unique` for Rc storage.
            let our_off = (ptr as isize - self.as_ptr() as isize)
                / mem::size_of::<A>() as isize;
            new_ptr = new_ptr.offset(our_off);
        }
        (u, new_ptr)
    }
}
/// Borrowed (read-only view) storage.
unsafe impl<'a, A> Data for &'a [A] {
    type Elem = A;
    fn slice(&self) -> &[A] { self }
}
/// Cloning a borrowed view copies the slice reference; the buffer does not
/// move, so `ptr` is returned unchanged.
unsafe impl<'a, A> DataClone for &'a [A] {
    unsafe fn clone_with_ptr(&self, ptr: *mut Self::Elem)
        -> (Self, *mut Self::Elem)
    {
        (*self, ptr)
    }
}
/// Borrowed (mutable view) storage.
unsafe impl<'a, A> Data for &'a mut [A] {
    type Elem = A;
    fn slice(&self) -> &[A] { self }
}
/// A mutable borrow is exclusive by construction, so the default no-op
/// `ensure_unique` applies.
unsafe impl<'a, A> DataMut for &'a mut [A] {
    fn slice_mut(&mut self) -> &mut [A] { self }
}
/// Storage that owns its elements (can be constructed from a `Vec` and
/// converted into shared storage).
pub unsafe trait DataOwned : Data {
    /// Wrap a vector of elements as this storage type.
    fn new(elements: Vec<Self::Elem>) -> Self;
    /// Convert into reference-counted shared storage.
    fn into_shared(self) -> Rc<Vec<Self::Elem>>;
}
/// Marker for storage whose clones share the same buffer (cheap clones).
pub unsafe trait DataShared : Clone + DataClone { }
// Rc storage and read-only views both alias the same buffer when cloned.
unsafe impl<A> DataShared for Rc<Vec<A>> { }
unsafe impl<'a, A> DataShared for &'a [A] { }
unsafe impl<A> DataOwned for Vec<A> {
    fn new(elements: Vec<A>) -> Self { elements }
    fn into_shared(self) -> Rc<Vec<A>> { Rc::new(self) }
}
unsafe impl<A> DataOwned for Rc<Vec<A>> {
    fn new(elements: Vec<A>) -> Self { Rc::new(elements) }
    fn into_shared(self) -> Rc<Vec<A>> { self }
}
/// Shared, copy-on-write array.
pub type Array<A, D> = ArrayBase<Rc<Vec<A>>, D>;
/// Uniquely owned array.
pub type OwnedArray<A, D> = ArrayBase<Vec<A>, D>;
/// Read-only view borrowing another array's data.
pub type ArrayView<'a, A, D> = ArrayBase<&'a [A], D>;
/// Mutable view borrowing another array's data.
pub type ArrayViewMut<'a, A, D> = ArrayBase<&'a mut [A], D>;
impl<S: DataClone, D: Clone> Clone for ArrayBase<S, D>
{
    fn clone(&self) -> ArrayBase<S, D> {
        // `clone_with_ptr` both clones the storage and translates `ptr`
        // into the new buffer (a no-op for shared/borrowed storage).
        unsafe {
            let (data, ptr) = self.data.clone_with_ptr(self.ptr);
            ArrayBase {
                data: data,
                ptr: ptr,
                dim: self.dim.clone(),
                strides: self.strides.clone(),
            }
        }
    }
}
impl<S: DataClone + Copy, D: Copy> Copy for ArrayBase<S, D> { }
/// Constructors for one-dimensional arrays.
impl<S> ArrayBase<S, Ix>
    where S: DataOwned,
{
    /// Create a 1-D array from a vector (no copying).
    pub fn from_vec(v: Vec<S::Elem>) -> ArrayBase<S, Ix> {
        // Safe: the dimension is exactly the vector's length.
        unsafe {
            Self::from_vec_dim(v.len() as Ix, v)
        }
    }
    /// Create a 1-D array by collecting an iterator.
    pub fn from_iter<I: IntoIterator<Item=S::Elem>>(iterable: I) -> ArrayBase<S, Ix> {
        Self::from_vec(iterable.into_iter().collect())
    }
    /// Create a 1-D array of `n` evenly spaced floats from `start` to `end`
    /// (inclusive).
    pub fn linspace<F>(start: F, end: F, n: usize) -> ArrayBase<S, Ix>
        where S: Data<Elem=F>,
              F: libnum::Float,
              usize: it::misc::ToFloat<F>,
    {
        Self::from_iter(it::linspace(start, end, n))
    }
    /// Create a 1-D array of consecutive f32 values in `[start, end)`.
    #[cfg_attr(has_deprecated, deprecated(note="use ArrayBase::linspace() instead"))]
    pub fn range(start: f32, end: f32) -> ArrayBase<S, Ix>
        where S: Data<Elem=f32>,
    {
        // NOTE(review): if end < start the `as usize` cast of a negative
        // float is relied on to produce n == 0 — confirm on all targets.
        let n = (end - start) as usize;
        let span = if n > 0 { (n - 1) as f32 } else { 0. };
        Self::linspace(start, start + span, n)
    }
}
/// Constructors for arrays of any dimensionality.
impl<S, A, D> ArrayBase<S, D>
    where S: DataOwned<Elem=A>,
          D: Dimension,
{
    /// Create an array of shape `dim` filled with clones of `elem`.
    pub fn from_elem(dim: D, elem: A) -> ArrayBase<S, D> where A: Clone
    {
        let v = vec![elem; dim.size()];
        // Safe: v.len() == dim.size() by construction.
        unsafe {
            Self::from_vec_dim(dim, v)
        }
    }
    /// Create an array of shape `dim` filled with zeros.
    pub fn zeros(dim: D) -> ArrayBase<S, D> where A: Clone + libnum::Zero
    {
        Self::from_elem(dim, libnum::zero())
    }
    /// Create an array of shape `dim` filled with default values.
    pub fn default(dim: D) -> ArrayBase<S, D>
        where A: Default
    {
        let v = (0..dim.size()).map(|_| A::default()).collect();
        unsafe {
            Self::from_vec_dim(dim, v)
        }
    }
    /// Create an array of shape `dim` from a vector, without copying.
    ///
    /// Unsafe: the caller must guarantee `dim.size() == v.len()`
    /// (only debug-checked here).
    pub unsafe fn from_vec_dim(dim: D, mut v: Vec<A>) -> ArrayBase<S, D>
    {
        debug_assert!(dim.size() == v.len());
        ArrayBase {
            ptr: v.as_mut_ptr(),
            data: DataOwned::new(v),
            strides: dim.default_strides(),
            dim: dim
        }
    }
}
/// Private iterator plumbing for read-only views.
impl<'a, A, D> ArrayView<'a, A, D>
    where D: Dimension,
{
    /// Consume the view and produce the raw stride-walking iterator.
    #[inline]
    fn into_base_iter(self) -> Baseiter<'a, A, D> {
        unsafe {
            Baseiter::new(self.ptr, self.dim.clone(), self.strides.clone())
        }
    }
    #[inline]
    fn into_elements_base(self) -> ElementsBase<'a, A, D> {
        ElementsBase { inner: self.into_base_iter() }
    }
    /// Produce the public element iterator; uses a fast slice iterator when
    /// the view is contiguous in standard layout.
    fn into_iter_(self) -> Elements<'a, A, D> {
        Elements {
            inner:
                if let Some(slc) = self.into_slice() {
                    ElementsRepr::Slice(slc.iter())
                } else {
                    ElementsRepr::Counted(self.into_elements_base())
                }
        }
    }
    /// View the data as one contiguous slice, if standard-layout.
    fn into_slice(&self) -> Option<&'a [A]> {
        if self.is_standard_layout() {
            // Safe: standard layout means `len()` contiguous elements at `ptr`.
            unsafe {
                Some(slice::from_raw_parts(self.ptr, self.len()))
            }
        } else {
            None
        }
    }
}
/// Private iterator plumbing for mutable views (mirrors `ArrayView`).
impl<'a, A, D> ArrayViewMut<'a, A, D>
    where D: Dimension,
{
    #[inline]
    fn into_base_iter(self) -> Baseiter<'a, A, D> {
        unsafe {
            Baseiter::new(self.ptr, self.dim.clone(), self.strides.clone())
        }
    }
    #[inline]
    fn into_elements_base(self) -> ElementsBaseMut<'a, A, D> {
        ElementsBaseMut { inner: self.into_base_iter() }
    }
    /// Produce the public mutable element iterator; fast slice path when
    /// the view is standard-layout contiguous.
    fn into_iter_(self) -> ElementsMut<'a, A, D> {
        ElementsMut {
            inner:
                if self.is_standard_layout() {
                    let slc = unsafe {
                        slice::from_raw_parts_mut(self.ptr, self.len())
                    };
                    ElementsRepr::Slice(slc.iter_mut())
                } else {
                    ElementsRepr::Counted(self.into_elements_base())
                }
        }
    }
    /// View the data as one contiguous mutable slice, if standard-layout.
    fn _into_slice_mut(self) -> Option<&'a mut [A]>
    {
        if self.is_standard_layout() {
            unsafe {
                Some(slice::from_raw_parts_mut(self.ptr, self.len()))
            }
        } else {
            None
        }
    }
}
/// Core methods available on all array kinds.
impl<A, S, D> ArrayBase<S, D> where S: Data<Elem=A>, D: Dimension
{
    /// Return the total number of elements in the array.
    pub fn len(&self) -> usize
    {
        self.dim.size()
    }
    /// Return a copy of the dimension value (the shape as a tuple-like).
    pub fn dim(&self) -> D {
        self.dim.clone()
    }
    /// Return the shape as a slice of axis lengths.
    pub fn shape(&self) -> &[Ix] {
        self.dim.slice()
    }
    /// Return the strides as a slice of signed values.
    pub fn strides(&self) -> &[Ixs] {
        let s = self.strides.slice();
        // Reinterpret &[Ix] as &[Ixs]: same width, only signedness differs.
        unsafe {
            slice::from_raw_parts(s.as_ptr() as *const _, s.len())
        }
    }
    /// Return a read-only view of the array.
    pub fn view(&self) -> ArrayView<A, D> {
        debug_assert!(self.pointer_is_inbounds());
        ArrayView {
            ptr: self.ptr,
            dim: self.dim.clone(),
            strides: self.strides.clone(),
            data: self.raw_data(),
        }
    }
    /// Return a mutable view (triggers copy-on-write for shared storage).
    pub fn view_mut(&mut self) -> ArrayViewMut<A, D>
        where S: DataMut,
    {
        self.ensure_unique();
        ArrayViewMut {
            ptr: self.ptr,
            dim: self.dim.clone(),
            strides: self.strides.clone(),
            data: self.data.slice_mut(),
        }
    }
    /// Return an uniquely owned copy of the array's visible elements.
    pub fn to_owned(&self) -> OwnedArray<A, D>
        where A: Clone
    {
        // Fast path: contiguous data can be copied as one slice.
        let data = if let Some(slc) = self.as_slice() {
            slc.to_vec()
        } else {
            self.iter().cloned().collect()
        };
        unsafe {
            ArrayBase::from_vec_dim(self.dim.clone(), data)
        }
    }
    /// Return a shared (copy-on-write) copy of the array.
    pub fn to_shared(&self) -> Array<A, D>
        where A: Clone
    {
        self.to_owned().into_shared()
    }
    /// Convert owned storage into shared storage without copying elements.
    pub fn into_shared(self) -> Array<A, D>
        where S: DataOwned,
    {
        let data = self.data.into_shared();
        ArrayBase {
            data: data,
            ptr: self.ptr,
            dim: self.dim,
            strides: self.strides,
        }
    }
    /// Iterate over references to all elements, in logical order.
    pub fn iter(&self) -> Elements<A, D> {
        debug_assert!(self.pointer_is_inbounds());
        self.view().into_iter_()
    }
    /// Iterate over `(index, &element)` pairs.
    pub fn indexed_iter(&self) -> Indexed<A, D> {
        Indexed(self.view().into_elements_base())
    }
    /// Iterate over mutable references to all elements.
    pub fn iter_mut(&mut self) -> ElementsMut<A, D>
        where S: DataMut,
    {
        self.ensure_unique();
        self.view_mut().into_iter_()
    }
    /// Iterate over `(index, &mut element)` pairs.
    pub fn indexed_iter_mut(&mut self) -> IndexedMut<A, D>
        where S: DataMut,
    {
        IndexedMut(self.view_mut().into_elements_base())
    }
    /// Return a sliced copy of the array (shares data; cheap clone).
    pub fn slice(&self, indexes: &D::SliceArg) -> Self
        where S: DataShared
    {
        let mut arr = self.clone();
        arr.islice(indexes);
        arr
    }
    /// Slice the array in place: adjusts dim, strides and the base pointer.
    pub fn islice(&mut self, indexes: &D::SliceArg)
    {
        let offset = Dimension::do_slices(&mut self.dim, &mut self.strides, indexes);
        unsafe {
            self.ptr = self.ptr.offset(offset);
        }
    }
    /// Iterate over the elements selected by `indexes`.
    pub fn slice_iter(&self, indexes: &D::SliceArg) -> Elements<A, D>
    {
        let mut it = self.view();
        it.islice(indexes);
        it.into_iter_()
    }
    /// Return a sliced mutable view.
    pub fn slice_mut(&mut self, indexes: &D::SliceArg) -> ArrayViewMut<A, D>
        where S: DataMut
    {
        let mut arr = self.view_mut();
        arr.islice(indexes);
        arr
    }
    #[cfg_attr(has_deprecated, deprecated(note="use .slice_mut() instead"))]
    pub fn slice_iter_mut(&mut self, indexes: &D::SliceArg) -> ElementsMut<A, D>
        where S: DataMut,
    {
        self.slice_mut(indexes).into_iter()
    }
    /// Return a reference to the element at `index`, or None if out of bounds.
    pub fn get(&self, index: D) -> Option<&A> {
        let ptr = self.ptr;
        self.dim.stride_offset_checked(&self.strides, &index)
            .map(move |offset| unsafe {
                &*ptr.offset(offset)
            })
    }
    #[cfg_attr(has_deprecated, deprecated(note="use .get() instead"))]
    pub fn at(&self, index: D) -> Option<&A> {
        self.get(index)
    }
    /// Return a mutable reference to the element at `index`, or None.
    pub fn get_mut(&mut self, index: D) -> Option<&mut A>
        where S: DataMut,
    {
        self.ensure_unique();
        let ptr = self.ptr;
        self.dim.stride_offset_checked(&self.strides, &index)
            .map(move |offset| unsafe {
                &mut *ptr.offset(offset)
            })
    }
    #[cfg_attr(has_deprecated, deprecated(note="use .get_mut() instead"))]
    pub fn at_mut(&mut self, index: D) -> Option<&mut A>
        where S: DataMut,
    {
        self.get_mut(index)
    }
    /// Unchecked indexing. Unsafe: `index` must be in bounds
    /// (only debug-checked).
    #[inline]
    pub unsafe fn uget(&self, index: D) -> &A {
        debug_assert!(self.dim.stride_offset_checked(&self.strides, &index).is_some());
        let off = Dimension::stride_offset(&index, &self.strides);
        &*self.ptr.offset(off)
    }
    #[cfg_attr(has_deprecated, deprecated(note="use .uget() instead"))]
    #[inline]
    pub unsafe fn uchk_at(&self, index: D) -> &A {
        self.uget(index)
    }
    /// Unchecked mutable indexing. Unsafe: `index` must be in bounds, and
    /// note this does NOT call ensure_unique (caller's responsibility for
    /// shared storage).
    #[inline]
    pub unsafe fn uget_mut(&mut self, index: D) -> &mut A
        where S: DataMut
    {
        debug_assert!(self.dim.stride_offset_checked(&self.strides, &index).is_some());
        let off = Dimension::stride_offset(&index, &self.strides);
        &mut *self.ptr.offset(off)
    }
    #[cfg_attr(has_deprecated, deprecated(note="use .uget_mut() instead"))]
    #[inline]
    pub unsafe fn uchk_at_mut(&mut self, index: D) -> &mut A
        where S: DataMut
    {
        self.uget_mut(index)
    }
    /// Swap two axes (e.g. transpose a 2-D array); panics if out of range.
    pub fn swap_axes(&mut self, ax: usize, bx: usize)
    {
        self.dim.slice_mut().swap(ax, bx);
        self.strides.slice_mut().swap(ax, bx);
    }
    /// Return the lower-dimensional array at position `index` along `axis`
    /// (shares data).
    pub fn subview(&self, axis: usize, index: Ix) -> ArrayBase<S, <D as RemoveAxis>::Smaller>
        where D: RemoveAxis,
              S: DataShared,
    {
        let mut res = self.clone();
        res.isubview(axis, index);
        ArrayBase {
            data: res.data,
            ptr: res.ptr,
            dim: res.dim.remove_axis(axis),
            strides: res.strides.remove_axis(axis),
        }
    }
    /// Collapse `axis` to the single position `index`, in place
    /// (the axis keeps length 1; use `subview` to drop it).
    pub fn isubview(&mut self, axis: usize, index: Ix)
    {
        dimension::do_sub(&mut self.dim, &mut self.ptr, &self.strides, axis, index)
    }
    /// Mutable-view version of `subview`.
    pub fn subview_mut(&mut self, axis: usize, index: Ix)
        -> ArrayViewMut<A, D::Smaller>
        where S: DataMut,
              D: RemoveAxis,
    {
        let mut res = self.view_mut();
        res.isubview(axis, index);
        ArrayBase {
            data: res.data,
            ptr: res.ptr,
            dim: res.dim.remove_axis(axis),
            strides: res.strides.remove_axis(axis),
        }
    }
    #[cfg_attr(has_deprecated, deprecated(note="use .subview_mut() instead"))]
    pub fn sub_iter_mut(&mut self, axis: usize, index: Ix)
        -> ElementsMut<A, D>
        where S: DataMut,
    {
        let mut it = self.view_mut();
        dimension::do_sub(&mut it.dim, &mut it.ptr, &it.strides, axis, index);
        it.into_iter_()
    }
    /// Iterate over the innermost subviews (e.g. rows of a 2-D array).
    pub fn inner_iter(&self) -> InnerIter<A, D> {
        iterators::new_outer(self.view())
    }
    /// Mutable version of `inner_iter`.
    pub fn inner_iter_mut(&mut self) -> InnerIterMut<A, D>
        where S: DataMut
    {
        iterators::new_outer_mut(self.view_mut())
    }
    /// Compute (length, stride) of the main diagonal: length is the
    /// shortest axis, stride is the sum of all axis strides.
    fn diag_params(&self) -> (Ix, Ixs)
    {
        let len = self.dim.slice().iter().map(|x| *x).min().unwrap_or(1);
        let stride = self.strides.slice().iter()
            .map(|x| *x as Ixs)
            .fold(0, |sum, s| sum + s);
        return (len, stride)
    }
    /// Iterate over the main diagonal.
    pub fn diag_iter(&self) -> Elements<A, Ix>
    {
        let (len, stride) = self.diag_params();
        // Build a 1-D view along the diagonal.
        let view = ArrayBase {
            data: self.raw_data(),
            ptr: self.ptr,
            dim: len,
            strides: stride as Ix,
        };
        view.into_iter_()
    }
    /// Return the main diagonal as a 1-D array sharing this array's data.
    pub fn diag(&self) -> ArrayBase<S, Ix>
        where S: DataShared,
    {
        let (len, stride) = self.diag_params();
        ArrayBase {
            data: self.data.clone(),
            ptr: self.ptr,
            dim: len,
            strides: stride as Ix,
        }
    }
    /// Return a mutable view of the main diagonal.
    pub fn diag_mut(&mut self) -> ArrayViewMut<A, Ix>
        where S: DataMut,
    {
        self.ensure_unique();
        let (len, stride) = self.diag_params();
        ArrayViewMut {
            ptr: self.ptr,
            data: self.raw_data_mut(),
            dim: len,
            strides: stride as Ix,
        }
    }
    #[cfg_attr(has_deprecated, deprecated(note="use .diag_mut() instead"))]
    pub fn diag_iter_mut(&mut self) -> ElementsMut<A, Ix>
        where S: DataMut,
    {
        self.diag_mut().into_iter_()
    }
    /// Delegate to the storage's copy-on-write hook, sanity-checking the
    /// pointer before and after (COW may reallocate the buffer).
    fn ensure_unique(&mut self)
        where S: DataMut
    {
        debug_assert!(self.pointer_is_inbounds());
        S::ensure_unique(self);
        debug_assert!(self.pointer_is_inbounds());
    }
    /// Repack the data into standard (C, row-major) layout if needed.
    #[cfg(feature = "rblas")]
    fn ensure_standard_layout(&mut self)
        where S: DataOwned,
              A: Clone
    {
        if !self.is_standard_layout() {
            let mut v: Vec<A> = self.iter().cloned().collect();
            self.ptr = v.as_mut_ptr();
            self.data = DataOwned::new(v);
            self.strides = self.dim.default_strides();
        }
    }
    /// True if elements are laid out contiguously in row-major order.
    pub fn is_standard_layout(&self) -> bool
    {
        let defaults = self.dim.default_strides();
        if self.strides == defaults {
            return true;
        }
        // Strides for length-1 axes never affect addressing, so they are
        // allowed to differ from the defaults.
        for (&dim, (&s, &ds)) in zipsl(self.dim.slice(),
                                       zipsl(self.strides(), defaults.slice()))
        {
            if dim != 1 && s != (ds as Ixs) {
                return false;
            }
        }
        true
    }
    /// View the elements as one contiguous slice, if standard-layout.
    pub fn as_slice(&self) -> Option<&[A]> {
        if self.is_standard_layout() {
            unsafe {
                Some(slice::from_raw_parts(self.ptr, self.len()))
            }
        } else {
            None
        }
    }
    /// Mutable version of `as_slice` (triggers copy-on-write).
    pub fn as_slice_mut(&mut self) -> Option<&mut [A]>
        where S: DataMut
    {
        if self.is_standard_layout() {
            self.ensure_unique();
            unsafe {
                Some(slice::from_raw_parts_mut(self.ptr, self.len()))
            }
        } else {
            None
        }
    }
    /// Return a copy reshaped to `shape`; panics if the sizes differ.
    /// Shares data when already standard-layout, otherwise copies.
    pub fn reshape<E: Dimension>(&self, shape: E) -> ArrayBase<S, E>
        where S: DataShared + DataOwned, A: Clone,
    {
        if shape.size() != self.dim.size() {
            panic!("Incompatible shapes in reshape, attempted from: {:?}, to: {:?}",
                   self.dim.slice(), shape.slice())
        }
        if self.is_standard_layout() {
            let cl = self.clone();
            ArrayBase {
                data: cl.data,
                ptr: cl.ptr,
                strides: shape.default_strides(),
                dim: shape,
            }
        } else {
            let v = self.iter().map(|x| x.clone()).collect::<Vec<A>>();
            unsafe {
                ArrayBase::from_vec_dim(shape, v)
            }
        }
    }
    /// Reshape without copying; errors unless the array is standard-layout
    /// and the element counts match.
    pub fn into_shape<E>(self, shape: E) -> Result<ArrayBase<S, E>, ShapeError>
        where E: Dimension
    {
        if shape.size() != self.dim.size() {
            return Err(ShapeError::IncompatibleShapes(
                self.dim.slice().to_vec().into_boxed_slice(),
                shape.slice().to_vec().into_boxed_slice()));
        }
        if self.is_standard_layout() {
            Ok(ArrayBase {
                data: self.data,
                ptr: self.ptr,
                strides: shape.default_strides(),
                dim: shape,
            })
        } else {
            Err(ShapeError::IncompatibleLayout)
        }
    }
    /// Return a view broadcast to shape `dim`, or None if incompatible.
    /// Broadcasting gives stride 0 to stretched (length-1 or missing) axes.
    pub fn broadcast<E>(&self, dim: E)
        -> Option<ArrayView<A, E>>
        where E: Dimension
    {
        // Compute strides for `to`-shape from `from`-shape's strides,
        // matching axes from the right (NumPy-style broadcasting).
        fn upcast<D: Dimension, E: Dimension>(to: &D, from: &E, stride: &E) -> Option<D> {
            let mut new_stride = to.clone();
            if to.ndim() < from.ndim() {
                return None
            }
            {
                let mut new_stride_iter = new_stride.slice_mut().iter_mut().rev();
                for ((er, es), dr) in from.slice().iter().rev()
                    .zip(stride.slice().iter().rev())
                    .zip(new_stride_iter.by_ref())
                {
                    if *dr == *er {
                        // Axis lengths agree: keep the original stride.
                        *dr = *es;
                    } else if *er == 1 {
                        // Length-1 axis is stretched: stride 0 repeats it.
                        *dr = 0
                    } else {
                        return None;
                    }
                }
                // Leading axes not present in `from` also repeat.
                for dr in new_stride_iter {
                    *dr = 0;
                }
            }
            Some(new_stride)
        }
        let broadcast_strides =
            match upcast(&dim, &self.dim, &self.strides) {
                Some(st) => st,
                None => return None,
            };
        Some(ArrayView {
            data: self.raw_data(),
            ptr: self.ptr,
            dim: dim,
            strides: broadcast_strides,
        })
    }
    #[cfg_attr(has_deprecated, deprecated(note="use .broadcast() instead"))]
    pub fn broadcast_iter<E>(&self, dim: E) -> Option<Elements<A, E>>
        where E: Dimension,
    {
        self.broadcast(dim).map(|v| v.into_iter_())
    }
    /// Like `broadcast`, but panics on failure.
    #[inline]
    fn broadcast_unwrap<E>(&self, dim: E) -> ArrayView<A, E>
        where E: Dimension,
    {
        match self.broadcast(dim.clone()) {
            Some(it) => it,
            None => Self::broadcast_panic(&self.dim, &dim),
        }
    }
    // Out-of-line panic path, kept cold.
    #[inline(never)]
    fn broadcast_panic<E: Dimension>(from: &D, to: &E) -> ! {
        panic!("Could not broadcast array from shape: {:?} to: {:?}",
               from.slice(), to.slice())
    }
    /// Access the full backing buffer (may contain elements not visible
    /// through the array's current dim/strides).
    pub fn raw_data(&self) -> &[A] {
        self.data.slice()
    }
    /// Mutable version of `raw_data` (triggers copy-on-write).
    pub fn raw_data_mut(&mut self) -> &mut [A]
        where S: DataMut,
    {
        self.ensure_unique();
        self.data.slice_mut()
    }
    /// Debug invariant: `ptr` must lie within (or one-past) the buffer.
    fn pointer_is_inbounds(&self) -> bool {
        let slc = self.data.slice();
        if slc.is_empty() {
            // Empty buffer: the pointer is irrelevant.
            return true;
        }
        let ptr = slc.as_ptr() as *mut _;
        let end = unsafe {
            ptr.offset(slc.len() as isize)
        };
        self.ptr >= ptr && self.ptr <= end
    }
    /// Assign (clone) all elements of `rhs` into `self`, broadcasting
    /// `rhs` if needed.
    pub fn assign<E: Dimension, S2>(&mut self, rhs: &ArrayBase<S2, E>)
        where S: DataMut,
              A: Clone,
              S2: Data<Elem=A>,
    {
        self.zip_mut_with(rhs, |x, y| *x = y.clone());
    }
    /// Set every element to a clone of `x`.
    pub fn assign_scalar(&mut self, x: &A)
        where S: DataMut, A: Clone,
    {
        self.unordered_foreach_mut(move |elt| *elt = x.clone());
    }
    /// Apply `f` to every element, in whatever order is fastest
    /// (slice fast path, else per-row).
    fn unordered_foreach_mut<F>(&mut self, mut f: F)
        where S: DataMut,
              F: FnMut(&mut A)
    {
        if let Some(slc) = self.as_slice_mut() {
            for elt in slc {
                f(elt);
            }
            return;
        }
        for row in self.inner_iter_mut() {
            for elt in row {
                f(elt);
            }
        }
    }
    /// Elementwise `f(self, rhs)` for same-shape operands; tries the
    /// contiguous-slice fast path first.
    fn zip_with_mut_same_shape<B, S2, E, F>(&mut self, rhs: &ArrayBase<S2, E>, mut f: F)
        where S: DataMut,
              S2: Data<Elem=B>,
              E: Dimension,
              F: FnMut(&mut A, &B)
    {
        debug_assert_eq!(self.shape(), rhs.shape());
        if let Some(self_s) = self.as_slice_mut() {
            if let Some(rhs_s) = rhs.as_slice() {
                // Truncate to the common length so indexing is in-bounds.
                let len = cmp::min(self_s.len(), rhs_s.len());
                let s = &mut self_s[..len];
                let r = &rhs_s[..len];
                for i in 0..len {
                    f(&mut s[i], &r[i]);
                }
                return;
            }
        }
        self.zip_with_mut_outer_iter(rhs, f);
    }
    /// Row-by-row elementwise `f`; per-row slice fast path when both rows
    /// are contiguous.
    #[inline(always)]
    fn zip_with_mut_outer_iter<B, S2, E, F>(&mut self, rhs: &ArrayBase<S2, E>, mut f: F)
        where S: DataMut,
              S2: Data<Elem=B>,
              E: Dimension,
              F: FnMut(&mut A, &B)
    {
        debug_assert_eq!(self.shape(), rhs.shape());
        // Once any row fails the slice test, stop retrying (all rows share
        // the same layout).
        let mut try_slices = true;
        let mut rows = self.inner_iter_mut().zip(rhs.inner_iter());
        for (mut s_row, r_row) in &mut rows {
            if try_slices {
                if let Some(self_s) = s_row.as_slice_mut() {
                    if let Some(rhs_s) = r_row.as_slice() {
                        let len = cmp::min(self_s.len(), rhs_s.len());
                        let s = &mut self_s[..len];
                        let r = &rhs_s[..len];
                        for i in 0..len {
                            f(&mut s[i], &r[i]);
                        }
                        continue;
                    }
                }
                try_slices = false;
            }
            for (y, x) in s_row.iter_mut().zip(r_row) {
                f(y, x);
            }
        }
    }
    /// Elementwise `f(self, rhs)`, broadcasting `rhs` if its shape differs
    /// (panics if broadcasting is impossible).
    #[inline]
    pub fn zip_mut_with<B, S2, E, F>(&mut self, rhs: &ArrayBase<S2, E>, mut f: F)
        where S: DataMut,
              S2: Data<Elem=B>,
              E: Dimension,
              F: FnMut(&mut A, &B)
    {
        if self.dim.ndim() == rhs.dim.ndim() && self.shape() == rhs.shape() {
            self.zip_with_mut_same_shape(rhs, f);
        } else if rhs.dim.ndim() == 0 {
            // 0-d rhs: read its single element once and apply everywhere.
            unsafe {
                let rhs_elem = &*rhs.ptr;
                let f_ = &mut f;
                self.unordered_foreach_mut(move |elt| f_(elt, rhs_elem));
            }
        } else {
            let rhs_broadcast = rhs.broadcast_unwrap(self.dim());
            self.zip_with_mut_outer_iter(&rhs_broadcast, f);
        }
    }
    /// Fold `f` over all elements (slice fast path when contiguous).
    pub fn fold<'a, F, B>(&'a self, mut init: B, mut f: F) -> B
        where F: FnMut(B, &'a A) -> B, A: 'a
    {
        if let Some(slc) = self.as_slice() {
            for elt in slc {
                init = f(init, elt);
            }
            return init;
        }
        for row in self.inner_iter() {
            for elt in row {
                init = f(init, elt);
            }
        }
        init
    }
    /// Return a new owned array with `f` applied to every element.
    pub fn map<'a, B, F>(&'a self, mut f: F) -> OwnedArray<B, D>
        where F: FnMut(&'a A) -> B,
              A: 'a,
    {
        let mut res = Vec::with_capacity(self.dim.size());
        for elt in self.iter() {
            res.push(f(elt))
        }
        unsafe {
            ArrayBase::from_vec_dim(self.dim.clone(), res)
        }
    }
}
/// Free-function shorthand: an owned array of zeros with shape `dim`.
pub fn zeros<A, D>(dim: D) -> OwnedArray<A, D>
    where A: Clone + libnum::Zero, D: Dimension,
{
    ArrayBase::zeros(dim)
}
/// Create a zero-dimensional array holding the single element `x`.
pub fn arr0<A>(x: A) -> Array<A, ()>
{
    // Safe: the () dimension has size 1, matching the one-element vec.
    unsafe { Array::from_vec_dim((), vec![x]) }
}
/// Create a one-dimensional array by cloning the elements of `xs`.
pub fn arr1<A: Clone>(xs: &[A]) -> Array<A, Ix>
{
    Array::from_vec(xs.to_vec())
}
/// Create a zero-dimensional read-only view of the single element `x`.
pub fn aview0<A>(x: &A) -> ArrayView<A, ()> {
    // Safe: a reference is a valid one-element slice.
    let data = unsafe {
        std::slice::from_raw_parts(x, 1)
    };
    ArrayView {
        data: data,
        // Cast away const: the pointer is never written through in a view.
        ptr: data.as_ptr() as *mut _,
        dim: (),
        strides: (),
    }
}
/// Create a one-dimensional read-only view of the slice `xs`.
pub fn aview1<A>(xs: &[A]) -> ArrayView<A, Ix> {
    ArrayView {
        data: xs,
        // Cast away const: the pointer is never written through in a view.
        ptr: xs.as_ptr() as *mut _,
        dim: xs.len() as Ix,
        strides: 1,
    }
}
/// Create a two-dimensional read-only view of a slice of fixed-size rows
/// (e.g. `&[[1, 2], [3, 4]]`).
pub fn aview2<A, V: FixedInitializer<Elem=A>>(xs: &[V]) -> ArrayView<A, (Ix, Ix)> {
    let cols = V::len();
    let rows = xs.len();
    // Safe: [A; N] rows are laid out contiguously, so the whole slice of
    // rows is cols * rows contiguous elements.
    let data = unsafe {
        std::slice::from_raw_parts(xs.as_ptr() as *const A, cols * rows)
    };
    let dim = (rows as Ix, cols as Ix);
    ArrayView {
        data: data,
        ptr: data.as_ptr() as *mut _,
        strides: dim.default_strides(),
        dim: dim,
    }
}
/// Create a one-dimensional mutable view of the slice `xs`.
pub fn aview_mut1<A>(xs: &mut [A]) -> ArrayViewMut<A, Ix> {
    ArrayViewMut {
        ptr: xs.as_mut_ptr(),
        dim: xs.len() as Ix,
        strides: 1,
        data: xs,
    }
}
/// Slice-like types usable as rows in `arr2`/`arr3` construction.
pub unsafe trait Initializer {
    type Elem;
    /// View the row as a slice of elements.
    fn as_init_slice(&self) -> &[Self::Elem];
    /// True for fixed-size arrays ([T; N]); lets constructors skip the
    /// per-row length assertion.
    fn is_fixed_size() -> bool { false }
}
/// An `Initializer` whose length is known at compile time ([T; N]).
pub unsafe trait FixedInitializer: Initializer {
    fn len() -> usize;
}
// Unsized slices are variable-length rows (is_fixed_size stays false).
unsafe impl<T> Initializer for [T] {
    type Elem = T;
    fn as_init_slice(&self) -> &[T] {
        self
    }
}
// Implement Initializer / FixedInitializer for [T; 0] .. [T; 16]
// (pre-const-generics era: one impl per length, generated recursively).
macro_rules! impl_arr_init {
    (__impl $n: expr) => (
        unsafe impl<T> Initializer for [T; $n] {
            type Elem = T;
            fn as_init_slice(&self) -> &[T] { self }
            fn is_fixed_size() -> bool { true }
        }
        unsafe impl<T> FixedInitializer for [T; $n] {
            fn len() -> usize { $n }
        }
    );
    // Recursion base case.
    () => ();
    // Peel one length off the list and recurse on the rest.
    ($n: expr, $($m:expr,)*) => (
        impl_arr_init!(__impl $n);
        impl_arr_init!($($m,)*);
    )
}
impl_arr_init!(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,);
/// Create a two-dimensional array from a slice of rows, cloning elements.
/// Panics (via assert) if variable-length rows have inconsistent lengths.
pub fn arr2<A: Clone, V: Initializer<Elem=A>>(xs: &[V]) -> Array<A, (Ix, Ix)>
{
    // Column count is taken from the first row (0 if there are no rows).
    let (m, n) = (xs.len() as Ix,
                  xs.get(0).map_or(0, |snd| snd.as_init_slice().len() as Ix));
    let dim = (m, n);
    let mut result = Vec::<A>::with_capacity(dim.size());
    for snd in xs.iter() {
        let snd = snd.as_init_slice();
        // Fixed-size rows are equal-length by the type system; otherwise
        // every row must match the first row's length.
        assert!(<V as Initializer>::is_fixed_size() || snd.len() as Ix == n);
        result.extend(snd.iter().map(|x| x.clone()))
    }
    // Safe: the asserts above guarantee result.len() == m * n.
    unsafe {
        Array::from_vec_dim(dim, result)
    }
}
/// Create a three-dimensional array from nested rows, cloning elements.
/// Panics (via assert) if variable-length rows have inconsistent lengths.
pub fn arr3<A: Clone, V: Initializer<Elem=U>, U: Initializer<Elem=A>>(xs: &[V])
    -> Array<A, (Ix, Ix, Ix)>
{
    let m = xs.len() as Ix;
    // Inner dimensions are taken from the first row / first inner row.
    let fst = xs.get(0).map(|snd| snd.as_init_slice());
    let thr = fst.and_then(|elt| elt.get(0).map(|elt2| elt2.as_init_slice()));
    let n = fst.map_or(0, |v| v.len() as Ix);
    let o = thr.map_or(0, |v| v.len() as Ix);
    let dim = (m, n, o);
    let mut result = Vec::<A>::with_capacity(dim.size());
    for snd in xs.iter() {
        let snd = snd.as_init_slice();
        assert!(<V as Initializer>::is_fixed_size() || snd.len() as Ix == n);
        for thr in snd.iter() {
            let thr = thr.as_init_slice();
            assert!(<U as Initializer>::is_fixed_size() || thr.len() as Ix == o);
            result.extend(thr.iter().map(|x| x.clone()))
        }
    }
    // Safe: the asserts above guarantee result.len() == m * n * o.
    unsafe {
        Array::from_vec_dim(dim, result)
    }
}
/// Numerical reductions and comparisons.
impl<A, S, D> ArrayBase<S, D>
    where S: Data<Elem=A>,
          D: Dimension,
{
    /// Sum along `axis`, returning an array with that axis removed.
    pub fn sum(&self, axis: usize) -> OwnedArray<A, <D as RemoveAxis>::Smaller>
        where A: Clone + Add<Output=A>,
              D: RemoveAxis,
    {
        let n = self.shape()[axis];
        // Start from the first subview, then accumulate the rest.
        let mut res = self.view().subview(axis, 0).to_owned();
        for i in 1..n {
            let view = self.view().subview(axis, i);
            res.iadd(&view);
        }
        res
    }
    /// Sum of all elements.
    pub fn scalar_sum(&self) -> A
        where A: Clone + Add<Output=A> + libnum::Zero,
    {
        if let Some(slc) = self.as_slice() {
            return Self::unrolled_sum(slc);
        }
        let mut sum = A::zero();
        for row in self.inner_iter() {
            if let Some(slc) = row.as_slice() {
                sum = sum + Self::unrolled_sum(slc);
            } else {
                sum = sum + row.fold(A::zero(), |acc, elt| acc + elt.clone());
            }
        }
        sum
    }
    /// Sum a contiguous slice using 8 independent accumulators, which
    /// breaks the sequential dependency chain and helps vectorization.
    fn unrolled_sum(mut xs: &[A]) -> A
        where A: Clone + Add<Output=A> + libnum::Zero,
    {
        let mut sum = A::zero();
        let (mut p0, mut p1, mut p2, mut p3,
             mut p4, mut p5, mut p6, mut p7) =
            (A::zero(), A::zero(), A::zero(), A::zero(),
             A::zero(), A::zero(), A::zero(), A::zero());
        while xs.len() >= 8 {
            p0 = p0 + xs[0].clone();
            p1 = p1 + xs[1].clone();
            p2 = p2 + xs[2].clone();
            p3 = p3 + xs[3].clone();
            p4 = p4 + xs[4].clone();
            p5 = p5 + xs[5].clone();
            p6 = p6 + xs[6].clone();
            p7 = p7 + xs[7].clone();
            xs = &xs[8..];
        }
        // Combine partials, then fold in the < 8 leftover elements.
        sum = sum.clone() + (p0 + p4);
        sum = sum.clone() + (p1 + p5);
        sum = sum.clone() + (p2 + p6);
        sum = sum.clone() + (p3 + p7);
        for elt in xs {
            sum = sum.clone() + elt.clone();
        }
        sum
    }
    /// Mean along `axis` (sum divided by the axis length).
    #[allow(deprecated)]
    pub fn mean(&self, axis: usize) -> OwnedArray<A, <D as RemoveAxis>::Smaller>
        where A: Copy + Field,
              D: RemoveAxis,
    {
        let n = self.shape()[axis];
        let mut sum = self.sum(axis);
        let one = libnum::one::<A>();
        // Build the divisor `n` in A by repeated addition; A need not be
        // convertible from an integer.
        let mut cnt = one;
        for _ in 1..n {
            cnt = cnt + one;
        }
        sum.idiv_scalar(&cnt);
        sum
    }
    /// True if shapes match and all element pairs differ by at most `tol`.
    pub fn allclose<S2>(&self, rhs: &ArrayBase<S2, D>, tol: A) -> bool
        where A: Float + PartialOrd,
              S2: Data<Elem=A>,
    {
        self.shape() == rhs.shape() &&
        self.iter().zip(rhs.iter()).all(|(x, y)| (*x - *y).abs() <= tol)
    }
}
/// Methods specific to two-dimensional arrays.
impl<A, S> ArrayBase<S, (Ix, Ix)>
    where S: Data<Elem=A>,
{
    /// Build a 1-D element iterator from raw (ptr, len, stride).
    ///
    /// Unsafe: caller must guarantee the ptr/len/stride triple addresses
    /// valid elements for lifetime 'a.
    unsafe fn one_dimensional_iter<'a>(ptr: *mut A, len: Ix, stride: Ix)
        -> Elements<'a, A, Ix>
    {
        // The empty `data` slice is never read by the counted iterator.
        let view = ArrayView {
            data: &[],
            ptr: ptr,
            dim: len,
            strides: stride,
        };
        view.into_iter_()
    }
    /// Iterate over row `index`; panics if out of bounds.
    pub fn row_iter(&self, index: Ix) -> Elements<A, Ix>
    {
        let (m, n) = self.dim;
        let (sr, sc) = self.strides;
        assert!(index < m);
        unsafe {
            Self::one_dimensional_iter(self.ptr.offset(stride_offset(index, sr)), n, sc)
        }
    }
    /// Iterate over column `index`; panics if out of bounds.
    pub fn col_iter(&self, index: Ix) -> Elements<A, Ix>
    {
        let (m, n) = self.dim;
        let (sr, sc) = self.strides;
        assert!(index < n);
        unsafe {
            Self::one_dimensional_iter(self.ptr.offset(stride_offset(index, sc)), m, sr)
        }
    }
    /// Matrix multiplication: (m, a) x (b, n) -> (m, n); panics if a != b.
    #[allow(deprecated)]
    pub fn mat_mul(&self, rhs: &ArrayBase<S, (Ix, Ix)>) -> Array<A, (Ix, Ix)>
        where A: Copy + Ring
    {
        let ((m, a), (b, n)) = (self.dim, rhs.dim);
        let (self_columns, other_rows) = (a, b);
        assert!(self_columns == other_rows);
        let mut res_elems = Vec::<A>::with_capacity(m as usize * n as usize);
        // set_len over uninitialized memory: sound only because A: Copy
        // (no Drop) and every slot is written before being read below.
        unsafe {
            res_elems.set_len(m as usize * n as usize);
        }
        // Walk the result in row-major order, tracking (i, j) manually.
        let mut i = 0;
        let mut j = 0;
        for rr in res_elems.iter_mut() {
            unsafe {
                *rr = (0..a).fold(libnum::zero::<A>(),
                    move |s, k| s + *self.uget((i, k)) * *rhs.uget((k, j))
                );
            }
            j += 1;
            if j == n {
                j = 0;
                i += 1;
            }
        }
        unsafe {
            ArrayBase::from_vec_dim((m, n), res_elems)
        }
    }
    /// Matrix-vector multiplication: (m, a) x (a,) -> (m,); panics on
    /// mismatched inner dimension.
    #[allow(deprecated)]
    pub fn mat_mul_col(&self, rhs: &ArrayBase<S, Ix>) -> Array<A, Ix>
        where A: Copy + Ring
    {
        let ((m, a), n) = (self.dim, rhs.dim);
        let (self_columns, other_rows) = (a, n);
        assert!(self_columns == other_rows);
        let mut res_elems = Vec::<A>::with_capacity(m as usize);
        // Same set_len pattern as mat_mul: A: Copy, all slots written.
        unsafe {
            res_elems.set_len(m as usize);
        }
        let mut i = 0;
        for rr in res_elems.iter_mut() {
            unsafe {
                *rr = (0..a).fold(libnum::zero::<A>(),
                    move |s, k| s + *self.uget((i, k)) * *rhs.uget(k)
                );
            }
            i += 1;
        }
        unsafe {
            ArrayBase::from_vec_dim(m, res_elems)
        }
    }
}
// Generate the in-place arithmetic methods: `$imethod` applies the binary
// op elementwise against another (broadcastable) array, `$imth_scalar`
// against a single scalar.
macro_rules! impl_binary_op_inherent(
    ($trt:ident, $mth:ident, $imethod:ident, $imth_scalar:ident, $doc:expr) => (
        #[doc=$doc]
        pub fn $imethod <E: Dimension, S2> (&mut self, rhs: &ArrayBase<S2, E>)
            where A: Clone + $trt<A, Output=A>,
                  S2: Data<Elem=A>,
        {
            self.zip_mut_with(rhs, |x, y| {
                *x = x.clone().$mth(y.clone());
            });
        }
        #[doc=$doc]
        pub fn $imth_scalar (&mut self, x: &A)
            where A: Clone + $trt<A, Output=A>,
        {
            self.unordered_foreach_mut(move |elt| {
                *elt = elt.clone(). $mth (x.clone());
            });
        }
    );
);
/// In-place arithmetic methods (iadd, isub, ... and scalar variants).
impl<A, S, D> ArrayBase<S, D>
    where S: DataMut<Elem=A>,
          D: Dimension,
{
    impl_binary_op_inherent!(Add, add, iadd, iadd_scalar, "addition");
    impl_binary_op_inherent!(Sub, sub, isub, isub_scalar, "subtraction");
    impl_binary_op_inherent!(Mul, mul, imul, imul_scalar, "multiplication");
    impl_binary_op_inherent!(Div, div, idiv, idiv_scalar, "division");
    impl_binary_op_inherent!(Rem, rem, irem, irem_scalar, "remainder");
    impl_binary_op_inherent!(BitAnd, bitand, ibitand, ibitand_scalar, "bit and");
    impl_binary_op_inherent!(BitOr, bitor, ibitor, ibitor_scalar, "bit or");
    impl_binary_op_inherent!(BitXor, bitxor, ibitxor, ibitxor_scalar, "bit xor");
    impl_binary_op_inherent!(Shl, shl, ishl, ishl_scalar, "left shift");
    impl_binary_op_inherent!(Shr, shr, ishr, ishr_scalar, "right shift");
    /// Negate every element in place.
    pub fn ineg(&mut self)
        where A: Clone + Neg<Output=A>,
    {
        self.unordered_foreach_mut(|elt| {
            *elt = elt.clone().neg()
        });
    }
    /// Apply logical/bitwise NOT to every element in place.
    pub fn inot(&mut self)
        where A: Clone + Not<Output=A>,
    {
        self.unordered_foreach_mut(|elt| {
            *elt = elt.clone().not()
        });
    }
}
// Generate the operator-trait impls. Two forms per operator:
// by-value (consumes self, mutates in place, broadcasts rhs) and
// by-reference (clones self into an OwnedArray first).
macro_rules! impl_binary_op(
    ($trt:ident, $mth:ident, $doc:expr) => (
        #[doc=$doc]
        impl<A, S, S2, D, E> $trt<ArrayBase<S2, E>> for ArrayBase<S, D>
            where A: Clone + $trt<A, Output=A>,
                  S: DataMut<Elem=A>,
                  S2: Data<Elem=A>,
                  D: Dimension,
                  E: Dimension,
        {
            type Output = ArrayBase<S, D>;
            fn $mth (mut self, rhs: ArrayBase<S2, E>) -> ArrayBase<S, D>
            {
                self.zip_mut_with(&rhs, |x, y| {
                    *x = x.clone(). $mth (y.clone());
                });
                self
            }
        }
        #[doc=$doc]
        impl<'a, A, S, S2, D, E> $trt<&'a ArrayBase<S2, E>> for &'a ArrayBase<S, D>
            where A: Clone + $trt<A, Output=A>,
                  S: Data<Elem=A>,
                  S2: Data<Elem=A>,
                  D: Dimension,
                  E: Dimension,
        {
            type Output = OwnedArray<A, D>;
            fn $mth (self, rhs: &'a ArrayBase<S2, E>) -> OwnedArray<A, D>
            {
                self.to_owned().$mth(rhs.view())
            }
        }
    );
);
/// Operator-trait implementations (`a + b`, `-a`, `!a`, ...).
mod arithmetic_ops {
    use super::*;
    use std::ops::*;
    impl_binary_op!(Add, add, "addition");
    impl_binary_op!(Sub, sub, "subtraction");
    impl_binary_op!(Mul, mul, "multiplication");
    impl_binary_op!(Div, div, "division");
    impl_binary_op!(Rem, rem, "remainder");
    impl_binary_op!(BitAnd, bitand, "bit and");
    impl_binary_op!(BitOr, bitor, "bit or");
    impl_binary_op!(BitXor, bitxor, "bit xor");
    impl_binary_op!(Shl, shl, "left shift");
    impl_binary_op!(Shr, shr, "right shift");
    /// Elementwise negation, consuming and reusing self's storage.
    impl<A, S, D> Neg for ArrayBase<S, D>
        where A: Clone + Neg<Output=A>,
              S: DataMut<Elem=A>,
              D: Dimension
    {
        type Output = Self;
        fn neg(mut self) -> Self {
            self.ineg();
            self
        }
    }
    /// Elementwise NOT, consuming and reusing self's storage.
    impl<A, S, D> Not for ArrayBase<S, D>
        where A: Clone + Not<Output=A>,
              S: DataMut<Elem=A>,
              D: Dimension
    {
        type Output = Self;
        fn not(mut self) -> Self {
            self.inot();
            self
        }
    }
}
#[cfg(feature = "assign_ops")]
mod assign_ops {
use super::*;
use std::ops::{
AddAssign,
SubAssign,
MulAssign,
DivAssign,
RemAssign,
BitAndAssign,
BitOrAssign,
BitXorAssign,
};
macro_rules! impl_assign_op {
($trt:ident, $method:ident, $doc:expr) => {
#[doc=$doc]
impl<'a, A, S, S2, D, E> $trt<&'a ArrayBase<S2, E>> for ArrayBase<S, D>
where A: Clone + $trt<A>,
S: DataMut<Elem=A>,
S2: Data<Elem=A>,
D: Dimension,
E: Dimension,
{
fn $method(&mut self, rhs: &ArrayBase<S2, E>) {
self.zip_mut_with(rhs, |x, y| {
x.$method(y.clone());
});
}
}
};
}
impl_assign_op!(AddAssign, add_assign,
"Perform `self += rhs` as elementwise addition (in place).\n");
impl_assign_op!(SubAssign, sub_assign,
"Perform `self -= rhs` as elementwise subtraction (in place).\n");
impl_assign_op!(MulAssign, mul_assign,
"Perform `self *= rhs` as elementwise multiplication (in place).\n");
impl_assign_op!(DivAssign, div_assign,
"Perform `self /= rhs` as elementwise division (in place).\n");
impl_assign_op!(RemAssign, rem_assign,
"Perform `self %= rhs` as elementwise remainder (in place).\n");
impl_assign_op!(BitAndAssign, bitand_assign,
"Perform `self &= rhs` as elementwise bit and (in place).\n");
impl_assign_op!(BitOrAssign, bitor_assign,
"Perform `self |= rhs` as elementwise bit or (in place).\n");
impl_assign_op!(BitXorAssign, bitxor_assign,
"Perform `self ^= rhs` as elementwise bit xor (in place).\n");
}
/// Iterator over `&A`; uses a plain slice iterator when the data is
/// contiguous, otherwise a counted stride-walking iterator.
pub struct Elements<'a, A: 'a, D> {
    inner: ElementsRepr<Iter<'a, A>, ElementsBase<'a, A, D>>,
}
/// The stride-walking (non-contiguous) element iterator.
struct ElementsBase<'a, A: 'a, D> {
    inner: Baseiter<'a, A, D>,
}
/// Iterator over `&mut A` (see `Elements`).
pub struct ElementsMut<'a, A: 'a, D> {
    inner: ElementsRepr<IterMut<'a, A>, ElementsBaseMut<'a, A, D>>,
}
/// Mutable counterpart of `ElementsBase`.
struct ElementsBaseMut<'a, A: 'a, D> {
    inner: Baseiter<'a, A, D>,
}
/// Iterator over `(index, &A)` pairs.
#[derive(Clone)]
pub struct Indexed<'a, A: 'a, D>(ElementsBase<'a, A, D>);
/// Iterator over `(index, &mut A)` pairs.
pub struct IndexedMut<'a, A: 'a, D>(ElementsBaseMut<'a, A, D>);
/// Zip two slices with itertools' bounds-check-eliding slice zipper.
fn zipsl<T, U>(t: T, u: U) -> ZipSlices<T, U>
    where T: it::misc::Slice, U: it::misc::Slice
{
    ZipSlices::from_slices(t, u)
}
/// Either a fast contiguous-slice iterator or a counted stride iterator.
enum ElementsRepr<S, C> {
    Slice(S),
    Counted(C),
}