rten_tensor/
lib.rs

//! rten_tensor provides multi-dimensional arrays, commonly referred to as
//! _tensors_ in a machine learning context.
//!
//! # Storage and layout
//!
//! A tensor is a combination of data storage and a layout. The data storage
//! determines the element type and how the data is owned. A tensor can be:
//!
//! - Owned (like `Vec<T>`)
//! - Borrowed (like `&[T]` or `&mut [T]`)
//! - Maybe-owned (like `Cow<[T]>`)
//! - Shared / reference-counted (like `Arc<[T]>`)
//!
//! The layout determines the number of dimensions (the _rank_), the size of
//! each dimension (the _shape_) and how indices map to offsets in the
//! storage. The dimension count can be static (fixed at compile time) or
//! dynamic (variable at runtime).
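//!
//! For example, a brief sketch of owned tensors with static and dynamic ranks,
//! plus a borrowed view (the `Tensor::from_data` constructor and the
//! `len`/`ndim`/`view` methods are assumed here for illustration):
//!
//! ```
//! use rten_tensor::prelude::*;
//! use rten_tensor::{NdTensor, Tensor};
//!
//! // Owned tensor whose rank (2) is fixed at compile time.
//! let static_rank = NdTensor::from([[1, 2], [3, 4]]);
//! assert_eq!(static_rank.len(), 4);
//!
//! // Owned tensor whose rank is determined at runtime from the shape.
//! let dynamic_rank = Tensor::from_data(&[2, 2], vec![1, 2, 3, 4]);
//! assert_eq!(dynamic_rank.ndim(), 2);
//!
//! // Borrowed view over the owned tensor's elements.
//! let view = static_rank.view();
//! assert_eq!(view.len(), 4);
//! ```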
//!
//! # Tensor types and traits
//!
//! The base type for all tensors is [TensorBase]. This is not normally used
//! directly but instead via a type alias which specifies the data ownership
//! and layout:
//!
//! | Rank    | Owned | Borrowed | Mutably borrowed | Owned or borrowed | Reference counted |
//! | ----    | ----- | -------- | ---------------- | ----------------- | ----------------- |
//! | Static  | [NdTensor] | [NdTensorView] | [NdTensorViewMut] | [CowNdTensor] | [ArcNdTensor] |
//! | Dynamic | [Tensor]   | [TensorView]   | [TensorViewMut]   | [CowTensor]   | [ArcTensor]   |
//!
//! All tensors implement the [Layout] trait, which provides methods to query
//! the shape, dimension count and strides of the tensor. Tensor views provide
//! various methods for indexing, iterating, slicing and transforming them.
//! The [AsView] trait provides access to these methods for owned and mutably
//! borrowed tensors. Conceptually it is similar to how [Deref](std::ops::Deref)
//! allows accessing methods for `&[T]` on a `Vec<T>`. The preferred way to
//! import the traits is via the prelude:
//!
//! ```
//! use rten_tensor::prelude::*;
//! use rten_tensor::NdTensor;
//!
//! let tensor = NdTensor::from([[1, 2], [3, 4]]);
//!
//! let transposed_elems: Vec<_> = tensor.transposed().iter().copied().collect();
//! assert_eq!(transposed_elems, [1, 3, 2, 4]);
//! ```
//!
//! # Serialization
//!
//! Tensors can be serialized and deserialized using [serde](https://serde.rs)
//! if the `serde` feature is enabled. The serialized representation of a
//! tensor includes its shape and elements in row-major (C) order. For example,
//! the JSON serialization of a matrix (`NdTensor<f32, 2>`) looks like this:
//!
//! ```json
//! {
//!   "shape": [2, 2],
//!   "data": [0.5, 1.0, 1.5, 2.0]
//! }
//! ```
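//!
//! A minimal round-trip sketch, assuming the `serde` feature is enabled and
//! `serde_json` is available as a dependency (hence the example is marked
//! `ignore`):
//!
//! ```ignore
//! use rten_tensor::prelude::*;
//! use rten_tensor::NdTensor;
//!
//! let matrix = NdTensor::from([[0.5f32, 1.0], [1.5, 2.0]]);
//!
//! // Serialize to the JSON representation shown above.
//! let json = serde_json::to_string(&matrix).unwrap();
//!
//! // Deserialize back into a tensor with the same shape and elements.
//! let restored: NdTensor<f32, 2> = serde_json::from_str(&json).unwrap();
//! assert_eq!(restored.shape(), matrix.shape());
//! ```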

mod assume_init;
mod copy;
pub mod errors;
mod index_iterator;
mod iterators;
mod layout;
mod macros;
mod overlap;
mod slice_range;
mod storage;
pub mod type_num;

mod impl_debug;
#[cfg(feature = "serde")]
mod impl_serialize;
mod tensor;

/// Trait for sources of random data for tensors, for use with [`Tensor::rand`].
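///
/// A minimal sketch of a custom source, using a fixed-increment counter purely
/// for illustration (a real source would typically wrap a PRNG):
///
/// ```
/// use rten_tensor::RandomSource;
///
/// // Deterministic "random" source that yields 0.0, 1.0, 2.0, ...
/// struct Counter {
///     value: f32,
/// }
///
/// impl RandomSource<f32> for Counter {
///     fn next(&mut self) -> f32 {
///         let current = self.value;
///         self.value += 1.0;
///         current
///     }
/// }
///
/// let mut source = Counter { value: 0.0 };
/// assert_eq!(source.next(), 0.0);
/// assert_eq!(source.next(), 1.0);
/// ```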
pub trait RandomSource<T> {
    /// Generate the next random value.
    fn next(&mut self) -> T;
}

/// Storage allocation trait.
///
/// This is used by various methods on [`TensorBase`] with an `_in` suffix,
/// which allow the caller to control the allocation of the data buffer for
/// the returned owned tensor.
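///
/// A minimal sketch of a custom allocator that simply counts how many buffers
/// it has handed out (a real implementation might draw from an arena or pool):
///
/// ```
/// use std::cell::Cell;
/// use rten_tensor::Alloc;
///
/// struct CountingAlloc {
///     // `Cell` is used because `alloc` takes `&self`.
///     count: Cell<usize>,
/// }
///
/// impl Alloc for CountingAlloc {
///     fn alloc<T>(&self, capacity: usize) -> Vec<T> {
///         self.count.set(self.count.get() + 1);
///         Vec::with_capacity(capacity)
///     }
/// }
///
/// let allocator = CountingAlloc { count: Cell::new(0) };
/// let buf: Vec<f32> = allocator.alloc(16);
/// assert!(buf.is_empty());
/// assert!(buf.capacity() >= 16);
/// assert_eq!(allocator.count.get(), 1);
/// ```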
pub trait Alloc {
    /// Allocate storage for an owned tensor.
    ///
    /// The returned `Vec` should be empty but have the given capacity.
    fn alloc<T>(&self, capacity: usize) -> Vec<T>;
}

impl<A: Alloc> Alloc for &A {
    fn alloc<T>(&self, capacity: usize) -> Vec<T> {
        A::alloc(self, capacity)
    }
}

/// Implementation of [`Alloc`] which wraps the global allocator.
pub struct GlobalAlloc {}

impl GlobalAlloc {
    pub const fn new() -> GlobalAlloc {
        GlobalAlloc {}
    }
}

impl Default for GlobalAlloc {
    fn default() -> Self {
        Self::new()
    }
}

impl Alloc for GlobalAlloc {
    fn alloc<T>(&self, capacity: usize) -> Vec<T> {
        Vec::with_capacity(capacity)
    }
}

pub use assume_init::AssumeInit;
pub use index_iterator::{DynIndices, Indices, NdIndices};
pub use iterators::{
    AxisChunks, AxisChunksMut, AxisIter, AxisIterMut, InnerIter, InnerIterMut, Iter, IterMut, Lane,
    Lanes, LanesMut,
};
pub use layout::{
    AsIndex, DynLayout, IntoLayout, Layout, MatrixLayout, MutLayout, NdLayout, OverlapPolicy,
    ResizeLayout, TrustedLayout, is_valid_permutation,
};
pub use slice_range::{DynSliceItems, IntoSliceItems, SliceItem, SliceRange, to_slice_items};

pub use tensor::{
    ArcNdTensor, ArcTensor, AsView, CowNdTensor, CowTensor, Matrix, MatrixMut, NdTensor,
    NdTensorView, NdTensorViewMut, Scalar, Tensor, TensorBase, TensorView, TensorViewMut,
    WeaklyCheckedView,
};

pub use storage::{CowData, IntoStorage, Storage, StorageMut, ViewData, ViewMutData};

/// This module provides a convenient way to import the most common traits
/// from this library via a glob import.
pub mod prelude {
    pub use super::{AsView, Layout};
}

// These modules are public for use by other crates in this repo, but
// currently considered internal to the project.
#[doc(hidden)]
pub mod rng;
#[doc(hidden)]
pub mod test_util;