Expand description
# Volta
A minimal automatic differentiation library implementing PyTorch-like tensor operations from scratch in pure Rust. This library provides:
- Dynamic computation graphs for automatic differentiation
- Broadcasting support for tensor operations
- Common neural network operations (matmul, activations, etc.)
- Numerical gradient checking for validation
# Architecture
The library uses reference-counted interior mutability (Rc<RefCell<RawTensor>>) to build
dynamic computation graphs. Each tensor operation creates new tensors and stores gradient
functions that know how to backpropagate through that operation.
# Re-exports
pub use dtype::DType;
pub use storage::Storage;
pub use gpu::GpuBuffer;
pub use gpu::GpuContext;
pub use gpu::get_gpu_context;
pub use gpu::gpu_cleanup;
pub use gpu::gpu_compact;
pub use gpu::gpu_pending_count;
pub use gpu::gpu_pool_stats;
pub use gpu::gpu_sync;
pub use gpu::gpu_sync_threshold;
pub use gpu::is_gpu_available;
pub use autograd::GradFn;
pub use device::Device;
pub use nn::layers::Dropout;
pub use nn::layers::flatten::Flatten;
pub use nn::Adam;
pub use nn::BatchNorm1d;
pub use nn::BatchNorm2d;
pub use nn::Conv2d;
pub use nn::ConvTranspose2d;
pub use nn::Embedding;
pub use nn::LSTMCell;
pub use nn::Linear;
pub use nn::MaxPool2d;
pub use nn::Module;
pub use nn::PixelShuffle;
pub use nn::ReLU;
pub use nn::SGD;
pub use nn::Sequential;
pub use nn::SequentialBuilder;
pub use nn::Sigmoid;
pub use nn::Tanh;
pub use tensor::RawTensor;
pub use tensor::Tensor;
pub use tensor::TensorOps;
pub use tensor::DataLoader;
pub use tensor::bce_loss;
pub use tensor::bce_with_logits_loss;
pub use tensor::check_gradients;
pub use tensor::check_gradients_simple;
pub use tensor::cross_entropy_loss;
pub use tensor::kl_divergence_gaussian;
pub use tensor::manual_seed;
pub use tensor::max_dim;
pub use tensor::mse_loss;
pub use tensor::new_tensor;
pub use tensor::nll_loss;
pub use tensor::ones;
pub use tensor::rand;
pub use tensor::randn;
pub use tensor::randn_like;
pub use tensor::softmax;
pub use tensor::sum_dim;
pub use tensor::zeros;
pub use data::load_mnist_images;
pub use data::load_mnist_labels;
pub use data::normalize;
pub use data::to_one_hot;
pub use io::TypedTensorData;
pub use io::load_safetensors;
pub use io::load_safetensors_raw;
pub use io::load_safetensors_with_mapping;
pub use io::load_state_dict_with_mapping;
pub use io::mapping;
pub use io::save_safetensors;
pub use io::save_safetensors_typed;
pub use utils::ProgressBar;
pub use ops::BinaryGradFn;
pub use ops::BinaryOp;
pub use ops::MatMulGradFn;
pub use ops::MaxReduceGradFn;
pub use ops::MeanGradFn;
pub use ops::MovementGradFn;
pub use ops::MovementOp;
pub use ops::MulAccGradFn;
pub use ops::ReduceOp;
pub use ops::SumGradFn;
pub use ops::TernaryOp;
pub use ops::UnaryGradFn;
pub use ops::UnaryOp;
pub use ops::WhereGradFn;