ghost_flow/lib.rs
//! # GhostFlow - Complete Machine Learning Framework in Rust
//!
//! GhostFlow is a production-ready machine learning framework built entirely in Rust,
//! designed to rival PyTorch and TensorFlow in both performance and ease of use.
//!
//! ## Features
//!
//! - **Tensor Operations**: Multi-dimensional arrays with SIMD optimization
//! - **Automatic Differentiation**: Full autograd engine with computational graph
//! - **Neural Networks**: CNN, RNN, LSTM, GRU, Transformer, Attention
//! - **50+ ML Algorithms**: Decision trees, random forests, SVM, clustering, and more
//! - **GPU Acceleration**: Hand-optimized CUDA kernels (optional)
//! - **Production Ready**: Zero warnings, comprehensive tests, full documentation
//!
//! ## Quick Start
//!
//! ```rust
//! use ghostflow::prelude::*;
//!
//! // Create tensors
//! let x = Tensor::randn(&[32, 784]);
//! let y = Tensor::randn(&[10, 784]);
//!
//! // Matrix multiply: [32, 784] x [784, 10] -> [32, 10]
//! let z = x.matmul(&y.transpose(0, 1).unwrap()).unwrap();
//! ```
//!
//! ## Installation
//!
//! Add to your `Cargo.toml`:
//!
//! ```toml
//! [dependencies]
//! ghostflow = "0.1"
//! ```
//!
//! With GPU support:
//!
//! ```toml
//! [dependencies]
//! ghostflow = { version = "0.1", features = ["cuda"] }
//! ```
//!
//! ## Modules
//!
//! - [`core`] - Core tensor operations and data structures
//! - [`nn`] - Neural network layers and building blocks
//! - [`ml`] - Classical machine learning algorithms
//! - [`autograd`] - Automatic differentiation
//! - [`optim`] - Optimizers (SGD, Adam, AdamW)
//! - [`data`] - Data loading and preprocessing utilities
//! - [`cuda`] - GPU acceleration (optional)
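//!
//! The sketch below shows how a couple of these modules are meant to fit
//! together. The constructor and method names (`Linear::new`, `forward`) are
//! illustrative assumptions here; see each module's documentation for the
//! actual API.
//!
//! ```rust,ignore
//! use ghostflow::prelude::*;
//!
//! // A single fully connected layer from the `nn` module.
//! // `Linear::new(in_features, out_features)` and `forward` are assumed signatures.
//! let layer = Linear::new(784, 10);
//! let x = Tensor::randn(&[32, 784]);
//! let logits = layer.forward(&x);
//! ```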

// Re-export core (always available)
pub use ghostflow_core as core;
pub use ghostflow_core::*;

// Re-export optional modules
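// Each optional module is gated behind a Cargo feature of the same name,
// e.g. enabling the `nn` feature makes `ghostflow::nn` available.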
#[cfg(feature = "nn")]
pub use ghostflow_nn as nn;

#[cfg(feature = "ml")]
pub use ghostflow_ml as ml;

#[cfg(feature = "autograd")]
pub use ghostflow_autograd as autograd;

#[cfg(feature = "optim")]
pub use ghostflow_optim as optim;

#[cfg(feature = "data")]
pub use ghostflow_data as data;

#[cfg(feature = "cuda")]
pub use ghostflow_cuda as cuda;

/// Prelude module for convenient imports
///
/// Import everything you need with:
/// ```
/// use ghostflow::prelude::*;
/// ```
pub mod prelude {
    pub use crate::core::{Tensor, Shape, DType};

    #[cfg(feature = "nn")]
    pub use crate::nn::{Linear, Conv2d, ReLU, Sigmoid, Softmax};

    #[cfg(feature = "autograd")]
    pub use crate::autograd::*;

    #[cfg(feature = "optim")]
    pub use crate::optim::{SGD, Adam, AdamW};

    #[cfg(feature = "data")]
    pub use crate::data::{Dataset, DataLoader};
}