peft_rs/lib.rs
//! # peft-rs
//!
//! Comprehensive PEFT (Parameter-Efficient Fine-Tuning) adapter library for Rust.
//!
//! This crate provides modular implementations of various PEFT methods:
//! - **`LoRA`** (Low-Rank Adaptation)
//! - **`DoRA`** (Weight-Decomposed Low-Rank Adaptation)
//! - **`AdaLoRA`** (Adaptive Low-Rank Adaptation)
//! - **IA³** (Infused Adapter by Inhibiting and Amplifying Inner Activations)
//! - **`LoHa`** (Low-Rank Hadamard Product)
//! - **`LoKr`** (Low-Rank Kronecker Product)
//! - **OFT** (Orthogonal Fine-Tuning)
//! - **BOFT** (Butterfly Orthogonal Fine-Tuning)
//! - **`VeRA`** (Vector-based Random Matrix Adaptation)
//! - **Prefix Tuning**
//! - **Prompt Tuning**
//!
//! ## Quick Start
//!
//! ```rust,ignore
//! use peft_rs::{LoraConfig, LoraLayer};
//! use candle_core::{DType, Device, Tensor};
//!
//! // Create a LoRA layer
//! let config = LoraConfig {
//!     r: 8,
//!     alpha: 16,
//!     dropout: 0.0,
//! };
//! let layer = LoraLayer::new(768, 768, config, &Device::Cpu)?;
//!
//! // Apply to input
//! let input = Tensor::zeros(&[1, 10, 768], DType::F32, &Device::Cpu)?;
//! let output = layer.forward(&input)?;
//! ```
//!
//! ## Architecture
//!
//! All adapters implement the [`Adapter`] trait, which provides a common interface
//! for forward passes; adapters whose learned update can be folded back into the
//! base weights also implement [`Mergeable`].
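//!
//! A minimal sketch of code that is generic over these traits (the `merge`
//! method name below is an assumption for illustration; see the [`traits`]
//! module for the actual signatures):
//!
//! ```rust,ignore
//! use peft_rs::{Adapter, Mergeable, Result};
//! use candle_core::Tensor;
//!
//! // Run an adapter's forward pass, then fold its update into the base weights.
//! fn forward_then_merge<A: Adapter + Mergeable>(adapter: &mut A, x: &Tensor) -> Result<Tensor> {
//!     let y = adapter.forward(x)?; // adapter-specific forward pass
//!     adapter.merge()?;            // assumed merge entry point on `Mergeable`
//!     Ok(y)
//! }
//! ```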

#![warn(missing_docs)]
#![warn(clippy::pedantic)]

pub mod adapters;
pub mod config;
pub mod error;
pub mod io;
pub mod model;
pub mod registry;
pub mod training;
pub mod traits;

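// Re-export each adapter's config and layer types at the crate root.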
pub use adapters::adalora::{AdaLoraConfig, AdaLoraLayer};
pub use adapters::boft::{BoftConfig, BoftLayer};
pub use adapters::ia3::{Ia3Config, Ia3Layer};
pub use adapters::loha::{LoHaConfig, LoHaLayer};
pub use adapters::lokr::{LoKrConfig, LoKrLayer};
pub use adapters::lora::{DoraLayer, LoraConfig, LoraLayer};
pub use adapters::oft::{OftConfig, OftLayer};
pub use adapters::prefix_tuning::{PrefixTuningConfig, PrefixTuningLayer};
pub use adapters::prompt_tuning::{PromptTuningConfig, PromptTuningLayer};
pub use adapters::vera::{VeraConfig, VeraLayer};
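// Core infrastructure: errors, persistence, model wrapping, the adapter
// registry, training utilities, and the shared adapter traits.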
pub use error::{PeftError, Result};
pub use io::{
    load_adapter_config, load_adapter_weights, load_pretrained, save_adapter_config,
    save_adapter_weights, save_pretrained, SaveLoad, ADAPTER_CONFIG_FILENAME,
    ADAPTER_WEIGHTS_FILENAME,
};
pub use model::{get_peft_model, ModulePattern, PeftModel};
pub use registry::AdapterRegistry;
pub use training::{
    count_trainable_parameters, format_parameter_count, AdapterTrainingConfig,
    AdapterTrainingState, LrSchedule,
};
pub use traits::{Adapter, AdapterConfig, Mergeable, Trainable};