quantum_diffusion/quantum_diffusion.rs

//! Quantum Diffusion Model Example
//!
//! This example demonstrates quantum diffusion models for generative modeling,
//! including DDPM-style models and score-based diffusion.
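//!
//! Throughout, the forward (noising) process is the standard DDPM one:
//! q(x_t | x_0) = N(sqrt(alpha_bar_t) * x_0, (1 - alpha_bar_t) * I),
//! where alpha_bar_t is the cumulative product of (1 - beta_t). New samples are
//! produced by learning and running the reverse (denoising) process.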

use ndarray::{Array1, Array2};
use quantrs2_ml::autodiff::optimizers::Adam;
use quantrs2_ml::prelude::*;

fn main() -> Result<()> {
    println!("=== Quantum Diffusion Model Demo ===\n");

    // Step 1: Demonstrate noise schedules
    println!("1. Comparing Noise Schedules...");
    compare_noise_schedules()?;

    // Step 2: Train quantum diffusion model on simple data
    println!("\n2. Training Quantum Diffusion Model...");
    train_diffusion_model()?;

    // Step 3: Generate samples
    println!("\n3. Generating New Samples...");
    generate_samples()?;

    // Step 4: Score-based diffusion
    println!("\n4. Score-Based Diffusion Demo...");
    score_diffusion_demo()?;

    // Step 5: Demonstrate diffusion process
    println!("\n5. Visualizing Diffusion Process...");
    visualize_diffusion_process()?;
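    // Optional extras (conditional and variational diffusion) live in
    // `advanced_diffusion_demo` below; uncomment to run them as a sixth step.
    // advanced_diffusion_demo()?;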

    println!("\n=== Diffusion Model Demo Complete ===");

    Ok(())
}

/// Compare different noise schedules
fn compare_noise_schedules() -> Result<()> {
    let num_timesteps = 100;

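    // beta_t is the variance of the Gaussian noise added at forward step t;
    // the schedules below differ only in how beta_t ramps up over time.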
    let schedules = vec![
        (
            "Linear",
            NoiseSchedule::Linear {
                beta_start: 0.0001,
                beta_end: 0.02,
            },
        ),
        ("Cosine", NoiseSchedule::Cosine { s: 0.008 }),
        (
            "Quadratic",
            NoiseSchedule::Quadratic {
                beta_start: 0.0001,
                beta_end: 0.02,
            },
        ),
        (
            "Sigmoid",
            NoiseSchedule::Sigmoid {
                beta_start: 0.0001,
                beta_end: 0.02,
            },
        ),
    ];

    // Build one model per schedule up front rather than re-creating them for
    // every row of the table below.
    let mut models = Vec::new();
    for (_, schedule) in &schedules {
        models.push(QuantumDiffusionModel::new(2, 4, num_timesteps, *schedule)?);
    }

    println!("   Noise levels at different timesteps:");
    println!("   Time     Linear   Cosine   Quadratic  Sigmoid");

    for t in (0..=100).step_by(20) {
        let t_idx = (t * (num_timesteps - 1) / 100).min(num_timesteps - 1);
        print!("   t={:3}%: ", t);

        for model in &models {
            print!("{:8.4} ", model.betas()[t_idx]);
        }
        println!();
    }

    Ok(())
}

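/// Minimal reference implementation of the cosine beta schedule
/// (Nichol & Dhariwal, 2021), included purely as an illustrative sketch:
/// alpha_bar(t) = cos^2(((t/T + s) / (1 + s)) * pi/2) and
/// beta_t = 1 - alpha_bar(t) / alpha_bar(t - 1). It assumes (but does not
/// guarantee) that `NoiseSchedule::Cosine` follows the same convention, and it
/// is not called from `main`.
#[allow(dead_code)]
fn cosine_betas(num_timesteps: usize, s: f64) -> Vec<f64> {
    let alpha_bar = |t: f64| {
        let f = ((t / num_timesteps as f64 + s) / (1.0 + s)) * std::f64::consts::FRAC_PI_2;
        f.cos().powi(2)
    };
    (1..=num_timesteps)
        .map(|t| {
            // Clip beta_t as in the original paper to keep the last steps stable
            (1.0 - alpha_bar(t as f64) / alpha_bar((t - 1) as f64)).clamp(0.0, 0.999)
        })
        .collect()
}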
/// Train a quantum diffusion model
fn train_diffusion_model() -> Result<()> {
    // Generate synthetic 2D data (two moons)
    let num_samples = 200;
    let data = generate_two_moons(num_samples);

    println!("   Generated {} samples of 2D two-moons data", num_samples);

    // Create diffusion model
    let mut model = QuantumDiffusionModel::new(
        2,  // data dimension
        4,  // num qubits
        50, // timesteps
        NoiseSchedule::Cosine { s: 0.008 },
    )?;

    println!("   Created quantum diffusion model:");
    println!("   - Data dimension: 2");
    println!("   - Qubits: 4");
    println!("   - Timesteps: 50");
    println!("   - Schedule: Cosine");

    // Train model
    let mut optimizer = Adam::new(0.001);
    let epochs = 100;
    let batch_size = 32;

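    // Training follows the usual DDPM recipe: draw a random timestep t and
    // noise eps, form x_t from x_0 in closed form, and fit the quantum circuit
    // to predict the injected noise (the exact loss is defined by the library).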
    println!("\n   Training for {} epochs...", epochs);
    let losses = model.train(&data, &mut optimizer, epochs, batch_size)?;

    // Print training statistics
    println!("\n   Training Statistics:");
    println!("   - Initial loss: {:.4}", losses[0]);
    println!("   - Final loss: {:.4}", losses.last().unwrap());
    println!(
        "   - Improvement: {:.2}%",
        (1.0 - losses.last().unwrap() / losses[0]) * 100.0
    );

    Ok(())
}

/// Generate samples from a diffusion model
fn generate_samples() -> Result<()> {
    // Create a fresh model (untrained here, for demonstration purposes)
    let model = QuantumDiffusionModel::new(
        2,  // data dimension
        4,  // num qubits
        50, // timesteps
        NoiseSchedule::Linear {
            beta_start: 0.0001,
            beta_end: 0.02,
        },
    )?;

    // Generate samples
    let num_samples = 10;
    println!("   Generating {} samples...", num_samples);

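    // Generation runs the learned reverse chain: start from random noise and
    // denoise it one timestep at a time back to t = 0.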
    let samples = model.generate(num_samples)?;

    println!("\n   Generated samples:");
    for i in 0..num_samples.min(5) {
        println!(
            "   Sample {}: [{:.3}, {:.3}]",
            i + 1,
            samples[[i, 0]],
            samples[[i, 1]]
        );
    }

    // Compute statistics
    let mean = samples.mean_axis(ndarray::Axis(0)).unwrap();
    let std = samples.std_axis(ndarray::Axis(0), 0.0);

    println!("\n   Sample statistics:");
    println!("   - Mean: [{:.3}, {:.3}]", mean[0], mean[1]);
    println!("   - Std:  [{:.3}, {:.3}]", std[0], std[1]);

    Ok(())
}

/// Score-based diffusion demonstration
fn score_diffusion_demo() -> Result<()> {
    // Create score-based model
    let model = QuantumScoreDiffusion::new(
        2,  // data dimension
        4,  // num qubits
        10, // noise levels
    )?;

    println!("   Created quantum score-based diffusion model");
    println!("   - Noise levels: {:?}", model.noise_levels());

    // Test score estimation
    let x = Array1::from_vec(vec![0.5, -0.3]);
    let noise_level = 0.1;

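    // The score is the gradient of the log-density of the noise-perturbed data,
    // grad_x log p_sigma(x); it points towards regions of higher probability.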
    let score = model.estimate_score(&x, noise_level)?;
    println!("\n   Score estimation:");
    println!("   - Input: [{:.3}, {:.3}]", x[0], x[1]);
    println!("   - Noise level: {:.3}", noise_level);
    println!("   - Estimated score: [{:.3}, {:.3}]", score[0], score[1]);

    // Langevin sampling
    println!("\n   Langevin sampling:");
    let init = Array1::from_vec(vec![2.0, 2.0]);
    let num_steps = 100;
    let step_size = 0.01;

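    // Unadjusted Langevin dynamics: roughly
    //   x_{k+1} = x_k + (step_size / 2) * score(x_k) + sqrt(step_size) * z_k,
    // with z_k ~ N(0, I); the library's exact update may differ in constants.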
    let sample = model.langevin_sample(init.clone(), noise_level, num_steps, step_size)?;

    println!("   - Initial: [{:.3}, {:.3}]", init[0], init[1]);
    println!(
        "   - After {} steps: [{:.3}, {:.3}]",
        num_steps, sample[0], sample[1]
    );
    println!(
        "   - Distance moved: {:.3}",
        ((sample[0] - init[0]).powi(2) + (sample[1] - init[1]).powi(2)).sqrt()
    );

    Ok(())
}

/// Visualize the diffusion process
fn visualize_diffusion_process() -> Result<()> {
    let model = QuantumDiffusionModel::new(
        2,  // data dimension
        4,  // num qubits
        20, // fewer timesteps for visualization
        NoiseSchedule::Linear {
            beta_start: 0.0001,
            beta_end: 0.02,
        },
    )?;

    // Start with a clean (noise-free) data point
    let x0 = Array1::from_vec(vec![1.0, 0.5]);

    println!("   Forward diffusion process:");
    println!("   t=0 (original): [{:.3}, {:.3}]", x0[0], x0[1]);

    // Show forward diffusion at different timesteps
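    // Thanks to the Gaussian closure of the forward process, x_t can be sampled
    // directly from x_0:
    //   x_t = sqrt(alpha_bar_t) * x_0 + sqrt(1 - alpha_bar_t) * eps,
    // so the noise level reported below is sqrt(1 - alpha_bar_t).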
    for t in [5, 10, 15, 19] {
        let (xt, _) = model.forward_diffusion(&x0, t)?;
        let noise_level = (1.0 - model.alphas_cumprod()[t]).sqrt();
        println!(
            "   t={:2} (noise={:.3}): [{:.3}, {:.3}]",
            t, noise_level, xt[0], xt[1]
        );
    }

    println!("\n   Reverse diffusion process:");

    // Start from random noise (uniform in [-1, 1] for this demo)
    let mut xt = Array1::from_vec(vec![
        2.0 * rand::random::<f64>() - 1.0,
        2.0 * rand::random::<f64>() - 1.0,
    ]);

    println!("   t=19 (pure noise): [{:.3}, {:.3}]", xt[0], xt[1]);

    // Show reverse diffusion
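    // Each reverse step uses the learned denoiser to estimate x_{t-1} from x_t,
    // progressively stripping the noise away.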
    for t in [15, 10, 5, 0] {
        xt = model.reverse_diffusion_step(&xt, t)?;
        println!("   t={:2} (denoised): [{:.3}, {:.3}]", t, xt[0], xt[1]);
    }

    println!("\n   This demonstrates how diffusion models:");
    println!("   1. Gradually add noise to data (forward process)");
    println!("   2. Learn to reverse this process (backward process)");
    println!("   3. Generate new samples by denoising random noise");

    Ok(())
}

/// Generate two-moons dataset
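///
/// Two interleaving half-circles with uniform jitter of amplitude 0.1, a common
/// toy benchmark for generative models.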
fn generate_two_moons(n_samples: usize) -> Array2<f64> {
    let mut data = Array2::zeros((n_samples, 2));
    let n_samples_per_moon = n_samples / 2;

    // First moon
    for i in 0..n_samples_per_moon {
        let angle = std::f64::consts::PI * i as f64 / n_samples_per_moon as f64;
        data[[i, 0]] = angle.cos() + 0.1 * (2.0 * rand::random::<f64>() - 1.0);
        data[[i, 1]] = angle.sin() + 0.1 * (2.0 * rand::random::<f64>() - 1.0);
    }

    // Second moon (shifted and flipped); also covers the extra row when
    // n_samples is odd, so no sample is left at the origin
    for idx in n_samples_per_moon..n_samples {
        let i = idx - n_samples_per_moon;
        let angle = std::f64::consts::PI * i as f64 / (n_samples - n_samples_per_moon) as f64;
        data[[idx, 0]] = 1.0 - angle.cos() + 0.1 * (2.0 * rand::random::<f64>() - 1.0);
        data[[idx, 1]] = 0.5 - angle.sin() + 0.1 * (2.0 * rand::random::<f64>() - 1.0);
    }

    data
}

/// Advanced diffusion techniques demonstration
///
/// Not part of the default run; see the commented-out call at the end of
/// `main`.
#[allow(dead_code)]
fn advanced_diffusion_demo() -> Result<()> {
    println!("\n6. Advanced Diffusion Techniques:");

    // Conditional generation
    println!("\n   a) Conditional Generation:");
    let model = QuantumDiffusionModel::new(4, 4, 50, NoiseSchedule::Cosine { s: 0.008 })?;
    let condition = Array1::from_vec(vec![0.5, -0.5]);
    let conditional_samples = model.conditional_generate(&condition, 5)?;

    println!(
        "   Generated {} conditional samples",
        conditional_samples.nrows()
    );
    println!("   Condition: [{:.3}, {:.3}]", condition[0], condition[1]);

    // Variational diffusion
    println!("\n   b) Variational Diffusion Model:");
    let vdm = QuantumVariationalDiffusion::new(
        4, // data_dim
        2, // latent_dim
        4, // num_qubits
    )?;

    let x = Array1::from_vec(vec![0.1, 0.2, 0.3, 0.4]);
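    // The encoder maps data to the mean and log-variance of a Gaussian latent
    // posterior, as in a VAE; sampling would use z = mean + exp(log_var / 2) * eps.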
    let (mean, log_var) = vdm.encode(&x)?;

    println!("   Encoded data to latent space:");
    println!("   - Input: {:?}", x.as_slice().unwrap());
    println!("   - Latent mean: [{:.3}, {:.3}]", mean[0], mean[1]);
    println!(
        "   - Latent log_var: [{:.3}, {:.3}]",
        log_var[0], log_var[1]
    );

    Ok(())
}