pub struct QuantumRBM { /* private fields */ }
Quantum Restricted Boltzmann Machine: an energy-based generative model with binary visible and hidden units, trained with persistent contrastive divergence and, optionally, a quantum annealing schedule.
Implementations
impl QuantumRBM
pub fn new(
    num_visible: usize,
    num_hidden: usize,
    temperature: f64,
    learning_rate: f64,
) -> Result<Self>
Create a new Quantum RBM with the given numbers of visible and hidden units, temperature, and learning rate.
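A minimal construction sketch (the argument values are illustrative, taken from the repository example below):

// Illustrative values: 6 visible units, 3 hidden units,
// temperature 2.0, learning rate 0.01.
let rbm = QuantumRBM::new(6, 3, 2.0, 0.01)?;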
Examples found in repository
examples/quantum_boltzmann.rs (lines 88-93)
84 fn rbm_demo() -> Result<()> {
85 // Create RBM with annealing
86 let annealing = AnnealingSchedule::new(2.0, 0.5, 100);
87
88 let mut rbm = QuantumRBM::new(
89 6, // visible units
90 3, // hidden units
91 2.0, // initial temperature
92 0.01, // learning rate
93 )?
94 .with_annealing(annealing);
95
96 println!(" Created Quantum RBM with annealing schedule");
97
98 // Generate correlated binary data
99 let data = generate_correlated_data(200, 6);
100
101 // Train with PCD
102 println!(" Training with Persistent Contrastive Divergence...");
103 let losses = rbm.train_pcd(
104 &data, 100, // epochs
105 20, // batch size
106 50, // persistent chains
107 )?;
108
109 // Analyze training
110 let improvement = (losses[0] - losses.last().unwrap()) / losses[0] * 100.0;
111 println!(" Training statistics:");
112 println!(" - Loss reduction: {improvement:.1}%");
113 println!(" - Final temperature: 0.5");
114
115 // Test reconstruction
116 let test_data = data.slice(s![0..5, ..]).to_owned();
117 let reconstructed = rbm.qbm().reconstruct(&test_data)?;
118
119 println!("\n Reconstruction quality:");
120 for i in 0..3 {
121 print!(" Original: [");
122 for val in test_data.row(i) {
123 print!("{val:.0} ");
124 }
125 print!("] → Reconstructed: [");
126 for val in reconstructed.row(i) {
127 print!("{val:.0} ");
128 }
129 println!("]");
130 }
131
132 Ok(())
133 }
134
135 /// Deep Boltzmann Machine demonstration
136 fn deep_boltzmann_demo() -> Result<()> {
137 // Create a 3-layer DBM
138 let layer_sizes = vec![8, 4, 2];
139 let mut dbm = DeepBoltzmannMachine::new(
140 layer_sizes.clone(),
141 1.0, // temperature
142 0.01, // learning rate
143 )?;
144
145 println!(" Created Deep Boltzmann Machine:");
146 println!(" - Architecture: {layer_sizes:?}");
147 println!(" - Total layers: {}", dbm.rbms().len());
148
149 // Generate hierarchical data
150 let data = generate_hierarchical_data(300, 8);
151
152 // Layer-wise pretraining
153 println!("\n Performing layer-wise pretraining...");
154 dbm.pretrain(
155 &data, 50, // epochs per layer
156 30, // batch size
157 )?;
158
159 println!("\n Pretraining complete!");
160 println!(" Each layer learned increasingly abstract features");
161
162 Ok(())
163 }
164
165 /// Energy landscape visualization
166 fn energy_landscape_demo() -> Result<()> {
167 // Create small QBM for visualization
168 let qbm = QuantumBoltzmannMachine::new(
169 2, // visible units (for 2D visualization)
170 1, // hidden unit
171 0.5, // temperature
172 0.01, // learning rate
173 )?;
174
175 println!(" Analyzing energy landscape of 2-unit system");
176
177 // Compute energy for all 4 possible states
178 let states = [
179 Array1::from_vec(vec![0.0, 0.0]),
180 Array1::from_vec(vec![0.0, 1.0]),
181 Array1::from_vec(vec![1.0, 0.0]),
182 Array1::from_vec(vec![1.0, 1.0]),
183 ];
184
185 println!("\n State energies:");
186 for (i, state) in states.iter().enumerate() {
187 let energy = qbm.energy(state);
188 let prob = (-energy / qbm.temperature()).exp();
189 println!(
190 " State [{:.0}, {:.0}]: E = {:.3}, P ∝ {:.3}",
191 state[0], state[1], energy, prob
192 );
193 }
194
195 // Show coupling matrix
196 println!("\n Coupling matrix:");
197 for i in 0..3 {
198 print!(" [");
199 for j in 0..3 {
200 print!("{:6.3} ", qbm.couplings()[[i, j]]);
201 }
202 println!("]");
203 }
204
205 Ok(())
206 }
207
208 /// Pattern completion demonstration
209 fn pattern_completion_demo() -> Result<()> {
210 // Create RBM
211 let mut rbm = QuantumRBM::new(
212 8, // visible units
213 4, // hidden units
214 1.0, // temperature
215 0.02, // learning rate
216 )?;
217
218 // Train on specific patterns
219 let patterns = create_letter_patterns();
220 println!(" Training on letter-like patterns...");
221
222 rbm.train_pcd(&patterns, 100, 10, 20)?;
223
224 // Test pattern completion
225 println!("\n Pattern completion test:");
226
227 // Create corrupted patterns
228 let mut corrupted = patterns.row(0).to_owned();
229 corrupted[3] = 1.0 - corrupted[3]; // Flip one bit
230 corrupted[5] = 1.0 - corrupted[5]; // Flip another
231
232 print!(" Corrupted: [");
233 for val in &corrupted {
234 print!("{val:.0} ");
235 }
236 println!("]");
237
238 // Complete pattern
239 let completed = complete_pattern(&rbm, &corrupted)?;
240
241 print!(" Completed: [");
242 for val in &completed {
243 print!("{val:.0} ");
244 }
245 println!("]");
246
247 print!(" Original: [");
248 for val in patterns.row(0) {
249 print!("{val:.0} ");
250 }
251 println!("]");
252
253 let accuracy = patterns
254 .row(0)
255 .iter()
256 .zip(completed.iter())
257 .filter(|(&a, &b)| (a - b).abs() < 0.5)
258 .count() as f64
259 / 8.0;
260
261 println!(" Reconstruction accuracy: {:.1}%", accuracy * 100.0);
262
263 Ok(())
264 }

pub fn with_annealing(self, schedule: AnnealingSchedule) -> Self
Enable quantum annealing
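A builder-style sketch, with the schedule parameters inferred from the repository example below (initial temperature, final temperature, number of steps):

// Inferred from the example: anneal the temperature from 2.0 down to 0.5 over 100 steps.
let schedule = AnnealingSchedule::new(2.0, 0.5, 100);
let rbm = QuantumRBM::new(6, 3, 2.0, 0.01)?.with_annealing(schedule);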
Examples found in repository: examples/quantum_boltzmann.rs (line 94); see the rbm_demo listing under new above.

pub fn create_rbm_circuit(&self) -> Result<()>
Create circuit for RBM sampling
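No repository example calls this method directly; a hedged usage sketch, assuming it only builds the internal sampling circuit for the current model parameters:

// Hypothetical usage: prepare the sampling circuit before drawing samples.
rbm.create_rbm_circuit()?;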
pub fn train_pcd(
    &mut self,
    data: &Array2<f64>,
    epochs: usize,
    batch_size: usize,
    num_persistent: usize,
) -> Result<Vec<f64>>
Train using persistent contrastive divergence (PCD); returns the training losses recorded over the run.
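As background (this describes standard PCD, not this crate's internals): instead of restarting Gibbs chains from the data at every update, PCD keeps num_persistent "fantasy" chains alive across parameter updates and uses them for the negative phase of the gradient. For a binary RBM with weights \(W\), visible biases \(a\), and hidden biases \(b\), each update is approximately

\[
\Delta W \propto \langle v h^\top \rangle_{\mathrm{data}} - \langle v h^\top \rangle_{\mathrm{chains}},\qquad
\Delta a \propto \langle v \rangle_{\mathrm{data}} - \langle v \rangle_{\mathrm{chains}},\qquad
\Delta b \propto \langle h \rangle_{\mathrm{data}} - \langle h \rangle_{\mathrm{chains}},
\]

scaled by learning_rate and averaged over each batch of batch_size samples.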
Examples found in repository: examples/quantum_boltzmann.rs (lines 103-107); see the rbm_demo listing under new above.

pub fn qbm(&self) -> &QuantumBoltzmannMachine
Get a reference to the underlying QuantumBoltzmannMachine.
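A short sketch using only QuantumBoltzmannMachine calls that appear in the repository example (reconstruct, sample_hidden_given_visible, sample_visible_given_hidden); the variable names are illustrative:

// `rbm` is a trained QuantumRBM, `batch` an Array2<f64> of binary rows.
let reconstructed = rbm.qbm().reconstruct(&batch)?;
let v0: Array1<f64> = batch.row(0).to_owned();
let hidden = rbm.qbm().sample_hidden_given_visible(&v0.view())?;
let visible = rbm.qbm().sample_visible_given_hidden(&hidden)?;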
Examples found in repository: examples/quantum_boltzmann.rs (line 117); the rbm_demo, deep_boltzmann_demo, energy_landscape_demo, and pattern_completion_demo bodies (lines 84-264) are shown under new above. The helper functions they use follow.

266 /// Generate binary patterns
267 fn generate_binary_patterns(n_samples: usize, n_features: usize) -> Array2<f64> {
268 Array2::from_shape_fn((n_samples, n_features), |(_, _)| {
269 if thread_rng().gen::<f64>() > 0.5 {
270 1.0
271 } else {
272 0.0
273 }
274 })
275 }
276
277 /// Generate correlated binary data
278 fn generate_correlated_data(n_samples: usize, n_features: usize) -> Array2<f64> {
279 let mut data = Array2::zeros((n_samples, n_features));
280
281 for i in 0..n_samples {
282 // Generate correlated features
283 let base = if thread_rng().gen::<f64>() > 0.5 {
284 1.0
285 } else {
286 0.0
287 };
288
289 for j in 0..n_features {
290 if j % 2 == 0 {
291 data[[i, j]] = base;
292 } else {
293 // Correlate with previous feature
294 data[[i, j]] = if thread_rng().gen::<f64>() > 0.2 {
295 base
296 } else {
297 1.0 - base
298 };
299 }
300 }
301 }
302
303 data
304 }
305
306 /// Generate hierarchical data
307 fn generate_hierarchical_data(n_samples: usize, n_features: usize) -> Array2<f64> {
308 let mut data = Array2::zeros((n_samples, n_features));
309
310 for i in 0..n_samples {
311 // Choose high-level pattern
312 let pattern_type = i % 3;
313
314 match pattern_type {
315 0 => {
316 // Pattern A: alternating
317 for j in 0..n_features {
318 data[[i, j]] = (j % 2) as f64;
319 }
320 }
321 1 => {
322 // Pattern B: blocks
323 for j in 0..n_features {
324 data[[i, j]] = ((j / 2) % 2) as f64;
325 }
326 }
327 _ => {
328 // Pattern C: random with structure
329 let shift = (thread_rng().gen::<f64>() * 4.0) as usize;
330 for j in 0..n_features {
331 data[[i, j]] = if (j + shift) % 3 == 0 { 1.0 } else { 0.0 };
332 }
333 }
334 }
335
336 // Add noise
337 for j in 0..n_features {
338 if thread_rng().gen::<f64>() < 0.1 {
339 data[[i, j]] = 1.0 - data[[i, j]];
340 }
341 }
342 }
343
344 data
345 }
346
347 /// Create letter-like patterns
348 fn create_letter_patterns() -> Array2<f64> {
349 // Simple 8-bit patterns resembling letters
350 Array2::from_shape_vec(
351 (4, 8),
352 vec![
353 // Pattern 'L'
354 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, // Pattern 'T'
355 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, // Pattern 'I'
356 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, // Pattern 'H'
357 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0,
358 ],
359 )
360 .unwrap()
361 }
362
363 /// Complete a partial pattern
364 fn complete_pattern(rbm: &QuantumRBM, partial: &Array1<f64>) -> Result<Array1<f64>> {
365 // Use Gibbs sampling to complete pattern
366 let mut current = partial.clone();
367
368 for _ in 0..10 {
369        let hidden = rbm.qbm().sample_hidden_given_visible(&current.view())?;
370 current = rbm.qbm().sample_visible_given_hidden(&hidden)?;
371 }
372
373 Ok(current)
374 }

Auto Trait Implementations
impl Freeze for QuantumRBM
impl RefUnwindSafe for QuantumRBM
impl Send for QuantumRBM
impl Sync for QuantumRBM
impl Unpin for QuantumRBM
impl UnwindSafe for QuantumRBM
Blanket Implementations
impl<T> BorrowMut<T> for T
where
    T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value.

impl<T> IntoEither for T

fn into_either(self, into_left: bool) -> Either<Self, Self>

Converts self into a Left variant of Either<Self, Self> if into_left is true.
Converts self into a Right variant of Either<Self, Self> otherwise.

fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>

Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self> otherwise.

impl<T> Pointable for T

impl<SS, SP> SupersetOf<SS> for SP
where
    SS: SubsetOf<SP>,

fn to_subset(&self) -> Option<SS>

The inverse inclusion map: attempts to construct self from the equivalent element of its superset.

fn is_in_subset(&self) -> bool

Checks if self is actually part of its subset T (and can be converted to it).

fn to_subset_unchecked(&self) -> SS

Use with care! Same as self.to_subset but without any property checks. Always succeeds.

fn from_subset(element: &SS) -> SP

The inclusion map: converts self to the equivalent element of its superset.