pub struct QuantumRBM { /* private fields */ }
Quantum Restricted Boltzmann Machine
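For background (standard classical RBM definitions, stated here as an assumption about what this type models rather than taken from this page): a restricted Boltzmann machine assigns an energy to each joint configuration of visible units v and hidden units h, and configurations are sampled from a Boltzmann distribution at temperature T. The exact parameterization used by QuantumRBM (for example, any transverse-field term in the quantum variant) is not documented here.

E(\mathbf{v},\mathbf{h}) = -\mathbf{a}^\top\mathbf{v} - \mathbf{b}^\top\mathbf{h} - \mathbf{v}^\top W \mathbf{h}

P(\mathbf{v},\mathbf{h}) \propto \exp\left(-E(\mathbf{v},\mathbf{h})/T\right)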
Implementations

impl QuantumRBM
pub fn new(
    num_visible: usize,
    num_hidden: usize,
    temperature: f64,
    learning_rate: f64,
) -> Result<Self>
Create a new Quantum RBM
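A minimal construction sketch. Only the QuantumRBM::new signature above is taken from this page; the import path is an assumption and Result is the crate's own alias.

// Assumed import path; replace with the crate's actual module layout.
// use your_crate::QuantumRBM;

fn build_rbm() -> Result<QuantumRBM> {
    // Arguments follow the signature above:
    // (num_visible, num_hidden, temperature, learning_rate).
    QuantumRBM::new(6, 3, 1.0, 0.01)
}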
Examples found in repository
examples/quantum_boltzmann.rs (lines 81-86)
77fn rbm_demo() -> Result<()> {
78 // Create RBM with annealing
79 let annealing = AnnealingSchedule::new(2.0, 0.5, 100);
80
81 let mut rbm = QuantumRBM::new(
82 6, // visible units
83 3, // hidden units
84 2.0, // initial temperature
85 0.01, // learning rate
86 )?
87 .with_annealing(annealing);
88
89 println!(" Created Quantum RBM with annealing schedule");
90
91 // Generate correlated binary data
92 let data = generate_correlated_data(200, 6);
93
94 // Train with PCD
95 println!(" Training with Persistent Contrastive Divergence...");
96 let losses = rbm.train_pcd(
97 &data, 100, // epochs
98 20, // batch size
99 50, // persistent chains
100 )?;
101
102 // Analyze training
103 let improvement = (losses[0] - losses.last().unwrap()) / losses[0] * 100.0;
104 println!(" Training statistics:");
105 println!(" - Loss reduction: {improvement:.1}%");
106 println!(" - Final temperature: 0.5");
107
108 // Test reconstruction
109 let test_data = data.slice(s![0..5, ..]).to_owned();
110 let reconstructed = rbm.qbm().reconstruct(&test_data)?;
111
112 println!("\n Reconstruction quality:");
113 for i in 0..3 {
114 print!(" Original: [");
115 for val in test_data.row(i) {
116 print!("{val:.0} ");
117 }
118 print!("] → Reconstructed: [");
119 for val in reconstructed.row(i) {
120 print!("{val:.0} ");
121 }
122 println!("]");
123 }
124
125 Ok(())
126}
127
128/// Deep Boltzmann Machine demonstration
129fn deep_boltzmann_demo() -> Result<()> {
130 // Create a 3-layer DBM
131 let layer_sizes = vec![8, 4, 2];
132 let mut dbm = DeepBoltzmannMachine::new(
133 layer_sizes.clone(),
134 1.0, // temperature
135 0.01, // learning rate
136 )?;
137
138 println!(" Created Deep Boltzmann Machine:");
139 println!(" - Architecture: {layer_sizes:?}");
140 println!(" - Total layers: {}", dbm.rbms().len());
141
142 // Generate hierarchical data
143 let data = generate_hierarchical_data(300, 8);
144
145 // Layer-wise pretraining
146 println!("\n Performing layer-wise pretraining...");
147 dbm.pretrain(
148 &data, 50, // epochs per layer
149 30, // batch size
150 )?;
151
152 println!("\n Pretraining complete!");
153 println!(" Each layer learned increasingly abstract features");
154
155 Ok(())
156}
157
158/// Energy landscape visualization
159fn energy_landscape_demo() -> Result<()> {
160 // Create small QBM for visualization
161 let qbm = QuantumBoltzmannMachine::new(
162 2, // visible units (for 2D visualization)
163 1, // hidden unit
164 0.5, // temperature
165 0.01, // learning rate
166 )?;
167
168 println!(" Analyzing energy landscape of 2-unit system");
169
170 // Compute energy for all 4 possible states
171 let states = [
172 Array1::from_vec(vec![0.0, 0.0]),
173 Array1::from_vec(vec![0.0, 1.0]),
174 Array1::from_vec(vec![1.0, 0.0]),
175 Array1::from_vec(vec![1.0, 1.0]),
176 ];
177
178 println!("\n State energies:");
179 for (i, state) in states.iter().enumerate() {
180 let energy = qbm.energy(state);
181 let prob = (-energy / qbm.temperature()).exp();
182 println!(
183 " State [{:.0}, {:.0}]: E = {:.3}, P ∝ {:.3}",
184 state[0], state[1], energy, prob
185 );
186 }
187
188 // Show coupling matrix
189 println!("\n Coupling matrix:");
190 for i in 0..3 {
191 print!(" [");
192 for j in 0..3 {
193 print!("{:6.3} ", qbm.couplings()[[i, j]]);
194 }
195 println!("]");
196 }
197
198 Ok(())
199}
200
201/// Pattern completion demonstration
202fn pattern_completion_demo() -> Result<()> {
203 // Create RBM
204 let mut rbm = QuantumRBM::new(
205 8, // visible units
206 4, // hidden units
207 1.0, // temperature
208 0.02, // learning rate
209 )?;
210
211 // Train on specific patterns
212 let patterns = create_letter_patterns();
213 println!(" Training on letter-like patterns...");
214
215 rbm.train_pcd(&patterns, 100, 10, 20)?;
216
217 // Test pattern completion
218 println!("\n Pattern completion test:");
219
220 // Create corrupted patterns
221 let mut corrupted = patterns.row(0).to_owned();
222 corrupted[3] = 1.0 - corrupted[3]; // Flip one bit
223 corrupted[5] = 1.0 - corrupted[5]; // Flip another
224
225 print!(" Corrupted: [");
226 for val in &corrupted {
227 print!("{val:.0} ");
228 }
229 println!("]");
230
231 // Complete pattern
232 let completed = complete_pattern(&rbm, &corrupted)?;
233
234 print!(" Completed: [");
235 for val in &completed {
236 print!("{val:.0} ");
237 }
238 println!("]");
239
240 print!(" Original: [");
241 for val in patterns.row(0) {
242 print!("{val:.0} ");
243 }
244 println!("]");
245
246 let accuracy = patterns
247 .row(0)
248 .iter()
249 .zip(completed.iter())
250 .filter(|(&a, &b)| (a - b).abs() < 0.5)
251 .count() as f64
252 / 8.0;
253
254 println!(" Reconstruction accuracy: {:.1}%", accuracy * 100.0);
255
256 Ok(())
257}

pub fn with_annealing(self, schedule: AnnealingSchedule) -> Self
Enable quantum annealing
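A short sketch of the builder-style call, mirroring rbm_demo in examples/quantum_boltzmann.rs (listed in full under new above). The meaning of AnnealingSchedule::new's arguments is inferred from that example's comments and printed output, not stated on this page.

// Inferred: AnnealingSchedule::new(initial_temperature, final_temperature, num_steps).
let annealing = AnnealingSchedule::new(2.0, 0.5, 100);

let rbm = QuantumRBM::new(6, 3, 2.0, 0.01)?.with_annealing(annealing);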
Examples found in repository
examples/quantum_boltzmann.rs (line 87)

The surrounding rbm_demo function is listed in full under new above; line 87 is the .with_annealing(annealing) call chained onto QuantumRBM::new.

pub fn create_rbm_circuit(&self) -> Result<()>
Create circuit for RBM sampling
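No repository example covers this method. A minimal sketch under the assumption that the circuit is built and kept inside the RBM (the Result<()> return suggests nothing is handed back to the caller):

let rbm = QuantumRBM::new(4, 2, 1.0, 0.01)?;

// Assumption: constructs the internal circuit used for RBM sampling.
rbm.create_rbm_circuit()?;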
pub fn train_pcd(
    &mut self,
    data: &Array2<f64>,
    epochs: usize,
    batch_size: usize,
    num_persistent: usize,
) -> Result<Vec<f64>>
Train using persistent contrastive divergence
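A short usage sketch. The interpretation of the returned Vec<f64> as one loss value per epoch is inferred from the rbm_demo listing under new above (which compares losses[0] with losses.last()); it is not stated explicitly on this page.

// `data`: Array2<f64> of binary samples with shape (num_samples, num_visible).
// `rbm` must be declared `mut`, since train_pcd takes &mut self.
let losses = rbm.train_pcd(
    &data,
    100, // epochs
    20,  // batch size
    50,  // persistent chains
)?;

// Inferred: one loss per epoch, so this is the overall relative improvement.
let improvement = (losses[0] - losses.last().unwrap()) / losses[0];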
Examples found in repository
examples/quantum_boltzmann.rs (lines 96-100)

The surrounding rbm_demo function is listed in full under new above; lines 96-100 are the rbm.train_pcd(&data, 100, 20, 50) call.

pub fn qbm(&self) -> &QuantumBoltzmannMachine
Get reference to the underlying QBM
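A short sketch of reaching the underlying machine through this accessor. The reconstruct, sample_hidden_given_visible, and sample_visible_given_hidden calls are taken from examples/quantum_boltzmann.rs; test_data and pattern are illustrative placeholders.

// Reconstruct a batch of visible vectors through the underlying QBM.
let reconstructed = rbm.qbm().reconstruct(&test_data)?;

// One Gibbs step, as in the example's complete_pattern helper.
let hidden = rbm.qbm().sample_hidden_given_visible(&pattern.view())?;
let visible = rbm.qbm().sample_visible_given_hidden(&hidden)?;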
Examples found in repository
examples/quantum_boltzmann.rs (line 110)

The rbm_demo, deep_boltzmann_demo, energy_landscape_demo, and pattern_completion_demo functions are listed in full under new above; line 110 is the rbm.qbm().reconstruct(&test_data) call in rbm_demo. The remaining helper functions from the same example file follow; complete_pattern (lines 357-367) also goes through qbm().
258
259/// Generate binary patterns
260fn generate_binary_patterns(n_samples: usize, n_features: usize) -> Array2<f64> {
261 Array2::from_shape_fn((n_samples, n_features), |(_, _)| {
262 if thread_rng().gen::<f64>() > 0.5 {
263 1.0
264 } else {
265 0.0
266 }
267 })
268}
269
270/// Generate correlated binary data
271fn generate_correlated_data(n_samples: usize, n_features: usize) -> Array2<f64> {
272 let mut data = Array2::zeros((n_samples, n_features));
273
274 for i in 0..n_samples {
275 // Generate correlated features
276 let base = if thread_rng().gen::<f64>() > 0.5 {
277 1.0
278 } else {
279 0.0
280 };
281
282 for j in 0..n_features {
283 if j % 2 == 0 {
284 data[[i, j]] = base;
285 } else {
286 // Correlate with previous feature
287 data[[i, j]] = if thread_rng().gen::<f64>() > 0.2 {
288 base
289 } else {
290 1.0 - base
291 };
292 }
293 }
294 }
295
296 data
297}
298
299/// Generate hierarchical data
300fn generate_hierarchical_data(n_samples: usize, n_features: usize) -> Array2<f64> {
301 let mut data = Array2::zeros((n_samples, n_features));
302
303 for i in 0..n_samples {
304 // Choose high-level pattern
305 let pattern_type = i % 3;
306
307 match pattern_type {
308 0 => {
309 // Pattern A: alternating
310 for j in 0..n_features {
311 data[[i, j]] = (j % 2) as f64;
312 }
313 }
314 1 => {
315 // Pattern B: blocks
316 for j in 0..n_features {
317 data[[i, j]] = ((j / 2) % 2) as f64;
318 }
319 }
320 _ => {
321 // Pattern C: random with structure
322 let shift = (thread_rng().gen::<f64>() * 4.0) as usize;
323 for j in 0..n_features {
324 data[[i, j]] = if (j + shift) % 3 == 0 { 1.0 } else { 0.0 };
325 }
326 }
327 }
328
329 // Add noise
330 for j in 0..n_features {
331 if thread_rng().gen::<f64>() < 0.1 {
332 data[[i, j]] = 1.0 - data[[i, j]];
333 }
334 }
335 }
336
337 data
338}
339
340/// Create letter-like patterns
341fn create_letter_patterns() -> Array2<f64> {
342 // Simple 8-bit patterns resembling letters
343 Array2::from_shape_vec(
344 (4, 8),
345 vec![
346 // Pattern 'L'
347 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, // Pattern 'T'
348 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, // Pattern 'I'
349 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, // Pattern 'H'
350 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0,
351 ],
352 )
353 .unwrap()
354}
355
356/// Complete a partial pattern
357fn complete_pattern(rbm: &QuantumRBM, partial: &Array1<f64>) -> Result<Array1<f64>> {
358 // Use Gibbs sampling to complete pattern
359 let mut current = partial.clone();
360
361 for _ in 0..10 {
362 let hidden = rbm.qbm().sample_hidden_given_visible(&current.view())?;
363 current = rbm.qbm().sample_visible_given_hidden(&hidden)?;
364 }
365
366 Ok(current)
367}

Auto Trait Implementations
impl Freeze for QuantumRBM
impl RefUnwindSafe for QuantumRBM
impl Send for QuantumRBM
impl Sync for QuantumRBM
impl Unpin for QuantumRBM
impl UnwindSafe for QuantumRBM
Blanket Implementations

impl<T> BorrowMut<T> for T
where
    T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value.

impl<T> IntoEither for T

fn into_either(self, into_left: bool) -> Either<Self, Self>

Converts self into a Left variant of Either<Self, Self> if into_left is true. Converts self into a Right variant of Either<Self, Self> otherwise.

fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>

Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true. Converts self into a Right variant of Either<Self, Self> otherwise.

impl<T> Pointable for T

impl<SS, SP> SupersetOf<SS> for SP
where
    SS: SubsetOf<SP>,

fn to_subset(&self) -> Option<SS>

The inverse inclusion map: attempts to construct self from the equivalent element of its superset.

fn is_in_subset(&self) -> bool

Checks if self is actually part of its subset T (and can be converted to it).

fn to_subset_unchecked(&self) -> SS

Use with care! Same as self.to_subset but without any property checks. Always succeeds.

fn from_subset(element: &SS) -> SP

The inclusion map: converts self to the equivalent element of its superset.