1use crate::fab::serialize::write_flagged_int;
15use crate::repr::{BinaryHashRepr, IoWrite, MemWrite};
16use const_hex::ToHexExt;
17use fake::Dummy;
18#[cfg(feature = "proptest")]
19use proptest::arbitrary::Arbitrary;
20use rand::Rng;
21use rand::distributions::Standard;
22use rand::prelude::Distribution;
23use serde::{Deserialize, Serialize};
24#[cfg(feature = "proptest")]
25use serialize::{NoStrategy, simple_arbitrary};
26use serialize::{Serializable, Tagged, tag_enforcement_test};
27use std::borrow::Borrow;
28use std::fmt::{self, Debug, Formatter};
29use std::iter::{empty, once};
30#[cfg(feature = "proptest")]
31use std::marker::PhantomData;
32use std::ops::{
33 Deref, Index, Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive,
34};
35use std::sync::Arc;
36
37use super::serialize::flagged_int_size;
38
/// An ordered sequence of [`ValueAtom`]s: the owned, growable form of a
/// field-aligned binary value (`ValueSlice` is the borrowed view).
#[derive(Clone, Default, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize, Dummy)]
#[serde(transparent)]
pub struct Value(pub Vec<ValueAtom>);
43
44impl Tagged for Value {
45 fn tag() -> std::borrow::Cow<'static, str> {
46 "fab-value[v1]".into()
47 }
48 fn tag_unique_factor() -> String {
49 "vec(vec(u8))".into()
50 }
51}
52tag_enforcement_test!(Value);
53
/// Borrowed, unsized view over a run of [`ValueAtom`]s — the `[T]` to
/// [`Value`]'s `Vec<T>`.
///
/// `#[repr(transparent)]` is load-bearing: `ValueSlice::from_prim_slice`
/// pointer-casts `&[ValueAtom]` to `&ValueSlice`.
#[derive(PartialEq, Eq, Hash, PartialOrd, Ord, Serialize)]
#[serde(transparent)]
#[repr(transparent)]
pub struct ValueSlice(pub [ValueAtom]);
59
60impl Index<Range<usize>> for ValueSlice {
61 type Output = ValueSlice;
62 fn index(&self, range: Range<usize>) -> &Self::Output {
63 ValueSlice::from_prim_slice(&self.0[range])
64 }
65}
66
67impl Index<RangeFrom<usize>> for ValueSlice {
68 type Output = ValueSlice;
69 fn index(&self, range: RangeFrom<usize>) -> &Self::Output {
70 ValueSlice::from_prim_slice(&self.0[range])
71 }
72}
73
74impl Index<RangeFull> for ValueSlice {
75 type Output = ValueSlice;
76 fn index(&self, range: RangeFull) -> &Self::Output {
77 ValueSlice::from_prim_slice(&self.0[range])
78 }
79}
80
81impl Index<RangeInclusive<usize>> for ValueSlice {
82 type Output = ValueSlice;
83 fn index(&self, range: RangeInclusive<usize>) -> &Self::Output {
84 ValueSlice::from_prim_slice(&self.0[range])
85 }
86}
87
88impl Index<RangeTo<usize>> for ValueSlice {
89 type Output = ValueSlice;
90 fn index(&self, range: RangeTo<usize>) -> &Self::Output {
91 ValueSlice::from_prim_slice(&self.0[range])
92 }
93}
94
95impl Index<RangeToInclusive<usize>> for ValueSlice {
96 type Output = ValueSlice;
97 fn index(&self, range: RangeToInclusive<usize>) -> &Self::Output {
98 ValueSlice::from_prim_slice(&self.0[range])
99 }
100}
101
102impl AsRef<Value> for Value {
103 fn as_ref(&self) -> &Value {
104 self
105 }
106}
107
108impl Deref for Value {
109 type Target = ValueSlice;
110 fn deref(&self) -> &ValueSlice {
111 ValueSlice::from_prim_slice(&self.0[..])
112 }
113}
114
115impl Borrow<ValueSlice> for Value {
116 fn borrow(&self) -> &ValueSlice {
117 self
118 }
119}
120
121impl ToOwned for ValueSlice {
122 type Owned = Value;
123 fn to_owned(&self) -> Value {
124 Value::concat([self])
125 }
126}
127
128impl Value {
129 pub fn concat<'a, V: Borrow<ValueSlice> + 'a + ?Sized, I: IntoIterator<Item = &'a V>>(
131 iter: I,
132 ) -> Value {
133 Value(
134 iter.into_iter()
135 .flat_map(|vs| vs.borrow().0.iter())
136 .cloned()
137 .collect(),
138 )
139 }
140}
141
142impl Debug for Value {
143 fn fmt(&self, formatter: &mut Formatter) -> fmt::Result {
144 (**self).fmt(formatter)
145 }
146}
147
148impl Debug for ValueSlice {
149 fn fmt(&self, formatter: &mut Formatter) -> fmt::Result {
150 write!(formatter, "[")?;
152 let mut first = true;
153 for i in self.0.iter() {
154 if first {
155 first = false;
156 } else {
157 write!(formatter, ", ")?;
158 }
159 write!(formatter, "{:?}", i)?;
160 }
161 write!(formatter, "]")
162 }
163}
164
impl ValueSlice {
    /// Reinterprets a plain `&[ValueAtom]` as a `&ValueSlice`.
    pub(crate) fn from_prim_slice(prim_slice: &[ValueAtom]) -> &ValueSlice {
        // SAFETY: `ValueSlice` is `#[repr(transparent)]` over `[ValueAtom]`,
        // so the pointee layouts are identical and the fat-pointer cast is
        // sound; the output borrow inherits `prim_slice`'s lifetime.
        unsafe { &*(prim_slice as *const [ValueAtom] as *const ValueSlice) }
    }

    /// Returns `true` if the slice contains no atoms.
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
}
179
/// A sequence of [`AlignmentSegment`]s describing the expected shape of a
/// [`Value`].
#[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize, Dummy)]
#[serde(transparent)]
pub struct Alignment(pub Vec<AlignmentSegment>);
185
186impl Tagged for Alignment {
187 fn tag() -> std::borrow::Cow<'static, str> {
188 "fab-alignment[v1]".into()
189 }
190 fn tag_unique_factor() -> String {
191 "vec([[(),(u32),()],vec(fab-alignment[v1])])".into()
192 }
193}
194tag_enforcement_test!(Alignment);
195
196impl Debug for Alignment {
197 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
198 for segment in self.0.iter() {
199 write!(f, "{segment:?}")?;
200 }
201 Ok(())
202 }
203}
204
205#[cfg(feature = "proptest")]
206simple_arbitrary!(Alignment);
207
208impl Distribution<Alignment> for Standard {
209 fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Alignment {
210 let size: usize = rng.gen_range(1..9);
211 let mut segments: Vec<AlignmentSegment> = Vec::new();
212
213 for _ in 0..size {
214 segments.push(rng.r#gen());
215 }
216
217 Alignment(segments)
218 }
219}
220
221impl BinaryHashRepr for Alignment {
222 fn binary_repr<W: MemWrite<u8>>(&self, writer: &mut W) {
223 (self.0.len() as u32).binary_repr(writer);
224 for segment in self.0.iter() {
225 segment.binary_repr(writer);
226 }
227 }
228
229 fn binary_len(&self) -> usize {
230 4 + self.0.iter().map(BinaryHashRepr::binary_len).sum::<usize>()
231 }
232}
233
234impl Alignment {
235 fn sample_value<R: Rng + ?Sized>(&self, rng: &mut R) -> Value {
236 Value(
237 self.0
238 .iter()
239 .flat_map(|a| a.sample_value(rng).0.into_iter())
240 .collect(),
241 )
242 }
243}
244
245impl<'a> From<&'a [AlignmentAtom]> for Alignment {
246 fn from(alignment: &'a [AlignmentAtom]) -> Alignment {
247 Alignment(
248 alignment
249 .iter()
250 .copied()
251 .map(AlignmentSegment::Atom)
252 .collect(),
253 )
254 }
255}
256
// Identity `AsRef`, letting APIs accept `impl AsRef<Alignment>` uniformly.
impl AsRef<Alignment> for Alignment {
    fn as_ref(&self) -> &Alignment {
        self
    }
}
262
/// One segment of an [`Alignment`]: either a single primitive atom, or a
/// tagged union ("option") of alternative alignments.
#[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize, Dummy)]
#[serde(tag = "tag", content = "value", rename_all = "camelCase")]
pub enum AlignmentSegment {
    /// A single atomic alignment entry.
    Atom(AlignmentAtom),
    /// A choice between alternative alignments; matching values carry a
    /// leading discriminant atom selecting the branch (see `consume_internal`
    /// and `sample_value`).
    Option(Vec<Alignment>),
}

#[cfg(feature = "proptest")]
simple_arbitrary!(AlignmentSegment);
282
283impl Distribution<AlignmentSegment> for Standard {
284 fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> AlignmentSegment {
285 let discriminant = rng.gen_range(0..100);
286 match discriminant {
287 0 => {
288 let size = rng.gen_range(1..10);
289 let mut options = Vec::new();
290 for _ in 0..size {
291 options.push(rng.r#gen())
292 }
293 AlignmentSegment::Option(options)
294 }
295 _ => AlignmentSegment::Atom(rng.r#gen()),
296 }
297 }
298}
299
impl BinaryHashRepr for AlignmentSegment {
    /// Hash representation: atoms delegate to their own repr; options write a
    /// flagged length header followed by every branch's repr.
    fn binary_repr<W: MemWrite<u8>>(&self, mut writer: &mut W) {
        match self {
            AlignmentSegment::Atom(atom) => atom.binary_repr(writer),
            AlignmentSegment::Option(options) => {
                // NOTE(review): the (true, false) flags presumably mark this
                // header as an option in the hash stream — confirm against
                // `write_flagged_int`'s contract.
                write_flagged_int(&mut IoWrite(&mut writer), true, false, options.len() as u32)
                    .expect("Memory write shouldn't fail");
                for option in options.iter() {
                    option.binary_repr(writer);
                }
            }
        }
    }

    /// Byte length of `binary_repr`'s output, mirroring its structure.
    fn binary_len(&self) -> usize {
        match self {
            AlignmentSegment::Atom(atom) => atom.binary_len(),
            AlignmentSegment::Option(options) => {
                flagged_int_size(options.len() as u32)
                    + options
                        .iter()
                        .map(BinaryHashRepr::binary_len)
                        .sum::<usize>()
            }
        }
    }
}
327
328impl AlignmentSegment {
329 fn sample_value<R: Rng + ?Sized>(&self, rng: &mut R) -> Value {
330 match self {
331 Self::Atom(atom) => Value(vec![atom.sample_value_atom(rng)]),
332 Self::Option(options) => {
333 let choice = rng.gen_range(0..options.len());
334 let discriminant = ValueAtom(choice.to_le_bytes().to_vec()).normalize();
335 let remaining = options[choice].sample_value(rng);
336 Value(once(discriminant).chain(remaining.0).collect())
337 }
338 }
339 }
340}
341
342impl Debug for AlignmentSegment {
343 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
344 match self {
345 AlignmentSegment::Atom(atom) => write!(f, "{atom:?}"),
346 AlignmentSegment::Option(options) => {
347 write!(f, "[")?;
348 let mut first = true;
349 for option in options {
350 if first {
351 first = false;
352 } else {
353 write!(f, "|")?;
354 }
355 write!(f, "{option:?}")?;
356 }
357 write!(f, "]")
358 }
359 }
360 }
361}
362
impl Alignment {
    /// An alignment consisting of exactly one atom.
    pub fn singleton(pty: AlignmentAtom) -> Alignment {
        Alignment(vec![AlignmentSegment::Atom(pty)])
    }

    /// `true` iff `value` matches this alignment exactly, with no atoms left
    /// over after consumption.
    pub fn fits(&self, value: &ValueSlice) -> bool {
        self.consume(value).map(|(_, res)| res.0.is_empty()) == Some(true)
    }

    /// Walks `value` against this alignment, folding each consumed
    /// [`AlignmentAtom`] into `acc` via `f`.
    ///
    /// Contract: `len(&acc)` must report the number of atoms folded so far
    /// (see `consume`, which folds a plain counter); the `Option` arm relies
    /// on it to advance `value` past whatever a recursive branch call
    /// consumed.
    ///
    /// Returns `None` when `value` does not conform: too short, an atom that
    /// doesn't fit, a malformed discriminant, or an out-of-range branch.
    pub(crate) fn consume_internal<T, F: Fn(&mut T, AlignmentAtom), G: Fn(&T) -> usize>(
        &self,
        mut value: &ValueSlice,
        f: &F,
        len: &G,
        mut acc: T,
    ) -> Option<T> {
        for ts in self.0.iter() {
            match ts {
                AlignmentSegment::Atom(pty) => {
                    // One atom of input must exist and fit the expected kind.
                    if value.0.is_empty() || !pty.fits(&value.0[0]) {
                        return None;
                    }
                    value = ValueSlice::from_prim_slice(&value.0[1..]);
                    f(&mut acc, *pty);
                }
                AlignmentSegment::Option(tys) => {
                    // The branch discriminant is encoded as a <=2-byte atom.
                    if value.0.is_empty() || !(AlignmentAtom::Bytes { length: 2 }).fits(&value.0[0])
                    {
                        return None;
                    }
                    let branch =
                        u16::try_from(ValueSlice::from_prim_slice(&value.0[..1])).ok()? as usize;
                    f(&mut acc, AlignmentAtom::Bytes { length: 2 });
                    value = ValueSlice::from_prim_slice(&value.0[1..]);
                    // Recurse into the selected branch, then use the atom
                    // count delta reported by `len` to advance our own view.
                    let prev_consumed = len(&acc);
                    let branch = tys.get(branch)?;
                    acc = branch.consume_internal(value, f, len, acc)?;
                    let consumed = len(&acc) - prev_consumed;
                    value = ValueSlice::from_prim_slice(&value.0[consumed..]);
                }
            }
        }
        Some(acc)
    }

    /// Splits `value` into a prefix matching this alignment and the
    /// remainder, or `None` if no matching prefix exists.
    pub fn consume<'a>(
        &'a self,
        value: &'a ValueSlice,
    ) -> Option<(AlignedValueSlice<'a>, &'a ValueSlice)> {
        let split_point = self.consume_internal(value, &|ctr, _| *ctr += 1, &|ctr| *ctr, 0)?;
        Some((
            AlignedValueSlice(ValueSlice::from_prim_slice(&value.0[..split_point]), self),
            ValueSlice::from_prim_slice(&value.0[split_point..]),
        ))
    }

    /// Concatenates alignments by appending their segment lists.
    pub fn concat<'a, I: IntoIterator<Item = &'a Alignment>>(iter: I) -> Alignment {
        Alignment(iter.into_iter().flat_map(|a| a.0.clone()).collect())
    }

    /// A default value for this alignment: each atom contributes an empty
    /// (default) `ValueAtom`; options contribute branch 0's default, or
    /// nothing when the option has no branches.
    ///
    /// NOTE(review): no discriminant atom is prepended for option segments
    /// here (unlike `sample_value`) — confirm callers don't rely on
    /// `fits(&self.default())` for alignments containing options.
    pub fn default(&self) -> Value {
        Value(
            self.0
                .iter()
                .flat_map(
                    |ts: &AlignmentSegment| -> Box<dyn Iterator<Item = ValueAtom>> {
                        match ts {
                            AlignmentSegment::Atom(_) => Box::new(once(Default::default())),
                            AlignmentSegment::Option(tys) if tys.is_empty() => Box::new(empty()),
                            AlignmentSegment::Option(tys) => {
                                Box::new(tys[0].default().0.into_iter())
                            }
                        }
                    },
                )
                .collect(),
        )
    }
}
449
/// Serde proxy for [`AlignedValue`]: deserialized without validation first,
/// then checked via `TryFrom` so invalid pairs are rejected.
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct AlignedValueUnchecked {
    value: Value,
    alignment: Alignment,
}
458
459impl TryFrom<AlignedValueUnchecked> for AlignedValue {
460 type Error = String;
461 fn try_from(unchecked: AlignedValueUnchecked) -> Result<AlignedValue, Self::Error> {
462 if !unchecked.alignment.fits(&unchecked.value) {
463 Err(format!(
464 "value deserialized as aligned failed alignment check (value: {:?}; alignment: {:?})",
465 &unchecked.value, &unchecked.alignment
466 ))
467 } else if !unchecked.value.0.iter().all(ValueAtom::is_in_normal_form) {
468 Err("aligned value is not in normal form (has trailing zero bytes)".into())
469 } else {
470 Ok(AlignedValue {
471 value: unchecked.value,
472 alignment: unchecked.alignment,
473 })
474 }
475 }
476}
477
478impl From<AlignedValue> for AlignedValueUnchecked {
479 fn from(checked: AlignedValue) -> AlignedValueUnchecked {
480 AlignedValueUnchecked {
481 value: checked.value,
482 alignment: checked.alignment,
483 }
484 }
485}
486
/// A [`Value`] paired with an [`Alignment`] it is guaranteed to fit.
///
/// The invariant is enforced at construction ([`AlignedValue::new`]) and on
/// deserialization (via `AlignedValueUnchecked`'s `TryFrom`).
#[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
#[serde(
    rename_all = "camelCase",
    try_from = "AlignedValueUnchecked",
    into = "AlignedValueUnchecked"
)]
pub struct AlignedValue {
    /// The raw atoms.
    pub value: Value,
    /// The alignment `value` conforms to.
    pub alignment: Alignment,
}
500
501impl Tagged for AlignedValue {
502 fn tag() -> std::borrow::Cow<'static, str> {
503 "fab-aligned-value[v1]".into()
504 }
505 fn tag_unique_factor() -> String {
506 "(fab-value[v1],fab-alignment[v1])".into()
507 }
508}
509tag_enforcement_test!(AlignedValue);
510
511#[cfg(feature = "proptest")]
512simple_arbitrary!(AlignedValue);
513
514impl Distribution<AlignedValue> for Standard {
515 fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> AlignedValue {
516 let alignment: Alignment = rng.r#gen();
517 let value = alignment.sample_value(rng);
518 AlignedValue { value, alignment }
519 }
520}
521
522impl Debug for AlignedValue {
523 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
524 write!(fmt, "<{:?}: {:?}>", self.value, self.alignment)
525 }
526}
527
528impl AlignedValue {
529 pub fn new(value: Value, alignment: Alignment) -> Option<Self> {
531 if alignment.fits(&value) {
532 Some(AlignedValue { value, alignment })
533 } else {
534 None
535 }
536 }
537
538 pub fn concat<'a, I: IntoIterator<Item = &'a AlignedValue>>(iter: I) -> AlignedValue {
540 let mut val = Vec::new();
541 let mut align = Vec::new();
542 for i in iter.into_iter() {
543 val.extend(i.value.0.iter().cloned());
544 align.extend(i.alignment.0.iter().cloned());
545 }
546 AlignedValue {
547 value: Value(val),
548 alignment: Alignment(align),
549 }
550 }
551
552 pub fn as_slice(&self) -> AlignedValueSlice<'_> {
554 AlignedValueSlice(&self.value, &self.alignment)
555 }
556}
557
// Cheap borrowing accessors so generic code can take `impl AsRef<Value>` /
// `impl AsRef<Alignment>`.
impl AsRef<Value> for AlignedValue {
    fn as_ref(&self) -> &Value {
        &self.value
    }
}

impl AsRef<Alignment> for AlignedValue {
    fn as_ref(&self) -> &Alignment {
        &self.alignment
    }
}

// Convenience: reach through an `Arc` without an explicit double deref.
impl AsRef<Value> for Arc<AlignedValue> {
    fn as_ref(&self) -> &Value {
        &self.value
    }
}
575
/// Borrowed counterpart of [`AlignedValue`]: a value slice plus the alignment
/// it was matched against (produced by [`Alignment::consume`]). Serializes as
/// a plain [`Value`], discarding the alignment.
#[derive(Clone, Serialize)]
#[serde(into = "Value")]
pub struct AlignedValueSlice<'a>(pub(crate) &'a ValueSlice, pub(crate) &'a Alignment);
580
581impl From<AlignedValueSlice<'_>> for Value {
582 fn from(slice: AlignedValueSlice<'_>) -> Value {
583 Value(slice.0.0.to_vec())
584 }
585}
586
587impl AlignedValueSlice<'_> {
588 pub fn to_owned_aligned(&self) -> AlignedValue {
590 AlignedValue::new(Value(self.0.0.to_vec()), self.1.clone())
591 .expect("Already aligned value should still match")
592 }
593}
594
impl Deref for AlignedValueSlice<'_> {
    type Target = ValueSlice;

    // Drops down to the raw atoms; the alignment stays reachable via `.1`.
    fn deref(&self) -> &Self::Target {
        self.0
    }
}
602
/// An atomic unit of a [`Value`]: a raw byte string.
///
/// Atoms are kept in "normal form" — no trailing zero bytes (see
/// [`ValueAtom::normalize`] / [`ValueAtom::is_in_normal_form`]).
#[derive(Clone, Default, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize, Dummy)]
#[serde(transparent)]
pub struct ValueAtom(#[serde(with = "serde_bytes")] pub Vec<u8>);
608
609impl ValueAtom {
610 pub fn normalize(mut self) -> ValueAtom {
613 while let Some(0) = self.0.last() {
614 self.0.pop();
615 }
616 self
617 }
618
619 pub fn is_in_normal_form(&self) -> bool {
621 self.0.last() != Some(&0)
622 }
623}
624
625impl Debug for ValueAtom {
626 fn fmt(&self, formatter: &mut Formatter) -> fmt::Result {
627 if self.0.is_empty() {
628 formatter.write_str("-")
629 } else {
630 formatter.write_str(&self.0.encode_hex())
631 }
632 }
633}
634
/// The expected "type" of a single [`ValueAtom`] (see [`AlignmentAtom::fits`]).
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize, Dummy)]
#[serde(tag = "tag", rename_all = "camelCase")]
pub enum AlignmentAtom {
    /// Accepts any atom.
    Compress,
    /// Accepts normal-form atoms of at most `length` bytes.
    Bytes {
        length: u32,
    },
    /// Accepts normal-form atoms of at most [`FIELD_BYTE_LIMIT`] bytes.
    Field,
}

#[cfg(feature = "proptest")]
simple_arbitrary!(AlignmentAtom);
654
655impl Distribution<AlignmentAtom> for Standard {
656 fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> AlignmentAtom {
657 let disc = rng.gen_range(0..3);
658 match disc {
659 0 => AlignmentAtom::Compress,
660 1 => AlignmentAtom::Bytes {
661 length: rng.gen_range(0..8),
662 },
663 2 => AlignmentAtom::Field,
664 _ => unreachable!(),
665 }
666 }
667}
668
669impl AlignmentAtom {
670 fn sample_value_atom<R: Rng + ?Sized>(&self, rng: &mut R) -> ValueAtom {
671 match self {
672 Self::Compress | Self::Field => {
673 let mut bytes: Vec<u8> = vec![0; FIELD_BYTE_LIMIT];
674 rng.fill_bytes(&mut bytes);
675 ValueAtom(bytes).normalize()
676 }
677 Self::Bytes { length } => {
678 let mut bytes: Vec<u8> = vec![0; *length as usize];
679 rng.fill_bytes(&mut bytes);
680 ValueAtom(bytes).normalize()
681 }
682 }
683 }
684}
685
686impl Debug for AlignmentAtom {
687 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
688 match self {
689 AlignmentAtom::Compress => write!(f, "c"),
690 AlignmentAtom::Bytes { length } => write!(f, "b{length}"),
691 AlignmentAtom::Field => write!(f, "f"),
692 }
693 }
694}
695
impl BinaryHashRepr for AlignmentAtom {
    fn binary_repr<W: MemWrite<u8>>(&self, writer: &mut W) {
        // NOTE(review): serialization errors are deliberately discarded via
        // `.ok()` — presumably memory-backed writers cannot fail; confirm
        // this matches `Serializable::serialize`'s failure modes.
        Serializable::serialize(self, &mut IoWrite(writer)).ok();
    }
    fn binary_len(&self) -> usize {
        Serializable::serialized_size(self)
    }
}
704
/// Maximum number of bytes a `Field`-aligned atom may occupy.
pub const FIELD_BYTE_LIMIT: usize = 64;
707
708impl AlignmentAtom {
709 pub fn fits(&self, value: &ValueAtom) -> bool {
711 match self {
712 AlignmentAtom::Compress => true,
713 AlignmentAtom::Bytes { length } => {
714 *length >= value.0.len() as u32 && value.is_in_normal_form()
715 }
716
717 AlignmentAtom::Field => FIELD_BYTE_LIMIT >= value.0.len() && value.is_in_normal_form(),
718 }
719 }
720}