1use core::fmt;
13use std::collections::{HashMap, HashSet};
14
15use chacha20::cipher::{KeyIvInit, StreamCipher};
16use chacha20::ChaCha20;
17use zeroize::Zeroize;
18
19use crate::block_storage::*;
20use crate::errors::*;
21use crate::log::*;
22use crate::store::Store;
23use crate::types::*;
24
// Serialization overhead constants (in bytes). They are used below to compute
// how much payload fits into a block of a given valid store size.
// NOTE(review): the concrete values encode serde_bare framing sizes — confirm
// against the wire format before changing any of them.

/// Overhead of a serialized block over its encrypted payload.
pub const BLOCK_EXTRA: usize = 12;

/// Overhead of a commit-header reference (object id + key) stored in a block.
pub const HEADER_REF_EXTRA: usize = 66;

/// Overhead of a commit header embedded directly in a block.
pub const HEADER_EMBED_EXTRA: usize = 34;

/// Size of one child reference (id + key) inside an internal tree node.
pub const CHILD_SIZE: usize = 66;

/// Size of a serialized block id.
pub const BLOCK_ID_SIZE: usize = 33;

/// Size of a serialized block key.
pub const BLOCK_KEY_SIZE: usize = 33;

/// Extra bytes needed to encode a large varint.
pub const BIG_VARINT_EXTRA: usize = 2;

/// Extra bytes needed to encode a data-chunk length varint.
pub const DATA_VARINT_EXTRA: usize = 4;

/// Extra bytes accounted for in the maximum data payload of a block.
pub const BLOCK_MAX_DATA_EXTRA: usize = 4;
39
/// An immutable object, stored as a Merkle tree of convergently-encrypted
/// `Block`s, with an optional commit header.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Object {
    /// All blocks of the tree, addressed by their id.
    block_contents: HashMap<BlockId, Block>,

    /// Ids of the tree blocks; the root block id is last (see `root_block`).
    blocks: Vec<BlockId>,

    /// The commit header carried by this object, if any.
    header: Option<CommitHeader>,

    /// Blocks of the header object when stored separately (empty when the
    /// header is embedded in the root block); its root block is last.
    header_blocks: Vec<Block>,

    #[cfg(test)]
    // Guards against saving the same Object twice from tests (see save_in_test).
    already_saved: bool,
}
58
59impl Object {
60 pub(crate) fn convergence_key(store: &Store) -> [u8; blake3::OUT_LEN] {
63 let mut key_material = match (
64 *store.get_store_repo().repo_id(),
65 store.get_store_overlay_branch_readcap_secret().clone(),
66 ) {
67 (PubKey::Ed25519PubKey(pubkey), SymKey::ChaCha20Key(secret)) => {
68 [pubkey, secret].concat()
69 }
70 (_, _) => panic!("cannot derive key with Montgomery key"),
71 };
72 let res = blake3::derive_key("NextGraph Data BLAKE3 key", key_material.as_slice());
73 key_material.zeroize();
74 res
75 }
76
77 fn make_block(
78 mut content: Vec<u8>,
79 conv_key: &[u8; blake3::OUT_LEN],
80 children: Vec<ObjectId>,
81 header_ref: Option<CommitHeaderRef>,
82 already_existing: &mut HashMap<BlockKey, BlockId>,
83 ) -> Result<Block, BlockId> {
84 let key_hash = blake3::keyed_hash(conv_key, &content);
85
86 let key_slice = key_hash.as_bytes();
87 let key = SymKey::ChaCha20Key(key_slice.clone());
88 let it = already_existing.get(&key);
89 if it.is_some() {
90 return Err(*it.unwrap());
91 }
92 let nonce = [0u8; 12];
93 let mut cipher = ChaCha20::new(key_slice.into(), &nonce.into());
94 let mut content_enc_slice = &mut content.as_mut_slice();
96 cipher.apply_keystream(&mut content_enc_slice);
97
98 let block = Block::new(children, header_ref, content, Some(key));
99 Ok(block)
102 }
103
104 fn make_header_v0(
105 header: CommitHeaderV0,
106 object_size: usize,
107 conv_key: &ChaCha20Key,
108 ) -> (ObjectRef, Vec<Block>) {
109 let header_obj = Object::new_with_convergence_key(
110 ObjectContent::V0(ObjectContentV0::CommitHeader(CommitHeader::V0(header))),
111 None,
112 object_size,
113 conv_key,
114 );
115 let header_ref = ObjectRef {
116 id: header_obj.id(),
117 key: header_obj.key().unwrap(),
118 };
119 (header_ref, header_obj.blocks().cloned().collect())
120 }
121
122 fn make_header(
123 header: CommitHeader,
124 object_size: usize,
125 conv_key: &ChaCha20Key,
126 ) -> (ObjectRef, Vec<Block>) {
127 match header {
128 CommitHeader::V0(v0) => Self::make_header_v0(v0, object_size, conv_key),
129 }
130 }
131
    /// Builds the upper levels of the Merkle tree above `leaves`, recursing
    /// until a single root block remains.
    ///
    /// Returns the newly created parent block ids (root last) and any blocks
    /// belonging to the commit header. The header is attached only to the
    /// final root block.
    fn make_tree(
        block_contents: &mut HashMap<BlockId, Block>,
        already_existing: &mut HashMap<BlockKey, BlockId>,
        leaves: &[BlockId],
        conv_key: &ChaCha20Key,
        header_prepare_size: usize,
        mut header_prepare_block_ref: Option<BlockRef>,
        mut header_prepare_blocks: Vec<Block>,
        valid_block_size: usize,
        arity: usize,
    ) -> (Vec<BlockId>, Vec<Block>) {
        let mut parents: Vec<BlockId> = vec![];
        let mut header_blocks = vec![];
        let chunks = leaves.chunks(arity);
        let mut it = chunks.peekable();
        while let Some(nodes) = it.next() {
            let children = nodes.to_vec();
            // The children's keys are stored (encrypted) inside the parent node.
            let keys: Vec<BlockKey> = nodes
                .iter()
                .map(|block_id| block_contents.get(block_id).unwrap().key().unwrap())
                .collect();
            let content = ChunkContentV0::InternalNode(keys);
            let content_ser = serde_bare::to_vec(&content).unwrap();
            // Only the root carries the header: first chunk of this level
            // (parents still empty) and also the last one (nothing peeked).
            let header = if parents.is_empty() && it.peek().is_none() {
                let mut header_prepare_blocks_taken = vec![];
                header_prepare_blocks_taken.append(&mut header_prepare_blocks);
                match (
                    header_prepare_size,
                    header_prepare_block_ref.take(),
                    header_prepare_blocks_taken,
                ) {
                    (0, None, _) => None,
                    (header_size, Some(block_ref), blocks) => {
                        // Embed the header in the root only if it fits next to
                        // all the child references.
                        let is_embeddable = header_size > 0
                            && ((valid_block_size
                                - BLOCK_EXTRA
                                - HEADER_EMBED_EXTRA
                                - header_size)
                                / CHILD_SIZE)
                                >= children.len();
                        let (header_r, mut h_blocks) =
                            Self::make_header_ref(is_embeddable, block_ref, blocks);
                        header_blocks.append(&mut h_blocks);
                        header_r
                    }
                    (_, None, _) => unimplemented!(),
                }
            } else {
                None
            };
            Self::add_block(
                Self::make_block(content_ser, conv_key, children, header, already_existing),
                &mut parents,
                block_contents,
                already_existing,
            );
        }
        // More than one parent at this level: recurse to build the level above.
        if 1 < parents.len() {
            let mut great_parents = Self::make_tree(
                block_contents,
                already_existing,
                parents.as_slice(),
                conv_key,
                header_prepare_size,
                header_prepare_block_ref,
                header_prepare_blocks,
                valid_block_size,
                arity,
            );
            parents.append(&mut great_parents.0);
            header_blocks.append(&mut great_parents.1);
        }
        (parents, header_blocks)
    }
211
212 fn make_header_ref(
213 embedded: bool,
214 header_ref: BlockRef,
215 blocks: Vec<Block>,
216 ) -> (Option<CommitHeaderRef>, Vec<Block>) {
217 if embedded {
218 (
219 Some(CommitHeaderRef {
220 obj: CommitHeaderObject::EncryptedContent(
221 blocks[0].encrypted_content().to_vec(),
222 ),
223 key: header_ref.key,
224 }),
225 vec![],
226 )
227 } else {
228 (
229 Some(CommitHeaderRef {
230 obj: CommitHeaderObject::Id(header_ref.id),
231 key: header_ref.key,
232 }),
233 blocks,
234 )
235 }
236 }
237
238 fn add_block(
239 block_result: Result<Block, BlockId>,
240 blocks: &mut Vec<BlockId>,
241 block_contents: &mut HashMap<BlockId, Block>,
242 already_existing: &mut HashMap<BlockKey, BlockId>,
243 ) {
244 match block_result {
245 Ok(mut block) => {
246 let id = block.get_and_save_id();
247 blocks.push(id);
248 if !block_contents.contains_key(&id) {
249 already_existing.insert(block.key().unwrap(), id);
250 block_contents.insert(id, block);
251 }
252 }
253 Err(id) => {
254 blocks.push(id);
255 }
256 }
257 }
258
259 pub fn new(
271 content: ObjectContent,
272 header: Option<CommitHeader>,
273 block_size: usize,
274 store: &Store,
275 ) -> Object {
276 let mut conv_key = Self::convergence_key(store);
277 let res = Self::new_with_convergence_key(content, header, block_size, &conv_key);
278 conv_key.zeroize();
279 res
280 }
281
    /// Creates a new Object from `content`, chunking it into encrypted blocks
    /// of at most `block_size` (rounded to a valid store value size) and
    /// building a Merkle tree above the chunks when more than one is needed.
    ///
    /// `header` is only allowed for commit contents; it is stored either
    /// embedded in the root block or as a separate header object.
    ///
    /// Panics if `header` is `Some` but `content` cannot carry a header.
    pub fn new_with_convergence_key(
        content: ObjectContent,
        mut header: Option<CommitHeader>,
        block_size: usize,
        conv_key: &ChaCha20Key,
    ) -> Object {
        if header.is_some() && !content.can_have_header() {
            panic!(
                "cannot make a new Object with header if ObjectContent type different from Commit"
            );
        }
        let valid_block_size = store_valid_value_size(block_size);
        // Reserve room for a header reference only when a header is present.
        let max_data_payload_size =
            valid_block_size - BLOCK_EXTRA - HEADER_REF_EXTRA * header.as_ref().map_or(0, |_| 1);
        let max_arity: usize = max_data_payload_size / CHILD_SIZE;

        let mut blocks: Vec<BlockId> = vec![];
        let mut block_contents: HashMap<BlockId, Block> = HashMap::new();
        let mut already_existing: HashMap<BlockKey, BlockId> = HashMap::new();

        // (embeddable_size, header object ref, header object blocks).
        // The size is non-zero only for a single-block header, which is the
        // only case eligible for embedding in the root.
        let header_prepare = match &header {
            None => (0 as usize, None, vec![]),
            Some(h) => {
                let block_info = Self::make_header(h.clone(), valid_block_size, conv_key);
                if block_info.1.len() == 1 {
                    (
                        block_info.1[0].encrypted_content().len(),
                        Some(block_info.0),
                        block_info.1,
                    )
                } else {
                    (0 as usize, Some(block_info.0), block_info.1)
                }
            }
        };
        let content_ser = serde_bare::to_vec(&content).unwrap();
        let content_len = content_ser.len();

        let header_blocks = if content_len <= max_data_payload_size {
            // Small content: a single leaf block is also the root block.
            let data_chunk = ChunkContentV0::DataChunk(content_ser.clone());
            let content_ser = serde_bare::to_vec(&data_chunk).unwrap();

            let (header_ref, h_blocks) = match header_prepare {
                (0, None, _) => (None, vec![]),
                (header_size, Some(block_ref), blocks) => {
                    // Embed the header only when it fits beside the data.
                    let is_embeddable = header_size > 0
                        && valid_block_size - BLOCK_EXTRA - HEADER_EMBED_EXTRA - content_ser.len()
                            > header_size;
                    Self::make_header_ref(is_embeddable, block_ref, blocks)
                }
                (_, None, _) => unimplemented!(),
            };
            Self::add_block(
                Self::make_block(
                    content_ser,
                    conv_key,
                    vec![],
                    header_ref,
                    &mut already_existing,
                ),
                &mut blocks,
                &mut block_contents,
                &mut already_existing,
            );

            h_blocks
        } else {
            // Large content: split into leaf chunks, then build the tree above.
            let mut i = 0;
            #[cfg(not(target_arch = "wasm32"))]
            let _total = std::cmp::max(1, content_len / (valid_block_size - BLOCK_EXTRA));
            for chunk in content_ser.chunks(valid_block_size - BLOCK_EXTRA) {
                let data_chunk = ChunkContentV0::DataChunk(chunk.to_vec());
                let chunk_ser = serde_bare::to_vec(&data_chunk).unwrap();
                Self::add_block(
                    Self::make_block(chunk_ser, conv_key, vec![], None, &mut already_existing),
                    &mut blocks,
                    &mut block_contents,
                    &mut already_existing,
                );
                #[cfg(not(target_arch = "wasm32"))]
                log_debug!(
                    "make_block {} of {} - {}%",
                    i + 1,
                    _total + 1,
                    i * 100 / _total
                );
                i = i + 1;
            }

            let mut parents = Self::make_tree(
                &mut block_contents,
                &mut already_existing,
                blocks.as_slice(),
                conv_key,
                header_prepare.0,
                header_prepare.1,
                header_prepare.2,
                valid_block_size,
                max_arity,
            );

            blocks.append(&mut parents.0);
            parents.1
        };

        // Record the id of the header object's root block in the header.
        if header_blocks.len() > 0 {
            header
                .as_mut()
                .unwrap()
                .set_id(header_blocks.last().unwrap().id());
        }
        Object {
            blocks,
            block_contents,
            header,
            header_blocks,
            #[cfg(test)]
            already_saved: false,
        }
    }
426
427 pub fn load_ref(reference: &ObjectRef, store: &Store) -> Result<Object, ObjectParseError> {
431 Self::load(reference.id.clone(), Some(reference.key.clone()), store)
432 }
433
434 pub fn load_header(
435 root_block: &Block,
436 store: &Store,
437 ) -> Result<Option<CommitHeader>, ObjectParseError> {
438 Ok(Self::load_header_(root_block, store)?.0)
439 }
440
    /// Reads the commit header referenced by `root`, if any.
    ///
    /// Returns the header (with its object id set when known) plus the blocks
    /// of the header object when it was stored separately (empty when the
    /// header was embedded in the root block).
    fn load_header_(
        root: &Block,
        store: &Store,
    ) -> Result<(Option<CommitHeader>, Vec<Block>), ObjectParseError> {
        match root.header_ref() {
            Some(header_ref) => match header_ref.obj {
                CommitHeaderObject::None | CommitHeaderObject::RandomAccess => {
                    panic!("shouldn't happen")
                }
                // Header stored as a separate Object: load and parse it.
                CommitHeaderObject::Id(id) => {
                    let obj_res = Object::load(id, Some(header_ref.key.clone()), store);
                    match obj_res {
                        Err(e) => return Err(e),
                        Ok(obj) => match obj.content()? {
                            ObjectContent::V0(ObjectContentV0::CommitHeader(mut commit_header)) => {
                                commit_header.set_id(id);
                                Ok((Some(commit_header), obj.blocks().cloned().collect()))
                            }
                            _ => {
                                return Err(ObjectParseError::InvalidHeader);
                            }
                        },
                    }
                }
                // Header embedded in the root block: decrypt and deserialize it.
                CommitHeaderObject::EncryptedContent(content) => {
                    let (_, decrypted_content) =
                        Block::new_with_encrypted_content(content, None).read(&header_ref.key)?;
                    match serde_bare::from_slice(&decrypted_content) {
                        Ok(ObjectContent::V0(ObjectContentV0::CommitHeader(commit_header))) => {
                            Ok((Some(commit_header), vec![]))
                        }
                        Err(_e) => {
                            return Err(ObjectParseError::InvalidHeader);
                        }
                        _ => {
                            return Err(ObjectParseError::InvalidHeader);
                        }
                    }
                }
            },
            None => Ok((None, vec![])),
        }
    }
484
    /// Loads an Object from the store, including its commit header, if any.
    ///
    /// Returns `ObjectParseError::MissingBlocks` when tree blocks are absent,
    /// or `MissingHeaderBlocks` when only header blocks are absent.
    pub fn load(
        id: ObjectId,
        key: Option<SymKey>,
        store: &Store,
    ) -> Result<Object, ObjectParseError> {
        Self::load_(id, key, store, true)
    }
495
    /// Loads an Object from the store, skipping (and removing from the root
    /// block) any commit header reference.
    pub fn load_without_header(
        id: ObjectId,
        key: Option<SymKey>,
        store: &Store,
    ) -> Result<Object, ObjectParseError> {
        Self::load_(id, key, store, false)
    }
503
    /// Loads an Object by fetching its whole block tree from the store,
    /// optionally resolving its commit header as well.
    fn load_(
        id: ObjectId,
        key: Option<SymKey>,
        store: &Store,
        with_header: bool,
    ) -> Result<Object, ObjectParseError> {
        // Recursively fetches `parents` and then all of their descendants.
        // Each level is prepended to `blocks`, so the root id ends up last.
        // Unreachable ids are collected into `missing` instead of failing fast.
        fn load_tree(
            parents: Vec<BlockId>,
            store: &Store,
            blocks: &mut Vec<BlockId>,
            missing: &mut Vec<BlockId>,
            block_contents: &mut HashMap<BlockId, Block>,
        ) {
            let mut children: Vec<BlockId> = vec![];
            for id in parents {
                match store.get(&id) {
                    Ok(block) => {
                        match &block {
                            Block::V0(o) => {
                                children.extend(o.children().iter().rev());
                            }
                        }
                        blocks.insert(0, id);
                        if !block_contents.contains_key(&id) {
                            block_contents.insert(id, block);
                        }
                    }
                    Err(_) => missing.push(id.clone()),
                }
            }
            if !children.is_empty() {
                load_tree(children, store, blocks, missing, block_contents);
            }
        }

        let mut blocks: Vec<BlockId> = vec![];
        let mut block_contents: HashMap<BlockId, Block> = HashMap::new();
        let mut missing: Vec<BlockId> = vec![];

        load_tree(
            vec![id],
            store,
            &mut blocks,
            &mut missing,
            &mut block_contents,
        );

        if !missing.is_empty() {
            return Err(ObjectParseError::MissingBlocks(missing));
        }

        // The root block is the last id; attach the caller's key to it so the
        // content can later be decrypted.
        let root = block_contents.get_mut(blocks.last().unwrap()).unwrap();
        if key.is_some() {
            root.set_key(key);
        }

        let header = if with_header {
            match Self::load_header_(root, store) {
                // The tree itself loaded fine: hand the partial Object back
                // inside the error so the caller can fetch header blocks later.
                Err(ObjectParseError::MissingBlocks(m)) => {
                    return Err(ObjectParseError::MissingHeaderBlocks((
                        Object {
                            blocks,
                            block_contents,
                            header: None,
                            header_blocks: vec![],
                            #[cfg(test)]
                            already_saved: false,
                        },
                        m,
                    )));
                }
                Err(e) => return Err(e),
                Ok(h) => h,
            }
        } else {
            // Caller asked for no header: strip the reference from the root.
            root.destroy_header();
            (None, vec![])
        };

        Ok(Object {
            blocks,
            block_contents,
            header: header.0,
            header_blocks: header.1,
            #[cfg(test)]
            already_saved: true,
        })
    }
592
593 pub fn save(&self, store: &Store) -> Result<Vec<BlockId>, StorageError> {
595 let mut deduplicated: HashSet<ObjectId> = HashSet::new();
596 for block_id in self.blocks.iter() {
598 deduplicated.insert(*block_id);
599 store.put(self.block_contents.get(block_id).unwrap())?;
600 }
601 for block in &self.header_blocks {
602 let id = block.id();
603 if deduplicated.get(&id).is_none() {
604 deduplicated.insert(id);
605 store.put(block)?;
606 }
607 }
608 let root_id = self.id();
609 let mut blocks = vec![root_id];
610 deduplicated.remove(&root_id);
611 let list = deduplicated.drain();
612 blocks.append(&mut list.collect());
613 deduplicated.shrink_to(0);
614 Ok(blocks)
615 }
616
617 #[cfg(test)]
618 pub fn save_in_test(&mut self, store: &Store) -> Result<Vec<BlockId>, StorageError> {
619 assert!(self.already_saved == false);
620 self.already_saved = true;
621
622 self.save(store)
623 }
624
    /// The id of the object: the id of its root block.
    pub fn id(&self) -> ObjectId {
        self.root_block().id()
    }
629
630 pub fn get_and_save_id(&mut self) -> ObjectId {
632 self.block_contents
633 .get_mut(self.blocks.last().unwrap())
634 .unwrap()
635 .get_and_save_id()
636 }
637
    /// The key of the object (the key of its root block), if known.
    pub fn key(&self) -> Option<SymKey> {
        self.root_block().key()
    }
642
643 pub fn reference(&self) -> Option<ObjectRef> {
645 if self.key().is_some() {
646 Some(ObjectRef {
647 id: self.id(),
648 key: self.key().unwrap(),
649 })
650 } else {
651 None
652 }
653 }
654
    /// True when the object has no header, or when its header reports itself
    /// as a root.
    pub fn is_root(&self) -> bool {
        self.header.as_ref().map_or(true, |h| h.is_root())
    }
658
659 pub fn deps(&self) -> Vec<ObjectId> {
662 match &self.header {
663 Some(h) => h.deps(),
664 None => vec![],
665 }
666 }
667
668 pub fn acks_and_nacks(&self) -> Vec<ObjectId> {
671 match &self.header {
672 Some(h) => h.acks_and_nacks(),
673 None => vec![],
674 }
675 }
676
677 pub fn acks(&self) -> Vec<ObjectId> {
680 match &self.header {
681 Some(h) => h.acks(),
682 None => vec![],
683 }
684 }
685
686 pub fn root_block(&self) -> &Block {
687 self.block_contents
688 .get(self.blocks.last().unwrap())
689 .unwrap()
690 }
691
    /// The commit header carried by this object, if any.
    pub fn header(&self) -> &Option<CommitHeader> {
        &self.header
    }
695
696 pub fn blocks(&self) -> impl Iterator<Item = &Block> + '_ {
697 self.blocks
698 .iter()
699 .map(|key| self.block_contents.get(key).unwrap())
700 }
701
    /// Number of blocks, counting both the tree and the header blocks.
    pub fn all_blocks_len(&self) -> usize {
        self.blocks.len() + self.header_blocks.len()
    }
705
    /// Number of tree blocks (header blocks excluded).
    pub fn blocks_len(&self) -> usize {
        self.blocks.len()
    }
709
    /// Number of header blocks.
    pub fn header_blocks_len(&self) -> usize {
        self.header_blocks.len()
    }
713
714 pub fn size(&self) -> usize {
715 let mut total = 0;
716 self.blocks().for_each(|b| {
717 let s = b.size();
718 total += s;
720 });
721 self.header_blocks.iter().for_each(|b| {
722 let s = b.size();
723 total += s;
725 });
726 total
727 }
728
729 pub fn dedup_size(&self) -> usize {
730 let mut total = 0;
731 self.block_contents.values().for_each(|b| total += b.size());
732 self.header_blocks.iter().for_each(|b| total += b.size());
733 total
734 }
735
    /// Read access to the id -> Block map of the tree.
    pub fn hashmap(&self) -> &HashMap<BlockId, Block> {
        &self.block_contents
    }
739
    /// Consumes the object and returns its tree blocks.
    /// Order is unspecified (values come straight from the HashMap).
    pub fn into_blocks(self) -> Vec<Block> {
        self.block_contents.into_values().collect()
    }
743
    /// Walks the tree one level at a time (`blocks` stores levels bottom-up,
    /// root last; `parent_index` points at the first block of the current
    /// level), decrypting each visited block with the key supplied by its
    /// parent.
    ///
    /// Depending on which outputs are requested, pushes decrypted leaf blocks
    /// into `leaves` and/or appends the plaintext data to `obj_content`.
    /// Returns the number of levels below `parents` (0 for a leaf level).
    fn collect_leaves(
        blocks: &Vec<BlockId>,
        parents: &Vec<(ObjectId, SymKey)>,
        parent_index: usize,
        leaves: &mut Option<&mut Vec<Block>>,
        obj_content: &mut Option<&mut Vec<u8>>,
        block_contents: &HashMap<BlockId, Block>,
    ) -> Result<u8, ObjectParseError> {
        let mut children: Vec<(ObjectId, SymKey)> = vec![];
        let mut i = parent_index;

        for (id, key) in parents {
            let block = block_contents.get(&blocks[i]).unwrap();
            i += 1;

            // Verify the block carries the id its parent claims for it.
            let block_id = block.id();
            if *id != block_id {
                log_debug!("Invalid ObjectId.\nExp: {:?}\nGot: {:?}", *id, block_id);
                return Err(ObjectParseError::InvalidBlockId);
            }

            match block {
                Block::V0(b) => {
                    let b_children = b.children();
                    // Neither output requested: only the depth matters, so
                    // skip decryption and descend with placeholder keys.
                    if leaves.is_none() && obj_content.is_none() {
                        for id in b_children {
                            #[allow(deprecated)]
                            children.push((id.clone(), ObjectKey::nil()));
                        }
                        continue;
                    }
                    // Decrypt the block content in place with its key.
                    let mut content_dec = b.content.encrypted_content().clone();
                    match key {
                        SymKey::ChaCha20Key(key) => {
                            let nonce = [0u8; 12];
                            let mut cipher = ChaCha20::new(key.into(), &nonce.into());
                            let mut content_dec_slice = &mut content_dec.as_mut_slice();
                            cipher.apply_keystream(&mut content_dec_slice);
                        }
                    }

                    let content: ChunkContentV0;
                    match serde_bare::from_slice(content_dec.as_slice()) {
                        Ok(c) => content = c,
                        Err(_e) => {
                            return Err(ObjectParseError::BlockDeserializeError);
                        }
                    }
                    match content {
                        // Internal node: pair each child id with its decrypted key.
                        ChunkContentV0::InternalNode(keys) => {
                            if keys.len() != b_children.len() {
                                log_debug!(
                                    "Invalid keys length: got {}, expected {}",
                                    keys.len(),
                                    b_children.len()
                                );
                                log_debug!("!!! children: {:?}", b_children);
                                log_debug!("!!! keys: {:?}", keys);
                                return Err(ObjectParseError::InvalidKeys);
                            }

                            for (id, key) in b_children.iter().zip(keys.iter()) {
                                children.push((id.clone(), key.clone()));
                            }
                        }
                        // Leaf node: emit the block and/or its plaintext chunk.
                        ChunkContentV0::DataChunk(chunk) => {
                            if leaves.is_some() {
                                let mut leaf = block.clone();
                                leaf.set_key(Some(key.clone()));
                                let l = &mut **leaves.as_mut().unwrap();
                                l.push(leaf);
                            }
                            if obj_content.is_some() {
                                let c = &mut **obj_content.as_mut().unwrap();
                                c.extend_from_slice(chunk.as_slice());
                            }
                        }
                    }
                }
            }
        }
        Ok(if !children.is_empty() {
            if parent_index < children.len() {
                return Err(ObjectParseError::InvalidChildren);
            }
            // Recurse into the level below; each level adds one to the depth.
            Self::collect_leaves(
                blocks,
                &children,
                parent_index - children.len(),
                leaves,
                obj_content,
                block_contents,
            )? + 1
        } else {
            0
        })
    }
856
857 pub fn content(&self) -> Result<ObjectContent, ObjectParseError> {
875 if self.key().is_none() {
877 return Err(ObjectParseError::MissingRootKey);
878 }
879 let mut obj_content: Vec<u8> = vec![];
880 let parents = vec![(self.id(), self.key().unwrap())];
881 match Self::collect_leaves(
882 &self.blocks,
883 &parents,
884 self.blocks.len() - 1,
885 &mut None,
886 &mut Some(&mut obj_content),
887 &self.block_contents,
888 ) {
889 Ok(_) => match serde_bare::from_slice(obj_content.as_slice()) {
890 Ok(c) => Ok(c),
891 Err(_e) => {
892 Err(ObjectParseError::ObjectDeserializeError)
894 }
895 },
896 Err(e) => Err(e),
897 }
898 }
899
900 pub fn depth(&self) -> Result<u8, ObjectParseError> {
902 if self.key().is_none() {
903 return Err(ObjectParseError::MissingRootKey);
904 }
905 let parents = vec![(self.id(), self.key().unwrap())];
906 Self::collect_leaves(
907 &self.blocks,
908 &parents,
909 self.blocks.len() - 1,
910 &mut None,
911 &mut None,
912 &self.block_contents,
913 )
914 }
915
916 pub fn content_v0(&self) -> Result<ObjectContentV0, ObjectParseError> {
917 match self.content() {
918 Ok(ObjectContent::V0(v0)) => Ok(v0),
919 Err(e) => Err(e),
920 }
921 }
922}
923
924impl IObject for Object {
925 fn block_ids(&self) -> Vec<BlockId> {
926 let mut deduplicated: HashSet<ObjectId> = HashSet::new();
927 for block_id in self.blocks.iter() {
929 deduplicated.insert(*block_id);
930 }
931 for block in &self.header_blocks {
932 let id = block.id();
933 if deduplicated.get(&id).is_none() {
934 deduplicated.insert(id);
935 }
936 }
937 let root_id = self.id();
938 let mut blocks = vec![root_id];
939 deduplicated.remove(&root_id);
940 let list = deduplicated.drain();
941 blocks.append(&mut list.collect());
942 deduplicated.shrink_to(0);
943 blocks
944 }
945
946 fn id(&self) -> Option<ObjectId> {
947 Some(self.id())
948 }
949
950 fn key(&self) -> Option<SymKey> {
951 self.key()
952 }
953}
954
955impl fmt::Display for Object {
956 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
957 writeln!(f, "====== Object ID {}", self.id())?;
958 writeln!(
959 f,
960 "== Key: {}",
961 self.key().map_or("None".to_string(), |k| format!("{}", k))
962 )?;
963 #[cfg(test)]
964 writeln!(f, "== saved: {}", self.already_saved)?;
965 writeln!(
966 f,
967 "== Header: {}",
968 self.header
969 .as_ref()
970 .map_or("None".to_string(), |k| format!("{}", k))
971 )?;
972 writeln!(f, "== Blocks: {}", self.blocks.len())?;
973 let mut i = 0;
974 for block_id in &self.blocks {
975 writeln!(f, "========== {:03}: {}", i, block_id)?;
976 i += 1;
977 }
978 writeln!(f, "== Depth: {:?}", self.depth().unwrap_or(0))?;
979
980 writeln!(f, "== Header Blocks: {}", self.header_blocks.len())?;
981 i = 0;
982 for block in &self.header_blocks {
983 writeln!(f, "========== {:03}: {}", i, block.id())?;
984 }
985 write!(
986 f,
987 "{}",
988 self.content().map_or_else(
989 |e| format!("Error on content: {:?}", e),
990 |c| format!("{}", c)
991 )
992 )?;
993 Ok(())
994 }
995}
996
997impl ObjectContent {
998 pub fn can_have_header(&self) -> bool {
999 match self {
1000 Self::V0(v0) => match v0 {
1001 ObjectContentV0::Commit(_) => true,
1002 _ => false,
1003 },
1004 }
1005 }
1006
1007 pub fn new_file_v0_with_content(content: Vec<u8>, content_type: &str) -> Self {
1008 ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 {
1009 content_type: content_type.into(),
1010 metadata: vec![],
1011 content,
1012 })))
1013 }
1014}
1015
1016impl fmt::Display for ObjectContent {
1017 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1018 let (version, content) = match self {
1019 Self::V0(v0) => (
1020 "v0",
1021 match v0 {
1022 ObjectContentV0::Commit(c) => ("Commit", format!("{}", c)),
1023 ObjectContentV0::CommitBody(c) => ("CommitBody", format!("{}", c)),
1024 ObjectContentV0::CommitHeader(c) => ("CommitHeader", format!("{}", c)),
1025 ObjectContentV0::Quorum(_c) => ("Quorum", format!("{}", "")),
1026 ObjectContentV0::Signature(_c) => ("Signature", format!("{}", "")),
1027 ObjectContentV0::Certificate(_c) => ("Certificate", format!("{}", "")),
1028 ObjectContentV0::SmallFile(_c) => ("SmallFile", format!("{}", "")),
1029 ObjectContentV0::RandomAccessFileMeta(_c) => {
1030 ("RandomAccessFileMeta", format!("{}", ""))
1031 }
1032 ObjectContentV0::RefreshCap(_c) => ("RefreshCap", format!("{}", "")),
1033 ObjectContentV0::Snapshot(_c) => ("Snapshot", format!("size={}", _c.len())),
1034 },
1035 ),
1036 };
1037 writeln!(f, "====== ObjectContent {} {} ======", version, content.0)?;
1038 write!(f, "{}", content.1)?;
1039 Ok(())
1040 }
1041}
1042
1043#[cfg(test)]
1044mod test {
1045
1046 use crate::object::*;
1047 use std::io::BufReader;
1048 use std::io::Read;
1049 use std::io::Write;
1050
    /// Round-trips an Ed25519 public key through its string representation.
    #[test]
    pub fn test_pubkey_from_str() {
        let pubkey = PubKey::Ed25519PubKey([1u8; 32]);
        let str = pubkey.to_string();
        let server_key: PubKey = str.as_str().try_into().unwrap();
        assert_eq!(server_key, pubkey);
    }
1066
    /// A non-commit content combined with a header must panic in `Object::new`.
    #[test]
    #[should_panic]
    pub fn test_no_header() {
        let file = SmallFile::V0(SmallFileV0 {
            content_type: "image/jpeg".into(),
            metadata: vec![],
            content: vec![],
        });
        let content = ObjectContent::V0(ObjectContentV0::SmallFile(file));
        let store = Store::dummy_public_v0();
        let header = CommitHeader::new_with_acks([ObjectId::dummy()].to_vec());
        let _obj = Object::new(content, header, store_max_value_size(), &store);
    }
1081
    /// Builds an Object from a real JPEG fixture and writes each serialized
    /// block to `tests/<id>.ng` (also serves as fixture generation).
    #[test]
    pub fn test_jpg() {
        let f = std::fs::File::open("tests/test.jpg").expect("open of tests/test.jpg");
        let mut reader = BufReader::new(f);
        let mut img_buffer: Vec<u8> = Vec::new();
        reader
            .read_to_end(&mut img_buffer)
            .expect("read of test.jpg");
        let content = ObjectContent::new_file_v0_with_content(img_buffer, "image/jpeg");

        let max_object_size = store_max_value_size();
        let store = Store::dummy_public_v0();
        let obj = Object::new(content, None, max_object_size, &store);

        log_debug!("{}", obj);

        let mut i = 0;
        for node in obj.blocks() {
            log_debug!("#{}: {}", i, node.id());
            let mut file = std::fs::File::create(format!("tests/{}.ng", node.id()))
                .expect("open block write file");
            let ser_file = serde_bare::to_vec(node).unwrap();
            file.write_all(&ser_file)
                .expect(&format!("write of block #{}", i));
            i += 1;
        }
    }
1110
    /// End-to-end: build an Object, check its content round-trips, save it,
    /// reload it with and without the key, and verify behavior in both cases.
    #[test]
    pub fn test_object() {
        let file = SmallFile::V0(SmallFileV0 {
            content_type: "file/test".into(),
            metadata: Vec::from("some meta data here"),
            content: [(0..255).collect::<Vec<u8>>().as_slice(); 320].concat(),
        });
        let content = ObjectContent::V0(ObjectContentV0::SmallFile(file));

        let acks = vec![];
        // 0 is rounded up to the minimum valid block size by the Object code.
        let max_object_size = 0;

        let store = Store::dummy_public_v0();

        let mut obj = Object::new(content.clone(), None, max_object_size, &store);

        log_debug!("{}", obj);

        assert_eq!(*obj.acks(), acks);

        match obj.content() {
            Ok(cnt) => {
                log_debug!("{}", cnt);
                assert_eq!(content, cnt);
            }
            Err(e) => panic!("Object parse error: {:?}", e),
        }

        obj.save_in_test(&store).expect("Object save error");

        let obj2 = Object::load(obj.id(), obj.key(), &store).unwrap();

        log_debug!("{}", obj2);

        assert_eq!(*obj2.acks(), acks);

        match obj2.content() {
            Ok(cnt) => {
                log_debug!("{}", cnt);
                assert_eq!(content, cnt);
            }
            Err(e) => panic!("Object2 parse error: {:?}", e),
        }

        // Loading without the key must refuse to expose the content.
        let obj3 = Object::load(obj.id(), None, &store).unwrap();

        log_debug!("{}", obj3);

        assert_eq!(*obj3.acks(), acks);

        match obj3.content() {
            Err(ObjectParseError::MissingRootKey) => (),
            Err(e) => panic!("Object3 parse error: {:?}", e),
            Ok(_) => panic!("Object3 should not return content"),
        }
    }
1169
    /// The biggest content that still fits in one block must yield a
    /// single-block object (depth 0).
    #[test]
    pub fn test_depth_0() {
        let store = Store::dummy_public_v0();

        let empty_file =
            ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 {
                content_type: "".into(),
                metadata: vec![],
                content: vec![],
            })));
        let content_ser = serde_bare::to_vec(&empty_file).unwrap();
        log_debug!("content len for empty : {}", content_ser.len());

        let empty_obj = Object::new(empty_file, None, store_max_value_size(), &store);

        let empty_file_size = empty_obj.size();
        log_debug!("empty file size: {}", empty_file_size);

        // Fill the block to the brim: max size minus envelope overheads.
        let size =
            store_max_value_size() - empty_file_size - BLOCK_MAX_DATA_EXTRA - BIG_VARINT_EXTRA;
        log_debug!("full file content size: {}", size);

        let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 {
            content_type: "".into(),
            metadata: vec![],
            content: vec![99; size],
        })));
        let content_ser = serde_bare::to_vec(&content).unwrap();
        log_debug!("content len: {}", content_ser.len());

        let object = Object::new(content, None, store_max_value_size(), &store);
        log_debug!("{}", object);

        log_debug!("object size: {}", object.size());

        assert_eq!(object.blocks.len(), 1);
    }
1264
    /// ~16GB of data should produce a tree of depth 1 with max-size blocks.
    /// Ignored by default: very slow and memory hungry.
    #[ignore]
    #[test]
    pub fn test_depth_1() {
        const MAX_ARITY_LEAVES: usize = 15887;
        const MAX_DATA_PAYLOAD_SIZE: usize = 1048564;

        // Just under one full level of leaves.
        let data_size = MAX_ARITY_LEAVES * MAX_DATA_PAYLOAD_SIZE - 10;

        let store = Store::dummy_public_v0();
        log_debug!("creating 16GB of data");
        let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 {
            content_type: "".into(),
            metadata: vec![],
            content: vec![99; data_size],
        })));
        log_debug!("creating object with that data");
        let object = Object::new(content, None, store_max_value_size(), &store);
        log_debug!("{}", object);

        let obj_size = object.size();
        log_debug!("object size: {}", obj_size);

        log_debug!("data size: {}", data_size);
        log_debug!(
            "overhead: {} - {}%",
            obj_size - data_size,
            ((obj_size - data_size) * 100) as f32 / data_size as f32
        );

        log_debug!("number of blocks : {}", object.blocks.len());
        assert_eq!(object.blocks.len(), MAX_ARITY_LEAVES + 1);
        assert_eq!(object.depth().unwrap(), 1);
    }
1305
    /// One full level of max-size leaves should overflow into a depth-2 tree.
    /// Ignored by default: very slow and memory hungry.
    #[ignore]
    #[test]
    pub fn test_depth_2() {
        const MAX_ARITY_LEAVES: usize = 15887;
        const MAX_DATA_PAYLOAD_SIZE: usize = 1048564;

        let data_size = MAX_ARITY_LEAVES * MAX_DATA_PAYLOAD_SIZE;

        let store = Store::dummy_public_v0();
        log_debug!("creating 16GB of data");
        let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 {
            content_type: "".into(),
            metadata: vec![],
            content: vec![99; data_size],
        })));
        log_debug!("creating object with that data");
        let object = Object::new(content, None, store_max_value_size(), &store);
        log_debug!("{}", object);

        let obj_size = object.size();
        log_debug!("object size: {}", obj_size);

        log_debug!("data size: {}", data_size);
        log_debug!(
            "overhead: {} - {}%",
            obj_size - data_size,
            ((obj_size - data_size) * 100) as f32 / data_size as f32
        );

        log_debug!("number of blocks : {}", object.blocks.len());
        assert_eq!(object.blocks.len(), MAX_ARITY_LEAVES + 4);
        assert_eq!(object.depth().unwrap(), 2);
    }
1343
    /// ~900MB of data with the minimum block size should form a depth-3 tree.
    /// Ignored by default: very slow and memory hungry.
    #[ignore]
    #[test]
    pub fn test_depth_3() {
        const MAX_ARITY_LEAVES: usize = 61;
        const MAX_DATA_PAYLOAD_SIZE: usize = 4084;

        // Just under three full levels of leaves.
        let data_size =
            MAX_ARITY_LEAVES * MAX_ARITY_LEAVES * MAX_ARITY_LEAVES * MAX_DATA_PAYLOAD_SIZE - 10;

        let store = Store::dummy_public_v0();
        log_debug!("creating 900MB of data");
        let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 {
            content_type: "".into(),
            metadata: vec![],
            content: vec![99; data_size],
        })));
        log_debug!("creating object with that data");
        let object = Object::new(content, None, store_valid_value_size(0), &store);
        log_debug!("{}", object);

        let obj_size = object.size();
        log_debug!("object size: {}", obj_size);

        log_debug!("data size: {}", data_size);
        log_debug!(
            "overhead: {} - {}%",
            obj_size - data_size,
            ((obj_size - data_size) * 100) as f32 / data_size as f32
        );

        let dedup_size = object.dedup_size();
        log_debug!(
            "dedup compression: {} - {}%",
            data_size - dedup_size,
            ((data_size - dedup_size) * 100) as f32 / data_size as f32
        );

        log_debug!("number of blocks : {}", object.blocks.len());
        assert_eq!(
            object.blocks.len(),
            MAX_ARITY_LEAVES * (MAX_ARITY_LEAVES + 1) * MAX_ARITY_LEAVES + MAX_ARITY_LEAVES + 1
        );
        assert_eq!(object.depth().unwrap(), 3);
    }
1392
1393 #[ignore]
1395 #[test]
1396 pub fn test_depth_4() {
1397 const MAX_ARITY_LEAVES: usize = 61;
1398 const MAX_DATA_PAYLOAD_SIZE: usize = 4084;
1399
1400 let data_size = MAX_ARITY_LEAVES
1402 * MAX_ARITY_LEAVES
1403 * MAX_ARITY_LEAVES
1404 * MAX_ARITY_LEAVES
1405 * MAX_DATA_PAYLOAD_SIZE
1406 - 12;
1407
1408 let store = Store::dummy_public_v0();
1409 log_debug!("creating 52GB of data");
1410 let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 {
1411 content_type: "".into(),
1412 metadata: vec![],
1413 content: vec![99; data_size],
1414 })));
1415 log_debug!("creating object with that data");
1418 let object = Object::new(content, None, store_valid_value_size(0), &store);
1419 log_debug!("{}", object);
1420
1421 let obj_size = object.size();
1422 log_debug!("object size: {}", obj_size);
1423
1424 log_debug!("data size: {}", data_size);
1425 log_debug!(
1426 "overhead: {} - {}%",
1427 obj_size - data_size,
1428 ((obj_size - data_size) * 100) as f32 / data_size as f32
1429 );
1430
1431 log_debug!("number of blocks : {}", object.blocks.len());
1432 assert_eq!(
1433 object.blocks.len(),
1434 MAX_ARITY_LEAVES
1435 * (MAX_ARITY_LEAVES * (MAX_ARITY_LEAVES + 1) * MAX_ARITY_LEAVES
1436 + MAX_ARITY_LEAVES
1437 + 1)
1438 + 1
1439 );
1440 assert_eq!(object.depth().unwrap(), 4);
1441 }
1442
    /// Verifies that the arity/payload constants (BLOCK_EXTRA, HEADER_REF_EXTRA,
    /// HEADER_EMBED_EXTRA, CHILD_SIZE) are consistent with the actual serialized
    /// size of Blocks: a block built at the computed maximum arity or maximum
    /// data payload must never exceed the store's max block size.
    /// Runs the same checks across several store block-size configurations.
    #[test]
    pub fn test_block_size() {
        // Core check, parameterized by the target maximum serialized block size.
        fn test_block(max_block_size: usize) {
            // Maximum number of children for a leaf-parent (no header) and for a
            // root carrying a header ref, derived from the size constants.
            let max_arity_leaves: usize = (max_block_size - BLOCK_EXTRA) / CHILD_SIZE;
            let max_arity_root: usize =
                (max_block_size - BLOCK_EXTRA - HEADER_REF_EXTRA) / CHILD_SIZE;

            // Maximum raw data bytes a single block can carry (no header case).
            let max_data_payload_size = max_block_size - BLOCK_EXTRA;

            log_debug!("max_block_size: {}", max_block_size);
            log_debug!("max_arity_leaves: {}", max_arity_leaves);
            log_debug!("max_arity_root: {}", max_arity_root);
            log_debug!("max_data_payload_size: {}", max_data_payload_size);

            // Dummy id/key pair reused for all child references below.
            let (id, key) = ObjectRef::dummy().into();

            // Pre-serialize internal-node chunk contents of various arities.
            let zero_key = ChunkContentV0::InternalNode(vec![]);
            let zero_key_ser = serde_bare::to_vec(&zero_key).unwrap();

            let one_key = ChunkContentV0::InternalNode(vec![key.clone()]);
            let one_key_ser = serde_bare::to_vec(&one_key).unwrap();

            let two_keys = ChunkContentV0::InternalNode(vec![key.clone(), key.clone()]);
            let two_keys_ser = serde_bare::to_vec(&two_keys).unwrap();

            let max_keys = ChunkContentV0::InternalNode(vec![key.clone(); max_arity_leaves]);
            let max_keys_ser = serde_bare::to_vec(&max_keys).unwrap();

            let max_keys_root = ChunkContentV0::InternalNode(vec![key.clone(); max_arity_root]);
            let max_keys_root_ser = serde_bare::to_vec(&max_keys_root).unwrap();

            // Pre-serialize data chunks: empty and maximal.
            let data_empty = ChunkContentV0::DataChunk(vec![]);
            let data_empty_ser = serde_bare::to_vec(&data_empty).unwrap();

            let data_full = ChunkContentV0::DataChunk(vec![0; max_data_payload_size]);
            let data_full_ser = serde_bare::to_vec(&data_full).unwrap();

            // Leaf blocks (no children, no header).
            let leaf_empty = Block::new(vec![], None, data_empty_ser.clone(), None);
            let leaf_empty_ser = serde_bare::to_vec(&leaf_empty).unwrap();

            log_debug!(
                "block size of empty leaf without header: {}",
                leaf_empty_ser.len()
            );

            let leaf_full_data = Block::new(vec![], None, data_full_ser.clone(), None);
            let leaf_full_data_ser = serde_bare::to_vec(&leaf_full_data).unwrap();

            log_debug!(
                "block size of full leaf block without header: {}",
                leaf_full_data_ser.len()
            );

            let internal_zero = Block::new(vec![], None, zero_key_ser.clone(), None);
            let internal_zero_ser = serde_bare::to_vec(&internal_zero).unwrap();

            log_debug!(
                "block size of empty internal block without header: {}",
                internal_zero_ser.len()
            );

            // A leaf holding the maximal payload must still fit in a block.
            assert!(leaf_full_data_ser.len() <= max_block_size);

            // Two header flavors: a reference (id+key) and an embedded (empty) one.
            let header_ref = CommitHeaderRef::from_id_key(id, key.clone());

            let header_embed = CommitHeaderRef::from_content_key(vec![], key.clone());

            // Root blocks with empty data, one per header flavor.
            let root_zero_header_ref = Block::new(
                vec![],
                Some(header_ref.clone()),
                data_empty_ser.clone(),
                None,
            );
            let root_zero_header_ref_ser = serde_bare::to_vec(&root_zero_header_ref).unwrap();

            let root_zero_header_embed = Block::new(
                vec![],
                Some(header_embed.clone()),
                data_empty_ser.clone(),
                None,
            );
            let root_zero_header_embed_ser = serde_bare::to_vec(&root_zero_header_embed).unwrap();

            log_debug!(
                "block size of empty root block with header ref: {}",
                root_zero_header_ref_ser.len()
            );

            log_debug!(
                "block size of empty root block with header embedded: {}",
                root_zero_header_embed_ser.len()
            );

            // Internal (non-root) blocks at various arities, no header.
            let internal_max =
                Block::new(vec![id; max_arity_leaves], None, max_keys_ser.clone(), None);
            let internal_max_ser = serde_bare::to_vec(&internal_max).unwrap();

            let internal_one = Block::new(vec![id; 1], None, one_key_ser.clone(), None);
            let internal_one_ser = serde_bare::to_vec(&internal_one).unwrap();

            let internal_two = Block::new(vec![id; 2], None, two_keys_ser.clone(), None);
            let internal_two_ser = serde_bare::to_vec(&internal_two).unwrap();

            log_debug!(
                "block size of internal block with 1 child, without header: {}",
                internal_one_ser.len()
            );

            log_debug!(
                "block size of internal block with 2 children, without header: {}",
                internal_two_ser.len()
            );

            log_debug!(
                "block size of internal block with max arity children, without header: {}",
                internal_max_ser.len()
            );

            // Max-arity internal block must fit.
            assert!(internal_max_ser.len() <= max_block_size);

            // Root blocks with a header *ref* at various arities.
            let root_one = Block::new(
                vec![id; 1],
                Some(header_ref.clone()),
                one_key_ser.clone(),
                None,
            );
            let root_one_ser = serde_bare::to_vec(&root_one).unwrap();

            let root_two = Block::new(
                vec![id; 2],
                Some(header_ref.clone()),
                two_keys_ser.clone(),
                None,
            );
            let root_two_ser = serde_bare::to_vec(&root_two).unwrap();

            let root_max = Block::new(
                vec![id; max_arity_root],
                Some(header_ref.clone()),
                max_keys_root_ser.clone(),
                None,
            );
            let root_max_ser = serde_bare::to_vec(&root_max).unwrap();

            // Single-block object: root holds data directly, payload shrunk by
            // the header-ref overhead.
            let data_full_when_header_ref =
                ChunkContentV0::DataChunk(vec![0; max_data_payload_size - HEADER_REF_EXTRA]);
            let data_full_when_header_ref_ser =
                serde_bare::to_vec(&data_full_when_header_ref).unwrap();

            let root_full = Block::new(
                vec![],
                Some(header_ref.clone()),
                data_full_when_header_ref_ser.clone(),
                None,
            );
            let root_full_ser = serde_bare::to_vec(&root_full).unwrap();

            log_debug!(
                "block size of root block with header ref with 1 child: {}",
                root_one_ser.len()
            );

            log_debug!(
                "block size of root block with header ref with 2 children: {}",
                root_two_ser.len()
            );

            log_debug!(
                "block size of root block with header ref with max arity children: {}",
                root_max_ser.len()
            );

            log_debug!(
                "block size of root block with header ref with full DataChunk (fitting ObjectContent): {}",
                root_full_ser.len()
            );

            assert!(root_full_ser.len() <= max_block_size);

            // Same series, but with an *embedded* header.
            let root_embed_one = Block::new(
                vec![id; 1],
                Some(header_embed.clone()),
                one_key_ser.clone(),
                None,
            );
            let root_embed_one_ser = serde_bare::to_vec(&root_embed_one).unwrap();

            let root_embed_two = Block::new(
                vec![id; 2],
                Some(header_embed.clone()),
                two_keys_ser.clone(),
                None,
            );
            let root_embed_two_ser = serde_bare::to_vec(&root_embed_two).unwrap();

            let root_embed_max = Block::new(
                vec![id; max_arity_root],
                Some(header_embed.clone()),
                max_keys_root_ser.clone(),
                None,
            );
            let root_embed_max_ser = serde_bare::to_vec(&root_embed_max).unwrap();

            let data_full_when_header_embed =
                ChunkContentV0::DataChunk(vec![0; max_data_payload_size - HEADER_EMBED_EXTRA]);
            let data_full_when_header_embed_ser =
                serde_bare::to_vec(&data_full_when_header_embed).unwrap();

            let root_embed_full = Block::new(
                vec![],
                Some(header_embed.clone()),
                data_full_when_header_embed_ser.clone(),
                None,
            );
            let root_embed_full_ser = serde_bare::to_vec(&root_embed_full).unwrap();

            log_debug!(
                "block size of root block with header embed with 1 child: {}",
                root_embed_one_ser.len()
            );

            log_debug!(
                "block size of root block with header embed with 2 children: {}",
                root_embed_two_ser.len()
            );

            log_debug!(
                "block size of root block with header embed with max arity children: {}",
                root_embed_max_ser.len()
            );

            log_debug!(
                "block size of root block with header embed with full DataChunk (fitting ObjectContent): {}",
                root_embed_full_ser.len()
            );

            assert!(root_embed_full_ser.len() <= max_block_size);

            // Realistic headers of growing size, to probe arity with a
            // non-empty embedded header.
            let header_acks_1 = CommitHeader::new_with_acks(vec![id]);
            let header_acks_2 = CommitHeader::new_with_acks(vec![id, id]);
            let header_acks_60 = CommitHeader::new_with_acks(vec![id; 60]);
            let header_acks_60_deps_60 =
                CommitHeader::new_with_deps_and_acks(vec![id; 60], vec![id; 60]);

            // Serializes a CommitHeader into an embedded CommitHeaderRef, the
            // way a real header would travel inside a root block.
            fn make_header_block(header: Option<CommitHeader>) -> CommitHeaderRef {
                let content_ser = serde_bare::to_vec(&ObjectContent::V0(
                    ObjectContentV0::CommitHeader(header.unwrap()),
                ))
                .unwrap();
                let data_chunk = ChunkContentV0::DataChunk(content_ser.clone());
                let encrypted_content = serde_bare::to_vec(&data_chunk).unwrap();
                CommitHeaderRef::from_content_key(encrypted_content, SymKey::dummy())
            }

            let header_embed_acks_1 = make_header_block(header_acks_1);
            let header_embed_acks_2 = make_header_block(header_acks_2);
            let header_embed_acks_60 = make_header_block(header_acks_60);
            let header_embed_acks_60_deps_60 = make_header_block(header_acks_60_deps_60);

            // For a given embedded header, computes the max arity that still
            // fits alongside it, builds a root block at that arity, and asserts
            // the serialized block stays within max_block_size.
            fn test_header_embed(name: &str, header: CommitHeaderRef, max_block_size: usize) {
                let (id, key) = BlockRef::dummy().into();

                log_debug!("header content size : {}", header.encrypted_content_len());

                // Remaining budget after header content + fixed overheads,
                // divided by the per-child cost.
                let max_arity = (max_block_size
                    - header.encrypted_content_len()
                    - BLOCK_EXTRA
                    - HEADER_EMBED_EXTRA)
                    / CHILD_SIZE;

                log_debug!("max arity for header {} : {}", name, max_arity);

                let max_keys_when_real_header =
                    ChunkContentV0::InternalNode(vec![key.clone(); max_arity]);
                let max_keys_when_real_header_ser =
                    serde_bare::to_vec(&max_keys_when_real_header).unwrap();

                let root_embed_max = Block::new(
                    vec![id; max_arity],
                    Some(header),
                    max_keys_when_real_header_ser.clone(),
                    None,
                );
                let root_embed_max_ser = serde_bare::to_vec(&root_embed_max).unwrap();

                log_debug!(
                    "block size of root block with header {} with max possible arity children : {}",
                    name,
                    root_embed_max_ser.len()
                );

                assert!(root_embed_max_ser.len() <= max_block_size);
            }

            test_header_embed(
                "embed acks 60 deps 60",
                header_embed_acks_60_deps_60,
                max_block_size,
            );

            test_header_embed("embed acks 60", header_embed_acks_60, max_block_size);

            test_header_embed("embed acks 2", header_embed_acks_2, max_block_size);

            test_header_embed("embed acks 1", header_embed_acks_1, max_block_size);
        }

        // Exercise the invariants at both extremes and several mid-range
        // store block-size configurations.
        let max_block_size = store_max_value_size();
        let min_block_size = store_valid_value_size(0);

        test_block(max_block_size);
        test_block(min_block_size);
        test_block(store_valid_value_size(10000));
        test_block(store_valid_value_size(100000));
        test_block(store_valid_value_size(1000000));
        test_block(store_valid_value_size(5000));
    }
1782}