1use core::fmt;
13use std::collections::{HashMap, HashSet};
14
15use chacha20::cipher::{KeyIvInit, StreamCipher};
16use chacha20::ChaCha20;
17use zeroize::Zeroize;
18
19use crate::block_storage::*;
20use crate::errors::*;
21use crate::log::*;
22use crate::store::Store;
23use crate::types::*;
24
// Serialization overhead constants used by the block-size arithmetic in
// `Object::new_with_convergence_key` and `Object::make_tree`.
pub const BLOCK_EXTRA: usize = 12;
pub const HEADER_REF_EXTRA: usize = 66;
pub const HEADER_EMBED_EXTRA: usize = 34;
pub const CHILD_SIZE: usize = 66;

pub const BLOCK_ID_SIZE: usize = 33;
pub const BLOCK_KEY_SIZE: usize = 33;
pub const BIG_VARINT_EXTRA: usize = 2;
pub const DATA_VARINT_EXTRA: usize = 4;
pub const BLOCK_MAX_DATA_EXTRA: usize = 4;
39
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Object {
    // All blocks of the object, addressed by their id.
    block_contents: HashMap<BlockId, Block>,

    // Block ids in tree order, leaves first; the last entry is the root
    // block (see `root_block`).
    blocks: Vec<BlockId>,

    // Optional commit header; only allowed for Commit content
    // (see `ObjectContent::can_have_header`).
    header: Option<CommitHeader>,

    // Blocks of the header object when it is stored as a separate object
    // (empty when the header is embedded in the root block or absent).
    header_blocks: Vec<Block>,

    // Test-only guard so a test cannot save the same object twice.
    #[cfg(test)]
    already_saved: bool,
}
58
59impl Object {
    /// Derives the convergent-encryption key for this store from the store's
    /// repo id and its overlay-branch read-capability secret, via
    /// `blake3::derive_key`. The intermediate key material is zeroized.
    ///
    /// Panics when the repo key is not an Ed25519 public key or the secret
    /// is not a ChaCha20 key.
    pub(crate) fn convergence_key(store: &Store) -> [u8; blake3::OUT_LEN] {
        let mut key_material = match (
            *store.get_store_repo().repo_id(),
            store.get_store_overlay_branch_readcap_secret().clone(),
        ) {
            (PubKey::Ed25519PubKey(pubkey), SymKey::ChaCha20Key(secret)) => {
                [pubkey, secret].concat()
            }
            (_, _) => panic!("cannot derive key with Montgomery key"),
        };
        let res = blake3::derive_key("NextGraph Data BLAKE3 key", key_material.as_slice());
        key_material.zeroize();
        res
    }
76
77 fn make_block(
78 mut content: Vec<u8>,
79 conv_key: &[u8; blake3::OUT_LEN],
80 children: Vec<ObjectId>,
81 header_ref: Option<CommitHeaderRef>,
82 already_existing: &mut HashMap<BlockKey, BlockId>,
83 ) -> Result<Block, BlockId> {
84 let key_hash = blake3::keyed_hash(conv_key, &content);
85
86 let key_slice = key_hash.as_bytes();
87 let key = SymKey::ChaCha20Key(key_slice.clone());
88 let it = already_existing.get(&key);
89 if it.is_some() {
90 return Err(*it.unwrap());
91 }
92 let nonce = [0u8; 12];
93 let mut cipher = ChaCha20::new(key_slice.into(), &nonce.into());
94 let mut content_enc_slice = &mut content.as_mut_slice();
96 cipher.apply_keystream(&mut content_enc_slice);
97
98 let block = Block::new(children, header_ref, content, Some(key));
99 Ok(block)
102 }
103
    /// Builds the separate header object for a `CommitHeaderV0` and returns
    /// a reference (id + key) to it, together with all of its blocks.
    fn make_header_v0(
        header: CommitHeaderV0,
        object_size: usize,
        conv_key: &ChaCha20Key,
    ) -> (ObjectRef, Vec<Block>) {
        let header_obj = Object::new_with_convergence_key(
            ObjectContent::V0(ObjectContentV0::CommitHeader(CommitHeader::V0(header))),
            None,
            object_size,
            conv_key,
        );
        let header_ref = ObjectRef {
            id: header_obj.id(),
            // a freshly built object always has a root key
            key: header_obj.key().unwrap(),
        };
        (header_ref, header_obj.blocks().cloned().collect())
    }
121
    /// Version dispatch over `CommitHeader`; currently only V0 exists.
    fn make_header(
        header: CommitHeader,
        object_size: usize,
        conv_key: &ChaCha20Key,
    ) -> (ObjectRef, Vec<Block>) {
        match header {
            CommitHeader::V0(v0) => Self::make_header_v0(v0, object_size, conv_key),
        }
    }
131
    /// Builds the upper levels of the block tree over `leaves`, grouping
    /// `arity` children per parent and recursing until a single root block
    /// remains.
    ///
    /// The prepared commit header is attached only to the root block:
    /// embedded when it fits beside the root's children, otherwise
    /// referenced by id. Returns the parent block ids created (root last)
    /// and any header blocks that must still be saved separately.
    fn make_tree(
        block_contents: &mut HashMap<BlockId, Block>,
        already_existing: &mut HashMap<BlockKey, BlockId>,
        leaves: &[BlockId],
        conv_key: &ChaCha20Key,
        header_prepare_size: usize,
        mut header_prepare_block_ref: Option<BlockRef>,
        mut header_prepare_blocks: Vec<Block>,
        valid_block_size: usize,
        arity: usize,
    ) -> (Vec<BlockId>, Vec<Block>) {
        let mut parents: Vec<BlockId> = vec![];
        let mut header_blocks = vec![];
        let chunks = leaves.chunks(arity);
        let mut it = chunks.peekable();
        while let Some(nodes) = it.next() {
            let children = nodes.to_vec();
            // Gather the children's keys; they become this internal node's
            // plaintext payload.
            let keys: Vec<BlockKey> = nodes
                .iter()
                .map(|block_id| block_contents.get(block_id).unwrap().key().unwrap())
                .collect();
            let content = ChunkContentV0::InternalNode(keys);
            let content_ser = serde_bare::to_vec(&content).unwrap();
            // This level collapses to a single parent (first and only chunk)
            // => that parent is the root block: attach the header here.
            let header = if parents.is_empty() && it.peek().is_none() {
                let mut header_prepare_blocks_taken = vec![];
                header_prepare_blocks_taken.append(&mut header_prepare_blocks);
                match (
                    header_prepare_size,
                    header_prepare_block_ref.take(),
                    header_prepare_blocks_taken,
                ) {
                    // no header at all
                    (0, None, _) => None,
                    (header_size, Some(block_ref), blocks) => {
                        // Embed only when the root still has room for the
                        // header next to its child references.
                        let is_embeddable = header_size > 0
                            && ((valid_block_size
                                - BLOCK_EXTRA
                                - HEADER_EMBED_EXTRA
                                - header_size)
                                / CHILD_SIZE)
                                >= children.len();
                        let (header_r, mut h_blocks) =
                            Self::make_header_ref(is_embeddable, block_ref, blocks);
                        header_blocks.append(&mut h_blocks);
                        header_r
                    }
                    // a non-zero size without a block ref cannot happen
                    (_, None, _) => unimplemented!(),
                }
            } else {
                None
            };
            Self::add_block(
                Self::make_block(content_ser, conv_key, children, header, already_existing),
                &mut parents,
                block_contents,
                already_existing,
            );
        }
        // More than one parent: build the next level on top of them.
        // (The header ref/blocks were not consumed in that case and are
        // forwarded to the recursive call.)
        if 1 < parents.len() {
            let mut great_parents = Self::make_tree(
                block_contents,
                already_existing,
                parents.as_slice(),
                conv_key,
                header_prepare_size,
                header_prepare_block_ref,
                header_prepare_blocks,
                valid_block_size,
                arity,
            );
            parents.append(&mut great_parents.0);
            header_blocks.append(&mut great_parents.1);
        }
        (parents, header_blocks)
    }
211
212 fn make_header_ref(
213 embedded: bool,
214 header_ref: BlockRef,
215 blocks: Vec<Block>,
216 ) -> (Option<CommitHeaderRef>, Vec<Block>) {
217 if embedded {
218 (
219 Some(CommitHeaderRef {
220 obj: CommitHeaderObject::EncryptedContent(
221 blocks[0].encrypted_content().to_vec(),
222 ),
223 key: header_ref.key,
224 }),
225 vec![],
226 )
227 } else {
228 (
229 Some(CommitHeaderRef {
230 obj: CommitHeaderObject::Id(header_ref.id),
231 key: header_ref.key,
232 }),
233 blocks,
234 )
235 }
236 }
237
238 fn add_block(
239 block_result: Result<Block, BlockId>,
240 blocks: &mut Vec<BlockId>,
241 block_contents: &mut HashMap<BlockId, Block>,
242 already_existing: &mut HashMap<BlockKey, BlockId>,
243 ) {
244 match block_result {
245 Ok(mut block) => {
246 let id = block.get_and_save_id();
247 blocks.push(id);
248 if !block_contents.contains_key(&id) {
249 already_existing.insert(block.key().unwrap(), id);
250 block_contents.insert(id, block);
251 }
252 }
253 Err(id) => {
254 blocks.push(id);
255 }
256 }
257 }
258
    /// Creates a new Object from `content`, deriving the convergence key
    /// from the store. The key material is zeroized before returning.
    pub fn new(
        content: ObjectContent,
        header: Option<CommitHeader>,
        block_size: usize,
        store: &Store,
    ) -> Object {
        let mut conv_key = Self::convergence_key(store);
        let res = Self::new_with_convergence_key(content, header, block_size, &conv_key);
        conv_key.zeroize();
        res
    }
281
    /// Builds a new Object by chunking and convergently encrypting
    /// `content` into blocks of at most `block_size` (rounded to a valid
    /// store value size).
    ///
    /// If the serialized content fits in a single block, that block is the
    /// root; otherwise the content is chunked into leaves and a tree of
    /// internal nodes is built on top (see `make_tree`). A `header` is only
    /// allowed for Commit content (panics otherwise) and is either embedded
    /// in the root block or stored as a separate header object referenced
    /// from the root.
    pub fn new_with_convergence_key(
        content: ObjectContent,
        mut header: Option<CommitHeader>,
        block_size: usize,
        conv_key: &ChaCha20Key,
    ) -> Object {
        if header.is_some() && !content.can_have_header() {
            panic!(
                "cannot make a new Object with header if ObjectContent type different from Commit"
            );
        }
        let valid_block_size = store_valid_value_size(block_size);
        // Reserve room in the root for a header reference when one exists.
        let max_data_payload_size =
            valid_block_size - BLOCK_EXTRA - HEADER_REF_EXTRA * header.as_ref().map_or(0, |_| 1);
        let max_arity: usize = max_data_payload_size / CHILD_SIZE;

        let mut blocks: Vec<BlockId> = vec![];
        let mut block_contents: HashMap<BlockId, Block> = HashMap::new();
        let mut already_existing: HashMap<BlockKey, BlockId> = HashMap::new();

        // (embeddable_size, header object ref, header blocks).
        // The size is non-zero only when the header object is a single
        // block — the only case where it could be embedded into the root.
        let header_prepare = match &header {
            None => (0 as usize, None, vec![]),
            Some(h) => {
                let block_info = Self::make_header(h.clone(), valid_block_size, conv_key);
                if block_info.1.len() == 1 {
                    (
                        block_info.1[0].encrypted_content().len(),
                        Some(block_info.0),
                        block_info.1,
                    )
                } else {
                    (0 as usize, Some(block_info.0), block_info.1)
                }
            }
        };
        let content_ser = serde_bare::to_vec(&content).unwrap();
        let content_len = content_ser.len();

        let header_blocks = if content_len <= max_data_payload_size {
            // Everything fits into a single root block.
            let data_chunk = ChunkContentV0::DataChunk(content_ser.clone());
            let content_ser = serde_bare::to_vec(&data_chunk).unwrap();

            let (header_ref, h_blocks) = match header_prepare {
                (0, None, _) => (None, vec![]),
                (header_size, Some(block_ref), blocks) => {
                    // Embed the header only when it fits beside the content.
                    let is_embeddable = header_size > 0
                        && valid_block_size - BLOCK_EXTRA - HEADER_EMBED_EXTRA - content_ser.len()
                            > header_size;
                    Self::make_header_ref(is_embeddable, block_ref, blocks)
                }
                // a non-zero size without a block ref cannot happen
                (_, None, _) => unimplemented!(),
            };
            Self::add_block(
                Self::make_block(
                    content_ser,
                    conv_key,
                    vec![],
                    header_ref,
                    &mut already_existing,
                ),
                &mut blocks,
                &mut block_contents,
                &mut already_existing,
            );

            h_blocks
        } else {
            // Content too large: chunk it into leaf blocks, then build the
            // tree of internal nodes above them.
            let mut i = 0;
            #[cfg(not(target_arch = "wasm32"))]
            let _total = std::cmp::max(1, content_len / (valid_block_size - BLOCK_EXTRA));
            for chunk in content_ser.chunks(valid_block_size - BLOCK_EXTRA) {
                let data_chunk = ChunkContentV0::DataChunk(chunk.to_vec());
                let chunk_ser = serde_bare::to_vec(&data_chunk).unwrap();
                Self::add_block(
                    Self::make_block(chunk_ser, conv_key, vec![], None, &mut already_existing),
                    &mut blocks,
                    &mut block_contents,
                    &mut already_existing,
                );
                #[cfg(not(target_arch = "wasm32"))]
                log_debug!(
                    "make_block {} of {} - {}%",
                    i + 1,
                    _total + 1,
                    i * 100 / _total
                );
                i = i + 1;
            }

            let mut parents = Self::make_tree(
                &mut block_contents,
                &mut already_existing,
                blocks.as_slice(),
                conv_key,
                header_prepare.0,
                header_prepare.1,
                header_prepare.2,
                valid_block_size,
                max_arity,
            );

            blocks.append(&mut parents.0);
            parents.1
        };

        if header_blocks.len() > 0 {
            // The last header block is the root of the header object;
            // record its id in the in-memory header.
            header
                .as_mut()
                .unwrap()
                .set_id(header_blocks.last().unwrap().id());
        }
        Object {
            blocks,
            block_contents,
            header,
            header_blocks,
            #[cfg(test)]
            already_saved: false,
        }
    }
426
427 pub fn load_ref(reference: &ObjectRef, store: &Store) -> Result<Object, ObjectParseError> {
431 Self::load(reference.id.clone(), Some(reference.key.clone()), store)
432 }
433
434 pub fn load_header(
435 root_block: &Block,
436 store: &Store,
437 ) -> Result<Option<CommitHeader>, ObjectParseError> {
438 Ok(Self::load_header_(root_block, store)?.0)
439 }
440
    /// Resolves the header reference carried by a root block, if any.
    ///
    /// The header is either a separate object (loaded by id from the store)
    /// or embedded as encrypted content directly in the reference. Returns
    /// the header plus its blocks (empty when embedded or absent).
    fn load_header_(
        root: &Block,
        store: &Store,
    ) -> Result<(Option<CommitHeader>, Vec<Block>), ObjectParseError> {
        match root.header_ref() {
            Some(header_ref) => match header_ref.obj {
                // These variants are never produced for a root block's
                // header reference (see `make_header_ref`).
                CommitHeaderObject::None | CommitHeaderObject::RandomAccess => {
                    panic!("shouldn't happen")
                }
                CommitHeaderObject::Id(id) => {
                    let obj_res = Object::load(id, Some(header_ref.key.clone()), store);
                    match obj_res {
                        Err(e) => return Err(e),
                        Ok(obj) => match obj.content()? {
                            ObjectContent::V0(ObjectContentV0::CommitHeader(mut commit_header)) => {
                                commit_header.set_id(id);
                                Ok((Some(commit_header), obj.blocks().cloned().collect()))
                            }
                            _ => {
                                return Err(ObjectParseError::InvalidHeader);
                            }
                        },
                    }
                }
                CommitHeaderObject::EncryptedContent(content) => {
                    // Embedded header: decrypt in place, no store access.
                    let (_, decrypted_content) =
                        Block::new_with_encrypted_content(content, None).read(&header_ref.key)?;
                    match serde_bare::from_slice(&decrypted_content) {
                        Ok(ObjectContent::V0(ObjectContentV0::CommitHeader(commit_header))) => {
                            Ok((Some(commit_header), vec![]))
                        }
                        Err(_e) => {
                            return Err(ObjectParseError::InvalidHeader);
                        }
                        _ => {
                            return Err(ObjectParseError::InvalidHeader);
                        }
                    }
                }
            },
            None => Ok((None, vec![])),
        }
    }
484
    /// Loads an Object and all of its blocks from the store by root `id`.
    ///
    /// `key`, when provided, is set on the root block so the content can be
    /// decrypted later. Returns `MissingBlocks` when object blocks are
    /// absent from the store, or `MissingHeaderBlocks` (carrying the
    /// otherwise complete object) when only header blocks are missing.
    pub fn load(
        id: ObjectId,
        key: Option<SymKey>,
        store: &Store,
    ) -> Result<Object, ObjectParseError> {
        // Level-by-level load; each loaded id is prepended to `blocks`, so
        // the final order is leaves first with the root block last.
        fn load_tree(
            parents: Vec<BlockId>,
            store: &Store,
            blocks: &mut Vec<BlockId>,
            missing: &mut Vec<BlockId>,
            block_contents: &mut HashMap<BlockId, Block>,
        ) {
            let mut children: Vec<BlockId> = vec![];
            for id in parents {
                match store.get(&id) {
                    Ok(block) => {
                        match &block {
                            Block::V0(o) => {
                                // reversed so prepending keeps sibling order
                                children.extend(o.children().iter().rev());
                            }
                        }
                        blocks.insert(0, id);
                        if !block_contents.contains_key(&id) {
                            block_contents.insert(id, block);
                        }
                    }
                    Err(_) => missing.push(id.clone()),
                }
            }
            if !children.is_empty() {
                load_tree(children, store, blocks, missing, block_contents);
            }
        }

        let mut blocks: Vec<BlockId> = vec![];
        let mut block_contents: HashMap<BlockId, Block> = HashMap::new();
        let mut missing: Vec<BlockId> = vec![];

        load_tree(
            vec![id],
            store,
            &mut blocks,
            &mut missing,
            &mut block_contents,
        );

        if !missing.is_empty() {
            return Err(ObjectParseError::MissingBlocks(missing));
        }

        // The root block is the last id; attach the caller-supplied key.
        let root = block_contents.get_mut(blocks.last().unwrap()).unwrap();
        if key.is_some() {
            root.set_key(key);
        }

        let header = match Self::load_header_(root, store) {
            Err(ObjectParseError::MissingBlocks(m)) => {
                // Object blocks are complete; only header blocks missing.
                // Hand back the partially loaded object with the list.
                return Err(ObjectParseError::MissingHeaderBlocks((
                    Object {
                        blocks,
                        block_contents,
                        header: None,
                        header_blocks: vec![],
                        #[cfg(test)]
                        already_saved: false,
                    },
                    m,
                )));
            }
            Err(e) => return Err(e),
            Ok(h) => h,
        };

        Ok(Object {
            blocks,
            block_contents,
            header: header.0,
            header_blocks: header.1,
            #[cfg(test)]
            already_saved: true,
        })
    }
570
571 pub fn save(&self, store: &Store) -> Result<Vec<BlockId>, StorageError> {
573 let mut deduplicated: HashSet<ObjectId> = HashSet::new();
574 for block_id in self.blocks.iter() {
576 deduplicated.insert(*block_id);
577 store.put(self.block_contents.get(block_id).unwrap())?;
578 }
579 for block in &self.header_blocks {
580 let id = block.id();
581 if deduplicated.get(&id).is_none() {
582 deduplicated.insert(id);
583 store.put(block)?;
584 }
585 }
586 let root_id = self.id();
587 let mut blocks = vec![root_id];
588 deduplicated.remove(&root_id);
589 let list = deduplicated.drain();
590 blocks.append(&mut list.collect());
591 deduplicated.shrink_to(0);
592 Ok(blocks)
593 }
594
595 #[cfg(test)]
596 pub fn save_in_test(&mut self, store: &Store) -> Result<Vec<BlockId>, StorageError> {
597 assert!(self.already_saved == false);
598 self.already_saved = true;
599
600 self.save(store)
601 }
602
603 pub fn id(&self) -> ObjectId {
605 self.root_block().id()
606 }
607
608 pub fn get_and_save_id(&mut self) -> ObjectId {
610 self.block_contents
611 .get_mut(self.blocks.last().unwrap())
612 .unwrap()
613 .get_and_save_id()
614 }
615
616 pub fn key(&self) -> Option<SymKey> {
618 self.root_block().key()
619 }
620
621 pub fn reference(&self) -> Option<ObjectRef> {
623 if self.key().is_some() {
624 Some(ObjectRef {
625 id: self.id(),
626 key: self.key().unwrap(),
627 })
628 } else {
629 None
630 }
631 }
632
633 pub fn is_root(&self) -> bool {
634 self.header.as_ref().map_or(true, |h| h.is_root())
635 }
636
637 pub fn deps(&self) -> Vec<ObjectId> {
640 match &self.header {
641 Some(h) => h.deps(),
642 None => vec![],
643 }
644 }
645
646 pub fn acks_and_nacks(&self) -> Vec<ObjectId> {
649 match &self.header {
650 Some(h) => h.acks_and_nacks(),
651 None => vec![],
652 }
653 }
654
655 pub fn acks(&self) -> Vec<ObjectId> {
658 match &self.header {
659 Some(h) => h.acks(),
660 None => vec![],
661 }
662 }
663
664 pub fn root_block(&self) -> &Block {
665 self.block_contents
666 .get(self.blocks.last().unwrap())
667 .unwrap()
668 }
669
    /// The commit header, if any.
    pub fn header(&self) -> &Option<CommitHeader> {
        &self.header
    }
673
674 pub fn blocks(&self) -> impl Iterator<Item = &Block> + '_ {
675 self.blocks
676 .iter()
677 .map(|key| self.block_contents.get(key).unwrap())
678 }
679
680 pub fn all_blocks_len(&self) -> usize {
681 self.blocks.len() + self.header_blocks.len()
682 }
683
    /// Number of object blocks (header blocks excluded).
    pub fn blocks_len(&self) -> usize {
        self.blocks.len()
    }
687
    /// Number of separately stored header blocks.
    pub fn header_blocks_len(&self) -> usize {
        self.header_blocks.len()
    }
691
692 pub fn size(&self) -> usize {
693 let mut total = 0;
694 self.blocks().for_each(|b| {
695 let s = b.size();
696 total += s;
698 });
699 self.header_blocks.iter().for_each(|b| {
700 let s = b.size();
701 total += s;
703 });
704 total
705 }
706
707 pub fn dedup_size(&self) -> usize {
708 let mut total = 0;
709 self.block_contents.values().for_each(|b| total += b.size());
710 self.header_blocks.iter().for_each(|b| total += b.size());
711 total
712 }
713
    /// Read-only access to the id → block map.
    pub fn hashmap(&self) -> &HashMap<BlockId, Block> {
        &self.block_contents
    }
717
    /// Walks the block tree level by level, decrypting each block with the
    /// key supplied by its parent.
    ///
    /// `parents` holds the (id, key) pairs of the current level and
    /// `parent_index` is the index in `blocks` where that level starts
    /// (`blocks` stores levels bottom-up, root last). Optionally collects
    /// the decrypted leaf blocks into `leaves` and/or the concatenated
    /// chunk payloads into `obj_content`. Returns the depth of the tree
    /// below the current level.
    fn collect_leaves(
        blocks: &Vec<BlockId>,
        parents: &Vec<(ObjectId, SymKey)>,
        parent_index: usize,
        leaves: &mut Option<&mut Vec<Block>>,
        obj_content: &mut Option<&mut Vec<u8>>,
        block_contents: &HashMap<BlockId, Block>,
    ) -> Result<u8, ObjectParseError> {
        let mut children: Vec<(ObjectId, SymKey)> = vec![];
        let mut i = parent_index;

        for (id, key) in parents {
            let block = block_contents.get(&blocks[i]).unwrap();
            i += 1;

            // The block at this position must match the id the parent
            // advertised, otherwise the tree is corrupt.
            let block_id = block.id();
            if *id != block_id {
                log_debug!("Invalid ObjectId.\nExp: {:?}\nGot: {:?}", *id, block_id);
                return Err(ObjectParseError::InvalidBlockId);
            }

            match block {
                Block::V0(b) => {
                    let b_children = b.children();
                    if leaves.is_none() && obj_content.is_none() {
                        // Depth-only walk: no decryption needed, record the
                        // children with placeholder keys and move on.
                        for id in b_children {
                            #[allow(deprecated)]
                            children.push((id.clone(), ObjectKey::nil()));
                        }
                        continue;
                    }
                    // Decrypt this block's payload with the parent-supplied key.
                    let mut content_dec = b.content.encrypted_content().clone();
                    match key {
                        SymKey::ChaCha20Key(key) => {
                            let nonce = [0u8; 12];
                            let mut cipher = ChaCha20::new(key.into(), &nonce.into());
                            let mut content_dec_slice = &mut content_dec.as_mut_slice();
                            cipher.apply_keystream(&mut content_dec_slice);
                        }
                    }

                    let content: ChunkContentV0;
                    match serde_bare::from_slice(content_dec.as_slice()) {
                        Ok(c) => content = c,
                        Err(_e) => {
                            return Err(ObjectParseError::BlockDeserializeError);
                        }
                    }
                    match content {
                        // Internal node: payload is the keys of its children.
                        ChunkContentV0::InternalNode(keys) => {
                            if keys.len() != b_children.len() {
                                log_debug!(
                                    "Invalid keys length: got {}, expected {}",
                                    keys.len(),
                                    b_children.len()
                                );
                                log_debug!("!!! children: {:?}", b_children);
                                log_debug!("!!! keys: {:?}", keys);
                                return Err(ObjectParseError::InvalidKeys);
                            }

                            for (id, key) in b_children.iter().zip(keys.iter()) {
                                children.push((id.clone(), key.clone()));
                            }
                        }
                        // Leaf: payload is a chunk of the object content.
                        ChunkContentV0::DataChunk(chunk) => {
                            if leaves.is_some() {
                                let mut leaf = block.clone();
                                leaf.set_key(Some(key.clone()));
                                let l = &mut **leaves.as_mut().unwrap();
                                l.push(leaf);
                            }
                            if obj_content.is_some() {
                                let c = &mut **obj_content.as_mut().unwrap();
                                c.extend_from_slice(chunk.as_slice());
                            }
                        }
                    }
                }
            }
        }
        Ok(if !children.is_empty() {
            if parent_index < children.len() {
                return Err(ObjectParseError::InvalidChildren);
            }
            // The children's level starts `children.len()` positions
            // earlier in `blocks`; each recursion adds one to the depth.
            Self::collect_leaves(
                blocks,
                &children,
                parent_index - children.len(),
                leaves,
                obj_content,
                block_contents,
            )? + 1
        } else {
            0
        })
    }
830
831 pub fn content(&self) -> Result<ObjectContent, ObjectParseError> {
849 if self.key().is_none() {
851 return Err(ObjectParseError::MissingRootKey);
852 }
853 let mut obj_content: Vec<u8> = vec![];
854 let parents = vec![(self.id(), self.key().unwrap())];
855 match Self::collect_leaves(
856 &self.blocks,
857 &parents,
858 self.blocks.len() - 1,
859 &mut None,
860 &mut Some(&mut obj_content),
861 &self.block_contents,
862 ) {
863 Ok(_) => match serde_bare::from_slice(obj_content.as_slice()) {
864 Ok(c) => Ok(c),
865 Err(_e) => {
866 Err(ObjectParseError::ObjectDeserializeError)
868 }
869 },
870 Err(e) => Err(e),
871 }
872 }
873
874 pub fn depth(&self) -> Result<u8, ObjectParseError> {
876 if self.key().is_none() {
877 return Err(ObjectParseError::MissingRootKey);
878 }
879 let parents = vec![(self.id(), self.key().unwrap())];
880 Self::collect_leaves(
881 &self.blocks,
882 &parents,
883 self.blocks.len() - 1,
884 &mut None,
885 &mut None,
886 &self.block_contents,
887 )
888 }
889
890 pub fn content_v0(&self) -> Result<ObjectContentV0, ObjectParseError> {
891 match self.content() {
892 Ok(ObjectContent::V0(v0)) => Ok(v0),
893 Err(e) => Err(e),
894 }
895 }
896}
897
898impl IObject for Object {
899 fn block_ids(&self) -> Vec<BlockId> {
900 let mut deduplicated: HashSet<ObjectId> = HashSet::new();
901 for block_id in self.blocks.iter() {
903 deduplicated.insert(*block_id);
904 }
905 for block in &self.header_blocks {
906 let id = block.id();
907 if deduplicated.get(&id).is_none() {
908 deduplicated.insert(id);
909 }
910 }
911 let root_id = self.id();
912 let mut blocks = vec![root_id];
913 deduplicated.remove(&root_id);
914 let list = deduplicated.drain();
915 blocks.append(&mut list.collect());
916 deduplicated.shrink_to(0);
917 blocks
918 }
919
920 fn id(&self) -> Option<ObjectId> {
921 Some(self.id())
922 }
923
924 fn key(&self) -> Option<SymKey> {
925 self.key()
926 }
927}
928
929impl fmt::Display for Object {
930 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
931 writeln!(f, "====== Object ID {}", self.id())?;
932 writeln!(
933 f,
934 "== Key: {}",
935 self.key().map_or("None".to_string(), |k| format!("{}", k))
936 )?;
937 #[cfg(test)]
938 writeln!(f, "== saved: {}", self.already_saved)?;
939 writeln!(
940 f,
941 "== Header: {}",
942 self.header
943 .as_ref()
944 .map_or("None".to_string(), |k| format!("{}", k))
945 )?;
946 writeln!(f, "== Blocks: {}", self.blocks.len())?;
947 let mut i = 0;
948 for block_id in &self.blocks {
949 writeln!(f, "========== {:03}: {}", i, block_id)?;
950 i += 1;
951 }
952 writeln!(f, "== Depth: {:?}", self.depth().unwrap_or(0))?;
953
954 writeln!(f, "== Header Blocks: {}", self.header_blocks.len())?;
955 i = 0;
956 for block in &self.header_blocks {
957 writeln!(f, "========== {:03}: {}", i, block.id())?;
958 }
959 write!(
960 f,
961 "{}",
962 self.content().map_or_else(
963 |e| format!("Error on content: {:?}", e),
964 |c| format!("{}", c)
965 )
966 )?;
967 Ok(())
968 }
969}
970
971impl ObjectContent {
972 pub fn can_have_header(&self) -> bool {
973 match self {
974 Self::V0(v0) => match v0 {
975 ObjectContentV0::Commit(_) => true,
976 _ => false,
977 },
978 }
979 }
980
981 pub fn new_file_v0_with_content(content: Vec<u8>, content_type: &str) -> Self {
982 ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 {
983 content_type: content_type.into(),
984 metadata: vec![],
985 content,
986 })))
987 }
988}
989
impl fmt::Display for ObjectContent {
    /// Prints a banner with the version and variant name, then the
    /// variant's own display output (empty for variants without one).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let (version, content) = match self {
            Self::V0(v0) => (
                "v0",
                match v0 {
                    ObjectContentV0::Commit(c) => ("Commit", format!("{}", c)),
                    ObjectContentV0::CommitBody(c) => ("CommitBody", format!("{}", c)),
                    ObjectContentV0::CommitHeader(c) => ("CommitHeader", format!("{}", c)),
                    ObjectContentV0::Quorum(_c) => ("Quorum", format!("{}", "")),
                    ObjectContentV0::Signature(_c) => ("Signature", format!("{}", "")),
                    ObjectContentV0::Certificate(_c) => ("Certificate", format!("{}", "")),
                    ObjectContentV0::SmallFile(_c) => ("SmallFile", format!("{}", "")),
                    ObjectContentV0::RandomAccessFileMeta(_c) => {
                        ("RandomAccessFileMeta", format!("{}", ""))
                    }
                    ObjectContentV0::RefreshCap(_c) => ("RefreshCap", format!("{}", "")),
                },
            ),
        };
        writeln!(f, "====== ObjectContent {} {} ======", version, content.0)?;
        write!(f, "{}", content.1)?;
        Ok(())
    }
}
1015
1016#[cfg(test)]
1017mod test {
1018
1019 use crate::object::*;
1020 use std::io::BufReader;
1021 use std::io::Read;
1022 use std::io::Write;
1023
    /// Round-trips a PubKey through its string representation.
    #[test]
    pub fn test_pubkey_from_str() {
        let pubkey = PubKey::Ed25519PubKey([1u8; 32]);
        let str = pubkey.to_string();
        let server_key: PubKey = str.as_str().try_into().unwrap();
        assert_eq!(server_key, pubkey);
    }
1039
    /// Passing a header with non-Commit content must panic in `Object::new`.
    #[test]
    #[should_panic]
    pub fn test_no_header() {
        let file = SmallFile::V0(SmallFileV0 {
            content_type: "image/jpeg".into(),
            metadata: vec![],
            content: vec![],
        });
        let content = ObjectContent::V0(ObjectContentV0::SmallFile(file));
        let store = Store::dummy_public_v0();
        let header = CommitHeader::new_with_acks([ObjectId::dummy()].to_vec());
        let _obj = Object::new(content, header, store_max_value_size(), &store);
    }
1054
    /// Chunks a real JPEG fixture into an object and writes every block out
    /// as a serialized `.ng` file under `tests/`.
    #[test]
    pub fn test_jpg() {
        let f = std::fs::File::open("tests/test.jpg").expect("open of tests/test.jpg");
        let mut reader = BufReader::new(f);
        let mut img_buffer: Vec<u8> = Vec::new();
        reader
            .read_to_end(&mut img_buffer)
            .expect("read of test.jpg");
        let content = ObjectContent::new_file_v0_with_content(img_buffer, "image/jpeg");

        let max_object_size = store_max_value_size();
        let store = Store::dummy_public_v0();
        let obj = Object::new(content, None, max_object_size, &store);

        log_debug!("{}", obj);

        let mut i = 0;
        for node in obj.blocks() {
            log_debug!("#{}: {}", i, node.id());
            let mut file = std::fs::File::create(format!("tests/{}.ng", node.id()))
                .expect("open block write file");
            let ser_file = serde_bare::to_vec(node).unwrap();
            file.write_all(&ser_file)
                .expect(&format!("write of block #{}", i));
            i += 1;
        }
    }
1083
    /// End-to-end round trip: build an object, check its content, save it,
    /// reload it with the key (content must match) and without the key
    /// (content must fail with `MissingRootKey`).
    #[test]
    pub fn test_object() {
        let file = SmallFile::V0(SmallFileV0 {
            content_type: "file/test".into(),
            metadata: Vec::from("some meta data here"),
            content: [(0..255).collect::<Vec<u8>>().as_slice(); 320].concat(),
        });
        let content = ObjectContent::V0(ObjectContentV0::SmallFile(file));

        let acks = vec![];
        // 0 is rounded up to the smallest valid block size.
        let max_object_size = 0;

        let store = Store::dummy_public_v0();

        let mut obj = Object::new(content.clone(), None, max_object_size, &store);

        log_debug!("{}", obj);

        assert_eq!(*obj.acks(), acks);

        match obj.content() {
            Ok(cnt) => {
                log_debug!("{}", cnt);
                assert_eq!(content, cnt);
            }
            Err(e) => panic!("Object parse error: {:?}", e),
        }

        obj.save_in_test(&store).expect("Object save error");

        // Reload with the root key: content must round-trip.
        let obj2 = Object::load(obj.id(), obj.key(), &store).unwrap();

        log_debug!("{}", obj2);

        assert_eq!(*obj2.acks(), acks);

        match obj2.content() {
            Ok(cnt) => {
                log_debug!("{}", cnt);
                assert_eq!(content, cnt);
            }
            Err(e) => panic!("Object2 parse error: {:?}", e),
        }

        // Reload without the key: content must be inaccessible.
        let obj3 = Object::load(obj.id(), None, &store).unwrap();

        log_debug!("{}", obj3);

        assert_eq!(*obj3.acks(), acks);

        match obj3.content() {
            Err(ObjectParseError::MissingRootKey) => (),
            Err(e) => panic!("Object3 parse error: {:?}", e),
            Ok(_) => panic!("Object3 should not return content"),
        }
    }
1142
    /// A content sized to exactly fill one maximum-size block must produce
    /// a single-block object (depth 0).
    #[test]
    pub fn test_depth_0() {
        let store = Store::dummy_public_v0();

        // Measure the serialization overhead with an empty file first.
        let empty_file =
            ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 {
                content_type: "".into(),
                metadata: vec![],
                content: vec![],
            })));
        let content_ser = serde_bare::to_vec(&empty_file).unwrap();
        log_debug!("content len for empty : {}", content_ser.len());

        let empty_obj = Object::new(empty_file, None, store_max_value_size(), &store);

        let empty_file_size = empty_obj.size();
        log_debug!("empty file size: {}", empty_file_size);

        // Largest content that still fits into a single block.
        let size =
            store_max_value_size() - empty_file_size - BLOCK_MAX_DATA_EXTRA - BIG_VARINT_EXTRA;
        log_debug!("full file content size: {}", size);

        let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 {
            content_type: "".into(),
            metadata: vec![],
            content: vec![99; size],
        })));
        let content_ser = serde_bare::to_vec(&content).unwrap();
        log_debug!("content len: {}", content_ser.len());

        let object = Object::new(content, None, store_max_value_size(), &store);
        log_debug!("{}", object);

        log_debug!("object size: {}", object.size());

        assert_eq!(object.blocks.len(), 1);
    }
1237
    /// ~16GB of data at maximum block size: one full level of leaves plus a
    /// single root (depth 1). Ignored by default — very memory hungry.
    #[ignore]
    #[test]
    pub fn test_depth_1() {
        // Expected arity/payload for the maximum block size.
        const MAX_ARITY_LEAVES: usize = 15887;
        const MAX_DATA_PAYLOAD_SIZE: usize = 1048564;

        // Slightly less than a full level, so it still fits in one level.
        let data_size = MAX_ARITY_LEAVES * MAX_DATA_PAYLOAD_SIZE - 10;

        let store = Store::dummy_public_v0();
        log_debug!("creating 16GB of data");
        let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 {
            content_type: "".into(),
            metadata: vec![],
            content: vec![99; data_size],
        })));
        log_debug!("creating object with that data");
        let object = Object::new(content, None, store_max_value_size(), &store);
        log_debug!("{}", object);

        let obj_size = object.size();
        log_debug!("object size: {}", obj_size);

        log_debug!("data size: {}", data_size);
        log_debug!(
            "overhead: {} - {}%",
            obj_size - data_size,
            ((obj_size - data_size) * 100) as f32 / data_size as f32
        );

        log_debug!("number of blocks : {}", object.blocks.len());
        assert_eq!(object.blocks.len(), MAX_ARITY_LEAVES + 1);
        assert_eq!(object.depth().unwrap(), 1);
    }
1278
    /// Exactly a full level of leaves: spills into a second level of
    /// internal nodes (depth 2). Ignored by default — very memory hungry.
    #[ignore]
    #[test]
    pub fn test_depth_2() {
        const MAX_ARITY_LEAVES: usize = 15887;
        const MAX_DATA_PAYLOAD_SIZE: usize = 1048564;

        let data_size = MAX_ARITY_LEAVES * MAX_DATA_PAYLOAD_SIZE;

        let store = Store::dummy_public_v0();
        log_debug!("creating 16GB of data");
        let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 {
            content_type: "".into(),
            metadata: vec![],
            content: vec![99; data_size],
        })));
        log_debug!("creating object with that data");
        let object = Object::new(content, None, store_max_value_size(), &store);
        log_debug!("{}", object);

        let obj_size = object.size();
        log_debug!("object size: {}", obj_size);

        log_debug!("data size: {}", data_size);
        log_debug!(
            "overhead: {} - {}%",
            obj_size - data_size,
            ((obj_size - data_size) * 100) as f32 / data_size as f32
        );

        log_debug!("number of blocks : {}", object.blocks.len());
        assert_eq!(object.blocks.len(), MAX_ARITY_LEAVES + 4);
        assert_eq!(object.depth().unwrap(), 2);
    }
1316
    /// ~900MB at the minimum valid block size: three levels of tree
    /// (depth 3). Ignored by default — memory hungry.
    #[ignore]
    #[test]
    pub fn test_depth_3() {
        // Expected arity/payload for the minimum valid block size.
        const MAX_ARITY_LEAVES: usize = 61;
        const MAX_DATA_PAYLOAD_SIZE: usize = 4084;

        // Slightly under three full levels of leaves.
        let data_size =
            MAX_ARITY_LEAVES * MAX_ARITY_LEAVES * MAX_ARITY_LEAVES * MAX_DATA_PAYLOAD_SIZE - 10;

        let store = Store::dummy_public_v0();
        log_debug!("creating 900MB of data");
        let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 {
            content_type: "".into(),
            metadata: vec![],
            content: vec![99; data_size],
        })));
        log_debug!("creating object with that data");
        let object = Object::new(content, None, store_valid_value_size(0), &store);
        log_debug!("{}", object);

        let obj_size = object.size();
        log_debug!("object size: {}", obj_size);

        log_debug!("data size: {}", data_size);
        log_debug!(
            "overhead: {} - {}%",
            obj_size - data_size,
            ((obj_size - data_size) * 100) as f32 / data_size as f32
        );

        let dedup_size = object.dedup_size();
        log_debug!(
            "dedup compression: {} - {}%",
            data_size - dedup_size,
            ((data_size - dedup_size) * 100) as f32 / data_size as f32
        );

        log_debug!("number of blocks : {}", object.blocks.len());
        assert_eq!(
            object.blocks.len(),
            MAX_ARITY_LEAVES * (MAX_ARITY_LEAVES + 1) * MAX_ARITY_LEAVES + MAX_ARITY_LEAVES + 1
        );
        assert_eq!(object.depth().unwrap(), 3);
    }
1365
    /// ~52GB at the minimum valid block size: four levels of tree
    /// (depth 4). Ignored by default — extremely memory hungry.
    #[ignore]
    #[test]
    pub fn test_depth_4() {
        const MAX_ARITY_LEAVES: usize = 61;
        const MAX_DATA_PAYLOAD_SIZE: usize = 4084;

        // Slightly under four full levels of leaves.
        let data_size = MAX_ARITY_LEAVES
            * MAX_ARITY_LEAVES
            * MAX_ARITY_LEAVES
            * MAX_ARITY_LEAVES
            * MAX_DATA_PAYLOAD_SIZE
            - 12;

        let store = Store::dummy_public_v0();
        log_debug!("creating 52GB of data");
        let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 {
            content_type: "".into(),
            metadata: vec![],
            content: vec![99; data_size],
        })));
        log_debug!("creating object with that data");
        let object = Object::new(content, None, store_valid_value_size(0), &store);
        log_debug!("{}", object);

        let obj_size = object.size();
        log_debug!("object size: {}", obj_size);

        log_debug!("data size: {}", data_size);
        log_debug!(
            "overhead: {} - {}%",
            obj_size - data_size,
            ((obj_size - data_size) * 100) as f32 / data_size as f32
        );

        log_debug!("number of blocks : {}", object.blocks.len());
        assert_eq!(
            object.blocks.len(),
            MAX_ARITY_LEAVES
                * (MAX_ARITY_LEAVES * (MAX_ARITY_LEAVES + 1) * MAX_ARITY_LEAVES
                    + MAX_ARITY_LEAVES
                    + 1)
                + 1
        );
        assert_eq!(object.depth().unwrap(), 4);
    }
1415
    /// Verifies that blocks constructed at the arity and payload limits
    /// derived from the size constants (BLOCK_EXTRA, CHILD_SIZE,
    /// HEADER_REF_EXTRA, HEADER_EMBED_EXTRA) always serialize to at most the
    /// target block size, for several store-valid block sizes.
    #[test]
    pub fn test_block_size() {
        // Runs the full battery of serialized-size checks for one candidate
        // maximum block size.
        fn test_block(max_block_size: usize) {
            // Max children of a header-less block, and of a root block that
            // carries a header reference, given the per-child overhead.
            let max_arity_leaves: usize = (max_block_size - BLOCK_EXTRA) / CHILD_SIZE;
            let max_arity_root: usize =
                (max_block_size - BLOCK_EXTRA - HEADER_REF_EXTRA) / CHILD_SIZE;

            // Max raw data bytes a single block's DataChunk can carry.
            let max_data_payload_size = max_block_size - BLOCK_EXTRA;

            log_debug!("max_block_size: {}", max_block_size);
            log_debug!("max_arity_leaves: {}", max_arity_leaves);
            log_debug!("max_arity_root: {}", max_arity_root);
            log_debug!("max_data_payload_size: {}", max_data_payload_size);

            // Dummy id/key reused for every child reference below.
            let (id, key) = ObjectRef::dummy().into();

            // Internal-node payloads holding 0, 1, 2, and max-arity child keys.
            let zero_key = ChunkContentV0::InternalNode(vec![]);
            let zero_key_ser = serde_bare::to_vec(&zero_key).unwrap();

            let one_key = ChunkContentV0::InternalNode(vec![key.clone()]);
            let one_key_ser = serde_bare::to_vec(&one_key).unwrap();

            let two_keys = ChunkContentV0::InternalNode(vec![key.clone(), key.clone()]);
            let two_keys_ser = serde_bare::to_vec(&two_keys).unwrap();

            let max_keys = ChunkContentV0::InternalNode(vec![key.clone(); max_arity_leaves]);
            let max_keys_ser = serde_bare::to_vec(&max_keys).unwrap();

            let max_keys_root = ChunkContentV0::InternalNode(vec![key.clone(); max_arity_root]);
            let max_keys_root_ser = serde_bare::to_vec(&max_keys_root).unwrap();

            // Data payloads: empty and filled to the computed maximum.
            let data_empty = ChunkContentV0::DataChunk(vec![]);
            let data_empty_ser = serde_bare::to_vec(&data_empty).unwrap();

            let data_full = ChunkContentV0::DataChunk(vec![0; max_data_payload_size]);
            let data_full_ser = serde_bare::to_vec(&data_full).unwrap();

            // Leaf blocks (no children, no header): empty vs full payload.
            let leaf_empty = Block::new(vec![], None, data_empty_ser.clone(), None);
            let leaf_empty_ser = serde_bare::to_vec(&leaf_empty).unwrap();

            log_debug!(
                "block size of empty leaf without header: {}",
                leaf_empty_ser.len()
            );

            let leaf_full_data = Block::new(vec![], None, data_full_ser.clone(), None);
            let leaf_full_data_ser = serde_bare::to_vec(&leaf_full_data).unwrap();

            log_debug!(
                "block size of full leaf block without header: {}",
                leaf_full_data_ser.len()
            );

            let internal_zero = Block::new(vec![], None, zero_key_ser.clone(), None);
            let internal_zero_ser = serde_bare::to_vec(&internal_zero).unwrap();

            log_debug!(
                "block size of empty internal block without header: {}",
                internal_zero_ser.len()
            );

            // A leaf filled to max_data_payload_size must fit the block size.
            assert!(leaf_full_data_ser.len() <= max_block_size);

            // Two header flavors: a reference to a separate header object,
            // and a header embedded (here with empty content) in the block.
            let header_ref = CommitHeaderRef::from_id_key(id, key.clone());

            let header_embed = CommitHeaderRef::from_content_key(vec![], key.clone());

            // Root blocks with empty data and each header flavor.
            let root_zero_header_ref = Block::new(
                vec![],
                Some(header_ref.clone()),
                data_empty_ser.clone(),
                None,
            );
            let root_zero_header_ref_ser = serde_bare::to_vec(&root_zero_header_ref).unwrap();

            let root_zero_header_embed = Block::new(
                vec![],
                Some(header_embed.clone()),
                data_empty_ser.clone(),
                None,
            );
            let root_zero_header_embed_ser = serde_bare::to_vec(&root_zero_header_embed).unwrap();

            log_debug!(
                "block size of empty root block with header ref: {}",
                root_zero_header_ref_ser.len()
            );

            log_debug!(
                "block size of empty root block with header embedded: {}",
                root_zero_header_embed_ser.len()
            );

            // Header-less internal blocks with 1, 2, and max-arity children.
            let internal_max =
                Block::new(vec![id; max_arity_leaves], None, max_keys_ser.clone(), None);
            let internal_max_ser = serde_bare::to_vec(&internal_max).unwrap();

            let internal_one = Block::new(vec![id; 1], None, one_key_ser.clone(), None);
            let internal_one_ser = serde_bare::to_vec(&internal_one).unwrap();

            let internal_two = Block::new(vec![id; 2], None, two_keys_ser.clone(), None);
            let internal_two_ser = serde_bare::to_vec(&internal_two).unwrap();

            log_debug!(
                "block size of internal block with 1 child, without header: {}",
                internal_one_ser.len()
            );

            log_debug!(
                "block size of internal block with 2 children, without header: {}",
                internal_two_ser.len()
            );

            log_debug!(
                "block size of internal block with max arity children, without header: {}",
                internal_max_ser.len()
            );

            // Max-arity internal block must fit the block size.
            assert!(internal_max_ser.len() <= max_block_size);

            // Root blocks with a header ref and 1 / 2 / max_arity_root children.
            let root_one = Block::new(
                vec![id; 1],
                Some(header_ref.clone()),
                one_key_ser.clone(),
                None,
            );
            let root_one_ser = serde_bare::to_vec(&root_one).unwrap();

            let root_two = Block::new(
                vec![id; 2],
                Some(header_ref.clone()),
                two_keys_ser.clone(),
                None,
            );
            let root_two_ser = serde_bare::to_vec(&root_two).unwrap();

            let root_max = Block::new(
                vec![id; max_arity_root],
                Some(header_ref.clone()),
                max_keys_root_ser.clone(),
                None,
            );
            let root_max_ser = serde_bare::to_vec(&root_max).unwrap();

            // Root with a header ref: the data payload shrinks by
            // HEADER_REF_EXTRA to leave room for the header reference.
            let data_full_when_header_ref =
                ChunkContentV0::DataChunk(vec![0; max_data_payload_size - HEADER_REF_EXTRA]);
            let data_full_when_header_ref_ser =
                serde_bare::to_vec(&data_full_when_header_ref).unwrap();

            let root_full = Block::new(
                vec![],
                Some(header_ref.clone()),
                data_full_when_header_ref_ser.clone(),
                None,
            );
            let root_full_ser = serde_bare::to_vec(&root_full).unwrap();

            log_debug!(
                "block size of root block with header ref with 1 child: {}",
                root_one_ser.len()
            );

            log_debug!(
                "block size of root block with header ref with 2 children: {}",
                root_two_ser.len()
            );

            log_debug!(
                "block size of root block with header ref with max arity children: {}",
                root_max_ser.len()
            );

            log_debug!(
                "block size of root block with header ref with full DataChunk (fitting ObjectContent): {}",
                root_full_ser.len()
            );

            assert!(root_full_ser.len() <= max_block_size);

            // Same shapes again, with an embedded header instead of a ref.
            let root_embed_one = Block::new(
                vec![id; 1],
                Some(header_embed.clone()),
                one_key_ser.clone(),
                None,
            );
            let root_embed_one_ser = serde_bare::to_vec(&root_embed_one).unwrap();

            let root_embed_two = Block::new(
                vec![id; 2],
                Some(header_embed.clone()),
                two_keys_ser.clone(),
                None,
            );
            let root_embed_two_ser = serde_bare::to_vec(&root_embed_two).unwrap();

            let root_embed_max = Block::new(
                vec![id; max_arity_root],
                Some(header_embed.clone()),
                max_keys_root_ser.clone(),
                None,
            );
            let root_embed_max_ser = serde_bare::to_vec(&root_embed_max).unwrap();

            // Embedded (empty-content) header costs HEADER_EMBED_EXTRA bytes
            // of the data payload.
            let data_full_when_header_embed =
                ChunkContentV0::DataChunk(vec![0; max_data_payload_size - HEADER_EMBED_EXTRA]);
            let data_full_when_header_embed_ser =
                serde_bare::to_vec(&data_full_when_header_embed).unwrap();

            let root_embed_full = Block::new(
                vec![],
                Some(header_embed.clone()),
                data_full_when_header_embed_ser.clone(),
                None,
            );
            let root_embed_full_ser = serde_bare::to_vec(&root_embed_full).unwrap();

            log_debug!(
                "block size of root block with header embed with 1 child: {}",
                root_embed_one_ser.len()
            );

            log_debug!(
                "block size of root block with header embed with 2 children: {}",
                root_embed_two_ser.len()
            );

            log_debug!(
                "block size of root block with header embed with max arity children: {}",
                root_embed_max_ser.len()
            );

            log_debug!(
                "block size of root block with header embed with full DataChunk (fitting ObjectContent): {}",
                root_embed_full_ser.len()
            );

            assert!(root_embed_full_ser.len() <= max_block_size);

            // Realistic commit headers of increasing size, to embed below.
            let header_acks_1 = CommitHeader::new_with_acks(vec![id]);
            let header_acks_2 = CommitHeader::new_with_acks(vec![id, id]);
            let header_acks_60 = CommitHeader::new_with_acks(vec![id; 60]);
            let header_acks_60_deps_60 =
                CommitHeader::new_with_deps_and_acks(vec![id; 60], vec![id; 60]);

            // Serializes a CommitHeader the way it would be stored (as an
            // ObjectContent wrapped in a DataChunk) and wraps it in an
            // embedded-header ref with a dummy key.
            fn make_header_block(header: Option<CommitHeader>) -> CommitHeaderRef {
                let content_ser = serde_bare::to_vec(&ObjectContent::V0(
                    ObjectContentV0::CommitHeader(header.unwrap()),
                ))
                .unwrap();
                let data_chunk = ChunkContentV0::DataChunk(content_ser.clone());
                let encrypted_content = serde_bare::to_vec(&data_chunk).unwrap();
                CommitHeaderRef::from_content_key(encrypted_content, SymKey::dummy())
            }

            let header_embed_acks_1 = make_header_block(header_acks_1);
            let header_embed_acks_2 = make_header_block(header_acks_2);
            let header_embed_acks_60 = make_header_block(header_acks_60);
            let header_embed_acks_60_deps_60 = make_header_block(header_acks_60_deps_60);

            // For a real embedded header, recomputes the max arity left over
            // after the header content, builds a root at that arity, and
            // asserts it still fits the block size.
            fn test_header_embed(name: &str, header: CommitHeaderRef, max_block_size: usize) {
                let (id, key) = BlockRef::dummy().into();

                log_debug!("header content size : {}", header.encrypted_content_len());

                let max_arity = (max_block_size
                    - header.encrypted_content_len()
                    - BLOCK_EXTRA
                    - HEADER_EMBED_EXTRA)
                    / CHILD_SIZE;

                log_debug!("max arity for header {} : {}", name, max_arity);

                let max_keys_when_real_header =
                    ChunkContentV0::InternalNode(vec![key.clone(); max_arity]);
                let max_keys_when_real_header_ser =
                    serde_bare::to_vec(&max_keys_when_real_header).unwrap();

                let root_embed_max = Block::new(
                    vec![id; max_arity],
                    Some(header),
                    max_keys_when_real_header_ser.clone(),
                    None,
                );
                let root_embed_max_ser = serde_bare::to_vec(&root_embed_max).unwrap();

                log_debug!(
                    "block size of root block with header {} with max possible arity children : {}",
                    name,
                    root_embed_max_ser.len()
                );

                assert!(root_embed_max_ser.len() <= max_block_size);
            }

            test_header_embed(
                "embed acks 60 deps 60",
                header_embed_acks_60_deps_60,
                max_block_size,
            );

            test_header_embed("embed acks 60", header_embed_acks_60, max_block_size);

            test_header_embed("embed acks 2", header_embed_acks_2, max_block_size);

            test_header_embed("embed acks 1", header_embed_acks_1, max_block_size);
        }

        // Exercise the extremes plus several mid-range valid block sizes.
        let max_block_size = store_max_value_size();
        let min_block_size = store_valid_value_size(0);

        test_block(max_block_size);
        test_block(min_block_size);
        test_block(store_valid_value_size(10000));
        test_block(store_valid_value_size(100000));
        test_block(store_valid_value_size(1000000));
        test_block(store_valid_value_size(5000));
    }
1755}