#![allow(dead_code)]

use bytemuck::cast_slice;
use derive_getters::Dissolve;
use rayon::prelude::*;
use std::ops::Range;

pub mod blocks;

pub type Token = u32;

pub type Salt = Vec<u8>;

pub type SaltHash = u64;

pub type BlockHash = u64;

pub type SequenceHash = u64;

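/// Computes the 64-bit XXH3 hash of `data`, using `seed` as the hash seed.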
pub fn compute_hash_v2(data: &[u8], seed: u64) -> u64 {
    xxhash_rust::xxh3::xxh3_64_with_seed(data, seed)
}

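/// A contiguous list of [`Token`]s, newtyped so it can be cheaply converted from
/// and compared against the common token container types.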
#[derive(Debug, Clone, Dissolve, Default, Eq)]
pub struct Tokens(Vec<Token>);

impl AsRef<[Token]> for Tokens {
    fn as_ref(&self) -> &[Token] {
        &self.0
    }
}

impl std::ops::Deref for Tokens {
    type Target = [Token];

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl std::borrow::Borrow<[Token]> for Tokens {
    fn borrow(&self) -> &[Token] {
        &self.0
    }
}

impl From<Vec<Token>> for Tokens {
    fn from(tokens: Vec<Token>) -> Self {
        Tokens(tokens)
    }
}

impl From<&[Token]> for Tokens {
    fn from(tokens: &[Token]) -> Self {
        Tokens(tokens.to_vec())
    }
}

impl From<Vec<usize>> for Tokens {
    fn from(tokens: Vec<usize>) -> Self {
        Tokens(tokens.into_iter().map(|t| t as u32).collect())
    }
}

impl From<Vec<i32>> for Tokens {
    fn from(tokens: Vec<i32>) -> Self {
        Tokens(tokens.into_iter().map(|t| t as u32).collect())
    }
}

impl From<&[i32]> for Tokens {
    fn from(tokens: &[i32]) -> Self {
        Tokens(tokens.iter().map(|&t| t as u32).collect())
    }
}

impl From<Tokens> for Vec<Token> {
    fn from(tokens: Tokens) -> Self {
        tokens.0
    }
}

impl PartialEq<Vec<Token>> for Tokens {
    fn eq(&self, other: &Vec<Token>) -> bool {
        self.0 == *other
    }
}

impl PartialEq<Tokens> for Vec<Token> {
    fn eq(&self, other: &Tokens) -> bool {
        *self == other.0
    }
}

impl PartialEq<[Token]> for Tokens {
    fn eq(&self, other: &[Token]) -> bool {
        self.0.as_slice() == other
    }
}

impl PartialEq<Tokens> for &[Token] {
    fn eq(&self, other: &Tokens) -> bool {
        *self == other.0.as_slice()
    }
}

impl PartialEq for Tokens {
    fn eq(&self, other: &Self) -> bool {
        self.0 == other.0
    }
}

impl PartialEq<&[Token]> for Tokens {
    fn eq(&self, other: &&[Token]) -> bool {
        self.0.as_slice() == *other
    }
}

impl Tokens {
    pub fn into_sequence(self, block_size: u32, salt_hash: Option<SaltHash>) -> TokenBlockSequence {
        TokenBlockSequence::new(self, block_size, salt_hash)
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, thiserror::Error)]
pub enum TokenBlockError {
    #[error("TokenBlock is full")]
    Full,

    #[error("TokenBlock is incomplete")]
    Incomplete,

    #[error("TokenBlock is empty")]
    Empty,

    #[error("TokenBlock has insufficient tokens")]
    InsufficientTokens,
}

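/// A block that is still being filled. Once it holds exactly `block_size` tokens
/// it can be committed into an immutable [`TokenBlock`].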
#[derive(Debug, PartialEq)]
pub struct PartialTokenBlock {
    tokens: Tokens,
    block_size: u32,
    salt_hash: SaltHash,
    parent_sequence_hash: Option<SequenceHash>,
}

impl PartialTokenBlock {
    pub(crate) fn create_sequence_root(block_size: u32, salt_hash: SaltHash) -> Self {
        Self {
            tokens: Tokens::default(),
            block_size,
            salt_hash,
            parent_sequence_hash: None,
        }
    }

    pub(crate) fn push_token(&mut self, token: Token) -> Result<(), TokenBlockError> {
        if self.tokens.0.len() >= self.block_size as usize {
            return Err(TokenBlockError::Full);
        }
        self.tokens.0.push(token);
        Ok(())
    }

    pub(crate) fn push_tokens(&mut self, tokens: Tokens) -> Tokens {
        let remaining_space = self.remaining();

        if remaining_space == 0 {
            return tokens;
        }

        if tokens.0.len() <= remaining_space {
            self.tokens.0.extend(tokens.0);
            Tokens::default()
        } else {
            let (to_add, remaining) = tokens.0.split_at(remaining_space);
            self.tokens.0.extend_from_slice(to_add);
            Tokens(remaining.to_vec())
        }
    }

    pub(crate) fn pop_token(&mut self) -> Result<(), TokenBlockError> {
        if self.tokens.0.is_empty() {
            return Err(TokenBlockError::Empty);
        }
        self.tokens.0.pop();
        Ok(())
    }

    pub(crate) fn pop_tokens(&mut self, count: usize) -> Result<(), TokenBlockError> {
        if self.tokens.0.len() < count {
            return Err(TokenBlockError::InsufficientTokens);
        }
        self.tokens.0.truncate(self.tokens.0.len() - count);
        Ok(())
    }

    pub(crate) fn commit(&mut self) -> Result<TokenBlock, TokenBlockError> {
        if self.tokens.0.len() != self.block_size as usize {
            return Err(TokenBlockError::Incomplete);
        }

        let tokens = std::mem::take(&mut self.tokens);

        let chunk = TokenBlockChunk::new(tokens, self.salt_hash);
        let block = TokenBlock::from_chunk(chunk, self.parent_sequence_hash);

        self.parent_sequence_hash = Some(block.sequence_hash());

        Ok(block)
    }

    pub fn remaining(&self) -> usize {
        (self.block_size as usize).saturating_sub(self.tokens.0.len())
    }

    pub fn len(&self) -> usize {
        self.tokens.0.len()
    }

    pub fn is_empty(&self) -> bool {
        self.tokens.0.is_empty()
    }

    pub fn tokens(&self) -> &Tokens {
        &self.tokens
    }
}

impl std::ops::Deref for PartialTokenBlock {
    type Target = Tokens;

    fn deref(&self) -> &Self::Target {
        &self.tokens
    }
}

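/// Intermediate result pairing a block's tokens with its salted block hash, before
/// the chained sequence hash is known.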
#[derive(Debug)]
struct TokenBlockChunk {
    tokens: Tokens,
    salt_hash: SaltHash,
    block_hash: BlockHash,
}

impl TokenBlockChunk {
    fn new(tokens: Tokens, salt_hash: SaltHash) -> Self {
        let block_hash = compute_hash_v2(cast_slice(&tokens), salt_hash);
        Self {
            tokens,
            salt_hash,
            block_hash,
        }
    }

    fn from_tokens(tokens: &[Token], salt_hash: SaltHash) -> Self {
        let block_hash = compute_hash_v2(cast_slice(tokens), salt_hash);
        Self {
            tokens: tokens.into(),
            salt_hash,
            block_hash,
        }
    }
}

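/// A completed, immutable block of tokens together with its block hash and the
/// sequence hash that chains it to its ancestors.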
#[derive(Debug, Clone, Default, PartialEq)]
pub struct TokenBlock {
    tokens: Tokens,
    salt_hash: SaltHash,
    block_hash: BlockHash,
    sequence_hash: SequenceHash,
    parent_sequence_hash: Option<SequenceHash>,
}

impl TokenBlock {
    pub fn next_block(&self) -> PartialTokenBlock {
        PartialTokenBlock {
            tokens: Tokens::default(),
            block_size: self.tokens.len() as u32,
            salt_hash: self.salt_hash,
            parent_sequence_hash: Some(self.sequence_hash),
        }
    }

    fn from_chunk(chunk: TokenBlockChunk, parent_sequence_hash: Option<SequenceHash>) -> Self {
        let sequence_hash = match parent_sequence_hash {
            Some(parent) => {
                compute_hash_v2(cast_slice(&[parent, chunk.block_hash]), chunk.salt_hash)
            }
            // A block with no parent starts a new sequence; its sequence hash is its block hash.
            None => chunk.block_hash,
        };

        Self {
            tokens: chunk.tokens,
            salt_hash: chunk.salt_hash,
            block_hash: chunk.block_hash,
            sequence_hash,
            parent_sequence_hash,
        }
    }

    pub fn tokens(&self) -> &Tokens {
        &self.tokens
    }

    pub fn salt_hash(&self) -> SaltHash {
        self.salt_hash
    }

    pub fn block_hash(&self) -> BlockHash {
        self.block_hash
    }

    pub fn sequence_hash(&self) -> SequenceHash {
        self.sequence_hash
    }

    pub fn parent_sequence_hash(&self) -> Option<SequenceHash> {
        self.parent_sequence_hash
    }

    pub fn block_size(&self) -> usize {
        self.tokens.0.len()
    }
}

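/// A sequence of tokens partitioned into fixed-size [`TokenBlock`]s plus one trailing
/// [`PartialTokenBlock`]. Each completed block's sequence hash is chained from its
/// parent's sequence hash and its own salted block hash.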
#[derive(Debug, PartialEq)]
pub struct TokenBlockSequence {
    blocks: Vec<TokenBlock>,
    current_block: PartialTokenBlock,
    salt_hash: SaltHash,
    block_size: usize,
}

impl TokenBlockSequence {
    pub fn new(tokens: Tokens, block_size: u32, salt_hash: Option<SaltHash>) -> Self {
        assert!(block_size > 0, "block_size must be greater than 0");
        let salt_hash = salt_hash.unwrap_or(0);
        let (blocks, current_block) = Self::split_tokens(&tokens, block_size, salt_hash);

        Self {
            blocks,
            current_block,
            salt_hash,
            block_size: block_size as usize,
        }
    }

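    /// Appends `tokens` to the sequence, committing blocks as they fill. Returns the
    /// range of indices of newly completed blocks, or `None` if no block was completed.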
    pub fn extend(&mut self, tokens: Tokens) -> Result<Option<Range<usize>>, TokenBlockError> {
        let start_block_index = self.blocks.len();
        let mut tokens_to_append = tokens;

        while !tokens_to_append.is_empty() {
            let remaining_in_current = self.current_block.remaining();

            if remaining_in_current == 0 {
                let new_block = self.current_block.commit()?;
                self.blocks.push(new_block);
            }

            let available_tokens = tokens_to_append;
            tokens_to_append = self.current_block.push_tokens(available_tokens);

            if self.current_block.remaining() == 0 {
                let new_block = self.current_block.commit()?;
                self.blocks.push(new_block);
            }
        }

        let end_block_index = self.blocks.len();
        if start_block_index == end_block_index {
            Ok(None)
        } else {
            Ok(Some(start_block_index..end_block_index))
        }
    }

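    /// Appends a single token. Returns the index of the block completed by this token,
    /// or `None` if the current block is still partial.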
    pub fn append(&mut self, token: Token) -> Result<Option<usize>, TokenBlockError> {
        let tokens = Tokens::from(vec![token]);

        let range_option = self.extend(tokens)?;

        match range_option {
            None => Ok(None),
            Some(range) => {
                assert_eq!(
                    range.len(),
                    1,
                    "Appending a single token completed more than one block, which should be impossible."
                );
                Ok(Some(range.start))
            }
        }
    }

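    /// Truncates the sequence to `len` tokens, re-deriving the partial block and its
    /// parent hash. Truncating to a length greater than or equal to the current length
    /// is a no-op.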
    pub fn truncate(&mut self, len: usize) -> Result<(), TokenBlockError> {
        let current_total_len = self.total_tokens();
        if len >= current_total_len {
            return Ok(());
        }

        // Number of tokens that must be removed from the tail of the sequence.
        let n = current_total_len - len;

        let current_len = self.current_block.len();
        let block_size = self.current_block.block_size.max(1);

        if n <= current_len {
            // The truncation is contained entirely within the partial block.
            self.current_block.pop_tokens(n)?;
        } else {
            // Tokens must also be removed from previously completed blocks.
            let tokens_to_pop_from_blocks = n - current_len;

            let num_blocks_to_affect = tokens_to_pop_from_blocks.div_ceil(block_size as usize);

            if num_blocks_to_affect > self.blocks.len() {
                debug_assert!(
                    false,
                    "Truncate calculation error: trying to pop too many blocks."
                );
                return Err(TokenBlockError::InsufficientTokens);
            }

            // The last affected block is reopened as the new partial block.
            let source_block_index = self.blocks.len() - num_blocks_to_affect;

            let num_full_blocks_completely_popped = num_blocks_to_affect - 1;
            let num_tokens_to_pop_from_source_block = tokens_to_pop_from_blocks
                - num_full_blocks_completely_popped * block_size as usize;
            let num_tokens_to_keep_in_new_partial =
                (block_size as usize).saturating_sub(num_tokens_to_pop_from_source_block);

            let new_partial_tokens = if num_tokens_to_keep_in_new_partial > 0 {
                self.blocks[source_block_index].tokens().as_ref()
                    [..num_tokens_to_keep_in_new_partial]
                    .to_vec()
            } else {
                Vec::new()
            };

            self.blocks.truncate(source_block_index);

            self.current_block.tokens = Tokens(new_partial_tokens);
            self.current_block.parent_sequence_hash =
                self.blocks.last().map(|b| b.sequence_hash());
        }
        Ok(())
    }

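    /// Removes `count` tokens from the end of the sequence. Fails with
    /// [`TokenBlockError::InsufficientTokens`] if the sequence holds fewer than `count` tokens.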
    pub fn unwind(&mut self, count: usize) -> Result<(), TokenBlockError> {
        let current_total_len = self.total_tokens();
        if count > current_total_len {
            return Err(TokenBlockError::InsufficientTokens);
        }

        let len = current_total_len - count;
        self.truncate(len)
    }

    pub fn reset(&mut self) {
        self.blocks.clear();
        self.current_block =
            PartialTokenBlock::create_sequence_root(self.block_size as u32, self.salt_hash);
    }

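    /// Removes and returns the last token in the sequence, or `None` if it is empty.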
    pub fn pop(&mut self) -> Option<Token> {
        let current_total_len = self.total_tokens();
        if current_total_len == 0 {
            return None;
        }

        let last_token = if !self.current_block.tokens.is_empty() {
            *self
                .current_block
                .tokens
                .last()
                .expect("Current block checked for non-empty")
        } else {
            let last_block = self
                .blocks
                .last()
                .expect("Sequence is not empty but has no blocks and empty current block?");
            *last_block
                .tokens()
                .last()
                .expect("Last block cannot be empty")
        };

        match self.truncate(current_total_len - 1) {
            Ok(_) => Some(last_token),
            Err(_) => {
                debug_assert!(
                    false,
                    "truncate failed unexpectedly after checking length in pop"
                );
                None
            }
        }
    }

    pub fn blocks(&self) -> &[TokenBlock] {
        &self.blocks
    }

    pub fn last_complete_block(&self) -> Option<&TokenBlock> {
        self.blocks.last()
    }

    pub fn current_block(&self) -> &PartialTokenBlock {
        &self.current_block
    }

    pub fn into_parts(self) -> (Vec<TokenBlock>, PartialTokenBlock) {
        (self.blocks, self.current_block)
    }

    pub fn block_size(&self) -> usize {
        self.block_size
    }

    pub fn salt_hash(&self) -> SaltHash {
        self.salt_hash
    }

    pub fn total_tokens(&self) -> usize {
        let block_size = self.current_block.block_size as usize;
        (self.blocks.len() * block_size) + self.current_block.len()
    }

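    /// Returns the tokens in `range`, gathered across the completed blocks and the
    /// current partial block. Invalid or out-of-bounds ranges yield an empty [`Tokens`].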
    pub fn tokens_at(&self, range: Range<usize>) -> Tokens {
        let total = self.total_tokens();

        if range.start > range.end || range.end > total {
            return Tokens::default();
        }

        if range.is_empty() {
            return Tokens::default();
        }

        let mut result = Vec::with_capacity(range.len());

        for i in range {
            if i < self.blocks.len() * self.block_size {
                let block_index = i / self.block_size;
                let token_index = i % self.block_size;
                result.push(self.blocks[block_index].tokens()[token_index]);
            } else {
                let current_block_index = i - (self.blocks.len() * self.block_size);
                result.push(self.current_block.tokens()[current_block_index]);
            }
        }

        Tokens::from(result)
    }

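    /// Splits `tokens` into completed blocks (hashed in parallel, then chained
    /// sequentially) and a trailing partial block holding the remainder.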
    pub fn split_tokens(
        tokens: &[Token],
        block_size: u32,
        salt_hash: u64,
    ) -> (Vec<TokenBlock>, PartialTokenBlock) {
        assert!(block_size > 0, "block_size must be greater than 0");
        let chunks: Vec<TokenBlockChunk> = tokens
            .as_ref()
            .par_chunks_exact(block_size as usize)
            .map(|chunk| TokenBlockChunk::from_tokens(chunk, salt_hash))
            .collect();

        let mut result_blocks = Vec::with_capacity(chunks.len());
        let mut last_sequence_hash: Option<SequenceHash> = None;

        for chunk in chunks {
            let new_block = TokenBlock::from_chunk(chunk, last_sequence_hash);
            last_sequence_hash = Some(new_block.sequence_hash());
            result_blocks.push(new_block);
        }

        let remainder = tokens
            .as_ref()
            .chunks_exact(block_size as usize)
            .remainder();

        let current_block = PartialTokenBlock {
            tokens: remainder.into(),
            block_size,
            salt_hash,
            parent_sequence_hash: last_sequence_hash,
        };

        (result_blocks, current_block)
    }

    pub fn from_slice(tokens: &[Token], block_size: u32, salt_hash: Option<SaltHash>) -> Self {
        assert!(block_size > 0, "block_size must be greater than 0");
        let salt_hash = salt_hash.unwrap_or(0);
        let (blocks, current_block) = Self::split_tokens(tokens, block_size, salt_hash);

        Self {
            blocks,
            current_block,
            salt_hash,
            block_size: block_size as usize,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use bytemuck::cast_slice;

    fn create_test_sequence(
        initial_tokens: &[Token],
        block_size: u32,
        salt_hash: Option<SaltHash>,
    ) -> TokenBlockSequence {
        TokenBlockSequence::new(Tokens::from(initial_tokens), block_size, salt_hash)
    }

    const TEST_SALT_HASH: SaltHash = 1337;

    const HASH_1_4: BlockHash = 14643705804678351452;
    const SEQ_HASH_1_4: SequenceHash = HASH_1_4;
    const HASH_5_8: BlockHash = 16777012769546811212;
    const SEQ_HASH_5_8: SequenceHash = 4945711292740353085;
    const HASH_9_12: BlockHash = 483935686894639516;
    const SEQ_HASH_9_12: SequenceHash = 12583592247330656132;

    #[test]
    fn test_validate_hash_constants() {
        let salt = TEST_SALT_HASH;

        let tokens_1_4 = &[1u32, 2, 3, 4];
        let computed_hash_1_4 = compute_hash_v2(cast_slice(tokens_1_4), salt);
        assert_eq!(computed_hash_1_4, HASH_1_4, "Mismatch for HASH_1_4");
        assert_eq!(computed_hash_1_4, SEQ_HASH_1_4, "Mismatch for SEQ_HASH_1_4");

        let tokens_5_8 = &[5u32, 6, 7, 8];
        let computed_hash_5_8 = compute_hash_v2(cast_slice(tokens_5_8), salt);
        assert_eq!(computed_hash_5_8, HASH_5_8, "Mismatch for HASH_5_8");
        let computed_seq_hash_5_8 = compute_hash_v2(cast_slice(&[SEQ_HASH_1_4, HASH_5_8]), salt);
        assert_eq!(
            computed_seq_hash_5_8, SEQ_HASH_5_8,
            "Mismatch for SEQ_HASH_5_8"
        );

        let tokens_9_12 = &[9u32, 10, 11, 12];
        let computed_hash_9_12 = compute_hash_v2(cast_slice(tokens_9_12), salt);
        assert_eq!(computed_hash_9_12, HASH_9_12, "Mismatch for HASH_9_12");
        let computed_seq_hash_9_12 = compute_hash_v2(cast_slice(&[SEQ_HASH_5_8, HASH_9_12]), salt);
        assert_eq!(
            computed_seq_hash_9_12, SEQ_HASH_9_12,
            "Mismatch for SEQ_HASH_9_12"
        );
    }

    #[test]
    fn test_tokens_from() {
        let vec_u32: Vec<u32> = vec![1, 2, 3];
        let tokens_u32: Tokens = vec_u32.clone().into();
        assert_eq!(tokens_u32.0, vec_u32);

        let slice_u32: &[u32] = &[4, 5];
        let tokens_slice_u32: Tokens = slice_u32.into();
        assert_eq!(tokens_slice_u32.0, vec![4, 5]);

        let vec_i32: Vec<i32> = vec![-1, 0, 1];
        let tokens_i32: Tokens = vec_i32.into();
        assert_eq!(tokens_i32.0, vec![u32::MAX, 0, 1]);

        let slice_i32: &[i32] = &[100, 200];
        let tokens_slice_i32: Tokens = slice_i32.into();
        assert_eq!(tokens_slice_i32.0, vec![100, 200]);

        let into_vec: Vec<u32> = tokens_slice_i32.into();
        assert_eq!(into_vec, vec![100, 200]);
    }

    #[test]
    fn test_tokens_equality() {
        let tokens = Tokens::from(vec![1, 2, 3]);
        assert_eq!(tokens, vec![1, 2, 3]);
        assert_eq!(vec![1, 2, 3], tokens);
        assert_eq!(tokens, &[1, 2, 3][..]);
        assert_eq!(&[1, 2, 3][..], tokens);
        assert_eq!(tokens, Tokens::from(vec![1, 2, 3]));
        assert_ne!(tokens, Tokens::from(vec![1, 2, 4]));
    }

    #[test]
    fn test_tokens_deref_asref() {
        let tokens = Tokens::from(vec![10, 20, 30]);

        assert_eq!(tokens.len(), 3);
        assert_eq!(tokens[1], 20);
        let slice: &[Token] = &tokens;
        assert_eq!(slice, &[10, 20, 30]);

        let as_ref_slice: &[Token] = tokens.as_ref();
        assert_eq!(as_ref_slice, &[10, 20, 30]);

        let borrowed_slice: &[Token] = std::borrow::Borrow::borrow(&tokens);
        assert_eq!(borrowed_slice, &[10, 20, 30]);
    }

    #[test]
    fn test_tokens_into_sequence() {
        let tokens = Tokens::from(vec![1, 2, 3, 4, 5]);
        let seq = tokens.into_sequence(3, Some(TEST_SALT_HASH));
        assert_eq!(seq.blocks().len(), 1);
        assert_eq!(seq.blocks[0].tokens().as_ref(), &[1, 2, 3]);
        assert_eq!(seq.current_block().tokens().as_ref(), &[4, 5]);
        assert_eq!(seq.salt_hash(), TEST_SALT_HASH);
    }

    #[test]
    fn test_partial_block_ops() {
        let mut partial = PartialTokenBlock::create_sequence_root(3, TEST_SALT_HASH);
        assert_eq!(partial.len(), 0);
        assert_eq!(partial.remaining(), 3);
        assert!(partial.is_empty());

        assert!(partial.push_token(1).is_ok());
        assert_eq!(partial.len(), 1);
        assert_eq!(partial.remaining(), 2);
        let remaining = partial.push_tokens(Tokens::from(vec![2, 3, 4]));
        assert_eq!(partial.len(), 3);
        assert_eq!(partial.remaining(), 0);
        assert_eq!(remaining.as_ref(), &[4]);
        assert_eq!(partial.tokens().as_ref(), &[1, 2, 3]);

        assert_eq!(partial.push_token(5), Err(TokenBlockError::Full));
        let remaining_full = partial.push_tokens(Tokens::from(vec![5]));
        assert_eq!(remaining_full.as_ref(), &[5]);

        assert!(partial.pop_token().is_ok());
        assert_eq!(partial.len(), 2);
        assert_eq!(partial.tokens().as_ref(), &[1, 2]);
        assert!(partial.pop_tokens(2).is_ok());
        assert!(partial.is_empty());

        assert_eq!(partial.pop_token(), Err(TokenBlockError::Empty));
        assert_eq!(
            partial.pop_tokens(1),
            Err(TokenBlockError::InsufficientTokens)
        );

        assert!(partial.push_token(10).is_ok());
        assert_eq!(partial.commit(), Err(TokenBlockError::Incomplete));

        assert!(partial.push_token(11).is_ok());
        assert!(partial.push_token(12).is_ok());
        assert_eq!(partial.len(), 3);
        let commit_result = partial.commit();
        assert!(commit_result.is_ok());
        let committed_block = commit_result.unwrap();
        assert_eq!(committed_block.tokens().as_ref(), &[10, 11, 12]);

        assert!(partial.is_empty());
        assert_eq!(
            partial.parent_sequence_hash,
            Some(committed_block.sequence_hash())
        );
        assert_eq!(partial.block_size, 3);
    }

    #[test]
    fn test_token_block_creation_and_hashes() {
        let salt = TEST_SALT_HASH;
        let tokens1 = Tokens::from(vec![1, 2, 3, 4]);
        let chunk1 = TokenBlockChunk::new(tokens1.clone(), salt);
        let block1 = TokenBlock::from_chunk(chunk1, None);

        assert_eq!(block1.tokens(), &tokens1);
        assert_eq!(block1.salt_hash(), salt);
        assert_eq!(block1.parent_sequence_hash(), None);
        assert_eq!(block1.block_hash(), HASH_1_4);
        assert_eq!(block1.sequence_hash(), SEQ_HASH_1_4);

        // Chaining to the wrong parent (block1's parent, which is None) must not
        // reproduce the expected sequence hash.
        let tokens2 = Tokens::from(vec![5, 6, 7, 8]);
        let chunk2 = TokenBlockChunk::new(tokens2.clone(), salt);
        let block2 = TokenBlock::from_chunk(chunk2, block1.parent_sequence_hash());
        assert_ne!(block2.sequence_hash(), SEQ_HASH_5_8);

        let chunk2_correct = TokenBlockChunk::new(tokens2.clone(), salt);
        let block2_correct = TokenBlock::from_chunk(chunk2_correct, Some(block1.sequence_hash()));

        assert_eq!(block2_correct.tokens(), &tokens2);
        assert_eq!(block2_correct.salt_hash(), salt);
        assert_eq!(
            block2_correct.parent_sequence_hash(),
            Some(block1.sequence_hash())
        );
        assert_eq!(block2_correct.block_hash(), HASH_5_8);
        assert_eq!(block2_correct.sequence_hash(), SEQ_HASH_5_8);
    }

    #[test]
    fn test_new_sequence() {
        let seq_empty = create_test_sequence(&[], 4, Some(TEST_SALT_HASH));
        assert!(seq_empty.blocks().is_empty());
        assert!(seq_empty.current_block().is_empty());
        assert_eq!(seq_empty.total_tokens(), 0);
        assert_eq!(seq_empty.salt_hash(), TEST_SALT_HASH);
        assert_eq!(seq_empty.current_block().parent_sequence_hash, None);

        let seq_partial = create_test_sequence(&[1, 2], 4, Some(TEST_SALT_HASH));
        assert!(seq_partial.blocks().is_empty());
        assert_eq!(seq_partial.current_block().tokens().as_ref(), &[1, 2]);
        assert_eq!(seq_partial.total_tokens(), 2);
        assert_eq!(seq_partial.current_block().parent_sequence_hash, None);

        let seq_one_block = create_test_sequence(&[1, 2, 3, 4], 4, Some(TEST_SALT_HASH));
        assert_eq!(seq_one_block.blocks().len(), 1);
        assert!(seq_one_block.current_block().is_empty());
        assert_eq!(seq_one_block.total_tokens(), 4);
        assert_eq!(seq_one_block.blocks[0].tokens().as_ref(), &[1, 2, 3, 4]);
        assert_eq!(seq_one_block.blocks[0].sequence_hash(), SEQ_HASH_1_4);
        assert_eq!(
            seq_one_block.current_block().parent_sequence_hash,
            Some(SEQ_HASH_1_4)
        );

        let seq_multi = create_test_sequence(&[1, 2, 3, 4, 5, 6, 7, 8, 9], 4, Some(TEST_SALT_HASH));
        assert_eq!(seq_multi.blocks().len(), 2);
        assert_eq!(seq_multi.current_block().tokens().as_ref(), &[9]);
        assert_eq!(seq_multi.total_tokens(), 9);
        assert_eq!(seq_multi.blocks[0].sequence_hash(), SEQ_HASH_1_4);
        assert_eq!(seq_multi.blocks[1].sequence_hash(), SEQ_HASH_5_8);
        assert_eq!(
            seq_multi.current_block().parent_sequence_hash,
            Some(SEQ_HASH_5_8)
        );

        // Ranges within blocks, spanning blocks, reaching into the partial block,
        // empty ranges, and out-of-bounds ranges.
        assert_eq!(seq_multi.tokens_at(0..4).as_ref(), &[1, 2, 3, 4]);
        assert_eq!(seq_multi.tokens_at(4..8).as_ref(), &[5, 6, 7, 8]);
        assert_eq!(seq_multi.tokens_at(8..9).as_ref(), &[9]);
        assert_eq!(seq_multi.tokens_at(2..6).as_ref(), &[3, 4, 5, 6]);
        assert_eq!(seq_multi.tokens_at(6..9).as_ref(), &[7, 8, 9]);
        assert_eq!(seq_multi.tokens_at(5..5).as_ref(), &[0u32; 0]);
        assert_eq!(seq_multi.tokens_at(10..15).as_ref(), &[0u32; 0]);

        // Omitting the salt defaults it to 0, which changes the block hashes.
        let seq_no_salt = create_test_sequence(&[1, 2, 3, 4, 5], 4, None);
        assert_eq!(seq_no_salt.salt_hash(), 0);
        assert_eq!(seq_no_salt.blocks().len(), 1);
        assert_ne!(seq_no_salt.blocks[0].block_hash(), HASH_1_4);
        assert_eq!(seq_no_salt.current_block().tokens().as_ref(), &[5]);
    }

    #[test]
    #[should_panic]
    fn test_new_sequence_zero_block_size() {
        let _ = create_test_sequence(&[1], 0, None);
    }

    #[test]
    fn test_append_single_token() {
        let mut sequence =
            create_test_sequence(&[1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 4, Some(TEST_SALT_HASH));
        assert_eq!(sequence.blocks().len(), 2);
        assert_eq!(sequence.current_block().tokens.len(), 2);
        assert_eq!(sequence.current_block().tokens, vec![9, 10]);
        assert_eq!(
            sequence.current_block().parent_sequence_hash,
            Some(SEQ_HASH_5_8)
        );

        let completed_idx = sequence.append(11).unwrap();
        assert_eq!(completed_idx, None);
        assert_eq!(sequence.blocks().len(), 2);
        assert_eq!(sequence.current_block().tokens.as_ref(), &[9, 10, 11]);

        let completed_idx = sequence.append(12).unwrap();
        assert_eq!(completed_idx, Some(2));
        assert_eq!(sequence.blocks().len(), 3);
        assert_eq!(sequence.current_block.tokens.as_ref(), &[0u32; 0]);
        assert_eq!(sequence.current_block.remaining(), 4);
        assert_eq!(
            sequence.current_block().parent_sequence_hash,
            Some(SEQ_HASH_9_12)
        );

        let completed_idx_13 = sequence.append(13).unwrap();
        assert_eq!(completed_idx_13, None);
        assert_eq!(sequence.blocks().len(), 3);
        assert_eq!(sequence.blocks[2].tokens().as_ref(), &[9, 10, 11, 12]);
        assert_eq!(sequence.blocks[2].sequence_hash(), SEQ_HASH_9_12);
        assert_eq!(sequence.current_block.tokens.as_ref(), &[13]);
        assert_eq!(sequence.current_block.remaining(), 3);
        assert_eq!(
            sequence.current_block.parent_sequence_hash,
            Some(SEQ_HASH_9_12)
        );
    }

    #[test]
    fn test_extend() {
        let block_size = 4;
        let salt_hash = Some(TEST_SALT_HASH);

        let mut seq1 = create_test_sequence(&[], block_size, salt_hash);
        let tokens1 = Tokens::from(vec![1, 2]);
        let completed1 = seq1.extend(tokens1).unwrap();
        assert_eq!(completed1, None);
        assert_eq!(seq1.blocks.len(), 0);
        assert_eq!(seq1.current_block.tokens.as_ref(), &[1, 2]);
        assert_eq!(seq1.current_block.remaining(), 2);
        assert_eq!(seq1.current_block.parent_sequence_hash, None);

        let mut seq2 = create_test_sequence(&[], block_size, salt_hash);
        let tokens2 = Tokens::from(vec![1, 2, 3, 4]);
        let completed2 = seq2.extend(tokens2).unwrap();
        assert_eq!(completed2, Some(0..1));
        assert_eq!(seq2.blocks.len(), 1);
        assert_eq!(seq2.current_block.tokens.as_ref(), &[0u32; 0]);
        assert_eq!(seq2.current_block.remaining(), 4);
        assert_eq!(seq2.current_block.parent_sequence_hash, Some(SEQ_HASH_1_4));

        let mut seq3 = create_test_sequence(&[], block_size, salt_hash);
        let tokens3 = Tokens::from(vec![1, 2, 3, 4, 5, 6]);
        let completed3 = seq3.extend(tokens3).unwrap();
        assert_eq!(completed3, Some(0..1));
        assert_eq!(seq3.blocks.len(), 1);
        assert_eq!(seq3.current_block.tokens.as_ref(), &[5, 6]);
        assert_eq!(seq3.blocks[0].tokens().as_ref(), &[1, 2, 3, 4]);
        assert_eq!(seq3.current_block.parent_sequence_hash, Some(SEQ_HASH_1_4));
        assert_eq!(seq3.current_block.remaining(), 2);

        let mut seq4 = create_test_sequence(&[], block_size, salt_hash);
        let tokens4 = Tokens::from(vec![1, 2, 3, 4, 5, 6, 7, 8]);
        let completed4 = seq4.extend(tokens4).unwrap();
        assert_eq!(completed4, Some(0..2));
        assert_eq!(seq4.blocks.len(), 2);
        assert_eq!(seq4.current_block.tokens.as_ref(), &[0u32; 0]);
        assert_eq!(seq4.current_block.remaining(), 4);
        assert_eq!(seq4.blocks[0].tokens().as_ref(), &[1, 2, 3, 4]);
        assert_eq!(seq4.blocks[0].sequence_hash(), SEQ_HASH_1_4);
        assert_eq!(seq4.current_block.parent_sequence_hash, Some(SEQ_HASH_5_8));

        let mut seq5 = create_test_sequence(&[], block_size, salt_hash);
        let tokens5a = Tokens::from(vec![1, 2]);
        let completed5a = seq5.extend(tokens5a).unwrap();
        assert_eq!(completed5a, None);
        assert_eq!(seq5.blocks.len(), 0);
        assert_eq!(seq5.current_block.tokens.as_ref(), &[1, 2]);

        let tokens5b = Tokens::from(vec![3, 4, 5]);
        let completed5b = seq5.extend(tokens5b).unwrap();
        assert_eq!(completed5b, Some(0..1));
        assert_eq!(seq5.blocks.len(), 1);
        assert_eq!(seq5.current_block.tokens.as_ref(), &[5]);
        assert_eq!(seq5.blocks[0].tokens().as_ref(), &[1, 2, 3, 4]);
        assert_eq!(seq5.current_block.parent_sequence_hash, Some(SEQ_HASH_1_4));
        assert_eq!(seq5.current_block.remaining(), 3);

        let tokens5c = Tokens::from(vec![6, 7, 8, 9, 10]);
        let completed5c = seq5.extend(tokens5c).unwrap();
        assert_eq!(completed5c, Some(1..2));
        assert_eq!(seq5.blocks.len(), 2);
        assert_eq!(seq5.current_block.tokens.as_ref(), &[9, 10]);
        assert_eq!(seq5.blocks[1].tokens().as_ref(), &[5, 6, 7, 8]);
        assert_eq!(seq5.current_block.parent_sequence_hash, Some(SEQ_HASH_5_8));
        assert_eq!(seq5.current_block.remaining(), 2);

        let mut seq6 = create_test_sequence(&[1], block_size, salt_hash);
        let completed6 = seq6.extend(Tokens::default()).unwrap();
        assert_eq!(completed6, None);
        assert_eq!(seq6.blocks.len(), 0);
        assert_eq!(seq6.current_block.tokens.as_ref(), &[1]);
        assert_eq!(seq6.total_tokens(), 1);

        let mut seq7 = create_test_sequence(&[1, 2], block_size, salt_hash);
        let tokens7 = Tokens::from(vec![3, 4]);
        let completed7 = seq7.extend(tokens7).unwrap();
        assert_eq!(completed7, Some(0..1));
        assert_eq!(seq7.blocks.len(), 1);
        assert_eq!(seq7.current_block.tokens.as_ref(), &[0u32; 0]);
        assert_eq!(seq7.current_block.remaining(), 4);
        assert_eq!(seq7.total_tokens(), 4);
        assert_eq!(seq7.current_block.parent_sequence_hash, Some(SEQ_HASH_1_4));

        assert_eq!(seq7.tokens_at(0..2).as_ref(), &[1, 2]);
        assert_eq!(seq7.tokens_at(1..3).as_ref(), &[2, 3]);
        assert_eq!(seq7.tokens_at(0..4).as_ref(), &[1, 2, 3, 4]);
        assert_eq!(seq7.tokens_at(2..2).as_ref(), &[0u32; 0]);
    }

    #[test]
    fn test_truncate() {
        let block_size = 4;
        let salt_hash = Some(TEST_SALT_HASH);
        let initial_tokens = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

        let mut seq1 = create_test_sequence(initial_tokens, block_size, salt_hash);
        assert!(seq1.truncate(9).is_ok());
        assert_eq!(seq1.total_tokens(), 9);
        assert_eq!(seq1.blocks().len(), 2);
        assert_eq!(seq1.current_block().tokens.as_ref(), &[9]);
        assert_eq!(
            seq1.current_block().parent_sequence_hash,
            Some(SEQ_HASH_5_8)
        );

        let mut seq2 = create_test_sequence(initial_tokens, block_size, salt_hash);
        assert!(seq2.truncate(8).is_ok());
        assert_eq!(seq2.total_tokens(), 8);
        assert_eq!(seq2.blocks().len(), 2);
        assert!(seq2.current_block().tokens.is_empty());
        assert_eq!(
            seq2.current_block().parent_sequence_hash,
            Some(SEQ_HASH_5_8)
        );

        let mut seq3 = create_test_sequence(initial_tokens, block_size, salt_hash);
        assert!(seq3.truncate(7).is_ok());
        assert_eq!(seq3.total_tokens(), 7);
        assert_eq!(seq3.blocks().len(), 1);
        assert_eq!(seq3.current_block().tokens.as_ref(), &[5, 6, 7]);
        assert_eq!(
            seq3.current_block().parent_sequence_hash,
            Some(SEQ_HASH_1_4)
        );
        assert_eq!(seq3.blocks()[0].tokens().as_ref(), &[1, 2, 3, 4]);

        let mut seq4 = create_test_sequence(initial_tokens, block_size, salt_hash);
        assert!(seq4.truncate(4).is_ok());
        assert_eq!(seq4.total_tokens(), 4);
        assert_eq!(seq4.blocks().len(), 1);
        assert!(seq4.current_block().tokens.is_empty());
        assert_eq!(
            seq4.current_block().parent_sequence_hash,
            Some(SEQ_HASH_1_4)
        );
        assert_eq!(seq4.blocks()[0].tokens().as_ref(), &[1, 2, 3, 4]);

        let mut seq5 = create_test_sequence(initial_tokens, block_size, salt_hash);
        assert!(seq5.truncate(3).is_ok());
        assert_eq!(seq5.total_tokens(), 3);
        assert!(seq5.blocks().is_empty());
        assert_eq!(seq5.current_block().tokens.as_ref(), &[1, 2, 3]);
        assert_eq!(seq5.current_block().parent_sequence_hash, None);

        let mut seq6 = create_test_sequence(initial_tokens, block_size, salt_hash);
        assert!(seq6.truncate(0).is_ok());
        assert_eq!(seq6.total_tokens(), 0);
        assert!(seq6.blocks().is_empty());
        assert!(seq6.current_block().tokens.is_empty());
        assert_eq!(seq6.current_block().parent_sequence_hash, None);

        // Truncating to a length greater than or equal to the current length is a no-op.
        let mut seq7 = create_test_sequence(initial_tokens, block_size, salt_hash);
        let original_state = (seq7.blocks.clone(), seq7.current_block.tokens.clone());
        assert!(seq7.truncate(11).is_ok());
        assert_eq!(seq7.total_tokens(), 10);
        assert_eq!(seq7.blocks, original_state.0);
        assert_eq!(seq7.current_block.tokens, original_state.1);

        let mut seq8 = create_test_sequence(initial_tokens, block_size, salt_hash);
        let original_state = (seq8.blocks.clone(), seq8.current_block.tokens.clone());
        assert!(seq8.truncate(10).is_ok());
        assert_eq!(seq8.total_tokens(), 10);
        assert_eq!(seq8.blocks, original_state.0);
        assert_eq!(seq8.current_block.tokens, original_state.1);

        let mut seq9 = create_test_sequence(&[], block_size, salt_hash);
        assert!(seq9.truncate(0).is_ok());
        assert_eq!(seq9.total_tokens(), 0);
        assert!(seq9.blocks().is_empty());
        assert!(seq9.current_block().tokens.is_empty());

        // Truncation on a sequence whose current block is empty (exact block boundary).
        let tokens10 = &[1, 2, 3, 4, 5, 6, 7, 8];
        let mut seq10 = create_test_sequence(tokens10, block_size, salt_hash);
        assert_eq!(seq10.total_tokens(), 8);
        assert!(seq10.current_block().is_empty());
        assert!(seq10.truncate(4).is_ok());
        assert_eq!(seq10.total_tokens(), 4);
        assert_eq!(seq10.blocks().len(), 1);
        assert!(seq10.current_block().tokens.is_empty());
        assert_eq!(
            seq10.current_block().parent_sequence_hash,
            Some(SEQ_HASH_1_4)
        );

        let tokens11 = &[1, 2, 3, 4, 5, 6, 7, 8];
        let mut seq11 = create_test_sequence(tokens11, block_size, salt_hash);
        assert!(seq11.truncate(3).is_ok());
        assert_eq!(seq11.total_tokens(), 3);
        assert!(seq11.blocks().is_empty());
        assert_eq!(seq11.current_block().tokens.as_ref(), &[1, 2, 3]);
        assert_eq!(seq11.current_block().parent_sequence_hash, None);
    }

    #[test]
    fn test_unwind() {
        let block_size = 4;
        let salt_hash = Some(TEST_SALT_HASH);
        let initial_tokens = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

        let mut seq = create_test_sequence(initial_tokens, block_size, salt_hash);
        assert!(seq.unwind(0).is_ok());
        assert_eq!(seq.total_tokens(), 10);

        let mut seq = create_test_sequence(initial_tokens, block_size, salt_hash);
        assert!(seq.unwind(1).is_ok());
        assert_eq!(seq.total_tokens(), 9);
        assert_eq!(seq.current_block.tokens.as_ref(), &[9]);

        let mut seq = create_test_sequence(initial_tokens, block_size, salt_hash);
        assert!(seq.unwind(3).is_ok());
        assert_eq!(seq.total_tokens(), 7);
        assert_eq!(seq.blocks.len(), 1);
        assert_eq!(seq.current_block.tokens.as_ref(), &[5, 6, 7]);

        let mut seq = create_test_sequence(initial_tokens, block_size, salt_hash);
        assert!(seq.unwind(10).is_ok());
        assert_eq!(seq.total_tokens(), 0);
        assert!(seq.blocks.is_empty());
        assert!(seq.current_block.is_empty());

        let mut seq = create_test_sequence(initial_tokens, block_size, salt_hash);
        assert_eq!(seq.unwind(11), Err(TokenBlockError::InsufficientTokens));
        assert_eq!(seq.total_tokens(), 10);

        let mut seq_empty = create_test_sequence(&[], block_size, salt_hash);
        assert_eq!(
            seq_empty.unwind(1),
            Err(TokenBlockError::InsufficientTokens)
        );
    }

    #[test]
    fn test_pop() {
        let block_size = 4;
        let salt_hash = Some(TEST_SALT_HASH);
        let initial_tokens = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

        let mut seq = create_test_sequence(initial_tokens, block_size, salt_hash);

        assert_eq!(seq.pop(), Some(10));
        assert_eq!(seq.total_tokens(), 9);
        assert_eq!(seq.current_block.tokens.as_ref(), &[9]);
        assert_eq!(seq.blocks.len(), 2);

        assert_eq!(seq.pop(), Some(9));
        assert_eq!(seq.total_tokens(), 8);
        assert!(seq.current_block.is_empty());
        assert_eq!(seq.blocks.len(), 2);
        assert_eq!(seq.current_block.parent_sequence_hash, Some(SEQ_HASH_5_8));

        assert_eq!(seq.pop(), Some(8));
        assert_eq!(seq.total_tokens(), 7);
        assert_eq!(seq.current_block.tokens.as_ref(), &[5, 6, 7]);
        assert_eq!(seq.blocks.len(), 1);
        assert_eq!(seq.current_block.parent_sequence_hash, Some(SEQ_HASH_1_4));

        assert_eq!(seq.pop(), Some(7));
        assert_eq!(seq.pop(), Some(6));
        assert_eq!(seq.pop(), Some(5));
        assert_eq!(seq.total_tokens(), 4);
        assert!(seq.current_block.is_empty());
        assert_eq!(seq.blocks.len(), 1);
        assert_eq!(seq.current_block.parent_sequence_hash, Some(SEQ_HASH_1_4));

        assert_eq!(seq.pop(), Some(4));
        assert_eq!(seq.total_tokens(), 3);
        assert_eq!(seq.current_block.tokens.as_ref(), &[1, 2, 3]);
        assert!(seq.blocks.is_empty());
        assert_eq!(seq.current_block.parent_sequence_hash, None);

        assert_eq!(seq.pop(), Some(3));
        assert_eq!(seq.pop(), Some(2));
        assert_eq!(seq.pop(), Some(1));
        assert_eq!(seq.total_tokens(), 0);
        assert!(seq.current_block.is_empty());
        assert!(seq.blocks.is_empty());

        assert_eq!(seq.pop(), None);
        assert_eq!(seq.total_tokens(), 0);
    }

    #[test]
    fn test_total_tokens() {
        let block_size = 3;
        let salt_hash = Some(TEST_SALT_HASH);

        let mut seq = create_test_sequence(&[], block_size, salt_hash);
        assert_eq!(seq.total_tokens(), 0);

        seq.extend(Tokens::from(vec![1, 2])).unwrap();
        assert_eq!(seq.total_tokens(), 2);

        seq.append(3).unwrap();
        assert_eq!(seq.total_tokens(), 3);

        seq.extend(Tokens::from(vec![4, 5, 6, 7])).unwrap();
        assert_eq!(seq.total_tokens(), 7);

        seq.pop().unwrap();
        assert_eq!(seq.total_tokens(), 6);

        seq.truncate(4).unwrap();
        assert_eq!(seq.total_tokens(), 4);

        seq.unwind(2).unwrap();
        assert_eq!(seq.total_tokens(), 2);
    }

    #[test]
    fn test_push_tokens_partial_block() {
        let mut partial = PartialTokenBlock::create_sequence_root(4, 1337);

        let tokens = Tokens(vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);

        let remaining = partial.push_tokens(tokens);
        assert_eq!(partial.tokens.len(), 4);
        assert_eq!(remaining.len(), 6);
    }
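
    // Minimal additional sketches exercising `reset`, `TokenBlock::next_block`,
    // `last_complete_block`, and `into_parts`, which are not covered elsewhere.
    // Expected values follow directly from the implementations above.
    #[test]
    fn test_reset_reuses_sequence() {
        let mut seq = create_test_sequence(&[1, 2, 3, 4, 5], 4, Some(TEST_SALT_HASH));
        assert_eq!(seq.total_tokens(), 5);

        seq.reset();
        assert_eq!(seq.total_tokens(), 0);
        assert!(seq.blocks().is_empty());
        assert!(seq.current_block().is_empty());
        assert_eq!(seq.current_block().parent_sequence_hash, None);
        assert_eq!(seq.block_size(), 4);
        assert_eq!(seq.salt_hash(), TEST_SALT_HASH);

        // The sequence is reusable after a reset and hashes chain from scratch again.
        let completed = seq.extend(Tokens::from(vec![1, 2, 3, 4])).unwrap();
        assert_eq!(completed, Some(0..1));
        assert_eq!(seq.blocks()[0].sequence_hash(), SEQ_HASH_1_4);
    }

    #[test]
    fn test_next_block_and_into_parts() {
        let seq = create_test_sequence(&[1, 2, 3, 4, 5], 4, Some(TEST_SALT_HASH));

        let last = seq.last_complete_block().expect("one complete block");
        assert_eq!(last.block_size(), 4);
        assert_eq!(last.sequence_hash(), SEQ_HASH_1_4);

        // `next_block` starts an empty partial block chained to this block.
        let next = last.next_block();
        assert!(next.is_empty());
        assert_eq!(next.remaining(), 4);
        assert_eq!(next.parent_sequence_hash, Some(last.sequence_hash()));

        let (blocks, current) = seq.into_parts();
        assert_eq!(blocks.len(), 1);
        assert_eq!(current.tokens().as_ref(), &[5]);
    }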
}