1use crate::format::apply_item_delta;
2use crate::format::create_item_delta;
3use crate::format::item_data_to_uuid;
4use crate::format::key;
5use crate::format::key_to_id;
6use crate::format::key_to_raw_type_id;
7use crate::format::uuid_to_item_data;
8use crate::format::DeltaDifferingSizes;
9use crate::format::DeltaHeader;
10use crate::format::Item;
11use crate::format::RawItem;
12use crate::format::SnapHeader;
13use crate::format::TypeId;
14use crate::format::Warning;
15use crate::format::OFFSET_EXTENDED_TYPE_ID;
16use crate::format::TYPE_ID_EX;
17use crate::to_usize;
18use crate::ReadInt;
19use libtw2_buffer::CapacityError;
20use libtw2_common::num::Cast;
21use libtw2_gamenet_snap as msg;
22use libtw2_gamenet_snap::SnapMsg;
23use libtw2_gamenet_snap::MAX_SNAPSHOT_PACKSIZE;
24use libtw2_packer::IntUnpacker;
25use libtw2_packer::Packer;
26use libtw2_packer::UnexpectedEnd;
27use libtw2_packer::Unpacker;
28use libtw2_warn::wrap;
29use libtw2_warn::Ignore;
30use libtw2_warn::Warn;
31use std::cmp;
32use std::collections::btree_map;
33use std::collections::BTreeMap;
34use std::collections::BTreeSet;
35use std::fmt;
36use std::iter;
37use std::mem;
38use std::ops;
39use uuid::Uuid;
40
/// Maximum size of a serialized snapshot, in bytes.
pub const MAX_SNAPSHOT_SIZE: usize = 64 * 1024;
/// Maximum number of items a snapshot may contain.
pub const MAX_SNAPSHOT_ITEMS: usize = 1024;
43
/// Errors that can occur while reading snapshots or reading/applying deltas.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Error {
    /// Input ended before a complete value could be read.
    UnexpectedEnd,
    /// An integer could not be decoded by the packer.
    IntOutOfRange,
    /// Failed to read the list of deleted items of a delta.
    DeletedItemsUnpacking,
    /// Failed to read an item diff of a delta.
    ItemDiffsUnpacking,
    /// An item's type ID does not fit into 16 bits.
    TypeIdRange,
    /// An item's ID does not fit into 16 bits.
    IdRange,
    /// An item diff declared a negative payload size.
    NegativeSize,
    /// The delta's accumulated diff data exceeds the supported size.
    TooLongDiff,
    /// The snapshot would exceed `MAX_SNAPSHOT_SIZE` when serialized.
    TooLongSnap,
    /// The snapshot would exceed `MAX_SNAPSHOT_ITEMS`.
    TooManyItems,
    /// A diff's size differs from the size of the item it applies to.
    DeltaDifferingSizes,
    /// Failed to read the item offset table of a snapshot.
    OffsetsUnpacking,
    /// An item offset is negative, unaligned, out of order or out of range.
    InvalidOffset,
    /// The snapshot's item data is shorter than the header declares.
    ItemsUnpacking,
    /// Two items share the same `(type_id, id)` key.
    DuplicateKey,
    /// Two extended type registry items declare the same UUID.
    DuplicateUuidType,
    /// An extended type registry item does not contain a valid UUID.
    InvalidUuidType,
    /// An item uses an extended type ID without a registry entry.
    MissingUuidType,
}
65
/// Errors that can occur while building a snapshot item by item.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum BuilderError {
    /// An item with the same `(type_id, id)` already exists.
    DuplicateKey,
    /// Adding the item would exceed `MAX_SNAPSHOT_SIZE`.
    TooLongSnap,
    /// Adding the item would exceed `MAX_SNAPSHOT_ITEMS`.
    TooManyItems,
}
72
73impl From<BuilderError> for Error {
74 fn from(err: BuilderError) -> Error {
75 match err {
76 BuilderError::DuplicateKey => Error::DuplicateKey,
77 BuilderError::TooLongSnap => Error::TooLongSnap,
78 BuilderError::TooManyItems => Error::TooManyItems,
79 }
80 }
81}
82
83impl From<DeltaDifferingSizes> for Error {
84 fn from(DeltaDifferingSizes: DeltaDifferingSizes) -> Error {
85 Error::DeltaDifferingSizes
86 }
87}
88
impl From<libtw2_packer::IntOutOfRange> for Error {
    /// Maps an out-of-range integer reported by the packer onto
    /// [`Error::IntOutOfRange`].
    fn from(_: libtw2_packer::IntOutOfRange) -> Error {
        Error::IntOutOfRange
    }
}
94
95impl From<UnexpectedEnd> for Error {
96 fn from(UnexpectedEnd: UnexpectedEnd) -> Error {
97 Error::UnexpectedEnd
98 }
99}
100
/// A snapshot in raw form: items keyed by `(raw_type_id, id)` with no
/// interpretation of extended (UUID-based) type IDs.
#[derive(Clone, Default)]
pub struct RawSnap {
    /// Maps item keys (see `format::key`) to the range of the item's
    /// payload within `buf`.
    offsets: BTreeMap<i32, ops::Range<u32>>,
    /// Backing storage holding all item payloads contiguously.
    buf: Vec<i32>,
}
106
impl RawSnap {
    /// Creates a snapshot without any items.
    pub fn empty() -> RawSnap {
        Default::default()
    }
    // Resets to the empty state while keeping allocations for reuse.
    fn clear(&mut self) {
        self.offsets.clear();
        self.buf.clear();
    }
    // Resolves a stored payload range into a slice of the backing buffer.
    fn item_from_offset(&self, offset: ops::Range<u32>) -> &[i32] {
        &self.buf[to_usize(offset)]
    }
    /// Looks up the payload of the item with the given raw type ID and ID.
    ///
    /// Returns `None` if no such item exists.
    pub fn item(&self, raw_type_id: u16, id: u16) -> Option<&[i32]> {
        self.offsets
            .get(&key(raw_type_id, id))
            .map(|o| &self.buf[to_usize(o.clone())])
    }
    /// Iterates over all items, in key order.
    pub fn items(&self) -> RawItems<'_> {
        RawItems {
            snap: self,
            iter: self.offsets.iter(),
        }
    }
    // Serialized size in bytes: 2 header ints (`data_size`, `num_items`),
    // one offset int and one key int per item, plus the item payloads.
    fn serialized_ints_size(num_items: usize, num_item_data_i32s: usize) -> usize {
        mem::size_of::<i32>() * (2 + num_items + num_items + num_item_data_i32s)
    }
    // Appends `size` zeroed ints to `buf` for a new item and records its
    // range in the vacant map entry, enforcing both snapshot limits.
    fn prepare_item_vacant<'a>(
        num_items: usize,
        entry: btree_map::VacantEntry<'a, i32, ops::Range<u32>>,
        buf: &mut Vec<i32>,
        size: usize,
    ) -> Result<&'a mut ops::Range<u32>, BuilderError> {
        let offset = buf.len();
        if num_items + 1 > MAX_SNAPSHOT_ITEMS {
            return Err(BuilderError::TooManyItems);
        }
        if RawSnap::serialized_ints_size(num_items + 1, offset + size) > MAX_SNAPSHOT_SIZE {
            return Err(BuilderError::TooLongSnap);
        }
        let start = offset.assert_u32();
        let end = (offset + size).assert_u32();
        buf.extend(iter::repeat(0).take(size));
        Ok(entry.insert(start..end))
    }
    // Adds a new item and returns its (zero-initialized) payload slice.
    // Fails if an item with the same key already exists.
    fn add_item_uninitialized(
        &mut self,
        raw_type_id: u16,
        id: u16,
        size: usize,
    ) -> Result<&mut [i32], BuilderError> {
        let num_items = self.offsets.len();
        let offset = match self.offsets.entry(key(raw_type_id, id)) {
            btree_map::Entry::Occupied(..) => return Err(BuilderError::DuplicateKey),
            btree_map::Entry::Vacant(v) => {
                RawSnap::prepare_item_vacant(num_items, v, &mut self.buf, size)?
            }
        }
        .clone();
        Ok(&mut self.buf[to_usize(offset)])
    }
    // Adds a new item with the given payload.
    fn add_item(&mut self, raw_type_id: u16, id: u16, data: &[i32]) -> Result<(), BuilderError> {
        self.add_item_uninitialized(raw_type_id, id, data.len())?
            .copy_from_slice(data);
        Ok(())
    }
    // Like `add_item_uninitialized`, but on a duplicate key returns the
    // existing item's payload (with its existing size) instead of failing.
    fn prepare_item(
        &mut self,
        raw_type_id: u16,
        id: u16,
        size: usize,
    ) -> Result<&mut [i32], Error> {
        let num_items = self.offsets.len();
        let offset = match self.offsets.entry(key(raw_type_id, id)) {
            btree_map::Entry::Occupied(o) => o.into_mut(),
            btree_map::Entry::Vacant(v) => {
                RawSnap::prepare_item_vacant(num_items, v, &mut self.buf, size)?
            }
        }
        .clone();
        Ok(&mut self.buf[to_usize(offset)])
    }
    /// Reads a snapshot from packed bytes, using `buf` as scratch space
    /// for the decoded ints.
    pub fn read<W: Warn<Warning>>(
        &mut self,
        warn: &mut W,
        buf: &mut Vec<i32>,
        data: &[u8],
    ) -> Result<(), Error> {
        self.clear();
        buf.clear();

        let mut unpacker = Unpacker::new(data);
        while !unpacker.is_empty() {
            match unpacker.read_int(wrap(warn)) {
                Ok(int) => buf.push(int),
                Err(UnexpectedEnd) => {
                    // A trailing partial int is tolerated with a warning.
                    warn.warn(Warning::ExcessSnapData);
                    break;
                }
            }
        }

        self.read_from_ints(warn, &buf)
    }
    /// Reads a snapshot from its decoded int representation: header,
    /// offset table, then item data (each item: key int + payload).
    pub fn read_from_ints<W: Warn<Warning>>(
        &mut self,
        warn: &mut W,
        data: &[i32],
    ) -> Result<(), Error> {
        self.clear();

        let mut unpacker = IntUnpacker::new(data);
        let header = SnapHeader::decode_obj(&mut unpacker)?;
        let data = unpacker.as_slice();

        let offsets_len = header.num_items.assert_usize();
        if data.len() < offsets_len {
            return Err(Error::OffsetsUnpacking);
        }
        // `data_size` is in bytes and must cover whole i32s.
        if header.data_size % 4 != 0 {
            return Err(Error::InvalidOffset);
        }
        let items_len = (header.data_size / 4).assert_usize();
        match (offsets_len + items_len).cmp(&data.len()) {
            cmp::Ordering::Less => warn.warn(Warning::ExcessSnapData),
            cmp::Ordering::Equal => {}
            cmp::Ordering::Greater => return Err(Error::ItemsUnpacking),
        }

        let (offsets, item_data) = data.split_at(offsets_len);
        let item_data = &item_data[..items_len];

        // Walk consecutive offset pairs; a virtual final offset of
        // `items_len` closes the last item.
        let mut offsets = offsets.iter();
        let mut prev_offset = None;
        loop {
            let offset = offsets.next().copied();
            if let Some(offset) = offset {
                if offset < 0 {
                    return Err(Error::InvalidOffset);
                }
                // Offsets are byte offsets and must be i32-aligned.
                if offset % 4 != 0 {
                    return Err(Error::InvalidOffset);
                }
            }
            let finished = offset.is_none();
            let offset = offset.map(|o| o.assert_usize() / 4).unwrap_or(items_len);

            if let Some(prev_offset) = prev_offset {
                // Offsets must be strictly increasing and in range.
                if offset <= prev_offset {
                    return Err(Error::InvalidOffset);
                }
                if offset > items_len {
                    return Err(Error::InvalidOffset);
                }
                // First int of each item is its key; the rest is payload.
                let raw_type_id = key_to_raw_type_id(item_data[prev_offset]);
                let id = key_to_id(item_data[prev_offset]);
                self.add_item(raw_type_id, id, &item_data[prev_offset + 1..offset])?;
            } else if offset != 0 {
                // The first item must start at the beginning of the data.
                return Err(Error::InvalidOffset);
            }

            prev_offset = Some(offset);

            if finished {
                break;
            }
        }
        Ok(())
    }
    /// Reconstructs a snapshot by applying `delta` to the snapshot `from`.
    pub fn read_with_delta<W>(
        &mut self,
        warn: &mut W,
        from: &RawSnap,
        delta: &Delta,
    ) -> Result<(), Error>
    where
        W: Warn<Warning>,
    {
        self.clear();

        // Copy over all items of `from` that the delta does not delete.
        let mut num_deletions = 0;
        for item in from.items() {
            if !delta.deleted_items.contains(&item.key()) {
                let out = self.prepare_item(item.raw_type_id, item.id, item.data.len())?;
                out.copy_from_slice(item.data);
            } else {
                num_deletions += 1;
            }
        }
        if num_deletions != delta.deleted_items.len() {
            // Some deletions referenced items that `from` did not contain.
            warn.warn(Warning::UnknownDelete);
        }

        // Apply each item diff, either updating an existing item or
        // creating a new one.
        for (&key, offset) in &delta.updated_items {
            let raw_type_id = key_to_raw_type_id(key);
            let id = key_to_id(key);
            let diff = &delta.buf[to_usize(offset.clone())];
            let out = self.prepare_item(raw_type_id, id, diff.len())?;
            let in_ = from.item(raw_type_id, id);

            apply_item_delta(in_, diff, out)?;
        }
        Ok(())
    }
    // Serializes the snapshot through `write_int`: header, offset table,
    // then for each item its key followed by its payload.
    fn write_impl<F: FnMut(i32) -> Result<(), CapacityError>>(
        &self,
        buf: &mut Vec<i32>,
        mut write_int: F,
    ) -> Result<(), CapacityError> {
        assert!(self.offsets.len() <= MAX_SNAPSHOT_ITEMS);
        let mut written = 0;
        let mut write_int = |i| {
            written += mem::size_of::<i32>();
            write_int(i)
        };
        // `buf` is reused as scratch space for the sorted key list.
        let keys = buf;
        keys.clear();
        keys.extend(self.offsets.keys().cloned());
        // Sort by the unsigned interpretation of the key.
        keys.sort_unstable_by_key(|&k| k as u32);
        // `data_size` counts payload ints plus one key int per item,
        // in bytes.
        let data_size = self
            .buf
            .len()
            .checked_add(self.offsets.len())
            .expect("snap size overflow")
            .checked_mul(mem::size_of::<i32>())
            .expect("snap size overflow")
            .assert_i32();
        write_int(data_size)?;
        let num_items = self.offsets.len().assert_i32();
        write_int(num_items)?;

        // Offset table: byte offset of each item's key within the data.
        let mut offset = 0;
        for &key in &*keys {
            write_int(offset)?;
            let key_offset = self.offsets[&key].clone();
            // Each item occupies its payload plus one key int.
            offset = offset
                .checked_add(
                    (key_offset.end - key_offset.start + 1)
                        .usize()
                        .checked_mul(mem::size_of::<i32>())
                        .expect("item size overflow")
                        .assert_i32(),
                )
                .expect("offset overflow");
        }
        for &key in &*keys {
            write_int(key)?;
            for &i in &self.buf[to_usize(self.offsets[&key].clone())] {
                write_int(i)?;
            }
        }
        assert!(written <= MAX_SNAPSHOT_SIZE);
        Ok(())
    }
    /// Serializes the snapshot into the given packer, using `buf` as
    /// scratch space, and returns the written bytes.
    pub fn write<'d, 's>(
        &self,
        buf: &mut Vec<i32>,
        mut p: Packer<'d, 's>,
    ) -> Result<&'d [u8], CapacityError> {
        self.write_impl(buf, |int| p.write_int(int))?;
        Ok(p.written())
    }
    /// Serializes the snapshot into `result` as raw ints and returns the
    /// written prefix.
    pub fn write_to_ints<'a>(
        &self,
        buf: &mut Vec<i32>,
        result: &'a mut [i32],
    ) -> Result<&'a [i32], CapacityError> {
        let mut iter = result.iter_mut();
        self.write_impl(buf, |int| {
            *iter.next().ok_or(CapacityError)? = int;
            Ok(())
        })?;
        let remaining = iter.len();
        let len = result.len() - remaining;
        Ok(&result[..len])
    }
    /// Computes the snapshot checksum: the wrapping sum of all payload
    /// ints.
    pub fn crc(&self) -> i32 {
        self.buf.iter().fold(0, |s, &a| s.wrapping_add(a))
    }
    /// Clears the snapshot and turns it into a builder, reusing its
    /// allocations.
    pub fn recycle(mut self) -> RawBuilder {
        self.clear();
        RawBuilder { snap: self }
    }
}
400
401fn read_int_err<R: ReadInt, W: Warn<Warning>>(
402 reader: &mut R,
403 w: &mut W,
404 e: Error,
405) -> Result<i32, Error> {
406 reader.read_int(w).map_err(|_| e)
407}
408
/// Iterator over the items of a [`RawSnap`], in key order.
pub struct RawItems<'a> {
    snap: &'a RawSnap,
    iter: btree_map::Iter<'a, i32, ops::Range<u32>>,
}
413
414impl<'a> Iterator for RawItems<'a> {
415 type Item = RawItem<'a>;
416 fn next(&mut self) -> Option<RawItem<'a>> {
417 self.iter
418 .next()
419 .map(|(&k, o)| RawItem::from_key(k, self.snap.item_from_offset(o.clone())))
420 }
421 fn size_hint(&self) -> (usize, Option<usize>) {
422 self.iter.size_hint()
423 }
424}
425
impl<'a> ExactSizeIterator for RawItems<'a> {
    // The underlying map iterator knows its exact remaining length.
    fn len(&self) -> usize {
        self.iter.len()
    }
}
431
432impl fmt::Debug for RawSnap {
433 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
434 f.debug_map()
435 .entries(self.items().map(
436 |RawItem {
437 raw_type_id,
438 id,
439 data,
440 }| ((raw_type_id, id), data),
441 ))
442 .finish()
443 }
444}
445
/// A snapshot with support for extended, UUID-identified item types.
#[derive(Clone, Default)]
pub struct Snap {
    raw: RawSnap,
    /// Maps each extended type's UUID to the 16-bit raw type ID it is
    /// registered under in `raw` via a `TYPE_ID_EX` registry item.
    extended_types: BTreeMap<Uuid, u16>,
}
451
452impl Snap {
453 pub fn empty() -> Snap {
454 Default::default()
455 }
456 fn raw_type_id(&self, type_id: TypeId) -> Option<u16> {
457 match type_id {
458 TypeId::Ordinal(ordinal) => {
459 assert!(0 < ordinal && ordinal < OFFSET_EXTENDED_TYPE_ID);
460 Some(ordinal)
461 }
462 TypeId::Uuid(uuid) => self.extended_types.get(&uuid).copied(),
463 }
464 }
465 fn type_id(&self, raw_type_id: u16) -> Option<TypeId> {
466 if raw_type_id == TYPE_ID_EX {
467 None
468 } else if raw_type_id < OFFSET_EXTENDED_TYPE_ID {
469 Some(TypeId::Ordinal(raw_type_id))
470 } else {
471 Some(TypeId::Uuid(item_data_to_uuid(
473 &mut Ignore,
474 self.raw.item(TYPE_ID_EX, raw_type_id).unwrap(),
475 )?))
476 }
477 }
478 pub fn item(&self, type_id: TypeId, id: u16) -> Option<&[i32]> {
479 self.raw.item(self.raw_type_id(type_id)?, id)
480 }
481 pub fn items(&self) -> Items<'_> {
482 let raw = self.raw.items();
483 let remaining = self.raw.items().len() - self.extended_types.len();
484 Items {
485 raw,
486 snap: self,
487 remaining,
488 }
489 }
490 fn build_from_raw<W: Warn<Warning>>(&mut self, warn: &mut W) -> Result<(), Error> {
491 self.extended_types.clear();
492 let mut prev_checked_raw_type_id = None;
493 for (&item_key, offset) in &self.raw.offsets {
494 let raw_type_id = key_to_raw_type_id(item_key);
495 if raw_type_id == TYPE_ID_EX {
496 let item_data = self.raw.item_from_offset(offset.clone());
497 let uuid = item_data_to_uuid(warn, item_data).ok_or(Error::InvalidUuidType)?;
498 if self.extended_types.insert(uuid, raw_type_id).is_some() {
499 return Err(Error::DuplicateUuidType);
500 }
501 } else if raw_type_id >= OFFSET_EXTENDED_TYPE_ID {
502 if Some(raw_type_id) == prev_checked_raw_type_id {
503 continue;
504 }
505 if self
506 .raw
507 .offsets
508 .get(&key(TYPE_ID_EX, raw_type_id))
509 .is_none()
510 {
511 return Err(Error::MissingUuidType);
512 }
513 prev_checked_raw_type_id = Some(raw_type_id);
514 }
515 }
516 Ok(())
517 }
518 pub fn read<W: Warn<Warning>>(
519 &mut self,
520 warn: &mut W,
521 buf: &mut Vec<i32>,
522 data: &[u8],
523 ) -> Result<(), Error> {
524 self.raw.read(warn, buf, data)?;
525 self.build_from_raw(warn)?;
526 Ok(())
527 }
528 pub fn read_from_ints<W: Warn<Warning>>(
529 &mut self,
530 warn: &mut W,
531 data: &[i32],
532 ) -> Result<(), Error> {
533 self.raw.read_from_ints(warn, data)?;
534 self.build_from_raw(warn)?;
535 Ok(())
536 }
537 pub fn read_with_delta<W>(
538 &mut self,
539 warn: &mut W,
540 from: &Snap,
541 delta: &Delta,
542 ) -> Result<(), Error>
543 where
544 W: Warn<Warning>,
545 {
546 self.raw.read_with_delta(warn, &from.raw, delta)?;
547 self.build_from_raw(warn)?;
548 Ok(())
549 }
550 pub fn write<'d, 's>(
551 &self,
552 buf: &mut Vec<i32>,
553 p: Packer<'d, 's>,
554 ) -> Result<&'d [u8], CapacityError> {
555 self.raw.write(buf, p)
556 }
557 pub fn write_to_ints<'a>(
558 &self,
559 buf: &mut Vec<i32>,
560 result: &'a mut [i32],
561 ) -> Result<&'a [i32], CapacityError> {
562 self.raw.write_to_ints(buf, result)
563 }
564 pub fn crc(&self) -> i32 {
565 self.raw.crc()
566 }
567 pub fn recycle(mut self) -> Builder {
572 let mut next_type_id = OFFSET_EXTENDED_TYPE_ID;
573 for &key in self.raw.offsets.keys() {
574 let raw_type_id = key_to_raw_type_id(key);
575 let id = key_to_id(key);
576 const _: () = assert!(TYPE_ID_EX == 0);
577 if raw_type_id != TYPE_ID_EX {
578 break;
579 }
580 if id < next_type_id + 256 {
582 next_type_id = id + 1;
583 }
584 }
585 self.raw.clear();
586 for (&uuid, &raw_type_id) in &self.extended_types {
587 self.raw
589 .add_item(TYPE_ID_EX, raw_type_id, &uuid_to_item_data(uuid))
590 .unwrap();
591 }
592 Builder {
593 snap: self,
594 next_type_id,
595 }
596 }
597}
598
/// Iterator over the user-visible items of a [`Snap`], skipping the
/// internal `TYPE_ID_EX` registry items.
pub struct Items<'a> {
    raw: RawItems<'a>,
    snap: &'a Snap,
    // Number of items still to be yielded.
    remaining: usize,
}
604
605impl<'a> Iterator for Items<'a> {
606 type Item = Item<'a>;
607 fn next(&mut self) -> Option<Item<'a>> {
608 loop {
609 match self.raw.next() {
610 None => return None,
611 Some(RawItem {
612 raw_type_id,
613 id,
614 data,
615 }) => {
616 if let Some(type_id) = self.snap.type_id(raw_type_id) {
617 self.remaining -= 1;
618 return Some(Item { type_id, id, data });
619 } else {
620 continue;
622 }
623 }
624 }
625 }
626 }
627 fn size_hint(&self) -> (usize, Option<usize>) {
628 (self.remaining, Some(self.remaining))
629 }
630}
631
impl<'a> ExactSizeIterator for Items<'a> {
    // `remaining` is maintained by `next`, so the length is exact.
    fn len(&self) -> usize {
        self.remaining
    }
}
637
638impl fmt::Debug for Snap {
639 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
640 f.debug_map()
641 .entries(
642 self.items()
643 .map(|Item { type_id, id, data }| ((type_id, id), data)),
644 )
645 .finish()
646 }
647}
648
/// A delta between two snapshots: deleted item keys plus per-item diffs.
#[derive(Clone, Default)]
pub struct Delta {
    /// Keys of items present in the old snapshot but not in the new one.
    deleted_items: BTreeSet<i32>,
    /// Maps item keys to the range of their diff data within `buf`.
    updated_items: BTreeMap<i32, ops::Range<u32>>,
    /// Backing storage for all item diffs, contiguously.
    buf: Vec<i32>,
}
655
impl Delta {
    /// Creates an empty delta.
    pub fn new() -> Delta {
        Default::default()
    }
    /// Resets the delta to the empty state, keeping allocations.
    pub fn clear(&mut self) {
        self.deleted_items.clear();
        self.updated_items.clear();
        self.buf.clear();
    }
    // Reserves a zeroed diff buffer of `size` ints for the given item and
    // records it in `updated_items`. Panics on a duplicate key.
    fn prepare_update_item(&mut self, raw_type_id: u16, id: u16, size: usize) -> &mut [i32] {
        let key = key(raw_type_id, id);

        let offset = self.buf.len();
        let start = offset.assert_u32();
        let end = (offset + size).assert_u32();
        self.buf.extend(iter::repeat(0).take(size));
        assert!(self.updated_items.insert(key, start..end).is_none());
        &mut self.buf[to_usize(start..end)]
    }
    /// Computes the delta that transforms `from` into `to`.
    pub fn create(&mut self, from: &Snap, to: &Snap) {
        self.create_raw(&from.raw, &to.raw)
    }
    /// Computes the delta that transforms `from` into `to`, operating on
    /// raw snapshots.
    pub fn create_raw(&mut self, from: &RawSnap, to: &RawSnap) {
        self.clear();
        // Items present in `from` but absent in `to` become deletions.
        for RawItem {
            raw_type_id, id, ..
        } in from.items()
        {
            if to.item(raw_type_id, id).is_none() {
                assert!(self.deleted_items.insert(key(raw_type_id, id)));
            }
        }
        // Every item of `to` gets a diff against its `from` counterpart
        // (or against nothing, for newly created items).
        for RawItem {
            raw_type_id,
            id,
            data,
        } in to.items()
        {
            let from_data = from.item(raw_type_id, id);
            let out_delta = self.prepare_update_item(raw_type_id, id, data.len());
            create_item_delta(from_data, data, out_delta)
                .expect("item sizes can't be mismatched for self-created snapshots");
        }
    }

    // Serializes the delta through `write_int`: header, deleted keys,
    // then per updated item its type ID, ID, optional explicit size (only
    // when `object_size` doesn't know the type) and diff data.
    fn write_impl<O, F>(&self, mut object_size: O, mut write_int: F) -> Result<(), CapacityError>
    where
        O: FnMut(u16) -> Option<u32>,
        F: FnMut(i32) -> Result<(), CapacityError>,
    {
        {
            let header = DeltaHeader {
                num_deleted_items: self.deleted_items.len().assert_i32(),
                num_updated_items: self.updated_items.len().assert_i32(),
            };
            for int in header.encode_obj() {
                write_int(int)?;
            }
        }
        for &key in &self.deleted_items {
            write_int(key)?;
        }
        for (&key, range) in &self.updated_items {
            let data = &self.buf[to_usize(range.clone())];
            let raw_type_id = key_to_raw_type_id(key);
            let id = key_to_id(key);
            write_int(raw_type_id.i32())?;
            write_int(id.i32())?;
            match object_size(raw_type_id) {
                // Known types never write a size; the stored diff must
                // match the type's fixed size.
                Some(size) => assert!(size.usize() == data.len()),
                None => write_int(data.len().assert_i32())?,
            }
            for &d in data {
                write_int(d)?;
            }
        }
        Ok(())
    }
    /// Serializes the delta into the given packer and returns the written
    /// bytes.
    pub fn write<'d, 's, O>(
        &self,
        object_size: O,
        p: Packer<'d, 's>,
    ) -> Result<&'d [u8], CapacityError>
    where
        O: FnMut(u16) -> Option<u32>,
    {
        let mut p = p;
        self.write_impl(object_size, |int| p.write_int(int))?;
        Ok(p.written())
    }
    /// Serializes the delta into `result` as raw ints and returns the
    /// written prefix.
    pub fn write_to_ints<'a, O>(
        &self,
        object_size: O,
        result: &'a mut [i32],
    ) -> Result<&'a [i32], CapacityError>
    where
        O: FnMut(u16) -> Option<u32>,
    {
        let mut iter = result.iter_mut();
        self.write_impl(object_size, |int| {
            *iter.next().ok_or(CapacityError)? = int;
            Ok(())
        })?;
        let remaining = iter.len();
        let len = result.len() - remaining;
        Ok(&result[..len])
    }

    // Deserializes a delta from `p`, mirroring `write_impl`. Malformed
    // counts or ranges are hard errors; duplicates and count mismatches
    // only produce warnings.
    fn read_impl<W, O, R>(&mut self, warn: &mut W, object_size: O, p: &mut R) -> Result<(), Error>
    where
        W: Warn<Warning>,
        O: FnMut(u16) -> Option<u32>,
        R: ReadInt,
    {
        self.clear();

        let mut object_size = object_size;

        let header = DeltaHeader::decode_impl(warn, p)?;

        for _ in 0..header.num_deleted_items {
            self.deleted_items
                .insert(read_int_err(p, warn, Error::DeletedItemsUnpacking)?);
        }
        if header.num_deleted_items.assert_usize() != self.deleted_items.len() {
            warn.warn(Warning::DuplicateDelete);
        }

        let mut num_updates = 0;

        // Item diffs run until the input is exhausted; the header count
        // is only cross-checked afterwards.
        while !p.is_empty() {
            let raw_type_id = read_int_err(p, warn, Error::ItemDiffsUnpacking)?;
            let id = read_int_err(p, warn, Error::ItemDiffsUnpacking)?;

            let raw_type_id = raw_type_id.try_u16().ok_or(Error::TypeIdRange)?;
            let id = id.try_u16().ok_or(Error::IdRange)?;

            // Size comes from the type table when known, otherwise it is
            // read explicitly from the stream.
            let size = match object_size(raw_type_id) {
                Some(s) => s,
                None => {
                    let s = read_int_err(p, warn, Error::ItemDiffsUnpacking)?;
                    s.try_u32().ok_or(Error::NegativeSize)?
                }
            };
            let start = self.buf.len().try_u32().ok_or(Error::TooLongDiff)?;
            let end = start.checked_add(size).ok_or(Error::TooLongDiff)?;
            for _ in 0..size {
                self.buf
                    .push(read_int_err(p, warn, Error::ItemDiffsUnpacking)?);
            }

            // A later diff for the same key replaces the earlier one.
            if self
                .updated_items
                .insert(key(raw_type_id, id), start..end)
                .is_some()
            {
                warn.warn(Warning::DuplicateUpdate);
            }

            if self.deleted_items.contains(&key(raw_type_id, id)) {
                warn.warn(Warning::DeleteUpdate);
            }
            num_updates += 1;
        }

        if num_updates != header.num_updated_items {
            warn.warn(Warning::NumUpdatedItems);
        }

        Ok(())
    }
    /// Reads a delta from its raw int representation.
    pub fn read_from_ints<W, O>(
        &mut self,
        warn: &mut W,
        object_size: O,
        p: &mut IntUnpacker,
    ) -> Result<(), Error>
    where
        W: Warn<Warning>,
        O: FnMut(u16) -> Option<u32>,
    {
        self.read_impl(warn, object_size, p)
    }

    /// Reads a delta from packed bytes.
    pub fn read<W, O>(
        &mut self,
        warn: &mut W,
        object_size: O,
        p: &mut Unpacker,
    ) -> Result<(), Error>
    where
        W: Warn<Warning>,
        O: FnMut(u16) -> Option<u32>,
    {
        self.read_impl(warn, object_size, p)
    }
}
855
/// Incrementally builds a [`RawSnap`] item by item.
#[derive(Default)]
pub struct RawBuilder {
    snap: RawSnap,
}
860
impl RawBuilder {
    /// Creates a builder for an empty raw snapshot.
    pub fn new() -> RawBuilder {
        Default::default()
    }
    /// Adds an item to the snapshot under construction.
    ///
    /// Fails on a duplicate `(type_id, id)` key or if a snapshot limit
    /// would be exceeded.
    pub fn add_item(&mut self, type_id: u16, id: u16, data: &[i32]) -> Result<(), BuilderError> {
        self.snap.add_item(type_id, id, data)
    }
    /// Finalizes the builder and returns the snapshot.
    pub fn finish(self) -> RawSnap {
        self.snap
    }
}
872
/// Incrementally builds a [`Snap`], assigning raw type IDs to extended
/// (UUID) types on demand.
pub struct Builder {
    snap: Snap,
    // Next raw type ID to hand out for a newly seen UUID type.
    next_type_id: u16,
}
877
impl Default for Builder {
    /// Creates a builder for an empty snapshot; extended type IDs are
    /// assigned starting at `OFFSET_EXTENDED_TYPE_ID`.
    fn default() -> Builder {
        Builder {
            snap: Default::default(),
            next_type_id: OFFSET_EXTENDED_TYPE_ID,
        }
    }
}
886
impl Builder {
    /// Creates a builder for an empty snapshot.
    pub fn new() -> Builder {
        Default::default()
    }
    /// Adds an item to the snapshot under construction.
    ///
    /// For a UUID type seen for the first time, a fresh raw type ID is
    /// allocated and a `TYPE_ID_EX` registry item is added for it.
    ///
    /// Fails on a duplicate `(type_id, id)` key or if a snapshot limit
    /// would be exceeded. Panics if an ordinal type ID is outside the
    /// ordinal range, or if raw type IDs for extended types run out.
    pub fn add_item(&mut self, type_id: TypeId, id: u16, data: &[i32]) -> Result<(), BuilderError> {
        let raw_type_id = match type_id {
            TypeId::Ordinal(ordinal) => {
                assert!(0 < ordinal && ordinal < OFFSET_EXTENDED_TYPE_ID);
                ordinal
            }
            TypeId::Uuid(uuid) => {
                match self.snap.extended_types.entry(uuid) {
                    btree_map::Entry::Occupied(o) => *o.get(),
                    btree_map::Entry::Vacant(v) => {
                        let raw_type_id = self.next_type_id;
                        assert!(OFFSET_EXTENDED_TYPE_ID <= raw_type_id, "invalid type ID");
                        assert!(raw_type_id < 0x8000, "invalid type ID");
                        // Register the UUID before bumping state so that
                        // a failure here leaves the builder unchanged.
                        self.snap.raw.add_item(
                            TYPE_ID_EX,
                            raw_type_id,
                            &uuid_to_item_data(uuid),
                        )?;
                        self.next_type_id += 1;
                        v.insert(raw_type_id);
                        raw_type_id
                    }
                }
            }
        };
        self.snap.raw.add_item(raw_type_id, id, data)
    }
    /// Finalizes the builder and returns the snapshot.
    pub fn finish(self) -> Snap {
        self.snap
    }
}
924
925pub fn delta_chunks(tick: i32, delta_tick: i32, data: &[u8], crc: i32) -> DeltaChunks<'_> {
926 DeltaChunks {
927 tick: tick,
928 delta_tick: tick - delta_tick,
929 crc: crc,
930 cur_part: if !data.is_empty() { 0 } else { -1 },
931 num_parts: ((data.len() + MAX_SNAPSHOT_PACKSIZE as usize - 1)
932 / MAX_SNAPSHOT_PACKSIZE as usize)
933 .assert_i32(),
934 data: data,
935 }
936}
937
/// Iterator over the wire messages of a chunked snapshot delta.
pub struct DeltaChunks<'a> {
    tick: i32,
    delta_tick: i32,
    crc: i32,
    // Index of the next part to emit; -1 signals the empty-payload case.
    cur_part: i32,
    num_parts: i32,
    data: &'a [u8],
}
946
impl<'a> Iterator for DeltaChunks<'a> {
    type Item = SnapMsg<'a>;
    fn next(&mut self) -> Option<SnapMsg<'a>> {
        // `cur_part` starts at -1 for an empty payload, so exactly one
        // `SnapEmpty` is produced before reaching `num_parts` (0).
        if self.cur_part == self.num_parts {
            return None;
        }
        let result = if self.num_parts == 0 {
            SnapMsg::SnapEmpty(msg::SnapEmpty {
                tick: self.tick,
                delta_tick: self.delta_tick,
            })
        } else if self.num_parts == 1 {
            // Single-part deltas use the dedicated unfragmented message.
            SnapMsg::SnapSingle(msg::SnapSingle {
                tick: self.tick,
                delta_tick: self.delta_tick,
                crc: self.crc,
                data: self.data,
            })
        } else {
            // Multi-part: slice out the current fixed-size chunk; the
            // last chunk may be shorter.
            let index = self.cur_part.assert_usize();
            let start = MAX_SNAPSHOT_PACKSIZE as usize * index;
            let end = cmp::min(
                MAX_SNAPSHOT_PACKSIZE as usize * (index + 1),
                self.data.len(),
            );
            SnapMsg::Snap(msg::Snap {
                tick: self.tick,
                delta_tick: self.delta_tick,
                num_parts: self.num_parts,
                part: self.cur_part,
                crc: self.crc,
                data: &self.data[start..end],
            })
        };
        self.cur_part += 1;
        Some(result)
    }
}
985
#[cfg(test)]
mod test {
    use super::Builder;
    use super::Item;
    use uuid::Uuid;

    // End-to-end check of the builder path: add a single UUID-typed item,
    // then verify both direct lookup and iteration see it.
    #[test]
    fn smoke_test() {
        let uuid: Uuid = "1a3fcc94-1e53-461e-912e-21200882024b".parse().unwrap();

        let mut builder = Builder::new();
        builder
            .add_item(uuid.into(), 1337, &[0x1234, 0x567890ab])
            .unwrap();
        let snap = builder.finish();

        assert_eq!(
            snap.item(uuid.into(), 1337),
            Some(&[0x1234, 0x567890ab][..])
        );
        // Iteration must skip the internal type registry item and yield
        // only the user-visible item.
        let item = Item {
            type_id: uuid.into(),
            id: 1337,
            data: &[0x1234, 0x567890ab],
        };
        assert_eq!(snap.items().collect::<Vec<_>>(), &[item][..]);
    }
}