// nydus_storage/meta/batch.rs

// Copyright (C) 2022 Alibaba Cloud. All rights reserved.
//
// SPDX-License-Identifier: Apache-2.0

use std::io::Result;
use std::mem::size_of;
use std::slice;

use crate::meta::chunk_info_v2::BlobChunkInfoV2Ondisk;
use crate::meta::BlobMetaChunkInfo;

/// Context information to support batch chunks.
///
/// Each instance describes one whole batch chunk that contains the merged
/// data of multiple small chunks. The struct is persisted on disk, so all
/// multi-byte fields are kept in little-endian order and converted by the
/// accessors below.
#[repr(C, packed)]
#[derive(Default)]
pub struct BatchInflateContext {
    /// Compressed size of the whole batch chunk data.
    compressed_size: u32,
    /// Uncompressed size of the whole batch chunk data without 4K aligned.
    uncompressed_batch_size: u32,
    __reserved1: u64,
    __reserved2: u64,
    __reserved3: u64,
    __reserved4: u64,
}

impl BatchInflateContext {
    /// Get compressed size of the whole batch chunk data.
    pub fn compressed_size(&self) -> u32 {
        // Copy the packed field out by value before converting; taking a
        // reference to a packed field would be unaligned.
        let raw = self.compressed_size;
        u32::from_le(raw)
    }

    /// Set compressed size of the whole batch chunk data.
    pub fn set_compressed_size(&mut self, compressed_size: u32) {
        self.compressed_size = compressed_size.to_le();
    }

    /// Set uncompressed size of the whole batch chunk data.
    pub fn set_uncompressed_batch_size(&mut self, uncompressed_batch_size: u32) {
        self.uncompressed_batch_size = uncompressed_batch_size.to_le();
    }

    /// Get uncompressed size of the whole batch chunk data.
    pub fn uncompressed_batch_size(&self) -> u32 {
        let raw = self.uncompressed_batch_size;
        u32::from_le(raw)
    }

    /// View the on-disk representation as an immutable byte slice.
    pub fn as_slice(&self) -> &[u8] {
        // SAFETY: `Self` is `#[repr(C, packed)]`, so it has no padding bytes
        // and every byte is initialized. The pointer is valid for reads of
        // `size_of::<BatchInflateContext>()` bytes for the lifetime of
        // `&self`, and `u8` has alignment 1.
        unsafe {
            slice::from_raw_parts(
                (self as *const BatchInflateContext).cast::<u8>(),
                size_of::<BatchInflateContext>(),
            )
        }
    }
}
58
/// Struct to generate [BatchInflateContext] objects for batch chunks.
pub struct BatchContextGenerator {
    /// Buffer accumulating the to-be-dumped chunk data for chunk merging.
    chunk_data_buf: Vec<u8>,
    /// All `BatchInflateContext` objects generated for the current blob.
    contexts: Vec<BatchInflateContext>,
}
66
67impl BatchContextGenerator {
68    /// Get the buffer of to be dumped chunk data for batch chunk.
69    pub fn chunk_data_buf(&self) -> &Vec<u8> {
70        &self.chunk_data_buf
71    }
72
73    /// Check whether the chunk data buffer is empty.
74    pub fn chunk_data_buf_is_empty(&self) -> bool {
75        self.chunk_data_buf.is_empty()
76    }
77
78    /// Get the length of chunk data buffer.
79    pub fn chunk_data_buf_len(&self) -> usize {
80        self.chunk_data_buf.len()
81    }
82
83    /// Append new chunk data to the chunk data buffer.
84    pub fn append_chunk_data_buf(&mut self, chunk_data: &[u8]) {
85        self.chunk_data_buf.extend_from_slice(chunk_data);
86    }
87
88    /// Clear the chunk data buffer.
89    pub fn clear_chunk_data_buf(&mut self) {
90        self.chunk_data_buf.clear();
91    }
92
93    /// Add a batch context for a dumped batch chunk.
94    pub fn add_context(&mut self, compressed_size: u32) {
95        let ctx = BatchInflateContext {
96            compressed_size: u32::to_le(compressed_size),
97            uncompressed_batch_size: u32::to_le(self.chunk_data_buf_len() as u32),
98            __reserved1: u64::to_le(0),
99            __reserved2: u64::to_le(0),
100            __reserved3: u64::to_le(0),
101            __reserved4: u64::to_le(0),
102        };
103        self.contexts.push(ctx);
104    }
105
106    /// Create a new instance of [BatchInflateContext].
107    pub fn new(batch_size: u32) -> Result<Self> {
108        Ok(Self {
109            chunk_data_buf: Vec::with_capacity(batch_size as usize),
110            contexts: Vec::with_capacity(10240),
111        })
112    }
113
114    /// Generate and return a v2 chunk info struct.
115    pub fn generate_chunk_info(
116        &mut self,
117        compressed_offset: u64,
118        uncompressed_offset: u64,
119        uncompressed_size: u32,
120        encrypted: bool,
121    ) -> Result<BlobChunkInfoV2Ondisk> {
122        let mut chunk = BlobChunkInfoV2Ondisk::default();
123        chunk.set_compressed_offset(compressed_offset);
124        chunk.set_compressed_size(0);
125        chunk.set_uncompressed_offset(uncompressed_offset);
126        chunk.set_uncompressed_size(uncompressed_size);
127        chunk.set_batch(true);
128        chunk.set_batch_index(self.contexts.len() as u32);
129        chunk.set_uncompressed_offset_in_batch_buf(self.chunk_data_buf_len() as u32);
130        chunk.set_compressed(true);
131        chunk.set_encrypted(encrypted);
132        chunk.set_has_crc32(false); // Batch chunk cannot store CRC32 due to data field conflict
133
134        Ok(chunk)
135    }
136
137    /// Convert all the batch chunk information to a u8 vector.
138    pub fn to_vec(&self) -> Result<(Vec<u8>, u32)> {
139        let mut data = Vec::new();
140
141        for ctx in &self.contexts {
142            data.extend_from_slice(ctx.as_slice());
143        }
144
145        Ok((data, self.contexts.len() as u32))
146    }
147}
148
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_batch_inflate_context() {
        let mut ctx = BatchInflateContext::default();
        ctx.set_compressed_size(0x20);
        assert_eq!(ctx.compressed_size(), 0x20);
        ctx.set_uncompressed_batch_size(0x30);
        assert_eq!(ctx.uncompressed_batch_size(), 0x30);
        let mut v = [0u8; 40];
        v[0] = 0x20;
        v[4] = 0x30;
        assert_eq!(ctx.as_slice(), v);
    }

    #[test]
    fn test_batch_context_generator() {
        let mut generator = BatchContextGenerator::new(0x100000).unwrap();
        assert!(generator.chunk_data_buf_is_empty());
        assert_eq!(generator.chunk_data_buf_len(), 0);

        generator.append_chunk_data_buf(&[1, 2, 3, 4]);
        assert!(!generator.chunk_data_buf_is_empty());
        assert_eq!(generator.chunk_data_buf_len(), 4);

        generator.add_context(4);

        // Parse the serialized bytes directly instead of reinterpreting the
        // buffer via `Vec::from_raw_parts`: the previous approach violated
        // the `from_raw_parts` safety contract (the pointer was not
        // allocated by a `Vec<BatchInflateContext>`), which is undefined
        // behavior even when wrapped in `ManuallyDrop`.
        let (ctx_data, ctx_count) = generator.to_vec().unwrap();
        assert_eq!(ctx_count, 1);
        assert_eq!(ctx_data.len(), size_of::<BatchInflateContext>());
        let compressed_size = u32::from_le_bytes(ctx_data[0..4].try_into().unwrap());
        let uncompressed_batch_size = u32::from_le_bytes(ctx_data[4..8].try_into().unwrap());
        assert_eq!(compressed_size, 4);
        assert_eq!(uncompressed_batch_size, 4);

        generator.clear_chunk_data_buf();
        assert!(generator.chunk_data_buf_is_empty());
        assert_eq!(generator.chunk_data_buf_len(), 0);

        let chunk_info = generator.generate_chunk_info(0, 0, 4, false).unwrap();
        assert!(chunk_info.is_batch());
    }
}