git_index/decode/
entries.rs

1use std::{convert::TryInto, ops::Range};
2
3use crate::{
4    decode::{self, header},
5    entry,
6    util::{read_u32, split_at_byte_exclusive, split_at_pos, var_int},
7    Entry, Version,
8};
9
/// The average path length, in bytes, assumed for a V4 delta-compressed path —
/// a guess directly from git sources. Used to pre-size buffers that hold decoded V4 paths.
pub const AVERAGE_V4_DELTA_PATH_LEN_IN_BYTES: usize = 80;
12
/// The outcome of decoding a chunk of index entries.
pub struct Outcome {
    /// `true` if at least one decoded entry had a sparse mode.
    pub is_sparse: bool,
}
16
17pub fn estimate_path_storage_requirements_in_bytes(
18    num_entries: u32,
19    on_disk_size: usize,
20    offset_to_extensions: Option<usize>,
21    object_hash: git_hash::Kind,
22    version: Version,
23) -> usize {
24    const fn on_disk_entry_sans_path(object_hash: git_hash::Kind) -> usize {
25        8 + // ctime
26        8 + // mtime
27        (4 * 6) +  // various stat fields
28        2 + // flag, ignore extended flag as we'd rather overallocate a bit
29        object_hash.len_in_bytes()
30    }
31    match version {
32        Version::V3 | Version::V2 => {
33            let size_of_entries_block = offset_to_extensions.unwrap_or(on_disk_size);
34            size_of_entries_block
35                .saturating_sub(num_entries as usize * on_disk_entry_sans_path(object_hash))
36                .saturating_sub(header::SIZE)
37        }
38        Version::V4 => num_entries as usize * AVERAGE_V4_DELTA_PATH_LEN_IN_BYTES,
39    }
40}
41
/// Note that `data` must point to the beginning of the entries, right past the header.
///
/// Decodes `num_entries` entries from `data` into `entries`, appending all path bytes
/// to `path_backing` (each `Entry.path` is a range into that shared backing buffer).
/// Returns an [`Outcome`] along with the yet-unconsumed remainder of `data`.
pub fn chunk<'a>(
    mut data: &'a [u8],
    entries: &mut Vec<Entry>,
    path_backing: &mut Vec<u8>,
    num_entries: u32,
    object_hash: git_hash::Kind,
    version: Version,
) -> Result<(Outcome, &'a [u8]), decode::Error> {
    let mut is_sparse = false;
    // V4 stores each path as a delta against the previous entry's path.
    let has_delta_paths = version == Version::V4;
    let mut prev_path = None;
    // Scratch buffer used to materialize the retained prefix of V4 delta paths,
    // reused across all entries to avoid per-entry allocations.
    let mut delta_buf = Vec::<u8>::with_capacity(AVERAGE_V4_DELTA_PATH_LEN_IN_BYTES);

    for idx in 0..num_entries {
        let (entry, remaining) = load_one(
            data,
            path_backing,
            object_hash.len_in_bytes(),
            has_delta_paths,
            prev_path,
        )
        // `None` from `load_one` means malformed or truncated input at this entry.
        .ok_or(decode::Error::Entry { index: idx })?;

        data = remaining;
        if entry.mode.is_sparse() {
            is_sparse = true;
        }
        // TODO: entries are actually in an intrusive collection, with path as key. Could be set for us. This affects 'ignore_case' which we
        //       also don't yet handle but probably could, maybe even smartly with the collection.
        //       For now it's unclear to me how they access the index, they could iterate quickly, and have fast access by path.
        entries.push(entry);
        // Remember this entry's path range (plus the scratch buffer) so the next
        // iteration can resolve a V4 path delta against it.
        prev_path = entries.last().map(|e| (e.path.clone(), &mut delta_buf));
    }

    Ok((Outcome { is_sparse }, data))
}
79
80/// Note that `prev_path` is only useful if the version is V4
81fn load_one<'a>(
82    data: &'a [u8],
83    path_backing: &mut Vec<u8>,
84    hash_len: usize,
85    has_delta_paths: bool,
86    prev_path_and_buf: Option<(Range<usize>, &mut Vec<u8>)>,
87) -> Option<(Entry, &'a [u8])> {
88    let first_byte_of_entry = data.as_ptr() as usize;
89    let (ctime_secs, data) = read_u32(data)?;
90    let (ctime_nsecs, data) = read_u32(data)?;
91    let (mtime_secs, data) = read_u32(data)?;
92    let (mtime_nsecs, data) = read_u32(data)?;
93    let (dev, data) = read_u32(data)?;
94    let (ino, data) = read_u32(data)?;
95    let (mode, data) = read_u32(data)?;
96    let (uid, data) = read_u32(data)?;
97    let (gid, data) = read_u32(data)?;
98    let (size, data) = read_u32(data)?;
99    let (hash, data) = split_at_pos(data, hash_len)?;
100    let (flags, data) = read_u16(data)?;
101    let flags = entry::at_rest::Flags::from_bits(flags)?;
102    let (flags, data) = if flags.contains(entry::at_rest::Flags::EXTENDED) {
103        let (extended_flags, data) = read_u16(data)?;
104        let extended_flags = entry::at_rest::FlagsExtended::from_bits(extended_flags)?;
105        let extended_flags = extended_flags.to_flags()?;
106        (flags.to_memory() | extended_flags, data)
107    } else {
108        (flags.to_memory(), data)
109    };
110
111    let start = path_backing.len();
112    let data = if has_delta_paths {
113        let (strip_len, data) = var_int(data)?;
114        if let Some((prev_path, buf)) = prev_path_and_buf {
115            let end = prev_path.end.checked_sub(strip_len.try_into().ok()?)?;
116            let copy_len = end.checked_sub(prev_path.start)?;
117            if copy_len > 0 {
118                buf.resize(copy_len, 0);
119                buf.copy_from_slice(&path_backing[prev_path.start..end]);
120                path_backing.extend_from_slice(buf);
121            }
122        }
123
124        let (path, data) = split_at_byte_exclusive(data, 0)?;
125        path_backing.extend_from_slice(path);
126
127        data
128    } else {
129        let (path, data) = if flags.contains(entry::Flags::PATH_LEN) {
130            split_at_byte_exclusive(data, 0)?
131        } else {
132            let path_len = (flags.bits() & entry::Flags::PATH_LEN.bits()) as usize;
133            let (path, data) = split_at_pos(data, path_len)?;
134            (path, skip_padding(data, first_byte_of_entry))
135        };
136
137        path_backing.extend_from_slice(path);
138        data
139    };
140    let path_range = start..path_backing.len();
141
142    Some((
143        Entry {
144            stat: entry::Stat {
145                ctime: entry::Time {
146                    secs: ctime_secs,
147                    nsecs: ctime_nsecs,
148                },
149                mtime: entry::Time {
150                    secs: mtime_secs,
151                    nsecs: mtime_nsecs,
152                },
153                dev,
154                ino,
155                uid,
156                gid,
157                size,
158            },
159            id: git_hash::ObjectId::from(hash),
160            flags: flags & !entry::Flags::PATH_LEN,
161            // This forces us to add the bits we need before being able to use them.
162            mode: entry::Mode::from_bits_truncate(mode),
163            path: path_range,
164        },
165        data,
166    ))
167}
168
/// Skip the 1-8 NUL bytes that pad a V2/V3 entry up to a multiple of 8 bytes.
///
/// `first_byte_of_entry` is the address of the entry's first byte and `data` must
/// point right past the entry's (not yet NUL-terminated) path, so at least one NUL
/// is always skipped. Panics if `data` is shorter than the padding it must skip.
#[inline]
fn skip_padding(data: &[u8], first_byte_of_entry: usize) -> &[u8] {
    // How many bytes of the entry were consumed so far (fixed fields + path).
    let consumed = data.as_ptr() as usize - first_byte_of_entry;
    // Round up to the strictly-next multiple of 8 — strict, because the path's
    // mandatory NUL terminator has not been consumed yet.
    let padded_entry_size = (consumed + 8) & !7;

    &data[padded_entry_size - consumed..]
}
177
/// Read a big-endian `u16` from the front of `data`, returning it together with the
/// remaining bytes, or `None` if fewer than two bytes are available.
#[inline]
fn read_u16(data: &[u8]) -> Option<(u16, &[u8])> {
    let bytes = data.get(..2)?;
    let rest = &data[2..];
    Some((u16::from_be_bytes(bytes.try_into().ok()?), rest))
}