1use panproto_vcs::{Object, ObjectId, Store};
8use rustc_hash::FxHashMap;
9
10use crate::error::GitBridgeError;
11
/// Outcome of a successful [`export_to_git`] call.
#[derive(Debug)]
pub struct ExportResult {
    /// Object id of the git commit that was created.
    pub git_oid: git2::Oid,
    /// Number of blobs written into the commit's tree: `schema.json`,
    /// `commit.json`, plus one per reconstructed (non-empty) source file.
    pub file_count: usize,
}
20
/// Exports one panproto commit into `git_repo` as a git commit.
///
/// The generated git tree contains:
/// - `schema.json` — the commit's schema, pretty-printed JSON;
/// - `commit.json` — the commit object itself, pretty-printed JSON;
/// - one blob per source file reconstructed from the schema's constraint
///   fragments, with `/`-separated path prefixes expanded into nested trees.
///
/// `parent_map` translates panproto parent ids to already-exported git oids;
/// parents missing from the map, or whose oid is not found in the repo, are
/// silently skipped. When `update_ref` is `Some`, that reference is moved to
/// the new commit by `git2::Repository::commit`.
///
/// # Errors
/// Returns [`GitBridgeError`] when the commit or schema cannot be read from
/// the store (or has the wrong object type), when JSON serialization fails,
/// or on any underlying git2 failure.
pub fn export_to_git<S: Store, H: std::hash::BuildHasher>(
    panproto_store: &S,
    git_repo: &git2::Repository,
    commit_id: ObjectId,
    parent_map: &std::collections::HashMap<ObjectId, git2::Oid, H>,
    update_ref: Option<&str>,
) -> Result<ExportResult, GitBridgeError> {
    // Load the commit and verify its object type.
    let commit_obj = panproto_store.get(&commit_id)?;
    let commit = match &commit_obj {
        Object::Commit(c) => c,
        other => {
            return Err(GitBridgeError::ObjectRead {
                oid: commit_id.to_string(),
                reason: format!("expected commit, got {}", other.type_name()),
            });
        }
    };

    // Load the schema the commit points at and verify its object type.
    let schema_obj = panproto_store.get(&commit.schema_id)?;
    let schema = match &schema_obj {
        Object::Schema(s) => s,
        other => {
            return Err(GitBridgeError::ObjectRead {
                oid: commit.schema_id.to_string(),
                reason: format!("expected schema, got {}", other.type_name()),
            });
        }
    };

    // Root tree for the export; file/blob inserts accumulate here.
    let mut tree_builder = git_repo.treebuilder(None)?;
    let mut file_count = 0;

    // Serialize the schema itself as a top-level JSON file. A serde failure
    // is surfaced as an ObjectRead error keyed by the schema id.
    let schema_json =
        serde_json::to_vec_pretty(schema.as_ref()).map_err(|e| GitBridgeError::ObjectRead {
            oid: commit.schema_id.to_string(),
            reason: format!("JSON serialization failed: {e}"),
        })?;
    let blob_oid = git_repo.blob(&schema_json)?;
    tree_builder.insert("schema.json", blob_oid, 0o100_644)?;
    file_count += 1;

    // Likewise embed the panproto commit metadata as a JSON file.
    let commit_json =
        serde_json::to_vec_pretty(commit).map_err(|e| GitBridgeError::ObjectRead {
            oid: commit_id.to_string(),
            reason: format!("commit JSON serialization failed: {e}"),
        })?;
    let commit_blob = git_repo.blob(&commit_json)?;
    tree_builder.insert("commit.json", commit_blob, 0o100_644)?;
    file_count += 1;

    // Gather (start-byte, text) fragments per file path from the schema.
    let files_fragments = collect_file_fragments(schema);
    let mut file_blobs: FxHashMap<String, git2::Oid> = FxHashMap::default();

    for (file_path, mut fragments) in files_fragments {
        // Order fragments by their recorded start byte before concatenation.
        fragments.sort_by_key(|(s, _)| *s);

        // Best-effort reconstruction: fragments are appended in offset order.
        // A fragment whose start byte falls inside the previous fragment
        // (overlap) is dropped entirely, and gaps between fragments are NOT
        // padded — `cursor` only guards against overlap, so output byte
        // offsets may not match the recorded start bytes.
        // NOTE(review): overlap-drop/no-padding looks intentional, but
        // confirm against whatever consumes these reconstructed files.
        let mut content = Vec::new();
        let mut cursor = 0;
        for (pos, text) in &fragments {
            if *pos >= cursor {
                content.extend_from_slice(text.as_bytes());
                cursor = pos + text.len();
            }
        }

        // Empty reconstructions produce no blob and are not counted.
        if !content.is_empty() {
            let blob_oid = git_repo.blob(&content)?;
            file_blobs.insert(file_path, blob_oid);
            file_count += 1;
        }
    }

    // Expand '/'-separated paths into nested git trees under the root.
    build_nested_tree(git_repo, &mut tree_builder, &file_blobs)?;

    let tree_oid = tree_builder.write()?;
    let tree = git_repo.find_tree(tree_oid)?;

    // Synthesize a signature: the email is fabricated as "<author>@panproto",
    // and the timestamp saturates at i64::MAX if it overflows i64.
    let sig = git2::Signature::new(
        &commit.author,
        &format!("{}@panproto", commit.author),
        &git2::Time::new(i64::try_from(commit.timestamp).unwrap_or(i64::MAX), 0),
    )?;

    // Resolve parent commits; unmapped or unresolvable parents are skipped
    // rather than treated as errors (partial exports remain possible).
    let mut parents: Vec<git2::Commit<'_>> = Vec::new();
    for parent_panproto_id in &commit.parents {
        if let Some(parent_git_oid) = parent_map.get(parent_panproto_id) {
            if let Ok(parent_commit) = git_repo.find_commit(*parent_git_oid) {
                parents.push(parent_commit);
            }
        }
    }
    let parent_refs: Vec<&git2::Commit<'_>> = parents.iter().collect();

    // Author and committer are the same synthesized signature.
    let git_oid = git_repo.commit(update_ref, &sig, &sig, &commit.message, &tree, &parent_refs)?;

    Ok(ExportResult {
        git_oid,
        file_count,
    })
}
153
154fn collect_file_fragments(
161 schema: &panproto_schema::Schema,
162) -> FxHashMap<String, Vec<(usize, String)>> {
163 let mut files_fragments: FxHashMap<String, Vec<(usize, String)>> = FxHashMap::default();
164
165 for name in schema.vertices.keys() {
166 if let Some(constraints) = schema.constraints.get(name) {
167 let name_str = name.as_ref();
168 let file_prefix = name_str
169 .find("::")
170 .map_or(name_str, |pos| &name_str[..pos])
171 .to_owned();
172
173 let start_byte = constraints
174 .iter()
175 .find(|c| c.sort.as_ref() == "start-byte")
176 .and_then(|c| c.value.parse::<usize>().ok());
177 let literal = constraints
178 .iter()
179 .find(|c| c.sort.as_ref() == "literal-value")
180 .map(|c| c.value.clone());
181 if let (Some(start), Some(text)) = (start_byte, literal) {
182 files_fragments
183 .entry(file_prefix.clone())
184 .or_default()
185 .push((start, text));
186 }
187
188 for c in constraints {
189 let sort_str = c.sort.as_ref();
190 if sort_str.starts_with("interstitial-") && !sort_str.ends_with("-start-byte") {
191 let pos_sort = format!("{sort_str}-start-byte");
192 let pos = constraints
193 .iter()
194 .find(|c2| c2.sort.as_ref() == pos_sort.as_str())
195 .and_then(|c2| c2.value.parse::<usize>().ok());
196 if let Some(p) = pos {
197 files_fragments
198 .entry(file_prefix.clone())
199 .or_default()
200 .push((p, c.value.clone()));
201 }
202 }
203 }
204 }
205 }
206
207 files_fragments
208}
209
210fn build_nested_tree(
211 repo: &git2::Repository,
212 root_builder: &mut git2::TreeBuilder<'_>,
213 file_blobs: &FxHashMap<String, git2::Oid>,
214) -> Result<(), GitBridgeError> {
215 let mut dirs: FxHashMap<String, Vec<(String, git2::Oid)>> = FxHashMap::default();
217 let mut root_files: Vec<(String, git2::Oid)> = Vec::new();
218
219 for (path, oid) in file_blobs {
220 if let Some(slash_pos) = path.find('/') {
221 let dir = &path[..slash_pos];
222 let rest = &path[slash_pos + 1..];
223 dirs.entry(dir.to_owned())
224 .or_default()
225 .push((rest.to_owned(), *oid));
226 } else {
227 root_files.push((path.clone(), *oid));
228 }
229 }
230
231 for (name, oid) in &root_files {
233 root_builder.insert(name, *oid, 0o100_644)?;
234 }
235
236 for (dir_name, entries) in &dirs {
238 let subtree_oid = build_subtree(repo, entries)?;
239 root_builder.insert(dir_name, subtree_oid, 0o040_000)?;
240 }
241
242 Ok(())
243}
244
245fn build_subtree(
247 repo: &git2::Repository,
248 entries: &[(String, git2::Oid)],
249) -> Result<git2::Oid, GitBridgeError> {
250 let mut builder = repo.treebuilder(None)?;
251
252 let mut subdirs: FxHashMap<String, Vec<(String, git2::Oid)>> = FxHashMap::default();
254 let mut files: Vec<(String, git2::Oid)> = Vec::new();
255
256 for (path, oid) in entries {
257 if let Some(slash_pos) = path.find('/') {
258 let dir = &path[..slash_pos];
259 let rest = &path[slash_pos + 1..];
260 subdirs
261 .entry(dir.to_owned())
262 .or_default()
263 .push((rest.to_owned(), *oid));
264 } else {
265 files.push((path.clone(), *oid));
266 }
267 }
268
269 for (name, oid) in &files {
270 builder.insert(name, *oid, 0o100_644)?;
271 }
272
273 for (dir_name, sub_entries) in &subdirs {
274 let subtree_oid = build_subtree(repo, sub_entries)?;
275 builder.insert(dir_name, subtree_oid, 0o040_000)?;
276 }
277
278 Ok(builder.write()?)
279}