1use panproto_vcs::{Object, ObjectId, Store};
8use rustc_hash::FxHashMap;
9
10use crate::error::GitBridgeError;
11
/// Outcome of exporting one panproto commit to a Git repository.
#[derive(Debug)]
pub struct ExportResult {
    /// OID of the Git commit that was created.
    pub git_oid: git2::Oid,
    /// Number of tree entries written: `schema.json`, `commit.json`, and
    /// each reconstructed source file.
    pub file_count: usize,
}
20
/// Exports the panproto commit `commit_id` into `git_repo` as a Git commit.
///
/// The generated Git tree contains `schema.json` (the resolved schema,
/// pretty-printed), `commit.json` (the panproto commit itself), and source
/// files reconstructed from byte-position fragments recorded in the schema.
///
/// `parent_map` maps previously exported panproto parent commit ids to their
/// Git OIDs; parents absent from the map (or whose commit cannot be found in
/// the repo) are silently skipped. If `update_ref` is `Some`, git2 updates
/// that reference to point at the new commit.
///
/// # Errors
/// Returns `GitBridgeError` if the object cannot be loaded or is not a
/// commit, if schema resolution or JSON serialization fails, or if any git2
/// operation fails.
pub fn export_to_git<S: Store, H: std::hash::BuildHasher>(
    panproto_store: &S,
    git_repo: &git2::Repository,
    commit_id: ObjectId,
    parent_map: &std::collections::HashMap<ObjectId, git2::Oid, H>,
    update_ref: Option<&str>,
) -> Result<ExportResult, GitBridgeError> {
    // Load the object and insist it is actually a commit.
    let commit_obj = panproto_store.get(&commit_id)?;
    let commit = match &commit_obj {
        Object::Commit(c) => c,
        other => {
            return Err(GitBridgeError::ObjectRead {
                oid: commit_id.to_string(),
                reason: format!("expected commit, got {}", other.type_name()),
            });
        }
    };

    let schema =
        panproto_vcs::tree::resolve_commit_schema(panproto_store, commit).map_err(|e| {
            GitBridgeError::ObjectRead {
                oid: commit.schema_id.to_string(),
                reason: format!("failed to resolve commit schema tree: {e}"),
            }
        })?;

    let mut tree_builder = git_repo.treebuilder(None)?;
    // Counts every entry written into the tree: the two JSON metadata blobs
    // plus each reconstructed source file.
    let mut file_count = 0;

    // schema.json: the resolved schema as pretty-printed JSON.
    let schema_json =
        serde_json::to_vec_pretty(&schema).map_err(|e| GitBridgeError::ObjectRead {
            oid: commit.schema_id.to_string(),
            reason: format!("JSON serialization failed: {e}"),
        })?;
    let blob_oid = git_repo.blob(&schema_json)?;
    tree_builder.insert("schema.json", blob_oid, 0o100_644)?;
    file_count += 1;

    // commit.json: the panproto commit object as pretty-printed JSON.
    let commit_json =
        serde_json::to_vec_pretty(commit).map_err(|e| GitBridgeError::ObjectRead {
            oid: commit_id.to_string(),
            reason: format!("commit JSON serialization failed: {e}"),
        })?;
    let commit_blob = git_repo.blob(&commit_json)?;
    tree_builder.insert("commit.json", commit_blob, 0o100_644)?;
    file_count += 1;

    // Reassemble each source file from its (start-byte, text) fragments.
    let files_fragments = collect_file_fragments(&schema);
    let mut file_blobs: FxHashMap<String, git2::Oid> = FxHashMap::default();

    for (file_path, mut fragments) in files_fragments {
        fragments.sort_by_key(|(s, _)| *s);

        // Concatenate fragments in byte order; a fragment whose start lies
        // before the current cursor overlaps an earlier one and is dropped.
        // NOTE(review): gaps between fragments are not padded, so the blob is
        // a concatenation of non-overlapping fragments rather than a
        // byte-exact reconstruction — confirm this is intended.
        let mut content = Vec::new();
        let mut cursor = 0;
        for (pos, text) in &fragments {
            if *pos >= cursor {
                content.extend_from_slice(text.as_bytes());
                cursor = pos + text.len();
            }
        }

        // Empty files produce no blob and are not counted.
        if !content.is_empty() {
            let blob_oid = git_repo.blob(&content)?;
            file_blobs.insert(file_path, blob_oid);
            file_count += 1;
        }
    }

    // Mirror the file paths' directory structure as nested Git trees.
    build_nested_tree(git_repo, &mut tree_builder, &file_blobs)?;

    let tree_oid = tree_builder.write()?;
    let tree = git_repo.find_tree(tree_oid)?;

    // Synthesize a signature: only an author name is available, so a
    // placeholder "<author>@panproto" email is derived from it. Timestamps
    // that do not fit in i64 saturate to i64::MAX.
    let sig = git2::Signature::new(
        &commit.author,
        &format!("{}@panproto", commit.author),
        &git2::Time::new(i64::try_from(commit.timestamp).unwrap_or(i64::MAX), 0),
    )?;

    // Resolve panproto parents to Git commits; unknown or unresolvable
    // parents are skipped rather than failing the export.
    let mut parents: Vec<git2::Commit<'_>> = Vec::new();
    for parent_panproto_id in &commit.parents {
        if let Some(parent_git_oid) = parent_map.get(parent_panproto_id) {
            if let Ok(parent_commit) = git_repo.find_commit(*parent_git_oid) {
                parents.push(parent_commit);
            }
        }
    }
    let parent_refs: Vec<&git2::Commit<'_>> = parents.iter().collect();

    let git_oid = git_repo.commit(update_ref, &sig, &sig, &commit.message, &tree, &parent_refs)?;

    Ok(ExportResult {
        git_oid,
        file_count,
    })
}
150
151fn collect_file_fragments(
158 schema: &panproto_schema::Schema,
159) -> FxHashMap<String, Vec<(usize, String)>> {
160 let mut files_fragments: FxHashMap<String, Vec<(usize, String)>> = FxHashMap::default();
161
162 for name in schema.vertices.keys() {
163 if let Some(constraints) = schema.constraints.get(name) {
164 let name_str = name.as_ref();
165 let file_prefix = name_str
166 .find("::")
167 .map_or(name_str, |pos| &name_str[..pos])
168 .to_owned();
169
170 let start_byte = constraints
171 .iter()
172 .find(|c| c.sort.as_ref() == "start-byte")
173 .and_then(|c| c.value.parse::<usize>().ok());
174 let literal = constraints
175 .iter()
176 .find(|c| c.sort.as_ref() == "literal-value")
177 .map(|c| c.value.clone());
178 if let (Some(start), Some(text)) = (start_byte, literal) {
179 files_fragments
180 .entry(file_prefix.clone())
181 .or_default()
182 .push((start, text));
183 }
184
185 for c in constraints {
186 let sort_str = c.sort.as_ref();
187 if sort_str.starts_with("interstitial-") && !sort_str.ends_with("-start-byte") {
188 let pos_sort = format!("{sort_str}-start-byte");
189 let pos = constraints
190 .iter()
191 .find(|c2| c2.sort.as_ref() == pos_sort.as_str())
192 .and_then(|c2| c2.value.parse::<usize>().ok());
193 if let Some(p) = pos {
194 files_fragments
195 .entry(file_prefix.clone())
196 .or_default()
197 .push((p, c.value.clone()));
198 }
199 }
200 }
201 }
202 }
203
204 files_fragments
205}
206
207fn build_nested_tree(
208 repo: &git2::Repository,
209 root_builder: &mut git2::TreeBuilder<'_>,
210 file_blobs: &FxHashMap<String, git2::Oid>,
211) -> Result<(), GitBridgeError> {
212 let mut dirs: FxHashMap<String, Vec<(String, git2::Oid)>> = FxHashMap::default();
214 let mut root_files: Vec<(String, git2::Oid)> = Vec::new();
215
216 for (path, oid) in file_blobs {
217 if let Some(slash_pos) = path.find('/') {
218 let dir = &path[..slash_pos];
219 let rest = &path[slash_pos + 1..];
220 dirs.entry(dir.to_owned())
221 .or_default()
222 .push((rest.to_owned(), *oid));
223 } else {
224 root_files.push((path.clone(), *oid));
225 }
226 }
227
228 for (name, oid) in &root_files {
230 root_builder.insert(name, *oid, 0o100_644)?;
231 }
232
233 for (dir_name, entries) in &dirs {
235 let subtree_oid = build_subtree(repo, entries)?;
236 root_builder.insert(dir_name, subtree_oid, 0o040_000)?;
237 }
238
239 Ok(())
240}
241
242fn build_subtree(
244 repo: &git2::Repository,
245 entries: &[(String, git2::Oid)],
246) -> Result<git2::Oid, GitBridgeError> {
247 let mut builder = repo.treebuilder(None)?;
248
249 let mut subdirs: FxHashMap<String, Vec<(String, git2::Oid)>> = FxHashMap::default();
251 let mut files: Vec<(String, git2::Oid)> = Vec::new();
252
253 for (path, oid) in entries {
254 if let Some(slash_pos) = path.find('/') {
255 let dir = &path[..slash_pos];
256 let rest = &path[slash_pos + 1..];
257 subdirs
258 .entry(dir.to_owned())
259 .or_default()
260 .push((rest.to_owned(), *oid));
261 } else {
262 files.push((path.clone(), *oid));
263 }
264 }
265
266 for (name, oid) in &files {
267 builder.insert(name, *oid, 0o100_644)?;
268 }
269
270 for (dir_name, sub_entries) in &subdirs {
271 let subtree_oid = build_subtree(repo, sub_entries)?;
272 builder.insert(dir_name, subtree_oid, 0o040_000)?;
273 }
274
275 Ok(builder.write()?)
276}