use std::{
    collections::{BTreeMap, BTreeSet, VecDeque},
    fs::{create_dir_all, File},
    io::Read,
    path::{Path, PathBuf},
};

use collections::{contents::Collection, CollectionError};
use config::{Config, ConfigError};
use git2::{
    build::{CheckoutBuilder, RepoBuilder},
    BranchType, Oid, Repository,
};
use ipkg::{IPkg, IPkgError};
use json::{AnnotatedSource, Source, Sources};
use petgraph::{
    graph::{DiGraph, NodeIndex},
    visit::EdgeRef,
};
use prefetch::PrefetchError;
use snafu::{ensure, OptionExt, ResultExt, Snafu};
use tracing::{debug, info, instrument};

use crate::{
    collections::{tag::CollectionTag, Collections},
    prefetch::prefetch_git,
};

pub mod cli;
pub mod collections;
pub mod config;
pub mod ipkg;
pub mod json;
pub mod prefetch;
pub const IDRIS2_BUILTINS: [&str; 7] = [
    "base", "contrib", "linear", "network", "papers", "prelude", "idris2",
];
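
/// Mutable idr2nix state: the user configuration, the collections read from
/// the cloned collection repository, the state directory root, and a cache of
/// already-fetched ipkg files keyed by collection and package name.
///
/// A minimal usage sketch (the crate path `idr2nix` and the state directory
/// `.idr2nix` are illustrative, and the calls below need network access):
///
/// ```ignore
/// use idr2nix::State;
///
/// // Create the state directory and clone the collection repository.
/// State::init(".idr2nix", None)?;
/// let mut state = State::load(".idr2nix")?;
///
/// // Fetch an ipkg from the configured collection and pin it together with
/// // all of its transitive dependencies.
/// let (ipkg, _rev) = state.get_ipkg("algebra")?;
/// let sources = state.sources(std::iter::once(&ipkg))?;
/// ```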
#[derive(custom_debug::Debug, Clone)]
pub struct State {
    pub config: Config,
    #[debug(skip)]
    pub collections: Collections,
    pub root: PathBuf,
    ipkg_cache: BTreeMap<(CollectionTag, String), (IPkg, String)>,
}

impl State {
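    /// Initializes a new idr2nix state directory at `path`: clones the
    /// collection repository (from `url`, or the default in [`Config`]),
    /// selects the newest collection other than [`CollectionTag::Head`], and
    /// writes the initial `config.toml`.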
    #[instrument(skip(path), fields(path = path.as_ref().to_string_lossy().as_ref()))]
    pub fn init(path: impl AsRef<Path>, url: Option<String>) -> Result<(), StateError> {
        let path = path.as_ref();
        info!(?path, "Creating idr2nix directory");
        create_dir_all(path).context(DirectoryCreationSnafu { dir: path })?;
        let mut config = Config::default();
        if let Some(url) = url {
            config.url = url;
        }
        let repo_path = path.join("repo");
        info!(?repo_path, ?config.url, "Initializing git repo");
        Repository::clone(&config.url, &repo_path).context(GitInitSnafu)?;
        info!("Reading collections from repo");
        let collections = Collections::open_directory(path.join("repo/collections"))
            .context(ReadCollectionsSnafu)?;
        let newest = collections
            .tags()
            .filter(|x| *x != &CollectionTag::Head)
            .max()
            .cloned()
            .context(NoCollectionsSnafu)?;
        config.collection = newest;
        let config_path = path.join("config.toml");
        info!(?config_path, "Writing out configuration file");
        config.write(config_path).context(WriteConfigSnafu)?;
        Ok(())
    }
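
    /// Loads previously initialized state from `path`, reading `config.toml`
    /// and the collections stored in the cloned repository.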
    #[instrument(skip(path), fields(path = path.as_ref().to_string_lossy().as_ref()))]
    pub fn load(path: impl AsRef<Path>) -> Result<Self, StateError> {
        let path = path.as_ref();
        ensure!(
            path.try_exists().context(DirectorySnafu { dir: path })?,
            DirectoryDNESnafu { dir: path }
        );
        let config = Config::read(path.join("config.toml")).context(ReadConfigSnafu)?;
        let collections = Collections::open_directory(path.join("repo/collections"))
            .context(ReadCollectionsSnafu)?;
        Ok(Self {
            config,
            collections,
            root: path.to_owned(),
            ipkg_cache: BTreeMap::new(),
        })
    }
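
    /// Writes the current configuration back to `config.toml` in the state
    /// directory.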
    #[instrument]
    pub fn write_config(&self) -> Result<(), StateError> {
        self.config
            .write(self.root.join("config.toml"))
            .context(WriteConfigSnafu)
    }
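
    /// Refreshes the local copy of the package database by cloning it into
    /// `repo-new` and then swapping it in place of the existing `repo`
    /// directory.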
    #[instrument]
    pub fn update_pack_db(&self) -> Result<(), StateError> {
        let new_repo_path = self.root.join("repo-new");
        let repo_path = self.root.join("repo");
        info!(?new_repo_path, "Making new repository");
        Repository::clone(&self.config.url, &new_repo_path).context(GitUpdateSnafu)?;
        debug!(?repo_path, "Removing old copy of the repository");
        std::fs::remove_dir_all(&repo_path).context(SwapSnafu {
            old: &repo_path,
            new: &new_repo_path,
        })?;
        debug!(
            ?new_repo_path,
            ?repo_path,
            "Moving new copy to correct location"
        );
        std::fs::rename(&new_repo_path, &repo_path).context(SwapSnafu {
            old: &repo_path,
            new: &new_repo_path,
        })?;
        Ok(())
    }
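
    /// Looks up the collection currently selected in the configuration.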
    pub fn get_collection(&mut self) -> Result<&Collection, StateError> {
        let tag = &self.config.collection;
        let collection = self
            .collections
            .get(tag)
            .context(ReadCollectionSnafu { tag: tag.boxed() })?
            .context(MissingCollectionSnafu { tag: tag.boxed() })?;
        Ok(collection)
    }
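
    /// Fetches and parses the ipkg file for `package` as pinned by the
    /// collection `tag`: performs a sparse checkout of the package repository
    /// under `deps/`, resolves the pinned commit, branch, or reference, and
    /// returns the parsed [`IPkg`] together with the commit hash later used as
    /// the source revision. Results are cached per `(collection, package)`.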
    #[instrument(skip(package), fields(package = package.as_ref()))]
    pub fn get_ipkg_from_collection(
        &mut self,
        tag: &CollectionTag,
        package: impl AsRef<str>,
    ) -> Result<(IPkg, String), StateError> {
        let package = package.as_ref();
        let cache_key = (tag.clone(), package.to_string());
        if let Some(res) = self.ipkg_cache.get(&cache_key) {
            return Ok(res.clone());
        }
        let deps_dir = self.root.join("deps");
        debug!(?deps_dir, "Making sure deps dir exists");
        create_dir_all(&deps_dir).context(DirectoryCreationSnafu { dir: &deps_dir })?;
        debug!("Loading collection and finding entry for package");
        let collection = self
            .collections
            .get(tag)
            .context(ReadCollectionSnafu { tag: tag.boxed() })?
            .context(MissingCollectionSnafu { tag: tag.boxed() })?;
        let entry = collection
            .db
            .get(package)
            .context(MissingPackageSnafu { package })?;
        let ipkg_name: &str = &entry.ipkg;
        let mut checkout = CheckoutBuilder::new();
        checkout.path(ipkg_name);
        let checkout_dir = deps_dir.join(package);
        if checkout_dir.exists() {
            debug!(?checkout_dir, "Deleting existing checkout directory");
            std::fs::remove_dir_all(&checkout_dir)
                .context(DirectorySnafu { dir: &checkout_dir })?;
        }
        debug!(
            ?ipkg_name,
            ?checkout_dir,
            ?entry.url,
            "Doing sparse checkout of git repository for {package}"
        );
        let mut builder = RepoBuilder::new();
        builder.with_checkout(checkout);
        let repo = builder
            .clone(&entry.url, &checkout_dir)
            .context(GitIPkgSnafu { entry: package })?;
        // Resolve `entry.commit`: try a raw object id first, then a local
        // branch name, then a generic reference.
        let reference = if let Ok(oid) = Oid::from_str(&entry.commit) {
            repo.find_object(oid, None)
                .context(GitIPkgSnafu { entry: package })?
        } else if let Ok(branch) = repo.find_branch(&entry.commit, BranchType::Local) {
            branch
                .into_reference()
                .peel_to_commit()
                .context(GitIPkgSnafu { entry: package })?
                .as_object()
                .clone()
        } else if let Ok(reference) = repo.find_reference(&entry.commit) {
            reference
                .peel_to_commit()
                .context(GitIPkgSnafu { entry: package })?
                .as_object()
                .clone()
        } else {
            return NoMatchingCommitSnafu {
                commit: &entry.commit,
            }
            .fail();
        };
        // Check out only the ipkg file from the resolved tree.
        let mut checkout = CheckoutBuilder::new();
        checkout.path(ipkg_name);
        repo.checkout_tree(&reference, Some(&mut checkout))
            .context(GitIPkgSnafu { entry: package })?;
        let commit_ref = repo
            .head()
            .context(GitIPkgSnafu { entry: package })?
            .peel_to_commit()
            .context(GitIPkgSnafu { entry: package })?
            .id()
            .to_string();
        let ipkg_path = checkout_dir.join(ipkg_name);
        debug!(?ipkg_path, "Reading in ipkg for {package}");
        let mut ipkg_contents = String::new();
        let mut ipkg_file = File::open(&ipkg_path).context(IPkgFileSnafu { path: &ipkg_path })?;
        ipkg_file
            .read_to_string(&mut ipkg_contents)
            .context(IPkgFileSnafu { path: &ipkg_path })?;
        let ipkg = IPkg::parse(&ipkg_contents).context(IPkgParseSnafu { path: &ipkg_path })?;
        self.ipkg_cache
            .insert(cache_key, (ipkg.clone(), commit_ref.clone()));
        Ok((ipkg, commit_ref))
    }
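
    /// Like [`Self::get_ipkg_from_collection`], using the collection selected
    /// in the configuration.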
    pub fn get_ipkg(&mut self, package: impl AsRef<str>) -> Result<(IPkg, String), StateError> {
        let collection = self.config.collection.clone();
        self.get_ipkg_from_collection(&collection, package)
    }
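
    /// Builds the transitive dependency graph for `ipkg` against the
    /// collection `tag`, skipping the packages in [`IDRIS2_BUILTINS`]. Nodes
    /// are [`IPkg`]s and each edge points from a dependency to the package
    /// that requires it.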
    pub fn solve_deps_with_collection(
        &mut self,
        ipkg: &IPkg,
        tag: &CollectionTag,
    ) -> Result<DiGraph<IPkg, ()>, StateError> {
        let mut graph: DiGraph<IPkg, ()> = DiGraph::new();
        let mut package_queue: VecDeque<(IPkg, IPkg)> = VecDeque::new();
        let mut index_map: BTreeMap<IPkg, NodeIndex> = BTreeMap::new();
        let _root = *index_map
            .entry(ipkg.clone())
            .or_insert_with(|| graph.add_node(ipkg.clone()));
        for (name, child_raw) in ipkg
            .dependencies
            .keys()
            .filter(|x| !IDRIS2_BUILTINS.contains(&x.as_ref()))
            .map(|x| (x, self.get_ipkg_from_collection(tag, x)))
        {
            debug!(?child_raw, "Looking up child in collection, top level");
            let child = child_raw
                .map_err(|_| NotInCollectionSnafu { package: name }.build())?
                .0;
            package_queue.push_back((ipkg.clone(), child.clone()));
        }
        while let Some((parent, child)) = package_queue.pop_back() {
            let parent_idx = *index_map
                .entry(parent.clone())
                .or_insert_with(|| graph.add_node(parent.clone()));
            let child_idx = *index_map
                .entry(child.clone())
                .or_insert_with(|| graph.add_node(child.clone()));
            graph.update_edge(child_idx, parent_idx, ());
            debug!(?parent, ?child, "Solving dependencies");
            let child_ipkg = self
                .get_ipkg_from_collection(tag, &child.package)
                .context(DependenciesSnafu {
                    package: child.package,
                })?
                .0;
            for (name, sub_child_raw) in child_ipkg
                .dependencies
                .keys()
                .filter(|x| !IDRIS2_BUILTINS.contains(&x.as_ref()))
                .map(|x| (x, self.get_ipkg_from_collection(tag, x)))
            {
                debug!(?sub_child_raw, "Looking up child in collection");
                let sub_child = sub_child_raw
                    .map_err(|_| NotInCollectionSnafu { package: name }.build())?
                    .0;
                package_queue.push_back((child_ipkg.clone(), sub_child.clone()));
            }
        }
        Ok(graph)
    }
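
    /// Like [`Self::solve_deps_with_collection`], using the collection
    /// selected in the configuration.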
    pub fn solve_deps(&mut self, ipkg: &IPkg) -> Result<DiGraph<IPkg, ()>, StateError> {
        let collection = self.config.collection.clone();
        self.solve_deps_with_collection(ipkg, &collection)
    }
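
    /// Builds a pinned [`Source`] for `package` from the given collection:
    /// resolves the commit via [`Self::get_ipkg_from_collection`] and
    /// prefetches the repository to obtain its sha256 hash.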
    pub fn source_with_collection(
        &mut self,
        package: impl AsRef<str>,
        tag: &CollectionTag,
    ) -> Result<Source, StateError> {
        let package = package.as_ref();
        let (_ipkg, rev) = self.get_ipkg_from_collection(tag, package)?;
        let repo = self
            .collections
            .get(tag)
            .context(ReadCollectionSnafu { tag: tag.boxed() })?
            .context(MissingCollectionSnafu { tag: tag.boxed() })?
            .db
            .get(package)
            .context(MissingPackageSnafu { package })?
            .url
            .clone();
        let sha256 = prefetch_git(&repo, &rev).context(PrefetchSnafu { package })?;
        Ok(Source {
            url: repo,
            rev,
            sha256,
        })
    }
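
    /// Like [`Self::source_with_collection`], using the collection selected
    /// in the configuration.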
    pub fn source(&mut self, package: impl AsRef<str>) -> Result<Source, StateError> {
        let collection = self.config.collection.clone();
        self.source_with_collection(package, &collection)
    }
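
    /// Computes pinned sources for everything the given ipkgs depend on:
    /// merges their dependency graphs, topologically sorts the merged graph
    /// (dependencies first, with the input packages themselves filtered out),
    /// prefetches each dependency plus the Idris2 compiler pinned by the
    /// collection, and returns the combined [`Sources`].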
    pub fn sources_with_collection<'a>(
        &mut self,
        ipkgs: impl Iterator<Item = &'a IPkg>,
        tag: &CollectionTag,
    ) -> Result<Sources, StateError> {
        let ipkgs = ipkgs.collect::<Vec<_>>();
        let package_names = ipkgs
            .iter()
            .map(|x| x.package.to_string())
            .collect::<BTreeSet<_>>();
        let raw_deps = ipkgs
            .iter()
            .map(|ipkg| self.solve_deps_with_collection(ipkg, tag))
            .collect::<Result<Vec<_>, _>>()?;
        // Merge the per-package dependency graphs into one graph, reusing
        // nodes for packages that appear more than once.
        let mut deps = DiGraph::new();
        let mut dep_map = BTreeMap::new();
        for raw_dep in raw_deps {
            for edge in raw_dep.edge_references() {
                let source_weight = raw_dep.node_weight(edge.source()).unwrap();
                let target_weight = raw_dep.node_weight(edge.target()).unwrap();
                let new_source_idx = *dep_map
                    .entry(source_weight.clone())
                    .or_insert_with(|| deps.add_node(source_weight.clone()));
                let new_target_idx = *dep_map
                    .entry(target_weight.clone())
                    .or_insert_with(|| deps.add_node(target_weight.clone()));
                deps.update_edge(new_source_idx, new_target_idx, ());
            }
        }
        // Topologically sort the merged graph so dependencies come before the
        // packages that need them, dropping the requested top-level packages.
        let sorted_raw = petgraph::algo::toposort(&deps, None)
            .map_err(|x| {
                let pkg = deps.node_weight(x.node_id()).unwrap();
                DepCycleSnafu {
                    package: &pkg.package,
                }
                .build()
            })?
            .into_iter()
            .map(|d| deps.node_weight(d).unwrap())
            .filter(|d| !package_names.contains(&d.package))
            .collect::<Vec<_>>();

        let sorted = sorted_raw
            .iter()
            .map(|x| x.package.clone())
            .collect::<Vec<_>>();

        let collection = self
            .collections
            .get(tag)
            .context(ReadCollectionSnafu { tag: tag.boxed() })?
            .context(MissingCollectionSnafu { tag: tag.boxed() })?
            .clone();
        let sources = sorted_raw
            .iter()
            .cloned()
            .map(|d| {
                self.source_with_collection(&d.package, tag).map(|x| {
                    (
                        d.package.clone(),
                        AnnotatedSource {
                            source: x,
                            ipkg: collection.db.get(&d.package).unwrap().ipkg.clone(),
                            name: d.package.clone(),
                        },
                    )
                })
            })
            .collect::<Result<BTreeMap<_, _>, _>>()?;

        let collection = self
            .collections
            .get(tag)
            .context(ReadCollectionSnafu { tag: tag.boxed() })?
            .context(MissingCollectionSnafu { tag: tag.boxed() })?;

        let idris2_repo = collection.idris2.url.clone();
        let idris2_rev = collection.idris2.commit.clone();
        let idris2_hash =
            prefetch_git(&idris2_repo, &idris2_rev).context(PrefetchSnafu { package: "idris2" })?;

        let idris2 = Source {
            url: idris2_repo,
            rev: idris2_rev,
            sha256: idris2_hash,
        };

        Ok(Sources {
            sources,
            sorted,
            idris2,
        })
    }
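
    /// Like [`Self::sources_with_collection`], using the collection selected
    /// in the configuration.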
    pub fn sources<'a>(
        &mut self,
        ipkgs: impl Iterator<Item = &'a IPkg>,
    ) -> Result<Sources, StateError> {
        let collection = self.config.collection.clone();
        self.sources_with_collection(ipkgs, &collection)
    }
}
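
/// Errors returned by [`State`] operations.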
#[derive(Debug, Snafu)]
pub enum StateError {
    #[snafu(display("Error creating idr2nix directory: {:?}", dir))]
    DirectoryCreation {
        source: std::io::Error,
        dir: PathBuf,
    },
    #[snafu(display("Error accessing directory: {:?}", dir))]
    DirectoryError {
        source: std::io::Error,
        dir: PathBuf,
    },
    #[snafu(display("Directory did not exist or we do not have permissions: {:?}", dir))]
    DirectoryDNE { dir: PathBuf },
    WriteConfig { source: ConfigError },
    ReadConfig { source: ConfigError },
    GitInit { source: git2::Error },
    GitUpdate { source: git2::Error },
    #[snafu(display("Git error fetching ipkg for entry: {entry}"))]
    GitIPkg { source: git2::Error, entry: String },
    #[snafu(display("Error swapping old repo with new repo: {:?} -> {:?}", new, old))]
    SwapError {
        source: std::io::Error,
        new: PathBuf,
        old: PathBuf,
    },
    ReadCollections { source: CollectionError },
    NoCollections,
    #[snafu(display("Error reading collection from repository: {tag}"))]
    ReadCollection {
        source: CollectionError,
        tag: Box<CollectionTag>,
    },
    #[snafu(display("Missing collection: {tag}"))]
    MissingCollection { tag: Box<CollectionTag> },
    #[snafu(display("Missing package: {package}"))]
    MissingPackage { package: String },
    #[snafu(display("Error opening ipkg file: {:?}", path))]
    IPkgFile {
        source: std::io::Error,
        path: PathBuf,
    },
    #[snafu(display("Error parsing ipkg file: {:?}", path))]
    IPkgParse { source: IPkgError, path: PathBuf },
    #[snafu(display("No matching commit: {commit}"))]
    NoMatchingCommit { commit: String },
    #[snafu(display("Error resolving dependencies for package: {package}"))]
    Dependencies {
        #[snafu(source(from(StateError, Box::new)))]
        source: Box<StateError>,
        package: String,
    },
    #[snafu(display("Dependency not in collection: {package}"))]
    NotInCollection { package: String },
    #[snafu(display("Error prefetching package: {package}"))]
    Prefetch {
        package: String,
        source: PrefetchError,
    },
    #[snafu(display("Dependency graph for package contains a cycle: {package}"))]
    DepCycle { package: String },
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::collections::tag::CollectionTag;

    #[test]
    fn smoke() {
        let tmp = tempfile::tempdir().unwrap();
        State::init(tmp.path(), None).expect("initialize");
        let mut initial_state = State::load(tmp.path()).expect("first load");
        let config = Config {
            collection: initial_state
                .collections
                .tags()
                .filter(|x| *x != &CollectionTag::Head)
                .max()
                .cloned()
                .context(NoCollectionsSnafu)
                .unwrap(),
            ..Default::default()
        };
        assert_eq!(initial_state.config, config);
        assert!(initial_state.collections.contains(&CollectionTag::Head));
        initial_state.config.url = "http://test.site".to_string();
        initial_state.write_config().expect("Write config");
        let second_state = State::load(tmp.path()).expect("second load");
        assert_eq!(&second_state.config.url, "http://test.site")
    }
    mod ipkgs {
        use super::*;
        const TAG: &str = "nightly-230522";
        const ALGEBRA: &str = include_str!("../test-data/ipkgs/algebra.ipkg");
        const EFF: &str = include_str!("../test-data/ipkgs/eff.ipkg");

        #[test]
        fn algebra() {
            let reference = IPkg::parse(ALGEBRA).expect("Failed to parse reference ipkg");
            let tag = CollectionTag::try_from(TAG).expect("Failed to parse reference tag");

            let tmp = tempfile::tempdir().unwrap();
            State::init(tmp.path(), None).expect("initialize");
            let mut state = State::load(tmp.path()).expect("first load");

            let test_ipkg = state
                .get_ipkg_from_collection(&tag, "algebra")
                .expect("Failed to fetch and parse ipkg")
                .0;
            assert_eq!(reference, test_ipkg)
        }

        #[test]
        fn eff() {
            let reference = IPkg::parse(EFF).expect("Failed to parse reference ipkg");
            let tag = CollectionTag::try_from(TAG).expect("Failed to parse reference tag");

            let tmp = tempfile::tempdir().unwrap();
            State::init(tmp.path(), None).expect("initialize");
            let mut state = State::load(tmp.path()).expect("first load");

            let test_ipkg = state
                .get_ipkg_from_collection(&tag, "eff")
                .expect("Failed to fetch and parse ipkg")
                .0;
            assert_eq!(reference, test_ipkg)
        }
    }
    mod source {
        use super::*;

        #[test]
        fn smoke() {
            let tag =
                CollectionTag::try_from("nightly-230518").expect("Failed to parse reference tag");
            let tmp = tempfile::tempdir().unwrap();
            State::init(tmp.path(), None).expect("initialize");
            let mut state = State::load(tmp.path()).expect("first load");
            let algebra = state
                .source_with_collection("algebra", &tag)
                .expect("Failed to get source");
            assert_eq!(
                algebra,
                Source {
                    url: "https://github.com/stefan-hoeck/idris2-algebra".to_string(),
                    rev: "1172ed5b4848c5e18f7bf5f9bb467bc3e1f6b7e9".to_string(),
                    sha256: "sha256-5jxzuzLLBmKeyzK11kTI4GBr5eTnpHuN1Efb1x6Dj6Q=".to_string()
                }
            );
        }
    }
}