1use crate::analyze::{analyze, Analysis};
4use crate::bail;
5use crate::config::{Config, ConfigFile, Depends, FsConfig, Project, ProjectId, Size};
6use crate::either::{IterEither2 as E2, IterEither3 as E3};
7use crate::errors::Result;
8use crate::git::{Auth, CommitInfoBuf, FromTag, FromTagBuf, FullPr, GithubInfo, Repo};
9use crate::github::{changes, line_commits_head, Changes};
10use crate::state::{CommitArgs, CurrentState, OldTags, PrevFiles, PrevTagMessage, StateRead, StateWrite};
11use crate::vcs::VcsState;
12use chrono::{DateTime, FixedOffset};
13use serde::Deserialize;
14use std::cmp::{max, Ordering};
15use std::collections::{BTreeSet, HashMap, HashSet, VecDeque};
16use std::iter::{empty, once};
17use std::path::{Path, PathBuf};
18use tracing::trace;
19
/// Directory (under the user's home directory) that holds versio user preferences.
const USER_PREFS_DIR: &str = ".versio";
/// Preferences file name inside `USER_PREFS_DIR`.
const USER_PREFS_FILE: &str = "prefs.toml";
22
/// Central handle for reading and updating a versio-managed (mono)repo.
pub struct Mono {
  current: Config<CurrentState>,            // config as read from the current working tree
  next: StateWrite,                         // pending writes, applied on `commit`
  last_commits: HashMap<ProjectId, String>, // per-project commit found by `find_last_commits`
  repo: Repo,                               // underlying VCS repository
  user_prefs: UserPrefs                     // prefs from ~/.versio/prefs.toml plus env overrides
}
30
31impl Mono {
32 pub fn here(vcs: VcsState) -> Result<Mono> { Mono::open(".", vcs) }
33
34 pub fn open<P: AsRef<Path>>(dir: P, vcs: VcsState) -> Result<Mono> {
35 let root = Repo::find_working_dir(dir.as_ref(), *vcs.level(), false)?;
36
37 let file = ConfigFile::from_dir(&root)?;
39 trace!("Using commit message: {}", file.commit_config().message());
40
41 let repo = Repo::open(dir.as_ref(), vcs, file.commit_config().clone())?;
42 let projects = file.projects().iter();
43 let old_tags = find_old_tags(projects, file.prev_tag(), &repo)?;
44 let state = CurrentState::new(root, old_tags);
45 let current = Config::new(state, file);
46
47 let last_commits = find_last_commits(¤t, &repo)?;
48 let next = StateWrite::new();
49 let user_prefs = read_env_prefs()?;
50
51 Ok(Mono { current, next, last_commits, repo, user_prefs })
52 }
53
54 pub fn check_branch(&self) -> std::result::Result<(), (String, String)> {
55 if let Ok(branch_name) = self.repo.branch_name() {
56 if let Some(branch_name) = branch_name {
57 if let Some(cfg_name) = self.current.branch() {
58 if branch_name != cfg_name {
59 Err((cfg_name.clone(), branch_name.clone()))
60 } else {
61 Ok(())
62 }
63 } else {
64 Ok(())
65 }
66 } else if let Some(cfg_name) = self.current.branch() {
67 Err((cfg_name.clone(), "((No branch))".to_string()))
68 } else {
69 Ok(())
70 }
71 } else {
72 Ok(())
73 }
74 }
75
76 pub fn write_changelogs(&mut self) -> Result<()> { self.next.write_changelogs() }
77
78 pub fn commit(&mut self, advance_prev: bool, pause: bool) -> Result<()> {
79 self.next.commit(
80 &self.repo,
81 CommitArgs::new(
82 self.current.prev_tag(),
83 &self.last_commits,
84 self.current.old_tags().current(),
85 advance_prev,
86 &self.current.hooks(),
87 pause
88 )
89 )
90 }
91
92 pub fn get_project(&self, id: &ProjectId) -> Result<&Project> {
93 self.current.get_project(id).ok_or_else(|| bad!("No such project {}", id))
94 }
95
96 pub fn write_chains(&mut self, ids: &[(ProjectId, ProjectId)], vers: &HashMap<ProjectId, String>) -> Result<()> {
97 for (id, dpid) in ids {
98 let dproj =
99 self.current.get_project(dpid).ok_or_else(|| bad!("No such dependent {} project for {}.", dpid, id))?;
100 let deps = dproj.depends().get(id).ok_or_else(|| bad!("No such depends {} in project {}.", id, dpid))?;
101 let val = vers.get(id).ok_or_else(|| bad!("No new value for {}.", id))?;
102 deps.write_values(&mut self.next, dproj.root(), val, dpid)?;
103 }
104 Ok(())
105 }
106
107 pub fn diff(&self) -> Result<Analysis> {
108 let prev_config = self.current.slice_to_prev(&self.repo)?;
109
110 let curt_annotate = self.current.annotate()?;
111 let prev_annotate = prev_config.annotate()?;
112
113 Ok(analyze(prev_annotate, curt_annotate))
114 }
115
  /// The current configuration.
  pub fn config(&self) -> &Config<CurrentState> { &self.current }
  /// The underlying repository handle.
  pub fn repo(&self) -> &Repo { &self.repo }
118
119 pub fn set_by_id(&mut self, id: &ProjectId, val: &str) -> Result<()> {
120 self.do_project_write(id, move |p, n| p.set_value(n, val))
121 }
122
123 pub fn set_by_name(&mut self, name: &str, val: &str) -> Result<()> {
124 let id = self.current.find_unique(name)?.clone();
125 self.set_by_id(&id, val)
126 }
127
128 pub fn set_by_exact_name(&mut self, name: &str, val: &str) -> Result<()> {
129 let id = self.current.find_exact(name)?.clone();
130 self.set_by_id(&id, val)
131 }
132
133 pub fn set_by_only(&mut self, val: &str) -> Result<()> {
134 if self.current.projects().len() != 1 {
135 bail!("No solo project.");
136 }
137 let id = self.current.projects().first().unwrap().id().clone();
138 self.set_by_id(&id, val)
139 }
140
141 pub fn forward_by_id(&mut self, id: &ProjectId, val: &str) -> Result<()> {
142 self.do_project_write(id, move |p, n| p.forward_tag(n, val))
143 }
144
145 pub async fn write_changelog(
146 &mut self, id: &ProjectId, changelog: &Changelog, new_vers: &str
147 ) -> Result<Option<PathBuf>> {
148 let proj = self.current.get_project(id).ok_or_else(|| bad!("No such project {}", id))?;
149 proj.write_changelog(&mut self.next, changelog, new_vers).await
150 }
151
152 fn do_project_write<F, T>(&mut self, id: &ProjectId, f: F) -> Result<T>
153 where
154 F: FnOnce(&Project, &mut StateWrite) -> Result<T>
155 {
156 let proj = self.current.get_project(id).ok_or_else(|| bad!("No such project {}", id))?;
157 f(proj, &mut self.next)
158 }
159
160 pub fn check(&self) -> Result<()> {
161 for project in self.current.projects() {
162 project.check(self.current.state_read())?;
163 }
164 Ok(())
165 }
166
167 pub async fn keyed_files(&self) -> Result<impl Iterator<Item = Result<(String, String)>> + '_> {
168 let changes = self.changes().await?;
169 let prs = changes.into_groups().into_values().filter(|pr| !pr.best_guess());
170
171 let mut vec = Vec::new();
172 for pr in prs {
173 vec.push(pr_keyed_files(&self.repo, pr));
174 }
175
176 Ok(vec.into_iter().flatten())
177 }
178
179 pub async fn build_plan(&self) -> Result<Plan> {
180 let mut plan = PlanBuilder::create(&self.repo, self.current.file(), self.user_prefs.auth());
181
182 for pr in self.changes().await?.groups().values() {
184 plan.start_pr(pr)?;
185 for commit in pr.included_commits() {
186 plan.start_commit(commit.clone())?;
187 for file in commit.files() {
188 plan.start_file(file)?;
189 plan.finish_file()?;
190 }
191 plan.finish_commit()?;
192 }
193 plan.finish_pr()?;
194 }
195
196 plan.handle_deps()?;
198
199 plan.sort_and_dedup()?;
201
202 Ok(plan.build())
203 }
204
205 pub async fn changes(&self) -> Result<Changes> {
206 let base = FromTagBuf::new(self.current.prev_tag().to_string(), true);
207 changes(&self.user_prefs.auth, &self.repo, base, "HEAD".into()).await
208 }
209}
210
211fn read_env_prefs() -> Result<UserPrefs> {
213 read_user_prefs().map(|mut prefs| {
214 if let Ok(token) = std::env::var("GITHUB_TOKEN") {
215 if let Some(auth) = prefs.auth_mut() {
216 auth.set_github_token(Some(token))
217 } else {
218 prefs.auth = Some(Auth::new(Some(token)));
219 }
220 }
221 prefs
222 })
223}
224
225fn read_user_prefs() -> Result<UserPrefs> {
226 let homefile = dirs::home_dir().map(|h| h.join(USER_PREFS_DIR).join(USER_PREFS_FILE));
227 let homefile = match homefile {
228 Some(f) => f,
229 None => return Ok(Default::default())
230 };
231 if !homefile.exists() {
232 return Ok(Default::default());
233 }
234
235 let user_prefs: UserPrefs = toml::from_str(&std::fs::read_to_string(homefile)?)?;
236 Ok(user_prefs)
237}
238
/// User-level preferences read from `~/.versio/prefs.toml`.
#[derive(Deserialize, Debug, Default)]
struct UserPrefs {
  auth: Option<Auth>  // optional auth settings (e.g. GitHub token)
}

impl UserPrefs {
  /// The configured auth, if any.
  fn auth(&self) -> &Option<Auth> { &self.auth }
  /// Mutable access to the configured auth.
  fn auth_mut(&mut self) -> &mut Option<Auth> { &mut self.auth }
}
248
249fn find_last_commits(current: &Config<CurrentState>, repo: &Repo) -> Result<HashMap<ProjectId, String>> {
251 let prev_spec = current.prev_tag();
252 let mut last_commits = LastCommitBuilder::create(repo, current);
253
254 for commit in line_commits_head(repo, FromTag::new(prev_spec, true))? {
256 last_commits.start_line_commit(&commit)?;
257 for file in commit.files() {
258 last_commits.start_line_file(file)?;
259 last_commits.finish_line_file()?;
260 }
261 last_commits.finish_line_commit()?;
262 }
263
264 let result = last_commits.build();
265 trace!("Found last commits: {:?}", result);
266 result
267}
268
/// Yield `(commit kind, filename)` pairs for every commit in the PR's
/// range, skipping commits the PR explicitly excludes. Errors from listing
/// commits or files are surfaced as `Err` items in the stream.
fn pr_keyed_files(repo: &Repo, pr: FullPr) -> impl Iterator<Item = Result<(String, String)>> + '_ {
  let head_oid = match pr.head_oid() {
    Some(oid) => *oid,
    // No head OID: nothing to iterate over.
    None => return E3::C(empty())
  };

  let iter = repo.commits_between(pr.base_oid(), head_oid, false).map(move |cmts| {
    cmts
      .filter_map(move |cmt| match cmt {
        Ok(cmt) => {
          if pr.has_exclude(&cmt.id()) {
            // Commit excluded from the PR; skip it entirely.
            None
          } else {
            match cmt.files() {
              Ok(files) => {
                let kind = cmt.kind();
                Some(E2::A(files.map(move |f| Ok((kind.clone(), f)))))
              }
              // Couldn't list files: surface the error as one stream item.
              Err(e) => Some(E2::B(once(Err(e))))
            }
          }
        }
        Err(e) => Some(E2::B(once(Err(e))))
      })
      .flatten()
  });

  match iter {
    Ok(iter) => E3::A(iter),
    Err(e) => E3::B(once(Err(e)))
  }
}
301
302pub struct PlanInfo {
305 failed_commits: BTreeSet<CommitInfoBuf>
306}
307
308impl Default for PlanInfo {
309 fn default() -> PlanInfo { PlanInfo::new() }
310}
311
312impl PlanInfo {
313 pub fn new() -> PlanInfo { PlanInfo { failed_commits: BTreeSet::new() } }
314 pub fn failed_commits(&self) -> &BTreeSet<CommitInfoBuf> { &self.failed_commits }
315 pub fn add_failed_commit(&mut self, failure: CommitInfoBuf) { self.failed_commits.insert(failure); }
316}
317
318pub struct Plan {
319 incrs: HashMap<ProjectId, (Size, Changelog)>, ineffective: Vec<LoggedPr>, chain_writes: Vec<(ProjectId, ProjectId)>,
322 info: PlanInfo
323}
324
325impl Plan {
326 pub fn incrs(&self) -> &HashMap<ProjectId, (Size, Changelog)> { &self.incrs }
327 pub fn ineffective(&self) -> &[LoggedPr] { &self.ineffective }
328 pub fn chain_writes(&self) -> &[(ProjectId, ProjectId)] { &self.chain_writes }
329 pub fn info(&self) -> &PlanInfo { &self.info }
330}
331
332pub struct Changelog {
333 entries: Vec<ChangelogEntry>
334}
335
336pub enum ChangelogEntry {
337 Pr(LoggedPr, Size),
338 Dep(ProjectId, String)
339}
340
341impl Changelog {
342 pub fn empty() -> Changelog { Changelog { entries: Vec::new() } }
343 pub fn entries(&self) -> &[ChangelogEntry] { &self.entries }
344 pub fn add_entry(&mut self, pr: LoggedPr, size: Size) { self.entries.push(ChangelogEntry::Pr(pr, size)); }
345
346 pub fn add_dep(&mut self, id: ProjectId, name: impl ToString) {
347 self.entries.push(ChangelogEntry::Dep(id, name.to_string()));
348 }
349
350 pub fn is_empty(&self) -> bool { self.entries.is_empty() }
351}
352
/// A PR captured for changelog purposes, along with its logged commits.
pub struct LoggedPr {
  number: u32,
  title: String,
  _closed_at: DateTime<FixedOffset>,
  discovery_order: usize,
  commits: Vec<LoggedCommit>,  // filled in as commits are processed
  url: Option<String>          // web URL, when GitHub info is available
}

impl LoggedPr {
  /// Capture the PR's metadata; commits are appended later.
  pub fn capture(pr: &FullPr, url: Option<String>) -> LoggedPr {
    LoggedPr {
      number: pr.number(),
      title: pr.title().to_string(),
      _closed_at: *pr.closed_at(),
      discovery_order: pr.discovery_order(),
      commits: Vec::new(),
      url
    }
  }

  pub fn number(&self) -> u32 { self.number }
  pub fn title(&self) -> &str { &self.title }
  pub fn _closed_at(&self) -> &DateTime<FixedOffset> { &self._closed_at }
  pub fn discovery_order(&self) -> usize { self.discovery_order }
  pub fn commits(&self) -> &[LoggedCommit] { &self.commits }
  pub fn url(&self) -> &Option<String> { &self.url }
}
381
/// One commit logged under a `LoggedPr`, with its computed size and flags
/// set during plan building.
pub struct LoggedCommit {
  oid: String,
  summary: String,
  message: String,
  size: Size,
  applies: bool,    // set when the commit covers a file of the project
  duplicate: bool,  // set when the same oid already appeared in an earlier entry
  url: Option<String>
}

impl LoggedCommit {
  /// A new logged commit; starts neither applying nor duplicate.
  pub fn new(oid: String, summary: String, message: String, size: Size, url: Option<String>) -> LoggedCommit {
    LoggedCommit { oid, summary, message, size, applies: false, duplicate: false, url }
  }

  pub fn applies(&self) -> bool { self.applies }
  pub fn duplicate(&self) -> bool { self.duplicate }
  /// A commit counts toward a PR's size only if it applies and isn't a duplicate.
  pub fn included(&self) -> bool { self.applies && !self.duplicate }
  pub fn oid(&self) -> &str { &self.oid }
  pub fn summary(&self) -> &str { &self.summary }
  pub fn message(&self) -> &str { &self.message }
  pub fn size(&self) -> Size { self.size }
  pub fn url(&self) -> &Option<String> { &self.url }
}
406
/// Streaming builder that accumulates a `Plan` while walking PRs, commits
/// and files in order (see `Mono::build_plan`).
struct PlanBuilder<'s> {
  on_pr_sizes: HashMap<ProjectId, LoggedPr>,     // per-project capture of the PR in progress
  on_ineffective: Option<LoggedPr>,              // capture kept in case the PR applies nowhere
  on_commit: Option<CommitInfoBuf>,              // commit currently being processed
  prev: Slicer<'s>,                              // historical view of the config file
  current: &'s ConfigFile,                       // config as of HEAD
  incrs: HashMap<ProjectId, (Size, Changelog)>,  // accumulated per-project results
  ineffective: Vec<LoggedPr>,                    // PRs that applied to no project
  github_info: Option<GithubInfo>,               // used to build commit/PR URLs, if available
  chain_writes: Vec<(ProjectId, ProjectId)>,     // dependency edges seen in handle_deps
  info: PlanInfo                                 // auxiliary info (e.g. failed commits)
}
421
422impl<'s> PlanBuilder<'s> {
423 fn create(repo: &'s Repo, current: &'s ConfigFile, auth: &Option<Auth>) -> PlanBuilder<'s> {
424 let prev = Slicer::init(repo);
425 let github_info = repo.github_info(auth).ok();
426 PlanBuilder {
427 on_pr_sizes: HashMap::new(),
428 on_ineffective: None,
429 on_commit: None,
430 prev,
431 current,
432 incrs: HashMap::new(),
433 ineffective: Vec::new(),
434 github_info,
435 chain_writes: Vec::new(),
436 info: PlanInfo::new()
437 }
438 }
439
440 pub fn start_pr(&mut self, pr: &FullPr) -> Result<()> {
441 trace!(
442 "planning PR {} with {}.",
443 pr.number(),
444 self.github_info.as_ref().map(|gh| gh.repo_name()).unwrap_or("<no gh>")
445 );
446 let url = self
447 .github_info
448 .as_ref()
449 .map(|gh| format!("https://github.com/{}/{}/pull/{}", gh.owner_name(), gh.repo_name(), pr.number()));
450 self.on_pr_sizes =
451 self.current.projects().iter().map(|p| (p.id().clone(), LoggedPr::capture(pr, url.clone()))).collect();
452 self.on_ineffective = Some(LoggedPr::capture(pr, url));
453 Ok(())
454 }
455
  /// Close out the current PR: for each project, fold the PR's largest
  /// applicable commit size into that project's increment and changelog.
  /// A PR with no applicable commits anywhere is recorded as ineffective.
  pub fn finish_pr(&mut self) -> Result<()> {
    trace!("planning PR done.");
    let mut found = false;
    for (proj_id, logged_pr) in self.on_pr_sizes.drain() {
      let (size, changelog) = self.incrs.entry(proj_id).or_insert((Size::Empty, Changelog::empty()));
      // Largest size among the commits that actually applied to this project.
      let pr_size = logged_pr.commits.iter().filter(|c| c.applies).map(|c| c.size).max();
      if let Some(pr_size) = pr_size {
        found = true;
        *size = max(*size, pr_size);
        changelog.add_entry(logged_pr, pr_size);
      }
    }

    // Set in `start_pr`; the unwrap expects start_pr to have run first.
    let ineffective = self.on_ineffective.take().unwrap();
    if !found {
      self.ineffective.push(ineffective);
    }

    Ok(())
  }
476
  /// Begin processing one commit: slice the previous config to this commit,
  /// compute the commit's size for every project still in play, and log a
  /// `LoggedCommit` under each project's current `LoggedPr`.
  pub fn start_commit(&mut self, commit: CommitInfoBuf) -> Result<()> {
    let id = commit.id().to_string();
    let kind = commit.kind().to_string();
    let summary = commit.summary().to_string();
    let msg = commit.message().to_string();
    // Re-point the historical view at this commit's tree.
    self.prev.slice_to(FromTagBuf::new(id.clone(), false))?;

    let url = self
      .github_info
      .as_ref()
      .map(|gh| format!("https://github.com/{}/{}/commit/{}", gh.owner_name(), gh.repo_name(), id));
    trace!("  planning commit {} at {}.", id, url.as_deref().unwrap_or("<no url>"));

    for (proj_id, logged_pr) in &mut self.on_pr_sizes {
      if let Some(cur_project) = self.current.get_project(proj_id) {
        // Size is derived from the commit kind — see `Project::size`.
        let size = cur_project.size(self.current.sizes(), &kind)?;
        if size.is_failure() {
          self.info.add_failed_commit(commit.clone());
        }
        logged_pr.commits.push(LoggedCommit::new(id.clone(), summary.clone(), msg.clone(), size, url.clone()));
      }
    }

    self.on_commit = Some(commit);
    Ok(())
  }
503
504 pub fn finish_commit(&mut self) -> Result<()> {
505 trace!(" planning commit done.");
506 Ok(())
507 }
508
  /// Process one file of the current commit: every previous-config project
  /// that still exists and covers this path gets its `LoggedCommit` for
  /// this commit marked as applying.
  pub fn start_file(&mut self, path: &str) -> Result<()> {
    trace!("    planning file {}.", path);
    let commit = self.on_commit.as_ref().ok_or_else(|| bad!("Not on a commit"))?;
    let commit_id = commit.id();

    for prev_project in self.prev.file()?.projects() {
      // Only projects present at HEAD have an entry in on_pr_sizes.
      if let Some(logged_pr) = self.on_pr_sizes.get_mut(prev_project.id()) {
        trace!("      vs current project {}.", prev_project.id());
        if prev_project.does_cover(path)? {
          // `start_commit` pushed a LoggedCommit for every project, so the
          // find/unwrap expects this commit id to be present.
          let LoggedCommit { applies, .. } = logged_pr.commits.iter_mut().find(|c| c.oid == commit_id).unwrap();
          *applies = true;
          trace!("        covered.");
        } else {
          trace!("        not covered.");
        }
      } else {
        trace!("      project {} doesn't currently exist.", prev_project.id());
      }
    }
    Ok(())
  }
530
  /// Per-file teardown; currently nothing to do.
  pub fn finish_file(&mut self) -> Result<()> { Ok(()) }
532
  /// Propagate sizes through the dependency graph, Kahn-style: start from
  /// projects with no dependencies, convert each project's size through its
  /// dependents' `Depends` settings, and enqueue a dependent once nothing
  /// else still depends on it.
  pub fn handle_deps(&mut self) -> Result<()> {
    // Projects ready to process (all of their dependencies handled).
    let mut queue: VecDeque<ProjectId> = VecDeque::new();

    // dep_id -> (dependent project id -> that project's Depends on dep_id)
    let mut dependents: HashMap<ProjectId, HashMap<ProjectId, Depends>> = HashMap::new();
    for project in self.current.projects() {
      for (dep_id, dep) in project.depends() {
        dependents.entry(dep_id.clone()).or_default().insert(project.id().clone(), dep.clone());
      }

      if project.depends().is_empty() {
        queue.push_back(project.id().clone());
      }
    }

    while let Some(id) = queue.pop_front() {
      let size = self.incrs.get(&id).map(|s| s.0).unwrap_or(Size::Empty);
      let depds: Option<HashMap<ProjectId, Depends>> = dependents.get(&id).cloned();
      if let Some(depds) = depds {
        for (depd_id, dep) in depds {
          // Mark this edge as handled before propagating the size.
          dependents.get_mut(&id).unwrap().remove(&depd_id);
          let converted_size = dep.size().convert(size);
          if converted_size > Size::Empty {
            let (val, ch_log) = &mut self.incrs.entry(depd_id.clone()).or_insert((Size::Empty, Changelog::empty()));
            *val = max(*val, converted_size);
            // NOTE(review): assumes every processed id exists in the current
            // projects; the unwrap would panic otherwise.
            let project = self.current.projects().iter().find(|p| p.id() == &id).unwrap();
            ch_log.add_dep(id.clone(), project.name());
          }

          self.chain_writes.push((id.clone(), depd_id.clone()));

          // Queue the dependent only once no remaining project depends on it.
          if dependents.values().all(|ds| !ds.contains_key(&depd_id)) {
            queue.push_back(depd_id);
          }
        }
      }
    }

    Ok(())
  }
573
  /// Sort each project's changelog — Dep entries first (ordered by project
  /// id), then PR entries by descending discovery order — then mark commit
  /// oids already seen in an earlier entry as duplicates, and recompute
  /// each PR entry's size from only its included commits.
  pub fn sort_and_dedup(&mut self) -> Result<()> {
    for (.., changelog) in self.incrs.values_mut() {
      changelog.entries.sort_by(|entry1, entry2| match entry1 {
        ChangelogEntry::Pr(pr1, _) => match entry2 {
          // Operands deliberately reversed: higher discovery order sorts first.
          ChangelogEntry::Pr(pr2, _) => pr2.discovery_order().cmp(&pr1.discovery_order()),
          _ => Ordering::Greater
        },
        ChangelogEntry::Dep(pr_id1, _) => match entry2 {
          ChangelogEntry::Dep(pr_id2, _) => pr_id1.to_string().cmp(&pr_id2.to_string()),
          _ => Ordering::Less
        }
      });

      // Track oids across all of this changelog's entries so a commit only
      // counts toward the first entry it appears in.
      let mut seen_commits = HashSet::new();
      for entry in &mut changelog.entries {
        if let ChangelogEntry::Pr(pr, size) = entry {
          for LoggedCommit { oid, duplicate, .. } in &mut pr.commits {
            if seen_commits.contains(oid) {
              *duplicate = true;
            }
            seen_commits.insert(oid.clone());
          }
          *size = pr.commits().iter().filter(|c| c.included()).map(|c| c.size).max().unwrap_or(Size::Empty);
        }
      }
    }
    Ok(())
  }
602
603 pub fn build(self) -> Plan {
604 Plan { incrs: self.incrs, ineffective: self.ineffective, chain_writes: self.chain_writes, info: self.info }
605 }
606}
607
/// Streaming builder used by `find_last_commits` to record, per project,
/// the commit that last touched one of its covered files.
struct LastCommitBuilder<'s, C: StateRead> {
  on_line_commit: Option<String>,           // id of the commit being scanned
  last_commits: HashMap<ProjectId, String>, // first matching commit per project
  prev: Slicer<'s>,                         // historical view of the config file
  current: &'s Config<C>                    // config as of HEAD
}
614
615impl<'s, C: StateRead> LastCommitBuilder<'s, C> {
616 fn create(repo: &'s Repo, current: &'s Config<C>) -> LastCommitBuilder<'s, C> {
617 let prev = Slicer::init(repo);
618 LastCommitBuilder { on_line_commit: None, last_commits: HashMap::new(), prev, current }
619 }
620
621 pub fn start_line_commit(&mut self, commit: &CommitInfoBuf) -> Result<()> {
622 let id = commit.id().to_string();
623 self.on_line_commit = Some(id.clone());
624 self.prev.slice_to(FromTagBuf::new(id, false))?;
625 Ok(())
626 }
627
628 pub fn finish_line_commit(&mut self) -> Result<()> { Ok(()) }
629
630 pub fn start_line_file(&mut self, path: &str) -> Result<()> {
631 let commit_id = self.on_line_commit.as_ref().ok_or_else(|| bad!("Not on a line commit"))?;
632
633 for prev_project in self.prev.file()?.projects() {
634 let proj_id = prev_project.id();
635 if self.current.get_project(proj_id).is_some()
636 && prev_project.does_cover(path)?
637 && !self.last_commits.contains_key(proj_id)
638 {
639 self.last_commits.insert(proj_id.clone(), commit_id.clone());
640 }
641 }
642 Ok(())
643 }
644
645 pub fn finish_line_file(&mut self) -> Result<()> { Ok(()) }
646
647 pub fn build(self) -> Result<HashMap<ProjectId, String>> { Ok(self.last_commits) }
648}
649
/// A view of the repo's config at some historical commit: starts as the
/// plain repo (`Orig`) and becomes a sliced config once `slice_to` runs.
#[allow(clippy::large_enum_variant)]
enum Slicer<'r> {
  Orig(&'r Repo),
  Slice(FsConfig<PrevFiles<'r>>)
}
655
impl<'r> Slicer<'r> {
  /// Start un-sliced, pointing at the live repo.
  pub fn init(repo: &'r Repo) -> Slicer<'r> { Slicer::Orig(repo) }

  /// The config file at the sliced commit; errors if `slice_to` hasn't
  /// been called yet.
  pub fn file(&self) -> Result<&ConfigFile> {
    match self {
      Slicer::Slice(fsc) => Ok(fsc.file()),
      _ => err!("Slicer not sliced")
    }
  }

  /// Re-point this slicer at the given tag/commit, slicing from the repo on
  /// first use and re-slicing an existing slice thereafter.
  pub fn slice_to(&mut self, id: FromTagBuf) -> Result<()> {
    *self = Slicer::Slice(match self {
      Slicer::Orig(repo) => FsConfig::from_slice(repo.slice(id))?,
      Slicer::Slice(fsc) => fsc.slice_to(id)?
    });
    Ok(())
  }
}
674
675fn find_old_tags<'s, I: Iterator<Item = &'s Project>>(projects: I, prev_tag: &str, repo: &Repo) -> Result<OldTags> {
676 let mut by_proj_oid = HashMap::new(); let mut proj_ids = HashSet::new();
678
679 for proj in projects {
684 proj_ids.insert(proj.id().clone());
685 for fnmatch in tag_fnmatches(proj) {
686 trace!("Searching tags for proj {} matching \"{}\".", proj.id(), fnmatch);
687 for tag in repo.tag_names(Some(fnmatch.as_str()))?.iter().flatten() {
688 let oid = repo.revparse_oid(FromTag::new(&format!("{}^{{}}", tag), false))?;
689 trace!("Found proj {} tag {} at {}.", proj.id(), tag, oid);
690 let by_id = by_proj_oid
691 .entry(proj.id().clone())
692 .or_insert_with(|| (proj.tag_prefix_separator().to_string(), HashMap::new()));
693 by_id.1.entry(oid).or_insert_with(Vec::new).push(tag.to_string());
694 }
695 }
696 }
697
698 let mut current = HashMap::new();
699 for commit_oid in repo.commits_to_head(FromTag::new(prev_tag, true), false)?.map(|c| c.map(|c| c.id())) {
700 let commit_oid = commit_oid?;
701 by_proj_oid.retain(|proj_id, (sep, by_id)| {
702 if let Some(tags) = by_id.remove(&commit_oid) {
703 let mut versions = tags_to_versions(sep, &tags);
704 versions.sort_unstable_by(version_sort);
705 current.insert(proj_id.clone(), versions[0].clone());
706 false
707 } else {
708 true
709 }
710 });
711 }
712
713 let prev = pull_from_annotation(repo, prev_tag)?;
714 fill_from_prev(&prev, &proj_ids, &mut current);
715
716 let old_tags = OldTags::new(current, prev);
717 trace!("Found old tags: {:?}", old_tags);
718 Ok(old_tags)
719}
720
/// Parse the JSON version map out of the previous tag's annotation; when no
/// annotation exists, yields the default `PrevTagMessage`'s version map
/// (presumably empty — see `PrevTagMessage::into_versions`).
fn pull_from_annotation(repo: &Repo, prev_tag: &str) -> Result<HashMap<ProjectId, String>> {
  repo
    .annotation_of(prev_tag)
    .map(|anno| {
      // A signed tag appends a PGP signature after the JSON body; clip it off.
      let clip = if let Some(p) = anno.find("\n-----BEGIN PGP SIGNATURE-----") { &anno[.. p] } else { anno.as_str() };
      serde_json::from_str::<PrevTagMessage>(clip)
    })
    .transpose()
    .map_err(|e| e.into())
    .map(|o| o.unwrap_or_default().into_versions())
}
733
734fn fill_from_prev(
735 prev: &HashMap<ProjectId, String>, proj_ids: &HashSet<ProjectId>, current: &mut HashMap<ProjectId, String>
736) {
737 for id in proj_ids {
738 if !current.contains_key(id) {
739 if let Some(tag) = prev.get(id) {
740 current.insert(id.clone(), tag.clone());
741 }
742 }
743 }
744}
745
/// Build the fnmatch-style glob patterns used to search a project's tags:
/// one "v<major>.*" per configured tag major (or just "v*"), optionally
/// prefixed with "<tag_prefix><separator>".
fn tag_fnmatches(proj: &Project) -> impl Iterator<Item = String> + '_ {
  let majors = proj.tag_majors();

  let majors_v = if let Some(majors) = majors {
    E2::A(majors.iter().map(|major| format!("v{}.*", major)))
  } else {
    E2::B(once("v*".to_string()))
  };

  let sep = proj.tag_prefix_separator();
  let tag_prefix = proj.tag_prefix().as_deref();
  match tag_prefix {
    // No prefix configured: yield no patterns (presumably an untagged project).
    None => E3::A(empty()),
    // Empty prefix: bare "v*"-style patterns.
    Some("") => E3::B(majors_v),
    // Otherwise "<prefix><sep>v*"-style patterns.
    Some(pref) => E3::C(majors_v.map(move |major_v| format!("{}{}{}", pref, sep, major_v)))
  }
}
767
/// Convert tags (e.g. "proj-v1.2.3" or "v1.2.3") into bare version strings,
/// dropping any tag whose remainder doesn't parse as a version.
fn tags_to_versions(prefix_sep: &str, tags: &[String]) -> Vec<String> {
  tags
    .iter()
    .map(|tag| {
      // `v` points just past the last separator (or 0 when there is none);
      // the extra `+ 1` in the slice then skips the leading "v" of the
      // version itself.
      // NOTE(review): assumes a single-byte separator and a "v"-prefixed
      // version; a multi-byte separator would mis-slice — confirm upstream.
      let v = tag.rfind(prefix_sep).map(|d| d + 1).unwrap_or(0);
      tag[v + 1 ..].to_string()
    })
    .filter(|v| Size::parts(v).is_ok())
    .collect()
}
778
779#[allow(clippy::ptr_arg)]
780fn version_sort(a: &String, b: &String) -> Ordering {
781 let p1 = Size::parts(a);
782 let p2 = Size::parts(b);
783
784 if let Ok(p1) = p1 {
785 if let Ok(p2) = p2 {
786 if p1[0] < p2[0] {
787 Ordering::Greater
788 } else if p1[0] > p2[0] {
789 Ordering::Less
790 } else if p1[1] < p2[1] {
791 Ordering::Greater
792 } else if p1[1] > p2[1] {
793 Ordering::Less
794 } else if p1[2] < p2[2] {
795 Ordering::Greater
796 } else if p1[2] > p2[2] {
797 Ordering::Less
798 } else {
799 Ordering::Equal
800 }
801 } else {
802 Ordering::Greater
803 }
804 } else if p2.is_ok() {
805 Ordering::Less
806 } else {
807 Ordering::Equal
808 }
809}