use std::collections::hash_map::HashMap;
use std::fmt;
use std::iter;
use std::sync::{Arc, OnceLock};
use std::thread;
use std::time::Duration;

use chrono::Utc;
use ghostflow::host::*;
use git_workarea::CommitId;
use itertools::Itertools;
use regex::Regex;
use serde::de::DeserializeOwned;
use thiserror::Error;
use url::Url;

pub use gitlab;

use gitlab::api::{self, Query};

mod types;

#[derive(Debug, Clone, Copy, PartialEq)]
enum Retry {
    Yes,
    No,
}

/// The maximum number of backoff attempts before giving up.
const BACKOFF_LIMIT: usize = if cfg!(test) { 2 } else { 5 };
/// The initial delay between backoff attempts.
const BACKOFF_START: Duration = Duration::from_secs(1);
/// The factor by which the delay grows after each attempt.
const BACKOFF_SCALE: u32 = 2;

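/// Whether an API error should be retried with backoff; only `404 Not Found`
/// responses are treated as transient.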
fn should_backoff<E>(err: &api::ApiError<E>) -> bool
where
    E: std::error::Error + Send + Sync + 'static,
{
    match err {
        api::ApiError::GitlabWithStatus {
            status, ..
        } => {
            *status == http::StatusCode::NOT_FOUND
        },
        _ => false,
    }
}

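/// Call `tryf` until it succeeds, returns a non-retriable error, or
/// `BACKOFF_LIMIT` attempts have been made, sleeping between attempts with an
/// exponentially growing delay.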
fn retry_with_backoff<F, E, K>(mut tryf: F) -> Result<K, api::ApiError<E>>
where
    F: FnMut() -> Result<K, api::ApiError<E>>,
    E: std::error::Error + Send + Sync + 'static,
{
    iter::repeat_n((), BACKOFF_LIMIT)
        .scan(BACKOFF_START, |timeout, _| {
            match tryf() {
                Ok(r) => Some(Some(Ok(r))),
                Err(err) => {
                    if should_backoff(&err) {
                        thread::sleep(*timeout);
                        *timeout *= BACKOFF_SCALE;
                        Some(None)
                    } else {
                        Some(Some(Err(err)))
                    }
                },
            }
        })
        .flatten()
        .next()
        .unwrap_or_else(|| {
            let msg = "failed even after exponential backoff".into();
            Err(api::ApiError::GitlabWithStatus {
                status: http::StatusCode::NOT_FOUND,
                msg,
            })
        })
}

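/// The regex matching the system notes GitLab posts when a merge request
/// branch is updated with new commits.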
fn mr_update_re() -> &'static Regex {
    static CELL: OnceLock<Regex> = OnceLock::new();
    CELL.get_or_init(|| {
        Regex::new(
            "^[Aa]dded [0-9][0-9]* (new )?commits?:?\n\
             (\n<ul>(<li>[0-9a-f.]+ - .*?</li>)*</ul>|(\n\\* [0-9a-f.]+ - [^\n]*)*)\
             (\n\n\\[Compare with previous versions?\\]\\(.*\\))?\
             $",
        )
        .expect("invalid `mr_update_re` regex")
    })
}

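/// Convert a GitLab user into a ghostflow `User`.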
fn ghostflow_user(user: types::FullUser) -> User {
    User {
        handle: user.username,
        name: user.name,
        email: user.email,
    }
}

/// Convert a GitLab pipeline status into a ghostflow `PipelineState`.
fn ghostflow_pipeline_state(status: types::PipelineStatus) -> PipelineState {
    match status {
        types::PipelineStatus::Manual => PipelineState::Manual,
        types::PipelineStatus::Created
        | types::PipelineStatus::WaitingForResource
        | types::PipelineStatus::Preparing
        | types::PipelineStatus::WaitingForCallback
        | types::PipelineStatus::Pending
        | types::PipelineStatus::Scheduled
        | types::PipelineStatus::Running => PipelineState::InProgress,
        types::PipelineStatus::Canceling
        | types::PipelineStatus::Canceled
        | types::PipelineStatus::Skipped => PipelineState::Canceled,
        types::PipelineStatus::Failed => PipelineState::Failed,
        types::PipelineStatus::Success => PipelineState::Success,
    }
}

/// Convert a ghostflow commit status state into its GitLab equivalent.
fn gitlab_commit_status_state(
    state: CommitStatusState,
) -> api::projects::repository::commits::CommitStatusState {
    match state {
        CommitStatusState::Pending => {
            api::projects::repository::commits::CommitStatusState::Pending
        },
        CommitStatusState::Running => {
            api::projects::repository::commits::CommitStatusState::Running
        },
        CommitStatusState::Success => {
            api::projects::repository::commits::CommitStatusState::Success
        },
        CommitStatusState::Failed => api::projects::repository::commits::CommitStatusState::Failed,
    }
}

/// Convert a GitLab status state into a ghostflow `CommitStatusState`.
fn ghostflow_commit_status_state(state: types::StatusState) -> CommitStatusState {
    match state {
        types::StatusState::Manual
        | types::StatusState::Skipped
        | types::StatusState::Created
        | types::StatusState::Canceling
        | types::StatusState::Canceled
        | types::StatusState::Pending
        | types::StatusState::Scheduled => CommitStatusState::Pending,
        types::StatusState::Running => CommitStatusState::Running,
        types::StatusState::Success => CommitStatusState::Success,
        types::StatusState::Failed => CommitStatusState::Failed,
    }
}

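/// A trait for objects which may be referenced from GitLab comments and
/// descriptions.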
trait ReferenceTarget {
    /// The sigil used to refer to the object.
    fn sigil() -> char;
    /// The project-local ID of the object.
    fn id(&self) -> u64;
}

impl ReferenceTarget for types::Issue {
    fn sigil() -> char {
        '#'
    }

    fn id(&self) -> u64 {
        self.iid
    }
}

impl ReferenceTarget for types::MergeRequest {
    fn sigil() -> char {
        '!'
    }

    fn id(&self) -> u64 {
        self.iid
    }
}

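/// How specific a reference needs to be in order to resolve from a given
/// project.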
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
enum ReferenceLevel {
    /// A reference within the same project (`#1`).
    #[default]
    Project,
    /// A reference from a sibling project in the same namespace (`project#1`).
    Namespace,
    /// A reference from anywhere on the site (`namespace/project#1`).
    Site,
}

impl ReferenceLevel {
    fn between(source: &types::Project, target: &types::Project) -> Self {
        if source.id == target.id {
            ReferenceLevel::Project
        } else if source.namespace == target.namespace {
            ReferenceLevel::Namespace
        } else {
            ReferenceLevel::Site
        }
    }

    fn to<T>(self, project: &types::Project, target: &T) -> String
    where
        T: ReferenceTarget,
    {
        match self {
            ReferenceLevel::Project => format!("{}{}", T::sigil(), target.id()),
            ReferenceLevel::Namespace => format!("{}{}{}", project.path, T::sigil(), target.id()),
            ReferenceLevel::Site => {
                format!(
                    "{}/{}{}{}",
                    project.namespace.path,
                    project.path,
                    T::sigil(),
                    target.id(),
                )
            },
        }
    }
}

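/// A hosting service backed by the GitLab REST API.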
pub struct GitlabService {
    /// The GitLab API client.
    gitlab: gitlab::Gitlab,
    /// The user the service acts as.
    user: User,
    /// Whether the service user is an administrator or not.
    is_admin: bool,
    /// The domain used when filling in user information (see
    /// `types::User::for_domain`).
    domain: &'static str,
}

impl GitlabService {
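    /// Create a service from an authenticated GitLab client.
    ///
    /// A minimal usage sketch; the host and token here are placeholders:
    ///
    /// ```ignore
    /// let client = gitlab::Gitlab::new("gitlab.example.com", "ACCESS_TOKEN")?;
    /// let service = GitlabService::new(client)?;
    /// ```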
    pub fn new(gitlab: gitlab::Gitlab) -> Result<Self, HostingServiceError> {
        let endpoint = api::users::CurrentUser::builder().build().unwrap();
        let self_user: types::SelfUser =
            endpoint.query(&gitlab).map_err(HostingServiceError::host)?;
        let is_admin = self_user.is_admin;
        let gitlab_user = types::FullUser {
            username: self_user.username,
            name: self_user.name,
            email: self_user.email,
        };
        let user = ghostflow_user(gitlab_user);

        Ok(Self {
            user,
            gitlab,
            is_admin,
            domain: "gitlab.invalid",
        })
    }

    pub fn gitlab(&self) -> &gitlab::Gitlab {
        &self.gitlab
    }

    fn raw_query<Q, T>(&self, query: &Q) -> Result<T, api::ApiError<gitlab::RestError>>
    where
        Q: api::Query<T, gitlab::Gitlab>,
        T: DeserializeOwned,
    {
        query.query(&self.gitlab)
    }

    fn query<Q, T>(&self, query: &Q) -> Result<T, HostingServiceError>
    where
        Q: api::Query<T, gitlab::Gitlab>,
        T: DeserializeOwned,
    {
        self.raw_query(query).map_err(HostingServiceError::host)
    }

    fn full_project<'a, T>(&self, project: T) -> Result<types::Project, HostingServiceError>
    where
        T: Into<api::common::NameOrId<'a>>,
    {
        let endpoint = api::projects::Project::builder()
            .project(project)
            .build()
            .unwrap();
        self.query(&endpoint)
    }

    fn full_user_by_id(&self, id: u64) -> Result<types::User, HostingServiceError> {
        let endpoint = api::users::User::builder().user(id).build().unwrap();
        self.query(&endpoint)
    }

    fn full_user_by_name(&self, name: &str) -> Result<types::User, HostingServiceError> {
        let endpoint = api::users::Users::builder().username(name).build().unwrap();
        let users: Vec<types::User> = self.query(&endpoint)?;

        users
            .into_iter()
            .find(|user| user.username == name)
            .ok_or_else(|| HostingServiceError::host(GitlabServiceError::no_such_user(name.into())))
    }

    fn user(&self, id: u64) -> Result<User, HostingServiceError> {
        self.full_user_by_id(id)
            .map(|user: types::User| ghostflow_user(user.for_domain(self.domain)))
    }

    fn user_by_name(&self, name: &str) -> Result<User, HostingServiceError> {
        self.full_user_by_name(name)
            .map(|user| ghostflow_user(user.for_domain(self.domain)))
    }

    fn pipeline(
        &self,
        pipeline: types::SinglePipeline,
        repo: Repo,
        latest: Option<u64>,
    ) -> Pipeline {
        Pipeline {
            id: pipeline.id,
            state: ghostflow_pipeline_state(pipeline.status),
            commit: Commit {
                repo,
                id: CommitId::new(pipeline.sha),
                refname: pipeline.ref_,
                last_pipeline: latest,
            },
            user: ghostflow_user(pipeline.user.for_domain(self.domain)),
            archived: pipeline
                .archived
                .expect("archived should be determined by now"),
        }
    }

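    /// Determine whether a pipeline has been archived.
    ///
    /// Fetches the pipeline's jobs and marks the pipeline as archived if any
    /// of its jobs are archived; does nothing if this is already known.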
    fn determine_archived(
        &self,
        pipeline: &mut types::SinglePipeline,
        repo: &Repo,
    ) -> Result<(), HostingServiceError> {
        if pipeline.archived.is_none() {
            let endpoint = api::projects::pipelines::PipelineJobs::builder()
                .project(repo.name.as_str())
                .pipeline(pipeline.id)
                .build()
                .unwrap();
            let endpoint = api::paged(endpoint, api::Pagination::All);
            let jobs: Vec<types::PipelineJob> = self.query(&endpoint)?;
            let any_archived = jobs.into_iter().any(|j| j.archived);

            pipeline.archived = Some(any_archived);
        }

        Ok(())
    }

    fn job(&self, job: types::PipelineJob, repo: Repo) -> PipelineJob {
        PipelineJob {
            id: job.id,
            state: ghostflow_pipeline_state(job.status),
            repo,
            stage: Some(job.stage),
            name: job.name,
            user: ghostflow_user(job.user.for_domain(self.domain)),
            archived: job.archived,
        }
    }

    fn repo_from_project(&self, project: types::Project) -> Result<Repo, HostingServiceError> {
        let parent_project = if let Some(ref upstream) = project.forked_from_project {
            let parent_project = self.full_project(upstream.id)?;
            Some(Box::new(self.repo_from_project(parent_project)?))
        } else {
            None
        };

        Ok(Repo {
            name: project.path_with_namespace,
            url: project.ssh_url_to_repo,
            http_url: project.http_url_to_repo,
            forked_from: parent_project,
        })
    }

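    /// Look up a commit within a project.
    ///
    /// With `Retry::Yes`, `404 Not Found` responses are retried with backoff;
    /// this is used when the commit comes from another project and may not be
    /// visible yet.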
    fn commit_from_project(
        &self,
        project: types::Project,
        commit: &CommitId,
        retry: Retry,
    ) -> Result<Commit, HostingServiceError> {
        let endpoint = api::projects::repository::commits::Commit::builder()
            .project(project.id)
            .commit(commit.as_str())
            .build()
            .unwrap();
        let commit: types::Commit = if retry == Retry::Yes {
            retry_with_backoff(|| self.raw_query(&endpoint)).map_err(HostingServiceError::host)?
        } else {
            self.query(&endpoint)?
        };

        Ok(Commit {
            repo: self.repo_from_project(project)?,
            refname: None,
            id: CommitId::new(commit.id),
            last_pipeline: commit.last_pipeline.map(|pipe| pipe.id),
        })
    }

    fn gitlab_issue(
        &self,
        project: types::Project,
        issue: types::Issue,
        referrer: Option<&types::Project>,
    ) -> Result<Issue, HostingServiceError> {
        let reference = referrer.map_or(ReferenceLevel::Project, |source| {
            ReferenceLevel::between(source, &project)
        });

        Ok(Issue {
            reference: reference.to(&project, &issue),
            repo: self.repo_from_project(project)?,
            id: issue.iid,
            url: issue.web_url,
            labels: issue.labels,
        })
    }

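    /// Build a ghostflow `MergeRequest` from a project and merge request IID.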
    fn merge_request_from_project(
        &self,
        project: types::Project,
        id: u64,
    ) -> Result<MergeRequest, HostingServiceError> {
        let endpoint = api::projects::merge_requests::MergeRequest::builder()
            .project(project.id)
            .merge_request(id)
            .build()
            .unwrap();
        let mr: types::MergeRequest = endpoint
            .query(&self.gitlab)
            .map_err(HostingServiceError::host)?;
        let source_project = self.full_project(mr.source_project_id)?;
        let author = self.user(mr.author.id)?;

        let reference = ReferenceLevel::default().to(&project, &mr);

        let source_repo = self.repo_from_project(source_project.clone())?;
        let target_repo = self.repo_from_project(project.clone())?;

        let mr_commit = if let Some(sha) = &mr.sha {
            let commit_id = CommitId::new(sha);
            let source_commit = self.commit_from_project(source_project, &commit_id, Retry::No)?;
            // The commit may not have made it to the target project yet when
            // the merge request comes from another project, so retry there.
            let target_retry = if mr.source_project_id == project.id {
                Retry::No
            } else {
                Retry::Yes
            };
            let target_name = project.path_with_namespace.clone();
            let target_commit = self.commit_from_project(project, &commit_id, target_retry)?;

            if let Some(last_pipeline_id) = target_commit.last_pipeline {
                let endpoint = api::projects::pipelines::Pipeline::builder()
                    .project(target_name)
                    .pipeline(last_pipeline_id)
                    .build()
                    .unwrap();
                let pipeline: types::SinglePipeline = self.query(&endpoint)?;
                let mut commit = target_commit;
                commit.refname = pipeline.ref_;
                commit
            } else {
                let mut commit = source_commit;
                commit.refname = Some(mr.source_branch.clone());
                commit
            }
        } else {
            // The merge request has no commits yet.
            Commit {
                repo: source_repo.clone(),
                refname: Some(mr.source_branch.clone()),
                id: CommitId::new(""),
                last_pipeline: None,
            }
        };

        Ok(MergeRequest {
            source_repo: Some(source_repo),
            source_branch: mr.source_branch,
            target_repo,
            target_branch: mr.target_branch,
            id: mr.iid,
            url: mr.web_url,
            work_in_progress: mr.work_in_progress,
            description: mr.description.unwrap_or_default(),
            old_commit: None,
            commit: mr_commit,
            author,
            reference,
            remove_source_branch: mr.force_remove_source_branch.unwrap_or(false),
        })
    }

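    /// Convert merge request notes into comments, in chronological order.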
    fn sort_notes(&self, notes: Vec<types::Note>) -> Result<Vec<Comment>, HostingServiceError> {
        Ok(notes
            .into_iter()
            .map(|note| {
                Ok(Comment {
                    id: format!("{}", note.id),
                    is_system: note.system,
                    is_branch_update: note.system && mr_update_re().is_match(&note.body),
                    created_at: note.created_at,
                    author: self.user(note.author.id)?,
                    content: note
                        .body
                        // Strip zero-width spaces from the comment body.
                        .replace('\u{200b}', ""),
                })
            })
            .collect::<Result<Vec<_>, HostingServiceError>>()?
            .into_iter()
            .sorted_by(|a, b| a.created_at.cmp(&b.created_at))
            .collect())
    }
}

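/// Errors specific to the GitLab hosting service.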
#[derive(Debug, Error)]
#[non_exhaustive]
enum GitlabServiceError {
    #[error("failed to find a user named '{}'", name)]
    NoSuchUser { name: String },
    #[error("invalid repo URL")]
    InvalidUrl {
        #[source]
        source: url::ParseError,
    },
    #[error("invalid repo URL: could not set username")]
    UrlNoUsername,
    #[error("invalid repo URL: could not set password")]
    UrlNoPassword,
    #[error(
        "job `{}` is archived and may not be started; the pipeline needs to be recreated",
        name
    )]
    ArchivedJob { name: String },
}

impl GitlabServiceError {
    fn no_such_user(name: String) -> Self {
        GitlabServiceError::NoSuchUser {
            name,
        }
    }

    fn invalid_url(source: url::ParseError) -> Self {
        Self::InvalidUrl {
            source,
        }
    }

    fn url_no_username() -> Self {
        Self::UrlNoUsername
    }

    fn url_no_password() -> Self {
        Self::UrlNoPassword
    }

    fn archived_job(name: String) -> Self {
        Self::ArchivedJob {
            name,
        }
    }
}

impl From<GitlabServiceError> for HostingServiceError {
    fn from(gitlab: GitlabServiceError) -> Self {
        HostingServiceError::service(gitlab)
    }
}

impl HostingService for GitlabService {
    fn suppress_ci_push_option(&self, branch: &str) -> Option<String> {
        Some(format!("branch.{branch}.ci.skip"))
    }

    fn as_pipeline_service(self: Arc<Self>) -> Option<Arc<dyn HostedPipelineService>> {
        Some(self as Arc<dyn HostedPipelineService>)
    }

    fn service_user(&self) -> &User {
        &self.user
    }

    fn user(&self, project: &str, user: &str) -> Result<User, HostingServiceError> {
        let _ = self.full_project(project)?;
        self.user_by_name(user)
    }

    fn commit(&self, project: &str, commit: &CommitId) -> Result<Commit, HostingServiceError> {
        let project = self.full_project(project)?;
        self.commit_from_project(project, commit, Retry::No)
    }

    fn merge_request(&self, project: &str, id: u64) -> Result<MergeRequest, HostingServiceError> {
        let project = self.full_project(project)?;
        self.merge_request_from_project(project, id)
    }

    fn repo(&self, project: &str) -> Result<Repo, HostingServiceError> {
        let project = self.full_project(project)?;
        self.repo_from_project(project)
    }

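    /// Create a copy of `repo` whose push URL authenticates as `user` via a
    /// short-lived impersonation token; returns `Ok(None)` if the service
    /// user is not an administrator.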
    fn repo_as_user(
        &self,
        repo: &Repo,
        user: &User,
        reason: &str,
    ) -> Result<Option<Repo>, HostingServiceError> {
        if !self.is_admin {
            return Ok(None);
        }

        let url = Url::parse(&repo.http_url).map_err(GitlabServiceError::invalid_url)?;

        let token = {
            let gl_user = self.full_user_by_name(&user.handle)?;
            let token_name = format!("ghostflow-impersonation-{}-{}", user.handle, reason);
            let today = Utc::now().date_naive();
            let expires_at = iter::repeat_n((), 2).fold(today, |day, _| {
                if let Some(after) = day.succ_opt() {
                    after
                } else {
                    day
                }
            });
            let endpoint = api::users::impersonation_tokens::CreateImpersonationToken::builder()
                .user(gl_user.id)
                .name(token_name)
                .scope(api::users::impersonation_tokens::ImpersonationTokenScope::Api)
                .expires_at(expires_at)
                .build()
                .unwrap();
            let token: types::ImpersonationToken = self.query(&endpoint)?;
            token.token
        };

        let url_with_token = {
            let mut url = url;
            url.set_username("ghostflow")
                .map_err(|_| GitlabServiceError::url_no_username())?;
            url.set_password(Some(&token))
                .map_err(|_| GitlabServiceError::url_no_password())?;
            url
        };

        let mut new_repo = repo.clone();
        new_repo.url = url_with_token.into();

        Ok(Some(new_repo))
    }

    fn get_mr_comments(&self, mr: &MergeRequest) -> Result<Vec<Comment>, HostingServiceError> {
        let endpoint = api::projects::merge_requests::notes::MergeRequestNotes::builder()
            .project(&mr.target_repo.name)
            .merge_request(mr.id)
            .build()
            .unwrap();
        let endpoint = api::paged(endpoint, api::Pagination::All);
        let notes: Vec<types::Note> = self.query(&endpoint)?;

        self.sort_notes(notes)
    }

    fn post_mr_comment(&self, mr: &MergeRequest, content: &str) -> Result<(), HostingServiceError> {
        let endpoint = api::projects::merge_requests::notes::CreateMergeRequestNote::builder()
            .project(&mr.target_repo.name)
            .merge_request(mr.id)
            .body(content)
            .build()
            .unwrap();
        let endpoint = api::ignore(endpoint);
        self.query(&endpoint)
    }

    fn get_commit_statuses(
        &self,
        commit: &Commit,
    ) -> Result<Vec<CommitStatus>, HostingServiceError> {
        let endpoint = api::projects::repository::commits::CommitStatuses::builder()
            .project(commit.repo.name.as_str())
            .commit(commit.id.as_str())
            .build()
            .unwrap();
        let endpoint = api::paged(endpoint, api::Pagination::All);
        let statuses = self.query(&endpoint)?;

        Ok(statuses
            .into_iter()
            .map(move |status: types::CommitStatus| {
                CommitStatus {
                    state: ghostflow_commit_status_state(status.status),
                    author: ghostflow_user(status.author.for_domain(self.domain)),
                    refname: status.ref_,
                    name: status.name,
                    description: status.description.unwrap_or_default(),
                    target_url: status.target_url,
                }
            })
            .collect())
    }

    fn post_commit_status(&self, status: PendingCommitStatus) -> Result<(), HostingServiceError> {
        const REFS_HEADS_PREFIX: &str = "refs/heads/";
        const REFS_TAGS_PREFIX: &str = "refs/tags/";

        let refname = status.commit.refname.as_ref().map(|refname| {
            if let Some(head_name) = refname.strip_prefix(REFS_HEADS_PREFIX) {
                head_name
            } else if let Some(tag_name) = refname.strip_prefix(REFS_TAGS_PREFIX) {
                tag_name
            } else {
                refname.as_str()
            }
        });

        let mut builder = api::projects::repository::commits::CreateCommitStatus::builder();
        builder
            .project(status.commit.repo.name.as_str())
            .commit(status.commit.id.as_str())
            .state(gitlab_commit_status_state(status.state))
            .name(status.name)
            .description(status.description);

        if let Some(refname) = refname {
            builder.ref_(refname);
        }
        if let Some(target_url) = status.target_url {
            builder.target_url(target_url);
        }
        if let Some(last_pipeline) = status.commit.last_pipeline {
            builder.pipeline_id(last_pipeline);
        }

        let endpoint = builder.build().unwrap();
        let endpoint = api::ignore(endpoint);
        self.query(&endpoint)
    }

    fn get_mr_awards(&self, mr: &MergeRequest) -> Result<Vec<Award>, HostingServiceError> {
        let endpoint = api::projects::merge_requests::awards::MergeRequestAwards::builder()
            .project(&mr.target_repo.name)
            .merge_request(mr.id)
            .build()
            .unwrap();
        let endpoint = api::paged(endpoint, api::Pagination::All);

        self.query(&endpoint)?
            .into_iter()
            .map(|award: types::AwardEmoji| {
                let author = self.user(award.user.id)?;

                Ok(Award {
                    name: award.name,
                    author,
                })
            })
            .collect()
    }

    fn issues_closed_by_mr(&self, mr: &MergeRequest) -> Result<Vec<Issue>, HostingServiceError> {
        let target_name = &mr.target_repo.name;
        let target_project = self.full_project(target_name.as_str())?;

        let endpoint = api::projects::merge_requests::IssuesClosedBy::builder()
            .project(target_project.id)
            .merge_request(mr.id)
            .build()
            .unwrap();
        let endpoint = api::paged(endpoint, api::Pagination::All);
        let issues: Vec<types::Issue> = self.query(&endpoint)?;

        let projects = issues
            .iter()
            .map(|issue| issue.project_id)
            .unique()
            .map(|project_id| {
                self.full_project(project_id)
                    .map(|project| (project_id, project))
            })
            .collect::<Result<HashMap<_, _>, HostingServiceError>>();

        projects.and_then(|projects| {
            issues
                .into_iter()
                .map(|issue| {
                    let project = projects
                        .get(&issue.project_id)
                        .expect("the fetched project ID should exist");
                    self.gitlab_issue(project.clone(), issue, Some(&target_project))
                })
                .collect()
        })
    }

    fn add_issue_labels(&self, issue: &Issue, labels: &[&str]) -> Result<(), HostingServiceError> {
        let endpoint = {
            let mut endpoint = api::projects::issues::EditIssue::builder();
            endpoint.project(issue.repo.name.as_str()).issue(issue.id);

            for label in labels {
                endpoint.add_label(*label);
            }

            endpoint.build().unwrap()
        };
        let endpoint = api::ignore(endpoint);
        self.query(&endpoint)
    }

    fn remove_issue_labels(
        &self,
        issue: &Issue,
        labels: &[&str],
    ) -> Result<(), HostingServiceError> {
        let endpoint = {
            let mut endpoint = api::projects::issues::EditIssue::builder();
            endpoint.project(issue.repo.name.as_str()).issue(issue.id);

            for label in labels {
                endpoint.remove_label(*label);
            }

            endpoint.build().unwrap()
        };
        let endpoint = api::ignore(endpoint);
        self.query(&endpoint)
    }
}

impl HostedPipelineService for GitlabService {
    fn pipelines_for_mr(
        &self,
        mr: &MergeRequest,
    ) -> Result<Option<Vec<Pipeline>>, HostingServiceError> {
        let source_pipelines = if let Some(source_repo) = mr.source_repo.as_ref() {
            let project = self.full_project(source_repo.name.as_str())?;

            if project.builds_access_level != types::AccessLevel::Disabled {
                let endpoint = api::projects::pipelines::Pipelines::builder()
                    .project(source_repo.name.as_str())
                    .ref_(mr.source_branch.as_str())
                    .sha(mr.commit.id.as_str())
                    .build()
                    .unwrap();
                let endpoint = api::paged(endpoint, api::Pagination::All);
                let pipelines: Vec<types::Pipeline> = self.query(&endpoint)?;
                let repo = self.repo_from_project(project)?;
                let latest = pipelines.iter().map(|pipeline| pipeline.id).max();
                pipelines
                    .into_iter()
                    .map(|pipeline| {
                        let endpoint = api::projects::pipelines::Pipeline::builder()
                            .project(source_repo.name.as_str())
                            .pipeline(pipeline.id)
                            .build()
                            .unwrap();
                        let mut pipeline: types::SinglePipeline = self.query(&endpoint)?;
                        self.determine_archived(&mut pipeline, source_repo)?;
                        Ok(self.pipeline(pipeline, repo.clone(), latest))
                    })
                    .collect::<Result<_, HostingServiceError>>()?
            } else {
                Vec::new()
            }
        } else {
            Vec::new()
        };

        let target_pipelines = {
            let project = self.full_project(mr.target_repo.name.as_str())?;

            if project.builds_access_level != types::AccessLevel::Disabled {
                let mr_ref = format!("refs/merge-requests/{}/head", mr.id);
                let endpoint = api::projects::pipelines::Pipelines::builder()
                    .project(mr.target_repo.name.as_str())
                    .ref_(mr_ref)
                    .sha(mr.commit.id.as_str())
                    .build()
                    .unwrap();
                let endpoint = api::paged(endpoint, api::Pagination::All);
                let pipelines: Vec<types::Pipeline> = self.query(&endpoint)?;
                let repo = self.repo_from_project(project)?;
                let latest = pipelines.iter().map(|pipeline| pipeline.id).max();
                pipelines
                    .into_iter()
                    .map(|pipeline| {
                        let endpoint = api::projects::pipelines::Pipeline::builder()
                            .project(mr.target_repo.name.as_str())
                            .pipeline(pipeline.id)
                            .build()
                            .unwrap();
                        let mut pipeline: types::SinglePipeline = self.query(&endpoint)?;
                        self.determine_archived(&mut pipeline, &mr.target_repo)?;
                        Ok(self.pipeline(pipeline, repo.clone(), latest))
                    })
                    .collect::<Result<_, HostingServiceError>>()?
            } else {
                Vec::new()
            }
        };

        Ok(Some(
            source_pipelines
                .into_iter()
                .chain(target_pipelines)
                .collect(),
        ))
    }

    fn pipeline_jobs(
        &self,
        pipeline: &Pipeline,
    ) -> Result<Option<Vec<PipelineJob>>, HostingServiceError> {
        let project = self.full_project(pipeline.commit.repo.name.as_str())?;

        if project.builds_access_level == types::AccessLevel::Disabled {
            return Ok(None);
        }

        let endpoint = api::projects::pipelines::PipelineJobs::builder()
            .project(pipeline.commit.repo.name.as_str())
            .pipeline(pipeline.id)
            .build()
            .unwrap();
        let endpoint = api::paged(endpoint, api::Pagination::All);
        let jobs: Vec<types::PipelineJob> = self.query(&endpoint)?;
        Ok(Some(
            jobs.into_iter()
                .map(|job| self.job(job, pipeline.commit.repo.clone()))
                .collect(),
        ))
    }

    fn trigger_job(
        &self,
        job: &PipelineJob,
        user: Option<&str>,
    ) -> Result<(), HostingServiceError> {
        if job.archived {
            return Err(GitlabServiceError::archived_job(job.name.clone()).into());
        }

        if job.state.is_complete() {
            // Completed jobs must be retried rather than played.
            let endpoint = api::projects::jobs::RetryJob::builder()
                .project(job.repo.name.as_str())
                .job(job.id)
                .build()
                .unwrap();
            if let Some(user) = user {
                let endpoint = api::sudo(endpoint, user);
                let endpoint = api::ignore(endpoint);
                self.query(&endpoint)
            } else {
                let endpoint = api::ignore(endpoint);
                self.query(&endpoint)
            }
        } else {
            let endpoint = api::projects::jobs::PlayJob::builder()
                .project(job.repo.name.as_str())
                .job(job.id)
                .build()
                .unwrap();
            if let Some(user) = user {
                let endpoint = api::sudo(endpoint, user);
                let endpoint = api::ignore(endpoint);
                self.query(&endpoint)
            } else {
                let endpoint = api::ignore(endpoint);
                self.query(&endpoint)
            }
        }
    }
}

impl fmt::Debug for GitlabService {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("GitlabService")
            .field("user", &self.user.handle)
            .finish()
    }
}

#[cfg(test)]
mod tests {
    use std::collections::BTreeMap;

    use ghostflow::host::{CommitStatusState, PipelineState, User};
    use gitlab::api;
    use http::StatusCode;
    use thiserror::Error;

    use crate::types;

    use super::{ReferenceLevel, ReferenceTarget};

    #[derive(Debug, Error)]
    enum MyError {}

    fn mk_gitlab_status(status: StatusCode) -> api::ApiError<MyError> {
        api::ApiError::<MyError>::GitlabWithStatus {
            status,
            msg: String::new(),
        }
    }

    type TestResult = Result<(), api::ApiError<MyError>>;

    #[test]
    fn test_should_backoff() {
        let items = [
            (mk_gitlab_status(StatusCode::NOT_FOUND), true),
            (mk_gitlab_status(StatusCode::FORBIDDEN), false),
        ];

        for (i, e) in items {
            assert_eq!(super::should_backoff(&i), e);
        }
    }

    #[test]
    fn test_retry_with_backoff_first_success() {
        let mut call_count = 0;
        super::retry_with_backoff(|| -> TestResult {
            call_count += 1;
            Ok(())
        })
        .unwrap();
        assert_eq!(call_count, 1);
    }

    #[test]
    fn test_retry_with_backoff_second_success() {
        let mut call_count = 0;
        let mut did_err = false;
        super::retry_with_backoff(|| {
            call_count += 1;
            if did_err {
                Ok(())
            } else {
                did_err = true;
                Err(mk_gitlab_status(StatusCode::NOT_FOUND))
            }
        })
        .unwrap();
        assert_eq!(call_count, 2);
    }

    #[test]
    fn test_retry_with_backoff_no_success() {
        let mut call_count = 0;
        let err = super::retry_with_backoff(|| -> TestResult {
            call_count += 1;
            Err(mk_gitlab_status(StatusCode::NOT_FOUND))
        })
        .unwrap_err();
        assert_eq!(call_count, super::BACKOFF_LIMIT);
        if let api::ApiError::GitlabWithStatus {
            status,
            msg,
        } = err
        {
            assert_eq!(status, StatusCode::NOT_FOUND);
            assert_eq!(msg, "failed even after exponential backoff");
        } else {
            panic!("unexpected error: {}", err);
        }
    }

    #[test]
    fn test_mr_update_re() {
        let comments = [
            "Added 1 commit:\n\n\
             * deadbeef0 - blah blah blah blah blah blah blab blah",
            "Added 4 commits:\n\n\
             * deadbeef1 - blah blab blah blah\n\
             * deadbeef2 - blah blab blah blah\n\
             * deadbeef3 - blah blab blah blah\n\
             * deadbeef4 - blah blah blah blah",
            "added 1 commit\n\n\
             * deadbeef5 - blah blah blah\
             \n\n[Compare with previous version](link_to_revision_diff)",
            "added 90 commits\n\n\
             * deadbeef6...deadbeef7 - 89 commits from branch `upstream:master`\n\
             * deadbeef8 - blah blah blah blah\
             \n\n[Compare with previous version](link_to_revision_diff)",
            "added 1 commit\n\n\
             <ul>\
             <li>deadbeef7 - blah blah blah blah</li>\
             </ul>\
             \n\n[Compare with previous version](link_to_revision_diff)",
            "added 18 commits\n\n\
             <ul>\
             <li>deadbeef8...deadbeef9 - 17 commits from branch <code>upstream:master</code></li>\
             <li>deadbeef10 - Merge remote-tracking branch 'origin/master' into this_topic</li>\
             </ul>\
             \n\n[Compare with previous version](link_to_revision_diff)",
        ];

        for comment in comments.iter() {
            assert!(super::mr_update_re().is_match(dbg!(comment)));
        }
    }

    #[test]
    fn test_ghostflow_user() {
        let expect_username = "uname";
        let expect_email = "foo@bar.invalid";
        let expect_name = "name";
        let full_user = types::FullUser {
            username: expect_username.into(),
            email: expect_email.into(),
            name: expect_name.into(),
        };

        let User {
            handle,
            name,
            email,
        } = super::ghostflow_user(full_user);
        assert_eq!(handle, expect_username);
        assert_eq!(email, expect_email);
        assert_eq!(name, expect_name);
    }

    #[test]
    fn test_ghostflow_pipeline_state() {
        let items = [
            (types::PipelineStatus::Created, PipelineState::InProgress),
            (
                types::PipelineStatus::WaitingForResource,
                PipelineState::InProgress,
            ),
            (types::PipelineStatus::Preparing, PipelineState::InProgress),
            (
                types::PipelineStatus::WaitingForCallback,
                PipelineState::InProgress,
            ),
            (types::PipelineStatus::Pending, PipelineState::InProgress),
            (types::PipelineStatus::Running, PipelineState::InProgress),
            (types::PipelineStatus::Failed, PipelineState::Failed),
            (types::PipelineStatus::Success, PipelineState::Success),
            (types::PipelineStatus::Canceling, PipelineState::Canceled),
            (types::PipelineStatus::Canceled, PipelineState::Canceled),
            (types::PipelineStatus::Skipped, PipelineState::Canceled),
            (types::PipelineStatus::Manual, PipelineState::Manual),
            (types::PipelineStatus::Scheduled, PipelineState::InProgress),
        ];

        for (gl, gf) in items {
            assert_eq!(super::ghostflow_pipeline_state(gl), gf);
        }
    }

    #[test]
    fn test_gitlab_status_state() {
        let items = [
            (
                CommitStatusState::Pending,
                api::projects::repository::commits::CommitStatusState::Pending,
            ),
            (
                CommitStatusState::Success,
                api::projects::repository::commits::CommitStatusState::Success,
            ),
            (
                CommitStatusState::Failed,
                api::projects::repository::commits::CommitStatusState::Failed,
            ),
            (
                CommitStatusState::Running,
                api::projects::repository::commits::CommitStatusState::Running,
            ),
        ];

        for (gf, gl) in items {
            assert_eq!(super::gitlab_commit_status_state(gf), gl);
        }
    }

    #[test]
    fn test_ghostflow_commit_status_state() {
        let items = [
            (types::StatusState::Created, CommitStatusState::Pending),
            (types::StatusState::Pending, CommitStatusState::Pending),
            (types::StatusState::Running, CommitStatusState::Running),
            (types::StatusState::Success, CommitStatusState::Success),
            (types::StatusState::Failed, CommitStatusState::Failed),
            (types::StatusState::Canceled, CommitStatusState::Pending),
            (types::StatusState::Skipped, CommitStatusState::Pending),
            (types::StatusState::Manual, CommitStatusState::Pending),
            (types::StatusState::Scheduled, CommitStatusState::Pending),
        ];

        for (gl, gf) in items {
            assert_eq!(super::ghostflow_commit_status_state(gl), gf);
        }
    }

    #[test]
    fn test_reference_target_issue() {
        let issue = types::Issue {
            labels: Vec::new(),
            project_id: 0,
            web_url: String::new(),
            iid: 100,
        };

        assert_eq!(types::Issue::sigil(), '#');
        assert_eq!(issue.id(), 100);
    }

    #[test]
    fn test_reference_target_merge_request() {
        let mr = types::MergeRequest {
            source_project_id: 0,
            source_branch: String::new(),
            target_branch: String::new(),
            description: None,
            sha: None,
            work_in_progress: false,
            force_remove_source_branch: None,
            author: types::Author {
                id: 0,
            },
            web_url: String::new(),
            iid: 100,
        };

        assert_eq!(types::MergeRequest::sigil(), '!');
        assert_eq!(mr.id(), 100);
    }

    #[test]
    fn test_reference_level_default() {
        assert_eq!(ReferenceLevel::default(), ReferenceLevel::Project);
    }

    #[test]
    fn test_reference_level_between() {
        let namespaces: BTreeMap<&'static str, u64> = [("group", 100), ("other_group", 101)]
            .iter()
            .cloned()
            .collect();
        let projects: BTreeMap<&'static str, u64> = [
            ("project", 200),
            ("sibling_project", 201),
            ("other_project", 202),
        ]
        .iter()
        .cloned()
        .collect();
        let mk_project = |namespace: &str, project: &str| {
            types::Project {
                id: *projects.get(project).unwrap(),
                path_with_namespace: format!("{namespace}/{project}"),
                ssh_url_to_repo: String::new(),
                http_url_to_repo: String::new(),
                forked_from_project: None,
                namespace: types::Namespace {
                    id: *namespaces.get(namespace).unwrap(),
                    kind: types::NamespaceKind::Group,
                    path: namespace.into(),
                },
                path: project.into(),
                builds_access_level: types::AccessLevel::Disabled,
            }
        };

        let project_source = mk_project("group", "project");
        let project_target_same = mk_project("group", "project");
        let project_target_sibling = mk_project("group", "sibling_project");
        let project_target_elsewhere = mk_project("other_group", "other_project");

        let items = [
            (&project_source, ReferenceLevel::Project),
            (&project_target_same, ReferenceLevel::Project),
            (&project_target_sibling, ReferenceLevel::Namespace),
            (&project_target_elsewhere, ReferenceLevel::Site),
        ];

        for (p, rl) in items {
            assert_eq!(ReferenceLevel::between(&project_source, p), rl);
        }
    }

    #[test]
    fn test_reference_level_to() {
        let project = types::Project {
            id: 0,
            path_with_namespace: "namespace/project".into(),
            ssh_url_to_repo: String::new(),
            http_url_to_repo: String::new(),
            forked_from_project: None,
            namespace: types::Namespace {
                id: 0,
                kind: types::NamespaceKind::Group,
                path: "namespace".into(),
            },
            path: "project".into(),
            builds_access_level: types::AccessLevel::Disabled,
        };
        let issue = types::Issue {
            labels: Vec::new(),
            project_id: 0,
            web_url: String::new(),
            iid: 100,
        };
        let mr = types::MergeRequest {
            source_project_id: 0,
            source_branch: String::new(),
            target_branch: String::new(),
            description: None,
            sha: None,
            work_in_progress: false,
            force_remove_source_branch: None,
            author: types::Author {
                id: 0,
            },
            web_url: String::new(),
            iid: 200,
        };

        let issue_items = [
            (ReferenceLevel::Project, "#100"),
            (ReferenceLevel::Namespace, "project#100"),
            (ReferenceLevel::Site, "namespace/project#100"),
        ];

        for (rl, expect) in issue_items {
            assert_eq!(rl.to(&project, &issue), expect);
        }

        let mr_items = [
            (ReferenceLevel::Project, "!200"),
            (ReferenceLevel::Namespace, "project!200"),
            (ReferenceLevel::Site, "namespace/project!200"),
        ];

        for (rl, expect) in mr_items {
            assert_eq!(rl.to(&project, &mr), expect);
        }
    }
}