//! lezeh_deployment/client.rs

1use std::collections::HashMap;
2use std::convert::TryInto;
3use std::process::Stdio;
4use std::sync::Arc;
5
6use anyhow::anyhow;
7use anyhow::Error;
8use futures::FutureExt;
9use futures::StreamExt;
10use ghub::v3::branch::DeleteBranchInput;
11use ghub::v3::client::GithubClient;
12use ghub::v3::pull_request as github_pull_request;
13use ghub::v3::pull_request::GithubMergeMethod;
14use phab_lib::client::config::CertIdentityConfig;
15use phab_lib::client::config::PhabricatorClientConfig;
16use phab_lib::client::phabricator::PhabricatorClient;
17use phab_lib::dto::Task;
18use phab_lib::dto::User;
19use serde::Serialize;
20use serde_json::Value;
21use slog::Logger;
22
23use crate::config::Config;
24use crate::config::RepositoryConfig;
25
26use lezeh_common::command;
27use lezeh_common::command::PresetCommand;
28use lezeh_common::types::ResultAnyError;
29
/// Entry point for deployment operations across every configured repository.
///
/// Holds one `RepositoryDeploymentClient` per repository plus the shared
/// Phabricator and Github clients used to resolve tasks and merge PRs.
pub struct GlobalDeploymentClient {
  pub config: Config,
  phabricator: Arc<PhabricatorClient>,
  // One per-repository client, keyed by `RepositoryConfig.key`.
  repository_deployment_client_by_key: HashMap<String, RepositoryDeploymentClient>,

  // Shared with the per-repository clients; kept here only to own the Arc.
  #[allow(dead_code)]
  ghub: Arc<GithubClient>,

  #[allow(dead_code)]
  logger: &'static Logger,
}
41
42impl GlobalDeploymentClient {
43  pub fn new(config: Config, logger: &'static Logger) -> ResultAnyError<GlobalDeploymentClient> {
44    let cert_identity_config = CertIdentityConfig {
45      pkcs12_path: config.phab.pkcs12_path.clone(),
46      pkcs12_password: config.phab.pkcs12_password.clone(),
47    };
48
49    let phabricator = Arc::new(PhabricatorClient::new(PhabricatorClientConfig {
50      host: config.phab.host.clone(),
51      api_token: config.phab.api_token.clone(),
52      cert_identity_config: Some(cert_identity_config),
53    })?);
54
55    let ghub = Arc::new(GithubClient::new(&config.ghub.api_token)?);
56
57    let repository_deployment_client_by_key: HashMap<String, RepositoryDeploymentClient> = config
58      .repositories
59      .clone()
60      .into_iter()
61      .map(|repo_config| {
62        let repo_key = repo_config.clone().key;
63
64        return (
65          repo_key.clone(),
66          RepositoryDeploymentClient::new(
67            repo_config.clone(),
68            ghub.clone(),
69            logger.new(slog::o!("repo" => repo_key)),
70          ),
71        );
72      })
73      .collect();
74
75    return Ok(GlobalDeploymentClient {
76      ghub,
77      config,
78      phabricator,
79      repository_deployment_client_by_key,
80      logger,
81    });
82  }
83}
84
/// Expected git/github-level failures during a merge.
///
/// Callers downcast `anyhow::Error` to this type to distinguish per-task,
/// reportable failures from unexpected "show stopper" errors (see
/// `merge_all_tasks`).
#[derive(Debug, Clone, thiserror::Error)]
pub enum GitError {
  /// The pull request exists but could not be merged (github reported it as
  /// unmergeable, or the merge call itself failed).
  #[error("Merge failed err: {message}. Please see {pull_request_url}")]
  MergeError {
    message: String,
    remote_branch: String,
    pull_request_url: String,
  },
  /// Creating a PR failed because the remote branch has no commits on top of
  /// master (already merged, or nothing has been pushed to it yet).
  #[error("Remote branch is behind master(no changes to master), remote branch {remote_branch}")]
  RemoteBranchIsBehindMasterError {
    remote_branch: String,
    debug_url: String,
  },
}
99
/// Result of successfully merging one remote branch via a pull request.
#[derive(Debug, Serialize)]
pub struct SuccesfulMergeOutput {
  pub remote_branch: String,
  pub pull_request_url: String,
}
105
/// A successful merge attributed to a specific task in a specific repository.
#[derive(Debug, Serialize)]
pub struct SuccesfulMergeTaskOutput {
  pub repo_config: RepositoryConfig,
  pub task_id: String,
  pub remote_branch: String,
  pub pull_request_url: String,
}
113
/// A failed merge attributed to a specific task, including a URL (PR or
/// branch page) useful for debugging the failure.
#[derive(Debug, Serialize)]
pub struct FailedMergeTaskOutput {
  pub repo_config: RepositoryConfig,
  pub task_id: String,
  pub remote_branch: String,
  pub debug_url: String,
  pub message: String,
}
122
/// Per-repository summary of a `merge_all_tasks` run: which task ids were
/// already in master, which (task, branch) pairs were matched, and the
/// success/failure outcome of each attempted merge, keyed by task id.
#[derive(Debug, Serialize)]
pub struct MergeAllTasksOutput {
  pub repo_path: String,
  pub tasks_in_master_branch_by_task_id: HashMap<String, Vec<TaskInMasterBranch>>,
  pub matched_task_branch_mappings: Vec<MatchedTaskBranchMapping>,
  pub successful_merge_task_output_by_task_id: HashMap<String, SuccesfulMergeTaskOutput>,
  pub failed_merge_task_output_by_task_id: HashMap<String, FailedMergeTaskOutput>,
}
131
/// Aggregated result of merging feature branches across all repositories,
/// including tasks whose branches were not found anywhere (paired with their
/// assignees so they can be reported).
#[derive(Debug, Serialize)]
pub struct MergeFeatureBranchesOutput {
  pub merge_all_tasks_outputs: Vec<MergeAllTasksOutput>,
  pub task_by_id: HashMap<String, Task>,
  pub found_task_by_id: HashMap<String, Task>,
  pub not_found_user_task_mappings: Vec<UserTaskMapping>,
}
139
/// (assignee, task) pair; used to report tasks whose branches were not found.
#[derive(Debug, Serialize)]
pub struct UserTaskMapping(pub User, pub Task);
142
/// (task id, remote branch name) pair for a branch whose name contains the id.
#[derive(Debug, Serialize)]
pub struct MatchedTaskBranchMapping(pub String, pub String);
145
/// A commit already on master whose message mentions the given task id.
#[derive(Debug, Serialize)]
pub struct TaskInMasterBranch {
  pub repo_config: RepositoryConfig,
  pub commit_message: String,
  pub task_id: String,
}
152
153impl GlobalDeploymentClient {
154  pub async fn deploy(&self, repo_key: &str, scheme_key: &str) -> ResultAnyError<()> {
155    let repo_deployment_client = self
156      .repository_deployment_client_by_key
157      .get(repo_key)
158      .ok_or_else(|| {
159        return anyhow!("Invalid repo key {}", repo_key);
160      })?;
161
162    return repo_deployment_client
163      .deploy(scheme_key, GithubMergeMethod::Merge)
164      .await;
165  }
166
167  pub async fn merge_feature_branches(
168    &self,
169    task_ids: &Vec<&str>,
170    concurrency_limit: usize,
171  ) -> ResultAnyError<MergeFeatureBranchesOutput> {
172    let tasks: Vec<Task> = self.phabricator.get_tasks_by_ids(task_ids.clone()).await?;
173    let task_by_id: HashMap<String, Task> = tasks
174      .iter()
175      .map(|task| {
176        return (task.id.clone(), task.clone());
177      })
178      .collect();
179
180    let task_assignee_ids: Vec<&str> = tasks
181      .iter()
182      // For simplicity's sake, we can be sure that every task should
183      // have been assigned to an engineer.
184      .map(|task| task.assigned_phid.as_ref().unwrap().as_ref())
185      .collect();
186
187    let task_assignees: Vec<User> = self
188      .phabricator
189      .get_users_by_phids(task_assignee_ids.iter().map(AsRef::as_ref).collect())
190      .await?;
191
192    let task_assignee_by_phid: HashMap<String, User> = task_assignees
193      .into_iter()
194      .map(|user| (user.phid.clone(), user))
195      .collect();
196
197    // Create async tasks that will be run in parallel.
198    let tasks = self
199      .repository_deployment_client_by_key
200      .values()
201      .map(|deployment_client| {
202        return deployment_client.merge_all_tasks(&task_by_id);
203      });
204
205    let merge_results: Vec<ResultAnyError<MergeAllTasksOutput>> = futures::stream::iter(tasks)
206      .buffered(concurrency_limit)
207      .collect()
208      .await;
209
210    // Make sure that all is well
211    let merge_results: ResultAnyError<Vec<MergeAllTasksOutput>> =
212      merge_results.into_iter().collect();
213    let merge_results = merge_results?;
214    let not_found_user_task_mappings =
215      TaskUtil::find_not_found_tasks(&merge_results, &task_by_id, &task_assignee_by_phid);
216
217    let found_task_by_id: HashMap<String, Task> = task_by_id
218      .iter()
219      .filter(|(task_id, _)| {
220        return not_found_user_task_mappings
221          .iter()
222          .find(|UserTaskMapping(_user, not_found_task)| {
223            return not_found_task.id == **task_id;
224          })
225          .is_none();
226      })
227      .map(|(key, task_reference): (&String, &Task)| {
228        return (key.clone(), task_reference.clone());
229      })
230      .collect();
231
232    return Ok(MergeFeatureBranchesOutput {
233      merge_all_tasks_outputs: merge_results,
234      not_found_user_task_mappings,
235      found_task_by_id,
236      task_by_id,
237    });
238  }
239}
240
/// Per-repository deployment operations: a github client, a logger scoped to
/// the repository, and a shell-command runner rooted at the local checkout.
struct RepositoryDeploymentClient {
  pub config: RepositoryConfig,
  ghub: Arc<GithubClient>,
  logger: Arc<Logger>,
  // Runs shell commands (git) with `config.path` as the working directory.
  preset_command: PresetCommand,
}
247
248impl RepositoryDeploymentClient {
249  fn new(
250    config: RepositoryConfig,
251    ghub: Arc<GithubClient>,
252    logger: Logger,
253  ) -> RepositoryDeploymentClient {
254    return RepositoryDeploymentClient {
255      config: config.clone(),
256      ghub,
257      logger: Arc::new(logger),
258      preset_command: PresetCommand {
259        working_dir: config.path.clone(),
260      },
261    };
262  }
263}
264
/// Input for looking up an existing pull request by its head branch.
struct GetPullRequestInput<'a> {
  // "owner/repo" github path.
  pub repo_path: &'a str,
  pub branch_name: &'a str,
}
269
270impl RepositoryDeploymentClient {
271  async fn get_pull_request<'a>(
272    &self,
273    input: GetPullRequestInput<'a>,
274  ) -> ResultAnyError<Option<Value>> {
275    let GetPullRequestInput {
276      repo_path,
277      branch_name,
278    } = input;
279
280    return self
281      .ghub
282      .pull_request
283      .get_by_head(github_pull_request::GetPullRequestByHeadInput {
284        repo_path,
285        branch_name,
286        branch_owner: repo_path
287          .split('/')
288          .nth(0)
289          .ok_or(anyhow!("Could not read branch owner from {}", repo_path))?,
290      })
291      .await;
292  }
293
294  pub async fn merge_remote_branch(
295    &self,
296    pull_request_title: &str,
297    source_branch_name: &str,
298    into_branch_name: &str,
299    merge_method: github_pull_request::GithubMergeMethod,
300  ) -> ResultAnyError<SuccesfulMergeOutput> {
301    let repo_path = &self.config.github_path;
302
303    let mut pull_request: Option<Value> = self
304      .get_pull_request(GetPullRequestInput {
305        repo_path,
306        branch_name: source_branch_name,
307      })
308      .await?;
309
310    // Create pull request if there's none of it yet.
311    if pull_request.is_none() {
312      let input = github_pull_request::CreatePullRequestInput {
313        title: pull_request_title,
314        repo_path,
315        branch_name: source_branch_name,
316        into_branch: into_branch_name,
317      };
318
319      slog::info!(self.logger, "Creating PR {:?}", input);
320
321      // Add this point creating pull request might fail due to many things.
322      // One of the case that we should handle is when
323      // the remote branch is behind master branch, in other words, the remote
324      // branch does not have any commits to be merged. This can happen
325      // because of 2 things:
326      // A) It's already merged but the remote branch is not cleaned up yet
327      // B) People just create remote branch but haven't pushed into it yet.
328      //
329      // The easiest way is to just return a specialized error
330      // so the caller can handle this case.
331      let res_body: Value = self.ghub.pull_request.create(input).await.map_err(|err| {
332        if err
333          .to_string()
334          .to_lowercase()
335          .starts_with("no commits between master")
336        {
337          let remote_branch: String = source_branch_name.into();
338
339          return GitError::RemoteBranchIsBehindMasterError {
340            remote_branch: remote_branch.clone(),
341            debug_url: format!("https://github.com/{}/tree/{}", repo_path, remote_branch),
342          }
343          .into();
344        }
345
346        return err;
347      })?;
348
349      slog::info!(self.logger, "Done creating PR {:?}", res_body);
350      slog::debug!(self.logger, "Response body {:?}", res_body);
351
352      // Wait for 2 seconds to give github sometime to calculate mergeability
353      tokio::time::sleep(tokio::time::Duration::from_millis(2000)).await;
354
355      // We're refetching the PR to trigger a mergeability check on github
356      // https://developer.github.com/v3/git/#checking-mergeability-of-pull-requests
357      pull_request = self
358        .get_pull_request(GetPullRequestInput {
359          repo_path,
360          branch_name: source_branch_name,
361        })
362        .await?;
363    }
364
365    let pull_request = pull_request.unwrap();
366
367    let mergeable: Option<bool> = pull_request["mergeable"].as_bool();
368    let pull_number = &format!("{}", pull_request["number"]);
369    let pull_request_url = format!("https://github.com/{}/pull/{}", repo_path, pull_number);
370
371    if mergeable.is_some() && !mergeable.unwrap() {
372      return Err(
373        GitError::MergeError {
374          message: format!("mergeable field is falsy ({})", mergeable.unwrap()),
375          remote_branch: source_branch_name.into(),
376          pull_request_url,
377        }
378        .into(),
379      );
380    }
381
382    if mergeable.is_none() {
383      slog::warn!(
384        self.logger,
385        "Could not reat mergeable will try to proceed, it should be safe because it will throw error if it's not mergeable from github side"
386      )
387    }
388
389    // Merge
390    // -----------------------
391    let input = github_pull_request::MergePullRequestInput {
392      repo_path: &self.config.github_path,
393      pull_number,
394      merge_method,
395    };
396
397    slog::info!(self.logger, "Merging PR {:?}", input);
398
399    let res_body: Value = self.ghub.pull_request.merge(input).await.map_err(|err| {
400      // This is to handle merge error when we can't read `mergeable` field,
401      // we'll just rewrap the error so the merge sequence does not stop.
402      return GitError::MergeError {
403        message: err.to_string(),
404        remote_branch: source_branch_name.into(),
405        pull_request_url: pull_request_url.clone(),
406      };
407    })?;
408
409    slog::info!(self.logger, "Done merging PR");
410    slog::debug!(self.logger, "Response body {:?}", res_body);
411
412    let merge_succeeded: bool = res_body["merged"].as_bool().ok_or(anyhow!(
413      "Failed to parse merge pull request 'merged' to bool",
414    ))?;
415
416    if !merge_succeeded {
417      return Err(
418        GitError::MergeError {
419          message: "Not sure why".into(),
420          remote_branch: source_branch_name.into(),
421          pull_request_url,
422        }
423        .into(),
424      );
425    }
426
427    return Ok(SuccesfulMergeOutput {
428      remote_branch: source_branch_name.into(),
429      pull_request_url,
430    });
431  }
432
433  /// As of now this only do merging.
434  /// Will do deployment in the future~
435  pub async fn deploy(
436    &self,
437    scheme_key: &str,
438    merge_method: GithubMergeMethod,
439  ) -> ResultAnyError<()> {
440    let scheme = self
441      .config
442      .deployment_scheme_by_key
443      .get(scheme_key)
444      .ok_or_else(|| {
445        return anyhow!("Invalid scheme key {}", scheme_key);
446      })?;
447
448    let _ = self
449      .merge_remote_branch(
450        &scheme.default_pull_request_title,
451        &scheme.merge_from_branch,
452        &scheme.merge_into_branch,
453        merge_method,
454      )
455      .await;
456
457    return Ok(());
458  }
459
  /// Merges every remote branch matching one of `task_by_id`'s ids into
  /// master for this repository and reports per-task outcomes.
  ///
  /// Flow:
  /// 1. Sync the local checkout (checkout master, pull, prune stale remote
  ///    refs, fetch all).
  /// 2. Substring-match `git branch -r` output against the task ids.
  /// 3. Collect commits already on master that mention any task id.
  /// 4. Merge every matched branch one by one, partitioning results into
  ///    success/failure maps keyed by task id.
  ///
  /// Any failure that is NOT a `GitError` is treated as a show stopper and
  /// returned as `Err` immediately; `GitError`s are expected per-task
  /// failures and end up in `failed_merge_task_output_by_task_id`.
  pub async fn merge_all_tasks(
    &self,
    task_by_id: &HashMap<String, Task>,
  ) -> ResultAnyError<MergeAllTasksOutput> {
    slog::info!(self.logger, "[Run] git checkout master");

    slog::info!(
      self.logger,
      "{}",
      self.preset_command.exec("git checkout master").await?
    );

    slog::info!(self.logger, "[Run] git pull origin master");

    slog::info!(
      self.logger,
      "{}",
      self.preset_command.exec("git pull origin master").await?
    );

    // This will sync deleted branch remotely, sometimes we've deleted remote branch
    // but it still appears locally under origin/<branchname> when running `git branch -r`.
    slog::info!(self.logger, "[Run] git remote prune origin");
    slog::info!(
      self.logger,
      "{}",
      self.preset_command.exec("git remote prune origin").await?
    );

    slog::info!(self.logger, "[Run] git fetch --all");

    slog::info!(
      self.logger,
      "{}",
      self.preset_command.exec("git fetch --all").await?
    );

    slog::info!(self.logger, "[Run] git branch -r");

    let remote_branches = self.preset_command.exec("git branch -r").await?;
    let task_ids: Vec<&str> = task_by_id.keys().map(String::as_ref).collect();

    // Pair every task id with every remote branch whose name contains it.
    let filtered_branch_mappings: Vec<MatchedTaskBranchMapping> =
      TaskUtil::create_matching_task_and_branch(&task_ids, &remote_branches.split('\n').collect());

    let tasks_in_master_branch_by_task_id =
      self.tasks_in_master_branch_by_task_id(&task_ids).await?;

    // Build one boxed future per matched (task, branch) pair; they are lazy
    // and only run when awaited one at a time below.
    let all: Vec<futures::future::BoxFuture<(String, ResultAnyError<SuccesfulMergeOutput>)>> =
      filtered_branch_mappings
        .iter()
        .map(|MatchedTaskBranchMapping(task_id, remote_branch)| {
          async move {
            return (
              task_id.clone(),
              self
                .merge(
                  // PR title: "[<branch name, falling back to task id>] <task name>".
                  &format!(
                    "[{}] {}",
                    remote_branch
                      .split('/')
                      .nth(1)
                      .or(Some(task_id.as_ref()))
                      .unwrap(),
                    task_by_id.get(task_id).unwrap().name
                  ),
                  &remote_branch,
                )
                .await,
            );
          }
          .boxed()
        })
        .collect();

    let mut results: Vec<(String, ResultAnyError<SuccesfulMergeOutput>)> = vec![];

    // Merge in serially instead of concurrently to reduce possibility
    // of race conditions.
    for fut in all.into_iter() {
      results.push(fut.await);
    }

    // A "show stopper" is any error that cannot be downcast to GitError:
    // those are unexpected and abort the whole repository run.
    let show_stopper_error: Option<&Error> = results.iter().find_map(
      |(_task_id, result): &(String, ResultAnyError<SuccesfulMergeOutput>)| -> Option<&Error> {
        return result.as_ref().err().filter(|err| {
          let maybe_merge_error: Option<&GitError> = err.downcast_ref();

          return maybe_merge_error.is_none();
        });
      },
    );

    if show_stopper_error.is_some() {
      return Err(anyhow!(format!("{}", show_stopper_error.unwrap())));
    }

    let (successes, failures): (
      Vec<(String, ResultAnyError<SuccesfulMergeOutput>)>,
      Vec<(String, ResultAnyError<SuccesfulMergeOutput>)>,
    ) = results
      .into_iter()
      .partition(|(_task_id, result)| result.is_ok());

    // Every remaining error is guaranteed to be a GitError (checked above),
    // so the unwraps/downcasts below cannot fail.
    let failed_merge_task_output_by_task_id: HashMap<_, _> = failures
      .into_iter()
      .map(
        |(task_id, possible_merge_error): (String, ResultAnyError<SuccesfulMergeOutput>)| -> (String, FailedMergeTaskOutput) {
          let err = possible_merge_error.err().unwrap();
          let client_operation_error: &GitError = err.downcast_ref().unwrap();

          // Pick the most useful debug URL per error kind: the PR page for
          // merge failures, the branch page for behind-master failures.
          let (remote_branch, debug_url) = match client_operation_error {
            GitError::MergeError{
              message: _,
              remote_branch,
              pull_request_url
            } => (remote_branch, pull_request_url),
            GitError::RemoteBranchIsBehindMasterError{
              remote_branch,
              debug_url
            } => (remote_branch, debug_url),
          };

          return (
            task_id.clone(),
            FailedMergeTaskOutput {
              repo_config: self.config.clone(),
              task_id: task_id.clone(),
              remote_branch: String::from(remote_branch),
              debug_url: String::from(debug_url),
              message: client_operation_error.to_string()
            },
          );
        },
      )
      .collect();

    let successful_merge_task_output_by_task_id: HashMap<_, _> = successes
      .into_iter()
      .map(|(task_id, successful_merge_branch_output)| {
        let successful_merge_branch_output = successful_merge_branch_output.unwrap();

        return (
          task_id.clone(),
          SuccesfulMergeTaskOutput {
            repo_config: self.config.clone(),
            task_id: task_id.clone(),
            remote_branch: successful_merge_branch_output.remote_branch,
            pull_request_url: successful_merge_branch_output.pull_request_url,
          },
        );
      })
      .collect();

    return Ok(MergeAllTasksOutput {
      tasks_in_master_branch_by_task_id,
      matched_task_branch_mappings: filtered_branch_mappings,
      repo_path: self.config.github_path.clone(),
      successful_merge_task_output_by_task_id,
      failed_merge_task_output_by_task_id,
    });
  }
626
627  async fn merge(
628    &self,
629    pull_request_title: &str,
630    remote_branch: &str,
631  ) -> ResultAnyError<SuccesfulMergeOutput> {
632    // Create PR
633    // -----------------------
634    let branch_name = remote_branch
635      .split('/')
636      .last()
637      .ok_or(anyhow!("Could not get branch name from {}", remote_branch))?;
638
639    let merge_output = self
640      .merge_remote_branch(
641        pull_request_title,
642        branch_name,
643        "master",
644        github_pull_request::GithubMergeMethod::Merge,
645      )
646      .await?;
647
648    // Cleanup branch after squash merge to prevent
649    // multiple merges
650    self
651      .ghub
652      .branch
653      .delete(DeleteBranchInput {
654        repo_path: &self.config.github_path,
655        branch_name,
656      })
657      .await?;
658
659    return Ok(merge_output);
660  }
661
662  async fn tasks_in_master_branch_by_task_id(
663    &self,
664    task_ids: &Vec<&str>,
665  ) -> ResultAnyError<HashMap<String, Vec<TaskInMasterBranch>>> {
666    let git_log_handle = self
667      .preset_command
668      .spawn_command_from_str(
669        "git log --oneline --no-decorate", // In format {abbreviatedHash} {message}
670        None,
671        Some(Stdio::piped()),
672      )
673      .await?;
674
675    let grep_regex_input = task_ids.iter().fold("".to_owned(), |acc, task_id| {
676      if acc.is_empty() {
677        return String::from(*task_id);
678      }
679
680      return format!("{}\\|{}", acc, task_id);
681    });
682
683    let grep_output = self
684      .preset_command
685      .spawn_command_from_str(
686        &format!("grep {}", grep_regex_input),
687        Some(git_log_handle.stdout.unwrap().try_into()?),
688        None,
689      )
690      .await?
691      .wait_with_output()
692      .await?;
693
694    let grep_output = command::handle_command_output(grep_output)?;
695    let commit_messages: Vec<&str> = grep_output
696      .lines()
697      .filter(|line| {
698        return !line.contains("Merge pull request");
699      })
700      .collect();
701
702    // Iterate all task ids and find which one
703    // is in the commit messages, double loop here
704    let task_id_commit_message_pairs: Vec<(String, String)> = task_ids
705      .iter()
706      .flat_map(|task_id| -> Vec<(String, String)> {
707        // First loop
708        return commit_messages
709          .iter()
710          .filter_map(|commit_message: &&str| -> Option<(String, String)> {
711            // 2nd loop
712            if !commit_message.contains(task_id) {
713              return None;
714            }
715
716            return Some((String::from(*task_id), String::from(*commit_message)));
717          })
718          .collect();
719      })
720      .collect();
721
722    let mut tasks_in_master_branch_by_id: HashMap<String, Vec<TaskInMasterBranch>> =
723      Default::default();
724
725    for (task_id, commit_message) in task_id_commit_message_pairs {
726      tasks_in_master_branch_by_id
727        .entry(task_id.clone())
728        .or_insert(Default::default())
729        .push(TaskInMasterBranch {
730          repo_config: self.config.clone(),
731          task_id,
732          commit_message,
733        });
734    }
735
736    return Ok(tasks_in_master_branch_by_id);
737  }
738}
739
// TODO: Move to another module
/// Namespace for pure helpers that match tasks against branches/commits.
struct TaskUtil;
742
743impl TaskUtil {
744  fn create_matching_task_and_branch(
745    task_ids: &Vec<&str>,
746    remote_branches: &Vec<&str>,
747  ) -> Vec<MatchedTaskBranchMapping> {
748    return remote_branches
749      .iter()
750      .flat_map(|remote_branch| {
751        let remote_branch = remote_branch.trim().to_owned();
752
753        return task_ids
754          .into_iter()
755          .map(|task_id| {
756            return MatchedTaskBranchMapping(
757              String::from(task_id.to_owned()),
758              remote_branch.clone(),
759            );
760          })
761          .collect::<Vec<MatchedTaskBranchMapping>>();
762      })
763      .filter(|MatchedTaskBranchMapping(task_id, remote_branch)| {
764        return remote_branch.contains(&task_id[..]);
765      })
766      .collect();
767  }
768
769  fn find_not_found_tasks(
770    merge_results: &Vec<MergeAllTasksOutput>,
771    task_by_id: &HashMap<String, Task>,
772    task_assignee_by_phid: &HashMap<String, User>,
773  ) -> Vec<UserTaskMapping> {
774    // Start filtering all the not found tasks
775    let mut found_task_count_by_id: HashMap<String, usize> = task_by_id
776      .values()
777      .into_iter()
778      .map(|task| {
779        return (task.id.clone(), 0);
780      })
781      .collect();
782
783    merge_results.iter().for_each(|merge_result| {
784      for MatchedTaskBranchMapping(task_id, _remote_branch) in
785        merge_result.matched_task_branch_mappings.iter()
786      {
787        let current_counter = found_task_count_by_id
788          .get_mut(PhabricatorClient::clean_id(task_id))
789          .unwrap();
790
791        *current_counter += 1;
792      }
793
794      for (task_id, _) in merge_result.tasks_in_master_branch_by_task_id.iter() {
795        let current_counter = found_task_count_by_id
796          .get_mut(PhabricatorClient::clean_id(task_id))
797          .unwrap();
798
799        *current_counter += 1;
800      }
801    });
802
803    let not_found_user_task_mappings: Vec<UserTaskMapping> = found_task_count_by_id
804      .into_iter()
805      .filter(|(_task_id, count)| {
806        return *count == 0 as usize;
807      })
808      .map(|(task_id, _count)| {
809        let task = task_by_id.get(&task_id).unwrap();
810        let user_id: String = task.assigned_phid.clone().unwrap();
811        let user = task_assignee_by_phid.get(&user_id).unwrap();
812
813        return UserTaskMapping(user.clone(), task.clone());
814      })
815      .collect();
816
817    return not_found_user_task_mappings;
818  }
819}
820
#[cfg(test)]
mod test {
  use super::*;

  mod find_not_found_tasks {
    use super::*;
    use fake::Fake;
    use fake::Faker;

    #[test]
    fn it_should_return_not_found_tasks() {
      // task_1 is never referenced by any branch mapping or master commit
      // below, so it should be reported as not found.
      let mut task_1: Task = Faker.fake();
      task_1.id = "1234".into();
      task_1.assigned_phid = Some("haha".into());

      // task_2 is matched via the "origin/bar_T3333_foo" branch mapping.
      let mut task_2: Task = Faker.fake();
      task_2.id = "3333".into();
      task_2.assigned_phid = Some("wut".into());

      let task_by_id: HashMap<String, Task> = vec![task_1.clone(), task_2.clone()]
        .iter()
        .map(|task| {
          return (task.id.clone(), task.clone());
        })
        .collect();

      let mut user_1: User = Faker.fake();
      user_1.phid = task_1.assigned_phid.unwrap().clone();

      let mut user_2: User = Faker.fake();
      user_2.phid = "wut".into();

      let task_assignee_by_phid: HashMap<String, User> = vec![user_1, user_2]
        .iter()
        .map(|user| {
          return (user.phid.clone(), user.clone());
        })
        .collect();

      let mut successful_merge_task_output_by_task_id: HashMap<String, SuccesfulMergeTaskOutput> =
        HashMap::new();
      successful_merge_task_output_by_task_id.insert(
        "3333".to_owned(),
        SuccesfulMergeTaskOutput {
          repo_config: RepositoryConfig {
            key: "".to_owned(),
            path: "".to_owned(),
            github_path: "".to_owned(),
            deployment_scheme_by_key: HashMap::new(),
          },
          task_id: "3333".to_owned(),
          remote_branch: "origin/bar_T3333_foo".into(),
          pull_request_url: "https://example.com".into(),
        },
      );

      let merge_results: Vec<MergeAllTasksOutput> = vec![MergeAllTasksOutput {
        repo_path: String::from("/foo"),
        tasks_in_master_branch_by_task_id: Default::default(),
        matched_task_branch_mappings: vec![MatchedTaskBranchMapping(
          "3333".into(),
          "origin/bar_T3333_foo".into(),
        )],
        successful_merge_task_output_by_task_id,
        failed_merge_task_output_by_task_id: HashMap::new(),
      }];

      let not_found_user_task_mappings =
        TaskUtil::find_not_found_tasks(&merge_results, &task_by_id, &task_assignee_by_phid);

      // Only task_1 ("1234") should be reported as not found.
      assert_eq!(1, not_found_user_task_mappings.len());
    }
  }

  mod create_matching_task_and_branch {
    use super::*;

    #[test]
    fn it_should_create_matching_branch() {
      // Only "1234" appears inside a branch name ("hey1234"); "444" does
      // not ("445" is a different id).
      let matched_task_branch_mappings = TaskUtil::create_matching_task_and_branch(
        &vec!["1234", "444"],
        &vec!["hmm_123", "hey1234", "445"],
      );

      let expected_mappings = vec![MatchedTaskBranchMapping("1234".into(), "hey1234".into())];

      assert_eq!(1, matched_task_branch_mappings.len());

      for i in 0..expected_mappings.len() {
        let expected_mapping = expected_mappings.get(i).unwrap();
        let result_mapping = matched_task_branch_mappings.get(i).unwrap();

        assert_eq!(expected_mapping.0, result_mapping.0);
        assert_eq!(expected_mapping.1, result_mapping.1);
      }
    }
  }
}