use anyhow::{anyhow, bail};
use colored::Colorize;
use serde::{Deserialize, Serialize};
use std::collections::{BTreeMap, BTreeSet};
use std::io::Write;
use std::path::{Path, PathBuf};
use std::process::exit;
use crate::actions::{Action, DefinedAction, UsedAction};
use crate::bmap;
use crate::cmd::{CatPipelineArgs, CheckProjectPipelineArgs, EditPipelineArgs, NewPipelineArgs, WithPipelineArgs};
use crate::cmd::{ExportPipelineArgs, RemovePipelineArgs};
use crate::entities::ansible_opts::AnsibleOpts;
use crate::entities::compose_opts::ComposeOpts;
use crate::entities::containered_opts::ContaineredOpts;
use crate::entities::custom_command::CustomCommand;
use crate::entities::driver::PipelineDriver;
use crate::entities::environment::RunEnvironment;
use crate::entities::github_cicd_opts::GitHubOpts;
use crate::entities::gitlab_cicd_opts::GitLabOpts;
use crate::entities::info::StrToInfo;
use crate::entities::info::{PipelineInfo, info2str, str2info};
use crate::entities::placements::Placement;
use crate::entities::requirements::Requirement;
use crate::entities::systemd_opts::SystemdOpts;
use crate::globals::DeployerGlobalConfig;
use crate::project::DeployerProjectOptions;
use crate::run::{prepare_artifacts_folder, sync_run_folder};
use crate::rw::read_checked;
use crate::tui::setup::specify_pipeline_short_name;
/// A serializable description of a deployment pipeline: its identity and
/// metadata, the ordered list of actions to run, artifact placements and
/// per-driver (container/compose/Ansible/systemd/CI) options.
///
/// `Option` fields and empty `Vec`s are skipped during serialization to keep
/// the on-disk representation minimal.
#[derive(Deserialize, Serialize, Eq, Clone)]
pub struct DescribedPipeline {
    /// Human-readable title; inside a project this is the short name.
    pub title: String,
    /// Optional free-form description.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub desc: Option<String>,
    /// Pipeline identity; persisted as a string via `info2str`/`str2info`.
    /// Equality and ordering of pipelines are based on this field alone.
    #[serde(serialize_with = "info2str", deserialize_with = "str2info")]
    pub info: PipelineInfo,
    /// Tags shown in registry listings.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub tags: Vec<String>,
    /// `Some(true)` when this pipeline runs by default for the project.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub default: Option<bool>,
    /// Paths copied as-is — presumably without processing; TODO confirm against usage.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub copy_only: Vec<String>,
    /// Ordered actions executed by the pipeline.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub actions: Vec<UsedAction>,
    /// Artifact placements collected after a run.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub artifacts: Vec<Placement>,
    /// Tag used when deriving exclusive execution tags for exports
    /// (suffixed with `-containered`/`-compose` in `export_project_pipeline`).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub exclusive_exec_tag: Option<String>,
    /// Execution driver; omitted from serialization for the Deployer default.
    #[serde(default, skip_serializing_if = "PipelineDriver::is_deployer")]
    pub driver: PipelineDriver,
    /// Containerized (Docker/Podman) execution options, if configured.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub containered_opts: Option<ContaineredOpts>,
    /// docker-compose export options, if configured.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub compose_opts: Option<ComposeOpts>,
    /// Ansible export options, if configured.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub ansible_opts: Option<AnsibleOpts>,
    /// systemd export options, if configured.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub systemd_opts: Option<SystemdOpts>,
    /// GitHub CI/CD export options, if configured.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub gh_opts: Option<GitHubOpts>,
    /// GitLab CI/CD export options, if configured.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub gl_opts: Option<GitLabOpts>,
}
/// Two pipelines are equal when their `info` identities match; no other
/// field participates in equality.
impl PartialEq for DescribedPipeline {
    fn eq(&self, other: &Self) -> bool {
        self.info == other.info
    }
}
/// Total ordering delegates entirely to the pipeline identity (`info`),
/// mirroring the `PartialEq` implementation.
impl Ord for DescribedPipeline {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        Ord::cmp(&self.info, &other.info)
    }
}
/// Partial ordering in the canonical form, delegating to `Ord::cmp` (which
/// itself compares only `info`). Using `Some(self.cmp(other))` is behaviorally
/// identical to the previous `Some(self.info.cmp(&other.info))` and removes
/// the need for the `clippy::non_canonical_partial_ord_impl` allowance.
impl PartialOrd for DescribedPipeline {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
/// Copies run artifacts into the artifacts directory according to
/// `placements`: each `from` is resolved relative to `env.run_dir`, each
/// `to` relative to `env.artifacts_dir`. Missing sources are skipped.
///
/// # Panics
/// Panics when `panic_when_not_found` is set and a `from` path is missing.
pub fn place_artifacts(
    env: &RunEnvironment,
    placements: Vec<(String, String)>,
    panic_when_not_found: bool,
) -> anyhow::Result<()> {
    for (from, to) in placements {
        let artifact_path = env.run_dir.join(from);
        // `fs::exists` takes `impl AsRef<Path>`, so borrowing avoids the
        // previous needless `PathBuf` clone.
        if !std::fs::exists(&artifact_path)? {
            if panic_when_not_found {
                panic!("There is no such artifact: {artifact_path:?}!");
            }
        } else if artifact_path.is_dir() || artifact_path.is_file() {
            // NOTE(review): `copy_all` receives `artifact_path` twice
            // (base dir + source?) — confirm against `crate::rw::copy_all`'s
            // signature before changing.
            crate::rw::copy_all(
                artifact_path.as_path(),
                artifact_path.as_path(),
                env.artifacts_dir.join(to).as_path(),
                &[] as &[&str],
            )?;
        }
    }
    Ok(())
}
impl DescribedPipeline {
/// Renders every action of the pipeline as a displayable command string.
///
/// Non-command actions are represented by placeholders (`<sync-to-remote>`,
/// `<add-to-storage>`, …); `Interrupt` actions produce no entry at all.
///
/// # Errors
/// Fails when an action's definition cannot be resolved in `definitions`.
pub fn return_all_cmds(&self, definitions: &BTreeSet<DefinedAction>) -> anyhow::Result<Vec<String>> {
    let mut cmds = Vec::with_capacity(self.actions.len());
    for action in &self.actions {
        let rendered = match &action.definition(definitions)?.action {
            Action::Interrupt => None,
            Action::SyncToRemote { .. } => Some("<sync-to-remote>".to_string()),
            Action::SyncFromRemote { .. } => Some("<sync-from-remote>".to_string()),
            Action::Custom(cmd) => Some(cmd.cmd.to_owned()),
            Action::Test(check) => Some(format!("<test> {}", check.command.cmd)),
            Action::Staged(cmd) => Some(cmd.command.cmd.to_string()),
            Action::Observe(a) => Some(format!("<observe> {}", a.command.cmd)),
            Action::UseFromStorage { .. } => Some("<use-from-storage>".to_string()),
            Action::AddToStorage(_) => Some("<add-to-storage>".to_string()),
            Action::Patch(p) => Some(format!("<patch-with-file> {:?}", p.patch)),
        };
        if let Some(cmd) = rendered {
            cmds.push(cmd);
        }
    }
    Ok(cmds)
}
/// Collects the requirements of every action in the pipeline (deduplicated
/// in a set) and checks that each one is satisfied in `env`.
///
/// Returns `(all_satisfied, checked_count)` and prints a human-readable
/// reason for every unsatisfied requirement.
pub async fn satisfy_all_requirements(
    &self,
    config: &DeployerProjectOptions,
    env: &RunEnvironment<'_>,
) -> anyhow::Result<(bool, usize)> {
    use crate::bset;
    use crate::entities::requirements::SatisfyErr;
    #[allow(clippy::mutable_key_type)]
    let mut requirements = bset!();
    let mut unsatisfied = false;
    // Deduplicate requirements shared between actions before checking them.
    for action in self.actions.iter() {
        let action = action.definition(&config.actions)?;
        for action_req in &action.requirements {
            requirements.insert(action_req.clone());
        }
    }
    for requirement in requirements.iter() {
        if let Err(e) = requirement.satisfy(env).await {
            unsatisfied = true;
            match e {
                // FIX: the previous messages were grammatically broken
                // ("is not exist") and `ExistsAny` carried a double negative.
                SatisfyErr::Exists(path) => println!("Due to: path `{}` does not exist.", path.to_string_lossy()),
                SatisfyErr::ExistsAny(paths) => {
                    let paths = paths
                        .iter()
                        .map(|p| p.to_string_lossy().to_string())
                        .collect::<Vec<_>>();
                    println!("Due to: none of the paths `{}` exists.", &paths.join("`, `"));
                }
                SatisfyErr::NoBinary(e) => println!("Due to: such binary `{e}` is unavailable."),
                SatisfyErr::Check(output) => {
                    println!("Due to: requirement (check) is not satisfied, output:");
                    for line in output {
                        println!("- {line}");
                    }
                }
                SatisfyErr::Remote(e) => println!("Due to: such remote `{e}` is unavailable."),
            }
        }
    }
    Ok((!unsatisfied, requirements.len()))
}
/// Executes the pipeline with the built-in Deployer driver: checks all
/// requirements up front, runs every action in order (mirroring console
/// output into a build log), optionally syncs project-dir changes back,
/// and finally collects artifacts.
///
/// Returns `Ok(true)` when every executed action succeeded; `Ok(false)`
/// when requirements were unsatisfied or an action failed.
pub async fn exec_with_deployer_driver(
    &self,
    config: &DeployerProjectOptions,
    env: &mut RunEnvironment<'_>,
) -> anyhow::Result<bool> {
    use crate::rw::{build_log, generate_build_log_filepath};
    use std::time::{Duration, Instant};
    let mut total_time = Duration::from_secs(0);
    let log_file = generate_build_log_filepath(&config.project_name, &self.title, env.cache_dir);
    println!("Starting the `{}` pipeline...", &self.title);
    build_log(&log_file, &[format!("Starting the `{}` pipeline...", &self.title)])?;
    let run_path = env.run_dir.canonicalize()?;
    let run_path = run_path.to_str().expect("Can't convert `Path` to string!");
    println!("Run path: {run_path}");
    build_log(&log_file, &[format!("Run path: {run_path}")])?;
    let log_file_path = log_file.canonicalize()?;
    let log_file_path = log_file_path.to_str().expect("Can't convert `Path` to string!");
    println!("Log file: {log_file_path}");
    build_log(&log_file, &[format!("Log file: {log_file_path}")])?;
    // Verify every requirement before running any action; abort early otherwise.
    let now = Instant::now();
    let (satisfied, checked) = self.satisfy_all_requirements(config, env).await?;
    if !satisfied {
        return Ok(false);
    }
    if checked > 0 {
        let elapsed = now.elapsed();
        println!("Requirements checked in {}.", format!("{elapsed:.2?}").green());
        build_log(&log_file, &[format!("Requirements checked in {:.2?}.", elapsed)])?;
        total_time += elapsed;
    }
    let mut success = true;
    let mut cntr = 1usize; // 1-based action counter for progress messages
    let mut to_sync = false; // set when an action runs in the project dir and must be synced back
    let real_run_dir = env.run_dir;
    let mut sd_notifier = None; // background systemd readiness notifier, if requested
    let total = self.actions.len();
    for used in self.actions.iter() {
        let action = used.definition(&config.actions)?;
        // The skipper may interactively bypass this action entirely.
        if env.skipper.skip().await {
            cntr += 1;
            continue;
        }
        // Build a per-action environment: actions flagged `exec_in_project_dir`
        // run directly in the project directory (and usually require a sync back).
        let mut env = if action.exec_in_project_dir.is_some_and(|v| v)
            && let Some(project_dir) = env.project_dir
        {
            if action.skip_sync.is_none_or(|v| !v) {
                to_sync = true;
            }
            RunEnvironment {
                run_dir: project_dir,
                daemons: env.daemons.clone(),
                skipper: env.skipper.clone(),
                restart_requested: env.restart_requested.clone(),
                ..(*env)
            }
        } else {
            RunEnvironment {
                daemons: env.daemons.clone(),
                skipper: env.skipper.clone(),
                restart_requested: env.restart_requested.clone(),
                ..(*env)
            }
        };
        let start_msg = format!(
            "[{}/{}] Action `{}`...",
            cntr,
            total,
            used
                .title
                .as_deref()
                .unwrap_or(action.info.to_str().as_str())
                .blue()
                .italic()
        );
        // Observers stream their own output, so their header gets a full line;
        // other actions print the header without a newline and append status later.
        if !matches!(&action.action, Action::Observe(_)) {
            print!("{start_msg}");
            std::io::stdout().flush()?;
        } else {
            println!("{start_msg}");
        }
        build_log(&log_file, &[start_msg])?;
        // On the last action, optionally notify systemd readiness after a delay
        // (DEPLOYER_NOTIFY_SYSTEMD=1, delay from DEPLOYER_NOTIFY_SYSTEMD_AFTER_SECS).
        if cntr == total
            && let Ok("1") = std::env::var("DEPLOYER_NOTIFY_SYSTEMD").as_deref()
        {
            let skip_time_sec = std::env::var("DEPLOYER_NOTIFY_SYSTEMD_AFTER_SECS").unwrap_or("1".to_string());
            let skip_time_sec: u64 = skip_time_sec.parse().unwrap_or(1);
            sd_notifier = Some(std::thread::spawn(move || {
                std::thread::sleep(std::time::Duration::from_secs(skip_time_sec));
                let _ = sd_notify::notify(true, &[sd_notify::NotifyState::Ready]);
            }));
        }
        let now = Instant::now();
        let (status, output) = Box::pin(action.action.run_with(config, self, &mut env, &used.with)).await?;
        // Mirror project-dir changes back into the real run dir when requested.
        if to_sync {
            sync_run_folder(
                config,
                self,
                real_run_dir,
                env
                    .project_dir
                    .expect("No project directory specified on pipeline run!"),
                false,
                false,
            )?;
            to_sync = false;
        }
        let elapsed = now.elapsed();
        // Observer time is excluded from the reported total duration.
        if !matches!(&action.action, Action::Observe(_)) {
            total_time += elapsed;
        }
        let ending = format!(
            "{} ({}).",
            if status {
                " done".to_string()
            } else {
                " got an error!".red().bold().to_string()
            },
            format!("{elapsed:.2?}").green(),
        );
        let end_msg = format!(
            "[{}/{}] Action `{}` -{}",
            cntr,
            total,
            used
                .title
                .as_deref()
                .unwrap_or(action.info.to_str().as_str())
                .blue()
                .italic(),
            ending,
        );
        // Print just the short ending when the header is still on this line;
        // otherwise re-print the full end message.
        if status
            && (!action.is_always_piped()
                || (!action.is_observer()
                    && (output.is_empty() || (output.len() == 1 && output.first().is_some_and(|l| l.as_str().eq("\n"))))))
        {
            println!("{ending}");
        } else {
            println!("{end_msg}");
        }
        // On failure of a non-piped action, dump its captured output to the console.
        if !status && !action.is_always_piped() {
            for line in &output {
                println!("{line}");
            }
        }
        build_log(&log_file, &output)?;
        build_log(&log_file, &[end_msg])?;
        cntr += 1;
        success &= status;
        if !status {
            break;
        }
        // Watch mode: stop early when a restart was requested, unless the
        // action that just finished was the final one.
        if let Some(restart_signal) = &env.restart_requested
            && restart_signal.load(std::sync::atomic::Ordering::Relaxed)
        {
            let is_last_action = cntr > total;
            if !is_last_action {
                crate::rw::log("Restart requested, stopping pipeline after current action...");
                println!("{}", "Files changed, restarting pipeline...".yellow());
                break;
            }
        }
    }
    env.daemons.shutdown().await;
    if let Some(sd_notifier) = sd_notifier {
        let _ = sd_notifier.join();
    }
    // Collect and place artifacts even when the pipeline stopped early.
    let placements = self.collect_artifacts_placements(config, env).await?;
    place_artifacts(env, placements, false)?;
    println!("Done in {}.", format!("{total_time:.2?}").green());
    build_log(&log_file, &[format!("Done in {:.2?}.", total_time)])?;
    Ok(success)
}
/// Renders the pipeline into a shell script inside `env.run_dir`, marks it
/// executable and runs it, streaming its output through the observer.
pub async fn exec_with_shell(
    &self,
    config: &DeployerProjectOptions,
    env: &RunEnvironment<'_>,
) -> anyhow::Result<bool> {
    let script = self.to_shell_script(config, env).await?;
    let filename = format!(".pipe.{}.sh", env.master_pipeline);
    std::fs::write(env.run_dir.join(filename.as_str()), script)?;
    // Escape spaces so the name survives being embedded in a command line.
    let escaped = CustomCommand::escape_with_spaces(filename);
    CustomCommand::run_simple_observer(env, format!("chmod +x {escaped}")).await?;
    CustomCommand::run_simple_observer(env, format!("./{escaped}")).await?;
    Ok(true)
}
/// Translates the whole pipeline into a single POSIX shell script:
/// shebang + `set -e`, color variables, every action's commands, artifact
/// collection and an optional systemd readiness notification.
pub async fn to_shell_script(
    &self,
    config: &DeployerProjectOptions,
    env: &RunEnvironment<'_>,
) -> anyhow::Result<String> {
    let mut lines: Vec<String> = vec!["#!/bin/sh".into(), "".into(), "set -e".into()];
    // Clear the terminal only for plain local runs.
    if !env.ansible_run && !env.containered_build && !env.containered_run {
        lines.push("clear".into());
    }
    // ANSI color variables used by the per-action progress headers.
    for line in ["", "BLUE='\\033[34;3m'", "GREEN='\\033[32m'", "RESET='\\033[0m'", ""] {
        lines.push(line.to_string());
    }
    lines.extend(self.actions_to_shell(config, env).await?);
    // Collect artifacts with best-effort copies (containered builds collect elsewhere).
    if !self.artifacts.is_empty() && !env.containered_build {
        lines.push("mkdir -p artifacts".to_string());
        for (from, to) in self.collect_artifacts_placements(config, env).await? {
            lines.push(format!("cp -rf {} artifacts/{} || true", from, to));
        }
    }
    lines.push(String::new());
    if let Ok("1") = std::env::var("DEPLOYER_NOTIFY_SYSTEMD").as_deref() {
        lines.push("systemd-notify --ready".to_string());
        lines.push(String::new());
    }
    Ok(lines.join("\n"))
}
/// Renders every action as shell commands, each preceded by a `printf`
/// progress header; actions that cannot be translated are skipped with a
/// note instead of failing the whole script.
pub async fn actions_to_shell(
    &self,
    config: &DeployerProjectOptions,
    env: &RunEnvironment<'_>,
) -> anyhow::Result<Vec<String>> {
    let total = self.actions.len();
    let mut lines = vec![];
    for (idx, used) in self.actions.iter().enumerate() {
        let with = config.variables_for(&used.with)?;
        let action = used.definition(&config.actions)?;
        let title = CustomCommand::escape(used.title.as_deref().unwrap_or(action.info.to_str().as_str()));
        // Ansible captures output without a terminal, so skip ANSI colors there.
        let header = if env.ansible_run {
            format!("printf '[{}/{}] Action `{}`...\\n'", idx + 1, total, title)
        } else {
            format!(
                "printf '[{}/{}] Action `%b{}%b`...\\n' \"$BLUE\" \"$RESET\"",
                idx + 1,
                total,
                title
            )
        };
        lines.push(header);
        match action.to_shell(env, &with).await {
            Ok(v) => lines.extend_from_slice(&v),
            Err(e) => {
                eprintln!("{e:?}");
                println!("Skip action during translation into shell script...");
                lines.push("Skip action during translation into shell script...".to_string());
            }
        }
        lines.push(String::new());
    }
    Ok(lines)
}
/// Resolves this pipeline's used actions against `definitions`, silently
/// dropping any action whose definition cannot be found.
pub fn collect_actions(&self, definitions: &BTreeSet<DefinedAction>) -> BTreeSet<DefinedAction> {
    let mut resolved = BTreeSet::new();
    for used in &self.actions {
        if let Ok(definition) = used.definition(definitions) {
            resolved.insert(definition.clone());
        }
    }
    resolved
}
/// Resolves `self.artifacts` into concrete `(from, to)` path pairs,
/// substituting each placement's declared variables (values resolved via
/// `env`) into both the source and destination strings.
///
/// # Errors
/// Fails when the project is missing a required variable or a variable's
/// value cannot be resolved.
pub async fn collect_artifacts_placements(
    &self,
    config: &DeployerProjectOptions,
    env: &RunEnvironment<'_>,
) -> anyhow::Result<Vec<(String, String)>> {
    let mut resolved_placements = Vec::with_capacity(self.artifacts.len());
    for placement in &self.artifacts {
        if placement.with.is_empty() {
            resolved_placements.push((placement.from.clone(), placement.to.clone()));
        } else {
            let vars = config.variables_for(&placement.with)?;
            // Resolve each variable to its concrete value before substitution.
            let vars = {
                let mut _v = BTreeMap::new();
                for (k, v) in vars {
                    _v.insert(k, v.get_value(env).await?);
                }
                _v
            };
            let mut from = placement.from.clone();
            for (k, v) in &vars {
                from = from.replace(k, v.as_str());
            }
            let mut to = placement.to.clone();
            for (k, v) in &vars {
                to = to.replace(k, v.as_str());
            }
            // BUG FIX: previously the substituted pair was computed but never
            // pushed, so placements that used variables were silently dropped.
            resolved_placements.push((from, to));
        }
    }
    Ok(resolved_placements)
}
}
/// Prints every pipeline registered in the global Deployer Registry,
/// including its identity, title, tags and description when present.
pub fn list_pipelines(globals: &DeployerGlobalConfig) -> anyhow::Result<()> {
    println!("Available pipelines in Deployer's Registry:");
    for pipeline in globals.pipelines_registry.iter() {
        let info = pipeline.info.to_str();
        let title = format!("[{}]", pipeline.title);
        let tags = if pipeline.tags.is_empty() {
            String::new()
        } else {
            format!(" (tags: {})", pipeline.tags.join(", ").as_str().blue().italic())
        };
        // Show the description only when it is present and non-empty.
        let desc = match pipeline.desc.as_deref() {
            Some(d) if !d.is_empty() => format!(" > {}", d.green().italic()),
            _ => String::new(),
        };
        println!("• {} {}{}{}", info.blue().bold(), title.green().bold(), tags, desc);
    }
    Ok(())
}
/// Interactively asks the user to pick one pipeline out of the registry,
/// identified by its `info` string.
///
/// # Errors
/// Fails when the registry is empty, the prompt is aborted, or the selected
/// entry cannot be found again.
fn choose_pipeline<'a>(
    pipelines_registry: &'a BTreeSet<DescribedPipeline>,
    prompt: &str,
) -> anyhow::Result<&'a DescribedPipeline> {
    if pipelines_registry.is_empty() {
        bail!("There is no pipelines in the Registry.");
    }
    let mut keys = Vec::with_capacity(pipelines_registry.len());
    for pipeline in pipelines_registry {
        keys.push(pipeline.info.to_str());
    }
    let selected = inquire_reorder::Select::new(prompt, keys).prompt()?;
    for pipeline in pipelines_registry {
        if pipeline.info.to_str() == selected {
            return Ok(pipeline);
        }
    }
    Err(anyhow!("No such pipeline!"))
}
pub fn new_pipeline(globals: &mut DeployerGlobalConfig, args: &NewPipelineArgs) -> anyhow::Result<DescribedPipeline> {
if let Some(from_file) = &args.from {
let pipeline = read_checked::<DescribedPipeline>(from_file)
.map_err(|e| {
panic!("Can't read provided Pipeline file due to: {e}");
})
.unwrap();
globals.pipelines_registry.insert(pipeline.clone());
return Ok(pipeline);
}
let mut updated_actions = globals.actions_registry.clone();
let described_pipeline = DescribedPipeline::new_from_prompt(globals, &mut updated_actions)?;
globals.actions_registry = updated_actions;
if globals
.pipelines_registry
.iter()
.any(|p| p.info == described_pipeline.info)
&& !inquire_reorder::Confirm::new(&format!(
"Pipelines Registry already have `{}` pipeline. Do you want to override it? (y/n)",
described_pipeline.info.to_str(),
))
.prompt()?
{
return Ok(described_pipeline);
}
globals.pipelines_registry.insert(described_pipeline.clone());
Ok(described_pipeline)
}
/// Removes a pipeline from the global registry, selected either by `--info`
/// or interactively; asks for confirmation unless `--yes` was passed.
///
/// # Errors
/// Fails when the given info does not match a registered pipeline, or when
/// an interactive prompt fails.
pub fn remove_pipeline(globals: &mut DeployerGlobalConfig, args: RemovePipelineArgs) -> anyhow::Result<()> {
    let pipeline = if let Some(info) = args.info {
        let info = info.to_info()?;
        globals
            .pipelines_registry
            .iter()
            .find(|pipeline| pipeline.info.eq(&info))
            // FIX: this error previously carried an empty message; use the same
            // wording as `edit_pipeline`.
            .ok_or_else(|| anyhow!("This pipeline is not found in registry!"))?
            .clone()
    } else {
        choose_pipeline(
            &globals.pipelines_registry,
            "Select pipeline for removing from the registry:",
        )?
        .clone()
    };
    if !args.yes && !inquire_reorder::Confirm::new("Are you sure? (y/n)").prompt()? {
        return Ok(());
    }
    globals.pipelines_registry.remove(&pipeline);
    Ok(())
}
/// Pretty-prints a pipeline (selected by `--info` or interactively) as YAML.
///
/// # Errors
/// Fails when the given info does not match a registered pipeline, or when
/// serialization or an interactive prompt fails.
pub fn cat_pipeline(globals: &DeployerGlobalConfig, args: CatPipelineArgs) -> anyhow::Result<()> {
    let pipeline = if let Some(info) = &args.info {
        let info = info.to_info()?;
        globals
            .pipelines_registry
            .iter()
            .find(|pipeline| pipeline.info.eq(&info))
            // FIX: this error previously carried an empty message; use the same
            // wording as `edit_pipeline`.
            .ok_or_else(|| anyhow!("This pipeline is not found in registry!"))?
            .clone()
    } else {
        choose_pipeline(&globals.pipelines_registry, "Select pipeline for displaying:")?.clone()
    };
    let pipeline_ser = serde_pretty_yaml::to_string_pretty(&pipeline)?;
    println!("{pipeline_ser}");
    Ok(())
}
/// Attaches a registry pipeline (or a freshly created one) to the current
/// project: copies the needed action definitions into the project config,
/// prompts for variable bindings, assigns a project-local short name and
/// optionally makes the pipeline the project default.
///
/// # Panics
/// Panics when the project config equals the default value (uninitialized).
pub fn assign_pipeline_to_project(
    globals: &mut DeployerGlobalConfig,
    config: &mut DeployerProjectOptions,
    args: &WithPipelineArgs,
) -> anyhow::Result<()> {
    if *config == Default::default() {
        panic!("Config is invalid! Reinit the project.");
    }
    // Resolve the pipeline: by explicit tag, via an interactive registry
    // listing, or built from scratch when the registry is empty.
    let mut pipeline = if let Some(tag) = &args.tag {
        let info = tag.to_info()?;
        globals
            .pipelines_registry
            .iter()
            .find(|pipe| pipe.info == info)
            .ok_or_else(|| {
                anyhow::anyhow!("There is no such pipeline in Registry. See available pipelines with `depl ls pipelines`.")
            })?
            .clone()
    } else if !globals.pipelines_registry.is_empty() {
        // Build a menu of "`info` - title" labels mapped back to pipelines.
        let mut ptags = bmap!();
        let mut tags = vec![];
        globals
            .pipelines_registry
            .iter()
            .map(|pipe| {
                (
                    format!("`{}` - {}", pipe.info.to_str().blue().bold(), pipe.title.green().bold()),
                    pipe,
                )
            })
            .for_each(|(t, p)| {
                tags.push(t.clone());
                ptags.insert(t, p.clone());
            });
        tags.push("• Specify another pipeline".to_string());
        let selected = inquire_reorder::Select::new("Select the pipeline for this project:", tags).prompt()?;
        if selected.as_str().eq("• Specify another pipeline") {
            DescribedPipeline::new_from_prompt(globals, &mut config.actions)?
        } else {
            let pipeline = ptags.get(&selected).ok_or(anyhow::anyhow!(
                "There is no such pipeline in Registry. See available pipelines with `depl ls pipelines`."
            ))?;
            (*pipeline).clone()
        }
    } else {
        DescribedPipeline::new_from_prompt(globals, &mut config.actions)?
    };
    // Copy the action definitions this pipeline needs into the project config.
    let needed_actions = pipeline.collect_actions(&globals.actions_registry);
    config.actions = config.actions.union(&needed_actions).cloned().collect();
    // Without project variables we can only report what would be required and bail out.
    if config.variables.is_empty() {
        println!("Please, specify variables for project first!");
        let mut needed = vec![];
        for action in &pipeline.actions {
            let definition = action.definition(&config.actions)?;
            for var in definition.collect_required_variables() {
                needed.push(format!(
                    "\t- variable `{var}` for pipeline `{}`, action `{}`",
                    pipeline.info.to_str(),
                    action.used.to_str()
                ));
            }
        }
        println!("These variables are required for specified pipeline:");
        needed.iter().for_each(|v| println!("{v}"));
        return Ok(());
    }
    // Let the user bind each action to concrete project variables.
    for action in &mut pipeline.actions {
        *action = action.prompt_setup_for_project(&config.variables, &config.actions)?;
    }
    let short_name = if let Some(short_name) = args.r#as.as_ref() {
        short_name.to_owned()
    } else {
        inquire_reorder::Text::new("Write the pipeline's short name (only for this project):").prompt()?
    };
    // Preserve the origin title inside the description; the title itself
    // becomes the project-local short name.
    pipeline.desc = Some(format!(
        r#"Got from `{}`.{}{}"#,
        pipeline.title,
        if pipeline.desc.as_ref().is_none_or(|d| d.is_empty()) {
            ""
        } else {
            " "
        },
        pipeline.desc.as_deref().unwrap_or("")
    ));
    pipeline.title = short_name.clone();
    if specify_pipeline_short_name(config, &mut pipeline.title).is_err() {
        return Ok(());
    };
    // At most one pipeline may be the project default; offer to hand it over.
    if let Some(old_default) = config.pipelines.iter_mut().find(|p| p.default.is_some_and(|v| v)) {
        if inquire_reorder::Confirm::new(&format!(
            "Pipeline `{}` is already set by default. Set this pipeline running by default instead?",
            old_default.title.as_str()
        ))
        .prompt()?
        {
            old_default.default = None;
            pipeline.default = Some(true);
        }
    } else if inquire_reorder::Confirm::new("Set this pipeline running by default? (y/n)").prompt()? {
        pipeline.default = Some(true);
    }
    // Replace any existing pipeline that already uses this short name.
    if let Some(i) = config.pipelines.iter().position(|p| p.title.as_str() == short_name) {
        config.pipelines.remove(i);
    }
    config.pipelines.push(pipeline);
    println!("Pipeline is successfully set up for this project.");
    Ok(())
}
/// Opens an interactive editor for a registry pipeline (selected by
/// `--info` or from a list) and writes the edited version back, replacing
/// the registry entry with the same identity.
pub async fn edit_pipeline(globals: &mut DeployerGlobalConfig, args: EditPipelineArgs) -> anyhow::Result<()> {
    let mut pipeline = match args.info {
        Some(info) => {
            let info = info.to_info()?;
            globals
                .pipelines_registry
                .iter()
                .find(|pipeline| pipeline.info.eq(&info))
                .ok_or(anyhow!("This pipeline is not found in registry!"))?
                .clone()
        }
        None => choose_pipeline(&globals.pipelines_registry, "Select pipeline for editing:")?.clone(),
    };
    pipeline
        .edit_pipeline_from_prompt(
            &mut globals.pipelines_registry,
            &globals.actions_registry,
            None,
            &globals.remote_hosts,
            true,
        )
        .await?;
    // Drop the old entry with the same identity, then store the edited one.
    globals
        .pipelines_registry
        .retain(|in_registry| in_registry.info.ne(&pipeline.info));
    globals.pipelines_registry.insert(pipeline);
    Ok(())
}
/// Exports a project pipeline to external formats depending on `args`:
/// a raw shell script, GitHub/GitLab CI config, systemd units, a
/// Dockerfile, a docker-compose file and/or Ansible playbook + inventory.
///
/// # Errors
/// Fails when the pipeline tag is unknown, an export step fails, or the
/// chosen export lacks the corresponding per-driver options.
pub async fn export_project_pipeline(
    config: &DeployerProjectOptions,
    globals: &DeployerGlobalConfig,
    cache_dir: &Path,
    config_dir: &Path,
    storage_dir: &Path,
    args: ExportPipelineArgs,
) -> anyhow::Result<()> {
    let pipeline = config
        .pipelines
        .iter()
        .find(|p| p.title.as_str().eq(args.pipeline_tag.as_str()))
        .ok_or(anyhow!("Can't find chosen pipeline!"))?;
    let curr_dir = std::env::current_dir().expect("Can't get current dir!");
    let artifacts_dir = prepare_artifacts_folder(&curr_dir)?;
    // Synthetic environment rooted at the current directory; the Shell
    // driver is forced here because we are generating a script.
    let env = RunEnvironment {
        master_pipeline: &pipeline.title,
        run_dir: &curr_dir,
        cache_dir,
        config_dir,
        storage_dir,
        project_dir: Some(&curr_dir),
        artifacts_dir: &artifacts_dir,
        artifacts_placements: &pipeline.artifacts,
        new_build: false,
        remotes: &globals.remote_hosts,
        ignore: &config.ignore_files,
        log_file: None,
        containered_build: false,
        containered_run: false,
        ansible_run: false,
        daemons: Default::default(),
        observe_cli: &None,
        skipper: Default::default(),
        driver: PipelineDriver::Shell,
        restart_requested: None,
    };
    let shell_script = pipeline.to_shell_script(config, &env).await?;
    // `--output` names the raw script file only when no CI/CD export will
    // consume that path itself.
    let has_cicd_export = args.gh || args.gl;
    if let Some(shell_path) = args.output.as_deref()
        && !has_cicd_export
    {
        std::fs::write(shell_path, shell_script.as_str())?;
    }
    if args.gh {
        crate::github::export(
            config,
            pipeline,
            pipeline.gh_opts.as_ref(),
            &env,
            shell_script.as_str(),
            args.output.as_deref(),
        )?;
    }
    if args.gl {
        crate::gitlab::export(
            config,
            pipeline,
            pipeline.gl_opts.as_ref(),
            &env,
            shell_script.as_str(),
            args.output.as_deref(),
        )?;
    }
    if args.systemd {
        crate::systemd::export(config, pipeline, &env)?;
    }
    if args.containerized || args.compose || args.ansible {
        // These exports write files into a directory; `--output` selects it,
        // defaulting to the current directory.
        let output_dir = if let Some(ref output) = args.output {
            PathBuf::from(output)
        } else {
            curr_dir.clone()
        };
        // Unlike `env` above, this environment uses the pipeline's own driver
        // and targets the output directory.
        let export_env = RunEnvironment {
            master_pipeline: &pipeline.title,
            run_dir: &output_dir,
            cache_dir,
            config_dir,
            storage_dir,
            project_dir: Some(&curr_dir),
            artifacts_dir: &artifacts_dir,
            artifacts_placements: &pipeline.artifacts,
            new_build: false,
            remotes: &globals.remote_hosts,
            ignore: &config.ignore_files,
            log_file: None,
            containered_build: false,
            containered_run: false,
            ansible_run: false,
            daemons: Default::default(),
            observe_cli: &None,
            skipper: Default::default(),
            driver: pipeline.driver,
            restart_requested: None,
        };
        if args.containerized {
            if let Some(opts) = pipeline.containered_opts.as_ref() {
                let exclusive_exec_tag =
                    pipeline.exclusive_exec_tag.clone().unwrap_or(String::from("default")) + "-containered";
                let vars = config.variables_for(&opts.with)?;
                crate::containered::generate_dockerfile(config, &export_env, pipeline, opts, &exclusive_exec_tag, &vars)
                    .await?;
                crate::containered::generate_dockerignore(&export_env, config)?;
                // Shell-driver pipelines additionally need wrapper scripts in the image.
                if pipeline.driver.is_shell() {
                    crate::containered::generate_shell_driver_scripts(config, &export_env, pipeline).await?;
                }
                println!("Exported Dockerfile to: {}", output_dir.display());
            } else {
                anyhow::bail!("Pipeline `{}` has no containerized options configured!", pipeline.title);
            }
        }
        if args.compose {
            if let Some(opts) = pipeline.compose_opts.as_ref() {
                let exclusive_exec_tag = pipeline.exclusive_exec_tag.clone().unwrap_or(String::from("default")) + "-compose";
                let vars = config.variables_for(&opts.app.with)?;
                crate::compose::generate_compose_file(config, &export_env, pipeline, opts, &exclusive_exec_tag, &vars).await?;
                crate::containered::generate_dockerignore(&export_env, config)?;
                if pipeline.driver.is_shell() {
                    crate::containered::generate_shell_driver_scripts(config, &export_env, pipeline).await?;
                }
                println!("Exported docker-compose file to: {}", output_dir.display());
            } else {
                anyhow::bail!("Pipeline `{}` has no compose options configured!", pipeline.title);
            }
        }
        if args.ansible {
            if let Some(ansible_opts) = &pipeline.ansible_opts {
                crate::ansible::make_inventory(&export_env, ansible_opts, export_env.remotes)?;
                crate::ansible::make_playbook(config, &export_env, pipeline).await?;
                println!("Exported Ansible playbook and inventory to: {}", output_dir.display());
            } else {
                anyhow::bail!("Pipeline `{}` has no Ansible options configured!", pipeline.title);
            }
        }
    }
    println!("{}", "Export completed.".green());
    Ok(())
}
/// Validates a project pipeline without running it: checks that the required
/// external tools exist (docker/podman/ansible-playbook), that pipeline
/// requirements are satisfied, that every action definition resolves, that
/// patch actions apply cleanly and that all variables have values.
///
/// Prints each problem found; exits the process with code 1 when any error
/// was detected (including an unknown pipeline tag).
pub async fn check_project_pipeline(
    config: &DeployerProjectOptions,
    globals: &DeployerGlobalConfig,
    cache_dir: &Path,
    config_dir: &Path,
    storage_dir: &Path,
    args: CheckProjectPipelineArgs,
) {
    let pipeline = if let Some(pipe) = config
        .pipelines
        .iter()
        .find(|p| p.title.as_str().eq(args.pipeline_tag.as_str()))
    {
        pipe
    } else {
        println!("{}", "Can't find chosen pipeline!".red());
        exit(1);
    };
    let curr_dir = std::env::current_dir().expect("Can't get current dir!");
    let artifacts_dir = prepare_artifacts_folder(&curr_dir).expect("Can't prepare artifacts folder!");
    // Synthetic run environment used only for requirement/variable checks.
    let env = RunEnvironment {
        master_pipeline: &pipeline.title,
        run_dir: &curr_dir,
        cache_dir,
        config_dir,
        storage_dir,
        project_dir: Some(&curr_dir),
        artifacts_dir: &artifacts_dir,
        artifacts_placements: &pipeline.artifacts,
        new_build: false,
        remotes: &globals.remote_hosts,
        ignore: &config.ignore_files,
        log_file: None,
        containered_build: false,
        containered_run: false,
        ansible_run: false,
        daemons: Default::default(),
        observe_cli: &None,
        skipper: Default::default(),
        driver: pipeline.driver,
        restart_requested: None,
    };
    let mut errs = 0u32;
    // The configured container executor must be on PATH.
    if let Some(opts) = &pipeline.containered_opts {
        if opts.executor.is_docker() && Requirement::in_path("docker").satisfy(&env).await.is_err() {
            println!("{}", "Docker is not installed!".red());
            errs += 1;
        } else if !opts.executor.is_docker() && Requirement::in_path("podman").satisfy(&env).await.is_err() {
            println!("{}", "Podman is not installed!".red());
            errs += 1;
        }
    }
    // Compose pipelines also need their effective executor.
    if let Some(opts) = &pipeline.compose_opts {
        let executor = opts.effective_executor();
        if executor.is_docker() && Requirement::in_path("docker").satisfy(&env).await.is_err() {
            println!("{}", "Docker is not installed (required for Compose)!".red());
            errs += 1;
        } else if !executor.is_docker() && Requirement::in_path("podman").satisfy(&env).await.is_err() {
            println!("{}", "Podman is not installed (required for Compose)!".red());
            errs += 1;
        }
    }
    if pipeline.ansible_opts.is_some() && Requirement::in_path("ansible-playbook").satisfy(&env).await.is_err() {
        println!("{}", "Ansible is not installed!".red());
        errs += 1;
    }
    let (satisfied, _) = pipeline.satisfy_all_requirements(config, &env).await.unwrap();
    if !satisfied {
        println!("{}", "Requirements are not satisfied!".red());
        errs += 1;
    }
    for used in &pipeline.actions {
        // Resolve the action definition; dry-run patch actions while we're here.
        match used.definition(&config.actions) {
            Err(e) => {
                println!(
                    "{} {} due to: {e}.",
                    "Can't find definition of this action:".red(),
                    used.used.to_str().blue()
                );
                errs += 1;
                continue;
            }
            Ok(action) => {
                if let Action::Patch(patch) = &action.action {
                    println!(
                        "Checking patch {}. Run with `{}` environment variable to verbose patch output.",
                        action.info.to_str().blue(),
                        "SMPTCHVERBOSE=1".green()
                    );
                    match patch.test(&env).await {
                        Err(e) => {
                            println!(
                                "{} {} due to: {e}.",
                                "Can't test patch action:".red(),
                                action.info.to_str().blue()
                            );
                            errs += 1;
                            continue;
                        }
                        Ok((status, out)) => {
                            if !status {
                                // NOTE(review): `out[0]` panics when the patch test
                                // produced no output — confirm `test` always returns
                                // at least one line on failure.
                                println!(
                                    "{} {} due to: {}.",
                                    "Patch is invalid:".red(),
                                    action.info.to_str().blue(),
                                    out[0],
                                );
                                errs += 1;
                                continue;
                            }
                        }
                    }
                }
            }
        }
        // Every variable the action uses must resolve to a value.
        let vars = if let Ok(vars) = config.variables_for(&used.with) {
            vars
        } else {
            println!(
                "{} {}.",
                "Can't collect all variables in this project for action:".red(),
                used.used.to_str().blue()
            );
            errs += 1;
            continue;
        };
        for (name, var) in vars {
            if var.get_value(&env).await.is_err() {
                println!(
                    "{} {}.",
                    "Can't get value of variable in this project with name:".red(),
                    name
                );
                errs += 1;
                continue;
            }
        }
    }
    if errs > 0 {
        exit(1);
    }
}