use crate::engine::{Mission, MissionStep, StepType};
use crate::transpiler::common::{TranspilerUtils, TranspilationContext};
use crate::core::Result;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
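/// Top-level representation of a GitHub Actions workflow file, mirroring the
/// YAML schema: `name`, `on`, `jobs`, plus optional `env` and `defaults`.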
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GitHubWorkflow {
pub name: Option<String>,
pub on: WorkflowTrigger,
pub jobs: HashMap<String, Job>,
pub env: Option<HashMap<String, String>>,
pub defaults: Option<Defaults>,
}
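/// The workflow's `on:` trigger, which GitHub accepts as a bare event name,
/// a list of event names, or a full event-configuration map.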
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum WorkflowTrigger {
Simple(String),
List(Vec<String>),
Complex(HashMap<String, serde_yaml::Value>),
}
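/// A single entry under `jobs:`, covering runner selection, `needs:`
/// dependencies, matrix strategy, and job-level timeout/error handling.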
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Job {
pub name: Option<String>,
#[serde(rename = "runs-on")]
pub runs_on: RunsOn,
pub needs: Option<Dependencies>,
pub steps: Vec<Step>,
pub env: Option<HashMap<String, String>>,
pub strategy: Option<Strategy>,
#[serde(rename = "timeout-minutes")]
pub timeout_minutes: Option<u32>,
#[serde(rename = "continue-on-error")]
pub continue_on_error: Option<bool>,
}
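/// One entry in a job's `steps:` list; a step provides either `uses:`
/// (a marketplace action) or `run:` (a shell script).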
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Step {
pub id: Option<String>,
pub name: Option<String>,
pub uses: Option<String>,
pub run: Option<String>,
#[serde(rename = "with")]
pub with_params: Option<HashMap<String, serde_yaml::Value>>,
pub env: Option<HashMap<String, String>>,
#[serde(rename = "continue-on-error")]
pub continue_on_error: Option<bool>,
#[serde(rename = "timeout-minutes")]
pub timeout_minutes: Option<u32>,
#[serde(rename = "working-directory")]
pub working_directory: Option<String>,
pub shell: Option<String>,
}
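/// `runs-on:` value: a single runner label or a list of labels.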
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum RunsOn {
Simple(String),
List(Vec<String>),
}
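/// `needs:` value: a single upstream job id or a list of them.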
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum Dependencies {
Single(String),
Multiple(Vec<String>),
}
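/// `strategy:` block controlling matrix expansion and parallelism.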
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Strategy {
pub matrix: Option<HashMap<String, serde_yaml::Value>>,
#[serde(rename = "fail-fast")]
pub fail_fast: Option<bool>,
#[serde(rename = "max-parallel")]
pub max_parallel: Option<u32>,
}
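/// Workflow-level `defaults:` block.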
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Defaults {
pub run: Option<RunDefaults>,
}
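/// Default shell and working directory inherited by `run:` steps.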
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RunDefaults {
pub shell: Option<String>,
#[serde(rename = "working-directory")]
pub working_directory: Option<String>,
}
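/// Parses GitHub Actions workflow YAML and transpiles it into a [`Mission`].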
pub struct GitHubActionsParser;
impl GitHubActionsParser {
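    /// Parses a workflow from a YAML string and converts it into a [`Mission`].
    ///
    /// A minimal usage sketch (assumes a tokio runtime; not compiled as a doctest):
    /// ```ignore
    /// let mission = GitHubActionsParser::parse_string("name: CI\non: push\njobs: {}").await?;
    /// assert_eq!(mission.name, "CI");
    /// ```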
pub async fn parse_string(content: &str) -> Result<Mission> {
let workflow: GitHubWorkflow = serde_yaml::from_str(content)
.map_err(|e| format!("Failed to parse GitHub Actions workflow: {}", e))?;
Self::convert_workflow_to_mission(workflow).await
}
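    /// Reads a workflow file from disk and delegates to [`Self::parse_string`].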
pub async fn parse_file(file_path: &str) -> Result<Mission> {
let content = tokio::fs::read_to_string(file_path).await
.map_err(|e| format!("Failed to read GitHub Actions workflow file: {}", e))?;
Self::parse_string(&content).await
}
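    /// Flattens every job into a single step list, then rewires job-level
    /// `needs:` edges as step dependencies. Jobs are visited in `HashMap`
    /// order, so cross-job ordering comes from dependencies, not from a
    /// step's position in the list.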
async fn convert_workflow_to_mission(workflow: GitHubWorkflow) -> Result<Mission> {
let mission_name = workflow.name.clone().unwrap_or_else(|| "github_actions_workflow".to_string());
let mut context = TranspilationContext::new(mission_name.clone());
let mut steps = Vec::new();
let mut job_dependencies: HashMap<String, Vec<String>> = HashMap::new();
for (job_id, job) in &workflow.jobs {
let job_steps = Self::convert_job_to_steps(job_id, job, &mut context)?;
if let Some(needs) = &job.needs {
let deps = match needs {
Dependencies::Single(dep) => vec![dep.clone()],
Dependencies::Multiple(deps) => deps.clone(),
};
job_dependencies.insert(job_id.clone(), deps);
}
steps.extend(job_steps);
}
Self::apply_job_dependencies(&mut steps, &job_dependencies)?;
let description = Self::generate_description(&workflow);
let mission = TranspilerUtils::create_mission(
mission_name,
description,
steps,
);
Ok(mission)
}
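    /// Emits a synthetic setup step for the job, then one mission step per
    /// workflow step, with ids namespaced as `<job_id>_<suffix>`.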
fn convert_job_to_steps(job_id: &str, job: &Job, context: &mut TranspilationContext) -> Result<Vec<MissionStep>> {
let mut steps = Vec::new();
let setup_step = Self::create_job_setup_step(job_id, job, context)?;
if let Some(step) = setup_step {
steps.push(step);
}
for (step_index, step) in job.steps.iter().enumerate() {
let rustchain_step = Self::convert_step_to_mission_step(
job_id,
step_index,
step,
job,
context
)?;
steps.push(rustchain_step);
}
Ok(steps)
}
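    /// Builds a no-op setup step that records the job's runner label and
    /// job-level environment.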
fn create_job_setup_step(job_id: &str, job: &Job, _context: &mut TranspilationContext) -> Result<Option<MissionStep>> {
        let runner = match &job.runs_on {
            RunsOn::Simple(runner) => runner.clone(),
            RunsOn::List(runners) => runners
                .first()
                .cloned()
                .unwrap_or_else(|| "ubuntu-latest".to_string()),
        };
let step_id = format!("{}_setup", job_id);
let step_name = format!("Setup {} Environment", job_id);
        let parameters = serde_json::json!({
            "runner": runner,
            "job_id": job_id,
            "environment": job.env.clone().unwrap_or_default()
        });
        Ok(Some(MissionStep {
            id: step_id,
            name: step_name,
            step_type: StepType::Noop,
            parameters,
            depends_on: None,
            timeout_seconds: Some(300),
            continue_on_error: None,
        }))
}
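    /// Converts a single workflow step: `run:` maps to a command step,
    /// `uses:` to an action step, and anything else to a no-op.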
fn convert_step_to_mission_step(
job_id: &str,
step_index: usize,
step: &Step,
job: &Job,
_context: &mut TranspilationContext,
) -> Result<MissionStep> {
let step_id = step.id.clone().unwrap_or_else(|| format!("{}_{}", job_id, step_index));
let step_name = step.name.clone().unwrap_or_else(|| format!("Step {}", step_index + 1));
let (step_type, parameters) = if let Some(run_command) = &step.run {
Self::create_command_step(run_command, step, job)?
} else if let Some(action) = &step.uses {
Self::create_action_step(action, step)?
} else {
(StepType::Noop, serde_json::json!({}))
};
        // A step-level timeout overrides the job-level one; fall back to 30 minutes.
        let timeout_seconds = step.timeout_minutes
            .or(job.timeout_minutes)
            .map(|mins| (mins as u64) * 60)
            .or(Some(1800));
Ok(MissionStep {
id: step_id,
name: step_name,
step_type,
parameters,
            depends_on: None,
            timeout_seconds,
continue_on_error: step.continue_on_error.or(job.continue_on_error),
})
}
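    /// Wraps a `run:` script in a shell invocation, carrying over the step's
    /// working directory and environment variables when present.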
    fn create_command_step(run_command: &str, step: &Step, _job: &Job) -> Result<(StepType, serde_json::Value)> {
        // Default to bash when no `shell:` is given (GitHub's default on Linux runners).
        let shell = step.shell.clone().unwrap_or_else(|| "bash".to_string());
        let working_dir = step.working_directory.as_ref();
        // Invoke the script through the shell, `<shell> -c "<script>"` style.
        let mut parameters = serde_json::json!({
            "command": shell,
            "args": ["-c", run_command]
        });
if let Some(dir) = working_dir {
parameters["working_directory"] = serde_json::Value::String(dir.clone());
}
if let Some(env) = &step.env {
parameters["environment"] = serde_json::to_value(env)
.map_err(|e| format!("Failed to serialize environment variables: {}", e))?;
}
Ok((StepType::Command, parameters))
}
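    /// Records a `uses:` action together with its `with:` inputs and `env:`
    /// variables, then maps well-known actions to native step types.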
fn create_action_step(action: &str, step: &Step) -> Result<(StepType, serde_json::Value)> {
let mut parameters = serde_json::json!({
"action": action
});
if let Some(with_params) = &step.with_params {
parameters["parameters"] = serde_json::to_value(with_params)
.map_err(|e| format!("Failed to serialize action parameters: {}", e))?;
}
if let Some(env) = &step.env {
parameters["environment"] = serde_json::to_value(env)
.map_err(|e| format!("Failed to serialize environment variables: {}", e))?;
}
        // Map well-known actions onto native step types; anything unrecognized
        // falls back to the generic tool step.
        let step_type = match action {
            a if a.contains("checkout") => StepType::GitStatus,
            a if a.contains("upload-artifact") || a.contains("download-artifact") => StepType::CopyFile,
            a if a.contains("setup-node") || a.contains("setup-python") || a.contains("setup-java") => StepType::Noop,
            a if a.contains("cache") => StepType::Noop,
            _ => StepType::Tool,
        };
Ok((step_type, parameters))
}
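    /// Translates job-level `needs:` into step dependencies: every step of a
    /// dependent job comes to depend on the *last* step of each upstream job.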
fn apply_job_dependencies(steps: &mut Vec<MissionStep>, job_deps: &HashMap<String, Vec<String>>) -> Result<()> {
        // Step ids are namespaced as "<job_id>_<suffix>" (a "setup" marker or a
        // step index). Recover the owning job by stripping the last
        // underscore-delimited segment; `rfind` (rather than `find`) keeps job
        // ids that themselves contain underscores intact.
        let job_id_of = |step_id: &str| match step_id.rfind('_') {
            Some(pos) => step_id[..pos].to_string(),
            None => step_id.to_string(),
        };
        let mut job_to_steps: HashMap<String, Vec<String>> = HashMap::new();
        for step in steps.iter() {
            job_to_steps
                .entry(job_id_of(&step.id))
                .or_default()
                .push(step.id.clone());
        }
        for step in steps.iter_mut() {
            let job_id = job_id_of(&step.id);
if let Some(deps) = job_deps.get(&job_id) {
let mut dependencies = Vec::new();
for dep_job in deps {
if let Some(dep_steps) = job_to_steps.get(dep_job) {
if let Some(last_step) = dep_steps.last() {
dependencies.push(last_step.clone());
}
}
}
if !dependencies.is_empty() {
step.depends_on = Some(dependencies);
}
}
}
Ok(())
}
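    /// Summarizes the workflow (name, job count, triggers) as a single
    /// pipe-separated description line.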
fn generate_description(workflow: &GitHubWorkflow) -> Option<String> {
let mut desc_parts = Vec::new();
if let Some(name) = &workflow.name {
desc_parts.push(format!("GitHub Actions Workflow: {}", name));
} else {
desc_parts.push("GitHub Actions Workflow".to_string());
}
desc_parts.push(format!("Jobs: {}", workflow.jobs.len()));
match &workflow.on {
WorkflowTrigger::Simple(trigger) => {
desc_parts.push(format!("Trigger: {}", trigger));
}
WorkflowTrigger::List(triggers) => {
desc_parts.push(format!("Triggers: {}", triggers.join(", ")));
}
WorkflowTrigger::Complex(_) => {
desc_parts.push("Triggers: Complex configuration".to_string());
}
}
Some(desc_parts.join(" | "))
}
}
#[cfg(test)]
mod tests {
use super::*;
#[tokio::test]
async fn test_parse_simple_workflow() {
let workflow_yaml = r#"
name: CI
on: [push, pull_request]
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Run tests
run: npm test
"#;
let mission = GitHubActionsParser::parse_string(workflow_yaml).await.unwrap();
assert_eq!(mission.name, "CI");
assert!(mission.description.is_some());
assert!(mission.steps.len() >= 2);
let checkout_step = mission.steps.iter().find(|s| s.name.contains("Checkout"));
assert!(checkout_step.is_some());
let test_step = mission.steps.iter().find(|s| s.name.contains("Run tests"));
assert!(test_step.is_some());
assert!(matches!(test_step.unwrap().step_type, StepType::Command));
}
#[tokio::test]
async fn test_parse_multi_job_workflow() {
let workflow_yaml = r#"
name: Build and Deploy
on: push
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Build app
run: npm run build
deploy:
runs-on: ubuntu-latest
needs: build
steps:
- name: Deploy app
run: ./deploy.sh
"#;
let mission = GitHubActionsParser::parse_string(workflow_yaml).await.unwrap();
assert_eq!(mission.name, "Build and Deploy");
assert!(mission.steps.len() >= 4);
let deploy_step = mission.steps.iter()
.find(|s| s.name.contains("Deploy app"))
.unwrap();
assert!(deploy_step.depends_on.is_some());
assert!(!deploy_step.depends_on.as_ref().unwrap().is_empty());
}
#[tokio::test]
async fn test_parse_workflow_with_environment() {
let workflow_yaml = r#"
name: Test with Environment
on: push
env:
NODE_ENV: test
API_KEY: secret
jobs:
test:
runs-on: ubuntu-latest
env:
DATABASE_URL: postgres://test
steps:
- name: Run tests with env
run: npm test
env:
TEST_VAR: value
"#;
let mission = GitHubActionsParser::parse_string(workflow_yaml).await.unwrap();
assert_eq!(mission.name, "Test with Environment");
let test_step = mission.steps.iter()
.find(|s| s.name.contains("Run tests"))
.unwrap();
assert!(test_step.parameters.get("environment").is_some());
}
#[tokio::test]
async fn test_parse_workflow_with_matrix() {
let workflow_yaml = r#"
name: Matrix Build
on: push
jobs:
test:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [14, 16, 18]
steps:
- name: Setup Node
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
- name: Run tests
run: npm test
"#;
let mission = GitHubActionsParser::parse_string(workflow_yaml).await.unwrap();
assert_eq!(mission.name, "Matrix Build");
let setup_step = mission.steps.iter()
.find(|s| s.name.contains("Setup Node"))
.unwrap();
        assert!(matches!(setup_step.step_type, StepType::Noop));
        assert!(setup_step.parameters.get("action").is_some());
}
#[tokio::test]
async fn test_parse_workflow_with_timeout() {
let workflow_yaml = r#"
name: Test with Timeout
on: push
jobs:
test:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- name: Long running task
run: sleep 30
timeout-minutes: 2
"#;
let mission = GitHubActionsParser::parse_string(workflow_yaml).await.unwrap();
let long_task = mission.steps.iter()
.find(|s| s.name.contains("Long running"))
.unwrap();
assert_eq!(long_task.timeout_seconds, Some(120));
}
#[tokio::test]
async fn test_parse_empty_workflow() {
let workflow_yaml = r#"
name: Empty Workflow
on: push
jobs: {}
"#;
let mission = GitHubActionsParser::parse_string(workflow_yaml).await.unwrap();
assert_eq!(mission.name, "Empty Workflow");
assert_eq!(mission.steps.len(), 0);
}
#[tokio::test]
async fn test_parse_workflow_with_different_shells() {
let workflow_yaml = r#"
name: Shell Test
on: push
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Bash command
run: echo "Hello from bash"
shell: bash
- name: PowerShell command
run: Write-Host "Hello from PowerShell"
shell: pwsh
"#;
let mission = GitHubActionsParser::parse_string(workflow_yaml).await.unwrap();
let bash_step = mission.steps.iter()
.find(|s| s.name.contains("Bash command"))
.unwrap();
let powershell_step = mission.steps.iter()
.find(|s| s.name.contains("PowerShell command"))
.unwrap();
assert!(matches!(bash_step.step_type, StepType::Command));
assert!(matches!(powershell_step.step_type, StepType::Command));
assert!(bash_step.parameters.get("command").is_some());
assert!(powershell_step.parameters.get("command").is_some());
}
#[test]
fn test_workflow_trigger_parsing() {
let simple_yaml = "push";
let trigger: WorkflowTrigger = serde_yaml::from_str(simple_yaml).unwrap();
assert!(matches!(trigger, WorkflowTrigger::Simple(_)));
let list_yaml = "[push, pull_request]";
let trigger: WorkflowTrigger = serde_yaml::from_str(list_yaml).unwrap();
assert!(matches!(trigger, WorkflowTrigger::List(_)));
}
}