use crate::schema::{SchemaType, SchemaValidator};
use crate::workflow;
use std::collections::HashMap;
use std::fs;
use std::path::Path;
use thiserror::Error;
use wrkflw_models::gitlab::Pipeline;
use wrkflw_models::ValidationResult;
/// Errors produced while reading, validating, or deserializing a GitLab CI
/// pipeline file.
#[derive(Error, Debug)]
pub enum GitlabParserError {
    /// The pipeline file could not be read from disk.
    #[error("I/O error: {0}")]
    IoError(#[from] std::io::Error),
    /// The file content is not valid YAML, or does not deserialize into
    /// the `Pipeline` model.
    #[error("YAML parsing error: {0}")]
    YamlError(#[from] serde_yaml::Error),
    /// A structural problem with the parsed pipeline.
    /// NOTE(review): not constructed anywhere in this file — presumably
    /// raised by other modules; confirm before removing.
    #[error("Invalid pipeline structure: {0}")]
    InvalidStructure(String),
    /// The content failed validation against the GitLab CI schema
    /// (see `SchemaValidator::validate_with_specific_schema`).
    #[error("Schema validation error: {0}")]
    SchemaValidationError(String),
}
/// Reads, schema-validates, and deserializes a GitLab CI pipeline file.
///
/// The raw YAML is validated against the GitLab schema *before*
/// deserialization so schema problems surface with their own error
/// variant rather than as a generic YAML error.
///
/// # Errors
/// * [`GitlabParserError::IoError`] — the file cannot be read.
/// * [`GitlabParserError::SchemaValidationError`] — validator construction
///   or schema validation failed.
/// * [`GitlabParserError::YamlError`] — deserialization into `Pipeline` failed.
pub fn parse_pipeline(pipeline_path: &Path) -> Result<Pipeline, GitlabParserError> {
    let contents = fs::read_to_string(pipeline_path)?;

    SchemaValidator::new()
        .map_err(GitlabParserError::SchemaValidationError)?
        .validate_with_specific_schema(&contents, SchemaType::GitLab)
        .map_err(GitlabParserError::SchemaValidationError)?;

    Ok(serde_yaml::from_str(&contents)?)
}
/// Performs structural checks that go beyond schema validation:
///
/// 1. the pipeline defines at least one job;
/// 2. every non-template job has a `script` or `extends`;
/// 3. every referenced stage appears in the declared `stages` list;
/// 4. every `dependencies` entry names an existing job;
/// 5. every `extends` entry names an existing job.
///
/// All findings are accumulated into the returned [`ValidationResult`];
/// the function never short-circuits on the first problem.
pub fn validate_pipeline_structure(pipeline: &Pipeline) -> ValidationResult {
    let mut result = ValidationResult::new();

    // An empty pipeline can never run anything.
    if pipeline.jobs.is_empty() {
        result.add_issue("Pipeline must contain at least one job".to_string());
    }

    // Pass 1: concrete (non-template) jobs need something to execute.
    for (job_name, job) in &pipeline.jobs {
        let is_template = matches!(job.template, Some(true));
        if !is_template && job.script.is_none() && job.extends.is_none() {
            result.add_issue(format!(
                "Job '{}' must have a script section or extend another job",
                job_name
            ));
        }
    }

    // Pass 2: stage references must appear in the declared stage list.
    // Skipped entirely when no `stages` key is present.
    if let Some(stages) = &pipeline.stages {
        for (job_name, job) in &pipeline.jobs {
            match &job.stage {
                Some(stage) if !stages.contains(stage) => {
                    result.add_issue(format!(
                        "Job '{}' references undefined stage '{}'",
                        job_name, stage
                    ));
                }
                _ => {}
            }
        }
    }

    // Pass 3: `dependencies` must name jobs defined in this pipeline.
    for (job_name, job) in &pipeline.jobs {
        let missing = job
            .dependencies
            .iter()
            .flatten()
            .filter(|dep| !pipeline.jobs.contains_key(*dep));
        for dependency in missing {
            result.add_issue(format!(
                "Job '{}' depends on undefined job '{}'",
                job_name, dependency
            ));
        }
    }

    // Pass 4: `extends` must name jobs defined in this pipeline.
    for (job_name, job) in &pipeline.jobs {
        let missing = job
            .extends
            .iter()
            .flatten()
            .filter(|parent| !pipeline.jobs.contains_key(*parent));
        for extend in missing {
            result.add_issue(format!(
                "Job '{}' extends undefined job '{}'",
                job_name, extend
            ));
        }
    }

    result
}
/// Converts a parsed GitLab CI pipeline into the internal GitHub-Actions-style
/// [`workflow::WorkflowDefinition`].
///
/// Mapping notes:
/// * Template jobs (`template == Some(true)`) are skipped — they are not
///   runnable on their own.
/// * Every other job runs on `ubuntu-latest` with each `before_script`,
///   `script`, and `after_script` command mapped to one shell step.
/// * `after_script` steps get `continue_on_error: Some(true)` so a failing
///   cleanup command does not fail the converted job.
/// * Job-level variables take precedence over pipeline-level variables
///   (pipeline variables are only inserted where the key is absent).
/// * Services are keyed positionally as `service-{index}`.
pub fn convert_to_workflow_format(pipeline: &Pipeline) -> workflow::WorkflowDefinition {
    let mut workflow = workflow::WorkflowDefinition {
        name: "Converted GitLab CI Pipeline".to_string(),
        on: vec!["push".to_string()],
        on_raw: serde_yaml::Value::String("push".to_string()),
        jobs: HashMap::new(),
    };

    for (job_name, gitlab_job) in &pipeline.jobs {
        // Hidden/template jobs are extension bases, not executable jobs.
        if let Some(true) = gitlab_job.template {
            continue;
        }

        let mut job = workflow::Job {
            runs_on: Some(vec!["ubuntu-latest".to_string()]),
            needs: None,
            steps: Vec::new(),
            env: HashMap::new(),
            matrix: None,
            services: HashMap::new(),
            if_condition: None,
            outputs: None,
            permissions: None,
            uses: None,
            with: None,
            secrets: None,
        };

        // Job-level variables go in first so they win over pipeline-level
        // ones, which are only inserted where the key is still absent.
        if let Some(variables) = &gitlab_job.variables {
            job.env.extend(variables.clone());
        }
        if let Some(variables) = &pipeline.variables {
            for (key, value) in variables {
                job.env.entry(key.clone()).or_insert_with(|| value.clone());
            }
        }

        if let Some(before_script) = &gitlab_job.before_script {
            for (i, cmd) in before_script.iter().enumerate() {
                job.steps
                    .push(shell_step(format!("Before script {}", i + 1), cmd, None));
            }
        }
        if let Some(script) = &gitlab_job.script {
            for (i, cmd) in script.iter().enumerate() {
                job.steps
                    .push(shell_step(format!("Run script line {}", i + 1), cmd, None));
            }
        }
        if let Some(after_script) = &gitlab_job.after_script {
            for (i, cmd) in after_script.iter().enumerate() {
                // Mirror GitLab's after_script semantics: cleanup failures
                // should not fail the job.
                job.steps.push(shell_step(
                    format!("After script {}", i + 1),
                    cmd,
                    Some(true),
                ));
            }
        }

        if let Some(services) = &gitlab_job.services {
            for (i, service) in services.iter().enumerate() {
                let service_image = match service {
                    wrkflw_models::gitlab::Service::Simple(name) => name.clone(),
                    wrkflw_models::gitlab::Service::Detailed { name, .. } => name.clone(),
                };
                job.services.insert(
                    format!("service-{}", i),
                    workflow::Service {
                        image: service_image,
                        ports: None,
                        env: HashMap::new(),
                        volumes: None,
                        options: None,
                    },
                );
            }
        }

        workflow.jobs.insert(job_name.clone(), job);
    }

    workflow
}

/// Builds one shell step with the given display name and command.
/// `continue_on_error` is `Some(true)` only for `after_script` steps.
fn shell_step(name: String, cmd: &str, continue_on_error: Option<bool>) -> workflow::Step {
    workflow::Step {
        name: Some(name),
        uses: None,
        run: Some(cmd.to_string()),
        with: None,
        env: HashMap::new(),
        continue_on_error,
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::NamedTempFile;

    /// Round-trips a minimal two-stage pipeline through `parse_pipeline`
    /// and checks the stage list, job count, and each job's stage/script.
    #[test]
    fn test_parse_simple_pipeline() {
        let yaml = r#"
stages:
  - build
  - test

build_job:
  stage: build
  script:
    - echo "Building..."
    - make build

test_job:
  stage: test
  script:
    - echo "Testing..."
    - make test
"#;
        let file = NamedTempFile::new().unwrap();
        fs::write(file.path(), yaml).unwrap();

        let pipeline = parse_pipeline(file.path()).unwrap();

        assert_eq!(pipeline.stages.as_ref().unwrap().len(), 2);
        assert_eq!(pipeline.jobs.len(), 2);

        // Both jobs share the same shape: a stage name and a 2-line script.
        for (job_name, expected_stage) in [("build_job", "build"), ("test_job", "test")] {
            let job = pipeline.jobs.get(job_name).unwrap();
            assert_eq!(job.stage.as_ref().unwrap(), expected_stage);
            assert_eq!(job.script.as_ref().unwrap().len(), 2);
        }
    }
}