use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::path::Path;
/// Top-level configuration driving CI file generation: which provider to
/// target, the test matrix to expand, performance gates for the generated
/// check script, and failure-notification settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CIConfig {
/// Human-readable project name (currently informational only; not
/// interpolated into the generated files).
pub project_name: String,
/// CI provider whose pipeline-file format will be emitted.
pub provider: CIProvider,
/// Rust versions / OSes / feature sets to test against.
pub test_matrix: TestMatrix,
/// Numeric thresholds baked into `scripts/performance_check.sh`.
pub performance_thresholds: PerformanceThresholds,
/// Channels to notify on build failure (used by Jenkins/Travis output).
pub notifications: NotificationConfig,
}
/// Supported CI providers; selects which pipeline file format
/// [`CIIntegration::generate_ci_config`] writes.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum CIProvider {
/// GitHub Actions (`.github/workflows/ci.yml`).
GitHubActions,
/// GitLab CI (`.gitlab-ci.yml`).
GitLabCI,
/// Jenkins declarative pipeline (`Jenkinsfile`).
Jenkins,
/// Travis CI (`.travis.yml`).
Travis,
/// CircleCI (`.circleci/config.yml`).
CircleCI,
}
/// Axes of the CI test matrix, expanded into provider-specific matrix syntax.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TestMatrix {
/// Toolchain names/versions (e.g. "stable", "beta", "1.70").
pub rust_versions: Vec<String>,
/// Runner images/OS labels (e.g. "ubuntu-latest").
pub operating_systems: Vec<String>,
/// Hardware labels (e.g. "cpu", "gpu"); not yet interpolated into any
/// generated file — see the generator methods.
pub hardware_configs: Vec<String>,
/// Each inner vec is one `--features` combination to test together.
pub feature_combinations: Vec<Vec<String>>,
}
/// Performance gates written into the generated `performance_check.sh` as
/// shell variables; the script's check functions are currently placeholders.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceThresholds {
/// Maximum acceptable latency in milliseconds.
pub max_latency_ms: f64,
/// Minimum acceptable throughput (units defined by the benchmarks).
pub min_throughput: f64,
/// Maximum acceptable memory footprint in megabytes.
pub max_memory_mb: f64,
/// Maximum tolerated regression versus baseline, in percent.
pub max_regression_percent: f64,
}
/// Failure-notification channels; consulted when rendering the Jenkins and
/// Travis configurations.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NotificationConfig {
/// Emit a Slack notification step on failure.
pub slack_enabled: bool,
/// Emit an email notification step on failure.
pub email_enabled: bool,
/// Discord flag — currently unused by any generator.
pub discord_enabled: bool,
/// Webhook URLs — currently unused by any generator.
pub webhooks: Vec<String>,
}
/// Generator that renders CI pipeline files and helper scripts from a
/// [`CIConfig`]. All output is written relative to a caller-supplied
/// directory; see [`CIIntegration::generate_ci_config`].
pub struct CIIntegration {
// Immutable configuration snapshot taken at construction time.
config: CIConfig,
}
impl CIIntegration {
pub fn new(config: CIConfig) -> Self {
Self { config }
}
/// Renders the provider-specific pipeline file plus the shared helper
/// scripts (test runner, benchmark runner, performance gate) under
/// `output_dir`.
///
/// # Errors
/// Propagates any I/O error from directory creation or file writes.
pub fn generate_ci_config(&self, output_dir: &Path) -> Result<()> {
// Provider-specific pipeline file first.
match self.config.provider {
CIProvider::GitHubActions => self.generate_github_actions(output_dir)?,
CIProvider::GitLabCI => self.generate_gitlab_ci(output_dir)?,
CIProvider::Jenkins => self.generate_jenkins_file(output_dir)?,
CIProvider::Travis => self.generate_travis_ci(output_dir)?,
CIProvider::CircleCI => self.generate_circle_ci(output_dir)?,
}
// Helper scripts are provider-agnostic and always emitted.
self.generate_test_scripts(output_dir)?;
self.generate_benchmark_scripts(output_dir)?;
self.generate_performance_check(output_dir)?;
Ok(())
}
/// Writes `.github/workflows/ci.yml` with test, benchmark (main-branch
/// pushes only) and coverage jobs, expanding the OS/Rust/feature matrix
/// from the configured [`TestMatrix`].
fn generate_github_actions(&self, output_dir: &Path) -> Result<()> {
// GitHub requires workflows under .github/workflows/.
let workflows_dir = output_dir.join(".github").join("workflows");
std::fs::create_dir_all(&workflows_dir)?;
// NOTE: `{{{{ ... }}}}` is format!-escaping — it renders as the literal
// `${{ ... }}` GitHub Actions expression syntax in the output file.
let workflow_content = format!(
r#"name: TrustformeRS Models CI
on:
push:
branches: [ main, develop ]
pull_request:
branches: [ main ]
env:
CARGO_TERM_COLOR: always
jobs:
test:
name: Test Suite
runs-on: ${{{{ matrix.os }}}}
strategy:
matrix:
os: [{}]
rust: [{}]
features: [{}]
steps:
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
with:
toolchain: ${{{{ matrix.rust }}}}
- name: Cache dependencies
uses: actions/cache@v3
with:
path: |
~/.cargo/registry
~/.cargo/git
target
key: ${{{{ runner.os }}}}-cargo-${{{{ hashFiles('**/Cargo.lock') }}}}
- name: Run tests
run: |
cargo test --verbose ${{{{ matrix.features }}}}
- name: Run clippy
run: |
cargo clippy --all-targets --all-features -- -D warnings
- name: Check formatting
run: |
cargo fmt --all -- --check
benchmark:
name: Performance Benchmarks
runs-on: ubuntu-latest
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
steps:
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
- name: Run benchmarks
run: |
cargo bench
- name: Performance regression check
run: |
./scripts/performance_check.sh
- name: Upload benchmark results
uses: actions/upload-artifact@v3
with:
name: benchmark-results
path: target/criterion/
coverage:
name: Code Coverage
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
with:
components: llvm-tools-preview
- name: Install cargo-llvm-cov
run: cargo install cargo-llvm-cov
- name: Generate coverage
run: |
cargo llvm-cov --all-features --workspace --lcov --output-path lcov.info
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
with:
files: lcov.info
"#,
// Matrix axes: OS list, toolchain list, then one `--features "…"`
// argument per configured feature combination.
self.config.test_matrix.operating_systems.join(", "),
self.config.test_matrix.rust_versions.join(", "),
self.config
.test_matrix
.feature_combinations
.iter()
.map(|features| format!("--features \"{}\"", features.join(",")))
.collect::<Vec<_>>()
.join(", ")
);
let workflow_path = workflows_dir.join("ci.yml");
std::fs::write(workflow_path, workflow_content)?;
Ok(())
}
/// Writes `.gitlab-ci.yml` with test (parallel over Rust versions/OSes),
/// main-branch benchmark, and best-effort security-audit jobs.
fn generate_gitlab_ci(&self, output_dir: &Path) -> Result<()> {
let gitlab_ci_content = format!(
r#"stages:
- test
- benchmark
- deploy
variables:
CARGO_HOME: $CI_PROJECT_DIR/.cargo
cache:
paths:
- .cargo/
- target/
test:
stage: test
image: rust:latest
parallel:
matrix:
- RUST_VERSION: [{}]
OS: [{}]
before_script:
- rustup default $RUST_VERSION
- rustc --version
- cargo --version
script:
- cargo test --verbose --all-features
- cargo clippy --all-targets --all-features -- -D warnings
- cargo fmt --all -- --check
coverage: '/\d+\.\d+% coverage/'
benchmark:
stage: benchmark
image: rust:latest
only:
- main
script:
- cargo bench
- ./scripts/performance_check.sh
artifacts:
paths:
- target/criterion/
expire_in: 1 week
security_audit:
stage: test
image: rust:latest
script:
- cargo install cargo-audit
- cargo audit
allow_failure: true
"#,
// Matrix values interpolated into the `parallel: matrix:` block.
self.config.test_matrix.rust_versions.join(", "),
self.config.test_matrix.operating_systems.join(", ")
);
// GitLab reads this file from the repository root.
let gitlab_ci_path = output_dir.join(".gitlab-ci.yml");
std::fs::write(gitlab_ci_path, gitlab_ci_content)?;
Ok(())
}
/// Writes a declarative `Jenkinsfile` with setup, parallel test, and
/// main-branch benchmark stages. Doubled braces (`{{`) are format!-escapes
/// for literal Groovy braces.
///
/// NOTE(review): `publishTestResults` is not a built-in Pipeline step
/// (the standard step is `junit`) — confirm the required plugin exists.
/// NOTE(review): inside the single-quoted `sh '''…'''` block,
/// `${params.RUST_VERSION}` is expanded by the shell, not by Groovy —
/// verify this resolves to the chosen parameter on a real agent.
fn generate_jenkins_file(&self, output_dir: &Path) -> Result<()> {
let jenkins_content = format!(
r#"pipeline {{
agent any
parameters {{
choice(
name: 'RUST_VERSION',
choices: [{}],
description: 'Rust version to use'
)
}}
environment {{
CARGO_HOME = "${{WORKSPACE}}/.cargo"
PATH = "${{CARGO_HOME}}/bin:${{PATH}}"
}}
stages {{
stage('Setup') {{
steps {{
sh '''
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
source ~/.cargo/env
rustup default ${{params.RUST_VERSION}}
'''
}}
}}
stage('Test') {{
parallel {{
stage('Unit Tests') {{
steps {{
sh 'cargo test --verbose'
}}
}}
stage('Integration Tests') {{
steps {{
sh 'cargo test --verbose --features integration-tests'
}}
}}
stage('Linting') {{
steps {{
sh 'cargo clippy --all-targets --all-features -- -D warnings'
sh 'cargo fmt --all -- --check'
}}
}}
}}
}}
stage('Benchmark') {{
when {{
branch 'main'
}}
steps {{
sh 'cargo bench'
sh './scripts/performance_check.sh'
}}
post {{
always {{
archiveArtifacts artifacts: 'target/criterion/**', fingerprint: true
}}
}}
}}
}}
post {{
always {{
publishTestResults testResultsPattern: 'target/test-results.xml'
}}
failure {{
{notification_script}
}}
}}
}}
"#,
// Rust versions rendered as a quoted Groovy list for the choice param.
self.config
.test_matrix
.rust_versions
.iter()
.map(|v| format!("'{}'", v))
.collect::<Vec<_>>()
.join(", "),
// Failure-notification snippet assembled from NotificationConfig.
notification_script = self.generate_notification_script()
);
let jenkins_path = output_dir.join("Jenkinsfile");
std::fs::write(jenkins_path, jenkins_content)?;
Ok(())
}
/// Writes `.travis.yml` covering the configured Rust versions and OSes,
/// plus a main-branch benchmark stage and notification settings.
fn generate_travis_ci(&self, output_dir: &Path) -> Result<()> {
let travis_content = format!(
r#"language: rust
rust:
- {}
os:
- {}
cache: cargo
before_script:
- rustup component add clippy rustfmt
script:
- cargo test --verbose --all-features
- cargo clippy --all-targets --all-features -- -D warnings
- cargo fmt --all -- --check
jobs:
include:
- stage: benchmark
if: branch = main
script:
- cargo bench
- ./scripts/performance_check.sh
notifications:
{}
"#,
// The join separators continue the YAML list begun by the template's
// first `- {}` entry, one list item per value.
self.config.test_matrix.rust_versions.join("\n - "),
self.config.test_matrix.operating_systems.join("\n - "),
self.generate_travis_notifications()
);
let travis_path = output_dir.join(".travis.yml");
std::fs::write(travis_path, travis_content)?;
Ok(())
}
/// Writes `.circleci/config.yml` with a test job and a main-branch-only
/// benchmark job. This template is a plain raw string (no `format!`)
/// precisely so CircleCI's own `{{ checksum … }}` syntax stays literal;
/// nothing from the config is interpolated here.
fn generate_circle_ci(&self, output_dir: &Path) -> Result<()> {
let circle_ci_content = r#"version: 2.1
executors:
rust-executor:
docker:
- image: cimg/rust:1.70
working_directory: ~/project
jobs:
test:
executor: rust-executor
steps:
- checkout
- restore_cache:
keys:
- cargo-cache-{{ checksum "Cargo.lock" }}
- cargo-cache-
- run:
name: Run tests
command: |
cargo test --verbose --all-features
cargo clippy --all-targets --all-features -- -D warnings
cargo fmt --all -- --check
- save_cache:
key: cargo-cache-{{ checksum "Cargo.lock" }}
paths:
- ~/.cargo
benchmark:
executor: rust-executor
steps:
- checkout
- restore_cache:
keys:
- cargo-cache-{{ checksum "Cargo.lock" }}
- run:
name: Run benchmarks
command: |
cargo bench
./scripts/performance_check.sh
- store_artifacts:
path: target/criterion
destination: benchmark-results
workflows:
version: 2
test_and_benchmark:
jobs:
- test
- benchmark:
requires:
- test
filters:
branches:
only: main
"#
.to_string();
// CircleCI reads config from the .circleci directory.
let circle_ci_dir = output_dir.join(".circleci");
std::fs::create_dir_all(&circle_ci_dir)?;
let circle_ci_path = circle_ci_dir.join("config.yml");
std::fs::write(circle_ci_path, circle_ci_content)?;
Ok(())
}
/// Writes `scripts/run_tests.sh` (unit, integration, doc and
/// feature-combination test runner) and marks it executable on Unix.
///
/// Fix: the chmod section previously rebuilt `scripts_dir.join("run_tests.sh")`
/// twice instead of reusing the already-computed path; it now writes via
/// `&test_script_path` and reuses it, matching `generate_benchmark_scripts`.
fn generate_test_scripts(&self, output_dir: &Path) -> Result<()> {
let scripts_dir = output_dir.join("scripts");
std::fs::create_dir_all(&scripts_dir)?;
let test_script_content = r#"#!/bin/bash
set -euo pipefail
echo "Running comprehensive test suite..."
# Unit tests
echo "Running unit tests..."
cargo test --lib --verbose
# Integration tests
echo "Running integration tests..."
cargo test --test "*" --verbose
# Documentation tests
echo "Running documentation tests..."
cargo test --doc --verbose
# Feature combination tests
echo "Testing feature combinations..."
for features in "bert" "gpt2" "llama" "bert,gpt2"; do
echo "Testing features: $features"
cargo test --no-default-features --features "$features" --verbose
done
echo "All tests completed successfully!"
"#;
let test_script_path = scripts_dir.join("run_tests.sh");
// Write by reference so the path can be reused for the chmod below.
std::fs::write(&test_script_path, test_script_content)?;
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
// Make the generated script executable (rwxr-xr-x).
let mut perms = std::fs::metadata(&test_script_path)?.permissions();
perms.set_mode(0o755);
std::fs::set_permissions(&test_script_path, perms)?;
}
Ok(())
}
/// Writes `scripts/run_benchmarks.sh`, which runs Criterion benchmarks
/// into a timestamped results directory, and marks it executable on Unix.
///
/// NOTE(review): the script invokes `scripts/extract_metrics.py`, which is
/// not generated by this module — confirm it is provided elsewhere.
fn generate_benchmark_scripts(&self, output_dir: &Path) -> Result<()> {
let scripts_dir = output_dir.join("scripts");
std::fs::create_dir_all(&scripts_dir)?;
let benchmark_script_content = r#"#!/bin/bash
set -euo pipefail
echo "Running performance benchmarks..."
# Create output directory
mkdir -p benchmark_results
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
RESULTS_DIR="benchmark_results/${TIMESTAMP}"
mkdir -p "$RESULTS_DIR"
# Run benchmarks
echo "Running Criterion benchmarks..."
cargo bench --bench "*" -- --output-format json > "$RESULTS_DIR/benchmark_results.json"
# Generate HTML report
echo "Generating HTML report..."
cargo bench --bench "*" -- --output-format html --output-dir "$RESULTS_DIR/html"
# Extract key metrics
echo "Extracting performance metrics..."
python3 scripts/extract_metrics.py "$RESULTS_DIR/benchmark_results.json" > "$RESULTS_DIR/metrics.txt"
echo "Benchmark results saved to: $RESULTS_DIR"
echo "View HTML report at: $RESULTS_DIR/html/index.html"
"#;
let benchmark_script_path = scripts_dir.join("run_benchmarks.sh");
std::fs::write(&benchmark_script_path, benchmark_script_content)?;
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
// Make the generated script executable (rwxr-xr-x).
let mut perms = std::fs::metadata(&benchmark_script_path)?.permissions();
perms.set_mode(0o755);
std::fs::set_permissions(benchmark_script_path, perms)?;
}
Ok(())
}
/// Writes `scripts/performance_check.sh`, baking the configured
/// [`PerformanceThresholds`] into shell variables, and marks it executable
/// on Unix. The script's individual check functions are placeholders that
/// currently always pass.
///
/// Fix: previously this method never created `scripts_dir`, so it failed
/// with NotFound unless `generate_test_scripts`/`generate_benchmark_scripts`
/// had already run; it now creates the directory like its siblings do.
fn generate_performance_check(&self, output_dir: &Path) -> Result<()> {
let scripts_dir = output_dir.join("scripts");
// Ensure the target directory exists so this generator also works standalone.
std::fs::create_dir_all(&scripts_dir)?;
let performance_check_content = format!(
r#"#!/bin/bash
set -euo pipefail
echo "Checking performance thresholds..."
THRESHOLDS_FILE="performance_thresholds.json"
RESULTS_FILE="target/criterion/report/index.html"
# Performance thresholds
MAX_LATENCY_MS={}
MIN_THROUGHPUT={}
MAX_MEMORY_MB={}
MAX_REGRESSION_PERCENT={}
# Function to check latency
check_latency() {{
echo "Checking latency thresholds..."
# Extract latency from benchmark results
# This would parse the actual benchmark output
echo "✓ Latency check passed"
}}
# Function to check throughput
check_throughput() {{
echo "Checking throughput thresholds..."
# Extract throughput from benchmark results
echo "✓ Throughput check passed"
}}
# Function to check memory usage
check_memory() {{
echo "Checking memory usage thresholds..."
# Extract memory usage from benchmark results
echo "✓ Memory usage check passed"
}}
# Function to check for regressions
check_regression() {{
echo "Checking for performance regressions..."
# Compare with baseline performance
echo "✓ Regression check passed"
}}
# Run all checks
check_latency
check_throughput
check_memory
check_regression
echo "All performance checks passed!"
"#,
// Threshold values interpolated into the shell variables above.
self.config.performance_thresholds.max_latency_ms,
self.config.performance_thresholds.min_throughput,
self.config.performance_thresholds.max_memory_mb,
self.config.performance_thresholds.max_regression_percent
);
let performance_check_path = scripts_dir.join("performance_check.sh");
std::fs::write(&performance_check_path, performance_check_content)?;
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
// Make the generated script executable (rwxr-xr-x).
let mut perms = std::fs::metadata(&performance_check_path)?.permissions();
perms.set_mode(0o755);
std::fs::set_permissions(performance_check_path, perms)?;
}
Ok(())
}
/// Assembles the Groovy snippet the generated Jenkinsfile runs in its
/// `failure` post block: one notification step per enabled channel, or a
/// plain `echo` when no channel is enabled.
fn generate_notification_script(&self) -> String {
    let cfg = &self.config.notifications;
    let mut steps: Vec<String> = Vec::new();
    if cfg.slack_enabled {
        steps.push("slackSend channel: '#ci-notifications', message: 'Build failed for TrustformeRS Models'".to_string());
    }
    if cfg.email_enabled {
        steps.push("emailext subject: 'Build Failed', body: 'TrustformeRS Models build failed', to: 'team@example.com'".to_string());
    }
    // Fall back to a no-op echo so the failure block is never empty.
    match steps.is_empty() {
        true => "echo 'Build failed'".to_string(),
        false => steps.join("\n "),
    }
}
/// Renders the YAML body of the `.travis.yml` `notifications:` section:
/// one entry per enabled channel, or `email: false` when none are enabled.
fn generate_travis_notifications(&self) -> String {
    let cfg = &self.config.notifications;
    let mut entries: Vec<String> = Vec::new();
    if cfg.slack_enabled {
        entries.push("slack: \"workspace:token#channel\"".to_string());
    }
    if cfg.email_enabled {
        entries.push("email:\n - team@example.com".to_string());
    }
    // Explicitly disable notifications when nothing is configured.
    if entries.is_empty() {
        return "email: false".to_string();
    }
    entries.join("\n ")
}
}
pub struct CITemplates;
impl CITemplates {
/// General-purpose GitHub Actions preset: stable + beta toolchains across
/// the three major OS runners, one test run per model feature (plus one
/// combined run), relaxed performance gates, and email-only notifications.
pub fn standard(project_name: String) -> CIConfig {
    let test_matrix = TestMatrix {
        rust_versions: vec![String::from("stable"), String::from("beta")],
        operating_systems: vec![
            String::from("ubuntu-latest"),
            String::from("windows-latest"),
            String::from("macos-latest"),
        ],
        hardware_configs: vec![String::from("cpu")],
        feature_combinations: vec![
            vec![String::from("bert")],
            vec![String::from("gpt2")],
            vec![String::from("llama")],
            vec![String::from("bert"), String::from("gpt2")],
        ],
    };
    let performance_thresholds = PerformanceThresholds {
        max_latency_ms: 1000.0,
        min_throughput: 100.0,
        max_memory_mb: 2048.0,
        max_regression_percent: 10.0,
    };
    let notifications = NotificationConfig {
        slack_enabled: false,
        email_enabled: true,
        discord_enabled: false,
        webhooks: Vec::new(),
    };
    CIConfig {
        project_name,
        provider: CIProvider::GitHubActions,
        test_matrix,
        performance_thresholds,
        notifications,
    }
}
/// Benchmark-oriented GitHub Actions preset: stable Rust on Linux only,
/// all models enabled together, CPU and GPU hardware configs, tighter
/// performance gates, and Slack + email failure notifications.
pub fn performance_focused(project_name: String) -> CIConfig {
    let test_matrix = TestMatrix {
        rust_versions: vec![String::from("stable")],
        operating_systems: vec![String::from("ubuntu-latest")],
        hardware_configs: vec![String::from("cpu"), String::from("gpu")],
        feature_combinations: vec![vec![String::from("all-models")]],
    };
    let performance_thresholds = PerformanceThresholds {
        max_latency_ms: 500.0,
        min_throughput: 200.0,
        max_memory_mb: 1024.0,
        max_regression_percent: 5.0,
    };
    let notifications = NotificationConfig {
        slack_enabled: true,
        email_enabled: true,
        discord_enabled: false,
        webhooks: Vec::new(),
    };
    CIConfig {
        project_name,
        provider: CIProvider::GitHubActions,
        test_matrix,
        performance_thresholds,
        notifications,
    }
}
}