#![cfg(feature = "js")]
use pyrograph::analyze;
/// Parse `js` as a standalone file named `{name}.js` and assert that the
/// analyzer reports no findings at all. Used for false-positive regression
/// fixtures: infrastructure code that must never trigger a detection.
///
/// Panics (failing the test) if parsing or analysis errors out, or if any
/// finding is produced; the panic message names the fixture and dumps the
/// unexpected findings.
fn must_be_clean(js: &str, name: &str) {
    let file_name = format!("{name}.js");
    let graph = pyrograph::parse::parse_js(js, &file_name).unwrap();
    let findings = analyze(&graph).unwrap();
    assert!(
        findings.is_empty(),
        "{name}: infra code must not trigger, but found: {findings:?}"
    );
}
// --- Node.js process-management infrastructure (cluster / PM2 / fork / worker_threads) ---
// FP-001: classic cluster bootstrap — master forks one worker per CPU and logs
// worker exits; workers serve HTTP. Forking here is infra, not suspicious.
#[test]
fn test_fp_infra_001_cluster_basic() {
must_be_clean(r#"
const cluster = require('cluster');
const http = require('http');
const numCPUs = require('os').cpus().length;
if (cluster.isMaster) {
for (let i = 0; i < numCPUs; i++) {
cluster.fork();
}
cluster.on('exit', (worker, code, signal) => {
console.log("worker " + worker.process.pid + " died");
});
} else {
http.createServer((req, res) => {
res.writeHead(200);
res.end('hello world\n');
}).listen(8000);
}
"#, "fp_infra_001_cluster_basic");
}
// FP-002: PM2 ecosystem config file — a plain `module.exports` object with an
// `apps` array (cluster mode, env blocks). Pure configuration data.
#[test]
fn test_fp_infra_002_pm2_ecosystem() {
must_be_clean(r#"
module.exports = {
apps : [{
name: 'api-server',
script: 'app.js',
instances: 'max',
exec_mode: 'cluster',
env: {
NODE_ENV: 'development'
},
env_production: {
NODE_ENV: 'production'
}
}]
};
"#, "fp_infra_002_pm2_ecosystem");
}
// FP-003: programmatic PM2 API — connect, start a script, then disconnect.
#[test]
fn test_fp_infra_003_pm2_programmatic() {
must_be_clean(r#"
const pm2 = require('pm2');
pm2.connect(function(err) {
if (err) {
console.error(err);
process.exit(2);
}
pm2.start({
script : 'api.js',
name : 'api'
}, function(err, apps) {
pm2.disconnect();
if (err) throw err;
});
});
"#, "fp_infra_003_pm2_programmatic");
}
// FP-004: master<->worker IPC — worker.send / process.send message round-trip.
#[test]
fn test_fp_infra_004_cluster_messages() {
must_be_clean(r#"
const cluster = require('cluster');
if (cluster.isMaster) {
const worker = cluster.fork();
worker.on('message', function(msg) {
console.log('Master ' + msg);
});
worker.send('Hello from Master');
} else {
process.on('message', function(msg) {
console.log('Worker ' + msg);
process.send('Hello from Worker');
});
}
"#, "fp_infra_004_cluster_messages");
}
// FP-005: child_process.fork of a worker script with message passing.
#[test]
fn test_fp_infra_005_child_process_fork() {
must_be_clean(r#"
const { fork } = require('child_process');
const child = fork('worker.js');
child.on('message', (msg) => {
console.log('Message from child', msg);
});
child.send({ hello: 'world' });
"#, "fp_infra_005_child_process_fork");
}
// FP-006: worker_threads — main thread posts a message, worker echoes it back.
#[test]
fn test_fp_infra_006_worker_threads() {
must_be_clean(r#"
const { Worker, isMainThread, parentPort } = require('worker_threads');
if (isMainThread) {
const worker = new Worker(__filename);
worker.once('message', (message) => console.log(message));
worker.postMessage('Hello, world!');
} else {
parentPort.once('message', (message) => {
parentPort.postMessage(message);
});
}
"#, "fp_infra_006_worker_threads");
}
// FP-007: graceful worker shutdown — send 'shutdown', disconnect, and kill
// after a 2s timeout unless the worker disconnects first.
#[test]
fn test_fp_infra_007_cluster_disconnect() {
must_be_clean(r#"
const cluster = require('cluster');
if (cluster.isMaster) {
const worker = cluster.fork();
let timeout;
worker.on('listening', (address) => {
worker.send('shutdown');
worker.disconnect();
timeout = setTimeout(() => {
worker.kill();
}, 2000);
});
worker.on('disconnect', () => {
clearTimeout(timeout);
});
}
"#, "fp_infra_007_cluster_disconnect");
}
// FP-008: PM2 custom metric (req/min meter) marked on each HTTP request.
#[test]
fn test_fp_infra_008_pm2_custom_metrics() {
must_be_clean(r#"
const probe = require('pm2').io();
const meter = probe.meter({
name : 'req/min',
samples : 1,
timeframe : 60
});
const http = require('http');
http.createServer(function (req, res) {
meter.mark();
res.end('ok');
}).listen(8080);
"#, "fp_infra_008_pm2_custom_metrics");
}
// FP-009: master aggregates request counts reported by workers over IPC.
#[test]
fn test_fp_infra_009_cluster_shared_state() {
must_be_clean(r#"
const cluster = require('cluster');
const numCPUs = require('os').cpus().length;
let requests = 0;
if (cluster.isMaster) {
for (let i = 0; i < numCPUs; i++) {
cluster.fork();
}
for (const id in cluster.workers) {
cluster.workers[id].on('message', message => {
if (message.cmd && message.cmd === 'notifyRequest') {
requests += 1;
}
});
}
} else {
process.send({ cmd: 'notifyRequest' });
}
"#, "fp_infra_009_cluster_shared_state");
}
// FP-010: fork a child with an extended environment (process.env + custom var).
#[test]
fn test_fp_infra_010_fork_env_vars() {
must_be_clean(r#"
const { fork } = require('child_process');
const env = Object.assign({}, process.env, { CUSTOM_VAR: '123' });
const child = fork('script.js', [], { env: env });
child.on('exit', code => {
console.log("Child exited with code " + code);
});
"#, "fp_infra_010_fork_env_vars");
}
// --- Health checks & probes (Docker HEALTHCHECK, K8s probes, readiness scripts) ---
// FP-011: Docker HEALTHCHECK-style script — HTTP GET, exit 0 on 200, else 1.
#[test]
fn test_fp_infra_011_docker_health_http() {
must_be_clean(r#"
const http = require('http');
const options = {
host: 'localhost',
port: 8080,
path: '/health',
timeout: 2000
};
const request = http.request(options, (res) => {
if (res.statusCode == 200) {
process.exit(0);
} else {
process.exit(1);
}
});
request.on('error', (err) => {
process.exit(1);
});
request.end();
"#, "fp_infra_011_docker_health_http");
}
// FP-012: K8s readiness probe — exit code driven by presence of /tmp/ready.
#[test]
fn test_fp_infra_012_k8s_readiness_probe() {
must_be_clean(r#"
const fs = require('fs');
try {
if (fs.existsSync('/tmp/ready')) {
process.exit(0);
} else {
process.exit(1);
}
} catch (err) {
console.error(err);
process.exit(1);
}
"#, "fp_infra_012_k8s_readiness_probe");
}
// FP-013: wait-for-it style TCP check against a docker-compose `db` service.
#[test]
fn test_fp_infra_013_docker_compose_wait() {
must_be_clean(r#"
const net = require('net');
const client = new net.Socket();
client.connect(5432, 'db', function() {
console.log('Connected to DB');
client.destroy();
process.exit(0);
});
client.on('error', function() {
console.log('DB not ready');
process.exit(1);
});
"#, "fp_infra_013_docker_compose_wait");
}
// FP-014: Express app exposing /health and /ready endpoints.
#[test]
fn test_fp_infra_014_express_health_endpoint() {
must_be_clean(r#"
const express = require('express');
const app = express();
app.get('/health', (req, res) => {
res.status(200).json({ status: 'UP', timestamp: new Date() });
});
app.get('/ready', (req, res) => {
if (global.isReady) {
res.status(200).send('Ready');
} else {
res.status(503).send('Not Ready');
}
});
app.listen(3000);
"#, "fp_infra_014_express_health_endpoint");
}
// FP-015: gRPC health-check service registration on an insecure local server.
#[test]
fn test_fp_infra_015_grpc_health_check() {
must_be_clean(r#"
const grpc = require('@grpc/grpc-js');
const health = require('grpc-health-check');
const statusMap = {
'': health.servingStatus.SERVING,
'myService': health.servingStatus.NOT_SERVING,
};
const healthImpl = new health.Implementation(statusMap);
const server = new grpc.Server();
server.addService(health.service, healthImpl);
server.bindAsync('0.0.0.0:50051', grpc.ServerCredentials.createInsecure(), () => {
server.start();
});
"#, "fp_infra_015_grpc_health_check");
}
// FP-016: K8s preStop lifecycle hook — execSync of a graceful nginx shutdown.
// Shelling out here is deliberate infra behaviour, not command injection.
#[test]
fn test_fp_infra_016_k8s_lifecycle_hook() {
must_be_clean(r#"
const { execSync } = require('child_process');
console.log('Pre-stop hook running');
try {
execSync('nginx -s quit');
console.log('Graceful shutdown initiated');
} catch (e) {
console.error('Failed to shutdown', e);
process.exit(1);
}
"#, "fp_infra_016_k8s_lifecycle_hook");
}
// FP-017: axios-based status poll that exits with probe-style codes.
#[test]
fn test_fp_infra_017_custom_health_script() {
must_be_clean(r#"
const axios = require('axios');
async function checkHealth() {
try {
const res = await axios.get('http://localhost:8080/api/status');
if (res.data.status === 'ok') {
process.exit(0);
}
process.exit(1);
} catch (err) {
process.exit(1);
}
}
checkHealth();
"#, "fp_infra_017_custom_health_script");
}
// FP-018: TCP liveness listener on port 8181 answering every connection.
#[test]
fn test_fp_infra_018_k8s_liveness_tcp() {
must_be_clean(r#"
const net = require('net');
const server = net.createServer((c) => {
c.end('ok\\n');
});
server.on('error', (err) => {
throw err;
});
server.listen(8181, () => {
console.log('TCP probe listening on 8181');
});
"#, "fp_infra_018_k8s_liveness_tcp");
}
// FP-019: dockerode container inspect — exit code follows State.Running.
#[test]
fn test_fp_infra_019_docker_container_status() {
must_be_clean(r#"
const Docker = require('dockerode');
const docker = new Docker({socketPath: '/var/run/docker.sock'});
docker.getContainer('my_container').inspect(function (err, data) {
if (err) {
process.exit(1);
}
if (data.State.Running) {
process.exit(0);
} else {
process.exit(1);
}
});
"#, "fp_infra_019_docker_container_status");
}
// FP-020: pg client connect/disconnect as a DB reachability check. The inline
// placeholder password is fixture data and must not trip secret detection.
#[test]
fn test_fp_infra_020_db_connection_check() {
must_be_clean(r#"
const { Client } = require('pg');
const client = new Client({
user: 'dbuser',
host: 'database.server.com',
database: 'mydb',
password: 'secretpassword',
port: 5432,
});
client.connect()
.then(() => {
console.log('Connected');
return client.end();
})
.then(() => process.exit(0))
.catch(err => {
console.error('Connection error', err.stack);
process.exit(1);
});
"#, "fp_infra_020_db_connection_check");
}
// --- Infrastructure-as-code (CDKTF / Pulumi resource declarations) ---
// FP-021: CDKTF stack declaring an AWS provider and an S3 bucket.
#[test]
fn test_fp_infra_021_cdktf_s3_bucket() {
must_be_clean(r#"
const { Construct } = require("constructs");
const { App, TerraformStack } = require("cdktf");
const { AwsProvider } = require("@cdktf/provider-aws/lib/provider");
const { S3Bucket } = require("@cdktf/provider-aws/lib/s3-bucket");
class MyStack extends TerraformStack {
constructor(scope, id) {
super(scope, id);
new AwsProvider(this, "AWS", { region: "us-west-1" });
new S3Bucket(this, "MyBucket", {
bucket: "my-unique-bucket-name"
});
}
}
const app = new App();
new MyStack(app, "cdktf-app");
app.synth();
"#, "fp_infra_021_cdktf_s3_bucket");
}
// FP-022: Pulumi AWS IAM role plus a Lambda built from an inline string asset.
#[test]
fn test_fp_infra_022_pulumi_aws_lambda() {
must_be_clean(r#"
const pulumi = require("@pulumi/pulumi");
const aws = require("@pulumi/aws");
const role = new aws.iam.Role("my-role", {
assumeRolePolicy: aws.iam.assumeRolePolicyForPrincipal({ Service: "lambda.amazonaws.com" }),
});
const lambda = new aws.lambda.Function("mylambda", {
runtime: "nodejs14.x",
role: role.arn,
handler: "index.handler",
code: new pulumi.asset.AssetArchive({
"index.js": new pulumi.asset.StringAsset("exports.handler = (e, c, cb) => cb(null, 'ok');"),
}),
});
exports.lambdaArn = lambda.arn;
"#, "fp_infra_022_pulumi_aws_lambda");
}
// FP-023: CDKTF stack declaring a single EC2 instance.
#[test]
fn test_fp_infra_023_cdktf_ec2_instance() {
must_be_clean(r#"
const { Construct } = require("constructs");
const { App, TerraformStack } = require("cdktf");
const { Instance } = require("@cdktf/provider-aws/lib/instance");
class Ec2Stack extends TerraformStack {
constructor(scope, id) {
super(scope, id);
new Instance(this, "compute", {
ami: "ami-01456a06140082728",
instanceType: "t2.micro"
});
}
}
"#, "fp_infra_023_cdktf_ec2_instance");
}
// FP-024: Pulumi Kubernetes nginx Deployment manifest.
#[test]
fn test_fp_infra_024_pulumi_k8s_deployment() {
must_be_clean(r#"
const k8s = require("@pulumi/kubernetes");
const appLabels = { app: "nginx" };
const deployment = new k8s.apps.v1.Deployment("nginx", {
spec: {
selector: { matchLabels: appLabels },
replicas: 1,
template: {
metadata: { labels: appLabels },
spec: { containers: [{ name: "nginx", image: "nginx" }] }
}
}
});
"#, "fp_infra_024_pulumi_k8s_deployment");
}
// FP-025: CDKTF VPC + subnet. The fixture deliberately uses a bare top-level
// `this` (assumed to be inside a stack) — the parser must still accept it.
#[test]
fn test_fp_infra_025_cdktf_vpc() {
must_be_clean(r#"
const { Vpc } = require("@cdktf/provider-aws/lib/vpc");
const { Subnet } = require("@cdktf/provider-aws/lib/subnet");
// Assume inside a stack
const vpc = new Vpc(this, "my-vpc", {
cidrBlock: "10.0.0.0/16"
});
new Subnet(this, "my-subnet", {
vpcId: vpc.id,
cidrBlock: "10.0.1.0/24"
});
"#, "fp_infra_025_cdktf_vpc");
}
// FP-026: Pulumi Azure resource group, storage account, and blob container.
#[test]
fn test_fp_infra_026_pulumi_azure_blob() {
must_be_clean(r#"
const azure = require("@pulumi/azure-native");
const resourceGroup = new azure.resources.ResourceGroup("resourceGroup");
const storageAccount = new azure.storage.StorageAccount("storageaccount", {
resourceGroupName: resourceGroup.name,
sku: { name: azure.storage.SkuName.Standard_LRS },
kind: azure.storage.Kind.StorageV2,
});
const container = new azure.storage.BlobContainer("container", {
resourceGroupName: resourceGroup.name,
accountName: storageAccount.name,
});
"#, "fp_infra_026_pulumi_azure_blob");
}
// FP-027: CDKTF GCP compute instance (also uses bare `this` like FP-025).
#[test]
fn test_fp_infra_027_cdktf_gcp_compute() {
must_be_clean(r#"
const { ComputeInstance } = require("@cdktf/provider-google/lib/compute-instance");
new ComputeInstance(this, "vm", {
name: "my-vm",
machineType: "f1-micro",
zone: "us-central1-a",
bootDisk: {
initializeParams: { image: "debian-cloud/debian-9" }
},
networkInterface: [{ network: "default" }]
});
"#, "fp_infra_027_cdktf_gcp_compute");
}
// FP-028: pulumi.Config requireSecret — legitimate secret handling that must
// not be flagged as credential exfiltration.
#[test]
fn test_fp_infra_028_pulumi_config() {
must_be_clean(r#"
const pulumi = require("@pulumi/pulumi");
const config = new pulumi.Config();
const secretValue = config.requireSecret("mySecret");
secretValue.apply(val => {
console.log("Got secret of length:", val.length);
});
"#, "fp_infra_028_pulumi_config");
}
// FP-029: CDKTF IAM role with an inline JSON.stringify'd assume-role policy.
#[test]
fn test_fp_infra_029_cdktf_iam() {
must_be_clean(r#"
const { IamRole } = require("@cdktf/provider-aws/lib/iam-role");
new IamRole(this, "role", {
name: "my-role",
assumeRolePolicy: JSON.stringify({
Version: "2012-10-17",
Statement: [{
Action: "sts:AssumeRole",
Principal: { Service: "ec2.amazonaws.com" },
Effect: "Allow"
}]
})
});
"#, "fp_infra_029_cdktf_iam");
}
// FP-030: Pulumi dynamic provider + custom resource subclass.
#[test]
fn test_fp_infra_030_pulumi_dynamic() {
must_be_clean(r#"
const pulumi = require("@pulumi/pulumi");
class MyProvider {
async create(inputs) {
return { id: "123", outs: inputs };
}
}
class MyResource extends pulumi.dynamic.Resource {
constructor(name, props, opts) {
super(new MyProvider(), name, props, opts);
}
}
new MyResource("res", { foo: "bar" });
"#, "fp_infra_030_pulumi_dynamic");
}
#[test]
fn test_fp_infra_031_actions_core_output() {
must_be_clean(r#"
const core = require('@actions/core');
try {
const nameToGreet = core.getInput('who-to-greet');
console.log("Hello " + nameToGreet + "!");
const time = (new Date()).toTimeString();
core.setOutput("time", time);
} catch (error) {
core.setFailed(error.message);
}
"#, "fp_infra_031_actions_core_output");
}
#[test]
fn test_fp_infra_032_actions_exec() {
must_be_clean(r#"
const exec = require('@actions/exec');
async function run() {
let myOutput = '';
let myError = '';
const options = {};
options.listeners = {
stdout: (data) => { myOutput += data.toString(); },
stderr: (data) => { myError += data.toString(); }
};
await exec.exec('node', ['-v'], options);
console.log(myOutput);
}
run();
"#, "fp_infra_032_actions_exec");
}
#[test]
fn test_fp_infra_033_actions_octokit() {
must_be_clean(r#"
const github = require('@actions/github');
async function run() {
const token = 'mytoken';
const octokit = github.getOctokit(token);
const context = github.context;
await octokit.rest.issues.createComment({
...context.repo,
issue_number: context.issue.number,
body: 'Hello from action!'
});
}
run();
"#, "fp_infra_033_actions_octokit");
}
#[test]
fn test_fp_infra_034_actions_io() {
must_be_clean(r#"
const io = require('@actions/io');
const path = require('path');
async function run() {
await io.mkdirP('path/to/my/dir');
await io.cp('source.txt', 'path/to/my/dir/');
await io.rmRF('path/to/my/dir');
}
run();
"#, "fp_infra_034_actions_io");
}
#[test]
fn test_fp_infra_035_actions_cache() {
must_be_clean(r#"
const cache = require('@actions/cache');
async function run() {
const paths = ['node_modules'];
const key = 'my-cache-key-v1';
const restoreKeys = ['my-cache-key-'];
const cacheKey = await cache.restoreCache(paths, key, restoreKeys);
if (!cacheKey) {
console.log('Cache not found');
}
await cache.saveCache(paths, key);
}
run();
"#, "fp_infra_035_actions_cache");
}
#[test]
fn test_fp_infra_036_actions_artifact() {
must_be_clean(r#"
const artifact = require('@actions/artifact');
async function run() {
const artifactClient = artifact.create();
const artifactName = 'my-artifact';
const files = ['file1.txt', 'file2.txt'];
const rootDirectory = '.';
const options = { continueOnError: false };
const uploadResult = await artifactClient.uploadArtifact(artifactName, files, rootDirectory, options);
}
run();
"#, "fp_infra_036_actions_artifact");
}
#[test]
fn test_fp_infra_037_actions_env_context() {
must_be_clean(r#"
const github = require('@actions/github');
const prNumber = github.context.payload.pull_request ? github.context.payload.pull_request.number : undefined;
if (prNumber) {
console.log("Running on PR #" + prNumber);
} else {
console.log('Not a PR');
}
"#, "fp_infra_037_actions_env_context");
}
#[test]
fn test_fp_infra_038_actions_create_issue() {
must_be_clean(r#"
const github = require('@actions/github');
const core = require('@actions/core');
async function run() {
const octokit = github.getOctokit(core.getInput('token'));
await octokit.rest.issues.create({
owner: github.context.repo.owner,
repo: github.context.repo.repo,
title: 'New Issue',
body: 'Created by action'
});
}
run();
"#, "fp_infra_038_actions_create_issue");
}
#[test]
fn test_fp_infra_039_actions_tool_cache() {
must_be_clean(r#"
const tc = require('@actions/tool-cache');
async function run() {
const nodePath = await tc.downloadTool('https://nodejs.org/dist/v14.17.0/node-v14.17.0-linux-x64.tar.gz');
const extPath = await tc.extractTar(nodePath);
const cachedPath = await tc.cacheDir(extPath, 'node', '14.17.0');
console.log("Cached to " + cachedPath);
}
run();
"#, "fp_infra_039_actions_tool_cache");
}
#[test]
fn test_fp_infra_040_actions_glob() {
must_be_clean(r#"
const glob = require('@actions/glob');
async function run() {
const patterns = ['**/*.js', '!node_modules/**'];
const globber = await glob.create(patterns.join('\\n'));
const files = await globber.glob();
console.log(files);
}
run();
"#, "fp_infra_040_actions_glob");
}
// --- CI systems (Jenkins, CircleCI, GitLab, Travis, Buildkite, Drone, Azure) ---
// FP-041: jenkins npm client triggering a job build over the REST API.
#[test]
fn test_fp_infra_041_jenkins_api() {
must_be_clean(r#"
const jenkins = require('jenkins')({ baseUrl: 'http://user:pass@localhost:8080', crumbIssuer: true });
jenkins.job.build('my-job', function(err, data) {
if (err) throw err;
console.log('queue item number', data);
});
"#, "fp_infra_041_jenkins_api");
}
// FP-042: generating a CircleCI dynamic continue_config.yml on disk.
#[test]
fn test_fp_infra_042_circleci_dynamic_config() {
must_be_clean(r#"
const fs = require('fs');
const config = {
version: 2.1,
jobs: {
build: {
docker: [{ image: 'cimg/node:14.17.0' }],
steps: ['checkout', 'echo test']
}
}
};
fs.writeFileSync('.circleci/continue_config.yml', JSON.stringify(config));
"#, "fp_infra_042_circleci_dynamic_config");
}
// FP-043: GitLab CI script step — execSync('npm run test') with inherited stdio.
#[test]
fn test_fp_infra_043_gitlab_ci_script() {
must_be_clean(r#"
const { execSync } = require('child_process');
console.log("Running build");
if (1) {
execSync('npm run test', { stdio: 'inherit' });
}
"#, "fp_infra_043_gitlab_ci_script");
}
// FP-044: Travis-style deploy step — spawn('npm', ['run', 'deploy']).
#[test]
fn test_fp_infra_044_travis_deploy() {
must_be_clean(r#"
const { spawn } = require('child_process');
if (true) {
const deploy = spawn('npm', ['run', 'deploy']);
deploy.stdout.on('data', data => console.log(data.toString()));
}
"#, "fp_infra_044_travis_deploy");
}
// FP-045: emitting a Buildkite pipeline definition as JSON on stdout.
#[test]
fn test_fp_infra_045_buildkite_pipeline() {
must_be_clean(r#"
const pipeline = {
steps: [
{ label: "Build", command: "npm run build" },
{ label: "Test", command: "npm test" }
]
};
console.log(JSON.stringify(pipeline, null, 2));
"#, "fp_infra_045_buildkite_pipeline");
}
// FP-046: shared-lib helper wrapping jenkins.job.build in a Promise.
#[test]
fn test_fp_infra_046_jenkins_shared_lib() {
must_be_clean(r#"
function triggerDownstream(jobName) {
const jenkins = require('jenkins')({ baseUrl: 'http://jenkins' });
return new Promise((resolve, reject) => {
jenkins.job.build(jobName, (err, n) => {
if (err) return reject(err);
resolve(n);
});
});
}
"#, "fp_infra_046_jenkins_shared_lib");
}
// FP-047: triggering a pipeline through the CircleCI v2 API with axios.
#[test]
fn test_fp_infra_047_circleci_api() {
must_be_clean(r#"
const axios = require('axios');
async function triggerPipeline() {
await axios.post('https://circleci.com/api/v2/project/github/org/repo/pipeline', {
branch: 'main'
}, {
headers: { 'Circle-Token': 'mytoken' }
});
}
triggerPipeline();
"#, "fp_infra_047_circleci_api");
}
// FP-048: writing a GitLab SAST report artifact (gl-sast-report.json).
#[test]
fn test_fp_infra_048_gitlab_artifacts() {
must_be_clean(r#"
const fs = require('fs');
const path = require('path');
const reportPath = path.join('.', 'gl-sast-report.json');
const report = { version: "14.0.0", vulnerabilities: [] };
fs.writeFileSync(reportPath, JSON.stringify(report));
"#, "fp_infra_048_gitlab_artifacts");
}
// FP-049: Drone-style notification plugin — execSync of a curl POST to a
// webhook built by string concatenation; benign CI notification pattern.
#[test]
fn test_fp_infra_049_drone_plugin() {
must_be_clean(r#"
const { execSync } = require('child_process');
const webhookUrl = 'http://webhook';
const message = 'Build finished';
if (webhookUrl) {
execSync('curl -X POST -d "' + message + '" ' + webhookUrl);
}
"#, "fp_infra_049_drone_plugin");
}
// FP-050: azure-pipelines-task-lib — getInput validation and setResult.
#[test]
fn test_fp_infra_050_azure_pipelines() {
must_be_clean(r#"
const tl = require('azure-pipelines-task-lib/task');
async function run() {
try {
const inputString = tl.getInput('samplestring', true);
if (inputString == 'bad') {
tl.setResult(tl.TaskResult.Failed, 'Bad input was given');
return;
}
console.log('Hello', inputString);
}
catch (err) {
tl.setResult(tl.TaskResult.Failed, err.message);
}
}
run();
"#, "fp_infra_050_azure_pipelines");
}