from __future__ import annotations
import argparse
import json
import os
import subprocess
import sys
from pathlib import Path
from typing import Any, Dict, Sequence
# Repository root: two directory levels above this script (assumes the
# script lives in a first-level subdirectory such as scripts/).
ROOT = Path(__file__).resolve().parents[1]
def parse_args(argv: Sequence[str]) -> argparse.Namespace:
    """Parse command-line options for the integrated harness report writer.

    All options have defaults, so an empty *argv* is valid.
    """
    cli = argparse.ArgumentParser(
        description="Write the Iridium integrated harness report artifacts"
    )
    cli.add_argument("--report-dir", default="artifacts")
    cli.add_argument("--report-prefix", default="integrated_harness")
    cli.add_argument(
        "--binary",
        help="Path to the Iridium CLI binary",
        default=str(ROOT / "target" / "debug" / "ir"),
    )
    cli.add_argument(
        "--python-bin",
        help="Python interpreter to use for Python-backed harness stages",
        # Environment override wins over the current interpreter.
        default=os.environ.get("PYTHON_BIN", sys.executable),
    )
    return cli.parse_args(argv)
def run_command(
    command: list[str], env: dict[str, str] | None = None
) -> subprocess.CompletedProcess[str]:
    """Run *command* from the repository root and return the completed process.

    stdout and stderr are captured separately as text. A non-zero exit
    raises ``subprocess.CalledProcessError`` (check=True).
    """
    return subprocess.run(
        command,
        check=True,
        cwd=ROOT,
        env=env,
        text=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
def run_and_capture(
    command: list[str], log_path: Path, env: dict[str, str] | None = None
) -> int:
    """Run *command*, writing its combined output to *log_path*.

    stderr is interleaved into stdout so the log reflects the full stream.
    Never raises on failure; the exit code is returned for the caller to
    inspect (check=False).
    """
    result = subprocess.run(
        command,
        cwd=ROOT,
        env=env,
        text=True,
        check=False,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    log_path.write_text(result.stdout, encoding="utf-8")
    return result.returncode
def load_json(path: Path) -> Dict[str, Any]:
    """Deserialize and return the JSON document stored at *path*."""
    with path.open(encoding="utf-8") as handle:
        return json.load(handle)
def write_markdown(path: Path, payload: Dict[str, Any]) -> None:
    """Render *payload* as the human-readable Markdown report at *path*.

    Layout: header summary, one section per stage, then the topology
    assumptions. Booleans are rendered lowercase to match the JSON artifact.
    """
    topology = payload["topology_assumptions"]
    out: list[str] = [
        "# Integrated Harness Report",
        "",
        f"- schema: `{payload['schema']}`",
        f"- overall_pass: `{str(payload['overall_pass']).lower()}`",
        f"- runtime_contract: `{payload['runtime_contract']}`",
        f"- topology_boundary: `{topology['boundary']}`",
        "",
        "## Stage Results",
    ]
    for stage in payload["stages"]:
        out.append(f"### {stage['id']}")
        out.append(f"- description: {stage['description']}")
        out.append(f"- required: `{str(stage['required']).lower()}`")
        out.append(f"- status: {stage['status']}")
        out.append(f"- artifact: `{stage['artifact']}`")
        out.append(f"- log: `{stage['log']}`")
        out.append("")
    out.append("## Topology Assumptions")
    for key in ("query_profile", "cache_path", "control_plane_boundary", "replay_handoff"):
        out.append(f"- {key}: `{topology[key]}`")
    path.write_text("\n".join(out) + "\n", encoding="utf-8")
def _stage(
    stage_id: str,
    description: str,
    *,
    required: bool,
    status: str,
    artifact: str,
    log_name: str,
) -> Dict[str, Any]:
    """Build one stage-result entry for the report payload."""
    return {
        "id": stage_id,
        "description": description,
        "required": required,
        "status": status,
        "artifact": artifact,
        "log": log_name,
    }


def main(argv: Sequence[str]) -> int:
    """Run every harness stage and write the JSON and Markdown report artifacts.

    Returns 0 when all required stages pass, 1 otherwise. The optional
    core-recovery stage may fail or be skipped without failing the harness.
    """
    args = parse_args(argv)
    # NOTE(review): args.binary is parsed but never used in this function —
    # presumably the child scripts locate the binary themselves; confirm.
    report_dir = Path(args.report_dir)
    report_dir.mkdir(parents=True, exist_ok=True)

    # Propagate the selected interpreter to every child process so all
    # Python-backed stages (including those spawned indirectly) agree on it.
    command_env = os.environ.copy()
    command_env["PYTHON_BIN"] = args.python_bin

    compatibility_log = report_dir / f"{args.report_prefix}_service_compatibility.log"
    durability_log = report_dir / f"{args.report_prefix}_durability.log"
    recovery_log = report_dir / f"{args.report_prefix}_core_recovery.log"
    mixed_log = report_dir / f"{args.report_prefix}_mixed_iqr.log"

    stages = []

    service_compatibility_status = run_and_capture(
        [args.python_bin, str(ROOT / "scripts" / "service_compatibility_report.py"), "--report-dir", str(report_dir)],
        compatibility_log,
        env=command_env,
    )
    stages.append(
        _stage(
            "service-compatibility",
            "Service-backed retrieval, lifecycle, and compatibility package is green.",
            required=True,
            status="pass" if service_compatibility_status == 0 else "fail",
            artifact="service_compatibility_report.json",
            log_name=compatibility_log.name,
        )
    )

    durability_status = run_and_capture(
        [args.python_bin, str(ROOT / "scripts" / "durability_report.py"), "--report-dir", str(report_dir)],
        durability_log,
        env=command_env,
    )
    stages.append(
        _stage(
            "restart-recovery-durability",
            "Embedded restart, re-query, and capability rejection durability path is green.",
            required=True,
            status="pass" if durability_status == 0 else "fail",
            artifact="durability_verification_report.json",
            log_name=durability_log.name,
        )
    )

    recovery_status = run_and_capture(
        ["bash", str(ROOT / "scripts" / "core_recovery_gate.sh")],
        recovery_log,
        env=command_env,
    )
    recovery_log_text = recovery_log.read_text(encoding="utf-8") if recovery_log.exists() else ""
    recovery_status_label = "pass" if recovery_status == 0 else "fail"
    # The recovery gate depends on an optional Python native module; a
    # missing import downgrades this stage to "skipped" instead of "fail".
    if "failed to import iridium" in recovery_log_text:
        recovery_status_label = "skipped"
    stages.append(
        _stage(
            "core-recovery-gate",
            "Recovery latency and recovered-query path are within the product-owned gate when the optional Python native module is available.",
            required=False,
            status=recovery_status_label,
            artifact="core_recovery_gate_report.json",
            log_name=recovery_log.name,
        )
    )

    mixed_status = run_and_capture(
        [
            "cargo",
            "test",
            "--release",
            "--test",
            "storage_paths",
            "integration_mixed_ingest_query_recovery_flow",
            "--",
            "--exact",
        ],
        mixed_log,
        # FIX: previously this was the only stage launched without
        # command_env, silently dropping the PYTHON_BIN override.
        env=command_env,
    )
    stages.append(
        _stage(
            "mixed-ingest-query-recovery",
            "Integrated ingest/query/restart/recovery execution path stays green in the storage-path integration test.",
            required=True,
            status="pass" if mixed_status == 0 else "fail",
            artifact="integration_mixed_ingest_query_recovery_flow",
            log_name=mixed_log.name,
        )
    )

    # Only required stages gate the overall verdict; optional stages may
    # fail or be skipped without turning the harness red.
    overall_pass = all(
        stage["status"] == "pass" for stage in stages if stage["required"]
    )
    payload = {
        "schema": "iridium.integrated-harness-report.v1",
        "runtime_contract": "integrated-ingest-query-restart-recovery",
        "overall_pass": overall_pass,
        "stages": stages,
        "artifact_set": {
            "service_compatibility": "service_compatibility_report.json",
            "service_install": "service_install_report.json",
            "durability": "durability_verification_report.json",
            "core_recovery_gate": "core_recovery_gate_report.json",
            "mixed_integration_test": "integration_mixed_ingest_query_recovery_flow",
        },
        "topology_assumptions": {
            "boundary": "single-node execution anchor with external cache/control-plane dependencies",
            "query_profile": "plexus-aligned flagship query profile remains an upstream dependency input",
            "cache_path": "compiled-plan and retrieval artifact cache path remains Rhodium-owned and is not emulated locally",
            "control_plane_boundary": "bounded-cluster control-plane behavior remains Palladium-owned and out of the single-node harness path",
            "replay_handoff": "full-stack fault and replay attachment remains Strontium-owned once the integrated harness path is consumed downstream",
        },
    }
    json_path = report_dir / f"{args.report_prefix}_report.json"
    md_path = report_dir / f"{args.report_prefix}_report.md"
    json_path.write_text(json.dumps(payload, indent=2), encoding="utf-8")
    write_markdown(md_path, payload)
    print(f"wrote: {json_path}")
    print(f"wrote: {md_path}")
    return 0 if overall_pass else 1
if __name__ == "__main__":
    # Script entry point: the process exit status mirrors main()'s verdict
    # (0 = all required stages passed, 1 = at least one required stage failed).
    raise SystemExit(main(sys.argv[1:]))