use std::collections::{HashMap, HashSet};
use crate::ast::TopLevel;
use crate::codegen;
use crate::diagnostics::{AnalyzeOptions, analyze_source};
use crate::ir::{PipelineConfig, TypecheckMode};
use crate::source::{LoadedModule, load_module_tree_from_map, parse_source};
/// Compile a single Aver source string straight to wasm-gc bytes.
///
/// Runs the IR pipeline with full typechecking (interp lowering and buffer
/// build disabled), surfaces typecheck errors as a formatted string, and
/// hands the checked items to the wasm-gc backend.
pub fn compile_to_wasm(source: &str) -> Result<Vec<u8>, String> {
    let mut items = parse_source(source)?;
    let config = PipelineConfig {
        typecheck: Some(TypecheckMode::Full { base_dir: None }),
        run_interp_lower: false,
        run_buffer_build: false,
        ..Default::default()
    };
    let pipeline_result = crate::ir::pipeline::run(&mut items, config);
    let tc_result = pipeline_result.typecheck.expect("typecheck was requested");
    if tc_result.errors.is_empty() {
        codegen::wasm_gc::compile_to_wasm_gc(&items, pipeline_result.analysis.as_ref())
            .map_err(|e| format!("{e}"))
    } else {
        Err(format_tc_errors(&tc_result.errors))
    }
}
/// Compile a multi-file project (a virtual path → source map) to wasm-gc
/// bytes, flattening dependency modules into the entry module before codegen.
pub fn compile_project_to_wasm(
    files: &HashMap<String, String>,
    entry: &str,
) -> Result<Vec<u8>, String> {
    let entry_source = files
        .get(entry)
        .ok_or_else(|| format!("Entry '{}' not present in file map", entry))?;
    let mut entry_items = parse_source(entry_source)?;
    // Resolve the entry module's declared dependencies from the file map.
    let loaded = load_module_tree_from_map(&module_depends(&entry_items), files)?;
    let pipeline_result = crate::ir::pipeline::run(
        &mut entry_items,
        PipelineConfig {
            typecheck: Some(TypecheckMode::WithLoaded(&loaded)),
            run_interp_lower: false,
            run_buffer_build: false,
            ..Default::default()
        },
    );
    let tc_result = pipeline_result.typecheck.expect("typecheck was requested");
    if !tc_result.errors.is_empty() {
        return Err(format_tc_errors(&tc_result.errors));
    }
    // Lower each dependency module, then merge everything into one unit.
    let mut modules = Vec::with_capacity(loaded.len());
    for m in loaded {
        modules.push(loaded_to_module_info(m, false));
    }
    codegen::wasm_gc::flatten_multimodule(&mut entry_items, &modules);
    crate::ir::pipeline::resolve(&mut entry_items);
    codegen::wasm_gc::compile_to_wasm_gc(&entry_items, pipeline_result.analysis.as_ref())
        .map_err(|e| format!("{e}"))
}
/// Compile a project to wasm-gc while synthesizing a `__entry__` wrapper
/// that calls the function named in `expr` (e.g. `add(2, 3)`), so the
/// playground can invoke an arbitrary entry point with literal arguments.
///
/// Returns the wasm bytes plus the resolved target function name.
pub fn compile_project_to_wasm_with_entry(
    files: &HashMap<String, String>,
    entry: &str,
    expr: &str,
) -> Result<(Vec<u8>, String), String> {
    let entry_source = files
        .get(entry)
        .ok_or_else(|| format!("Entry '{}' not present in file map", entry))?;
    let mut entry_items = parse_source(entry_source)?;
    let root_depends = module_depends(&entry_items);
    let loaded = load_module_tree_from_map(&root_depends, files)?;
    // Split the `--expr` call expression into a target fn name + literal args.
    let (target_fn, args) =
        crate::replay::parse_entry_call(expr).map_err(|e| format!("--expr parse: {}", e))?;
    // The synthetic wrapper must declare the callee's return type; look it up
    // in the entry module first, then in the loaded dependencies.
    let (return_type, _effects) = lookup_fn_signature(&entry_items, &loaded, &target_fn)
        .ok_or_else(|| format!("entry fn `{}` not found in project", target_fn))?;
    let synth = build_synth_entry_fn(&target_fn, &args, &return_type)?;
    // Push the wrapper BEFORE running the pipeline so it is typechecked too.
    entry_items.push(synth);
    let pipeline_result = crate::ir::pipeline::run(
        &mut entry_items,
        PipelineConfig {
            typecheck: Some(TypecheckMode::WithLoaded(&loaded)),
            run_interp_lower: false,
            run_buffer_build: false,
            ..Default::default()
        },
    );
    let tc_result = pipeline_result.typecheck.expect("typecheck was requested");
    if !tc_result.errors.is_empty() {
        return Err(format_tc_errors(&tc_result.errors));
    }
    let modules: Vec<codegen::ModuleInfo> = loaded
        .into_iter()
        .map(|m| loaded_to_module_info(m, false))
        .collect();
    codegen::wasm_gc::flatten_multimodule(&mut entry_items, &modules);
    crate::ir::pipeline::resolve(&mut entry_items);
    let bytes =
        codegen::wasm_gc::compile_to_wasm_gc(&entry_items, pipeline_result.analysis.as_ref())
            .map_err(|e| format!("{e}"))?;
    Ok((bytes, target_fn))
}
/// Find the declared return type and effect list of the function named
/// `target`, searching the entry module's items first and then each loaded
/// dependency module in order. Returns `None` when no such fn exists.
fn lookup_fn_signature(
    entry_items: &[crate::ast::TopLevel],
    loaded: &[LoadedModule],
    target: &str,
) -> Option<(String, Vec<crate::ast::Spanned<String>>)> {
    entry_items
        .iter()
        .chain(loaded.iter().flat_map(|m| m.items.iter()))
        .find_map(|item| match item {
            crate::ast::TopLevel::FnDef(fd) if fd.name == target => {
                Some((fd.return_type.clone(), fd.effects.clone()))
            }
            _ => None,
        })
}
/// Build the synthetic `__entry__` function definition: a zero-parameter fn
/// returning `return_type` whose body is a single call
/// `target_fn(<literal args>)`.
fn build_synth_entry_fn(
    target_fn: &str,
    args: &[crate::value::Value],
    return_type: &str,
) -> Result<crate::ast::TopLevel, String> {
    use crate::ast::{Expr, FnBody, FnDef, Spanned, Stmt, TopLevel};
    // Non-scalar args are rejected inside value_to_literal_expr.
    let arg_exprs: Vec<Spanned<Expr>> = args
        .iter()
        .map(value_to_literal_expr)
        .collect::<Result<_, _>>()?;
    let callee = Spanned::bare(Expr::Ident(target_fn.to_string()));
    let call = Spanned::bare(Expr::FnCall(Box::new(callee), arg_exprs));
    let body = FnBody::Block(vec![Stmt::Expr(call)]);
    Ok(TopLevel::FnDef(FnDef {
        name: "__entry__".to_string(),
        line: 0,
        params: vec![],
        return_type: return_type.to_string(),
        // NOTE(review): the effect list names the target fn itself rather
        // than a runtime effect — presumably the checker treats a callee name
        // here as "inherits whatever the callee performs"; confirm.
        effects: vec![Spanned::bare(target_fn.to_string())],
        desc: None,
        body: std::sync::Arc::new(body),
        resolution: None,
    }))
}
fn value_to_literal_expr(
v: &crate::value::Value,
) -> Result<crate::ast::Spanned<crate::ast::Expr>, String> {
use crate::ast::{Expr, Literal, Spanned};
let lit = match v {
crate::value::Value::Int(n) => Literal::Int(*n),
crate::value::Value::Float(f) => Literal::Float(*f),
crate::value::Value::Str(s) => Literal::Str(s.clone()),
crate::value::Value::Bool(b) => Literal::Bool(*b),
crate::value::Value::Unit => Literal::Unit,
other => {
return Err(format!(
"synthetic `__entry__` only supports Int/Float/Bool/String/Unit args today; got {:?}",
other
));
}
};
Ok(Spanned::bare(Expr::Literal(lit)))
}
/// Thin playground-only forwarder to `crate::replay::parse_entry_call`, so
/// the `bindgen` module can reach it via `super::`.
/// NOTE(review): the `crate_` prefix in the name reads like a leftover from
/// a path-to-identifier rename — confirm before exposing more widely.
#[cfg(feature = "playground")]
fn crate_parse_entry_call(expr: &str) -> Result<(String, Vec<crate::value::Value>), String> {
    crate::replay::parse_entry_call(expr)
}
/// Parse and typecheck a single source string and assemble a codegen context
/// for the transpiler backends.
///
/// `apply_traversal_lowering` toggles both the interp-lowering and
/// buffer-build pipeline passes in one go.
#[cfg(feature = "runtime")]
fn build_ctx(
    source: &str,
    apply_traversal_lowering: bool,
) -> Result<codegen::CodegenContext, String> {
    let mut items = parse_source(source)?;
    let config = PipelineConfig {
        typecheck: Some(TypecheckMode::Full { base_dir: None }),
        run_interp_lower: apply_traversal_lowering,
        run_buffer_build: apply_traversal_lowering,
        ..Default::default()
    };
    let pipeline_result = crate::ir::pipeline::run(&mut items, config);
    let tc_result = pipeline_result.typecheck.expect("typecheck was requested");
    if !tc_result.errors.is_empty() {
        return Err(format_tc_errors(&tc_result.errors));
    }
    let ctx = codegen::build_context(
        items,
        &tc_result,
        pipeline_result.analysis.as_ref(),
        HashSet::new(),
        "playground".to_string(),
        vec![],
    );
    Ok(ctx)
}
/// Export Lean proof files for a single source string, keyed by file name.
#[cfg(feature = "runtime")]
pub fn proof_lean_files(source: &str) -> Result<HashMap<String, String>, String> {
    let mut ctx = build_ctx(source, false)?;
    let files = codegen::lean::transpile(&mut ctx).files;
    Ok(files.into_iter().collect())
}
/// Export Dafny proof files for a single source string, keyed by file name.
#[cfg(feature = "runtime")]
pub fn proof_dafny_files(source: &str) -> Result<HashMap<String, String>, String> {
    let ctx = build_ctx(source, false)?;
    let files = codegen::dafny::transpile(&ctx).files;
    Ok(files.into_iter().collect())
}
/// Export a Rust project for a single source string (traversal lowering on).
#[cfg(feature = "runtime")]
pub fn compile_rust_files(source: &str) -> Result<HashMap<String, String>, String> {
    let mut ctx = build_ctx(source, true)?;
    let files = codegen::rust::transpile(&mut ctx).files;
    Ok(files.into_iter().collect())
}
/// Multi-file counterpart of `build_ctx`: parse the entry file, load its
/// declared dependency modules from the virtual file map, typecheck against
/// them, and assemble a codegen context carrying the dependency modules.
#[cfg(feature = "runtime")]
fn build_project_ctx(
    files: &HashMap<String, String>,
    entry: &str,
    apply_traversal_lowering: bool,
) -> Result<codegen::CodegenContext, String> {
    let entry_source = files
        .get(entry)
        .ok_or_else(|| format!("Entry '{}' not present in file map", entry))?;
    let mut entry_items = parse_source(entry_source)?;
    let root_depends = module_depends(&entry_items);
    let loaded = load_module_tree_from_map(&root_depends, files)?;
    let pipeline_result = crate::ir::pipeline::run(
        &mut entry_items,
        PipelineConfig {
            typecheck: Some(TypecheckMode::WithLoaded(&loaded)),
            run_interp_lower: apply_traversal_lowering,
            run_buffer_build: apply_traversal_lowering,
            ..Default::default()
        },
    );
    let tc_result = pipeline_result.typecheck.expect("typecheck was requested");
    if !tc_result.errors.is_empty() {
        return Err(format_tc_errors(&tc_result.errors));
    }
    // Each dependency gets its own pipeline run inside loaded_to_module_info,
    // with the same lowering toggle as the entry module.
    let modules: Vec<codegen::ModuleInfo> = loaded
        .into_iter()
        .map(|m| loaded_to_module_info(m, apply_traversal_lowering))
        .collect();
    Ok(codegen::build_context(
        entry_items,
        &tc_result,
        pipeline_result.analysis.as_ref(),
        HashSet::new(),
        "playground".to_string(),
        modules,
    ))
}
/// Export Lean proof files for a multi-file project.
#[cfg(feature = "runtime")]
pub fn proof_lean_files_project(
    files: &HashMap<String, String>,
    entry: &str,
) -> Result<HashMap<String, String>, String> {
    let mut ctx = build_project_ctx(files, entry, false)?;
    let out = codegen::lean::transpile(&mut ctx).files;
    Ok(out.into_iter().collect())
}
/// Export Dafny proof files for a multi-file project.
#[cfg(feature = "runtime")]
pub fn proof_dafny_files_project(
    files: &HashMap<String, String>,
    entry: &str,
) -> Result<HashMap<String, String>, String> {
    let ctx = build_project_ctx(files, entry, false)?;
    let out = codegen::dafny::transpile(&ctx).files;
    Ok(out.into_iter().collect())
}
/// Export a Rust project for a multi-file project (traversal lowering on).
#[cfg(feature = "runtime")]
pub fn compile_rust_files_project(
    files: &HashMap<String, String>,
    entry: &str,
) -> Result<HashMap<String, String>, String> {
    let mut ctx = build_project_ctx(files, entry, true)?;
    let out = codegen::rust::transpile(&mut ctx).files;
    Ok(out.into_iter().collect())
}
/// Return the `depends [...]` list of the first module declaration found in
/// `items`, or an empty list when there is no module declaration.
fn module_depends(items: &[TopLevel]) -> Vec<String> {
    for item in items {
        if let TopLevel::Module(m) = item {
            return m.depends.clone();
        }
    }
    Vec::new()
}
/// Run the pipeline (with the neutral allocation policy) over a loaded
/// dependency module and package its type and fn definitions as
/// `ModuleInfo` for multi-module codegen.
///
/// A dependency's `main` is dropped so only the entry module contributes an
/// entry point.
fn loaded_to_module_info(m: LoadedModule, apply_traversal_lowering: bool) -> codegen::ModuleInfo {
    let mut items = m.items;
    let neutral_policy = crate::ir::NeutralAllocPolicy;
    let pipeline_result = crate::ir::pipeline::run(
        &mut items,
        PipelineConfig {
            run_interp_lower: apply_traversal_lowering,
            run_buffer_build: apply_traversal_lowering,
            alloc_policy: Some(&neutral_policy),
            ..Default::default()
        },
    );
    let depends = module_depends(&items);
    // Partition the items of interest in a single pass.
    let mut type_defs = Vec::new();
    let mut fn_defs = Vec::new();
    for item in &items {
        match item {
            TopLevel::TypeDef(td) => type_defs.push(td.clone()),
            TopLevel::FnDef(fd) if fd.name != "main" => fn_defs.push(fd.clone()),
            _ => {}
        }
    }
    codegen::ModuleInfo {
        prefix: m.dep_name,
        depends,
        type_defs,
        fn_defs,
        analysis: pipeline_result.analysis,
    }
}
/// Render typecheck errors as one `error[line:col]: message` per line.
fn format_tc_errors(errors: &[crate::types::checker::TypeError]) -> String {
    let mut lines = Vec::with_capacity(errors.len());
    for e in errors {
        lines.push(format!("error[{}:{}]: {}", e.line, e.col, e.message));
    }
    lines.join("\n")
}
/// Run the default analysis over a single source string, as JSON.
pub fn check_source(source: &str) -> String {
    analyze_source(source, &AnalyzeOptions::new("playground")).to_json()
}
/// Shared driver for the project-level `check`/`verify`/`why`/`context`
/// entry points: resolve the entry source, best-effort load its declared
/// dependencies into the analyzer options, apply the caller's option tweaks,
/// and run the analysis.
///
/// Returns an empty report when `entry` is not in the file map; dependency
/// loading failures are ignored so analysis still runs on the entry alone.
fn analyze_project(
    files: &HashMap<String, String>,
    entry: &str,
    make_opts: impl FnOnce(AnalyzeOptions) -> AnalyzeOptions,
) -> String {
    // Borrow the entry source; the previous version cloned the whole String
    // for no reason (files is borrowed immutably for the full call).
    let Some(entry_source) = files.get(entry) else {
        return crate::diagnostics::AnalysisReport::new("playground").to_json();
    };
    let mut opts = AnalyzeOptions::new("playground");
    if let Ok(items) = parse_source(entry_source) {
        let depends = module_depends(&items);
        if let Ok(loaded) = crate::source::load_module_tree_from_map(&depends, files) {
            opts = opts.with_loaded_modules(loaded);
        }
    }
    let opts = make_opts(opts);
    analyze_source(entry_source, &opts).to_json()
}
/// Project-level `check`: default analysis options, no tweaks.
pub fn check_project(files: &HashMap<String, String>, entry: &str) -> String {
    analyze_project(files, entry, std::convert::identity)
}
/// Single-source `verify`: analysis plus the verify runner.
pub fn verify_source(source: &str) -> String {
    let opts = {
        let mut o = AnalyzeOptions::new("playground");
        o.include_verify_run = true;
        o
    };
    analyze_source(source, &opts).to_json()
}
/// Project-level `verify`: analysis plus the verify runner.
pub fn verify_project(files: &HashMap<String, String>, entry: &str) -> String {
    analyze_project(files, entry, |mut opts| {
        opts.include_verify_run = true;
        opts
    })
}
/// Single-source `verify --hostile`: verify runner with the adversarial
/// profile enabled.
pub fn verify_source_hostile(source: &str) -> String {
    let opts = {
        let mut o = AnalyzeOptions::new("playground");
        o.include_verify_run = true;
        o.verify_run_hostile = true;
        o
    };
    analyze_source(source, &opts).to_json()
}
/// Project-level `verify --hostile`.
pub fn verify_project_hostile(files: &HashMap<String, String>, entry: &str) -> String {
    analyze_project(files, entry, |mut opts| {
        opts.include_verify_run = true;
        opts.verify_run_hostile = true;
        opts
    })
}
/// Single-source `why`: analysis plus the why-summary.
pub fn why_source(source: &str) -> String {
    let opts = {
        let mut o = AnalyzeOptions::new("playground");
        o.include_why_summary = true;
        o
    };
    analyze_source(source, &opts).to_json()
}
/// Project-level `why`.
pub fn why_project(files: &HashMap<String, String>, entry: &str) -> String {
    analyze_project(files, entry, |mut opts| {
        opts.include_why_summary = true;
        opts
    })
}
/// Single-source `context`: analysis plus the context summary (JSON).
pub fn context_source(source: &str) -> String {
    let opts = {
        let mut o = AnalyzeOptions::new("playground");
        o.include_context_summary = true;
        o
    };
    analyze_source(source, &opts).to_json()
}
/// Project-level `context` (JSON).
pub fn context_project(files: &HashMap<String, String>, entry: &str) -> String {
    analyze_project(files, entry, |mut opts| {
        opts.include_context_summary = true;
        opts
    })
}
/// Single-source `context` rendered as Markdown, with a fallback document
/// when no summary could be produced.
pub fn context_md_source(source: &str) -> String {
    let mut opts = AnalyzeOptions::new("playground");
    opts.include_context_summary = true;
    let report = analyze_source(source, &opts);
    if let Some(summary) = report.context_summary {
        crate::diagnostics::context::render_context_md(&summary)
    } else {
        "# Aver Context\n\n_No context available (parse or typecheck failed)._\n".to_string()
    }
}
/// Project-level `context` rendered as Markdown. Mirrors `context_md_source`
/// but best-effort loads the entry's declared dependency modules first.
pub fn context_md_project(files: &HashMap<String, String>, entry: &str) -> String {
    // Borrow the entry source; the previous version cloned the whole String
    // even though `files` stays immutably borrowed for the full call.
    let Some(entry_source) = files.get(entry) else {
        return format!(
            "# Aver Context\n\n_Entry '{}' not found in project._\n",
            entry
        );
    };
    let mut opts = AnalyzeOptions::new("playground");
    opts.include_context_summary = true;
    if let Ok(items) = parse_source(entry_source) {
        let deps = module_depends(&items);
        if let Ok(loaded) = crate::source::load_module_tree_from_map(&deps, files) {
            opts = opts.with_loaded_modules(loaded);
        }
    }
    let report = analyze_source(entry_source, &opts);
    match report.context_summary {
        Some(summary) => crate::diagnostics::context::render_context_md(&summary),
        None => {
            "# Aver Context\n\n_No context available (parse or typecheck failed)._\n".to_string()
        }
    }
}
/// Single-source `audit` (non-hostile), as JSON.
#[cfg(feature = "runtime")]
pub fn audit_source(source: &str) -> String {
    let report = audit_build_report(source, None, None, None, false);
    report.to_json()
}
/// Single-source `audit` with the hostile verify profile, as JSON.
#[cfg(feature = "runtime")]
pub fn audit_source_hostile(source: &str) -> String {
    let report = audit_build_report(source, None, None, None, true);
    report.to_json()
}
/// Project-level `audit` (non-hostile), as JSON. Dependency loading is
/// best-effort: a parse or load failure just audits the entry alone.
#[cfg(feature = "runtime")]
pub fn audit_project(files: &HashMap<String, String>, entry: &str) -> String {
    let entry_source = match files.get(entry) {
        Some(s) => s,
        None => return crate::diagnostics::AnalysisReport::new("playground").to_json(),
    };
    let loaded = match parse_source(entry_source) {
        Ok(items) => crate::source::load_module_tree_from_map(&module_depends(&items), files).ok(),
        Err(_) => None,
    };
    audit_build_report(entry_source, loaded, Some(files), Some(entry), false).to_json()
}
/// Project-level `audit` with the hostile verify profile, as JSON.
#[cfg(feature = "runtime")]
pub fn audit_project_hostile(files: &HashMap<String, String>, entry: &str) -> String {
    let entry_source = match files.get(entry) {
        Some(s) => s,
        None => return crate::diagnostics::AnalysisReport::new("playground").to_json(),
    };
    let loaded = match parse_source(entry_source) {
        Ok(items) => crate::source::load_module_tree_from_map(&module_depends(&items), files).ok(),
        Err(_) => None,
    };
    audit_build_report(entry_source, loaded, Some(files), Some(entry), true).to_json()
}
/// Build the audit report: a verify run (optionally hostile) plus, when the
/// `tty-render` feature is on, formatting diagnostics for the audited source
/// and every other file in the project map.
#[cfg(feature = "runtime")]
fn audit_build_report(
    source: &str,
    loaded: Option<Vec<LoadedModule>>,
    all_files: Option<&HashMap<String, String>>,
    entry: Option<&str>,
    hostile: bool,
) -> crate::diagnostics::AnalysisReport {
    use crate::diagnostics::needs_format_diagnostic;
    let mut opts = AnalyzeOptions::new("playground");
    opts.include_verify_run = true;
    opts.verify_run_hostile = hostile;
    if let Some(loaded) = loaded {
        opts = opts.with_loaded_modules(loaded);
    }
    let mut report = analyze_source(source, &opts);
    // Formatting checks need the formatter, which only exists with
    // `tty-render`. A format error is silently skipped (best-effort audit).
    #[cfg(feature = "tty-render")]
    if let Ok((formatted, violations)) = crate::format::try_format_source(source)
        && formatted != source
    {
        report
            .diagnostics
            .push(needs_format_diagnostic("playground", &violations, source));
    }
    // Project mode: also flag formatting on the other files, skipping the
    // entry (already covered above under the "playground" label).
    #[cfg(feature = "tty-render")]
    if let (Some(files), Some(entry)) = (all_files, entry) {
        for (path, src) in files {
            if path == entry {
                continue;
            }
            if let Ok((formatted, violations)) = crate::format::try_format_source(src)
                && formatted != *src
            {
                report
                    .diagnostics
                    .push(needs_format_diagnostic(path, &violations, src));
            }
        }
    }
    report
}
/// Format a source string, returning the input unchanged when the formatter
/// fails (the playground never wants a hard error here).
#[cfg(feature = "tty-render")]
pub fn format_source(source: &str) -> String {
    match crate::format::try_format_source(source) {
        Ok((text, _violations)) => text,
        Err(_) => source.to_string(),
    }
}
/// wasm-bindgen surface for the browser playground.
///
/// Every export is a thin adapter: decode JS-provided strings/JSON, delegate
/// to the matching `super::` function, and convert `String` errors into
/// `JsError`. All project entry points now share `parse_files` for decoding
/// the file map (previously several of them duplicated the serde call
/// inline).
#[cfg(feature = "playground")]
mod bindgen {
    use wasm_bindgen::prelude::*;

    #[wasm_bindgen]
    extern "C" {
        #[wasm_bindgen(js_namespace = console, js_name = error)]
        fn console_error(s: &str);
    }

    /// Route Rust panics to `console.error` so they show up in devtools.
    #[wasm_bindgen(start)]
    pub fn init_playground() {
        std::panic::set_hook(Box::new(|info| {
            console_error(&format!("Aver playground panic: {}", info));
        }));
    }

    /// Decode the `{ "path": "source" }` project map sent from JS.
    fn parse_files(files_json: &str) -> Result<std::collections::HashMap<String, String>, JsError> {
        serde_json::from_str(files_json).map_err(|e| JsError::new(&e.to_string()))
    }

    #[wasm_bindgen]
    pub fn aver_compile(source: &str) -> Result<Vec<u8>, JsError> {
        super::compile_to_wasm(source).map_err(|e| JsError::new(&e))
    }

    #[wasm_bindgen]
    pub fn aver_compile_project(files_json: &str, entry: &str) -> Result<Vec<u8>, JsError> {
        let files = parse_files(files_json)?;
        super::compile_project_to_wasm(&files, entry).map_err(|e| JsError::new(&e))
    }

    #[wasm_bindgen]
    pub fn aver_compile_project_with_entry(
        files_json: &str,
        entry: &str,
        expr: &str,
    ) -> Result<Vec<u8>, JsError> {
        let files = parse_files(files_json)?;
        let (bytes, _target_fn) = super::compile_project_to_wasm_with_entry(&files, entry, expr)
            .map_err(|e| JsError::new(&e))?;
        Ok(bytes)
    }

    #[wasm_bindgen]
    pub fn aver_parse_entry_target(expr: &str) -> Result<String, JsError> {
        let (name, _args) = super::crate_parse_entry_call(expr).map_err(|e| JsError::new(&e))?;
        Ok(name)
    }

    #[wasm_bindgen]
    pub fn aver_check(source: &str) -> String {
        super::check_source(source)
    }

    #[wasm_bindgen]
    pub fn aver_verify(source: &str) -> String {
        super::verify_source(source)
    }

    #[wasm_bindgen]
    pub fn aver_verify_hostile(source: &str) -> String {
        super::verify_source_hostile(source)
    }

    #[wasm_bindgen]
    pub fn aver_why(source: &str) -> String {
        super::why_source(source)
    }

    #[wasm_bindgen]
    pub fn aver_context(source: &str) -> String {
        super::context_source(source)
    }

    #[wasm_bindgen]
    pub fn aver_audit(source: &str) -> String {
        super::audit_source(source)
    }

    #[wasm_bindgen]
    pub fn aver_audit_hostile(source: &str) -> String {
        super::audit_source_hostile(source)
    }

    #[wasm_bindgen]
    pub fn aver_format(source: &str) -> String {
        super::format_source(source)
    }

    #[wasm_bindgen]
    pub fn aver_proof_lean(source: &str) -> Result<String, JsError> {
        let files = super::proof_lean_files(source).map_err(|e| JsError::new(&e))?;
        serde_json::to_string(&files).map_err(|e| JsError::new(&e.to_string()))
    }

    #[wasm_bindgen]
    pub fn aver_proof_dafny(source: &str) -> Result<String, JsError> {
        let files = super::proof_dafny_files(source).map_err(|e| JsError::new(&e))?;
        serde_json::to_string(&files).map_err(|e| JsError::new(&e.to_string()))
    }

    #[wasm_bindgen]
    pub fn aver_compile_rust(source: &str) -> Result<String, JsError> {
        let files = super::compile_rust_files(source).map_err(|e| JsError::new(&e))?;
        serde_json::to_string(&files).map_err(|e| JsError::new(&e.to_string()))
    }

    #[wasm_bindgen]
    pub fn aver_proof_lean_project(files_json: &str, entry: &str) -> Result<String, JsError> {
        let files = parse_files(files_json)?;
        let out = super::proof_lean_files_project(&files, entry).map_err(|e| JsError::new(&e))?;
        serde_json::to_string(&out).map_err(|e| JsError::new(&e.to_string()))
    }

    #[wasm_bindgen]
    pub fn aver_proof_dafny_project(files_json: &str, entry: &str) -> Result<String, JsError> {
        let files = parse_files(files_json)?;
        let out = super::proof_dafny_files_project(&files, entry).map_err(|e| JsError::new(&e))?;
        serde_json::to_string(&out).map_err(|e| JsError::new(&e.to_string()))
    }

    #[wasm_bindgen]
    pub fn aver_compile_rust_project(files_json: &str, entry: &str) -> Result<String, JsError> {
        let files = parse_files(files_json)?;
        let out = super::compile_rust_files_project(&files, entry).map_err(|e| JsError::new(&e))?;
        serde_json::to_string(&out).map_err(|e| JsError::new(&e.to_string()))
    }

    #[wasm_bindgen]
    pub fn aver_check_project(files_json: &str, entry: &str) -> Result<String, JsError> {
        let files = parse_files(files_json)?;
        Ok(super::check_project(&files, entry))
    }

    #[wasm_bindgen]
    pub fn aver_verify_project(files_json: &str, entry: &str) -> Result<String, JsError> {
        let files = parse_files(files_json)?;
        Ok(super::verify_project(&files, entry))
    }

    #[wasm_bindgen]
    pub fn aver_verify_hostile_project(files_json: &str, entry: &str) -> Result<String, JsError> {
        let files = parse_files(files_json)?;
        Ok(super::verify_project_hostile(&files, entry))
    }

    #[wasm_bindgen]
    pub fn aver_why_project(files_json: &str, entry: &str) -> Result<String, JsError> {
        let files = parse_files(files_json)?;
        Ok(super::why_project(&files, entry))
    }

    #[wasm_bindgen]
    pub fn aver_context_project(files_json: &str, entry: &str) -> Result<String, JsError> {
        let files = parse_files(files_json)?;
        Ok(super::context_project(&files, entry))
    }

    #[wasm_bindgen]
    pub fn aver_context_md(source: &str) -> String {
        super::context_md_source(source)
    }

    #[wasm_bindgen]
    pub fn aver_context_md_project(files_json: &str, entry: &str) -> Result<String, JsError> {
        let files = parse_files(files_json)?;
        Ok(super::context_md_project(&files, entry))
    }

    #[wasm_bindgen]
    pub fn aver_audit_project(files_json: &str, entry: &str) -> Result<String, JsError> {
        let files = parse_files(files_json)?;
        Ok(super::audit_project(&files, entry))
    }

    #[wasm_bindgen]
    pub fn aver_audit_hostile_project(files_json: &str, entry: &str) -> Result<String, JsError> {
        let files = parse_files(files_json)?;
        Ok(super::audit_project_hostile(&files, entry))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::HashMap;

    /// Read a fixture file relative to the repo root, panicking with the
    /// path on failure so a missing fixture is easy to diagnose.
    fn read(path: &str) -> String {
        std::fs::read_to_string(path).unwrap_or_else(|_| panic!("missing {}", path))
    }

    /// Load the multi-file "rogue" example into a virtual file map, the same
    /// shape the playground sends from JS.
    fn load_rogue_files() -> HashMap<String, String> {
        let root = "tools/website/playground/sources/examples/games/rogue";
        let mut files: HashMap<String, String> = HashMap::new();
        for f in [
            "types",
            "map",
            "fov",
            "pathfinding",
            "combat",
            "render",
            "main",
        ] {
            files.insert(format!("{}.av", f), read(&format!("{}/{}.av", root, f)));
        }
        files
    }

    // Lean export should produce at least one .lean file (besides the
    // lakefile) that mentions the `add` function from the source.
    #[test]
    fn proof_lean_emits_files_for_simple_source() {
        let src = "module M\n intent = \"t\"\n\n\
fn add(a: Int, b: Int) -> Int\n a + b\n\n\
verify add\n add(2, 3) => 5\n";
        let files = proof_lean_files(src).expect("lean files");
        assert!(!files.is_empty(), "Lean export should produce files");
        let any_lean_with_add = files.iter().any(|(k, v)| {
            k.ends_with(".lean")
                && k != "lakefile.lean"
                && (v.contains("def add") || v.contains("add ("))
        });
        assert!(
            any_lean_with_add,
            "generated Lean should mention `add` somewhere; files: {:?}",
            files.keys().collect::<Vec<_>>()
        );
    }

    // Dafny export should produce at least one .dfy file.
    #[test]
    fn proof_dafny_emits_files_for_simple_source() {
        let src = "module M\n intent = \"t\"\n\n\
fn add(a: Int, b: Int) -> Int\n a + b\n\n\
verify add\n add(2, 3) => 5\n";
        let files = proof_dafny_files(src).expect("dafny files");
        assert!(!files.is_empty(), "Dafny export should produce files");
        assert!(
            files.iter().any(|(k, _)| k.ends_with(".dfy")),
            "should include a .dfy"
        );
    }

    // Rust export should produce a full cargo project layout.
    #[test]
    fn compile_rust_emits_cargo_project() {
        let src = "module M\n intent = \"t\"\n\n\
fn add(a: Int, b: Int) -> Int\n a + b\n\n\
fn main() -> Unit\n ! [Console.print]\n Console.print(\"ok\")\n";
        let files = compile_rust_files(src).expect("rust files");
        assert!(
            files.contains_key("Cargo.toml"),
            "Rust export should include Cargo.toml"
        );
        assert!(
            files
                .iter()
                .any(|(k, _)| k.starts_with("src/") && k.ends_with(".rs")),
            "Rust export should include at least one src/*.rs file"
        );
    }

    // Multi-file exports exercise build_project_ctx end to end.
    #[test]
    fn proof_lean_project_handles_multi_file() {
        let files = load_rogue_files();
        let out = proof_lean_files_project(&files, "main.av")
            .expect("multi-file Lean export should succeed");
        assert!(!out.is_empty(), "Lean project export should produce files");
        assert!(out.iter().any(|(k, _)| k.ends_with(".lean")));
    }

    #[test]
    fn proof_dafny_project_handles_multi_file() {
        let files = load_rogue_files();
        let out = proof_dafny_files_project(&files, "main.av")
            .expect("multi-file Dafny export should succeed");
        assert!(!out.is_empty(), "Dafny project export should produce files");
        assert!(out.iter().any(|(k, _)| k.ends_with(".dfy")));
    }

    #[test]
    fn compile_rust_project_handles_multi_file() {
        let files = load_rogue_files();
        let out = compile_rust_files_project(&files, "main.av")
            .expect("multi-file Rust export should succeed");
        assert!(out.contains_key("Cargo.toml"));
    }

    // Sanity check on emitted wasm size — a trivially small module would
    // indicate codegen silently dropped most of the project.
    #[test]
    fn compiles_multi_file_rogue_from_virtual_fs() {
        let files = load_rogue_files();
        let bytes = compile_project_to_wasm(&files, "main.av")
            .expect("rogue project should compile from virtual fs");
        assert!(
            bytes.len() > 1000,
            "emitted wasm looks too small: {}",
            bytes.len()
        );
    }

    // Regression: a record field typed with an imported sum type must
    // survive flattening, and the output must actually be wasm-gc (structs).
    #[test]
    fn compiles_multi_file_wasm_gc_with_imported_type_in_record_field() {
        let mut files = HashMap::new();
        files.insert(
            "tmpreviewb.av".to_string(),
            r#"module TmpReviewB
intent = "dependency with a sum type"
exposes [Status, open]
type Status
Open
Closed
fn open() -> Status
Status.Open
"#
            .to_string(),
        );
        files.insert(
            "main.av".to_string(),
            r#"module Main
intent = "entry record stores an imported sum type"
depends [TmpReviewB]
record Wrapper
status: TmpReviewB.Status
fn make() -> Wrapper
Wrapper(status = TmpReviewB.open())
fn main() -> Int
match make().status
TmpReviewB.Status.Open -> 1
TmpReviewB.Status.Closed -> 0
"#
            .to_string(),
        );
        let bytes = compile_project_to_wasm(&files, "main.av")
            .expect("multi-file wasm-gc project should compile");
        let wat = wasmprinter::print_bytes(&bytes).expect("wasm-gc bytes should print");
        assert!(
            wat.contains("(struct"),
            "playground project compile should use wasm-gc, got:\n{}",
            wat
        );
    }

    // Regression: declared dependency module names must not show up as
    // unknown-ident diagnostics in project-level check.
    #[test]
    fn multi_file_check_has_no_unknown_ident_noise() {
        let files = load_rogue_files();
        let report: serde_json::Value =
            serde_json::from_str(&check_project(&files, "main.av")).unwrap();
        let diagnostics = report["diagnostics"]
            .as_array()
            .cloned()
            .unwrap_or_default();
        let unknown_ident_on_deps: Vec<_> = diagnostics
            .iter()
            .filter(|d| d["slug"] == "unknown-ident")
            .filter(|d| {
                let s = d["summary"].as_str().unwrap_or("");
                ["Types", "Map", "Fov", "Combat", "Render", "Pathfinding"]
                    .iter()
                    .any(|name| s.contains(&format!("'{}'", name)))
            })
            .collect();
        assert!(
            unknown_ident_on_deps.is_empty(),
            "multi-file check still reports unknown-ident for declared deps: {:?}",
            unknown_ident_on_deps
        );
    }

    // A `depends` on a module missing from the file map should surface a
    // readable error, not a panic or an opaque failure.
    #[test]
    fn reports_missing_dep_clearly() {
        let mut files = HashMap::new();
        files.insert(
            "main.av".to_string(),
            [
                "module Main",
                " intent = \"demo\"",
                " depends [Missing]",
                "",
                "fn main() -> Unit",
                " ! [Console.print]",
                " Console.print(\"hi\")",
                "",
            ]
            .join("\n"),
        );
        let err = compile_project_to_wasm(&files, "main.av").unwrap_err();
        assert!(
            err.contains("Missing") || err.contains("not found"),
            "expected missing-module error, got: {}",
            err
        );
    }

    // Hostile audit must not panic on the time-dependent showcase, and the
    // report should contain the hostile mismatch diagnostic.
    #[test]
    #[cfg(feature = "runtime")]
    fn audit_source_hostile_does_not_panic_on_showcase() {
        let src = r#"module DeadlineCheck
intent =
"Demonstrate `aver verify --hostile`: the law passes under real time"
"but breaks under the saturated-clock adversarial profile. Toggle the"
"hostile checkbox next to Audit and watch the failure call out a"
"missing `when` precondition or unpinned `given` for Time.unixMs."
effects [Time.unixMs]
fn willCompleteBeforeDeadline(deadlineMs: Int) -> Bool
? "is the current time still before the deadline?"
! [Time.unixMs]
Time.unixMs() < deadlineMs
verify willCompleteBeforeDeadline law deadlineHolds
given d: Int = [9999999999999]
willCompleteBeforeDeadline(d) => true
"#;
        let report = audit_source_hostile(src);
        assert!(
            !report.is_empty(),
            "audit_source_hostile returned empty payload"
        );
        assert!(
            report.contains("verify-hostile-mismatch"),
            "expected the hostile failures in the report; got: {}",
            &report[..report.len().min(400)]
        );
    }
}