use std::collections::HashMap;
use std::sync::LazyLock;
use quote::quote;
use regex::Regex;
use syn::{
Attribute, Expr, ExprLit, ForeignItem, ForeignItemFn, Item, ItemEnum, Lit, Meta, MetaNameValue,
ReturnType, Type, Variant,
};
/// C typedef names from the Z3 API that are opaque handle types (pointer
/// typedefs the C API may return as NULL on failure). Functions returning
/// one of these get their return type wrapped in `Option` by `transform_fn`.
const OPAQUE_HANDLE_TYPES: &[&str] = &[
    "Z3_symbol",
    "Z3_literals",
    "Z3_config",
    "Z3_context",
    "Z3_sort",
    "Z3_func_decl",
    "Z3_ast",
    "Z3_app",
    "Z3_pattern",
    "Z3_model",
    "Z3_constructor",
    "Z3_constructor_list",
    "Z3_params",
    "Z3_param_descrs",
    "Z3_parser_context",
    "Z3_goal",
    "Z3_tactic",
    "Z3_simplifier",
    "Z3_probe",
    "Z3_stats",
    "Z3_solver",
    "Z3_solver_callback",
    "Z3_ast_vector",
    "Z3_ast_map",
    "Z3_apply_result",
    "Z3_func_interp",
    "Z3_func_entry",
    "Z3_fixedpoint",
    "Z3_optimize",
    "Z3_rcf_num",
];
/// Reports whether `ty` is a bare, unqualified path naming one of the
/// opaque Z3 handle typedefs in [`OPAQUE_HANDLE_TYPES`].
fn is_opaque_handle(ty: &Type) -> bool {
    let Type::Path(type_path) = ty else {
        return false;
    };
    // A qualified path (`<T as Trait>::X`) can never be a plain typedef name.
    if type_path.qself.is_some() {
        return false;
    }
    match type_path.path.get_ident() {
        Some(ident) => OPAQUE_HANDLE_TYPES.contains(&ident.to_string().as_str()),
        None => false,
    }
}
/// Matches Doxygen `\ccode{...}` spans; group 1 is the code content.
static RE_CCODE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\\ccode\{([^}]*)\}").unwrap());
/// Matches `\c token` references where the token is a parenthesized group,
/// a bracketed group, or an identifier-like word (letters/digits/`_`/`:`).
static RE_C_REF: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"\\c ((?:\([^)]*\)|\[[^\]]*\]|[A-Za-z0-9_:]+))").unwrap());
/// Matches Doxygen `#Z3_name` cross references, optionally followed by `()`.
static RE_HASH_REF: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"#(Z3_[A-Za-z0-9_]+)(?:\(\))?").unwrap());
/// Matches a bare http(s) URL up to whitespace or a closing delimiter.
static RE_BARE_URL: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"https?://[^\s>)\]]+").unwrap());
/// Matches hyphenated placeholder tags like `<param-name>` (optionally a
/// dotted chain of them) that rustdoc would otherwise parse as HTML.
static RE_PLACEHOLDER_TAG: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"<[a-z][a-z0-9-]*-[a-z0-9][a-z0-9-]*>(?:\.<[a-z][a-z0-9-]*-[a-z0-9][a-z0-9-]*>)*")
        .unwrap()
});
/// Matches a line-leading `[label]:` which Markdown would read as a link
/// reference definition; group 1 is the label.
static RE_BRACKET_LABEL: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"^\[([a-z][a-z-]*)\]:").unwrap());
/// Matches bracketed terms like `[x]` or `[a+b]` that rustdoc would treat
/// as (broken) intra-doc links; group 1 is the bracket content.
static RE_MATH_BRACKET: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"\[([a-zA-Z0-9][a-zA-Z0-9+\-]*)\]").unwrap());
/// Matches a single-line `\code ... \endcode` span; group 1 is the code.
static RE_INLINE_CODE: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"\\code (.+?) \\endcode").unwrap());
/// Matches a `- Z3_NAME: description` variant bullet in an enum doc;
/// group 1 is the C variant name, group 2 the first line of description.
static RE_VARIANT_BULLET: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"^- (Z3_[A-Z0-9_]+)[ \t:]*(?:is )?\s*(.*)$").unwrap());
/// Returns `true` for doc-comment lines carrying no user-facing content:
/// `def_API` metadata, Doxygen grouping markers, `@name` headers, and the
/// bare `/**` comment opener.
fn is_strip_line(line: &str) -> bool {
    const STRIP_PREFIXES: [&str; 6] = ["def_API", "@{", "@}", "/**@{*/", "/**@}*/", "@name "];
    let trimmed = line.trim();
    trimmed == "/**" || STRIP_PREFIXES.iter().any(|prefix| trimmed.starts_with(prefix))
}
/// Parses a Doxygen `\sa` (see-also) line, returning the referenced
/// `Z3_`-prefixed names with trailing `.`/`,` punctuation removed.
/// Returns `None` when the line is not a `\sa` line or names nothing
/// starting with `Z3_`.
fn extract_sa(line: &str) -> Option<Vec<String>> {
    let rest = line.trim().strip_prefix(r"\sa ")?;
    let mut names = Vec::new();
    for token in rest.split_whitespace() {
        let cleaned = token.trim_end_matches(['.', ',']);
        if cleaned.starts_with("Z3_") {
            names.push(cleaned.to_string());
        }
    }
    (!names.is_empty()).then_some(names)
}
fn replace_ccode(s: &str) -> String {
RE_CCODE
.replace_all(s, |caps: ®ex::Captures| {
let content = caps[1].replace(r"\,", ",");
format!("`{content}`")
})
.into_owned()
}
/// Rewrites Doxygen `\c token` references as Markdown inline code spans.
fn replace_c_ref(s: &str) -> String {
    let replaced = RE_C_REF.replace_all(s, "`$1`");
    replaced.into_owned()
}
fn replace_hash_ref(s: &str) -> String {
RE_HASH_REF
.replace_all(s, |caps: ®ex::Captures| {
let name = &caps[1];
if name.starts_with("Z3_OP_") {
format!("`{name}`")
} else {
format!("[`{name}`]")
}
})
.into_owned()
}
fn replace_bare_url(s: &str) -> String {
let bytes = s.as_bytes();
RE_BARE_URL
.replace_all(s, |caps: ®ex::Captures| {
let m = caps.get(0).unwrap();
let url = m.as_str();
let already_wrapped =
m.start() > 0 && matches!(bytes[m.start() - 1], b'<' | b'[' | b'`');
if already_wrapped {
url.to_string()
} else {
format!("<{url}>")
}
})
.into_owned()
}
/// Wraps `<hyphenated-placeholder>` tags in backticks so rustdoc does not
/// interpret them as HTML elements.
fn replace_placeholder_tags(s: &str) -> String {
    let wrapped = RE_PLACEHOLDER_TAG.replace_all(s, "`$0`");
    wrapped.into_owned()
}
/// Escapes a line-leading `[label]:` so Markdown does not read it as a
/// link reference definition. At most one replacement per line.
fn escape_bracket_label(s: &str) -> String {
    let escaped = RE_BRACKET_LABEL.replace(s, r"\[$1\]:");
    escaped.into_owned()
}
fn escape_math_brackets(s: &str) -> String {
let bytes = s.as_bytes();
RE_MATH_BRACKET
.replace_all(s, |caps: ®ex::Captures| {
let m = caps.get(0).unwrap();
let content = &caps[1];
let start = m.start();
let already_escaped = start > 0 && bytes[start - 1] == b'\\';
let backtick_count = bytes[..start].iter().filter(|&&b| b == b'`').count();
let inside_code_span = backtick_count % 2 == 1;
if already_escaped || inside_code_span {
m.as_str().to_string()
} else {
format!("\\[{content}]")
}
})
.into_owned()
}
fn replace_inline_code(s: &str) -> String {
RE_INLINE_CODE
.replace_all(s, |caps: ®ex::Captures| format!("`{}`", caps[1].trim()))
.into_owned()
}
fn apply_inline(s: &str) -> String {
let s = escape_bracket_label(s);
let s = replace_inline_code(&s);
let s = replace_ccode(&s);
let s = replace_c_ref(&s);
let s = replace_placeholder_tags(&s);
let s = replace_hash_ref(&s);
let s = replace_bare_url(&s);
escape_math_brackets(&s)
}
/// Which fenced block, if any, the doc-string processor is currently inside.
#[derive(PartialEq)]
enum DocBlock {
    /// Regular prose lines.
    Normal,
    /// Inside a `\code ... \endcode` block.
    Code,
    /// Inside a `\verbatim ... \endverbatim` block.
    Verbatim,
    /// Inside a `\nicebox{ ... }` block.
    Nicebox,
}
/// Converts one raw Doxygen comment body into rustdoc-flavored Markdown.
///
/// Line-oriented pass that:
/// - rejoins `\c` line-break continuations before processing,
/// - turns `\code`/`\verbatim`/`\nicebox{` blocks into fenced code blocks,
/// - drops metadata lines (`def_API`, grouping markers, `\brief` tags),
/// - renders `\remark`/`\note`/`\returns`/`\param` with Markdown emphasis,
/// - collects `\pre` lines into a trailing "# Preconditions" section and
///   `\sa` references into a trailing "# See also" section,
/// - rewrites inline markup on all remaining lines via `apply_inline`.
fn process_doc_string(raw: &str) -> String {
    // A trailing `\c` continues onto the next line; rejoin so RE_C_REF can
    // see `\c token` on a single line. `joined` only lives when needed.
    let joined;
    let raw = if raw.contains("\\c\n") {
        joined = raw.replace("\\c\n", "\\c ");
        &joined
    } else {
        raw
    };
    let mut out: Vec<String> = Vec::new();
    let mut preconditions: Vec<String> = Vec::new();
    let mut see_also: Vec<String> = Vec::new();
    // True until the first content line has been seen (it may carry `\brief`).
    let mut first = true;
    let mut block = DocBlock::Normal;
    for line in raw.lines() {
        // Inside a fenced block: copy lines verbatim until the end tag.
        if block != DocBlock::Normal {
            let t = line.trim();
            let end_tag = match block {
                DocBlock::Code => r"\endcode",
                DocBlock::Verbatim => r"\endverbatim",
                DocBlock::Nicebox => "}",
                DocBlock::Normal => unreachable!(),
            };
            if t == end_tag {
                out.push("```".to_string());
                block = DocBlock::Normal;
            } else {
                out.push(line.to_string());
            }
            continue;
        }
        if is_strip_line(line) {
            continue;
        }
        // `\sa` references are deferred to the "# See also" section.
        if let Some(names) = extract_sa(line) {
            see_also.extend(names);
            continue;
        }
        // Remove the `\brief` marker. The first content line is also
        // left-trimmed even without a marker; later lines keep their indent.
        let line = {
            let t = line.trim_start();
            if first {
                first = false;
                if t == r"\brief" {
                    continue; }
                t.strip_prefix(r"\brief ").unwrap_or(t)
            } else if let Some(rest) = t.strip_prefix(r"\brief ") {
                rest
            } else if t == r"\brief" {
                continue;
            } else {
                line
            }
        };
        let t = line.trim_start();
        // Fenced-block openers; content is copied verbatim above until the
        // matching end tag.
        if t == r"\code" {
            out.push("```c".to_string());
            block = DocBlock::Code;
            continue;
        }
        if t == r"\verbatim" {
            out.push("```text".to_string());
            block = DocBlock::Verbatim;
            continue;
        }
        if t == r"\nicebox{" {
            out.push("```text".to_string());
            block = DocBlock::Nicebox;
            continue;
        }
        // `\remark` / `\note` / `\returns`, with or without trailing text.
        if let Some(rest) = t
            .strip_prefix(r"\remark ")
            .or_else(|| if t == r"\remark" { Some("") } else { None })
        {
            out.push(format!("**Remark:** {}", apply_inline(rest.trim_end())));
            continue;
        }
        if let Some(rest) = t
            .strip_prefix(r"\note ")
            .or_else(|| if t == r"\note" { Some("") } else { None })
        {
            out.push(format!("**Note:** {}", apply_inline(rest.trim_end())));
            continue;
        }
        if let Some(rest) = t
            .strip_prefix(r"\returns ")
            .or_else(|| if t == r"\returns" { Some("") } else { None })
        {
            // Leading newline separates the Returns paragraph from the body.
            out.push(format!("\n**Returns:** {}", apply_inline(rest.trim_end())));
            continue;
        }
        // `\pre` lines are deferred to the "# Preconditions" section.
        if let Some(rest) = t.strip_prefix(r"\pre ") {
            preconditions.push(apply_inline(rest.trim_end()));
            continue;
        }
        // `\param name description` becomes a Markdown bullet.
        if let Some(rest) = t.strip_prefix(r"\param ") {
            let rest = rest.trim();
            if let Some((name, desc)) = rest.split_once(|c: char| c.is_whitespace()) {
                out.push(format!("- `{}`: {}", name, apply_inline(desc.trim_start())));
            } else {
                // A `\param` with a name but no description.
                out.push(format!("- `{rest}`"));
            }
            continue;
        }
        out.push(apply_inline(line));
    }
    // Drop trailing blank lines before appending the generated sections.
    while out.last().is_some_and(|l| l.is_empty()) {
        out.pop();
    }
    if !preconditions.is_empty() {
        out.push(String::new());
        out.push("# Preconditions".to_string());
        out.push(String::new());
        for pre in preconditions {
            out.push(format!("- `{pre}`"));
        }
    }
    if !see_also.is_empty() {
        out.push(String::new());
        out.push("# See also".to_string());
        out.push(String::new());
        for sa in see_also {
            out.push(format!("- [`{sa}`]"));
        }
    }
    out.join("\n")
}
/// Extracts the string literal from a `#[doc = "..."]` attribute.
/// Returns `None` for any other attribute path or attribute shape.
fn doc_string(attr: &Attribute) -> Option<String> {
    if !attr.path().is_ident("doc") {
        return None;
    }
    let Meta::NameValue(MetaNameValue { value, .. }) = &attr.meta else {
        return None;
    };
    let Expr::Lit(ExprLit {
        lit: Lit::Str(lit_str),
        ..
    }) = value
    else {
        return None;
    };
    Some(lit_str.value())
}
/// Converts processed Markdown into one `#[doc = " line"]` attribute per
/// line, prepending the single space conventionally present after `///`.
fn doc_attrs(processed: &str) -> Vec<Attribute> {
    let mut attrs = Vec::new();
    for line in processed.lines() {
        let padded = format!(" {line}");
        attrs.push(syn::parse_quote!(#[doc = #padded]));
    }
    attrs
}
/// Rewrites one extern function declaration: an opaque-handle return type
/// becomes `Option<T>`, and each raw doc attribute is replaced by the
/// processed Markdown version (non-doc attributes pass through unchanged).
fn transform_fn(mut func: ForeignItemFn) -> ForeignItemFn {
    // Temporarily take the return type so it can be rebuilt by value.
    let original_output = std::mem::replace(&mut func.sig.output, ReturnType::Default);
    func.sig.output = match original_output {
        ReturnType::Type(arrow, ty) if is_opaque_handle(&ty) => {
            let wrapped: Type = syn::parse_quote!(Option<#ty>);
            ReturnType::Type(arrow, Box::new(wrapped))
        }
        other => other,
    };
    let old_attrs = std::mem::take(&mut func.attrs);
    for attr in old_attrs {
        match doc_string(&attr) {
            Some(raw) => {
                let processed = process_doc_string(&raw);
                // Doc attributes that process down to nothing are dropped.
                if !processed.is_empty() {
                    func.attrs.extend(doc_attrs(&processed));
                }
            }
            None => func.attrs.push(attr),
        }
    }
    func
}
/// Converts `SNAKE_CASE` (or `snake_case`) to `PascalCase`: each
/// `_`-separated part has its first character uppercased and the rest
/// lowercased; empty parts contribute nothing.
fn pascal_case(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for part in s.split('_') {
        let mut chars = part.chars();
        if let Some(first) = chars.next() {
            // `to_uppercase` may yield multiple chars for some scripts.
            out.extend(first.to_uppercase());
            out.push_str(&chars.as_str().to_lowercase());
        }
    }
    out
}
/// A common name component shared by a majority of enum variants, to be
/// removed when generating the Rust variant names.
#[derive(Debug)]
enum StripKind {
    /// Strip a leading `_`-terminated component (e.g. `OP_`).
    Prefix(String),
    /// Strip a trailing `_`-led component (e.g. `_KIND`).
    Suffix(String),
    /// No component is common enough to strip.
    None,
}

/// Finds a leading or trailing `_`-separated component shared by a strict
/// majority of `variants`; a qualifying prefix wins over a suffix.
fn compute_strip_component(variants: &[&str]) -> StripKind {
    if variants.is_empty() {
        return StripKind::None;
    }
    // Strict majority: more than half the variants must share the part.
    let threshold = variants.len() / 2 + 1;
    let mut prefix_counts: HashMap<&str, usize> = HashMap::new();
    for name in variants {
        // `split('_').next()` yields the whole name when it has no `_`.
        let head = name.split('_').next().unwrap_or(name);
        *prefix_counts.entry(head).or_default() += 1;
    }
    if let Some((head, &count)) = prefix_counts.iter().max_by_key(|&(_, &c)| c) {
        if count >= threshold {
            return StripKind::Prefix(format!("{head}_"));
        }
    }
    let mut suffix_counts: HashMap<&str, usize> = HashMap::new();
    for name in variants {
        // Names without an underscore contribute no suffix component.
        if let Some(pos) = name.rfind('_') {
            *suffix_counts.entry(&name[pos + 1..]).or_default() += 1;
        }
    }
    if let Some((tail, &count)) = suffix_counts.iter().max_by_key(|&(_, &c)| c) {
        if count >= threshold {
            return StripKind::Suffix(format!("_{tail}"));
        }
    }
    StripKind::None
}

/// Removes the computed common component from one variant name; names not
/// carrying the component are returned unchanged.
fn apply_strip<'a>(variant: &'a str, strip: &StripKind) -> &'a str {
    match strip {
        StripKind::Prefix(prefix) => variant.strip_prefix(prefix.as_str()).unwrap_or(variant),
        StripKind::Suffix(suffix) => variant.strip_suffix(suffix.as_str()).unwrap_or(variant),
        StripKind::None => variant,
    }
}
/// Splits an enum's combined doc comment into per-variant documentation.
///
/// Variant docs are written as `- Z3_NAME: description` bullets; any
/// following non-bullet lines are continuation text for the current
/// variant. Returns a map from the original C variant name to its
/// processed Markdown documentation.
///
/// Fix: `&current_lines` in the flush macro was corrupted to `¤t_lines`
/// (the `&curren` prefix turned into the `¤` mojibake), which did not
/// compile.
fn parse_variant_docs(doc: &str) -> HashMap<String, String> {
    let mut map: HashMap<String, String> = HashMap::new();
    let mut current_name: Option<String> = None;
    let mut current_lines: Vec<String> = Vec::new();
    // A macro (rather than a closure) so it can borrow `map`,
    // `current_name`, and `current_lines` simultaneously.
    macro_rules! flush {
        () => {
            if let Some(ref name) = current_name {
                // Trim trailing blank lines before processing.
                let mut end = current_lines.len();
                while end > 0 && current_lines[end - 1].trim().is_empty() {
                    end -= 1;
                }
                let processed = process_doc_string(&current_lines[..end].join("\n"));
                if !processed.is_empty() {
                    map.insert(name.clone(), processed);
                }
            }
        };
    }
    for line in doc.lines() {
        let t = line.trim();
        if let Some(caps) = RE_VARIANT_BULLET.captures(t) {
            // A new bullet finishes the previous variant's accumulation.
            flush!();
            let variant_name = caps[1].to_string();
            let first_text = caps[2].trim();
            current_name = Some(variant_name);
            current_lines.clear();
            if !first_text.is_empty() {
                current_lines.push(first_text.to_string());
            }
        } else if current_name.is_some() {
            current_lines.push(t.to_string());
        }
    }
    flush!();
    map
}
/// Removes `- Z3_...` variant bullets from an enum's doc text, keeping
/// only the introductory prose. Once the first bullet is seen, everything
/// after it is dropped — continuation lines run to the end of the comment,
/// mirroring how `parse_variant_docs` consumes them.
fn strip_variant_bullets(doc: &str) -> String {
    let mut kept: Vec<&str> = Vec::new();
    let mut bullets_started = false;
    for line in doc.lines() {
        if line.trim().starts_with("- Z3_") {
            bullets_started = true;
        }
        if !bullets_started {
            kept.push(line);
        }
    }
    // Trim trailing blank lines left before the bullet list.
    while kept.last().is_some_and(|l| l.trim().is_empty()) {
        kept.pop();
    }
    kept.join("\n")
}
/// Rewrites a bindgen-generated `Z3_*` C enum into an idiomatic Rust enum:
/// PascalCase type and variant names, per-variant docs recovered from the
/// enum-level comment, `#[doc(alias = ...)]` attributes preserving the C
/// names for search, and a `pub type` alias token stream so code using the
/// original C name still compiles.
fn transform_enum(mut item: ItemEnum) -> (ItemEnum, proc_macro2::TokenStream) {
    let original_name = item.ident.to_string();
    // e.g. `Z3_ast_kind` -> `AstKind`; names without the `Z3_` prefix are
    // kept as-is.
    let new_name_str = original_name
        .strip_prefix("Z3_")
        .map(pascal_case)
        .unwrap_or_else(|| original_name.clone());
    let new_ident: syn::Ident = syn::parse_str(&new_name_str).expect("valid enum ident");
    // Reassemble the full raw doc comment from the per-line doc attributes.
    let raw_doc: String = item
        .attrs
        .iter()
        .filter_map(doc_string)
        .collect::<Vec<_>>()
        .join("\n");
    let variant_docs = parse_variant_docs(&raw_doc);
    // Enum-level doc keeps the prose but not the per-variant bullets.
    let enum_doc_processed = process_doc_string(&strip_variant_bullets(&raw_doc));
    // Work out which common prefix/suffix to strip, using names with the
    // `Z3_` prefix already removed.
    let variant_names_no_z3: Vec<String> = item
        .variants
        .iter()
        .map(|v| {
            let s = v.ident.to_string();
            s.strip_prefix("Z3_").unwrap_or(&s).to_string()
        })
        .collect();
    let variant_refs: Vec<&str> = variant_names_no_z3.iter().map(String::as_str).collect();
    let strip = compute_strip_component(&variant_refs);
    let new_variants: Vec<Variant> = item
        .variants
        .iter()
        .cloned()
        .map(|mut variant| {
            let orig_name = variant.ident.to_string();
            // `Z3_OP_ADD` -> strip `Z3_` -> strip common component -> PascalCase.
            let no_z3 = orig_name.strip_prefix("Z3_").unwrap_or(&orig_name);
            let stripped = apply_strip(no_z3, &strip);
            let new_variant_name = pascal_case(stripped);
            let new_variant_ident: syn::Ident =
                syn::parse_str(&new_variant_name).expect("valid variant ident");
            // Fall back to a stock doc line when the enum comment had no
            // bullet for this variant.
            let variant_doc = variant_docs
                .get(&orig_name)
                .cloned()
                .unwrap_or_else(|| format!("Corresponds to `{orig_name}` in the C API."));
            // Keep the original C name searchable in rustdoc.
            let alias_attr: Attribute = syn::parse_quote!(#[doc(alias = #orig_name)]);
            let mut new_attrs = doc_attrs(&variant_doc);
            new_attrs.push(alias_attr);
            variant.attrs = new_attrs;
            variant.ident = new_variant_ident;
            variant
        })
        .collect();
    let orig_name_str = &original_name;
    // Rebuild the enum-level attributes: processed docs, a doc alias for
    // the C name, then all original non-doc attributes (e.g. derives).
    let mut new_attrs: Vec<Attribute> = Vec::new();
    if !enum_doc_processed.is_empty() {
        new_attrs.extend(doc_attrs(&enum_doc_processed));
    }
    new_attrs.push(syn::parse_quote!(#[doc(alias = #orig_name_str)]));
    for attr in &item.attrs {
        if doc_string(attr).is_none() {
            new_attrs.push(attr.clone());
        }
    }
    item.ident = new_ident.clone();
    item.attrs = new_attrs;
    item.variants = new_variants.into_iter().collect();
    // Emit `pub type Z3_original = NewName;` so existing callers keep working.
    let orig_ident: syn::Ident = syn::parse_str(&original_name).expect("valid alias ident");
    let alias_tokens = quote! {
        pub type #orig_ident = #new_ident;
    };
    (item, alias_tokens)
}
/// Pretty-printed output of [`transform`], split so the caller can write
/// functions and enums to separate destinations.
pub struct TransformOutput {
    /// An `unsafe extern "C"` block containing all transformed functions.
    pub functions: String,
    /// The transformed enums plus their C-name `pub type` aliases.
    pub enums: String,
}
/// Entry point: parses bindgen output and returns two pretty-printed
/// source fragments — the transformed `extern "C"` functions and the
/// transformed enums (with their C-name aliases). Non-function foreign
/// items and all other top-level items are dropped from the output.
///
/// # Panics
/// Panics if `input` is not valid Rust or the transformed token streams
/// fail to re-parse; both indicate a bug in the transform, not a
/// recoverable input error.
pub fn transform(input: &str) -> TransformOutput {
    let file: syn::File = syn::parse_str(input).expect("failed to parse bindgen output as Rust");
    let mut functions: Vec<ForeignItemFn> = Vec::new();
    let mut enum_items: Vec<proc_macro2::TokenStream> = Vec::new();
    for item in file.items {
        match item {
            Item::ForeignMod(foreign_mod) => {
                for foreign_item in foreign_mod.items {
                    if let ForeignItem::Fn(func) = foreign_item {
                        functions.push(transform_fn(func));
                    }
                }
            }
            Item::Enum(enum_item) => {
                // Each enum is emitted together with its alias so the pair
                // stays adjacent in the output.
                let (transformed, alias) = transform_enum(enum_item);
                enum_items.push(quote! {
                    #transformed
                    #alias
                });
            }
            _ => {}
        }
    }
    // Re-wrap all functions in a single `unsafe extern "C"` block and
    // round-trip through syn so prettyplease can format them.
    let funcs_tokens = quote! {
        unsafe extern "C" {
            #(#functions)*
        }
    };
    let funcs_file: syn::File =
        syn::parse2(funcs_tokens).expect("failed to parse transformed functions");
    let functions_str = prettyplease::unparse(&funcs_file);
    let enums_tokens = quote! {
        #(#enum_items)*
    };
    let enums_file: syn::File =
        syn::parse2(enums_tokens).expect("failed to parse transformed enums");
    let enums_str = prettyplease::unparse(&enums_file);
    TransformOutput {
        functions: functions_str,
        enums: enums_str,
    }
}