import subprocess
import os
import shutil
import re
from collections import defaultdict
from tree_sitter import Language, Parser
import tree_sitter_rust
# Directory this script is executed from; every other path derives from it.
path_cwd = os.path.abspath(os.getcwd())
# Local checkout of the upstream dftd4 repository (provides include/dftd4.h).
path_repo = f"{os.getenv('HOME')}/Git-Others/dftd4"
# Sibling "header" directory the C header is copied into.
path_header = f"{path_cwd}/../header"
# Scratch directory bindgen is run in.
path_temp = f"{path_cwd}/tmp"
# Root of the Rust crate that receives the generated sources.
path_out = f"{path_cwd}/.."
# (DFTD4_API_SUFFIX__* suffix, cargo feature name) pairs, oldest to newest.
# Features are cumulative: each entry extends the previous one.
api_versions = [
    ("V_3_0", "api-v3_0"),
    ("V_3_1", "api-v3_1"),
    ("V_3_2", "api-v3_2"),
    ("V_3_3", "api-v3_3"),
    ("V_3_4", "api-v3_4"),
    ("V_3_5", "api-v3_5"),
    ("V_4_0", "api-v4_0"),
    ("V_4_2", "api-v4_2"),
]
# Feature assigned to functions whose suffix is not listed in api_versions.
default_api_version = "api-v3_0"
def parse_api_versions(header_content):
    """Extract the API version suffix of every exported dftd4 function.

    Scans the C header text for declarations of the form

        DFTD4_API_ENTRY <ret> DFTD4_API_CALL <name>(...) DFTD4_API_SUFFIX__<ver>;

    and returns a dict mapping function name -> version suffix (e.g.
    ``{"dftd4_get_version": "V_3_0"}``).

    Note: the pattern's ``\\s`` classes and ``[^)]*`` both match newlines,
    so one pass over the raw content already handles declarations split
    across lines.  The previous implementation did a second, redundant pass
    over a newline-joined copy of the header (and passed re.MULTILINE,
    which has no effect without ^/$ anchors); both have been dropped.
    """
    pattern = r'DFTD4_API_ENTRY\s+\w+\s+DFTD4_API_CALL\s+(\w+)\s*\([^)]*\)\s*DFTD4_API_SUFFIX__(\w+);'
    version_map = {}
    for match in re.finditer(pattern, header_content):
        version_map[match.group(1)] = match.group(2)
    return version_map
def get_feature_for_version(version_suffix):
    """Return the cargo feature name for a header version suffix.

    Unknown suffixes fall back to the default (oldest) API feature.
    """
    # Suffixes in api_versions are unique, so a dict lookup is equivalent
    # to the first-match linear scan.
    return dict(api_versions).get(version_suffix, default_api_version)
def add_version_attributes(token, version_map):
    """Insert ``#[cfg(feature = "...")]`` above each versioned function.

    token: Rust source text produced by bindgen.
    version_map: function name -> API version suffix (from parse_api_versions).
    Returns the source with one cfg attribute inserted immediately before the
    ``#[doc = ...]`` block of every function present in version_map.

    NOTE(review): a function emitted by bindgen *without* a preceding
    ``#[doc = ...]`` attribute is never gated — presumably every exported
    function in dftd4.h carries a doc comment; confirm against the header.
    """
    # Resolve each function's version suffix to a cargo feature name.
    func_cfg_map = {}
    for func_name, version_suffix in version_map.items():
        feature = get_feature_for_version(version_suffix)
        func_cfg_map[func_name] = feature
    lines = token.split('\n')
    result_lines = []
    i = 0
    # Each line of a multi-line doc block re-triggers the lookahead below;
    # this set ensures the cfg attribute is inserted only once per function
    # (i.e. before the *first* doc line).
    processed_funcs = set()
    while i < len(lines):
        line = lines[i]
        stripped = line.strip()
        if stripped.startswith('#[doc ='):
            # Advance j past the run of consecutive #[doc = ...] lines to
            # reach the item they document.
            j = i + 1
            while j < len(lines) and lines[j].strip().startswith('#[doc ='):
                j += 1
            if j < len(lines):
                func_line = lines[j]
                func_match = re.match(r'\s*pub fn (\w+)\s*\(', func_line)
                if func_match:
                    func_name = func_match.group(1)
                    if func_name in func_cfg_map and func_name not in processed_funcs:
                        feature = func_cfg_map[func_name]
                        # Match the doc comment's indentation so the cfg
                        # attribute lines up with the attributes below it.
                        indent = len(line) - len(line.lstrip())
                        cfg_line = ' ' * indent + f'#[cfg(feature = "{feature}")]'
                        result_lines.append(cfg_line)
                        processed_funcs.add(func_name)
        result_lines.append(line)
        i += 1
    return '\n'.join(result_lines)
def generate_static_ffi(token, version_map):
    """Post-process bindgen output into the statically-linked FFI source.

    token: raw Rust source produced by bindgen.
    version_map: function name -> API version suffix (from parse_api_versions).
    Returns the full text of ``src/ffi_static.rs``.
    """
    # bindgen emits fully-qualified ::core::ffi::* type paths; the generated
    # file imports c_char/c_int directly (see `use` line below), so strip
    # the qualification.
    token = token.replace("::core::ffi::", "")
    # NOTE(review): presumably fixes bindgen's rendering of the API version
    # macro (minor scaled by 100 rather than offset by 100) — confirm
    # against the DFTD4 version macro definition in dftd4.h.
    token = token.replace("minor + 100", "minor * 100")
    # Gate each function behind the cargo feature for its API version.
    token = add_version_attributes(token, version_map)
    feature_docs = """//! FFI bindings for dftd4.
//!
//! # API Version Features
//!
//! This crate provides versioned FFI bindings through cargo features:
//!
//! - `api-v3_0`: Base API (default)
//! - `api-v3_1`: Extends api-v3_0, adds custom D4 model and properties
//! - `api-v3_2`: Extends api-v3_1, adds pairwise dispersion
//! - `api-v3_3`: Extends api-v3_2
//! - `api-v3_4`: Extends api-v3_3
//! - `api-v3_5`: Extends api-v3_4, adds numerical hessian
//! - `api-v4_0`: Extends api-v3_5, adds D4S model
//! - `api-v4_2`: Extends api-v4_0, adds realspace cutoff setters
//!
//! Features are cumulative: enabling `api-v3_5` also enables all functions from
//! earlier versions (api-v3_0, api-v3_1, api-v3_2, api-v3_3, api-v3_4).
#![allow(non_camel_case_types)]
use core::ffi::{c_char, c_int};
"""
    return feature_docs + "\n\n" + token
def dyload_parse_file(token):
    """Parse bindgen output as Rust and collect its extern blocks.

    Returns ``(tree, extern_nodes)`` where ``extern_nodes`` is the list of
    ``foreign_mod_item`` nodes found at the top level of the parse tree.
    """
    rust_parser = Parser(Language(tree_sitter_rust.language()))
    # Drop the `unsafe` qualifier so the extern blocks parse uniformly.
    source = token.replace("unsafe extern \"C\"", "extern \"C\"")
    tree = rust_parser.parse(bytes(source, "utf8"))
    extern_nodes = [
        child for child in tree.root_node.children
        if child.type == "foreign_mod_item"
    ]
    return tree, extern_nodes
def dyload_remove_extern(parsed, node_extern):
    """Return the full parsed source text with one extern block's text deleted."""
    full_text = parsed.root_node.text.decode("utf8")
    extern_text = node_extern.text.decode("utf8")
    return full_text.replace(extern_text, "")
def dyload_get_ffi_fn(node):
    """Collect the function-signature items declared inside an extern block."""
    assert node.type == "foreign_mod_item"
    # The extern block's body is its last child; signatures live inside it.
    body = node.children[-1]
    return [child for child in body.children if child.type == "function_signature_item"]
def dyload_fn_split(node):
    """Split a ``function_signature_item`` node into its named parts.

    Returns a dict with keys ``visibility_modifier``, ``identifier``,
    ``parameters`` and ``return_type``; parts absent from the signature
    stay ``None``.  ``identifier`` and ``parameters`` must be present.
    """
    assert node.type == "function_signature_item"
    result = {
        "visibility_modifier": None,
        "identifier": None,
        "parameters": None,
        "return_type": None,
    }
    children = node.children
    for idx, child in enumerate(children):
        if child.type == "->":
            # The node immediately following the arrow token is the return type.
            result["return_type"] = children[idx + 1]
        elif child.type in result:
            result[child.type] = child
    assert result["identifier"] is not None
    assert result["parameters"] is not None
    return result
def normalize_ffi_types(text):
    """Strip the fully-qualified ``::core::ffi::`` prefix from C type names."""
    replacements = (
        ("::core::ffi::c_int", "c_int"),
        ("::core::ffi::c_char", "c_char"),
    )
    for qualified, bare in replacements:
        text = text.replace(qualified, bare)
    return text
def dyload_main(token):
    """Generate the four dynamic-loading Rust source files from bindgen output.

    token: raw Rust source produced by bindgen.
    Returns a dict mapping logical file names ("ffi_base", "dyload_struct",
    "dyload_initializer", "dyload_compatible") to generated Rust source text.
    """
    parsed, parsed_ffi = dyload_parse_file(token)
    token_ffi_base = token
    # Collect every foreign function signature across all extern "C" blocks.
    nodes_fn = []
    for node_extern in parsed_ffi:
        nodes_fn.extend(dyload_get_ffi_fn(node_extern))
    token_dyload_struct = ""
    token_dyload_initializer = ""
    token_dyload_compatible = ""
    for node_fn in nodes_fn:
        dict_fn = dyload_fn_split(node_fn)
        # Default to "pub" when bindgen emitted no explicit visibility.
        visibility_modifier = dict_fn["visibility_modifier"].text.decode("utf8") if dict_fn["visibility_modifier"] else "pub"
        identifier = dict_fn["identifier"].text.decode("utf8")
        return_type_string = ""
        if dict_fn["return_type"] is not None:
            return_type_string = " -> " + normalize_ffi_types(dict_fn["return_type"].text.decode("utf8"))
        nodes_para = [n for n in dict_fn["parameters"].children if n.type == "parameter"]
        parameters = "(" + ", ".join([normalize_ffi_types(n.text.decode("utf8")) for n in nodes_para]) + ")"
        # Argument names only (first child of each parameter node), used to
        # forward the call through the function pointer.
        parameters_called = ", ".join([n.children[0].text.decode("utf8") for n in nodes_para])
        part_dyload_struct = f"""
    {visibility_modifier} {identifier}: Option<unsafe extern "C" fn{parameters}{return_type_string}>,
""".strip()
        part_dyload_initializer = f"""
    {identifier}: get_symbol(&libs, b"{identifier}\\0").map(|sym| *sym),
""".strip()
        part_dyload_compatible = f"""
{visibility_modifier} unsafe fn {identifier}{parameters}{return_type_string} {{
    dyload_lib().{identifier}.unwrap()({parameters_called})
}}
""".strip()
        token_dyload_struct += part_dyload_struct + "\n"
        token_dyload_initializer += part_dyload_initializer + "\n"
        token_dyload_compatible += part_dyload_compatible + "\n\n"
    # Strip the extern "C" blocks from the base file, leaving only types and
    # constants.  Fix: accumulate the removals instead of restarting from the
    # full parse text on every iteration — the previous loop kept only the
    # LAST extern block's removal when bindgen emitted more than one block
    # (masked today by --merge-extern-blocks, but latent).  Removal must work
    # on the parse tree's text, since dyload_parse_file rewrote
    # `unsafe extern "C"` to `extern "C"` before parsing.
    if parsed_ffi:
        token_ffi_base = parsed.root_node.text.decode("utf8")
        for node_extern in parsed_ffi:
            token_ffi_base = token_ffi_base.replace(node_extern.text.decode("utf8"), "")
    # The shared import line moves into the per-file preludes below.
    import_pattern = r'use core::ffi::\{c_char, c_int\};'
    token_ffi_base = re.sub(import_pattern, '', token_ffi_base)
    output_ffi_base = f"""//! Base types and imports for FFI.
//!
//! This file is generated automatically.
#![allow(non_camel_case_types)]
{token_ffi_base}
"""
    output_dyload_struct = f"""//! Library struct definition for dynamic loading.
//!
//! This file is generated automatically.
//!
//! Note: For dynamic loading, API version features are ignored.
//! All functions are available at runtime. Runtime panic occurs if a function
//! is not found in the loaded library.
use super::*;
use core::ffi::{{c_char, c_int}};
pub struct DyLoadLib {{
    pub __libraries: Vec<libloading::Library>,
    pub __libraries_path: Vec<String>,
    pub __error: Option<String>,
{token_dyload_struct}
}}
"""
    output_dyload_initializer = f"""//! Library initializer implementation for dynamic loading.
//!
//! This file is generated automatically.
use super::*;
use libloading::{{Library, Symbol}};
unsafe fn get_symbol<'f, F>(libs: &'f [Library], name: &[u8]) -> Option<Symbol<'f, F>> {{
    libs.iter().find_map(|lib| lib.get::<F>(name).ok())
}}
impl DyLoadLib {{
    pub unsafe fn new(libs: Vec<libloading::Library>, libs_path: Vec<String>) -> DyLoadLib {{
        let mut result = DyLoadLib {{
            __libraries: vec![], // dummy, set later
            __libraries_path: vec![], // dummy, set later
            __error: None,
{token_dyload_initializer}
        }};
        result.__libraries = libs;
        result.__libraries_path = libs_path;
        result
    }}
}}
"""
    output_dyload_compatible = f"""//! Compatible wrapper functions for dynamic loading.
//!
//! This file is generated automatically.
//!
//! Note: For dynamic loading, API version features are ignored.
//! All functions are available at runtime.
use super::*;
use core::ffi::{{c_char, c_int}};
{token_dyload_compatible}
"""
    return {
        "ffi_base": output_ffi_base,
        "dyload_struct": output_dyload_struct,
        "dyload_initializer": output_dyload_initializer,
        "dyload_compatible": output_dyload_compatible,
    }
def main():
    """Drive the full pipeline: copy header, run bindgen, emit Rust sources."""
    # Ensure the output directory layout exists.
    os.makedirs(path_header, exist_ok=True)
    os.makedirs(f"{path_out}/src", exist_ok=True)
    os.makedirs(f"{path_out}/src/ffi_dynamic", exist_ok=True)
    # Copy the C header(s) from the upstream repository checkout.
    for name in ["dftd4.h"]:
        shutil.copy(f"{path_repo}/include/{name}", f"{path_header}")
    header_path = f"{path_header}/dftd4.h"
    with open(header_path, "r") as f:
        header_content = f.read()
    # Map each exported function to the API version suffix it carries.
    version_map = parse_api_versions(header_content)
    # Run bindgen inside a fresh scratch copy of the header directory.
    shutil.rmtree(path_temp, ignore_errors=True)
    shutil.copytree(path_header, path_temp)
    os.chdir(path_temp)
    subprocess.run([
        "bindgen",
        "dftd4.h", "-o", "ffi.rs",
        "--allowlist-file", "dftd4.h",
        "--no-layout-tests",
        "--use-core",
        "--merge-extern-blocks",
    ])
    with open("ffi.rs", "r") as f:
        bindgen_output = f.read()
    # Statically-linked bindings, with per-version cargo feature gates.
    static_ffi = generate_static_ffi(bindgen_output, version_map)
    with open(f"{path_out}/src/ffi_static.rs", "w") as f:
        f.write(static_ffi)
    # Dynamic-loading (libloading-based) bindings, split over four files.
    dyload_files = dyload_main(bindgen_output)
    with open(f"{path_out}/src/ffi_dynamic/ffi_base.rs", "w") as f:
        f.write(dyload_files["ffi_base"])
    with open(f"{path_out}/src/ffi_dynamic/dyload_struct.rs", "w") as f:
        f.write(dyload_files["dyload_struct"])
    with open(f"{path_out}/src/ffi_dynamic/dyload_initializer.rs", "w") as f:
        f.write(dyload_files["dyload_initializer"])
    with open(f"{path_out}/src/ffi_dynamic/dyload_compatible.rs", "w") as f:
        f.write(dyload_files["dyload_compatible"])
    # Normalize formatting of all generated Rust with cargo fmt.
    os.chdir(path_out)
    subprocess.run(["cargo", "fmt"])
    print(f"Generated:")
    print(f" - {path_out}/src/ffi_static.rs")
    print(f" - {path_out}/src/ffi_dynamic/")
# Script entry point.
if __name__ == "__main__":
    main()