#[cfg(feature = "sbom")]
use crate::auditwheel::get_sysroot_path;
use crate::auditwheel::{AuditWheelMode, get_policy_and_libs, patchelf, relpath};
use crate::auditwheel::{PlatformTag, Policy};
use crate::binding_generator::{
BinBindingGenerator, CffiBindingGenerator, Pyo3BindingGenerator, UniFfiBindingGenerator,
generate_binding,
};
use crate::bridge::Abi3Version;
use crate::build_options::CargoOptions;
use crate::compile::{CompileTarget, warn_missing_py_init};
use crate::compression::CompressionOptions;
#[cfg(feature = "sbom")]
use crate::module_writer::ModuleWriter;
use crate::module_writer::{WheelWriter, add_data, write_pth};
use crate::project_layout::ProjectLayout;
use crate::sbom::{SbomData, generate_sbom_data, write_sboms};
use crate::source_distribution::source_distribution;
use crate::target::validate_wheel_filename_for_pypi;
use crate::target::{Arch, Os};
use crate::{
BridgeModel, BuildArtifact, Metadata24, PyProjectToml, PythonInterpreter, Target,
VirtualWriter, compile, pyproject_toml::Format, pyproject_toml::SbomConfig,
};
use anyhow::{Context, Result, anyhow, bail};
use cargo_metadata::CrateType;
use cargo_metadata::Metadata;
use fs_err as fs;
use ignore::overrides::{Override, OverrideBuilder};
use lddtree::Library;
use normpath::PathExt;
use platform_info::*;
use regex::Regex;
use sha2::{Digest, Sha256};
use std::borrow::Borrow;
use std::collections::{BTreeMap, HashMap, HashSet};
use std::env;
use std::io;
use std::path::{Path, PathBuf};
use tracing::instrument;
use zip::DateTime;
/// Extracts a source distribution tarball (`.tar.gz`) into a fresh temporary
/// directory and locates the `Cargo.toml` of the contained project.
///
/// Returns the temporary directory guard (keeping it alive keeps the files on
/// disk) together with the resolved `Cargo.toml` path. Fails when the archive
/// cannot be read, does not contain exactly one top-level directory, or no
/// `Cargo.toml` can be found.
pub fn unpack_sdist(sdist_path: &Path) -> Result<(tempfile::TempDir, PathBuf)> {
    let tmp = tempfile::tempdir().context("Failed to create temporary directory")?;
    let file = fs::File::open(sdist_path)
        .with_context(|| format!("Failed to open sdist {}", sdist_path.display()))?;
    tar::Archive::new(flate2::read::GzDecoder::new(file))
        .unpack(tmp.path())
        .context("Failed to unpack source distribution")?;
    // A well-formed sdist unpacks to a single top-level directory.
    let dirs: Vec<_> = fs::read_dir(tmp.path())
        .context("Failed to read unpacked sdist directory")?
        .filter_map(Result::ok)
        .filter(|entry| entry.file_type().map(|t| t.is_dir()).unwrap_or(false))
        .collect();
    if dirs.len() != 1 {
        bail!(
            "Expected exactly one top-level directory in sdist, found {}",
            dirs.len()
        );
    }
    let top_dir = dirs[0].path();
    // pyproject.toml may point at a non-default manifest location.
    let pyproject_file = top_dir.join("pyproject.toml");
    let cargo_toml = if pyproject_file.is_file() {
        match PyProjectToml::new(&pyproject_file)?.manifest_path() {
            Some(manifest_path) => top_dir.join(manifest_path),
            None => top_dir.join("Cargo.toml"),
        }
    } else {
        top_dir.join("Cargo.toml")
    };
    if !cargo_toml.exists() {
        bail!(
            "Cargo.toml not found in unpacked sdist at {}",
            cargo_toml.display()
        );
    }
    Ok((tmp, cargo_toml))
}
/// Everything required to build a wheel or source distribution for a project:
/// resolved project layout, python metadata, target information and all
/// user-supplied build options.
#[derive(Clone)]
pub struct BuildContext {
    /// The platform/OS/arch we are building for
    pub target: Target,
    /// The cargo targets (with their bridge models) to compile
    pub compile_targets: Vec<CompileTarget>,
    /// Layout of the python package and rust crate within the project
    pub project_layout: ProjectLayout,
    /// Path where pyproject.toml is expected (the file may not exist;
    /// see `pyproject_toml`)
    pub pyproject_toml_path: PathBuf,
    /// Parsed pyproject.toml, if present
    pub pyproject_toml: Option<PyProjectToml>,
    /// Python package metadata
    pub metadata24: Metadata24,
    /// Name of the cargo crate
    pub crate_name: String,
    /// Name of the python module/extension (may be dotted, e.g. `pkg.mod`)
    pub module_name: String,
    /// Path to the crate's Cargo.toml
    pub manifest_path: PathBuf,
    /// Cargo target directory
    pub target_dir: PathBuf,
    /// Directory the wheels and sdists are written to
    pub out: PathBuf,
    /// Whether to strip the artifacts
    pub strip: bool,
    /// Auditwheel mode (skip / check / repair)
    pub auditwheel: AuditWheelMode,
    #[cfg(feature = "zig")]
    /// Whether to link with zig
    pub zig: bool,
    /// Requested manylinux/musllinux platform tags
    pub platform_tag: Vec<PlatformTag>,
    /// The python interpreters to build for
    pub interpreter: Vec<PythonInterpreter>,
    /// Cargo workspace metadata
    pub cargo_metadata: Metadata,
    /// Whether to build a macOS universal2 wheel
    pub universal2: bool,
    /// Whether this is an editable (develop-mode) build
    pub editable: bool,
    /// Extra options passed through to cargo
    pub cargo_options: CargoOptions,
    /// Zip compression settings for the wheel
    pub compression: CompressionOptions,
    /// Whether to validate wheel filenames against PyPI's rules
    pub pypi_validation: bool,
    /// SBOM generation configuration, if any
    pub sbom: Option<SbomConfig>,
    /// Whether to include the import lib (presumably Windows-specific —
    /// TODO confirm against the code that reads this flag)
    pub include_import_lib: bool,
}
/// A built distribution: (path on disk, python tag such as `cp312` or `py3`).
pub type BuiltWheelMetadata = (PathBuf, String);
impl BuildContext {
/// Builds all wheels for the configured bridge model and interpreters.
///
/// Dispatch:
/// * `Bin` without bindings: one interpreter-independent wheel
/// * `Bin` with bindings: one wheel per interpreter
/// * `PyO3`: one abi3 wheel for all stable-API interpreters plus
///   version-specific wheels for the rest; without abi3, one wheel per
///   interpreter
/// * `Cffi` / `UniFfi`: one `py3` wheel
///
/// When `pypi_validation` is set, every produced filename is checked against
/// PyPI's rules and the whole build fails on the first offender.
#[instrument(skip_all)]
pub fn build_wheels(&self) -> Result<Vec<BuiltWheelMetadata>> {
    use itertools::Itertools;
    fs::create_dir_all(&self.out)
        .context("Failed to create the target directory for the wheels")?;
    let sbom_data = generate_sbom_data(self)?;
    let wheels = match self.bridge() {
        BridgeModel::Bin(None) => self.build_bin_wheel(None, &sbom_data)?,
        BridgeModel::Bin(Some(..)) => self.build_bin_wheels(&self.interpreter, &sbom_data)?,
        BridgeModel::PyO3(crate::PyO3 { abi3, .. }) => match abi3 {
            Some(Abi3Version::Version(major, minor)) => {
                // Split interpreters by stable-ABI support: one abi3 wheel
                // covers all stable-API interpreters, the rest fall back to
                // version-specific wheels.
                let abi3_interps: Vec<_> = self
                    .interpreter
                    .iter()
                    .filter(|interp| interp.has_stable_api())
                    .cloned()
                    .collect();
                let non_abi3_interps: Vec<_> = self
                    .interpreter
                    .iter()
                    .filter(|interp| !interp.has_stable_api())
                    .cloned()
                    .collect();
                let mut built_wheels = Vec::new();
                if !abi3_interps.is_empty() {
                    built_wheels.extend(self.build_pyo3_wheel_abi3(
                        &abi3_interps,
                        *major,
                        *minor,
                        &sbom_data,
                    )?);
                }
                if !non_abi3_interps.is_empty() {
                    let interp_names: HashSet<_> = non_abi3_interps
                        .iter()
                        .map(|interp| interp.to_string())
                        .collect();
                    eprintln!(
                        "⚠️ Warning: {} does not yet support abi3 so the build artifacts will be version-specific.",
                        interp_names.iter().join(", ")
                    );
                    built_wheels.extend(self.build_pyo3_wheels(&non_abi3_interps, &sbom_data)?);
                }
                built_wheels
            }
            Some(Abi3Version::CurrentPython) => {
                // Same split as above, but the abi3 minimum version is taken
                // from the first stable-API interpreter rather than from the
                // project configuration.
                let abi3_interps: Vec<_> = self
                    .interpreter
                    .iter()
                    .filter(|interp| interp.has_stable_api())
                    .cloned()
                    .collect();
                let non_abi3_interps: Vec<_> = self
                    .interpreter
                    .iter()
                    .filter(|interp| !interp.has_stable_api())
                    .cloned()
                    .collect();
                let mut built_wheels = Vec::new();
                if !abi3_interps.is_empty() {
                    let interp = abi3_interps.first().unwrap();
                    built_wheels.extend(self.build_pyo3_wheel_abi3(
                        &abi3_interps,
                        interp.major as u8,
                        interp.minor as u8,
                        &sbom_data,
                    )?);
                }
                if !non_abi3_interps.is_empty() {
                    let interp_names: HashSet<_> = non_abi3_interps
                        .iter()
                        .map(|interp| interp.to_string())
                        .collect();
                    eprintln!(
                        "⚠️ Warning: {} does not yet support abi3 so the build artifacts will be version-specific.",
                        interp_names.iter().join(", ")
                    );
                    built_wheels.extend(self.build_pyo3_wheels(&non_abi3_interps, &sbom_data)?);
                }
                built_wheels
            }
            None => self.build_pyo3_wheels(&self.interpreter, &sbom_data)?,
        },
        BridgeModel::Cffi => self.build_cffi_wheel(&sbom_data)?,
        BridgeModel::UniFfi => self.build_uniffi_wheel(&sbom_data)?,
    };
    // Optionally reject filenames that PyPI would refuse at upload time.
    if self.pypi_validation {
        for wheel in &wheels {
            let filename = wheel
                .0
                .file_name()
                .and_then(|name| name.to_str())
                .ok_or_else(|| anyhow!("Invalid wheel filename: {:?}", wheel.0))?;
            if let Err(error) = validate_wheel_filename_for_pypi(filename) {
                bail!("PyPI validation failed: {}", error);
            }
        }
    }
    Ok(wheels)
}
/// The bridge model of the primary compile target.
///
/// NOTE(review): indexes `compile_targets[0]` and thus panics if the vector
/// is empty — confirm construction sites guarantee at least one target.
pub fn bridge(&self) -> &BridgeModel {
    &self.compile_targets[0].bridge_model
}
/// Builds a source distribution when the project has a pyproject.toml.
///
/// Returns `None` when there is no pyproject.toml; otherwise the written
/// sdist path together with the pseudo tag `"source"`.
pub fn build_source_distribution(&self) -> Result<Option<BuiltWheelMetadata>> {
    fs::create_dir_all(&self.out)
        .context("Failed to create the target directory for the source distribution")?;
    // Without a pyproject.toml there is nothing to package.
    let Some(pyproject) = self.pyproject_toml.as_ref() else {
        return Ok(None);
    };
    let sdist_path = source_distribution(self, pyproject, self.excludes(Format::Sdist)?)
        .context("Failed to build source distribution")?;
    Ok(Some((sdist_path, "source".to_string())))
}
/// Derives the wheel tag strings implied by the bridge model.
///
/// NOTE(review): the platform part is always computed against
/// `PlatformTag::Linux` here — confirm against callers what this is used for.
pub fn tags_from_bridge(&self) -> Result<Vec<String>> {
    let tags = match self.bridge() {
        // Native-extension bridges: the tag depends on abi3 support.
        BridgeModel::PyO3(bindings) | BridgeModel::Bin(Some(bindings)) => match bindings.abi3 {
            Some(Abi3Version::Version(major, minor)) => {
                let platform = self.get_platform_tag(&[PlatformTag::Linux])?;
                vec![format!("cp{major}{minor}-abi3-{platform}")]
            }
            Some(Abi3Version::CurrentPython) => {
                // abi3 pinned to the version of the first interpreter.
                let interp = &self.interpreter[0];
                let platform = self.get_platform_tag(&[PlatformTag::Linux])?;
                vec![format!(
                    "cp{major}{minor}-abi3-{platform}",
                    major = interp.major,
                    minor = interp.minor
                )]
            }
            None => {
                // Version-specific tag of the first interpreter.
                vec![self.interpreter[0].get_tag(self, &[PlatformTag::Linux])?]
            }
        },
        // Interpreter-independent bridges get universal py3 tags.
        BridgeModel::Bin(None) | BridgeModel::Cffi | BridgeModel::UniFfi => {
            self.get_universal_tags(&[PlatformTag::Linux])?.1
        }
    };
    Ok(tags)
}
/// Runs the auditwheel policy check on one build artifact.
///
/// Returns the determined policy together with the external shared libraries
/// the artifact links against. Short-circuits with defaults when auditing is
/// skipped, or when no explicit tag was given and the Linux interpreter
/// cannot use portable wheels.
fn auditwheel(
    &self,
    artifact: &BuildArtifact,
    platform_tag: &[PlatformTag],
    python_interpreter: Option<&PythonInterpreter>,
) -> Result<(Policy, Vec<Library>)> {
    if matches!(self.auditwheel, AuditWheelMode::Skip) {
        return Ok((Policy::default(), Vec::new()));
    }
    if let Some(python_interpreter) = python_interpreter
        && platform_tag.is_empty()
        && self.target.is_linux()
        && !python_interpreter.support_portable_wheels()
    {
        eprintln!(
            "🐍 Skipping auditwheel because {python_interpreter} does not support manylinux/musllinux wheels"
        );
        return Ok((Policy::default(), Vec::new()));
    }
    // Separate musllinux tags from the rest; each group is sorted so the
    // first element is the one the policy check uses.
    let mut musllinux: Vec<_> = platform_tag
        .iter()
        .filter(|tag| tag.is_musllinux())
        .copied()
        .collect();
    musllinux.sort();
    let mut others: Vec<_> = platform_tag
        .iter()
        .filter(|tag| !tag.is_musllinux())
        .copied()
        .collect();
    others.sort();
    // Only standalone binaries are allowed to link libpython.
    let allow_linking_libpython = self.bridge().is_bin();
    if self.bridge().is_bin() && !musllinux.is_empty() {
        return get_policy_and_libs(
            artifact,
            Some(musllinux[0]),
            &self.target,
            &self.manifest_path,
            allow_linking_libpython,
        );
    }
    // Prefer a non-musllinux tag, fall back to the first musllinux one.
    let tag = others.first().or_else(|| musllinux.first()).copied();
    get_policy_and_libs(
        artifact,
        tag,
        &self.target,
        &self.manifest_path,
        allow_linking_libpython,
    )
}
/// For editable installs on Linux, extends each artifact's rpath in place
/// with the paths of its linked native libraries; failures to patch are
/// reported as warnings, not errors.
fn add_rpath<A>(&self, artifacts: &[A]) -> Result<()>
where
    A: Borrow<BuildArtifact>,
{
    // Only relevant when the artifacts stay in place (editable, Linux).
    if !self.editable || !self.target.is_linux() || artifacts.is_empty() {
        return Ok(());
    }
    for artifact in artifacts {
        let artifact = artifact.borrow();
        if artifact.linked_paths.is_empty() {
            continue;
        }
        let existing = patchelf::get_rpath(&artifact.path)?;
        let mut rpaths = existing.clone();
        // Append only the linked paths not already present in the rpath.
        rpaths.extend(
            artifact
                .linked_paths
                .iter()
                .filter(|path| !existing.contains(path))
                .cloned(),
        );
        if let Err(err) = patchelf::set_rpath(&artifact.path, &rpaths.join(":")) {
            eprintln!(
                "⚠️ Warning: Failed to set rpath for {}: {}",
                artifact.path.display(),
                err
            );
        }
    }
    Ok(())
}
/// Copies external shared libraries into the wheel's `<name>.libs` directory
/// and patches the ELF metadata of both the copied libraries and the
/// artifacts so everything resolves from inside the wheel ("wheel repair").
///
/// Editable installs bundle nothing and instead extend the artifacts' rpaths
/// in place (see `add_rpath`). In `--auditwheel=check` mode the presence of
/// any external library is an error.
fn add_external_libs<A>(
    &self,
    writer: &mut VirtualWriter<WheelWriter>,
    artifacts: &[A],
    ext_libs: &[Vec<Library>],
) -> Result<()>
where
    A: Borrow<BuildArtifact>,
{
    if self.editable {
        return self.add_rpath(artifacts);
    }
    if ext_libs.iter().all(|libs| libs.is_empty()) {
        return Ok(());
    }
    eprintln!("🔗 External shared libraries to be copied into the wheel:");
    for (artifact, artifact_ext_libs) in artifacts.iter().zip(ext_libs) {
        let artifact = artifact.borrow();
        if artifact_ext_libs.is_empty() {
            continue;
        }
        eprintln!(" {} requires:", artifact.path.display());
        for lib in artifact_ext_libs {
            if let Some(path) = lib.realpath.as_ref() {
                eprintln!(" {} => {}", lib.name, path.display());
            } else {
                eprintln!(" {} => not found", lib.name);
            }
        }
    }
    // In check mode, needing repairs is itself the failure.
    if matches!(self.auditwheel, AuditWheelMode::Check) {
        bail!(
            "Your library is not manylinux/musllinux compliant because it requires copying the above libraries. \
Re-run with `--auditwheel=repair` to copy them."
        );
    }
    patchelf::verify_patchelf()?;
    let libs_dir = PathBuf::from(format!(
        "{}.libs",
        self.metadata24.get_distribution_escaped()
    ));
    let temp_dir = writer.temp_dir()?;
    // soname_map: original name -> (mangled soname, staged path, DT_NEEDED list)
    let mut soname_map = BTreeMap::new();
    let mut libs_copied = HashSet::new();
    for lib in ext_libs.iter().flatten() {
        let lib_path = lib.realpath.clone().with_context(|| {
            format!(
                "Cannot repair wheel, because required library {} could not be located.",
                lib.path.display()
            )
        })?;
        let short_hash = &hash_file(&lib_path)?[..8];
        // Mangle the soname with a content hash so different builds of the
        // same library can coexist. FIX: a library name without a dot used
        // to panic here via `unwrap()`; report it as an error instead.
        let (file_stem, file_ext) = lib.name.split_once('.').with_context(|| {
            format!("Library name `{}` is missing a file extension", lib.name)
        })?;
        let new_soname = if !file_stem.ends_with(&format!("-{short_hash}")) {
            format!("{file_stem}-{short_hash}.{file_ext}")
        } else {
            // Already mangled with this hash — keep the name stable.
            format!("{file_stem}.{file_ext}")
        };
        let dest_path = temp_dir.path().join(&new_soname);
        fs::copy(&lib_path, &dest_path)?;
        libs_copied.insert(lib_path);
        // The staged copy must be writable for patchelf to modify it.
        let mut perms = fs::metadata(&dest_path)?.permissions();
        #[allow(clippy::permissions_set_readonly_false)]
        perms.set_readonly(false);
        fs::set_permissions(&dest_path, perms)?;
        patchelf::set_soname(&dest_path, &new_soname)?;
        if !lib.rpath.is_empty() || !lib.runpath.is_empty() {
            patchelf::set_rpath(&dest_path, &libs_dir)?;
        }
        soname_map.insert(
            lib.name.clone(),
            (new_soname.clone(), dest_path.clone(), lib.needed.clone()),
        );
    }
    // Rewrite DT_NEEDED entries of the artifacts to the mangled sonames.
    for (artifact, artifact_ext_libs) in artifacts.iter().zip(ext_libs) {
        let artifact = artifact.borrow();
        let artifact_deps: HashSet<_> = artifact_ext_libs.iter().map(|lib| &lib.name).collect();
        let replacements = soname_map
            .iter()
            .filter_map(|(k, v)| {
                if artifact_deps.contains(k) {
                    Some((k, v.0.clone()))
                } else {
                    None
                }
            })
            .collect::<Vec<_>>();
        if !replacements.is_empty() {
            patchelf::replace_needed(&artifact.path, &replacements[..])?;
        }
    }
    // Rewrite dependencies between the copied libraries themselves, then add
    // each staged library to the wheel.
    for (new_soname, path, needed) in soname_map.values() {
        let mut replacements = Vec::new();
        for n in needed {
            if soname_map.contains_key(n) {
                replacements.push((n, soname_map[n].0.clone()));
            }
        }
        if !replacements.is_empty() {
            patchelf::replace_needed(path, &replacements[..])?;
        }
        writer.add_file_force(libs_dir.join(new_soname), path, true)?;
    }
    let mut grafted_paths: Vec<PathBuf> = libs_copied.into_iter().collect();
    grafted_paths.sort();
    eprintln!(
        "🖨 Copied external shared libraries to package {} directory.",
        libs_dir.display()
    );
    // Record the grafted libraries in an auditwheel SBOM, when enabled.
    #[cfg(feature = "sbom")]
    {
        let auditwheel_sbom_enabled = self
            .sbom
            .as_ref()
            .and_then(|c| c.auditwheel)
            .unwrap_or(true);
        if auditwheel_sbom_enabled {
            let sysroot = get_sysroot_path(&self.target).unwrap_or_else(|_| PathBuf::from("/"));
            if let Some(sbom_json) = crate::auditwheel::sbom::create_auditwheel_sbom(
                &self.metadata24.name,
                &self.metadata24.version.to_string(),
                &grafted_paths,
                &sysroot,
            ) {
                let sbom_path = self
                    .metadata24
                    .get_dist_info_dir()
                    .join("sboms/auditwheel.cdx.json");
                writer.add_bytes(&sbom_path, None, sbom_json, false)?;
            }
        }
    }
    // Directory the artifacts live in inside the wheel, used to compute the
    // $ORIGIN-relative rpath to the libs directory.
    let artifact_dir = match self.bridge() {
        BridgeModel::Cffi => self.module_name.split(".").collect::<PathBuf>(),
        _ if self.module_name.contains(".") => {
            let mut path = self.module_name.split(".").collect::<PathBuf>();
            path.pop();
            path
        }
        _ => PathBuf::from(&self.module_name),
    };
    for artifact in artifacts {
        let artifact = artifact.borrow();
        let mut new_rpaths = patchelf::get_rpath(&artifact.path)?;
        let new_rpath = Path::new("$ORIGIN").join(relpath(&libs_dir, &artifact_dir));
        new_rpaths.push(new_rpath.to_str().unwrap().to_string());
        let new_rpath = new_rpaths.join(":");
        patchelf::set_rpath(&artifact.path, &new_rpath)?;
    }
    Ok(())
}
/// For editable installs, writes the `.pth` file pointing at the project
/// sources into the wheel; no-op otherwise.
fn add_pth(&self, writer: &mut VirtualWriter<WheelWriter>) -> Result<()> {
    if !self.editable {
        return Ok(());
    }
    write_pth(writer, &self.project_layout, &self.metadata24)?;
    Ok(())
}
/// Builds the exclude globs for the given output format.
///
/// Combines user-configured excludes from pyproject.toml with, for sdists,
/// an automatic exclude for previously built tarballs in the output
/// directory (so old sdists are never packed into a new one).
fn excludes(&self, format: Format) -> Result<Override> {
    // Globs are resolved relative to the project root: the directory of
    // pyproject.toml, falling back to the manifest's directory.
    let project_dir = match self.pyproject_toml_path.normalize() {
        Ok(pyproject_toml_path) => pyproject_toml_path.into_path_buf(),
        Err(_) => self.manifest_path.normalize()?.into_path_buf(),
    };
    let mut excludes = OverrideBuilder::new(project_dir.parent().unwrap());
    if let Some(pyproject) = self.pyproject_toml.as_ref()
        && let Some(glob_patterns) = &pyproject.exclude()
    {
        // Only keep the patterns that target this format (wheel vs sdist).
        for glob in glob_patterns
            .iter()
            .filter_map(|glob_pattern| glob_pattern.targets(format))
        {
            excludes.add(glob)?;
        }
    }
    if matches!(format, Format::Sdist) {
        let glob_pattern = format!(
            "{}{}{}-*.tar.gz",
            self.out.display(),
            std::path::MAIN_SEPARATOR,
            &self.metadata24.get_distribution_escaped(),
        );
        excludes.add(&glob_pattern)?;
    }
    Ok(excludes.build()?)
}
/// Computes the platform portion of the wheel tag (e.g.
/// `manylinux_2_17_x86_64` or `macosx_11_0_arm64`) for the build target.
///
/// The `_PYTHON_HOST_PLATFORM` environment variable overrides everything,
/// mirroring CPython's cross-compilation behavior.
pub fn get_platform_tag(&self, platform_tags: &[PlatformTag]) -> Result<String> {
    if let Ok(host_platform) = env::var("_PYTHON_HOST_PLATFORM") {
        let override_platform = host_platform.replace(['.', '-'], "_");
        eprintln!(
            "🚉 Overriding platform tag from _PYTHON_HOST_PLATFORM environment variable as {override_platform}."
        );
        return Ok(override_platform);
    }
    let target = &self.target;
    let tag = match (&target.target_os(), &target.target_arch()) {
        (Os::Windows, Arch::X86) => "win32".to_string(),
        (Os::Windows, Arch::X86_64) => "win_amd64".to_string(),
        (Os::Windows, Arch::Aarch64) => "win_arm64".to_string(),
        (Os::Linux, _) => {
            let arch = target.get_platform_arch()?;
            if target.target_triple().contains("android") {
                // Android wheels use `android_<api-level>_<abi>` tags.
                let android_arch = match arch.as_str() {
                    "armv7l" => "armeabi_v7a",
                    "aarch64" => "arm64_v8a",
                    "i686" => "x86",
                    "x86_64" => "x86_64",
                    _ => bail!("Unsupported Android architecture: {}", arch),
                };
                let api_level = find_android_api_level(target.target_triple(), &self.manifest_path)?;
                format!("android_{}_{}", api_level, android_arch)
            } else {
                // One tag per requested manylinux/musllinux policy plus its
                // aliases, joined with `.` into a compressed tag set.
                let mut platform_tags = platform_tags.to_vec();
                platform_tags.sort();
                let mut tags = vec![];
                for platform_tag in platform_tags {
                    tags.push(format!("{platform_tag}_{arch}"));
                    for alias in platform_tag.aliases() {
                        tags.push(format!("{alias}_{arch}"));
                    }
                }
                tags.join(".")
            }
        }
        (Os::Macos, Arch::X86_64) | (Os::Macos, Arch::Aarch64) => {
            // Per-arch deployment targets may be pinned in pyproject.toml,
            // otherwise MACOSX_DEPLOYMENT_TARGET / rustc defaults apply.
            let ((x86_64_major, x86_64_minor), (arm64_major, arm64_minor)) = macosx_deployment_target(env::var("MACOSX_DEPLOYMENT_TARGET").ok().as_deref(), self.universal2)?;
            let x86_64_tag = if let Some(deployment_target) = self.pyproject_toml.as_ref().and_then(|x| x.target_config("x86_64-apple-darwin")).and_then(|config| config.macos_deployment_target.as_ref()) {
                deployment_target.replace('.', "_")
            } else {
                format!("{x86_64_major}_{x86_64_minor}")
            };
            let arm64_tag = if let Some(deployment_target) = self.pyproject_toml.as_ref().and_then(|x| x.target_config("aarch64-apple-darwin")).and_then(|config| config.macos_deployment_target.as_ref()) {
                deployment_target.replace('.', "_")
            } else {
                format!("{arm64_major}_{arm64_minor}")
            };
            if self.universal2 {
                format!(
                    "macosx_{x86_64_tag}_x86_64.macosx_{arm64_tag}_arm64.macosx_{x86_64_tag}_universal2"
                )
            } else if target.target_arch() == Arch::Aarch64 {
                format!("macosx_{arm64_tag}_arm64")
            } else {
                format!("macosx_{x86_64_tag}_x86_64")
            }
        }
        (Os::Ios, Arch::X86_64) | (Os::Ios, Arch::Aarch64) => {
            let arch = if target.target_arch() == Arch::Aarch64 {
                "arm64"
            } else {
                "x86_64"
            };
            // x86_64 iOS is always the simulator; arm64 only with a `-sim`
            // target triple suffix.
            let abi = if target.target_arch() == Arch::X86_64 || self.target.target_triple().ends_with("-sim") {
                "iphonesimulator"
            } else {
                "iphoneos"
            };
            let (min_sdk_major, min_sdk_minor) = iphoneos_deployment_target(env::var("IPHONEOS_DEPLOYMENT_TARGET").ok().as_deref())?;
            format!("ios_{min_sdk_major}_{min_sdk_minor}_{arch}_{abi}")
        }
        | (Os::FreeBsd, _) => {
            format!(
                "{}_{}_{}",
                target.target_os().to_string().to_ascii_lowercase(),
                target.get_platform_release()?.to_ascii_lowercase(),
                target.target_arch().machine(),
            )
        }
        | (Os::NetBsd, _)
        | (Os::OpenBsd, _) => {
            let release = target.get_platform_release()?;
            format!(
                "{}_{}_{}",
                target.target_os().to_string().to_ascii_lowercase(),
                release,
                target.target_arch().machine(),
            )
        }
        (Os::Dragonfly, Arch::X86_64)
        | (Os::Haiku, Arch::X86_64) => {
            let release = target.get_platform_release()?;
            format!(
                "{}_{}_{}",
                target.target_os().to_string().to_ascii_lowercase(),
                release.to_ascii_lowercase(),
                "x86_64"
            )
        }
        (Os::Emscripten, Arch::Wasm32) => {
            let release = emscripten_version()?.replace(['.', '-'], "_");
            format!("emscripten_{release}_wasm32")
        }
        (Os::Wasi, Arch::Wasm32) => {
            "any".to_string()
        }
        (Os::Cygwin, _) => {
            format!(
                "{}_{}",
                target.target_os().to_string().to_ascii_lowercase(),
                target.get_platform_arch()?,
            )
        }
        (_, _) => {
            // Fallback: derive the tag from live platform information.
            let info = PlatformInfo::new()
                .map_err(|e| anyhow!("Failed to fetch platform information: {e}"))?;
            let mut release = info.release().to_string_lossy().replace(['.', '-'], "_");
            let mut machine = info.machine().to_string_lossy().replace([' ', '/'], "_");
            let mut os = target.target_os().to_string().to_ascii_lowercase();
            if target.target_os() == Os::Solaris || target.target_os() == Os::Illumos {
                // NOTE(review): maps SunOS 5.x to a `solaris_2_x` style tag
                // (major - 3) with a `_64bit` machine suffix — confirm this
                // matches CPython's platform tag on these systems.
                if let Some((major, other)) = release.split_once('_') {
                    let major_ver: u64 = major.parse().context("illumos major version is not a number")?;
                    if major_ver >= 5 {
                        os = "solaris".to_string();
                        release = format!("{}_{}", major_ver - 3, other);
                        machine = format!("{machine}_64bit");
                    }
                }
            }
            format!(
                "{os}_{release}_{machine}"
            )
        }
    };
    Ok(tag)
}
/// Returns the interpreter-independent `py3-none-<platform>` tag(s).
pub fn get_py3_tags(&self, platform_tags: &[PlatformTag]) -> Result<Vec<String>> {
    let platform = self.get_platform_tag(platform_tags)?;
    Ok(vec![format!("py3-none-{platform}")])
}
/// Returns the universal wheel tag together with the tag list used in the
/// wheel metadata (both `py3-none-<platform>` based).
pub fn get_universal_tags(
    &self,
    platform_tags: &[PlatformTag],
) -> Result<(String, Vec<String>)> {
    let platform = self.get_platform_tag(platform_tags)?;
    let tag = format!("py3-none-{platform}");
    let tags = self.get_py3_tags(platform_tags)?;
    Ok((tag, tags))
}
/// Assembles an abi3 wheel (`cp{major}{min_minor}-abi3-<platform>`) from an
/// already compiled artifact; returns its path and python tag.
#[allow(clippy::too_many_arguments)]
fn write_pyo3_wheel_abi3(
    &self,
    artifact: BuildArtifact,
    platform_tags: &[PlatformTag],
    ext_libs: Vec<Library>,
    major: u8,
    min_minor: u8,
    sbom_data: &Option<SbomData>,
    out_dirs: &HashMap<String, PathBuf>,
) -> Result<BuiltWheelMetadata> {
    let platform = self.get_platform_tag(platform_tags)?;
    let tag = format!("cp{major}{min_minor}-abi3-{platform}");
    let file_options = self
        .compression
        .get_file_options()
        .last_modified_time(zip_mtime());
    let writer = WheelWriter::new(&tag, &self.out, &self.metadata24, file_options)?;
    let mut writer = VirtualWriter::new(writer, self.excludes(Format::Wheel)?);
    // Bundle/repair external shared libraries before adding the bindings.
    self.add_external_libs(&mut writer, &[&artifact], &[ext_libs])?;
    let mut generator =
        Pyo3BindingGenerator::new(true, self.interpreter.first(), writer.temp_dir()?)
            .context("Failed to initialize PyO3 binding generator")?;
    generate_binding(&mut writer, &mut generator, self, &[&artifact], out_dirs)
        .context("Failed to add the files to the wheel")?;
    self.add_pth(&mut writer)?;
    add_data(
        &mut writer,
        &self.metadata24,
        self.project_layout.data.as_deref(),
    )?;
    write_sboms(
        self,
        sbom_data.as_ref(),
        &mut writer,
        &self.metadata24.get_dist_info_dir(),
    )?;
    let wheel_path = writer.finish(
        &self.metadata24,
        &self.project_layout.project_root,
        std::slice::from_ref(&tag),
    )?;
    Ok((wheel_path, format!("cp{major}{min_minor}")))
}
/// Compiles the extension once and builds a single abi3 wheel covering
/// Python >= `major.min_minor` for all given interpreters.
pub fn build_pyo3_wheel_abi3(
    &self,
    interpreters: &[PythonInterpreter],
    major: u8,
    min_minor: u8,
    sbom_data: &Option<SbomData>,
) -> Result<Vec<BuiltWheelMetadata>> {
    let mut wheels = Vec::new();
    // abi3: one compilation serves every interpreter; the first one is used
    // where an interpreter is needed at all.
    let python_interpreter = interpreters.first();
    let (artifact, out_dirs) = self.compile_cdylib(
        python_interpreter,
        Some(&self.project_layout.extension_name),
    )?;
    let (policy, external_libs) =
        self.auditwheel(&artifact, &self.platform_tag, python_interpreter)?;
    // Without explicit tags, use the policy determined by auditwheel.
    let platform_tags = if self.platform_tag.is_empty() {
        vec![policy.platform_tag()]
    } else {
        self.platform_tag.clone()
    };
    let (wheel_path, tag) = self.write_pyo3_wheel_abi3(
        artifact,
        &platform_tags,
        external_libs,
        major,
        min_minor,
        sbom_data,
        &out_dirs,
    )?;
    eprintln!(
        "📦 Built wheel for abi3 Python ≥ {}.{} to {}",
        major,
        min_minor,
        wheel_path.display()
    );
    wheels.push((wheel_path, tag));
    Ok(wheels)
}
/// Assembles a version-specific pyo3 wheel for one interpreter from an
/// already compiled artifact; returns its path and python tag (e.g. `cp312`).
fn write_pyo3_wheel(
    &self,
    python_interpreter: &PythonInterpreter,
    artifact: BuildArtifact,
    platform_tags: &[PlatformTag],
    ext_libs: Vec<Library>,
    sbom_data: &Option<SbomData>,
    out_dirs: &HashMap<String, PathBuf>,
) -> Result<BuiltWheelMetadata> {
    let tag = python_interpreter.get_tag(self, platform_tags)?;
    let file_options = self
        .compression
        .get_file_options()
        .last_modified_time(zip_mtime());
    let writer = WheelWriter::new(&tag, &self.out, &self.metadata24, file_options)?;
    let mut writer = VirtualWriter::new(writer, self.excludes(Format::Wheel)?);
    // Bundle/repair external shared libraries before adding the bindings.
    self.add_external_libs(&mut writer, &[&artifact], &[ext_libs])?;
    let mut generator =
        Pyo3BindingGenerator::new(false, Some(python_interpreter), writer.temp_dir()?)
            .context("Failed to initialize PyO3 binding generator")?;
    generate_binding(&mut writer, &mut generator, self, &[&artifact], out_dirs)
        .context("Failed to add the files to the wheel")?;
    self.add_pth(&mut writer)?;
    add_data(
        &mut writer,
        &self.metadata24,
        self.project_layout.data.as_deref(),
    )?;
    write_sboms(
        self,
        sbom_data.as_ref(),
        &mut writer,
        &self.metadata24.get_dist_info_dir(),
    )?;
    let wheel_path = writer.finish(
        &self.metadata24,
        &self.project_layout.project_root,
        std::slice::from_ref(&tag),
    )?;
    Ok((
        wheel_path,
        format!("cp{}{}", python_interpreter.major, python_interpreter.minor),
    ))
}
/// Compiles and builds one version-specific pyo3 wheel per interpreter.
pub fn build_pyo3_wheels(
    &self,
    interpreters: &[PythonInterpreter],
    sbom_data: &Option<SbomData>,
) -> Result<Vec<BuiltWheelMetadata>> {
    let mut wheels = Vec::new();
    for python_interpreter in interpreters {
        // Each interpreter needs its own compilation of the extension.
        let (artifact, out_dirs) = self.compile_cdylib(
            Some(python_interpreter),
            Some(&self.project_layout.extension_name),
        )?;
        let (policy, external_libs) =
            self.auditwheel(&artifact, &self.platform_tag, Some(python_interpreter))?;
        // Without explicit tags, use the policy determined by auditwheel.
        let platform_tags = if self.platform_tag.is_empty() {
            vec![policy.platform_tag()]
        } else {
            self.platform_tag.clone()
        };
        let (wheel_path, tag) = self.write_pyo3_wheel(
            python_interpreter,
            artifact,
            &platform_tags,
            external_libs,
            sbom_data,
            &out_dirs,
        )?;
        eprintln!(
            "📦 Built wheel for {} {}.{}{} to {}",
            python_interpreter.interpreter_kind,
            python_interpreter.major,
            python_interpreter.minor,
            python_interpreter.abiflags,
            wheel_path.display()
        );
        wheels.push((wheel_path, tag));
    }
    Ok(wheels)
}
/// Runs the cargo build and returns the produced cdylib artifact together
/// with the `OUT_DIR` map of the build, failing with a hint when the crate
/// does not produce a cdylib at all.
pub fn compile_cdylib(
    &self,
    python_interpreter: Option<&PythonInterpreter>,
    extension_name: Option<&str>,
) -> Result<(BuildArtifact, HashMap<String, PathBuf>)> {
    let result = compile(self, python_interpreter, &self.compile_targets)
        .context("Failed to build a native library through cargo")?;
    let error_msg = "Cargo didn't build a cdylib. Did you miss crate-type = [\"cdylib\"] \
                     in the lib section of your Cargo.toml?";
    let mut artifact = result
        .artifacts
        .first()
        .context(error_msg)?
        .get(&CrateType::CDyLib)
        .cloned()
        .ok_or_else(|| anyhow!(error_msg))?;
    if let Some(extension_name) = extension_name {
        // Best effort: a missing PyInit_* symbol only produces a warning.
        let _ = warn_missing_py_init(&artifact.path, extension_name);
    }
    self.copy_artifact_for_repair(&mut artifact)?;
    Ok((artifact, result.out_dirs))
}
/// In repair mode (non-editable), copies the artifact into a maturin-owned
/// directory inside the cargo target dir and repoints `artifact.path` there,
/// so the subsequent in-place ELF patching never touches cargo's own output.
fn copy_artifact_for_repair(&self, artifact: &mut BuildArtifact) -> Result<()> {
    let needs_copy = !self.editable && matches!(self.auditwheel, AuditWheelMode::Repair);
    if !needs_copy {
        return Ok(());
    }
    let maturin_build = self.target_dir.join(env!("CARGO_PKG_NAME"));
    fs::create_dir_all(&maturin_build)?;
    let file_name = artifact.path.file_name().unwrap();
    let new_artifact_path = maturin_build.join(file_name);
    fs::copy(&artifact.path, &new_artifact_path)?;
    artifact.path = new_artifact_path.normalize()?.into_path_buf();
    Ok(())
}
/// Assembles a `py3` cffi wheel from an already compiled artifact; requires
/// at least one interpreter (used to generate the cffi bindings).
fn write_cffi_wheel(
    &self,
    artifact: BuildArtifact,
    platform_tags: &[PlatformTag],
    ext_libs: Vec<Library>,
    sbom_data: &Option<SbomData>,
    out_dirs: &HashMap<String, PathBuf>,
) -> Result<BuiltWheelMetadata> {
    let (tag, tags) = self.get_universal_tags(platform_tags)?;
    let file_options = self
        .compression
        .get_file_options()
        .last_modified_time(zip_mtime());
    let writer = WheelWriter::new(&tag, &self.out, &self.metadata24, file_options)?;
    let mut writer = VirtualWriter::new(writer, self.excludes(Format::Wheel)?);
    // Bundle/repair external shared libraries before adding the bindings.
    self.add_external_libs(&mut writer, &[&artifact], &[ext_libs])?;
    let interpreter = self.interpreter.first().ok_or_else(|| {
        anyhow!("A python interpreter is required for cffi builds but one was not provided")
    })?;
    let mut generator = CffiBindingGenerator::new(interpreter, writer.temp_dir()?)
        .context("Failed to initialize Cffi binding generator")?;
    generate_binding(&mut writer, &mut generator, self, &[&artifact], out_dirs)?;
    self.add_pth(&mut writer)?;
    add_data(
        &mut writer,
        &self.metadata24,
        self.project_layout.data.as_deref(),
    )?;
    write_sboms(
        self,
        sbom_data.as_ref(),
        &mut writer,
        &self.metadata24.get_dist_info_dir(),
    )?;
    let wheel_path =
        writer.finish(&self.metadata24, &self.project_layout.project_root, &tags)?;
    Ok((wheel_path, "py3".to_string()))
}
/// Compiles the crate and builds a single interpreter-independent cffi
/// wheel, warning when the package metadata lacks a `cffi` runtime
/// dependency.
pub fn build_cffi_wheel(
    &self,
    sbom_data: &Option<SbomData>,
) -> Result<Vec<BuiltWheelMetadata>> {
    let mut wheels = Vec::new();
    let (artifact, out_dirs) = self.compile_cdylib(None, None)?;
    let (policy, external_libs) = self.auditwheel(&artifact, &self.platform_tag, None)?;
    // Without explicit tags, use the policy determined by auditwheel.
    let platform_tags = if self.platform_tag.is_empty() {
        vec![policy.platform_tag()]
    } else {
        self.platform_tag.clone()
    };
    let (wheel_path, tag) = self.write_cffi_wheel(
        artifact,
        &platform_tags,
        external_libs,
        sbom_data,
        &out_dirs,
    )?;
    // The generated bindings need the cffi package at runtime.
    if !self
        .metadata24
        .requires_dist
        .iter()
        .any(|requirement| requirement.name.as_ref() == "cffi")
    {
        eprintln!(
            "⚠️ Warning: missing cffi package dependency, please add it to pyproject.toml. \
e.g: `dependencies = [\"cffi\"]`. This will become an error."
        );
    }
    eprintln!("📦 Built wheel to {}", wheel_path.display());
    wheels.push((wheel_path, tag));
    Ok(wheels)
}
/// Assembles a `py3` uniffi wheel from an already compiled artifact.
fn write_uniffi_wheel(
    &self,
    artifact: BuildArtifact,
    platform_tags: &[PlatformTag],
    ext_libs: Vec<Library>,
    sbom_data: &Option<SbomData>,
    out_dirs: &HashMap<String, PathBuf>,
) -> Result<BuiltWheelMetadata> {
    let (tag, tags) = self.get_universal_tags(platform_tags)?;
    let file_options = self
        .compression
        .get_file_options()
        .last_modified_time(zip_mtime());
    let writer = WheelWriter::new(&tag, &self.out, &self.metadata24, file_options)?;
    let mut writer = VirtualWriter::new(writer, self.excludes(Format::Wheel)?);
    // Bundle/repair external shared libraries before adding the bindings.
    self.add_external_libs(&mut writer, &[&artifact], &[ext_libs])?;
    let mut generator = UniFfiBindingGenerator::default();
    generate_binding(&mut writer, &mut generator, self, &[&artifact], out_dirs)?;
    self.add_pth(&mut writer)?;
    add_data(
        &mut writer,
        &self.metadata24,
        self.project_layout.data.as_deref(),
    )?;
    write_sboms(
        self,
        sbom_data.as_ref(),
        &mut writer,
        &self.metadata24.get_dist_info_dir(),
    )?;
    let wheel_path =
        writer.finish(&self.metadata24, &self.project_layout.project_root, &tags)?;
    Ok((wheel_path, "py3".to_string()))
}
/// Compiles the crate and builds a single interpreter-independent uniffi
/// wheel.
pub fn build_uniffi_wheel(
    &self,
    sbom_data: &Option<SbomData>,
) -> Result<Vec<BuiltWheelMetadata>> {
    let (artifact, out_dirs) = self.compile_cdylib(None, None)?;
    let (policy, external_libs) = self.auditwheel(&artifact, &self.platform_tag, None)?;
    // Fall back to the audited policy's tag when none was requested.
    let platform_tags = match self.platform_tag.as_slice() {
        [] => vec![policy.platform_tag()],
        tags => tags.to_vec(),
    };
    let (wheel_path, tag) = self.write_uniffi_wheel(
        artifact,
        &platform_tags,
        external_libs,
        sbom_data,
        &out_dirs,
    )?;
    eprintln!("📦 Built wheel to {}", wheel_path.display());
    Ok(vec![(wheel_path, tag)])
}
/// Assembles a wheel around one or more compiled binaries.
///
/// Rejects user-defined scripts (the binaries themselves become the entry
/// points) and, for wasi targets, user entry points and bundled python code.
fn write_bin_wheel(
    &self,
    python_interpreter: Option<&PythonInterpreter>,
    artifacts: &[BuildArtifact],
    platform_tags: &[PlatformTag],
    ext_libs: &[Vec<Library>],
    sbom_data: &Option<SbomData>,
    out_dirs: &HashMap<String, PathBuf>,
) -> Result<BuiltWheelMetadata> {
    if !self.metadata24.scripts.is_empty() {
        bail!("Defining scripts and working with a binary doesn't mix well");
    }
    if self.target.is_wasi() {
        eprintln!("⚠️ Warning: wasi support is experimental");
        if !self.metadata24.entry_points.is_empty() {
            bail!("You can't define entrypoints yourself for a binary project");
        }
        if self.project_layout.python_module.is_some() {
            bail!("Sorry, adding python code to a wasm binary is currently not supported")
        }
    }
    // Binaries without bindings are interpreter-independent; with bindings
    // the tag is that of the given interpreter.
    let (tag, tags) = match (self.bridge(), python_interpreter) {
        (BridgeModel::Bin(None), _) => self.get_universal_tags(platform_tags)?,
        (BridgeModel::Bin(Some(..)), Some(python_interpreter)) => {
            let tag = python_interpreter.get_tag(self, platform_tags)?;
            (tag.clone(), vec![tag])
        }
        _ => unreachable!(),
    };
    // The binding generator may mutate the metadata (hence the clone).
    let mut metadata24 = self.metadata24.clone();
    let file_options = self
        .compression
        .get_file_options()
        .last_modified_time(zip_mtime());
    let writer = WheelWriter::new(&tag, &self.out, &metadata24, file_options)?;
    let mut writer = VirtualWriter::new(writer, self.excludes(Format::Wheel)?);
    self.add_external_libs(&mut writer, artifacts, ext_libs)?;
    let mut generator = BinBindingGenerator::new(&mut metadata24);
    generate_binding(&mut writer, &mut generator, self, artifacts, out_dirs)
        .context("Failed to add the files to the wheel")?;
    self.add_pth(&mut writer)?;
    add_data(
        &mut writer,
        &metadata24,
        self.project_layout.data.as_deref(),
    )?;
    write_sboms(
        self,
        sbom_data.as_ref(),
        &mut writer,
        &metadata24.get_dist_info_dir(),
    )?;
    let wheel_path = writer.finish(&metadata24, &self.project_layout.project_root, &tags)?;
    Ok((wheel_path, "py3".to_string()))
}
/// Compiles the project's binaries and builds a single wheel containing all
/// of them, bundling any external shared libraries they need.
pub fn build_bin_wheel(
    &self,
    python_interpreter: Option<&PythonInterpreter>,
    sbom_data: &Option<SbomData>,
) -> Result<Vec<BuiltWheelMetadata>> {
    let mut wheels = Vec::new();
    let result = compile(self, python_interpreter, &self.compile_targets)
        .context("Failed to build a native library through cargo")?;
    if result.artifacts.is_empty() {
        bail!("Cargo didn't build a binary")
    }
    let mut policies = Vec::with_capacity(result.artifacts.len());
    let mut ext_libs = Vec::new();
    let mut artifact_paths = Vec::with_capacity(result.artifacts.len());
    for artifact in result.artifacts {
        let mut artifact = artifact
            .get(&CrateType::Bin)
            .cloned()
            .ok_or_else(|| anyhow!("Cargo didn't build a binary"))?;
        let (policy, external_libs) = self.auditwheel(&artifact, &self.platform_tag, None)?;
        policies.push(policy);
        ext_libs.push(external_libs);
        self.copy_artifact_for_repair(&mut artifact)?;
        artifact_paths.push(artifact);
    }
    // One policy must cover every binary; the minimum-priority one is taken.
    // NOTE(review): presumably lower priority == less strict policy, so this
    // selects the weakest common denominator — confirm `Policy::priority`.
    let policy = policies.iter().min_by_key(|p| p.priority).unwrap();
    let platform_tags = if self.platform_tag.is_empty() {
        vec![policy.platform_tag()]
    } else {
        self.platform_tag.clone()
    };
    let (wheel_path, tag) = self.write_bin_wheel(
        python_interpreter,
        &artifact_paths,
        &platform_tags,
        &ext_libs,
        sbom_data,
        &result.out_dirs,
    )?;
    eprintln!("📦 Built wheel to {}", wheel_path.display());
    wheels.push((wheel_path, tag));
    Ok(wheels)
}
pub fn build_bin_wheels(
&self,
interpreters: &[PythonInterpreter],
sbom_data: &Option<SbomData>,
) -> Result<Vec<BuiltWheelMetadata>> {
let mut wheels = Vec::new();
for python_interpreter in interpreters {
wheels.extend(self.build_bin_wheel(Some(python_interpreter), sbom_data)?);
}
Ok(wheels)
}
}
/// Returns the hex-encoded SHA-256 digest of the file at `path`.
pub fn hash_file(path: impl AsRef<Path>) -> Result<String, io::Error> {
    let mut digest = Sha256::new();
    // Stream the file through the hasher instead of reading it into memory.
    io::copy(&mut fs::File::open(path.as_ref())?, &mut digest)?;
    Ok(format!("{:x}", digest.finalize()))
}
/// Resolves the effective macOS deployment targets for x86_64 and arm64.
///
/// Starts from rustc's per-target defaults (raised to 10.9 for x86_64 when
/// building universal2 wheels), then lets an explicit `MAJOR.MINOR` value in
/// `deploy_target` (from `MACOSX_DEPLOYMENT_TARGET`) raise — never lower —
/// each floor. Results are normalized for Python wheel tags.
fn macosx_deployment_target(
    deploy_target: Option<&str>,
    universal2: bool,
) -> Result<((u16, u16), (u16, u16))> {
    let rustc_x86_64 = rustc_macosx_target_version("x86_64-apple-darwin");
    // universal2 wheels require at least macOS 10.9 on x86_64.
    let x86_64_floor = if universal2 && rustc_x86_64.1 < 9 {
        (10, 9)
    } else {
        rustc_x86_64
    };
    let arm64_floor = rustc_macosx_target_version("aarch64-apple-darwin");
    // Parse a user-supplied "MAJOR.MINOR[.…]" override, if any; trailing
    // components are ignored.
    let requested = match deploy_target {
        None => None,
        Some(raw) => {
            let err_ctx = "MACOSX_DEPLOYMENT_TARGET is invalid";
            let mut parts = raw.split('.');
            let major: u16 = parts.next().context(err_ctx)?.parse().context(err_ctx)?;
            let minor: u16 = parts.next().context(err_ctx)?.parse().context(err_ctx)?;
            Some((major, minor))
        }
    };
    // The override only takes effect when it exceeds the per-arch floor.
    let pick = |floor: (u16, u16)| match requested {
        Some(req) if req > floor => req,
        _ => floor,
    };
    Ok((
        python_macosx_target_version(pick(x86_64_floor)),
        python_macosx_target_version(pick(arm64_floor)),
    ))
}
/// Parses `IPHONEOS_DEPLOYMENT_TARGET` as `MAJOR.MINOR` (extra components are
/// ignored), defaulting to 13.0 when unset.
fn iphoneos_deployment_target(deploy_target: Option<&str>) -> Result<(u16, u16)> {
    match deploy_target {
        None => Ok((13, 0)),
        Some(raw) => {
            let err_ctx = "IPHONEOS_DEPLOYMENT_TARGET is invalid";
            let mut parts = raw.split('.');
            let major: u16 = parts.next().context(err_ctx)?.parse().context(err_ctx)?;
            let minor: u16 = parts.next().context(err_ctx)?.parse().context(err_ctx)?;
            Ok((major, minor))
        }
    }
}
/// Normalizes a macOS version for Python wheel tags: from macOS 11 onward
/// only the major version is significant, so the minor is zeroed.
#[inline]
fn python_macosx_target_version(version: (u16, u16)) -> (u16, u16) {
    match version {
        (major, _) if major >= 11 => (major, 0),
        older => older,
    }
}
/// Returns the default macOS deployment target `(major, minor)` that rustc
/// uses for the given target triple.
///
/// Tries `rustc --print deployment-target` first; if that fails, falls back
/// to parsing the version out of the nightly-only target-spec JSON, and
/// finally to hard-coded defaults (11.0 for arm64, 10.7 otherwise).
pub(crate) fn rustc_macosx_target_version(target: &str) -> (u16, u16) {
    use std::process::{Command, Stdio};
    use target_lexicon::OperatingSystem;
    // Preferred path: `rustc --print deployment-target` emits a line like
    // `deployment_target=11.0`. MACOSX_DEPLOYMENT_TARGET is removed from the
    // child environment so we get rustc's own default, not a user override.
    if let Ok(output) = Command::new("rustc")
        .stderr(Stdio::piped())
        .stdout(Stdio::piped())
        .env_remove("MACOSX_DEPLOYMENT_TARGET")
        .args(["--target", target])
        .args(["--print", "deployment-target"])
        .output()
        && output.status.success()
    {
        // Take the part after the '=' and split it into major/minor.
        let target_version = std::str::from_utf8(&output.stdout)
            .unwrap()
            .split('=')
            .next_back()
            .and_then(|v| v.trim().split_once('.'));
        if let Some((major, minor)) = target_version {
            let major: u16 = major.parse().unwrap();
            let minor: u16 = minor.parse().unwrap();
            return (major, minor);
        }
    }
    // Last-resort defaults when rustc gives us nothing usable.
    let fallback_version = if target == "aarch64-apple-darwin" {
        (11, 0)
    } else {
        (10, 7)
    };
    // Fallback path: extract the deployment target from the `llvm-target`
    // triple in the target spec JSON. This printout is nightly-only, so
    // RUSTC_BOOTSTRAP=1 enables it on stable toolchains.
    let rustc_target_version = || -> Result<(u16, u16)> {
        let cmd = Command::new("rustc")
            .arg("-Z")
            .arg("unstable-options")
            .arg("--print")
            .arg("target-spec-json")
            .arg("--target")
            .arg(target)
            .env("RUSTC_BOOTSTRAP", "1")
            .env_remove("MACOSX_DEPLOYMENT_TARGET")
            .output()
            .context("Failed to run rustc to get the target spec")?;
        let stdout = String::from_utf8(cmd.stdout).context("rustc output is not valid utf-8")?;
        let spec: serde_json::Value =
            serde_json::from_str(&stdout).context("rustc output is not valid json")?;
        let llvm_target = spec
            .as_object()
            .context("rustc output is not a json object")?
            .get("llvm-target")
            .context("rustc output does not contain llvm-target")?
            .as_str()
            .context("llvm-target is not a string")?;
        // e.g. "x86_64-apple-macosx10.12.0" — the OS component carries the
        // deployment target version.
        let triple = llvm_target.parse::<target_lexicon::Triple>();
        let (major, minor) = match triple.map(|t| t.operating_system) {
            Ok(
                OperatingSystem::MacOSX(Some(deployment_target))
                | OperatingSystem::Darwin(Some(deployment_target)),
            ) => (deployment_target.major, u16::from(deployment_target.minor)),
            _ => fallback_version,
        };
        Ok((major, minor))
    };
    rustc_target_version().unwrap_or(fallback_version)
}
/// Determines the emscripten version, honoring the
/// `MATURIN_EMSCRIPTEN_VERSION` override before asking `emcc` itself.
fn emscripten_version() -> Result<String> {
    match env::var("MATURIN_EMSCRIPTEN_VERSION") {
        Ok(version) => Ok(version),
        Err(_) => emcc_version(),
    }
}
/// Queries the installed `emcc` compiler for its version via `-dumpversion`.
fn emcc_version() -> Result<String> {
    use std::process::Command;
    // On Windows emcc is shipped as a batch script.
    let binary = if cfg!(windows) { "emcc.bat" } else { "emcc" };
    let output = Command::new(binary)
        .arg("-dumpversion")
        .output()
        .context("Failed to run emcc to get the version")?;
    let raw = String::from_utf8(output.stdout)?;
    let version = raw.trim();
    // Development builds report e.g. "3.1.2-git"; drop the suffix.
    Ok(version.strip_suffix("-git").unwrap_or(version).into())
}
/// Determines the Android API level for `target_triple`.
///
/// An explicit `ANDROID_API_LEVEL` environment variable wins; otherwise the
/// configured linker and C compiler names are scanned for an "android<N>"
/// marker (NDK clang wrappers are named like `aarch64-linux-android21-clang`).
fn find_android_api_level(target_triple: &str, manifest_path: &Path) -> Result<String> {
    if let Ok(level) = env::var("ANDROID_API_LEVEL") {
        return Ok(level);
    }
    // Collect candidate tool names in priority order: cargo-configured
    // linker, then the target-specific CC variable, then plain CC.
    let mut candidates: Vec<String> = Vec::new();
    if let Some(manifest_dir) = manifest_path.parent()
        && let Ok(config) = cargo_config2::Config::load_with_cwd(manifest_dir)
        && let Ok(Some(linker)) = config.linker(target_triple)
    {
        candidates.push(linker.to_string_lossy().into_owned());
    }
    let cc_target_var = format!("CC_{}", target_triple.replace('-', "_"));
    for var in [cc_target_var.as_str(), "CC"] {
        if let Ok(cc) = env::var(var) {
            candidates.push(cc);
        }
    }
    let re = Regex::new(r"android(\d+)")?;
    let found = candidates
        .iter()
        .find_map(|clue| re.captures(clue).map(|caps| caps[1].to_string()));
    match found {
        Some(level) => Ok(level),
        None => bail!(
            "Failed to determine Android API level. Please set the ANDROID_API_LEVEL environment variable."
        ),
    }
}
/// Timestamp applied to files written into the wheel zip archive.
///
/// Honors `SOURCE_DATE_EPOCH` (seconds since the Unix epoch) to support
/// reproducible builds; if the variable is unset, unparsable, or out of the
/// range representable by a zip `DateTime`, falls back to `DateTime::default()`.
fn zip_mtime() -> DateTime {
    // The original chained anyhow with an empty `.context("")` purely to
    // unify error types; Option combinators express "any failure → default"
    // directly without fabricating an error message.
    env::var("SOURCE_DATE_EPOCH")
        .ok()
        .and_then(|epoch| epoch.parse::<i64>().ok())
        .and_then(|secs| time::OffsetDateTime::from_unix_timestamp(secs).ok())
        .and_then(|dt| DateTime::try_from(dt).ok())
        .unwrap_or_default()
}
#[cfg(test)]
mod tests {
    use super::{iphoneos_deployment_target, macosx_deployment_target};
    use pretty_assertions::assert_eq;
    #[test]
    fn test_macosx_deployment_target() {
        // Expected x86_64 defaults depend on the toolchain running the tests:
        // the (1, 74) threshold below implies rustc changed its default
        // x86_64 macOS deployment target (10.7 → 10.12) in that release —
        // NOTE(review): verify against rustc release notes.
        let rustc_ver = rustc_version::version().unwrap();
        let rustc_ver = (rustc_ver.major, rustc_ver.minor);
        let x86_64_minor = if rustc_ver >= (1, 74) { 12 } else { 7 };
        let universal2_minor = if rustc_ver >= (1, 74) { 12 } else { 9 };
        // No override: pure rustc defaults (universal2 raises x86_64 to 10.9
        // on old toolchains).
        assert_eq!(
            macosx_deployment_target(None, false).unwrap(),
            ((10, x86_64_minor), (11, 0))
        );
        assert_eq!(
            macosx_deployment_target(None, true).unwrap(),
            ((10, universal2_minor), (11, 0))
        );
        // Overrides below the default floor are ignored.
        assert_eq!(
            macosx_deployment_target(Some("10.6"), false).unwrap(),
            ((10, x86_64_minor), (11, 0))
        );
        assert_eq!(
            macosx_deployment_target(Some("10.6"), true).unwrap(),
            ((10, universal2_minor), (11, 0))
        );
        assert_eq!(
            macosx_deployment_target(Some("10.9"), false).unwrap(),
            ((10, universal2_minor), (11, 0))
        );
        // From macOS 11 on, minor versions are normalized to 0 for wheel
        // tags; a trailing ".0" patch component is tolerated.
        assert_eq!(
            macosx_deployment_target(Some("11.0.0"), false).unwrap(),
            ((11, 0), (11, 0))
        );
        assert_eq!(
            macosx_deployment_target(Some("11.1"), false).unwrap(),
            ((11, 0), (11, 0))
        );
    }
    #[test]
    fn test_iphoneos_deployment_target() {
        // Default is 13.0; extra version components are ignored.
        assert_eq!(iphoneos_deployment_target(None).unwrap(), (13, 0));
        assert_eq!(iphoneos_deployment_target(Some("13.0")).unwrap(), (13, 0));
        assert_eq!(iphoneos_deployment_target(Some("14.5")).unwrap(), (14, 5));
        assert_eq!(iphoneos_deployment_target(Some("15.0")).unwrap(), (15, 0));
        assert_eq!(iphoneos_deployment_target(Some("14.5.1")).unwrap(), (14, 5));
        // Both MAJOR and MINOR must be present and numeric.
        assert!(iphoneos_deployment_target(Some("invalid")).is_err());
        assert!(iphoneos_deployment_target(Some("13")).is_err());
        assert!(iphoneos_deployment_target(Some("13.")).is_err());
        assert!(iphoneos_deployment_target(Some(".0")).is_err());
        assert!(iphoneos_deployment_target(Some("abc.def")).is_err());
        assert!(iphoneos_deployment_target(Some("13.abc")).is_err());
        assert!(iphoneos_deployment_target(Some("")).is_err());
    }
}