use base64::{Engine, prelude::BASE64_URL_SAFE_NO_PAD as base64};
use fs_err::File;
use globset::{GlobSet, GlobSetBuilder};
use rustc_hash::FxHashSet;
use sha2::{Digest, Sha256};
use std::fmt::{Display, Formatter};
use std::io::{BufReader, Read, Seek, Write};
use std::path::{Component, Path, PathBuf};
use std::{io, mem};
use tempfile::NamedTempFile;
use tracing::{debug, trace};
use walkdir::WalkDir;
use zip::{CompressionMethod, ZipWriter};
use uv_distribution_filename::WheelFilename;
use uv_fs::Simplified;
use uv_globfilter::{GlobDirFilter, PortableGlobParser};
use uv_platform_tags::{AbiTag, LanguageTag, PlatformTag};
use uv_preview::{Preview, PreviewFeature};
use uv_warnings::warn_user_once;
use crate::metadata::DEFAULT_EXCLUDES;
use crate::{
BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml,
error_on_venv, find_roots,
};
/// Build a wheel from the source tree rooted at `source_tree`, writing the
/// archive into `wheel_dir`.
///
/// Returns the filename of the wheel that was written.
///
/// # Errors
///
/// Returns an error if `pyproject.toml` can't be parsed, if the metadata
/// directory check fails, or if writing/persisting the archive fails.
pub fn build_wheel(
    source_tree: &Path,
    wheel_dir: &Path,
    metadata_directory: Option<&Path>,
    uv_version: &str,
    show_warnings: bool,
    preview: Preview,
) -> Result<WheelFilename, Error> {
    let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
    for warning in pyproject_toml.check_build_system(uv_version) {
        warn_user_once!("{warning}");
    }
    crate::check_metadata_directory(source_tree, metadata_directory, &pyproject_toml)?;

    // This backend produces pure-Python wheels: `py3-none-any`.
    let filename = WheelFilename::new(
        pyproject_toml.name().clone(),
        pyproject_toml.version().clone(),
        LanguageTag::Python { major: 3, minor: None },
        AbiTag::None,
        PlatformTag::Any,
    );

    let wheel_path = wheel_dir.join(filename.to_string());
    debug!("Writing wheel at {}", wheel_path.user_display());
    // Remove a stale wheel from a previous build, if any.
    if wheel_path.exists() {
        fs_err::remove_file(&wheel_path)?;
    }

    // Build into a temporary file in the target directory first, so a failed
    // build never leaves a truncated wheel behind; persist on success.
    let temp_file = NamedTempFile::new_in(wheel_dir)?;
    let writer = ZipDirectoryWriter::new_wheel(temp_file.as_file());
    write_wheel(
        source_tree,
        &pyproject_toml,
        &filename,
        uv_version,
        writer,
        show_warnings,
        preview,
    )?;
    temp_file
        .persist(&wheel_path)
        .map_err(|err| Error::Persist(wheel_path.clone(), err.error))?;
    Ok(filename)
}
/// Dry-run variant of [`build_wheel`]: compute the wheel filename and the list
/// of files that a build would put into the archive, without writing anything.
pub fn list_wheel(
    source_tree: &Path,
    uv_version: &str,
    show_warnings: bool,
    preview: Preview,
) -> Result<(WheelFilename, FileList), Error> {
    let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
    for warning in pyproject_toml.check_build_system(uv_version) {
        warn_user_once!("{warning}");
    }

    // Same pure-Python tags as a real build: `py3-none-any`.
    let filename = WheelFilename::new(
        pyproject_toml.name().clone(),
        pyproject_toml.version().clone(),
        LanguageTag::Python { major: 3, minor: None },
        AbiTag::None,
        PlatformTag::Any,
    );

    // A `ListWriter` records paths instead of writing archive entries.
    let mut files = FileList::new();
    write_wheel(
        source_tree,
        &pyproject_toml,
        &filename,
        uv_version,
        ListWriter::new(&mut files),
        show_warnings,
        preview,
    )?;
    Ok((filename, files))
}
/// Write the contents of a wheel — module files, license files, data files, and
/// the `.dist-info` metadata — through the given [`DirectoryWriter`].
///
/// Used by both the real build (zip writer) and the dry-run listing (list writer).
fn write_wheel(
    source_tree: &Path,
    pyproject_toml: &PyProjectToml,
    filename: &WheelFilename,
    uv_version: &str,
    mut wheel_writer: impl DirectoryWriter,
    show_warnings: bool,
    preview: Preview,
) -> Result<(), Error> {
    let settings = pyproject_toml
        .settings()
        .cloned()
        .unwrap_or_else(BuildBackendSettings::default);

    // The wheel excludes are the union of the default excludes (if enabled),
    // the wheel-specific excludes, and the source-dist excludes, deduplicated
    // while preserving insertion order.
    let mut excludes: Vec<String> = Vec::new();
    if settings.default_excludes {
        excludes.extend(DEFAULT_EXCLUDES.iter().map(ToString::to_string));
    }
    for exclude in settings.wheel_exclude {
        if !excludes.contains(&exclude) {
            excludes.push(exclude);
        }
    }
    // Files excluded from the source distribution are also excluded from the wheel.
    for exclude in &settings.source_exclude {
        if !excludes.contains(exclude) {
            excludes.push(exclude.clone());
        }
    }
    debug!("Wheel excludes: {:?}", excludes);
    let exclude_matcher = build_exclude_matcher(excludes)?;

    debug!("Adding content files to wheel");
    let (src_root, module_relative) = find_roots(
        source_tree,
        pyproject_toml,
        &settings.module_root,
        settings.module_name.as_ref(),
        settings.namespace,
        show_warnings,
    )?;

    let mut files_visited = 0;
    // Parent directories already emitted, so shared ancestors of multiple
    // modules are only written once.
    let mut prefix_directories = FxHashSet::default();
    for module_relative in module_relative {
        // Write the ancestor directories of the module (skipping the module
        // path itself and the empty root component).
        for ancestor in module_relative.ancestors().skip(1) {
            if ancestor == Path::new("") {
                continue;
            }
            if prefix_directories.insert(ancestor.to_path_buf()) {
                wheel_writer.write_directory(&ancestor.portable_display().to_string())?;
            }
        }
        for entry in WalkDir::new(src_root.join(module_relative))
            .sort_by_file_name()
            .into_iter()
            // Prune excluded directories so their contents aren't traversed.
            // NOTE(review): this prunes on the *absolute* entry path, while the
            // check below matches the source-tree-relative path — confirm the
            // asymmetry is intended for rooted (leading-`/`) excludes.
            .filter_entry(|entry| !exclude_matcher.is_match(entry.path()))
        {
            let entry = entry.map_err(|err| Error::WalkDir {
                root: source_tree.to_path_buf(),
                err,
            })?;
            files_visited += 1;
            // Heuristic guard against pathological trees; `warn_user_once!`
            // ensures the warning fires at most once.
            if files_visited > 10000 {
                warn_user_once!(
                    "Visited more than 10,000 files for wheel build. \
                    Consider using more constrained includes or more excludes."
                );
            }
            // Path relative to the source tree, used for exclude matching.
            let match_path = entry
                .path()
                .strip_prefix(source_tree)
                .expect("walkdir starts with root");
            // Path relative to the module root, used as the in-archive path.
            let entry_path = entry
                .path()
                .strip_prefix(&src_root)
                .expect("walkdir starts with root");
            if exclude_matcher.is_match(match_path) {
                trace!("Excluding from module: {}", match_path.user_display());
                continue;
            }
            // Refuse to package a virtual environment.
            error_on_venv(entry.file_name(), entry.path())?;
            let entry_path = entry_path.portable_display().to_string();
            debug!("Adding to wheel: {entry_path}");
            wheel_writer.write_dir_entry(&entry, &entry_path)?;
        }
    }
    debug!("Visited {files_visited} files for wheel build");

    // License files go into `<name>-<version>.dist-info/licenses/`.
    if pyproject_toml.license_files_wheel().next().is_some() {
        debug!("Adding license files");
        let license_dir = format!(
            "{}-{}.dist-info/licenses/",
            pyproject_toml.name().as_dist_info_name(),
            pyproject_toml.version()
        );
        wheel_subdir_from_globs(
            source_tree,
            &license_dir,
            pyproject_toml.license_files_wheel(),
            &mut wheel_writer,
            "project.license-files",
        )?;
    }

    // Data files go into `<name>-<version>.data/<kind>/`.
    for (name, directory) in settings.data.iter() {
        debug!(
            "Adding {name} data files from: {}",
            directory.user_display()
        );
        // The data root must be a relative path inside the source tree (no
        // absolute paths, no `..` components).
        if directory
            .components()
            .next()
            .is_some_and(|component| !matches!(component, Component::CurDir | Component::Normal(_)))
        {
            return Err(Error::InvalidDataRoot {
                name: name.to_string(),
                path: directory.to_path_buf(),
            });
        }
        let data_dir = format!(
            "{}-{}.data/{}/",
            pyproject_toml.name().as_dist_info_name(),
            pyproject_toml.version(),
            name
        );
        // `**` includes everything below the data directory.
        wheel_subdir_from_globs(
            &source_tree.join(directory),
            &data_dir,
            &["**".to_string()],
            &mut wheel_writer,
            &format!("tool.uv.build-backend.data.{name}"),
        )?;
    }

    debug!("Adding metadata files to wheel");
    let dist_info_dir = write_dist_info(
        &mut wheel_writer,
        pyproject_toml,
        filename,
        source_tree,
        uv_version,
        preview,
    )?;
    // `close` writes the RECORD file and finalizes the archive.
    wheel_writer.close(&dist_info_dir)?;
    Ok(())
}
/// Build an editable wheel: instead of packaging the module files, the wheel
/// contains a `.pth` file pointing at the source root, plus the `.dist-info`
/// metadata.
///
/// Returns the filename of the wheel that was written.
pub fn build_editable(
    source_tree: &Path,
    wheel_dir: &Path,
    metadata_directory: Option<&Path>,
    uv_version: &str,
    show_warnings: bool,
    preview: Preview,
) -> Result<WheelFilename, Error> {
    let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
    for warning in pyproject_toml.check_build_system(uv_version) {
        warn_user_once!("{warning}");
    }
    let settings = pyproject_toml
        .settings()
        .cloned()
        .unwrap_or_else(BuildBackendSettings::default);
    crate::check_metadata_directory(source_tree, metadata_directory, &pyproject_toml)?;

    // Editable wheels are tagged like regular pure-Python wheels: `py3-none-any`.
    let filename = WheelFilename::new(
        pyproject_toml.name().clone(),
        pyproject_toml.version().clone(),
        LanguageTag::Python {
            major: 3,
            minor: None,
        },
        AbiTag::None,
        PlatformTag::Any,
    );

    let wheel_path = wheel_dir.join(filename.to_string());
    debug!("Writing wheel at {}", wheel_path.user_display());
    // Remove a stale wheel from a previous build, if any.
    if wheel_path.exists() {
        fs_err::remove_file(&wheel_path)?;
    }

    // Build into a temp file in the target directory, then persist on success.
    let temp_file = NamedTempFile::new_in(wheel_dir)?;
    let mut wheel_writer = ZipDirectoryWriter::new_wheel(temp_file.as_file());
    debug!("Adding pth file to {}", wheel_path.user_display());
    let (src_root, _module_relative) = find_roots(
        source_tree,
        &pyproject_toml,
        &settings.module_root,
        settings.module_name.as_ref(),
        settings.namespace,
        show_warnings,
    )?;
    // The `.pth` file contains the raw bytes of the source-root path.
    // NOTE(review): `as_encoded_bytes` uses the OS path encoding — confirm this
    // matches what installers expect for non-UTF-8 paths.
    wheel_writer.write_bytes(
        &format!("{}.pth", pyproject_toml.name().as_dist_info_name()),
        src_root.as_os_str().as_encoded_bytes(),
    )?;

    debug!("Adding metadata files to: {}", wheel_path.user_display());
    let dist_info_dir = write_dist_info(
        &mut wheel_writer,
        &pyproject_toml,
        &filename,
        source_tree,
        uv_version,
        preview,
    )?;
    // `close` writes the RECORD file and finalizes the archive.
    wheel_writer.close(&dist_info_dir)?;
    temp_file
        .persist(&wheel_path)
        .map_err(|err| Error::Persist(wheel_path.clone(), err.error))?;
    Ok(filename)
}
/// Write the `.dist-info` metadata files into `metadata_directory` (the PEP 517
/// `prepare_metadata_for_build_wheel` hook) and return the name of the
/// `.dist-info` directory that was created.
pub fn metadata(
    source_tree: &Path,
    metadata_directory: &Path,
    uv_version: &str,
    preview: Preview,
) -> Result<String, Error> {
    let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
    for warning in pyproject_toml.check_build_system(uv_version) {
        warn_user_once!("{warning}");
    }

    // Same pure-Python tags as the full build: `py3-none-any`.
    let filename = WheelFilename::new(
        pyproject_toml.name().clone(),
        pyproject_toml.version().clone(),
        LanguageTag::Python { major: 3, minor: None },
        AbiTag::None,
        PlatformTag::Any,
    );

    debug!(
        "Writing metadata files to {}",
        metadata_directory.user_display()
    );
    // Write plain files instead of zip entries.
    let mut writer = FilesystemWriter::new(metadata_directory);
    let dist_info_dir = write_dist_info(
        &mut writer,
        &pyproject_toml,
        &filename,
        source_tree,
        uv_version,
        preview,
    )?;
    // `close` writes the RECORD file.
    writer.close(&dist_info_dir)?;
    Ok(dist_info_dir)
}
/// A single row of a wheel `RECORD` file.
struct RecordEntry {
    // Archive path of the file, relative to the wheel root.
    path: String,
    // SHA-256 digest of the file contents, urlsafe-base64 encoded without padding.
    hash: String,
    // Size of the file in bytes.
    size: usize,
}
/// Copy `reader` into `writer` while computing the SHA-256 hash and the size of
/// the streamed contents.
///
/// Returns the [`RecordEntry`] (path, hash, size) for the copied file.
fn write_hashed(
    path: &str,
    reader: &mut dyn Read,
    writer: &mut dyn Write,
) -> Result<RecordEntry, io::Error> {
    let mut hasher = Sha256::new();
    let mut total = 0usize;
    // Stream in 8 KiB chunks to bound memory usage.
    let mut chunk = [0u8; 8 * 1024];
    loop {
        let n = match reader.read(&mut chunk) {
            // EOF: the entire stream has been copied.
            Ok(0) => break,
            Ok(n) => n,
            // A read interrupted by a signal is retried, not treated as an error.
            Err(err) if err.kind() == io::ErrorKind::Interrupted => continue,
            Err(err) => return Err(err),
        };
        hasher.update(&chunk[..n]);
        writer.write_all(&chunk[..n])?;
        total += n;
    }
    Ok(RecordEntry {
        path: path.to_string(),
        hash: base64.encode(hasher.finalize()),
        size: total,
    })
}
fn write_record(
writer: &mut dyn Write,
dist_info_dir: &str,
record: Vec<RecordEntry>,
) -> Result<(), Error> {
let mut record_writer = csv::Writer::from_writer(writer);
for entry in record {
record_writer.write_record(&[
entry.path,
format!("sha256={}", entry.hash),
entry.size.to_string(),
])?;
}
record_writer.write_record(&[
format!("{dist_info_dir}/RECORD"),
String::new(),
String::new(),
])?;
record_writer.flush()?;
Ok(())
}
/// Build a glob matcher for the `*-exclude` settings.
///
/// Excludes that start with `/` are anchored at the root; all other excludes
/// are prefixed with `**/` so they match at any depth.
///
/// # Errors
///
/// Returns an error if a glob fails to parse or the combined set is too large.
pub(crate) fn build_exclude_matcher(
    excludes: impl IntoIterator<Item = impl AsRef<str>>,
) -> Result<GlobSet, Error> {
    let mut exclude_builder = GlobSetBuilder::new();
    for exclude in excludes {
        let exclude = exclude.as_ref();
        // An anchored exclude (leading `/`) matches relative to the root only;
        // everything else matches anywhere in the tree.
        let exclude = if let Some(exclude) = exclude.strip_prefix('/') {
            exclude.to_string()
        } else {
            // Fixed: `format!` already returns a `String`; the former trailing
            // `.to_string()` was a redundant allocation.
            format!("**/{exclude}")
        };
        let glob = PortableGlobParser::Uv
            .parse(&exclude)
            .map_err(|err| Error::PortableGlob {
                field: "tool.uv.build-backend.*-exclude".to_string(),
                source: err,
            })?;
        exclude_builder.add(glob);
    }
    exclude_builder
        .build()
        .map_err(|err| Error::GlobSetTooLarge {
            field: "tool.uv.build-backend.*-exclude".to_string(),
            source: err,
        })
}
/// Copy the files under `src` matching `globs` into the wheel below the
/// `target` directory, e.g. for license files or `.data` directories.
///
/// `globs_field` names the originating `pyproject.toml` field, used in error
/// messages and log lines.
fn wheel_subdir_from_globs(
    src: &Path,
    target: &str,
    globs: impl IntoIterator<Item = impl AsRef<str>>,
    wheel_writer: &mut impl DirectoryWriter,
    globs_field: &str,
) -> Result<(), Error> {
    // Parse every glob with PEP 639 semantics; the first parse failure aborts
    // with the offending field name attached.
    let license_files_globs: Vec<_> = globs
        .into_iter()
        .map(|license_files| {
            let license_files = license_files.as_ref();
            trace!(
                "Including {} at `{}` with `{}`",
                globs_field,
                src.user_display(),
                license_files
            );
            PortableGlobParser::Pep639.parse(license_files)
        })
        .collect::<Result<_, _>>()
        .map_err(|err| Error::PortableGlob {
            field: globs_field.to_string(),
            source: err,
        })?;
    let matcher =
        GlobDirFilter::from_globs(&license_files_globs).map_err(|err| Error::GlobSetTooLarge {
            field: globs_field.to_string(),
            source: err,
        })?;

    wheel_writer.write_directory(target)?;
    for entry in WalkDir::new(src)
        .sort_by_file_name()
        .into_iter()
        // Prune directories that can't contain any match, so they aren't traversed.
        .filter_entry(|entry| {
            let relative = entry
                .path()
                .strip_prefix(src)
                .expect("walkdir starts with root");
            matcher.match_directory(relative)
        })
    {
        let entry = entry.map_err(|err| Error::WalkDir {
            root: src.to_path_buf(),
            err,
        })?;
        // The walk yields `src` itself first; it has no relative path to add.
        if entry.path() == src {
            continue;
        }
        let relative = entry
            .path()
            .strip_prefix(src)
            .expect("walkdir starts with root");
        // `match_directory` above only prunes; each surviving entry must still
        // match a glob to be included.
        if !matcher.match_path(relative) {
            trace!("Excluding {}: {}", globs_field, relative.user_display());
            continue;
        }
        // Refuse to package a virtual environment.
        error_on_venv(entry.file_name(), entry.path())?;
        let license_path = Path::new(target)
            .join(relative)
            .portable_display()
            .to_string();
        debug!("Adding for {}: {}", globs_field, relative.user_display());
        wheel_writer.write_dir_entry(&entry, &license_path)?;
    }
    Ok(())
}
fn write_dist_info(
writer: &mut dyn DirectoryWriter,
pyproject_toml: &PyProjectToml,
filename: &WheelFilename,
root: &Path,
uv_version: &str,
preview: Preview,
) -> Result<String, Error> {
let dist_info_dir = format!(
"{}-{}.dist-info",
pyproject_toml.name().as_dist_info_name(),
pyproject_toml.version()
);
writer.write_directory(&dist_info_dir)?;
let wheel_info = WheelInfo::new(filename, uv_version);
writer.write_bytes(
&format!("{dist_info_dir}/WHEEL"),
wheel_info.to_string().as_bytes(),
)?;
if preview.is_enabled(PreviewFeature::MetadataJson) {
writer.write_bytes(
&format!("{dist_info_dir}/WHEEL.json"),
&serde_json::to_vec(&wheel_info).map_err(Error::Json)?,
)?;
}
if let Some(entrypoint) = pyproject_toml.to_entry_points()? {
writer.write_bytes(
&format!("{dist_info_dir}/entry_points.txt"),
entrypoint.as_bytes(),
)?;
}
let metadata = pyproject_toml.to_metadata(root)?;
writer.write_bytes(
&format!("{dist_info_dir}/METADATA"),
metadata.core_metadata_format().as_bytes(),
)?;
if preview.is_enabled(PreviewFeature::MetadataJson) {
writer.write_bytes(
&format!("{dist_info_dir}/METADATA.json"),
&serde_json::to_vec(&metadata).map_err(Error::Json)?,
)?;
}
Ok(dist_info_dir)
}
/// The contents of a wheel's `WHEEL` metadata file.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
struct WheelInfo {
    // `Wheel-Version`, the version of the wheel format itself.
    wheel_version: String,
    // `Generator`, the tool (and version) that produced the wheel.
    generator: String,
    // `Root-Is-Purelib`, whether the wheel installs into purelib.
    root_is_purelib: bool,
    // `Tag` lines, one per compatibility tag.
    tags: Vec<String>,
}
impl WheelInfo {
fn new(filename: &WheelFilename, uv_version: &str) -> Self {
let mut tags = Vec::new();
for python_tag in filename.python_tags() {
for abi_tag in filename.abi_tags() {
for platform_tag in filename.platform_tags() {
tags.push(format!("{python_tag}-{abi_tag}-{platform_tag}"));
}
}
}
Self {
wheel_version: "1.0".to_string(),
generator: format!("uv {uv_version}"),
root_is_purelib: true,
tags,
}
}
}
impl Display for WheelInfo {
    /// Render the `WHEEL` file in its key-value text format, one `Tag` line per tag.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let Self {
            wheel_version,
            generator,
            root_is_purelib,
            tags,
        } = self;
        writeln!(f, "Wheel-Version: {wheel_version}")?;
        writeln!(f, "Generator: {generator}")?;
        writeln!(f, "Root-Is-Purelib: {root_is_purelib}")?;
        tags.iter().try_for_each(|tag| writeln!(f, "Tag: {tag}"))
    }
}
/// A zip archive (wheel) writer that keeps a running `RECORD` of every file added.
struct ZipDirectoryWriter<W: Write + Seek> {
    writer: ZipWriter<W>,
    // Compression applied to every entry: `Deflated` for regular wheels,
    // `Stored` for the (unused) editable constructor.
    compression: CompressionMethod,
    // Accumulated `RECORD` entries; written out in `close`.
    record: Vec<RecordEntry>,
}
impl<W: Write + Seek> ZipDirectoryWriter<W> {
fn new_wheel(writer: W) -> Self {
Self {
writer: ZipWriter::new(writer),
compression: CompressionMethod::Deflated,
record: Vec::new(),
}
}
#[expect(dead_code)]
fn new_editable(writer: W) -> Self {
Self {
writer: ZipWriter::new(writer),
compression: CompressionMethod::Stored,
record: Vec::new(),
}
}
fn new_writer<'slf>(
&'slf mut self,
path: &str,
executable_bit: bool,
) -> Result<Box<dyn Write + 'slf>, Error> {
let permissions = if executable_bit { 0o755 } else { 0o644 };
let options = zip::write::SimpleFileOptions::default()
.unix_permissions(permissions)
.compression_method(self.compression);
self.writer.start_file(path, options)?;
Ok(Box::new(&mut self.writer))
}
}
impl<W: Write + Seek> DirectoryWriter for ZipDirectoryWriter<W> {
    /// Write an in-memory buffer as a regular (0644) archive entry and record
    /// its hash and size for `RECORD`.
    fn write_bytes(&mut self, path: &str, bytes: &[u8]) -> Result<(), Error> {
        trace!("Adding {}", path);
        let options = zip::write::SimpleFileOptions::default()
            .unix_permissions(0o644)
            .compression_method(self.compression);
        self.writer.start_file(path, options)?;
        self.writer.write_all(bytes)?;
        let hash = base64.encode(Sha256::new().chain_update(bytes).finalize());
        self.record.push(RecordEntry {
            path: path.to_string(),
            hash,
            size: bytes.len(),
        });
        Ok(())
    }

    /// Stream a file from disk into the archive, preserving the executable bit
    /// on Unix, and record its hash and size for `RECORD`.
    fn write_file(&mut self, path: &str, file: &Path) -> Result<(), Error> {
        trace!("Adding {} from {}", path, file.user_display());
        let mut reader = BufReader::new(File::open(file)?);
        // On Unix, mark the entry executable if any execute bit is set on the
        // source file; other platforms have no such bit to carry over.
        #[cfg(unix)]
        let executable_bit = {
            use std::os::unix::fs::PermissionsExt;
            file.metadata()?.permissions().mode() & 0o111 != 0
        };
        #[cfg(not(unix))]
        let executable_bit = false;
        let mut writer = self.new_writer(path, executable_bit)?;
        let record = write_hashed(path, &mut reader, &mut writer)?;
        drop(writer);
        self.record.push(record);
        Ok(())
    }

    /// Add a directory entry to the archive. Directories are not listed in `RECORD`.
    fn write_directory(&mut self, directory: &str) -> Result<(), Error> {
        trace!("Adding directory {}", directory);
        let options = zip::write::SimpleFileOptions::default().compression_method(self.compression);
        Ok(self.writer.add_directory(directory, options)?)
    }

    /// Write the `RECORD` file and finalize the archive (central directory).
    fn close(mut self, dist_info_dir: &str) -> Result<(), Error> {
        let record_path = format!("{dist_info_dir}/RECORD");
        trace!("Adding {record_path}");
        // Take the record out of `self` so we can borrow `self` mutably below.
        let record = mem::take(&mut self.record);
        write_record(
            &mut self.new_writer(&record_path, false)?,
            dist_info_dir,
            record,
        )?;
        trace!("Adding central directory");
        self.writer.finish()?;
        Ok(())
    }
}
/// A [`DirectoryWriter`] that writes plain files below a root directory,
/// used for the PEP 517 metadata-only hook.
struct FilesystemWriter {
    // All written paths are joined onto this directory.
    root: PathBuf,
    // Accumulated `RECORD` entries; written out in `close`.
    record: Vec<RecordEntry>,
}
impl FilesystemWriter {
    /// A filesystem writer rooted at `root`, with an empty `RECORD`.
    fn new(root: &Path) -> Self {
        Self {
            root: root.to_owned(),
            record: Vec::new(),
        }
    }

    /// Create (or truncate) the file at `path` below the root and return a
    /// writer for its contents.
    fn new_writer<'slf>(&'slf mut self, path: &str) -> Result<Box<dyn Write + 'slf>, Error> {
        trace!("Adding {}", path);
        let file = File::create(self.root.join(path))?;
        Ok(Box::new(file))
    }
}
impl DirectoryWriter for FilesystemWriter {
    /// Write an in-memory buffer to a file below the root and record its hash
    /// and size for `RECORD`.
    fn write_bytes(&mut self, path: &str, bytes: &[u8]) -> Result<(), Error> {
        trace!("Adding {}", path);
        let hash = base64.encode(Sha256::new().chain_update(bytes).finalize());
        self.record.push(RecordEntry {
            path: path.to_string(),
            hash,
            size: bytes.len(),
        });
        Ok(fs_err::write(self.root.join(path), bytes)?)
    }

    /// Copy a file from disk to below the root and record its hash and size
    /// for `RECORD`.
    // NOTE(review): unlike the zip writer, no executable bit is carried over —
    // confirm permissions don't matter for the metadata-only path.
    fn write_file(&mut self, path: &str, file: &Path) -> Result<(), Error> {
        trace!("Adding {} from {}", path, file.user_display());
        let mut reader = BufReader::new(File::open(file)?);
        let mut writer = self.new_writer(path)?;
        let record = write_hashed(path, &mut reader, &mut writer)?;
        drop(writer);
        self.record.push(record);
        Ok(())
    }

    /// Create a directory below the root. Directories are not listed in `RECORD`.
    // `create_dir` (not `create_dir_all`) fails if the directory already exists.
    fn write_directory(&mut self, directory: &str) -> Result<(), Error> {
        trace!("Adding directory {}", directory);
        Ok(fs_err::create_dir(self.root.join(directory))?)
    }

    /// Write the `RECORD` file from the accumulated entries.
    fn close(mut self, dist_info_dir: &str) -> Result<(), Error> {
        // Take the record out of `self` so we can borrow `self` mutably below.
        let record = mem::take(&mut self.record);
        write_record(
            &mut self.new_writer(&format!("{dist_info_dir}/RECORD"))?,
            dist_info_dir,
            record,
        )?;
        Ok(())
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use insta::assert_snapshot;
    use std::path::Path;
    use std::str::FromStr;
    use tempfile::TempDir;
    use uv_distribution_filename::WheelFilename;
    use uv_fs::Simplified;
    use uv_normalize::PackageName;
    use uv_pep440::Version;
    use uv_platform_tags::{AbiTag, PlatformTag};
    use walkdir::WalkDir;

    /// The `WHEEL` file for a `py3-none-any` wheel renders the expected key-value text.
    #[test]
    fn test_wheel() {
        let filename = WheelFilename::new(
            PackageName::from_str("foo").unwrap(),
            Version::from_str("1.2.3").unwrap(),
            LanguageTag::Python {
                major: 3,
                minor: None,
            },
            AbiTag::None,
            PlatformTag::Any,
        );
        assert_snapshot!(WheelInfo::new(&filename, "1.0.0+test").to_string(), @"
        Wheel-Version: 1.0
        Generator: uv 1.0.0+test
        Root-Is-Purelib: true
        Tag: py3-none-any
        ");
    }

    /// `RECORD` rows are rendered as CSV, with an empty-hash/size self row at the end.
    #[test]
    fn test_record() {
        let record = vec![RecordEntry {
            path: "built_by_uv/__init__.py".to_string(),
            hash: "ifhp5To6AGGlLAIz5kQtTXLegKii00BtnqC_05fteGU".to_string(),
            size: 37,
        }];

        let mut writer = Vec::new();
        write_record(&mut writer, "built_by_uv-0.1.0", record).unwrap();
        assert_snapshot!(String::from_utf8(writer).unwrap(), @"
        built_by_uv/__init__.py,sha256=ifhp5To6AGGlLAIz5kQtTXLegKii00BtnqC_05fteGU,37
        built_by_uv-0.1.0/RECORD,,
        ");
    }

    /// End-to-end check of the metadata-only hook against the `built-by-uv`
    /// fixture package: file layout, `METADATA`, `RECORD`, and `WHEEL` contents.
    #[test]
    fn test_prepare_metadata() {
        let metadata_dir = TempDir::new().unwrap();
        let built_by_uv = Path::new("../../test/packages/built-by-uv");
        metadata(
            built_by_uv,
            metadata_dir.path(),
            "1.0.0+test",
            Preview::default(),
        )
        .unwrap();

        // Collect the created paths relative to the metadata directory.
        let mut files: Vec<_> = WalkDir::new(metadata_dir.path())
            .sort_by_file_name()
            .into_iter()
            .map(|entry| {
                entry
                    .unwrap()
                    .path()
                    .strip_prefix(metadata_dir.path())
                    .expect("walkdir starts with root")
                    .portable_display()
                    .to_string()
            })
            .filter(|path| !path.is_empty())
            .collect();
        files.sort();
        assert_snapshot!(files.join("\n"), @"
        built_by_uv-0.1.0.dist-info
        built_by_uv-0.1.0.dist-info/METADATA
        built_by_uv-0.1.0.dist-info/RECORD
        built_by_uv-0.1.0.dist-info/WHEEL
        built_by_uv-0.1.0.dist-info/entry_points.txt
        ");

        let metadata_file = metadata_dir
            .path()
            .join("built_by_uv-0.1.0.dist-info/METADATA");
        assert_snapshot!(fs_err::read_to_string(metadata_file).unwrap(), @"
        Metadata-Version: 2.4
        Name: built-by-uv
        Version: 0.1.0
        Summary: A package to be built with the uv build backend that uses all features exposed by the build backend
        License-File: LICENSE-APACHE
        License-File: LICENSE-MIT
        License-File: third-party-licenses/PEP-401.txt
        Requires-Dist: anyio>=4,<5
        Requires-Python: >=3.12
        Description-Content-Type: text/markdown

        # built_by_uv

        A package to be built with the uv build backend that uses all features exposed by the build backend.
        ");

        let record_file = metadata_dir
            .path()
            .join("built_by_uv-0.1.0.dist-info/RECORD");
        assert_snapshot!(fs_err::read_to_string(record_file).unwrap(), @"
        built_by_uv-0.1.0.dist-info/WHEEL,sha256=JBpLtoa_WBz5WPGpRsAUTD4Dz6H0KkkdiKWCkfMSS1U,84
        built_by_uv-0.1.0.dist-info/entry_points.txt,sha256=-IO6yaq6x6HSl-zWH96rZmgYvfyHlH00L5WQoCpz-YI,50
        built_by_uv-0.1.0.dist-info/METADATA,sha256=m6EkVvKrGmqx43b_VR45LHD37IZxPYC0NI6Qx9_UXLE,474
        built_by_uv-0.1.0.dist-info/RECORD,,
        ");

        let wheel_file = metadata_dir
            .path()
            .join("built_by_uv-0.1.0.dist-info/WHEEL");
        assert_snapshot!(fs_err::read_to_string(wheel_file).unwrap(), @"
        Wheel-Version: 1.0
        Generator: uv 1.0.0+test
        Root-Is-Purelib: true
        Tag: py3-none-any
        ");
    }
}