use std::collections::{BTreeMap, VecDeque};
use std::path::Path;
use std::slice;
use rustc_hash::FxHashSet;
use uv_auth::CredentialsCache;
use uv_configuration::NoSources;
use uv_distribution_types::{IndexLocations, Requirement};
use uv_normalize::{ExtraName, GroupName, PackageName};
use uv_pep508::MarkerTree;
use uv_workspace::dependency_groups::FlatDependencyGroups;
use uv_workspace::pyproject::{Sources, ToolUvSources};
use uv_workspace::{DiscoveryOptions, MemberDiscovery, ProjectWorkspace, WorkspaceCache};
use crate::Metadata;
use crate::metadata::{GitWorkspaceMember, LoweredRequirement, MetadataError};
/// The subset of distribution metadata needed for resolution: the
/// `Requires-Dist` portion of core metadata plus uv's dependency groups.
#[derive(Debug, Clone)]
pub struct RequiresDist {
    /// The normalized name of the distribution.
    pub name: PackageName,
    /// The (lowered) `Requires-Dist` requirements of the distribution.
    pub requires_dist: Box<[Requirement]>,
    /// The extras declared by the distribution (`Provides-Extra`).
    pub provides_extra: Box<[ExtraName]>,
    /// The (lowered) requirements of each `[dependency-groups]` entry.
    pub dependency_groups: BTreeMap<GroupName, Box<[Requirement]>>,
    /// Whether the metadata is marked as dynamic (carried over from the
    /// upstream metadata).
    pub dynamic: bool,
}
impl RequiresDist {
    /// Convert core metadata into a [`RequiresDist`] without applying any
    /// `tool.uv.sources` lowering.
    ///
    /// NOTE(review): the `23` presumably refers to core metadata version 2.3 —
    /// confirm against the callers.
    pub fn from_metadata23(metadata: uv_pypi_types::RequiresDist) -> Self {
        Self {
            name: metadata.name,
            requires_dist: Box::into_iter(metadata.requires_dist)
                .map(Requirement::from)
                .collect(),
            provides_extra: metadata.provides_extra,
            // Core metadata has no notion of dependency groups.
            dependency_groups: BTreeMap::default(),
            dynamic: metadata.dynamic,
        }
    }

    /// Lower the metadata for a project, discovering the workspace that
    /// contains `install_path` (if any) so that `tool.uv.sources` and
    /// workspace members can be applied.
    ///
    /// Falls back to the raw metadata (via [`Self::from_metadata23`]) when
    /// `install_path` is not a project root.
    pub async fn from_project_maybe_workspace(
        metadata: uv_pypi_types::RequiresDist,
        install_path: &Path,
        git_member: Option<&GitWorkspaceMember<'_>>,
        locations: &IndexLocations,
        sources: NoSources,
        editable: bool,
        cache: &WorkspaceCache,
        credentials_cache: &CredentialsCache,
    ) -> Result<Self, MetadataError> {
        let discovery = DiscoveryOptions {
            // For Git checkouts, never walk above the checkout itself when
            // searching for a workspace root.
            stop_discovery_at: git_member.map(|git_member| {
                git_member
                    .fetch_root
                    .parent()
                    .expect("git checkout has a parent")
                    .to_path_buf()
            }),
            // When sources are disabled, skip workspace-member discovery:
            // `workspace = true` sources won't be applied anyway.
            members: if sources.is_none() {
                MemberDiscovery::default()
            } else {
                MemberDiscovery::None
            },
            ..DiscoveryOptions::default()
        };
        let Some(project_workspace) =
            ProjectWorkspace::from_maybe_project_root(install_path, &discovery, cache).await?
        else {
            return Ok(Self::from_metadata23(metadata));
        };
        Self::from_project_workspace(
            metadata,
            &project_workspace,
            git_member,
            locations,
            &sources,
            editable,
            credentials_cache,
        )
    }

    /// Lower metadata in the context of a discovered [`ProjectWorkspace`],
    /// applying the current project's `tool.uv.sources` and `tool.uv.index`
    /// tables to both `Requires-Dist` and `[dependency-groups]`.
    fn from_project_workspace(
        metadata: uv_pypi_types::RequiresDist,
        project_workspace: &ProjectWorkspace,
        git_member: Option<&GitWorkspaceMember<'_>>,
        locations: &IndexLocations,
        no_sources: &NoSources,
        editable: bool,
        credentials_cache: &CredentialsCache,
    ) -> Result<Self, MetadataError> {
        // The `tool.uv.index` entries declared by the current project, if any.
        let empty = vec![];
        let project_indexes = project_workspace
            .current_project()
            .pyproject_toml()
            .tool
            .as_ref()
            .and_then(|tool| tool.uv.as_ref())
            .and_then(|uv| uv.index.as_deref())
            .unwrap_or(&empty);

        // The `tool.uv.sources` table declared by the current project, if any.
        let empty = BTreeMap::default();
        let project_sources = project_workspace
            .current_project()
            .pyproject_toml()
            .tool
            .as_ref()
            .and_then(|tool| tool.uv.as_ref())
            .and_then(|uv| uv.sources.as_ref())
            .map(ToolUvSources::inner)
            .unwrap_or(&empty);

        // Collect the `[dependency-groups]` entries for the current project.
        let dependency_groups = FlatDependencyGroups::from_pyproject_toml(
            project_workspace.current_project().root(),
            project_workspace.current_project().pyproject_toml(),
        )?;

        // Reject sources scoped to an extra or group that doesn't exist, or
        // that lacks a matching requirement.
        Self::validate_sources(project_sources, &metadata, &dependency_groups)?;

        // Lower each dependency group, applying `tool.uv.sources` to every
        // requirement (unless sources are disabled for that package).
        let dependency_groups = dependency_groups
            .into_iter()
            .map(|(name, flat_group)| {
                let requirements = flat_group
                    .requirements
                    .into_iter()
                    .flat_map(|requirement| {
                        if no_sources.for_package(&requirement.name) {
                            // Sources are disabled for this package: convert
                            // the requirement without lowering.
                            vec![Ok(Requirement::from(requirement))].into_iter()
                        } else {
                            let requirement_name = requirement.name.clone();
                            let group = name.clone();
                            let extra = None;
                            LoweredRequirement::from_requirement(
                                requirement,
                                Some(&metadata.name),
                                project_workspace.project_root(),
                                project_sources,
                                project_indexes,
                                extra,
                                Some(&group),
                                locations,
                                project_workspace.workspace(),
                                git_member,
                                editable,
                                credentials_cache,
                            )
                            // Attach the group and requirement name to any
                            // lowering error for a more precise diagnostic.
                            .map(move |requirement| match requirement {
                                Ok(requirement) => Ok(requirement.into_inner()),
                                Err(err) => Err(MetadataError::GroupLoweringError(
                                    group.clone(),
                                    requirement_name.clone(),
                                    Box::new(err),
                                )),
                            })
                            // Collect so both branches yield the same iterator
                            // type (`vec::IntoIter`).
                            .collect::<Vec<_>>()
                            .into_iter()
                        }
                    })
                    .collect::<Result<Box<_>, _>>()?;
                Ok::<(GroupName, Box<_>), MetadataError>((name, requirements))
            })
            .collect::<Result<BTreeMap<_, _>, _>>()?;

        // Lower the `Requires-Dist` requirements in the same fashion, scoping
        // each source by the requirement's top-level `extra` marker (if any).
        let requires_dist = Box::into_iter(metadata.requires_dist);
        let requires_dist = requires_dist
            .flat_map(|requirement| {
                if no_sources.for_package(&requirement.name) {
                    vec![Ok(Requirement::from(requirement))].into_iter()
                } else {
                    let requirement_name = requirement.name.clone();
                    let extra = requirement.marker.top_level_extra_name();
                    let group = None;
                    LoweredRequirement::from_requirement(
                        requirement,
                        Some(&metadata.name),
                        project_workspace.project_root(),
                        project_sources,
                        project_indexes,
                        extra.as_deref(),
                        group,
                        locations,
                        project_workspace.workspace(),
                        git_member,
                        editable,
                        credentials_cache,
                    )
                    .map(move |requirement| match requirement {
                        Ok(requirement) => Ok(requirement.into_inner()),
                        Err(err) => Err(MetadataError::LoweringError(
                            requirement_name.clone(),
                            Box::new(err),
                        )),
                    })
                    .collect::<Vec<_>>()
                    .into_iter()
                }
            })
            .collect::<Result<Box<_>, _>>()?;

        Ok(Self {
            name: metadata.name,
            requires_dist,
            dependency_groups,
            provides_extra: metadata.provides_extra,
            dynamic: metadata.dynamic,
        })
    }

    /// Validate that every `tool.uv.sources` entry scoped to an `extra` or
    /// `group` refers to a declared extra/group, and that the named package
    /// actually appears in the corresponding requirement list.
    fn validate_sources(
        sources: &BTreeMap<PackageName, Sources>,
        metadata: &uv_pypi_types::RequiresDist,
        dependency_groups: &FlatDependencyGroups,
    ) -> Result<(), MetadataError> {
        for (name, sources) in sources {
            for source in sources.iter() {
                if let Some(extra) = source.extra() {
                    // The extra must be declared in `Provides-Extra`...
                    if !metadata.provides_extra.contains(extra) {
                        return Err(MetadataError::MissingSourceExtra(
                            name.clone(),
                            extra.clone(),
                        ));
                    }
                    // ...and the package must be required under that extra.
                    if !metadata.requires_dist.iter().any(|requirement| {
                        requirement.name == *name
                            && requirement.marker.top_level_extra_name().as_deref() == Some(extra)
                    }) {
                        return Err(MetadataError::IncompleteSourceExtra(
                            name.clone(),
                            extra.clone(),
                        ));
                    }
                }
                if let Some(group) = source.group() {
                    // The group must exist in `[dependency-groups]`...
                    let Some(flat_group) = dependency_groups.get(group) else {
                        return Err(MetadataError::MissingSourceGroup(
                            name.clone(),
                            group.clone(),
                        ));
                    };
                    // ...and the package must be listed within it.
                    if !flat_group
                        .requirements
                        .iter()
                        .any(|requirement| requirement.name == *name)
                    {
                        return Err(MetadataError::IncompleteSourceGroup(
                            name.clone(),
                            group.clone(),
                        ));
                    }
                }
            }
        }
        Ok(())
    }
}
/// Build a [`RequiresDist`] from a full [`Metadata`] by carrying over the
/// shared fields.
impl From<Metadata> for RequiresDist {
    fn from(value: Metadata) -> Self {
        Self {
            name: value.name,
            dynamic: value.dynamic,
            requires_dist: value.requires_dist,
            provides_extra: value.provides_extra,
            dependency_groups: value.dependency_groups,
        }
    }
}
/// A set of requirements with self-referential extras expanded in place
/// (see [`FlatRequiresDist::from_requirements`]).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FlatRequiresDist(Box<[Requirement]>);
impl FlatRequiresDist {
    /// Flatten the requirements by expanding any self-references (e.g.,
    /// `pkg[test]` appearing within `pkg`'s own metadata) into the
    /// requirements those extras contribute, combining markers along the way.
    pub fn from_requirements(requirements: Box<[Requirement]>, name: &PackageName) -> Self {
        // Fast path: no self-references, nothing to expand.
        if requirements.iter().all(|req| req.name != *name) {
            return Self(requirements);
        }

        // For each requirement, the top-level extra (if any) under which it is
        // declared; aligned by index with `requirements`.
        let top_level_extras: Vec<_> = requirements
            .iter()
            .map(|req| req.marker.top_level_extra_name())
            .collect();

        let mut flattened = requirements.to_vec();

        // Track `(extra, marker)` pairs already expanded, to terminate on
        // cyclic (mutually-referential) extras.
        let mut seen = FxHashSet::<(ExtraName, MarkerTree)>::default();
        // Seed the work queue with the extras requested by each self-reference,
        // paired with the marker under which the reference applies.
        let mut queue: VecDeque<_> = flattened
            .iter()
            .filter(|req| req.name == *name)
            .flat_map(|req| req.extras.iter().cloned().map(|extra| (extra, req.marker)))
            .collect();
        while let Some((extra, marker)) = queue.pop_front() {
            if !seen.insert((extra.clone(), marker)) {
                continue;
            }
            // Expand every requirement declared under this extra.
            for (requirement, top_level_extra) in requirements.iter().zip(top_level_extras.iter()) {
                if top_level_extra.as_deref() != Some(&extra) {
                    continue;
                }
                let requirement = {
                    // Conjoin the self-reference's marker with the target
                    // requirement's marker, then drop the now-satisfied extra.
                    let mut marker = marker;
                    marker.and(requirement.marker);
                    Requirement {
                        name: requirement.name.clone(),
                        extras: requirement.extras.clone(),
                        groups: requirement.groups.clone(),
                        source: requirement.source.clone(),
                        origin: requirement.origin.clone(),
                        marker: marker.simplify_extras(slice::from_ref(&extra)),
                    }
                };
                if requirement.name == *name {
                    // A nested self-reference: enqueue its extras for expansion.
                    queue.extend(
                        requirement
                            .extras
                            .iter()
                            .cloned()
                            .map(|extra| (extra, requirement.marker)),
                    );
                } else {
                    flattened.push(requirement);
                }
            }
        }

        // Remove the self-references themselves...
        flattened.retain(|req| req.name != *name);
        // ...but re-add any that carry a non-trivial source, stripped of their
        // extras (NOTE(review): presumably a version specifier or URL pin on
        // the package itself — confirm `source.is_empty()` semantics).
        for req in &requirements {
            if req.name == *name {
                if !req.source.is_empty() {
                    flattened.push(Requirement {
                        name: req.name.clone(),
                        extras: Box::new([]),
                        groups: req.groups.clone(),
                        source: req.source.clone(),
                        origin: req.origin.clone(),
                        marker: req.marker,
                    });
                }
            }
        }

        Self(flattened.into_boxed_slice())
    }

    /// Consume the wrapper, returning the flattened requirements.
    pub fn into_inner(self) -> Box<[Requirement]> {
        self.0
    }
}
impl IntoIterator for FlatRequiresDist {
    type Item = Requirement;
    type IntoIter = <Box<[Requirement]> as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        // Fully-qualified call to get the by-value boxed-slice iterator: in
        // pre-2024 editions, a bare `.into_iter()` on `Box<[T]>` resolves to
        // the by-reference impl for backwards compatibility.
        Box::into_iter(self.0)
    }
}
/// Snapshot tests for `tool.uv.sources` validation/lowering, plus unit tests
/// for [`FlatRequiresDist`] flattening.
#[cfg(test)]
mod test {
    use std::fmt::Write;
    use std::path::Path;
    use std::str::FromStr;
    use indoc::indoc;
    use insta::assert_snapshot;
    use tempfile::TempDir;
    use uv_auth::CredentialsCache;
    use uv_configuration::NoSources;
    use uv_distribution_types::IndexLocations;
    use uv_normalize::PackageName;
    use uv_pep508::Requirement;
    use uv_workspace::{DiscoveryOptions, ProjectWorkspace, WorkspaceCache};
    use crate::RequiresDist;
    use crate::metadata::requires_dist::FlatRequiresDist;

    /// Write `contents` to a `pyproject.toml` in `temp_dir`, then lower it via
    /// [`RequiresDist::from_project_workspace`] with default settings.
    async fn requires_dist_from_pyproject_toml(
        temp_dir: &Path,
        contents: &str,
    ) -> anyhow::Result<RequiresDist> {
        fs_err::write(temp_dir.join("pyproject.toml"), contents)?;
        let project_workspace = ProjectWorkspace::discover(
            temp_dir,
            &DiscoveryOptions {
                // Don't walk above the temporary directory when discovering.
                stop_discovery_at: Some(temp_dir.to_path_buf()),
                ..DiscoveryOptions::default()
            },
            &WorkspaceCache::default(),
        )
        .await?;
        let pyproject_toml = uv_pypi_types::PyProjectToml::from_toml(contents, "pyproject.toml")?;
        let requires_dist = uv_pypi_types::RequiresDist::from_pyproject_toml(pyproject_toml)?;
        Ok(RequiresDist::from_project_workspace(
            requires_dist,
            &project_workspace,
            None,
            &IndexLocations::default(),
            &NoSources::default(),
            true,
            &CredentialsCache::new(),
        )?)
    }

    /// Run the lowering on `input` and render the resulting error chain,
    /// normalizing the temporary path and separators for stable snapshots.
    async fn format_err(input: &str) -> String {
        let temp_dir = TempDir::new().unwrap();
        let err = requires_dist_from_pyproject_toml(temp_dir.path(), input)
            .await
            .unwrap_err();
        let mut causes = err.chain();
        let mut message = String::new();
        let _ = writeln!(message, "error: {}", causes.next().unwrap());
        for err in causes {
            let _ = writeln!(message, " Caused by: {err}");
        }
        message
            .replace(&temp_dir.path().display().to_string(), "[PATH]")
            // Normalize Windows path separators.
            .replace('\\', "/")
    }

    /// A source of the wrong TOML type (boolean) is rejected at parse time.
    #[tokio::test]
    async fn wrong_type() {
        let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm",
]
[tool.uv.sources]
tqdm = true
"#};
        assert_snapshot!(format_err(input).await, @"
error: Failed to parse: `[PATH]/pyproject.toml`
Caused by: TOML parse error at line 8, column 8
|
8 | tqdm = true
| ^^^^
invalid type: boolean `true`, expected a single source (as a map) or list of sources
");
    }

    /// `rev`, `tag`, and `branch` are mutually exclusive on a Git source.
    #[tokio::test]
    async fn too_many_git_specs() {
        let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm",
]
[tool.uv.sources]
tqdm = { git = "https://github.com/tqdm/tqdm", rev = "baaaaaab", tag = "v1.0.0" }
"#};
        assert_snapshot!(format_err(input).await, @r#"
error: Failed to parse: `[PATH]/pyproject.toml`
Caused by: TOML parse error at line 8, column 8
|
8 | tqdm = { git = "https://github.com/tqdm/tqdm", rev = "baaaaaab", tag = "v1.0.0" }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
expected at most one of `rev`, `tag`, or `branch`
"#);
    }

    /// An unknown key (`ref` instead of `rev`) yields a field-name error.
    #[tokio::test]
    async fn too_many_git_typo() {
        let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm",
]
[tool.uv.sources]
tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" }
"#};
        assert_snapshot!(format_err(input).await, @r#"
error: Failed to parse: `[PATH]/pyproject.toml`
Caused by: TOML parse error at line 8, column 48
|
8 | tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" }
| ^^^
unknown field `ref`, expected one of `git`, `subdirectory`, `rev`, `tag`, `branch`, `lfs`, `url`, `path`, `editable`, `package`, `index`, `workspace`, `marker`, `extra`, `group`
"#);
    }

    /// `extra` and `group` cannot be combined on a single source.
    #[tokio::test]
    async fn extra_and_group() {
        let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = []
[tool.uv.sources]
tqdm = { git = "https://github.com/tqdm/tqdm", extra = "torch", group = "dev" }
"#};
        assert_snapshot!(format_err(input).await, @r#"
error: Failed to parse: `[PATH]/pyproject.toml`
Caused by: TOML parse error at line 7, column 8
|
7 | tqdm = { git = "https://github.com/tqdm/tqdm", extra = "torch", group = "dev" }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
cannot specify both `extra` and `group`
"#);
    }

    /// `path` and `index` cannot be combined on a single source.
    #[tokio::test]
    async fn you_cant_mix_those() {
        let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm",
]
[tool.uv.sources]
tqdm = { path = "tqdm", index = "torch" }
"#};
        assert_snapshot!(format_err(input).await, @r#"
error: Failed to parse: `[PATH]/pyproject.toml`
Caused by: TOML parse error at line 8, column 8
|
8 | tqdm = { path = "tqdm", index = "torch" }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
cannot specify both `path` and `index`
"#);
    }

    /// A dependency without a matching `tool.uv.sources` entry is accepted.
    #[tokio::test]
    async fn missing_constraint() {
        let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm",
]
"#};
        let temp_dir = TempDir::new().unwrap();
        assert!(
            requires_dist_from_pyproject_toml(temp_dir.path(), input)
                .await
                .is_ok()
        );
    }

    /// Malformed TOML (missing opening quote) is reported with a precise span.
    #[tokio::test]
    async fn invalid_syntax() {
        let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm ==4.66.0",
]
[tool.uv.sources]
tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
"#};
        assert_snapshot!(format_err(input).await, @r#"
error: Failed to parse: `[PATH]/pyproject.toml`
Caused by: TOML parse error at line 8, column 16
|
8 | tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
| ^
missing opening quote, expected `"`
"#);
    }

    /// A `url` source that isn't a valid absolute URL is rejected.
    #[tokio::test]
    async fn invalid_url() {
        let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm ==4.66.0",
]
[tool.uv.sources]
tqdm = { url = "§invalid#+#*Ä" }
"#};
        assert_snapshot!(format_err(input).await, @r#"
error: Failed to parse: `[PATH]/pyproject.toml`
Caused by: TOML parse error at line 8, column 16
|
8 | tqdm = { url = "§invalid#+#*Ä" }
| ^^^^^^^^^^^^^^^^^
relative URL without a base: "§invalid#+#*Ä"
"#);
    }

    /// A `workspace = true` source for a non-member is rejected, even when the
    /// dependency itself is a URL requirement.
    #[tokio::test]
    async fn workspace_and_url_spec() {
        let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm @ git+https://github.com/tqdm/tqdm",
]
[tool.uv.sources]
tqdm = { workspace = true }
"#};
        assert_snapshot!(format_err(input).await, @"
error: Failed to parse entry: `tqdm`
Caused by: `tqdm` references a workspace in `tool.uv.sources` (e.g., `tqdm = { workspace = true }`), but is not a workspace member
");
    }

    /// A `workspace = true` source for a package that isn't a workspace member
    /// is rejected.
    #[tokio::test]
    async fn missing_workspace_package() {
        let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dependencies = [
"tqdm ==4.66.0",
]
[tool.uv.sources]
tqdm = { workspace = true }
"#};
        assert_snapshot!(format_err(input).await, @"
error: Failed to parse entry: `tqdm`
Caused by: `tqdm` references a workspace in `tool.uv.sources` (e.g., `tqdm = { workspace = true }`), but is not a workspace member
");
    }

    /// Dynamic `dependencies` cannot be lowered.
    #[tokio::test]
    async fn cant_be_dynamic() {
        let input = indoc! {r#"
[project]
name = "foo"
version = "0.0.0"
dynamic = [
"dependencies"
]
[tool.uv.sources]
tqdm = { workspace = true }
"#};
        assert_snapshot!(format_err(input).await, @"error: The following field was marked as dynamic: dependencies");
    }

    /// `tool.uv.sources` without a `[project]` table is rejected.
    #[tokio::test]
    async fn missing_project_section() {
        let input = indoc! {"
[tool.uv.sources]
tqdm = { workspace = true }
"};
        assert_snapshot!(format_err(input).await, @"error: No `project` table found in: [PATH]/pyproject.toml");
    }

    /// Without self-references, flattening is a no-op.
    #[test]
    fn test_flat_requires_dist_noop() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev'")
                .unwrap()
                .into(),
        ];
        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );
        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);
        assert_eq!(actual, expected);
    }

    /// `pkg[dev]` under `extra == 'test'` pulls the `dev` extra's contents
    /// into the `test` extra.
    #[test]
    fn test_flat_requires_dist_basic() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("pkg[dev]; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev'")
                .unwrap()
                .into(),
        ];
        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'test'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );
        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);
        assert_eq!(actual, expected);
    }

    /// Markers on the self-reference are conjoined with the expanded
    /// requirements' own markers.
    #[test]
    fn test_flat_requires_dist_with_markers() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = vec![
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("pkg[dev]; extra == 'test' and sys_platform == 'win32'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev' and sys_platform == 'win32'")
                .unwrap()
                .into(),
        ];
        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev' and sys_platform == 'win32'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'test' and sys_platform == 'win32'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );
        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);
        assert_eq!(actual, expected);
    }

    /// A versioned self-constraint is retained, stripped of its extras.
    #[test]
    fn test_flat_requires_dist_self_constraint() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev'")
                .unwrap()
                .into(),
            Requirement::from_str("pkg[async]==1.0.0").unwrap().into(),
        ];
        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev'")
                    .unwrap()
                    .into(),
                Requirement::from_str("pkg==1.0.0").unwrap().into(),
            ]
            .into(),
        );
        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);
        assert_eq!(actual, expected);
    }
}