uv_distribution/metadata/requires_dist.rs

use std::collections::{BTreeMap, VecDeque};
use std::path::Path;
use std::slice;

use rustc_hash::FxHashSet;
use uv_auth::CredentialsCache;
use uv_configuration::SourceStrategy;
use uv_distribution_types::{IndexLocations, Requirement};
use uv_normalize::{ExtraName, GroupName, PackageName};
use uv_pep508::MarkerTree;
use uv_workspace::dependency_groups::FlatDependencyGroups;
use uv_workspace::pyproject::{Sources, ToolUvSources};
use uv_workspace::{DiscoveryOptions, MemberDiscovery, ProjectWorkspace, WorkspaceCache};

use crate::Metadata;
use crate::metadata::{GitWorkspaceMember, LoweredRequirement, MetadataError};

#[derive(Debug, Clone)]
pub struct RequiresDist {
    pub name: PackageName,
    pub requires_dist: Box<[Requirement]>,
    pub provides_extra: Box<[ExtraName]>,
    pub dependency_groups: BTreeMap<GroupName, Box<[Requirement]>>,
    pub dynamic: bool,
}

impl RequiresDist {
    /// Lower without considering `tool.uv` in `pyproject.toml`, used for index and other archive
    /// dependencies.
    pub fn from_metadata23(metadata: uv_pypi_types::RequiresDist) -> Self {
        Self {
            name: metadata.name,
            requires_dist: Box::into_iter(metadata.requires_dist)
                .map(Requirement::from)
                .collect(),
            provides_extra: metadata.provides_extra,
            dependency_groups: BTreeMap::default(),
            dynamic: metadata.dynamic,
        }
    }

    /// Lower by considering `tool.uv` in `pyproject.toml` if present, used for Git and directory
    /// dependencies.
    pub async fn from_project_maybe_workspace(
        metadata: uv_pypi_types::RequiresDist,
        install_path: &Path,
        git_member: Option<&GitWorkspaceMember<'_>>,
        locations: &IndexLocations,
        sources: SourceStrategy,
        cache: &WorkspaceCache,
        credentials_cache: &CredentialsCache,
    ) -> Result<Self, MetadataError> {
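        // Configure workspace discovery: for Git dependencies, don't walk above the fetched
        // checkout; when sources are disabled, skip workspace member discovery entirely.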
        let discovery = DiscoveryOptions {
            stop_discovery_at: git_member.map(|git_member| {
                git_member
                    .fetch_root
                    .parent()
                    .expect("git checkout has a parent")
                    .to_path_buf()
            }),
            members: match sources {
                SourceStrategy::Enabled => MemberDiscovery::default(),
                SourceStrategy::Disabled => MemberDiscovery::None,
            },
            ..DiscoveryOptions::default()
        };
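        // If no project is discovered at the install path, fall back to lowering the metadata
        // without considering `tool.uv`.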
        let Some(project_workspace) =
            ProjectWorkspace::from_maybe_project_root(install_path, &discovery, cache).await?
        else {
            return Ok(Self::from_metadata23(metadata));
        };

        Self::from_project_workspace(
            metadata,
            &project_workspace,
            git_member,
            locations,
            sources,
            credentials_cache,
        )
    }

    fn from_project_workspace(
        metadata: uv_pypi_types::RequiresDist,
        project_workspace: &ProjectWorkspace,
        git_member: Option<&GitWorkspaceMember<'_>>,
        locations: &IndexLocations,
        source_strategy: SourceStrategy,
        credentials_cache: &CredentialsCache,
    ) -> Result<Self, MetadataError> {
        // Collect any `tool.uv.index` entries.
        let empty = vec![];
        let project_indexes = match source_strategy {
            SourceStrategy::Enabled => project_workspace
                .current_project()
                .pyproject_toml()
                .tool
                .as_ref()
                .and_then(|tool| tool.uv.as_ref())
                .and_then(|uv| uv.index.as_deref())
                .unwrap_or(&empty),
            SourceStrategy::Disabled => &empty,
        };

        // Collect any `tool.uv.sources` and `tool.uv.dev_dependencies` from `pyproject.toml`.
        let empty = BTreeMap::default();
        let project_sources = match source_strategy {
            SourceStrategy::Enabled => project_workspace
                .current_project()
                .pyproject_toml()
                .tool
                .as_ref()
                .and_then(|tool| tool.uv.as_ref())
                .and_then(|uv| uv.sources.as_ref())
                .map(ToolUvSources::inner)
                .unwrap_or(&empty),
            SourceStrategy::Disabled => &empty,
        };

        let dependency_groups = FlatDependencyGroups::from_pyproject_toml(
            project_workspace.current_project().root(),
            project_workspace.current_project().pyproject_toml(),
        )?;

        // Now that we've resolved the dependency groups, we can validate that each source references
        // a valid extra or group, if present.
        Self::validate_sources(project_sources, &metadata, &dependency_groups)?;

        // Lower the dependency groups.
        let dependency_groups = dependency_groups
            .into_iter()
            .map(|(name, flat_group)| {
                let requirements = match source_strategy {
                    SourceStrategy::Enabled => flat_group
                        .requirements
                        .into_iter()
                        .flat_map(|requirement| {
                            let requirement_name = requirement.name.clone();
                            let group = name.clone();
                            let extra = None;
                            LoweredRequirement::from_requirement(
                                requirement,
                                Some(&metadata.name),
                                project_workspace.project_root(),
                                project_sources,
                                project_indexes,
                                extra,
                                Some(&group),
                                locations,
                                project_workspace.workspace(),
                                git_member,
                                credentials_cache,
                            )
                            .map(
                                move |requirement| match requirement {
                                    Ok(requirement) => Ok(requirement.into_inner()),
                                    Err(err) => Err(MetadataError::GroupLoweringError(
                                        group.clone(),
                                        requirement_name.clone(),
                                        Box::new(err),
                                    )),
                                },
                            )
                        })
                        .collect::<Result<Box<_>, _>>(),
                    SourceStrategy::Disabled => Ok(flat_group
                        .requirements
                        .into_iter()
                        .map(Requirement::from)
                        .collect()),
                }?;
                Ok::<(GroupName, Box<_>), MetadataError>((name, requirements))
            })
            .collect::<Result<BTreeMap<_, _>, _>>()?;

        // Lower the requirements.
        let requires_dist = Box::into_iter(metadata.requires_dist);
        let requires_dist = match source_strategy {
            SourceStrategy::Enabled => requires_dist
                .flat_map(|requirement| {
                    let requirement_name = requirement.name.clone();
                    let extra = requirement.marker.top_level_extra_name();
                    let group = None;
                    LoweredRequirement::from_requirement(
                        requirement,
                        Some(&metadata.name),
                        project_workspace.project_root(),
                        project_sources,
                        project_indexes,
                        extra.as_deref(),
                        group,
                        locations,
                        project_workspace.workspace(),
                        git_member,
                        credentials_cache,
                    )
                    .map(move |requirement| match requirement {
                        Ok(requirement) => Ok(requirement.into_inner()),
                        Err(err) => Err(MetadataError::LoweringError(
                            requirement_name.clone(),
                            Box::new(err),
                        )),
                    })
                })
                .collect::<Result<Box<_>, _>>()?,
            SourceStrategy::Disabled => requires_dist.into_iter().map(Requirement::from).collect(),
        };

        Ok(Self {
            name: metadata.name,
            requires_dist,
            dependency_groups,
            provides_extra: metadata.provides_extra,
            dynamic: metadata.dynamic,
        })
    }

    /// Validate the sources for a given [`uv_pypi_types::RequiresDist`].
    ///
    /// If a source is requested with an `extra` or `group`, ensure that the relevant dependency is
    /// present in the relevant `project.optional-dependencies` or `dependency-groups` section.
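    ///
    /// For example (an illustrative sketch, not taken from a real project), a source mapped to a
    /// `docs` extra would be rejected here if the project declares no such extra:
    ///
    /// ```toml
    /// [project]
    /// name = "example"
    /// version = "0.1.0"
    /// dependencies = []
    ///
    /// [tool.uv.sources]
    /// child = { workspace = true, extra = "docs" }
    /// ```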
    fn validate_sources(
        sources: &BTreeMap<PackageName, Sources>,
        metadata: &uv_pypi_types::RequiresDist,
        dependency_groups: &FlatDependencyGroups,
    ) -> Result<(), MetadataError> {
        for (name, sources) in sources {
            for source in sources.iter() {
                if let Some(extra) = source.extra() {
                    // If the extra doesn't exist at all, error.
                    if !metadata.provides_extra.contains(extra) {
                        return Err(MetadataError::MissingSourceExtra(
                            name.clone(),
                            extra.clone(),
                        ));
                    }

                    // If there is no such requirement with the extra, error.
                    if !metadata.requires_dist.iter().any(|requirement| {
                        requirement.name == *name
                            && requirement.marker.top_level_extra_name().as_deref() == Some(extra)
                    }) {
                        return Err(MetadataError::IncompleteSourceExtra(
                            name.clone(),
                            extra.clone(),
                        ));
                    }
                }

                if let Some(group) = source.group() {
                    // If the group doesn't exist at all, error.
                    let Some(flat_group) = dependency_groups.get(group) else {
                        return Err(MetadataError::MissingSourceGroup(
                            name.clone(),
                            group.clone(),
                        ));
                    };

                    // If there is no such requirement with the group, error.
                    if !flat_group
                        .requirements
                        .iter()
                        .any(|requirement| requirement.name == *name)
                    {
                        return Err(MetadataError::IncompleteSourceGroup(
                            name.clone(),
                            group.clone(),
                        ));
                    }
                }
            }
        }

        Ok(())
    }
}

impl From<Metadata> for RequiresDist {
    fn from(metadata: Metadata) -> Self {
        Self {
            name: metadata.name,
            requires_dist: metadata.requires_dist,
            provides_extra: metadata.provides_extra,
            dependency_groups: metadata.dependency_groups,
            dynamic: metadata.dynamic,
        }
    }
}

/// Like [`uv_pypi_types::RequiresDist`], but with any recursive (or self-referential) dependencies
/// resolved.
///
/// For example, given:
/// ```toml
/// [project]
/// name = "example"
/// version = "0.1.0"
/// requires-python = ">=3.13.0"
/// dependencies = []
///
/// [project.optional-dependencies]
/// all = [
///     "example[async]",
/// ]
/// async = [
///     "fastapi",
/// ]
/// ```
///
/// A build backend could return:
/// ```txt
/// Metadata-Version: 2.2
/// Name: example
/// Version: 0.1.0
/// Requires-Python: >=3.13.0
/// Provides-Extra: all
/// Requires-Dist: example[async]; extra == "all"
/// Provides-Extra: async
/// Requires-Dist: fastapi; extra == "async"
/// ```
///
/// Or:
/// ```txt
/// Metadata-Version: 2.4
/// Name: example
/// Version: 0.1.0
/// Requires-Python: >=3.13.0
/// Provides-Extra: all
/// Requires-Dist: fastapi; extra == 'all'
/// Provides-Extra: async
/// Requires-Dist: fastapi; extra == 'async'
/// ```
///
/// The [`FlatRequiresDist`] struct is used to flatten out the recursive dependencies, i.e., convert
/// from the former to the latter.
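///
/// See `test_flat_requires_dist_transitive` in the test module below for a small sketch of this
/// flattening in code.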
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FlatRequiresDist(Box<[Requirement]>);

impl FlatRequiresDist {
    /// Flatten a set of requirements, resolving any self-references.
    pub fn from_requirements(requirements: Box<[Requirement]>, name: &PackageName) -> Self {
        // If there are no self-references, we can return early.
        if requirements.iter().all(|req| req.name != *name) {
            return Self(requirements);
        }

        // Memoize the top level extras, in the same order as `requirements`
        let top_level_extras: Vec<_> = requirements
            .iter()
            .map(|req| req.marker.top_level_extra_name())
            .collect();

        // Transitively process all extras that are recursively included.
        let mut flattened = requirements.to_vec();
        let mut seen = FxHashSet::<(ExtraName, MarkerTree)>::default();
        let mut queue: VecDeque<_> = flattened
            .iter()
            .filter(|req| req.name == *name)
            .flat_map(|req| req.extras.iter().cloned().map(|extra| (extra, req.marker)))
            .collect();
        while let Some((extra, marker)) = queue.pop_front() {
            if !seen.insert((extra.clone(), marker)) {
                continue;
            }

            // Find the requirements for the extra.
            for (requirement, top_level_extra) in requirements.iter().zip(top_level_extras.iter()) {
                if top_level_extra.as_deref() != Some(&extra) {
                    continue;
                }
                let requirement = {
                    let mut marker = marker;
                    marker.and(requirement.marker);
                    Requirement {
                        name: requirement.name.clone(),
                        extras: requirement.extras.clone(),
                        groups: requirement.groups.clone(),
                        source: requirement.source.clone(),
                        origin: requirement.origin.clone(),
                        marker: marker.simplify_extras(slice::from_ref(&extra)),
                    }
                };
                if requirement.name == *name {
                    // Add each transitively included extra.
                    queue.extend(
                        requirement
                            .extras
                            .iter()
                            .cloned()
                            .map(|extra| (extra, requirement.marker)),
                    );
                } else {
                    // Add the requirements for that extra.
                    flattened.push(requirement);
                }
            }
        }

        // Drop all the self-references now that we've flattened them out.
        flattened.retain(|req| req.name != *name);
        // Retain any self-constraints for that extra, e.g., if `project[foo]` includes
        // `project[bar]>1.0` as a dependency, we need to propagate `project>1.0` in addition to
        // transitively expanding `project[bar]`.
        for req in &requirements {
            if req.name == *name {
                if !req.source.is_empty() {
                    flattened.push(Requirement {
                        name: req.name.clone(),
                        extras: Box::new([]),
                        groups: req.groups.clone(),
                        source: req.source.clone(),
                        origin: req.origin.clone(),
                        marker: req.marker,
                    });
                }
            }
        }

        Self(flattened.into_boxed_slice())
    }

    /// Consume the [`FlatRequiresDist`] and return the inner requirements.
    pub fn into_inner(self) -> Box<[Requirement]> {
        self.0
    }
}

impl IntoIterator for FlatRequiresDist {
    type Item = Requirement;
    type IntoIter = <Box<[Requirement]> as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        Box::into_iter(self.0)
    }
}

#[cfg(test)]
mod test {
    use std::path::Path;
    use std::str::FromStr;

    use anyhow::Context;
    use indoc::indoc;
    use insta::assert_snapshot;
    use uv_auth::CredentialsCache;
    use uv_configuration::SourceStrategy;
    use uv_distribution_types::IndexLocations;
    use uv_normalize::PackageName;
    use uv_pep508::Requirement;
    use uv_workspace::pyproject::PyProjectToml;
    use uv_workspace::{DiscoveryOptions, ProjectWorkspace, WorkspaceCache};

    use crate::RequiresDist;
    use crate::metadata::requires_dist::FlatRequiresDist;

    async fn requires_dist_from_pyproject_toml(contents: &str) -> anyhow::Result<RequiresDist> {
        let pyproject_toml = PyProjectToml::from_string(contents.to_string())?;
        let path = Path::new("pyproject.toml");
        let project_workspace = ProjectWorkspace::from_project(
            path,
            pyproject_toml
                .project
                .as_ref()
                .context("metadata field project not found")?,
            &pyproject_toml,
            &DiscoveryOptions {
                stop_discovery_at: Some(path.to_path_buf()),
                ..DiscoveryOptions::default()
            },
            &WorkspaceCache::default(),
        )
        .await?;
        let pyproject_toml = uv_pypi_types::PyProjectToml::from_toml(contents)?;
        let requires_dist = uv_pypi_types::RequiresDist::from_pyproject_toml(pyproject_toml)?;
        Ok(RequiresDist::from_project_workspace(
            requires_dist,
            &project_workspace,
            None,
            &IndexLocations::default(),
            SourceStrategy::default(),
            &CredentialsCache::new(),
        )?)
    }

    async fn format_err(input: &str) -> String {
        use std::fmt::Write;

        let err = requires_dist_from_pyproject_toml(input).await.unwrap_err();
        let mut causes = err.chain();
        let mut message = String::new();
        let _ = writeln!(message, "error: {}", causes.next().unwrap());
        for err in causes {
            let _ = writeln!(message, "  Caused by: {err}");
        }
        message
    }

    #[tokio::test]
    async fn wrong_type() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = true
        "#};

        assert_snapshot!(format_err(input).await, @r###"
        error: TOML parse error at line 8, column 8
          |
        8 | tqdm = true
          |        ^^^^
        invalid type: boolean `true`, expected a single source (as a map) or list of sources

        "###);
    }

    #[tokio::test]
    async fn too_many_git_specs() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = { git = "https://github.com/tqdm/tqdm", rev = "baaaaaab", tag = "v1.0.0" }
        "#};

        assert_snapshot!(format_err(input).await, @r###"
        error: TOML parse error at line 8, column 8
          |
        8 | tqdm = { git = "https://github.com/tqdm/tqdm", rev = "baaaaaab", tag = "v1.0.0" }
          |        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        expected at most one of `rev`, `tag`, or `branch`
        "###);
    }

    #[tokio::test]
    async fn too_many_git_typo() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: TOML parse error at line 8, column 48
          |
        8 | tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" }
          |                                                ^^^
        unknown field `ref`, expected one of `git`, `subdirectory`, `rev`, `tag`, `branch`, `lfs`, `url`, `path`, `editable`, `package`, `index`, `workspace`, `marker`, `extra`, `group`
        "#);
    }

    #[tokio::test]
    async fn extra_and_group() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = []

            [tool.uv.sources]
            tqdm = { git = "https://github.com/tqdm/tqdm", extra = "torch", group = "dev" }
        "#};

        assert_snapshot!(format_err(input).await, @r###"
        error: TOML parse error at line 7, column 8
          |
        7 | tqdm = { git = "https://github.com/tqdm/tqdm", extra = "torch", group = "dev" }
          |        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        cannot specify both `extra` and `group`
        "###);
    }

    #[tokio::test]
    async fn you_cant_mix_those() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = { path = "tqdm", index = "torch" }
        "#};

        assert_snapshot!(format_err(input).await, @r###"
        error: TOML parse error at line 8, column 8
          |
        8 | tqdm = { path = "tqdm", index = "torch" }
          |        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        cannot specify both `path` and `index`
        "###);
    }

    #[tokio::test]
    async fn missing_constraint() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
        "#};
        assert!(requires_dist_from_pyproject_toml(input).await.is_ok());
    }

    #[tokio::test]
    async fn invalid_syntax() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm ==4.66.0",
            ]
            [tool.uv.sources]
            tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: TOML parse error at line 8, column 16
          |
        8 | tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
          |                ^
        missing opening quote, expected `"`
        "#);
    }

    #[tokio::test]
    async fn invalid_url() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm ==4.66.0",
            ]
            [tool.uv.sources]
            tqdm = { url = "§invalid#+#*Ä" }
        "#};

        assert_snapshot!(format_err(input).await, @r###"
        error: TOML parse error at line 8, column 16
          |
        8 | tqdm = { url = "§invalid#+#*Ä" }
          |                ^^^^^^^^^^^^^^^^^
        relative URL without a base: "§invalid#+#*Ä"
        "###);
    }

    #[tokio::test]
    async fn workspace_and_url_spec() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm @ git+https://github.com/tqdm/tqdm",
            ]
            [tool.uv.sources]
            tqdm = { workspace = true }
        "#};

        assert_snapshot!(format_err(input).await, @r###"
        error: Failed to parse entry: `tqdm`
          Caused by: `tqdm` references a workspace in `tool.uv.sources` (e.g., `tqdm = { workspace = true }`), but is not a workspace member
        "###);
    }

    #[tokio::test]
    async fn missing_workspace_package() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm ==4.66.0",
            ]
            [tool.uv.sources]
            tqdm = { workspace = true }
        "#};

        assert_snapshot!(format_err(input).await, @r###"
        error: Failed to parse entry: `tqdm`
          Caused by: `tqdm` references a workspace in `tool.uv.sources` (e.g., `tqdm = { workspace = true }`), but is not a workspace member
        "###);
    }

    #[tokio::test]
    async fn cant_be_dynamic() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dynamic = [
                "dependencies"
            ]
            [tool.uv.sources]
            tqdm = { workspace = true }
        "#};

        assert_snapshot!(format_err(input).await, @r###"
        error: The following field was marked as dynamic: dependencies
        "###);
    }

    #[tokio::test]
    async fn missing_project_section() {
        let input = indoc! {"
            [tool.uv.sources]
            tqdm = { workspace = true }
        "};

        assert_snapshot!(format_err(input).await, @r###"
        error: metadata field project not found
        "###);
    }

    #[test]
    fn test_flat_requires_dist_noop() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev'")
                .unwrap()
                .into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }

    #[test]
    fn test_flat_requires_dist_basic() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("pkg[dev]; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev'")
                .unwrap()
                .into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'test'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }

    #[test]
    fn test_flat_requires_dist_with_markers() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = vec![
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("pkg[dev]; extra == 'test' and sys_platform == 'win32'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev' and sys_platform == 'win32'")
                .unwrap()
                .into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev' and sys_platform == 'win32'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'test' and sys_platform == 'win32'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }

    #[test]
    fn test_flat_requires_dist_self_constraint() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev'")
                .unwrap()
                .into(),
            Requirement::from_str("pkg[async]==1.0.0").unwrap().into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev'")
                    .unwrap()
                    .into(),
                Requirement::from_str("pkg==1.0.0").unwrap().into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }
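
    // A hedged sketch of the transitive case from the `FlatRequiresDist` docs above: the `all`
    // extra pulls in `pkg[async]`, which in turn pulls in `fastapi`. The expected ordering is an
    // assumption based on how `from_requirements` appends flattened requirements.
    #[test]
    fn test_flat_requires_dist_transitive() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("pkg[async]; extra == 'all'")
                .unwrap()
                .into(),
            Requirement::from_str("fastapi; extra == 'async'")
                .unwrap()
                .into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("fastapi; extra == 'async'")
                    .unwrap()
                    .into(),
                Requirement::from_str("fastapi; extra == 'all'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }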
}