uv_distribution/metadata/
requires_dist.rs

1use std::collections::{BTreeMap, VecDeque};
2use std::path::Path;
3use std::slice;
4
5use rustc_hash::FxHashSet;
6
7use uv_configuration::SourceStrategy;
8use uv_distribution_types::{IndexLocations, Requirement};
9use uv_normalize::{ExtraName, GroupName, PackageName};
10use uv_pep508::MarkerTree;
11use uv_workspace::dependency_groups::FlatDependencyGroups;
12use uv_workspace::pyproject::{Sources, ToolUvSources};
13use uv_workspace::{DiscoveryOptions, MemberDiscovery, ProjectWorkspace, WorkspaceCache};
14
15use crate::Metadata;
16use crate::metadata::{GitWorkspaceMember, LoweredRequirement, MetadataError};
17
/// The dependency metadata of a distribution: its requirements, extras, and dependency groups,
/// with requirements lowered against any `tool.uv` configuration where applicable.
#[derive(Debug, Clone)]
pub struct RequiresDist {
    /// The normalized name of the package.
    pub name: PackageName,
    /// The (lowered) `Requires-Dist` requirements.
    pub requires_dist: Box<[Requirement]>,
    /// The extras advertised via `Provides-Extra`.
    pub provides_extra: Box<[ExtraName]>,
    /// The (lowered) requirements of each dependency group, keyed by group name.
    pub dependency_groups: BTreeMap<GroupName, Box<[Requirement]>>,
    /// Whether the underlying metadata was marked as dynamic (propagated from
    /// `uv_pypi_types::RequiresDist`).
    pub dynamic: bool,
}
26
impl RequiresDist {
    /// Lower without considering `tool.uv` in `pyproject.toml`, used for index and other archive
    /// dependencies.
    ///
    /// Requirements are converted as-is, and no dependency groups are attached.
    pub fn from_metadata23(metadata: uv_pypi_types::RequiresDist) -> Self {
        Self {
            name: metadata.name,
            // Fully-qualified call to iterate the boxed slice by value; presumably chosen to
            // avoid the pre-2024-edition method resolution that yields references — TODO confirm
            // against the crate's edition.
            requires_dist: Box::into_iter(metadata.requires_dist)
                .map(Requirement::from)
                .collect(),
            provides_extra: metadata.provides_extra,
            // No `pyproject.toml` is consulted here, so there are no dependency groups.
            dependency_groups: BTreeMap::default(),
            dynamic: metadata.dynamic,
        }
    }

    /// Lower by considering `tool.uv` in `pyproject.toml` if present, used for Git and directory
    /// dependencies.
    ///
    /// Attempts project/workspace discovery at `install_path`; if no project is found there,
    /// falls back to [`Self::from_metadata23`].
    ///
    /// # Errors
    ///
    /// Returns an error if workspace discovery or requirement lowering fails.
    pub async fn from_project_maybe_workspace(
        metadata: uv_pypi_types::RequiresDist,
        install_path: &Path,
        git_member: Option<&GitWorkspaceMember<'_>>,
        locations: &IndexLocations,
        sources: SourceStrategy,
        cache: &WorkspaceCache,
    ) -> Result<Self, MetadataError> {
        let discovery = DiscoveryOptions {
            // For Git dependencies, never walk above the checkout when discovering the
            // enclosing workspace.
            stop_discovery_at: git_member.map(|git_member| {
                git_member
                    .fetch_root
                    .parent()
                    .expect("git checkout has a parent")
                    .to_path_buf()
            }),
            // With sources disabled, skip workspace-member discovery entirely.
            members: match sources {
                SourceStrategy::Enabled => MemberDiscovery::default(),
                SourceStrategy::Disabled => MemberDiscovery::None,
            },
            ..DiscoveryOptions::default()
        };
        let Some(project_workspace) =
            ProjectWorkspace::from_maybe_project_root(install_path, &discovery, cache).await?
        else {
            // Not a project root: lower without `tool.uv` context.
            return Ok(Self::from_metadata23(metadata));
        };

        Self::from_project_workspace(metadata, &project_workspace, git_member, locations, sources)
    }

    /// Lower the metadata against a discovered [`ProjectWorkspace`], applying any
    /// `tool.uv.sources` and `tool.uv.index` entries when `source_strategy` is enabled.
    fn from_project_workspace(
        metadata: uv_pypi_types::RequiresDist,
        project_workspace: &ProjectWorkspace,
        git_member: Option<&GitWorkspaceMember<'_>>,
        locations: &IndexLocations,
        source_strategy: SourceStrategy,
    ) -> Result<Self, MetadataError> {
        // Collect any `tool.uv.index` entries.
        let empty = vec![];
        let project_indexes = match source_strategy {
            SourceStrategy::Enabled => project_workspace
                .current_project()
                .pyproject_toml()
                .tool
                .as_ref()
                .and_then(|tool| tool.uv.as_ref())
                .and_then(|uv| uv.index.as_deref())
                .unwrap_or(&empty),
            SourceStrategy::Disabled => &empty,
        };

        // Collect any `tool.uv.sources` from `pyproject.toml`.
        let empty = BTreeMap::default();
        let project_sources = match source_strategy {
            SourceStrategy::Enabled => project_workspace
                .current_project()
                .pyproject_toml()
                .tool
                .as_ref()
                .and_then(|tool| tool.uv.as_ref())
                .and_then(|uv| uv.sources.as_ref())
                .map(ToolUvSources::inner)
                .unwrap_or(&empty),
            SourceStrategy::Disabled => &empty,
        };

        // Resolve the `dependency-groups` table (including any `include-group` expansion
        // performed by `FlatDependencyGroups`).
        let dependency_groups = FlatDependencyGroups::from_pyproject_toml(
            project_workspace.current_project().root(),
            project_workspace.current_project().pyproject_toml(),
        )?;

        // Now that we've resolved the dependency groups, we can validate that each source references
        // a valid extra or group, if present.
        Self::validate_sources(project_sources, &metadata, &dependency_groups)?;

        // Lower the dependency groups.
        let dependency_groups = dependency_groups
            .into_iter()
            .map(|(name, flat_group)| {
                let requirements = match source_strategy {
                    SourceStrategy::Enabled => flat_group
                        .requirements
                        .into_iter()
                        .flat_map(|requirement| {
                            // Cloned up front so the error path can name the offending
                            // requirement and group after the requirement has been consumed.
                            let requirement_name = requirement.name.clone();
                            let group = name.clone();
                            let extra = None;
                            LoweredRequirement::from_requirement(
                                requirement,
                                Some(&metadata.name),
                                project_workspace.project_root(),
                                project_sources,
                                project_indexes,
                                extra,
                                Some(&group),
                                locations,
                                project_workspace.workspace(),
                                git_member,
                            )
                            .map(
                                move |requirement| match requirement {
                                    Ok(requirement) => Ok(requirement.into_inner()),
                                    Err(err) => Err(MetadataError::GroupLoweringError(
                                        group.clone(),
                                        requirement_name.clone(),
                                        Box::new(err),
                                    )),
                                },
                            )
                        })
                        // Short-circuits on the first lowering error.
                        .collect::<Result<Box<_>, _>>(),
                    // Sources disabled: take the requirements verbatim, without lowering.
                    SourceStrategy::Disabled => Ok(flat_group
                        .requirements
                        .into_iter()
                        .map(Requirement::from)
                        .collect()),
                }?;
                Ok::<(GroupName, Box<_>), MetadataError>((name, requirements))
            })
            .collect::<Result<BTreeMap<_, _>, _>>()?;

        // Lower the requirements.
        let requires_dist = Box::into_iter(metadata.requires_dist);
        let requires_dist = match source_strategy {
            SourceStrategy::Enabled => requires_dist
                .flat_map(|requirement| {
                    let requirement_name = requirement.name.clone();
                    // A requirement guarded by `extra == '...'` is lowered in the context of
                    // that extra.
                    let extra = requirement.marker.top_level_extra_name();
                    let group = None;
                    LoweredRequirement::from_requirement(
                        requirement,
                        Some(&metadata.name),
                        project_workspace.project_root(),
                        project_sources,
                        project_indexes,
                        extra.as_deref(),
                        group,
                        locations,
                        project_workspace.workspace(),
                        git_member,
                    )
                    .map(move |requirement| match requirement {
                        Ok(requirement) => Ok(requirement.into_inner()),
                        Err(err) => Err(MetadataError::LoweringError(
                            requirement_name.clone(),
                            Box::new(err),
                        )),
                    })
                })
                .collect::<Result<Box<_>, _>>()?,
            SourceStrategy::Disabled => requires_dist.into_iter().map(Requirement::from).collect(),
        };

        Ok(Self {
            name: metadata.name,
            requires_dist,
            dependency_groups,
            provides_extra: metadata.provides_extra,
            dynamic: metadata.dynamic,
        })
    }

    /// Validate the sources for a given [`uv_pypi_types::RequiresDist`].
    ///
    /// If a source is requested with an `extra` or `group`, ensure that the relevant dependency is
    /// present in the relevant `project.optional-dependencies` or `dependency-groups` section.
    fn validate_sources(
        sources: &BTreeMap<PackageName, Sources>,
        metadata: &uv_pypi_types::RequiresDist,
        dependency_groups: &FlatDependencyGroups,
    ) -> Result<(), MetadataError> {
        for (name, sources) in sources {
            for source in sources.iter() {
                if let Some(extra) = source.extra() {
                    // If the extra doesn't exist at all, error.
                    if !metadata.provides_extra.contains(extra) {
                        return Err(MetadataError::MissingSourceExtra(
                            name.clone(),
                            extra.clone(),
                        ));
                    }

                    // If there is no such requirement with the extra, error.
                    if !metadata.requires_dist.iter().any(|requirement| {
                        requirement.name == *name
                            && requirement.marker.top_level_extra_name().as_deref() == Some(extra)
                    }) {
                        return Err(MetadataError::IncompleteSourceExtra(
                            name.clone(),
                            extra.clone(),
                        ));
                    }
                }

                if let Some(group) = source.group() {
                    // If the group doesn't exist at all, error.
                    let Some(flat_group) = dependency_groups.get(group) else {
                        return Err(MetadataError::MissingSourceGroup(
                            name.clone(),
                            group.clone(),
                        ));
                    };

                    // If there is no such requirement with the group, error.
                    if !flat_group
                        .requirements
                        .iter()
                        .any(|requirement| requirement.name == *name)
                    {
                        return Err(MetadataError::IncompleteSourceGroup(
                            name.clone(),
                            group.clone(),
                        ));
                    }
                }
            }
        }

        Ok(())
    }
}
266
267impl From<Metadata> for RequiresDist {
268    fn from(metadata: Metadata) -> Self {
269        Self {
270            name: metadata.name,
271            requires_dist: metadata.requires_dist,
272            provides_extra: metadata.provides_extra,
273            dependency_groups: metadata.dependency_groups,
274            dynamic: metadata.dynamic,
275        }
276    }
277}
278
/// Like [`uv_pypi_types::RequiresDist`], but with any recursive (or self-referential) dependencies
/// resolved.
///
/// For example, given:
/// ```toml
/// [project]
/// name = "example"
/// version = "0.1.0"
/// requires-python = ">=3.13.0"
/// dependencies = []
///
/// [project.optional-dependencies]
/// all = [
///     "example[async]",
/// ]
/// async = [
///     "fastapi",
/// ]
/// ```
///
/// A build backend could return:
/// ```txt
/// Metadata-Version: 2.2
/// Name: example
/// Version: 0.1.0
/// Requires-Python: >=3.13.0
/// Provides-Extra: all
/// Requires-Dist: example[async]; extra == "all"
/// Provides-Extra: async
/// Requires-Dist: fastapi; extra == "async"
/// ```
///
/// Or:
/// ```txt
/// Metadata-Version: 2.4
/// Name: example
/// Version: 0.1.0
/// Requires-Python: >=3.13.0
/// Provides-Extra: all
/// Requires-Dist: fastapi; extra == 'all'
/// Provides-Extra: async
/// Requires-Dist: fastapi; extra == 'async'
/// ```
///
/// The [`FlatRequiresDist`] struct is used to flatten out the recursive dependencies, i.e., convert
/// from the former to the latter.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FlatRequiresDist(/* the flattened requirements, with self-referential extras expanded */ Box<[Requirement]>);
327
impl FlatRequiresDist {
    /// Flatten a set of requirements, resolving any self-references.
    ///
    /// Self-references (requirements on `name` itself, e.g., `pkg[extra]`) are replaced by the
    /// requirements of the referenced extras, with markers combined. Any residual constraint on
    /// the self-reference (e.g., a version bound) is retained as a bare requirement on `name`.
    pub fn from_requirements(requirements: Box<[Requirement]>, name: &PackageName) -> Self {
        // If there are no self-references, we can return early.
        if requirements.iter().all(|req| req.name != *name) {
            return Self(requirements);
        }

        // Memoize the top level extras, in the same order as `requirements`
        let top_level_extras: Vec<_> = requirements
            .iter()
            .map(|req| req.marker.top_level_extra_name())
            .collect();

        // Transitively process all extras that are recursively included.
        let mut flattened = requirements.to_vec();
        // Track `(extra, marker)` pairs so each combination is expanded at most once; this also
        // guards against cyclic self-references (e.g., `all` including itself transitively).
        let mut seen = FxHashSet::<(ExtraName, MarkerTree)>::default();
        // Seed the queue with every extra requested on a self-reference, paired with the
        // marker guarding that self-reference.
        let mut queue: VecDeque<_> = flattened
            .iter()
            .filter(|req| req.name == *name)
            .flat_map(|req| req.extras.iter().cloned().map(|extra| (extra, req.marker)))
            .collect();
        while let Some((extra, marker)) = queue.pop_front() {
            if !seen.insert((extra.clone(), marker)) {
                continue;
            }

            // Find the requirements for the extra.
            for (requirement, top_level_extra) in requirements.iter().zip(top_level_extras.iter()) {
                if top_level_extra.as_deref() != Some(&extra) {
                    continue;
                }
                let requirement = {
                    // Conjoin the guarding marker with the requirement's own marker, then drop
                    // the now-satisfied `extra == '...'` clause.
                    let mut marker = marker;
                    marker.and(requirement.marker);
                    Requirement {
                        name: requirement.name.clone(),
                        extras: requirement.extras.clone(),
                        groups: requirement.groups.clone(),
                        source: requirement.source.clone(),
                        origin: requirement.origin.clone(),
                        marker: marker.simplify_extras(slice::from_ref(&extra)),
                    }
                };
                if requirement.name == *name {
                    // Add each transitively included extra.
                    queue.extend(
                        requirement
                            .extras
                            .iter()
                            .cloned()
                            .map(|extra| (extra, requirement.marker)),
                    );
                } else {
                    // Add the requirements for that extra.
                    flattened.push(requirement);
                }
            }
        }

        // Drop all the self-references now that we've flattened them out.
        flattened.retain(|req| req.name != *name);

        // Retain any self-constraints for that extra, e.g., if `project[foo]` includes
        // `project[bar]>1.0`, as a dependency, we need to propagate `project>1.0`, in addition to
        // transitively expanding `project[bar]`.
        for req in &requirements {
            if req.name == *name {
                if !req.source.is_empty() {
                    flattened.push(Requirement {
                        name: req.name.clone(),
                        // Extras were already expanded above; only the bare constraint remains.
                        extras: Box::new([]),
                        groups: req.groups.clone(),
                        source: req.source.clone(),
                        origin: req.origin.clone(),
                        marker: req.marker,
                    });
                }
            }
        }

        Self(flattened.into_boxed_slice())
    }

    /// Consume the [`FlatRequiresDist`] and return the inner requirements.
    pub fn into_inner(self) -> Box<[Requirement]> {
        self.0
    }
}
417
impl IntoIterator for FlatRequiresDist {
    type Item = Requirement;
    type IntoIter = <Box<[Requirement]> as IntoIterator>::IntoIter;

    /// Iterate over the flattened requirements by value.
    fn into_iter(self) -> Self::IntoIter {
        // Fully-qualified call to get the by-value boxed-slice iterator; plain
        // `self.0.into_iter()` can resolve through the deref to a by-reference iterator
        // on editions prior to 2024 — presumably why this form was chosen.
        Box::into_iter(self.0)
    }
}
426
#[cfg(test)]
mod test {
    use std::path::Path;
    use std::str::FromStr;

    use anyhow::Context;
    use indoc::indoc;
    use insta::assert_snapshot;

    use uv_configuration::SourceStrategy;
    use uv_distribution_types::IndexLocations;
    use uv_normalize::PackageName;
    use uv_pep508::Requirement;
    use uv_workspace::pyproject::PyProjectToml;
    use uv_workspace::{DiscoveryOptions, ProjectWorkspace, WorkspaceCache};

    use crate::RequiresDist;
    use crate::metadata::requires_dist::FlatRequiresDist;

    /// Parse a `pyproject.toml` string and lower it to a [`RequiresDist`], with workspace
    /// discovery pinned to the file itself (so no surrounding workspace is picked up).
    async fn requires_dist_from_pyproject_toml(contents: &str) -> anyhow::Result<RequiresDist> {
        let pyproject_toml = PyProjectToml::from_string(contents.to_string())?;
        let path = Path::new("pyproject.toml");
        let project_workspace = ProjectWorkspace::from_project(
            path,
            pyproject_toml
                .project
                .as_ref()
                .context("metadata field project not found")?,
            &pyproject_toml,
            &DiscoveryOptions {
                stop_discovery_at: Some(path.to_path_buf()),
                ..DiscoveryOptions::default()
            },
            &WorkspaceCache::default(),
        )
        .await?;
        let pyproject_toml = uv_pypi_types::PyProjectToml::from_toml(contents)?;
        let requires_dist = uv_pypi_types::RequiresDist::from_pyproject_toml(pyproject_toml)?;
        Ok(RequiresDist::from_project_workspace(
            requires_dist,
            &project_workspace,
            None,
            &IndexLocations::default(),
            SourceStrategy::default(),
        )?)
    }

    /// Render the full error chain produced by a failing `pyproject.toml`, one cause per line,
    /// for snapshot comparison.
    async fn format_err(input: &str) -> String {
        use std::fmt::Write;

        let err = requires_dist_from_pyproject_toml(input).await.unwrap_err();
        let mut causes = err.chain();
        let mut message = String::new();
        let _ = writeln!(message, "error: {}", causes.next().unwrap());
        for err in causes {
            let _ = writeln!(message, "  Caused by: {err}");
        }
        message
    }

    /// A non-table, non-array value in `tool.uv.sources` is rejected at TOML parse time.
    #[tokio::test]
    async fn wrong_type() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = true
        "#};

        assert_snapshot!(format_err(input).await, @r###"
        error: TOML parse error at line 8, column 8
          |
        8 | tqdm = true
          |        ^^^^
        invalid type: boolean `true`, expected a single source (as a map) or list of sources

        "###);
    }

    /// A Git source may specify at most one of `rev`, `tag`, or `branch`.
    #[tokio::test]
    async fn too_many_git_specs() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = { git = "https://github.com/tqdm/tqdm", rev = "baaaaaab", tag = "v1.0.0" }
        "#};

        assert_snapshot!(format_err(input).await, @r###"
        error: TOML parse error at line 8, column 8
          |
        8 | tqdm = { git = "https://github.com/tqdm/tqdm", rev = "baaaaaab", tag = "v1.0.0" }
          |        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        expected at most one of `rev`, `tag`, or `branch`
        "###);
    }

    /// An unknown field in a Git source (here, `ref` instead of `rev`) produces an error that
    /// lists the accepted field names.
    #[tokio::test]
    async fn too_many_git_typo() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" }
        "#};

        assert_snapshot!(format_err(input).await, @r###"
        error: TOML parse error at line 8, column 48
          |
        8 | tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" }
          |                                                ^^^
        unknown field `ref`, expected one of `git`, `subdirectory`, `rev`, `tag`, `branch`, `url`, `path`, `editable`, `package`, `index`, `workspace`, `marker`, `extra`, `group`
        "###);
    }

    /// A source may target an `extra` or a `group`, but not both at once.
    #[tokio::test]
    async fn extra_and_group() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = []

            [tool.uv.sources]
            tqdm = { git = "https://github.com/tqdm/tqdm", extra = "torch", group = "dev" }
        "#};

        assert_snapshot!(format_err(input).await, @r###"
        error: TOML parse error at line 7, column 8
          |
        7 | tqdm = { git = "https://github.com/tqdm/tqdm", extra = "torch", group = "dev" }
          |        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        cannot specify both `extra` and `group`
        "###);
    }

    /// Mutually-exclusive source kinds (`path` and `index`) are rejected.
    #[tokio::test]
    async fn you_cant_mix_those() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = { path = "tqdm", index = "torch" }
        "#};

        assert_snapshot!(format_err(input).await, @r###"
        error: TOML parse error at line 8, column 8
          |
        8 | tqdm = { path = "tqdm", index = "torch" }
          |        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        cannot specify both `path` and `index`
        "###);
    }

    /// A dependency without a version constraint (and no source) is accepted.
    #[tokio::test]
    async fn missing_constraint() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
        "#};
        assert!(requires_dist_from_pyproject_toml(input).await.is_ok());
    }

    /// A syntactically invalid (unquoted) URL in a source fails at TOML parse time.
    #[tokio::test]
    async fn invalid_syntax() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm ==4.66.0",
            ]
            [tool.uv.sources]
            tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: TOML parse error at line 8, column 16
          |
        8 | tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
          |                ^
        missing opening quote, expected `"`
        "#);
    }

    /// A quoted but unparseable URL in a source is rejected with the URL parser's error.
    #[tokio::test]
    async fn invalid_url() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm ==4.66.0",
            ]
            [tool.uv.sources]
            tqdm = { url = "§invalid#+#*Ä" }
        "#};

        assert_snapshot!(format_err(input).await, @r###"
        error: TOML parse error at line 8, column 16
          |
        8 | tqdm = { url = "§invalid#+#*Ä" }
          |                ^^^^^^^^^^^^^^^^^
        relative URL without a base: "§invalid#+#*Ä"
        "###);
    }

    /// A `workspace = true` source for a package that is not a workspace member errors, even
    /// when the dependency also carries a direct URL specifier.
    #[tokio::test]
    async fn workspace_and_url_spec() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm @ git+https://github.com/tqdm/tqdm",
            ]
            [tool.uv.sources]
            tqdm = { workspace = true }
        "#};

        assert_snapshot!(format_err(input).await, @r###"
        error: Failed to parse entry: `tqdm`
          Caused by: `tqdm` references a workspace in `tool.uv.sources` (e.g., `tqdm = { workspace = true }`), but is not a workspace member
        "###);
    }

    /// A `workspace = true` source for a non-member also errors with a plain version specifier.
    #[tokio::test]
    async fn missing_workspace_package() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm ==4.66.0",
            ]
            [tool.uv.sources]
            tqdm = { workspace = true }
        "#};

        assert_snapshot!(format_err(input).await, @r###"
        error: Failed to parse entry: `tqdm`
          Caused by: `tqdm` references a workspace in `tool.uv.sources` (e.g., `tqdm = { workspace = true }`), but is not a workspace member
        "###);
    }

    /// Declaring `dependencies` as dynamic is incompatible with `tool.uv.sources`.
    #[tokio::test]
    async fn cant_be_dynamic() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dynamic = [
                "dependencies"
            ]
            [tool.uv.sources]
            tqdm = { workspace = true }
        "#};

        assert_snapshot!(format_err(input).await, @r###"
        error: The following field was marked as dynamic: dependencies
        "###);
    }

    /// A `pyproject.toml` without a `[project]` table cannot be lowered.
    #[tokio::test]
    async fn missing_project_section() {
        let input = indoc! {"
            [tool.uv.sources]
            tqdm = { workspace = true }
        "};

        assert_snapshot!(format_err(input).await, @r###"
        error: metadata field project not found
        "###);
    }

    /// Flattening is the identity when there are no self-references.
    #[test]
    fn test_flat_requires_dist_noop() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev'")
                .unwrap()
                .into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }

    /// `pkg[dev]; extra == 'test'` expands to the `dev` requirements under the `test` extra,
    /// and the self-reference itself is dropped.
    #[test]
    fn test_flat_requires_dist_basic() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("pkg[dev]; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev'")
                .unwrap()
                .into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'test'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }

    /// Non-extra marker clauses (here `sys_platform == 'win32'`) are preserved when a
    /// self-reference is expanded.
    #[test]
    fn test_flat_requires_dist_with_markers() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = vec![
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("pkg[dev]; extra == 'test' and sys_platform == 'win32'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev' and sys_platform == 'win32'")
                .unwrap()
                .into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev' and sys_platform == 'win32'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'test' and sys_platform == 'win32'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }

    /// A version constraint on a self-reference (`pkg[async]==1.0.0`) is retained as a bare
    /// `pkg==1.0.0` requirement after the extras are flattened away.
    #[test]
    fn test_flat_requires_dist_self_constraint() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev'")
                .unwrap()
                .into(),
            Requirement::from_str("pkg[async]==1.0.0").unwrap().into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev'")
                    .unwrap()
                    .into(),
                Requirement::from_str("pkg==1.0.0").unwrap().into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }
}