// uv_distribution/metadata/requires_dist.rs

1use std::collections::{BTreeMap, VecDeque};
2use std::path::Path;
3use std::slice;
4
5use rustc_hash::FxHashSet;
6
7use uv_auth::CredentialsCache;
8use uv_configuration::NoSources;
9use uv_distribution_types::{IndexLocations, Requirement};
10use uv_normalize::{ExtraName, GroupName, PackageName};
11use uv_pep508::MarkerTree;
12use uv_workspace::dependency_groups::FlatDependencyGroups;
13use uv_workspace::pyproject::{Sources, ToolUvSources};
14use uv_workspace::{DiscoveryOptions, MemberDiscovery, ProjectWorkspace, WorkspaceCache};
15
16use crate::Metadata;
17use crate::metadata::{GitWorkspaceMember, LoweredRequirement, MetadataError};
18
/// The subset of distribution metadata required for resolution: the package name, its
/// dependencies, the extras it provides, and its dependency groups.
#[derive(Debug, Clone)]
pub struct RequiresDist {
    /// The normalized name of the package.
    pub name: PackageName,
    /// The (possibly source-lowered) `Requires-Dist` runtime dependencies.
    pub requires_dist: Box<[Requirement]>,
    /// The extras advertised via `Provides-Extra`.
    pub provides_extra: Box<[ExtraName]>,
    /// The lowered `[dependency-groups]`, keyed by group name.
    pub dependency_groups: BTreeMap<GroupName, Box<[Requirement]>>,
    /// Whether any metadata fields were declared dynamic.
    pub dynamic: bool,
}
27
impl RequiresDist {
    /// Lower without considering `tool.uv` in `pyproject.toml`, used for index and other archive
    /// dependencies.
    ///
    /// Sources cannot apply in this context, so requirements are converted verbatim and no
    /// dependency groups are populated.
    pub fn from_metadata23(metadata: uv_pypi_types::RequiresDist) -> Self {
        Self {
            name: metadata.name,
            requires_dist: Box::into_iter(metadata.requires_dist)
                .map(Requirement::from)
                .collect(),
            provides_extra: metadata.provides_extra,
            // Archive metadata carries no `[dependency-groups]` table, so this is always empty.
            dependency_groups: BTreeMap::default(),
            dynamic: metadata.dynamic,
        }
    }

    /// Lower by considering `tool.uv` in `pyproject.toml` if present, used for Git and directory
    /// dependencies.
    ///
    /// Attempts to discover a project workspace rooted at `install_path`; if none exists, falls
    /// back to the plain lowering of [`RequiresDist::from_metadata23`].
    pub async fn from_project_maybe_workspace(
        metadata: uv_pypi_types::RequiresDist,
        install_path: &Path,
        git_member: Option<&GitWorkspaceMember<'_>>,
        locations: &IndexLocations,
        sources: NoSources,
        editable: bool,
        cache: &WorkspaceCache,
        credentials_cache: &CredentialsCache,
    ) -> Result<Self, MetadataError> {
        let discovery = DiscoveryOptions {
            // For Git dependencies, don't walk above the checkout when discovering the workspace.
            stop_discovery_at: git_member.map(|git_member| {
                git_member
                    .fetch_root
                    .parent()
                    .expect("git checkout has a parent")
                    .to_path_buf()
            }),
            // If sources are disabled (even partially), skip workspace-member discovery; members
            // are only relevant when `tool.uv.sources` can take effect.
            members: if sources.is_none() {
                MemberDiscovery::default()
            } else {
                MemberDiscovery::None
            },
            ..DiscoveryOptions::default()
        };
        let Some(project_workspace) =
            ProjectWorkspace::from_maybe_project_root(install_path, &discovery, cache).await?
        else {
            // No project at the install path: lower without `tool.uv`.
            return Ok(Self::from_metadata23(metadata));
        };

        Self::from_project_workspace(
            metadata,
            &project_workspace,
            git_member,
            locations,
            &sources,
            editable,
            credentials_cache,
        )
    }

    /// Lower the metadata using the discovered [`ProjectWorkspace`], applying any
    /// `tool.uv.sources` and `tool.uv.index` entries, and lowering `[dependency-groups]`.
    ///
    /// Returns a [`MetadataError`] if a source references a missing extra or group, or if any
    /// individual requirement fails to lower.
    fn from_project_workspace(
        metadata: uv_pypi_types::RequiresDist,
        project_workspace: &ProjectWorkspace,
        git_member: Option<&GitWorkspaceMember<'_>>,
        locations: &IndexLocations,
        no_sources: &NoSources,
        editable: bool,
        credentials_cache: &CredentialsCache,
    ) -> Result<Self, MetadataError> {
        // Collect any `tool.uv.index` entries.
        let empty = vec![];
        let project_indexes = project_workspace
            .current_project()
            .pyproject_toml()
            .tool
            .as_ref()
            .and_then(|tool| tool.uv.as_ref())
            .and_then(|uv| uv.index.as_deref())
            .unwrap_or(&empty);

        // Collect any `tool.uv.sources` entries from `pyproject.toml`.
        let empty = BTreeMap::default();
        let project_sources = project_workspace
            .current_project()
            .pyproject_toml()
            .tool
            .as_ref()
            .and_then(|tool| tool.uv.as_ref())
            .and_then(|uv| uv.sources.as_ref())
            .map(ToolUvSources::inner)
            .unwrap_or(&empty);

        // Flatten `[dependency-groups]` (resolving any `include-group` references).
        let dependency_groups = FlatDependencyGroups::from_pyproject_toml(
            project_workspace.current_project().root(),
            project_workspace.current_project().pyproject_toml(),
        )?;

        // Now that we've resolved the dependency groups, we can validate that each source references
        // a valid extra or group, if present.
        Self::validate_sources(project_sources, &metadata, &dependency_groups)?;

        // Lower the dependency groups.
        let dependency_groups = dependency_groups
            .into_iter()
            .map(|(name, flat_group)| {
                let requirements = flat_group
                    .requirements
                    .into_iter()
                    .flat_map(|requirement| {
                        // Check if sources should be disabled for this specific package
                        if no_sources.for_package(&requirement.name) {
                            vec![Ok(Requirement::from(requirement))].into_iter()
                        } else {
                            let requirement_name = requirement.name.clone();
                            let group = name.clone();
                            let extra = None;

                            LoweredRequirement::from_requirement(
                                requirement,
                                Some(&metadata.name),
                                project_workspace.project_root(),
                                project_sources,
                                project_indexes,
                                extra,
                                Some(&group),
                                locations,
                                project_workspace.workspace(),
                                git_member,
                                editable,
                                credentials_cache,
                            )
                            // Attach the group and requirement name to any lowering error.
                            .map(move |requirement| match requirement {
                                Ok(requirement) => Ok(requirement.into_inner()),
                                Err(err) => Err(MetadataError::GroupLoweringError(
                                    group.clone(),
                                    requirement_name.clone(),
                                    Box::new(err),
                                )),
                            })
                            .collect::<Vec<_>>()
                            .into_iter()
                        }
                    })
                    .collect::<Result<Box<_>, _>>()?;
                Ok::<(GroupName, Box<_>), MetadataError>((name, requirements))
            })
            .collect::<Result<BTreeMap<_, _>, _>>()?;

        // Lower the requirements.
        let requires_dist = Box::into_iter(metadata.requires_dist);
        let requires_dist = requires_dist
            .flat_map(|requirement| {
                // Check if sources should be disabled for this specific package
                if no_sources.for_package(&requirement.name) {
                    vec![Ok(Requirement::from(requirement))].into_iter()
                } else {
                    let requirement_name = requirement.name.clone();
                    // A requirement guarded by `extra == "..."` may have an extra-specific source.
                    let extra = requirement.marker.top_level_extra_name();
                    let group = None;

                    LoweredRequirement::from_requirement(
                        requirement,
                        Some(&metadata.name),
                        project_workspace.project_root(),
                        project_sources,
                        project_indexes,
                        extra.as_deref(),
                        group,
                        locations,
                        project_workspace.workspace(),
                        git_member,
                        editable,
                        credentials_cache,
                    )
                    .map(move |requirement| match requirement {
                        Ok(requirement) => Ok(requirement.into_inner()),
                        Err(err) => Err(MetadataError::LoweringError(
                            requirement_name.clone(),
                            Box::new(err),
                        )),
                    })
                    .collect::<Vec<_>>()
                    .into_iter()
                }
            })
            .collect::<Result<Box<_>, _>>()?;

        Ok(Self {
            name: metadata.name,
            requires_dist,
            dependency_groups,
            provides_extra: metadata.provides_extra,
            dynamic: metadata.dynamic,
        })
    }

    /// Validate the sources for a given [`uv_pypi_types::RequiresDist`].
    ///
    /// If a source is requested with an `extra` or `group`, ensure that the relevant dependency is
    /// present in the relevant `project.optional-dependencies` or `dependency-groups` section.
    fn validate_sources(
        sources: &BTreeMap<PackageName, Sources>,
        metadata: &uv_pypi_types::RequiresDist,
        dependency_groups: &FlatDependencyGroups,
    ) -> Result<(), MetadataError> {
        for (name, sources) in sources {
            for source in sources.iter() {
                if let Some(extra) = source.extra() {
                    // If the extra doesn't exist at all, error.
                    if !metadata.provides_extra.contains(extra) {
                        return Err(MetadataError::MissingSourceExtra(
                            name.clone(),
                            extra.clone(),
                        ));
                    }

                    // If there is no such requirement with the extra, error.
                    if !metadata.requires_dist.iter().any(|requirement| {
                        requirement.name == *name
                            && requirement.marker.top_level_extra_name().as_deref() == Some(extra)
                    }) {
                        return Err(MetadataError::IncompleteSourceExtra(
                            name.clone(),
                            extra.clone(),
                        ));
                    }
                }

                if let Some(group) = source.group() {
                    // If the group doesn't exist at all, error.
                    let Some(flat_group) = dependency_groups.get(group) else {
                        return Err(MetadataError::MissingSourceGroup(
                            name.clone(),
                            group.clone(),
                        ));
                    };

                    // If there is no such requirement with the group, error.
                    if !flat_group
                        .requirements
                        .iter()
                        .any(|requirement| requirement.name == *name)
                    {
                        return Err(MetadataError::IncompleteSourceGroup(
                            name.clone(),
                            group.clone(),
                        ));
                    }
                }
            }
        }

        Ok(())
    }
}
282
283impl From<Metadata> for RequiresDist {
284    fn from(metadata: Metadata) -> Self {
285        Self {
286            name: metadata.name,
287            requires_dist: metadata.requires_dist,
288            provides_extra: metadata.provides_extra,
289            dependency_groups: metadata.dependency_groups,
290            dynamic: metadata.dynamic,
291        }
292    }
293}
294
/// Like [`uv_pypi_types::RequiresDist`], but with any recursive (or self-referential) dependencies
/// resolved.
///
/// For example, given:
/// ```toml
/// [project]
/// name = "example"
/// version = "0.1.0"
/// requires-python = ">=3.13.0"
/// dependencies = []
///
/// [project.optional-dependencies]
/// all = [
///     "example[async]",
/// ]
/// async = [
///     "fastapi",
/// ]
/// ```
///
/// A build backend could return:
/// ```txt
/// Metadata-Version: 2.2
/// Name: example
/// Version: 0.1.0
/// Requires-Python: >=3.13.0
/// Provides-Extra: all
/// Requires-Dist: example[async]; extra == "all"
/// Provides-Extra: async
/// Requires-Dist: fastapi; extra == "async"
/// ```
///
/// Or:
/// ```txt
/// Metadata-Version: 2.4
/// Name: example
/// Version: 0.1.0
/// Requires-Python: >=3.13.0
/// Provides-Extra: all
/// Requires-Dist: fastapi; extra == 'all'
/// Provides-Extra: async
/// Requires-Dist: fastapi; extra == 'async'
/// ```
///
/// The [`FlatRequiresDist`] struct is used to flatten out the recursive dependencies, i.e., convert
/// from the former to the latter.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FlatRequiresDist(
    /// The flattened requirements, with self-references expanded and removed.
    Box<[Requirement]>
);
343
impl FlatRequiresDist {
    /// Flatten a set of requirements, resolving any self-references.
    ///
    /// Self-referential entries (`name[extra]`) are transitively expanded into the requirements
    /// of the referenced extras, with markers conjoined, and then removed from the result. If a
    /// self-reference also carries a version/source constraint, that constraint is retained as a
    /// plain (extra-less) requirement on the package itself.
    pub fn from_requirements(requirements: Box<[Requirement]>, name: &PackageName) -> Self {
        // If there are no self-references, we can return early.
        if requirements.iter().all(|req| req.name != *name) {
            return Self(requirements);
        }

        // Memoize the top level extras, in the same order as `requirements`
        let top_level_extras: Vec<_> = requirements
            .iter()
            .map(|req| req.marker.top_level_extra_name())
            .collect();

        // Transitively process all extras that are recursively included.
        let mut flattened = requirements.to_vec();
        // Track `(extra, marker)` pairs so each combination is expanded at most once, even if
        // extras reference each other cyclically.
        let mut seen = FxHashSet::<(ExtraName, MarkerTree)>::default();
        let mut queue: VecDeque<_> = flattened
            .iter()
            .filter(|req| req.name == *name)
            .flat_map(|req| req.extras.iter().cloned().map(|extra| (extra, req.marker)))
            .collect();
        while let Some((extra, marker)) = queue.pop_front() {
            if !seen.insert((extra.clone(), marker)) {
                continue;
            }

            // Find the requirements for the extra.
            for (requirement, top_level_extra) in requirements.iter().zip(top_level_extras.iter()) {
                if top_level_extra.as_deref() != Some(&extra) {
                    continue;
                }
                let requirement = {
                    // Conjoin the including requirement's marker with the included one's, then
                    // drop the now-satisfied `extra == '...'` clause.
                    let mut marker = marker;
                    marker.and(requirement.marker);
                    Requirement {
                        name: requirement.name.clone(),
                        extras: requirement.extras.clone(),
                        groups: requirement.groups.clone(),
                        source: requirement.source.clone(),
                        origin: requirement.origin.clone(),
                        marker: marker.simplify_extras(slice::from_ref(&extra)),
                    }
                };
                if requirement.name == *name {
                    // Add each transitively included extra.
                    queue.extend(
                        requirement
                            .extras
                            .iter()
                            .cloned()
                            .map(|extra| (extra, requirement.marker)),
                    );
                } else {
                    // Add the requirements for that extra.
                    flattened.push(requirement);
                }
            }
        }

        // Drop all the self-references now that we've flattened them out.
        flattened.retain(|req| req.name != *name);

        // Retain any self-constraints for that extra, e.g., if `project[foo]` includes
        // `project[bar]>1.0`, as a dependency, we need to propagate `project>1.0`, in addition to
        // transitively expanding `project[bar]`.
        for req in &requirements {
            if req.name == *name {
                if !req.source.is_empty() {
                    flattened.push(Requirement {
                        name: req.name.clone(),
                        extras: Box::new([]),
                        groups: req.groups.clone(),
                        source: req.source.clone(),
                        origin: req.origin.clone(),
                        marker: req.marker,
                    });
                }
            }
        }

        Self(flattened.into_boxed_slice())
    }

    /// Consume the [`FlatRequiresDist`] and return the inner requirements.
    pub fn into_inner(self) -> Box<[Requirement]> {
        self.0
    }
}
433
434impl IntoIterator for FlatRequiresDist {
435    type Item = Requirement;
436    type IntoIter = <Box<[Requirement]> as IntoIterator>::IntoIter;
437
438    fn into_iter(self) -> Self::IntoIter {
439        Box::into_iter(self.0)
440    }
441}
442
#[cfg(test)]
mod test {
    use std::fmt::Write;
    use std::path::Path;
    use std::str::FromStr;

    use indoc::indoc;
    use insta::assert_snapshot;
    use tempfile::TempDir;

    use uv_auth::CredentialsCache;
    use uv_configuration::NoSources;
    use uv_distribution_types::IndexLocations;
    use uv_normalize::PackageName;
    use uv_pep508::Requirement;
    use uv_workspace::{DiscoveryOptions, ProjectWorkspace, WorkspaceCache};

    use crate::RequiresDist;
    use crate::metadata::requires_dist::FlatRequiresDist;

    /// Write `contents` as a `pyproject.toml` in `temp_dir`, discover the project, and lower its
    /// metadata via [`RequiresDist::from_project_workspace`].
    async fn requires_dist_from_pyproject_toml(
        temp_dir: &Path,
        contents: &str,
    ) -> anyhow::Result<RequiresDist> {
        fs_err::write(temp_dir.join("pyproject.toml"), contents)?;
        let project_workspace = ProjectWorkspace::discover(
            temp_dir,
            &DiscoveryOptions {
                // Confine discovery to the temp directory so ambient workspaces don't leak in.
                stop_discovery_at: Some(temp_dir.to_path_buf()),
                ..DiscoveryOptions::default()
            },
            &WorkspaceCache::default(),
        )
        .await?;
        let pyproject_toml = uv_pypi_types::PyProjectToml::from_toml(contents, "pyproject.toml")?;
        let requires_dist = uv_pypi_types::RequiresDist::from_pyproject_toml(pyproject_toml)?;
        Ok(RequiresDist::from_project_workspace(
            requires_dist,
            &project_workspace,
            None,
            &IndexLocations::default(),
            &NoSources::default(),
            true,
            &CredentialsCache::new(),
        )?)
    }

    /// Lower `input` and render the resulting error chain, normalizing the temp-dir path (and
    /// Windows path separators) for stable snapshots.
    async fn format_err(input: &str) -> String {
        let temp_dir = TempDir::new().unwrap();
        let err = requires_dist_from_pyproject_toml(temp_dir.path(), input)
            .await
            .unwrap_err();
        let mut causes = err.chain();
        let mut message = String::new();
        let _ = writeln!(message, "error: {}", causes.next().unwrap());
        for err in causes {
            let _ = writeln!(message, "  Caused by: {err}");
        }
        message
            .replace(&temp_dir.path().display().to_string(), "[PATH]")
            .replace('\\', "/")
    }

    // A source entry must be a map or list of maps, not a scalar.
    #[tokio::test]
    async fn wrong_type() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = true
        "#};

        assert_snapshot!(format_err(input).await, @"
        error: Failed to parse: `[PATH]/pyproject.toml`
          Caused by: TOML parse error at line 8, column 8
          |
        8 | tqdm = true
          |        ^^^^
        invalid type: boolean `true`, expected a single source (as a map) or list of sources
        ");
    }

    // A Git source may pin at most one of `rev`, `tag`, or `branch`.
    #[tokio::test]
    async fn too_many_git_specs() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = { git = "https://github.com/tqdm/tqdm", rev = "baaaaaab", tag = "v1.0.0" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: Failed to parse: `[PATH]/pyproject.toml`
          Caused by: TOML parse error at line 8, column 8
          |
        8 | tqdm = { git = "https://github.com/tqdm/tqdm", rev = "baaaaaab", tag = "v1.0.0" }
          |        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        expected at most one of `rev`, `tag`, or `branch`
        "#);
    }

    // Unknown source keys are rejected with the list of valid fields.
    #[tokio::test]
    async fn too_many_git_typo() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: Failed to parse: `[PATH]/pyproject.toml`
          Caused by: TOML parse error at line 8, column 48
          |
        8 | tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" }
          |                                                ^^^
        unknown field `ref`, expected one of `git`, `subdirectory`, `rev`, `tag`, `branch`, `lfs`, `url`, `path`, `editable`, `package`, `index`, `workspace`, `marker`, `extra`, `group`
        "#);
    }

    // `extra` and `group` are mutually exclusive on a source.
    #[tokio::test]
    async fn extra_and_group() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = []

            [tool.uv.sources]
            tqdm = { git = "https://github.com/tqdm/tqdm", extra = "torch", group = "dev" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: Failed to parse: `[PATH]/pyproject.toml`
          Caused by: TOML parse error at line 7, column 8
          |
        7 | tqdm = { git = "https://github.com/tqdm/tqdm", extra = "torch", group = "dev" }
          |        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        cannot specify both `extra` and `group`
        "#);
    }

    // A source cannot combine two distinct source kinds (here, `path` and `index`).
    #[tokio::test]
    async fn you_cant_mix_those() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = { path = "tqdm", index = "torch" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: Failed to parse: `[PATH]/pyproject.toml`
          Caused by: TOML parse error at line 8, column 8
          |
        8 | tqdm = { path = "tqdm", index = "torch" }
          |        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        cannot specify both `path` and `index`
        "#);
    }

    // A bare dependency without a version constraint is valid.
    #[tokio::test]
    async fn missing_constraint() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
        "#};
        let temp_dir = TempDir::new().unwrap();
        assert!(
            requires_dist_from_pyproject_toml(temp_dir.path(), input)
                .await
                .is_ok()
        );
    }

    // Malformed TOML in a source value surfaces the parse error with position info.
    #[tokio::test]
    async fn invalid_syntax() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm ==4.66.0",
            ]
            [tool.uv.sources]
            tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: Failed to parse: `[PATH]/pyproject.toml`
          Caused by: TOML parse error at line 8, column 16
          |
        8 | tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
          |                ^
        missing opening quote, expected `"`
        "#);
    }

    // A syntactically valid TOML string that isn't a valid URL is rejected at parse time.
    #[tokio::test]
    async fn invalid_url() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm ==4.66.0",
            ]
            [tool.uv.sources]
            tqdm = { url = "§invalid#+#*Ä" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: Failed to parse: `[PATH]/pyproject.toml`
          Caused by: TOML parse error at line 8, column 16
          |
        8 | tqdm = { url = "§invalid#+#*Ä" }
          |                ^^^^^^^^^^^^^^^^^
        relative URL without a base: "§invalid#+#*Ä"
        "#);
    }

    // A `workspace = true` source for a non-member fails during lowering (here with a URL spec).
    #[tokio::test]
    async fn workspace_and_url_spec() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm @ git+https://github.com/tqdm/tqdm",
            ]
            [tool.uv.sources]
            tqdm = { workspace = true }
        "#};

        assert_snapshot!(format_err(input).await, @"
        error: Failed to parse entry: `tqdm`
          Caused by: `tqdm` references a workspace in `tool.uv.sources` (e.g., `tqdm = { workspace = true }`), but is not a workspace member
        ");
    }

    // Same failure for a versioned dependency that isn't a workspace member.
    #[tokio::test]
    async fn missing_workspace_package() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm ==4.66.0",
            ]
            [tool.uv.sources]
            tqdm = { workspace = true }
        "#};

        assert_snapshot!(format_err(input).await, @"
        error: Failed to parse entry: `tqdm`
          Caused by: `tqdm` references a workspace in `tool.uv.sources` (e.g., `tqdm = { workspace = true }`), but is not a workspace member
        ");
    }

    // `tool.uv.sources` cannot be combined with dynamic `dependencies`.
    #[tokio::test]
    async fn cant_be_dynamic() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dynamic = [
                "dependencies"
            ]
            [tool.uv.sources]
            tqdm = { workspace = true }
        "#};

        assert_snapshot!(format_err(input).await, @"error: The following field was marked as dynamic: dependencies");
    }

    // `tool.uv.sources` without a `[project]` table is an error.
    #[tokio::test]
    async fn missing_project_section() {
        let input = indoc! {"
            [tool.uv.sources]
            tqdm = { workspace = true }
        "};

        assert_snapshot!(format_err(input).await, @"error: No `project` table found in: [PATH]/pyproject.toml");
    }

    // No self-references: flattening is the identity.
    #[test]
    fn test_flat_requires_dist_noop() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev'")
                .unwrap()
                .into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }

    // `pkg[dev]; extra == 'test'` expands to the `dev` requirements under `extra == 'test'`.
    #[test]
    fn test_flat_requires_dist_basic() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("pkg[dev]; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev'")
                .unwrap()
                .into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'test'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }

    // Environment markers on the self-reference are conjoined into the expanded requirements.
    #[test]
    fn test_flat_requires_dist_with_markers() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = vec![
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("pkg[dev]; extra == 'test' and sys_platform == 'win32'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev' and sys_platform == 'win32'")
                .unwrap()
                .into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev' and sys_platform == 'win32'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'test' and sys_platform == 'win32'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }

    // A version constraint on a self-reference is retained as a plain `pkg==...` requirement.
    #[test]
    fn test_flat_requires_dist_self_constraint() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev'")
                .unwrap()
                .into(),
            Requirement::from_str("pkg[async]==1.0.0").unwrap().into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev'")
                    .unwrap()
                    .into(),
                Requirement::from_str("pkg==1.0.0").unwrap().into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }
}