1use std::collections::{BTreeMap, VecDeque};
2use std::path::Path;
3use std::slice;
4
5use rustc_hash::FxHashSet;
6
7use uv_auth::CredentialsCache;
8use uv_configuration::NoSources;
9use uv_distribution_types::{IndexLocations, Requirement};
10use uv_normalize::{ExtraName, GroupName, PackageName};
11use uv_pep508::MarkerTree;
12use uv_workspace::dependency_groups::FlatDependencyGroups;
13use uv_workspace::pyproject::{Sources, ToolUvSources};
14use uv_workspace::{DiscoveryOptions, MemberDiscovery, ProjectWorkspace, WorkspaceCache};
15
16use crate::Metadata;
17use crate::metadata::{GitWorkspaceMember, LoweredRequirement, MetadataError};
18
/// The subset of distribution metadata needed for resolution: the (lowered) requirements, plus
/// the information needed to interpret them (extras and dependency groups).
#[derive(Debug, Clone)]
pub struct RequiresDist {
    /// The name of the package.
    pub name: PackageName,
    /// The lowered `Requires-Dist` requirements.
    pub requires_dist: Box<[Requirement]>,
    /// The extras advertised by the package (`Provides-Extra`).
    pub provides_extra: Box<[ExtraName]>,
    /// The lowered requirements of each dependency group, keyed by group name.
    pub dependency_groups: BTreeMap<GroupName, Box<[Requirement]>>,
    /// Whether the metadata was marked as dynamic (i.e., may change at build time).
    pub dynamic: bool,
}
27
impl RequiresDist {
    /// Lower standards-compliant metadata as-is, without applying `tool.uv.sources` or
    /// dependency groups (named for metadata version 2.3, which carries neither).
    pub fn from_metadata23(metadata: uv_pypi_types::RequiresDist) -> Self {
        Self {
            name: metadata.name,
            requires_dist: Box::into_iter(metadata.requires_dist)
                .map(Requirement::from)
                .collect(),
            provides_extra: metadata.provides_extra,
            // No dependency groups exist in this path.
            dependency_groups: BTreeMap::default(),
            dynamic: metadata.dynamic,
        }
    }

    /// Lower the metadata, applying `tool.uv` settings from an enclosing project or workspace at
    /// `install_path`, if one can be discovered; otherwise, fall back to the metadata as-is.
    pub async fn from_project_maybe_workspace(
        metadata: uv_pypi_types::RequiresDist,
        install_path: &Path,
        git_member: Option<&GitWorkspaceMember<'_>>,
        locations: &IndexLocations,
        sources: NoSources,
        cache: &WorkspaceCache,
        credentials_cache: &CredentialsCache,
    ) -> Result<Self, MetadataError> {
        let discovery = DiscoveryOptions {
            // For a Git workspace member, never walk above the checkout root.
            stop_discovery_at: git_member.map(|git_member| {
                git_member
                    .fetch_root
                    .parent()
                    .expect("git checkout has a parent")
                    .to_path_buf()
            }),
            // Only discover workspace members when sources are respected; with sources disabled,
            // workspace sources won't be applied, so member discovery is skipped.
            members: if sources.is_none() {
                MemberDiscovery::default()
            } else {
                MemberDiscovery::None
            },
            ..DiscoveryOptions::default()
        };
        // If `install_path` isn't a project root, there's nothing to lower against.
        let Some(project_workspace) =
            ProjectWorkspace::from_maybe_project_root(install_path, &discovery, cache).await?
        else {
            return Ok(Self::from_metadata23(metadata));
        };

        Self::from_project_workspace(
            metadata,
            &project_workspace,
            git_member,
            locations,
            &sources,
            credentials_cache,
        )
    }

    /// Lower the metadata against a discovered project workspace, applying the project's
    /// `tool.uv.index`, `tool.uv.sources`, and dependency groups from its `pyproject.toml`.
    fn from_project_workspace(
        metadata: uv_pypi_types::RequiresDist,
        project_workspace: &ProjectWorkspace,
        git_member: Option<&GitWorkspaceMember<'_>>,
        locations: &IndexLocations,
        no_sources: &NoSources,
        credentials_cache: &CredentialsCache,
    ) -> Result<Self, MetadataError> {
        // Collect any `tool.uv.index` entries declared by the current project.
        let empty = vec![];
        let project_indexes = project_workspace
            .current_project()
            .pyproject_toml()
            .tool
            .as_ref()
            .and_then(|tool| tool.uv.as_ref())
            .and_then(|uv| uv.index.as_deref())
            .unwrap_or(&empty);

        // Collect any `tool.uv.sources` entries declared by the current project.
        let empty = BTreeMap::default();
        let project_sources = project_workspace
            .current_project()
            .pyproject_toml()
            .tool
            .as_ref()
            .and_then(|tool| tool.uv.as_ref())
            .and_then(|uv| uv.sources.as_ref())
            .map(ToolUvSources::inner)
            .unwrap_or(&empty);

        // Collect the dependency groups from the project's `pyproject.toml`.
        let dependency_groups = FlatDependencyGroups::from_pyproject_toml(
            project_workspace.current_project().root(),
            project_workspace.current_project().pyproject_toml(),
        )?;

        // Now that both the metadata and the groups are known, check that every source's `extra`
        // and `group` refer to something that actually exists.
        Self::validate_sources(project_sources, &metadata, &dependency_groups)?;

        // Lower each dependency group, applying any matching sources.
        let dependency_groups = dependency_groups
            .into_iter()
            .map(|(name, flat_group)| {
                let requirements = flat_group
                    .requirements
                    .into_iter()
                    .flat_map(|requirement| {
                        if no_sources.for_package(&requirement.name) {
                            // Sources are disabled for this package: keep the requirement as-is.
                            vec![Ok(Requirement::from(requirement))].into_iter()
                        } else {
                            let requirement_name = requirement.name.clone();
                            let group = name.clone();
                            let extra = None;

                            LoweredRequirement::from_requirement(
                                requirement,
                                Some(&metadata.name),
                                project_workspace.project_root(),
                                project_sources,
                                project_indexes,
                                extra,
                                Some(&group),
                                locations,
                                project_workspace.workspace(),
                                git_member,
                                credentials_cache,
                            )
                            // Attach the group and requirement name to any lowering error.
                            .map(move |requirement| match requirement {
                                Ok(requirement) => Ok(requirement.into_inner()),
                                Err(err) => Err(MetadataError::GroupLoweringError(
                                    group.clone(),
                                    requirement_name.clone(),
                                    Box::new(err),
                                )),
                            })
                            .collect::<Vec<_>>()
                            .into_iter()
                        }
                    })
                    .collect::<Result<Box<_>, _>>()?;
                Ok::<(GroupName, Box<_>), MetadataError>((name, requirements))
            })
            .collect::<Result<BTreeMap<_, _>, _>>()?;

        // Lower the `Requires-Dist` requirements, applying any matching sources.
        let requires_dist = Box::into_iter(metadata.requires_dist);
        let requires_dist = requires_dist
            .flat_map(|requirement| {
                if no_sources.for_package(&requirement.name) {
                    // Sources are disabled for this package: keep the requirement as-is.
                    vec![Ok(Requirement::from(requirement))].into_iter()
                } else {
                    let requirement_name = requirement.name.clone();
                    // The extra that gates this requirement, if its marker is guarded by one.
                    let extra = requirement.marker.top_level_extra_name();
                    let group = None;

                    LoweredRequirement::from_requirement(
                        requirement,
                        Some(&metadata.name),
                        project_workspace.project_root(),
                        project_sources,
                        project_indexes,
                        extra.as_deref(),
                        group,
                        locations,
                        project_workspace.workspace(),
                        git_member,
                        credentials_cache,
                    )
                    // Attach the requirement name to any lowering error.
                    .map(move |requirement| match requirement {
                        Ok(requirement) => Ok(requirement.into_inner()),
                        Err(err) => Err(MetadataError::LoweringError(
                            requirement_name.clone(),
                            Box::new(err),
                        )),
                    })
                    .collect::<Vec<_>>()
                    .into_iter()
                }
            })
            .collect::<Result<Box<_>, _>>()?;

        Ok(Self {
            name: metadata.name,
            requires_dist,
            dependency_groups,
            provides_extra: metadata.provides_extra,
            dynamic: metadata.dynamic,
        })
    }

    /// Check that the `tool.uv.sources` entries are consistent with the metadata: a source's
    /// `extra` must name a declared extra that gates at least one matching requirement, and a
    /// source's `group` must name a declared dependency group that includes the package.
    fn validate_sources(
        sources: &BTreeMap<PackageName, Sources>,
        metadata: &uv_pypi_types::RequiresDist,
        dependency_groups: &FlatDependencyGroups,
    ) -> Result<(), MetadataError> {
        for (name, sources) in sources {
            for source in sources.iter() {
                if let Some(extra) = source.extra() {
                    // The extra must be declared in `Provides-Extra`.
                    if !metadata.provides_extra.contains(extra) {
                        return Err(MetadataError::MissingSourceExtra(
                            name.clone(),
                            extra.clone(),
                        ));
                    }

                    // And some requirement for this package must be gated on that extra.
                    if !metadata.requires_dist.iter().any(|requirement| {
                        requirement.name == *name
                            && requirement.marker.top_level_extra_name().as_deref() == Some(extra)
                    }) {
                        return Err(MetadataError::IncompleteSourceExtra(
                            name.clone(),
                            extra.clone(),
                        ));
                    }
                }

                if let Some(group) = source.group() {
                    // The group must be declared.
                    let Some(flat_group) = dependency_groups.get(group) else {
                        return Err(MetadataError::MissingSourceGroup(
                            name.clone(),
                            group.clone(),
                        ));
                    };

                    // And the group must actually include the package.
                    if !flat_group
                        .requirements
                        .iter()
                        .any(|requirement| requirement.name == *name)
                    {
                        return Err(MetadataError::IncompleteSourceGroup(
                            name.clone(),
                            group.clone(),
                        ));
                    }
                }
            }
        }

        Ok(())
    }
}
277
278impl From<Metadata> for RequiresDist {
279 fn from(metadata: Metadata) -> Self {
280 Self {
281 name: metadata.name,
282 requires_dist: metadata.requires_dist,
283 provides_extra: metadata.provides_extra,
284 dependency_groups: metadata.dependency_groups,
285 dynamic: metadata.dynamic,
286 }
287 }
288}
289
/// A `Requires-Dist`-style set of requirements in which any self-references (e.g., `pkg[dev]`
/// appearing within `pkg`'s own requirements) have been expanded into the requirements that the
/// referenced extras enable.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FlatRequiresDist(Box<[Requirement]>);
338
impl FlatRequiresDist {
    /// Flatten the requirements of `name` by resolving any self-references (e.g., `pkg[dev]`
    /// inside `pkg`'s own requirements) into the requirements gated by the referenced extras.
    pub fn from_requirements(requirements: Box<[Requirement]>, name: &PackageName) -> Self {
        // Fast path: no self-references, so return the requirements unchanged.
        if requirements.iter().all(|req| req.name != *name) {
            return Self(requirements);
        }

        // Memoize the top-level extra guard (`extra == '...'`) of each requirement's marker.
        let top_level_extras: Vec<_> = requirements
            .iter()
            .map(|req| req.marker.top_level_extra_name())
            .collect();

        // Breadth-first expansion over the self-referenced extras; `seen` prevents reprocessing
        // an (extra, marker) pair, which also guards against cyclic extra references.
        let mut flattened = requirements.to_vec();
        let mut seen = FxHashSet::<(ExtraName, MarkerTree)>::default();
        let mut queue: VecDeque<_> = flattened
            .iter()
            .filter(|req| req.name == *name)
            .flat_map(|req| req.extras.iter().cloned().map(|extra| (extra, req.marker)))
            .collect();
        while let Some((extra, marker)) = queue.pop_front() {
            if !seen.insert((extra.clone(), marker)) {
                continue;
            }

            // Pull in every requirement that is gated on the activated extra.
            for (requirement, top_level_extra) in requirements.iter().zip(top_level_extras.iter()) {
                if top_level_extra.as_deref() != Some(&extra) {
                    continue;
                }
                // Conjoin the activating marker with the requirement's own marker, then drop the
                // now-satisfied `extra == '...'` clause.
                let requirement = {
                    let mut marker = marker;
                    marker.and(requirement.marker);
                    Requirement {
                        name: requirement.name.clone(),
                        extras: requirement.extras.clone(),
                        groups: requirement.groups.clone(),
                        source: requirement.source.clone(),
                        origin: requirement.origin.clone(),
                        marker: marker.simplify_extras(slice::from_ref(&extra)),
                    }
                };
                if requirement.name == *name {
                    // Another self-reference: enqueue its extras rather than emitting it.
                    queue.extend(
                        requirement
                            .extras
                            .iter()
                            .cloned()
                            .map(|extra| (extra, requirement.marker)),
                    );
                } else {
                    flattened.push(requirement);
                }
            }
        }

        // Drop the self-references themselves; their payloads were expanded above...
        flattened.retain(|req| req.name != *name);

        // ...but retain any self-requirement that carries a non-empty source (e.g., a version
        // constraint), with its extras stripped, since those were already flattened.
        for req in &requirements {
            if req.name == *name {
                if !req.source.is_empty() {
                    flattened.push(Requirement {
                        name: req.name.clone(),
                        extras: Box::new([]),
                        groups: req.groups.clone(),
                        source: req.source.clone(),
                        origin: req.origin.clone(),
                        marker: req.marker,
                    });
                }
            }
        }

        Self(flattened.into_boxed_slice())
    }

    /// Consume the wrapper, returning the flattened requirements.
    pub fn into_inner(self) -> Box<[Requirement]> {
        self.0
    }
}
428
429impl IntoIterator for FlatRequiresDist {
430 type Item = Requirement;
431 type IntoIter = <Box<[Requirement]> as IntoIterator>::IntoIter;
432
433 fn into_iter(self) -> Self::IntoIter {
434 Box::into_iter(self.0)
435 }
436}
437
#[cfg(test)]
mod test {
    use std::fmt::Write;
    use std::path::Path;
    use std::str::FromStr;

    use indoc::indoc;
    use insta::assert_snapshot;
    use tempfile::TempDir;

    use uv_auth::CredentialsCache;
    use uv_configuration::NoSources;
    use uv_distribution_types::IndexLocations;
    use uv_normalize::PackageName;
    use uv_pep508::Requirement;
    use uv_workspace::{DiscoveryOptions, ProjectWorkspace, WorkspaceCache};

    use crate::RequiresDist;
    use crate::metadata::requires_dist::FlatRequiresDist;

    /// Write `contents` as a `pyproject.toml` in `temp_dir`, then parse and lower it into a
    /// [`RequiresDist`] with default settings (no Git member, default indexes, sources enabled).
    async fn requires_dist_from_pyproject_toml(
        temp_dir: &Path,
        contents: &str,
    ) -> anyhow::Result<RequiresDist> {
        fs_err::write(temp_dir.join("pyproject.toml"), contents)?;
        let project_workspace = ProjectWorkspace::discover(
            temp_dir,
            &DiscoveryOptions {
                stop_discovery_at: Some(temp_dir.to_path_buf()),
                ..DiscoveryOptions::default()
            },
            &WorkspaceCache::default(),
        )
        .await?;
        let pyproject_toml = uv_pypi_types::PyProjectToml::from_toml(contents, "pyproject.toml")?;
        let requires_dist = uv_pypi_types::RequiresDist::from_pyproject_toml(pyproject_toml)?;
        Ok(RequiresDist::from_project_workspace(
            requires_dist,
            &project_workspace,
            None,
            &IndexLocations::default(),
            &NoSources::default(),
            &CredentialsCache::new(),
        )?)
    }

    /// Lower `input` as a `pyproject.toml`, expecting failure, and render the error chain with
    /// the temporary path normalized to `[PATH]` (and backslashes to `/`) for stable snapshots.
    async fn format_err(input: &str) -> String {
        let temp_dir = TempDir::new().unwrap();
        let err = requires_dist_from_pyproject_toml(temp_dir.path(), input)
            .await
            .unwrap_err();
        let mut causes = err.chain();
        let mut message = String::new();
        let _ = writeln!(message, "error: {}", causes.next().unwrap());
        for err in causes {
            let _ = writeln!(message, "  Caused by: {err}");
        }
        message
            .replace(&temp_dir.path().display().to_string(), "[PATH]")
            .replace('\\', "/")
    }

    // A source must be a map (or list of maps), not a bare scalar.
    #[tokio::test]
    async fn wrong_type() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = true
        "#};

        assert_snapshot!(format_err(input).await, @"
        error: Failed to parse: `[PATH]/pyproject.toml`
          Caused by: TOML parse error at line 8, column 8
            |
          8 | tqdm = true
            |        ^^^^
        invalid type: boolean `true`, expected a single source (as a map) or list of sources
        ");
    }

    // A Git source may pin at most one of `rev`, `tag`, or `branch`.
    #[tokio::test]
    async fn too_many_git_specs() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = { git = "https://github.com/tqdm/tqdm", rev = "baaaaaab", tag = "v1.0.0" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: Failed to parse: `[PATH]/pyproject.toml`
          Caused by: TOML parse error at line 8, column 8
            |
          8 | tqdm = { git = "https://github.com/tqdm/tqdm", rev = "baaaaaab", tag = "v1.0.0" }
            |        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        expected at most one of `rev`, `tag`, or `branch`
        "#);
    }

    // Unknown source keys (here, `ref` instead of `rev`) are rejected with the full field list.
    #[tokio::test]
    async fn too_many_git_typo() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: Failed to parse: `[PATH]/pyproject.toml`
          Caused by: TOML parse error at line 8, column 48
            |
          8 | tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" }
            |                                                ^^^
        unknown field `ref`, expected one of `git`, `subdirectory`, `rev`, `tag`, `branch`, `lfs`, `url`, `path`, `editable`, `package`, `index`, `workspace`, `marker`, `extra`, `group`
        "#);
    }

    // A source cannot be scoped to both an extra and a group at once.
    #[tokio::test]
    async fn extra_and_group() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = []

            [tool.uv.sources]
            tqdm = { git = "https://github.com/tqdm/tqdm", extra = "torch", group = "dev" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: Failed to parse: `[PATH]/pyproject.toml`
          Caused by: TOML parse error at line 7, column 8
            |
          7 | tqdm = { git = "https://github.com/tqdm/tqdm", extra = "torch", group = "dev" }
            |        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        cannot specify both `extra` and `group`
        "#);
    }

    // `path` and `index` are mutually exclusive source kinds.
    #[tokio::test]
    async fn you_cant_mix_those() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = { path = "tqdm", index = "torch" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: Failed to parse: `[PATH]/pyproject.toml`
          Caused by: TOML parse error at line 8, column 8
            |
          8 | tqdm = { path = "tqdm", index = "torch" }
            |        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        cannot specify both `path` and `index`
        "#);
    }

    // A bare requirement without a source or version constraint is accepted.
    #[tokio::test]
    async fn missing_constraint() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
        "#};
        let temp_dir = TempDir::new().unwrap();
        assert!(
            requires_dist_from_pyproject_toml(temp_dir.path(), input)
                .await
                .is_ok()
        );
    }

    // Malformed TOML (missing opening quote) surfaces as a parse error.
    #[tokio::test]
    async fn invalid_syntax() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm ==4.66.0",
            ]
            [tool.uv.sources]
            tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: Failed to parse: `[PATH]/pyproject.toml`
          Caused by: TOML parse error at line 8, column 16
            |
          8 | tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
            |                ^
        missing opening quote, expected `"`
        "#);
    }

    // A syntactically quoted but unparseable URL is rejected during deserialization.
    #[tokio::test]
    async fn invalid_url() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm ==4.66.0",
            ]
            [tool.uv.sources]
            tqdm = { url = "§invalid#+#*Ä" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: Failed to parse: `[PATH]/pyproject.toml`
          Caused by: TOML parse error at line 8, column 16
            |
          8 | tqdm = { url = "§invalid#+#*Ä" }
            |                ^^^^^^^^^^^^^^^^^
        relative URL without a base: "§invalid#+#*Ä"
        "#);
    }

    // A `workspace = true` source for a package that isn't a workspace member fails, even if the
    // requirement itself carries a URL.
    #[tokio::test]
    async fn workspace_and_url_spec() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm @ git+https://github.com/tqdm/tqdm",
            ]
            [tool.uv.sources]
            tqdm = { workspace = true }
        "#};

        assert_snapshot!(format_err(input).await, @"
        error: Failed to parse entry: `tqdm`
          Caused by: `tqdm` references a workspace in `tool.uv.sources` (e.g., `tqdm = { workspace = true }`), but is not a workspace member
        ");
    }

    // Same as above, but with a plain versioned requirement.
    #[tokio::test]
    async fn missing_workspace_package() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm ==4.66.0",
            ]
            [tool.uv.sources]
            tqdm = { workspace = true }
        "#};

        assert_snapshot!(format_err(input).await, @"
        error: Failed to parse entry: `tqdm`
          Caused by: `tqdm` references a workspace in `tool.uv.sources` (e.g., `tqdm = { workspace = true }`), but is not a workspace member
        ");
    }

    // Sources can't be lowered when `dependencies` is declared dynamic.
    #[tokio::test]
    async fn cant_be_dynamic() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dynamic = [
              "dependencies"
            ]
            [tool.uv.sources]
            tqdm = { workspace = true }
        "#};

        assert_snapshot!(format_err(input).await, @"error: The following field was marked as dynamic: dependencies");
    }

    // A `pyproject.toml` with `tool.uv.sources` but no `[project]` table is rejected.
    #[tokio::test]
    async fn missing_project_section() {
        let input = indoc! {"
            [tool.uv.sources]
            tqdm = { workspace = true }
        "};

        assert_snapshot!(format_err(input).await, @"error: No `project` table found in: [PATH]/pyproject.toml");
    }

    // Without self-references, flattening is the identity.
    #[test]
    fn test_flat_requires_dist_noop() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev'")
                .unwrap()
                .into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }

    // `pkg[dev]; extra == 'test'` pulls `dev`-gated requirements into the `test` extra.
    #[test]
    fn test_flat_requires_dist_basic() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("pkg[dev]; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev'")
                .unwrap()
                .into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'test'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }

    // Non-extra marker clauses on the self-reference are preserved on the expanded requirements.
    #[test]
    fn test_flat_requires_dist_with_markers() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = vec![
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("pkg[dev]; extra == 'test' and sys_platform == 'win32'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev' and sys_platform == 'win32'")
                .unwrap()
                .into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev' and sys_platform == 'win32'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'test' and sys_platform == 'win32'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }

    // A self-requirement with a version constraint is retained (with its extras stripped).
    #[test]
    fn test_flat_requires_dist_self_constraint() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev'")
                .unwrap()
                .into(),
            Requirement::from_str("pkg[async]==1.0.0").unwrap().into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev'")
                    .unwrap()
                    .into(),
                Requirement::from_str("pkg==1.0.0").unwrap().into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }
}