use std::collections::{BTreeMap, VecDeque};
use std::path::Path;
use std::slice;

use rustc_hash::FxHashSet;

use uv_auth::CredentialsCache;
use uv_configuration::NoSources;
use uv_distribution_types::{IndexLocations, Requirement};
use uv_normalize::{ExtraName, GroupName, PackageName};
use uv_pep508::MarkerTree;
use uv_workspace::dependency_groups::FlatDependencyGroups;
use uv_workspace::pyproject::{Sources, ToolUvSources};
use uv_workspace::{DiscoveryOptions, MemberDiscovery, ProjectWorkspace, WorkspaceCache};

use crate::Metadata;
use crate::metadata::{GitWorkspaceMember, LoweredRequirement, MetadataError};

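/// The dependency metadata of a package: `requires-dist`, `provides-extra`, and
/// `dependency-groups`, with requirements lowered against the project's `tool.uv.sources` where
/// applicable, plus the `dynamic` flag carried over from the underlying metadata.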
#[derive(Debug, Clone)]
pub struct RequiresDist {
    pub name: PackageName,
    pub requires_dist: Box<[Requirement]>,
    pub provides_extra: Box<[ExtraName]>,
    pub dependency_groups: BTreeMap<GroupName, Box<[Requirement]>>,
    pub dynamic: bool,
}

impl RequiresDist {
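    /// Build a [`RequiresDist`] from core metadata alone, without applying any
    /// `tool.uv.sources`; dependency groups are left empty.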
    pub fn from_metadata23(metadata: uv_pypi_types::RequiresDist) -> Self {
        Self {
            name: metadata.name,
            requires_dist: Box::into_iter(metadata.requires_dist)
                .map(Requirement::from)
                .collect(),
            provides_extra: metadata.provides_extra,
            dependency_groups: BTreeMap::default(),
            dynamic: metadata.dynamic,
        }
    }

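    /// Lower the metadata in the context of the project at `install_path`, if one can be
    /// discovered: apply its workspace, sources, and indexes. If no project is found, fall back
    /// to the unlowered metadata.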
    pub async fn from_project_maybe_workspace(
        metadata: uv_pypi_types::RequiresDist,
        install_path: &Path,
        git_member: Option<&GitWorkspaceMember<'_>>,
        locations: &IndexLocations,
        sources: NoSources,
        cache: &WorkspaceCache,
        credentials_cache: &CredentialsCache,
    ) -> Result<Self, MetadataError> {
        // Never discover a workspace above the Git checkout root, and skip member discovery
        // entirely when sources are disabled.
        let discovery = DiscoveryOptions {
            stop_discovery_at: git_member.map(|git_member| {
                git_member
                    .fetch_root
                    .parent()
                    .expect("git checkout has a parent")
                    .to_path_buf()
            }),
            members: if sources.is_none() {
                MemberDiscovery::default()
            } else {
                MemberDiscovery::None
            },
            ..DiscoveryOptions::default()
        };
        let Some(project_workspace) =
            ProjectWorkspace::from_maybe_project_root(install_path, &discovery, cache).await?
        else {
            return Ok(Self::from_metadata23(metadata));
        };

        Self::from_project_workspace(
            metadata,
            &project_workspace,
            git_member,
            locations,
            &sources,
            credentials_cache,
        )
    }

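    /// Lower `requires-dist` and the dependency groups of a discovered project, applying the
    /// `tool.uv.sources` and `tool.uv.index` entries from its `pyproject.toml`.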
    fn from_project_workspace(
        metadata: uv_pypi_types::RequiresDist,
        project_workspace: &ProjectWorkspace,
        git_member: Option<&GitWorkspaceMember<'_>>,
        locations: &IndexLocations,
        no_sources: &NoSources,
        credentials_cache: &CredentialsCache,
    ) -> Result<Self, MetadataError> {
        // Collect the `tool.uv.index` entries declared by the current project.
        let empty = vec![];
        let project_indexes = project_workspace
            .current_project()
            .pyproject_toml()
            .tool
            .as_ref()
            .and_then(|tool| tool.uv.as_ref())
            .and_then(|uv| uv.index.as_deref())
            .unwrap_or(&empty);

        // Collect the `tool.uv.sources` entries declared by the current project.
        let empty = BTreeMap::default();
        let project_sources = project_workspace
            .current_project()
            .pyproject_toml()
            .tool
            .as_ref()
            .and_then(|tool| tool.uv.as_ref())
            .and_then(|uv| uv.sources.as_ref())
            .map(ToolUvSources::inner)
            .unwrap_or(&empty);

        let dependency_groups = FlatDependencyGroups::from_pyproject_toml(
            project_workspace.current_project().root(),
            project_workspace.current_project().pyproject_toml(),
        )?;

        Self::validate_sources(project_sources, &metadata, &dependency_groups)?;

        // Lower each dependency group, applying the project's sources and indexes.
        let dependency_groups = dependency_groups
            .into_iter()
            .map(|(name, flat_group)| {
                let requirements = flat_group
                    .requirements
                    .into_iter()
                    .flat_map(|requirement| {
                        if no_sources.for_package(&requirement.name) {
                            // Sources are disabled for this package; keep the requirement as declared.
                            vec![Ok(Requirement::from(requirement))].into_iter()
                        } else {
                            let requirement_name = requirement.name.clone();
                            let group = name.clone();
                            let extra = None;

                            LoweredRequirement::from_requirement(
                                requirement,
                                Some(&metadata.name),
                                project_workspace.project_root(),
                                project_sources,
                                project_indexes,
                                extra,
                                Some(&group),
                                locations,
                                project_workspace.workspace(),
                                git_member,
                                credentials_cache,
                            )
                            .map(move |requirement| match requirement {
                                Ok(requirement) => Ok(requirement.into_inner()),
                                Err(err) => Err(MetadataError::GroupLoweringError(
                                    group.clone(),
                                    requirement_name.clone(),
                                    Box::new(err),
                                )),
                            })
                            .collect::<Vec<_>>()
                            .into_iter()
                        }
                    })
                    .collect::<Result<Box<_>, _>>()?;
                Ok::<(GroupName, Box<_>), MetadataError>((name, requirements))
            })
            .collect::<Result<BTreeMap<_, _>, _>>()?;

        // Lower `requires-dist`, applying the project's sources and indexes.
        let requires_dist = Box::into_iter(metadata.requires_dist);
        let requires_dist = requires_dist
            .flat_map(|requirement| {
                if no_sources.for_package(&requirement.name) {
                    // Sources are disabled for this package; keep the requirement as declared.
                    vec![Ok(Requirement::from(requirement))].into_iter()
                } else {
                    let requirement_name = requirement.name.clone();
                    let extra = requirement.marker.top_level_extra_name();
                    let group = None;

                    LoweredRequirement::from_requirement(
                        requirement,
                        Some(&metadata.name),
                        project_workspace.project_root(),
                        project_sources,
                        project_indexes,
                        extra.as_deref(),
                        group,
                        locations,
                        project_workspace.workspace(),
                        git_member,
                        credentials_cache,
                    )
                    .map(move |requirement| match requirement {
                        Ok(requirement) => Ok(requirement.into_inner()),
                        Err(err) => Err(MetadataError::LoweringError(
                            requirement_name.clone(),
                            Box::new(err),
                        )),
                    })
                    .collect::<Vec<_>>()
                    .into_iter()
                }
            })
            .collect::<Result<Box<_>, _>>()?;

        Ok(Self {
            name: metadata.name,
            requires_dist,
            dependency_groups,
            provides_extra: metadata.provides_extra,
            dynamic: metadata.dynamic,
        })
    }

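    /// Check that any `tool.uv.sources` entry that names an `extra` or `group` refers to an
    /// extra that the package actually provides (and requests the dependency under), or to a
    /// dependency group that actually contains the dependency.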
    fn validate_sources(
        sources: &BTreeMap<PackageName, Sources>,
        metadata: &uv_pypi_types::RequiresDist,
        dependency_groups: &FlatDependencyGroups,
    ) -> Result<(), MetadataError> {
        for (name, sources) in sources {
            for source in sources.iter() {
                if let Some(extra) = source.extra() {
                    // The extra must be declared in `provides-extra`.
                    if !metadata.provides_extra.contains(extra) {
                        return Err(MetadataError::MissingSourceExtra(
                            name.clone(),
                            extra.clone(),
                        ));
                    }

                    // The dependency must be requested under that extra.
                    if !metadata.requires_dist.iter().any(|requirement| {
                        requirement.name == *name
                            && requirement.marker.top_level_extra_name().as_deref() == Some(extra)
                    }) {
                        return Err(MetadataError::IncompleteSourceExtra(
                            name.clone(),
                            extra.clone(),
                        ));
                    }
                }

                if let Some(group) = source.group() {
                    // The dependency group must exist.
                    let Some(flat_group) = dependency_groups.get(group) else {
                        return Err(MetadataError::MissingSourceGroup(
                            name.clone(),
                            group.clone(),
                        ));
                    };

                    // The dependency must be requested within that group.
                    if !flat_group
                        .requirements
                        .iter()
                        .any(|requirement| requirement.name == *name)
                    {
                        return Err(MetadataError::IncompleteSourceGroup(
                            name.clone(),
                            group.clone(),
                        ));
                    }
                }
            }
        }

        Ok(())
    }
}

impl From<Metadata> for RequiresDist {
    fn from(metadata: Metadata) -> Self {
        Self {
            name: metadata.name,
            requires_dist: metadata.requires_dist,
            provides_extra: metadata.provides_extra,
            dependency_groups: metadata.dependency_groups,
            dynamic: metadata.dynamic,
        }
    }
}

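/// A flattened `requires-dist` in which self-referential extras (e.g., `pkg[dev]` appearing in
/// the requirements of `pkg` itself) have been replaced by the requirements they enable.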
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FlatRequiresDist(Box<[Requirement]>);

impl FlatRequiresDist {
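    /// Flatten the requirements by expanding any self-referential extras: requirements gated on
    /// those extras are inlined (with markers combined), the self-references are dropped, and a
    /// bare self-constraint (e.g., `pkg==1.0.0`) is retained without its extras.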
    pub fn from_requirements(requirements: Box<[Requirement]>, name: &PackageName) -> Self {
        // If there are no self-references, the requirements are already flat.
        if requirements.iter().all(|req| req.name != *name) {
            return Self(requirements);
        }

        // Precompute the top-level extra (if any) that gates each requirement.
        let top_level_extras: Vec<_> = requirements
            .iter()
            .map(|req| req.marker.top_level_extra_name())
            .collect();

        let mut flattened = requirements.to_vec();
        let mut seen = FxHashSet::<(ExtraName, MarkerTree)>::default();
        // Start from the extras requested on the self-referential requirements.
        let mut queue: VecDeque<_> = flattened
            .iter()
            .filter(|req| req.name == *name)
            .flat_map(|req| req.extras.iter().cloned().map(|extra| (extra, req.marker)))
            .collect();
        while let Some((extra, marker)) = queue.pop_front() {
            if !seen.insert((extra.clone(), marker)) {
                continue;
            }

            // Inline the requirements gated on this extra, combining the markers and
            // simplifying the extra away.
            for (requirement, top_level_extra) in requirements.iter().zip(top_level_extras.iter())
            {
                if top_level_extra.as_deref() != Some(&extra) {
                    continue;
                }
                let requirement = {
                    let mut marker = marker;
                    marker.and(requirement.marker);
                    Requirement {
                        name: requirement.name.clone(),
                        extras: requirement.extras.clone(),
                        groups: requirement.groups.clone(),
                        source: requirement.source.clone(),
                        origin: requirement.origin.clone(),
                        marker: marker.simplify_extras(slice::from_ref(&extra)),
                    }
                };
                if requirement.name == *name {
                    // Another self-reference: enqueue its extras for expansion.
                    queue.extend(
                        requirement
                            .extras
                            .iter()
                            .cloned()
                            .map(|extra| (extra, requirement.marker)),
                    );
                } else {
                    flattened.push(requirement);
                }
            }
        }

        // Drop the self-references now that their extras have been expanded.
        flattened.retain(|req| req.name != *name);

        // Retain any versioned self-constraint (e.g., `pkg==1.0.0`), stripped of its extras.
        for req in &requirements {
            if req.name == *name && !req.source.is_empty() {
                flattened.push(Requirement {
                    name: req.name.clone(),
                    extras: Box::new([]),
                    groups: req.groups.clone(),
                    source: req.source.clone(),
                    origin: req.origin.clone(),
                    marker: req.marker,
                });
            }
        }

        Self(flattened.into_boxed_slice())
    }

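    /// Consume the [`FlatRequiresDist`] and return the underlying requirements.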
    pub fn into_inner(self) -> Box<[Requirement]> {
        self.0
    }
}

impl IntoIterator for FlatRequiresDist {
    type Item = Requirement;
    type IntoIter = <Box<[Requirement]> as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        Box::into_iter(self.0)
    }
}

#[cfg(test)]
mod test {
    use std::path::Path;
    use std::str::FromStr;

    use anyhow::Context;
    use indoc::indoc;
    use insta::assert_snapshot;

    use uv_auth::CredentialsCache;
    use uv_configuration::NoSources;
    use uv_distribution_types::IndexLocations;
    use uv_normalize::PackageName;
    use uv_pep508::Requirement;
    use uv_workspace::pyproject::PyProjectToml;
    use uv_workspace::{DiscoveryOptions, ProjectWorkspace, WorkspaceCache};

    use crate::RequiresDist;
    use crate::metadata::requires_dist::FlatRequiresDist;

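    /// Parse a `pyproject.toml` string and lower its `requires-dist` as a standalone project.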
    async fn requires_dist_from_pyproject_toml(contents: &str) -> anyhow::Result<RequiresDist> {
        let pyproject_toml = PyProjectToml::from_string(contents.to_string())?;
        let path = Path::new("pyproject.toml");
        let project_workspace = ProjectWorkspace::from_project(
            path,
            pyproject_toml
                .project
                .as_ref()
                .context("metadata field project not found")?,
            &pyproject_toml,
            &DiscoveryOptions {
                stop_discovery_at: Some(path.to_path_buf()),
                ..DiscoveryOptions::default()
            },
            &WorkspaceCache::default(),
        )
        .await?;
        let pyproject_toml = uv_pypi_types::PyProjectToml::from_toml(contents)?;
        let requires_dist = uv_pypi_types::RequiresDist::from_pyproject_toml(pyproject_toml)?;
        Ok(RequiresDist::from_project_workspace(
            requires_dist,
            &project_workspace,
            None,
            &IndexLocations::default(),
            &NoSources::default(),
            &CredentialsCache::new(),
        )?)
    }

    /// Render a lowering error and its chain of causes for snapshot assertions.
    async fn format_err(input: &str) -> String {
        use std::fmt::Write;

        let err = requires_dist_from_pyproject_toml(input).await.unwrap_err();
        let mut causes = err.chain();
        let mut message = String::new();
        let _ = writeln!(message, "error: {}", causes.next().unwrap());
        for err in causes {
            let _ = writeln!(message, "  Caused by: {err}");
        }
        message
    }

    #[tokio::test]
    async fn wrong_type() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = true
        "#};

        assert_snapshot!(format_err(input).await, @"
        error: TOML parse error at line 8, column 8
          |
        8 | tqdm = true
          |        ^^^^
        invalid type: boolean `true`, expected a single source (as a map) or list of sources
        ");
    }

    #[tokio::test]
    async fn too_many_git_specs() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = { git = "https://github.com/tqdm/tqdm", rev = "baaaaaab", tag = "v1.0.0" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: TOML parse error at line 8, column 8
          |
        8 | tqdm = { git = "https://github.com/tqdm/tqdm", rev = "baaaaaab", tag = "v1.0.0" }
          |        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        expected at most one of `rev`, `tag`, or `branch`
        "#);
    }

    #[tokio::test]
    async fn too_many_git_typo() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: TOML parse error at line 8, column 48
          |
        8 | tqdm = { git = "https://github.com/tqdm/tqdm", ref = "baaaaaab" }
          |                                                ^^^
        unknown field `ref`, expected one of `git`, `subdirectory`, `rev`, `tag`, `branch`, `lfs`, `url`, `path`, `editable`, `package`, `index`, `workspace`, `marker`, `extra`, `group`
        "#);
    }

    #[tokio::test]
    async fn extra_and_group() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = []

            [tool.uv.sources]
            tqdm = { git = "https://github.com/tqdm/tqdm", extra = "torch", group = "dev" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: TOML parse error at line 7, column 8
          |
        7 | tqdm = { git = "https://github.com/tqdm/tqdm", extra = "torch", group = "dev" }
          |        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        cannot specify both `extra` and `group`
        "#);
    }

    #[tokio::test]
    async fn you_cant_mix_those() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
            [tool.uv.sources]
            tqdm = { path = "tqdm", index = "torch" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: TOML parse error at line 8, column 8
          |
        8 | tqdm = { path = "tqdm", index = "torch" }
          |        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        cannot specify both `path` and `index`
        "#);
    }

    #[tokio::test]
    async fn missing_constraint() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm",
            ]
        "#};
        assert!(requires_dist_from_pyproject_toml(input).await.is_ok());
    }

    #[tokio::test]
    async fn invalid_syntax() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm ==4.66.0",
            ]
            [tool.uv.sources]
            tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: TOML parse error at line 8, column 16
          |
        8 | tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
          |                ^
        missing opening quote, expected `"`
        "#);
    }

    #[tokio::test]
    async fn invalid_url() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm ==4.66.0",
            ]
            [tool.uv.sources]
            tqdm = { url = "§invalid#+#*Ä" }
        "#};

        assert_snapshot!(format_err(input).await, @r#"
        error: TOML parse error at line 8, column 16
          |
        8 | tqdm = { url = "§invalid#+#*Ä" }
          |                ^^^^^^^^^^^^^^^^^
        relative URL without a base: "§invalid#+#*Ä"
        "#);
    }

    #[tokio::test]
    async fn workspace_and_url_spec() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm @ git+https://github.com/tqdm/tqdm",
            ]
            [tool.uv.sources]
            tqdm = { workspace = true }
        "#};

        assert_snapshot!(format_err(input).await, @"
        error: Failed to parse entry: `tqdm`
          Caused by: `tqdm` references a workspace in `tool.uv.sources` (e.g., `tqdm = { workspace = true }`), but is not a workspace member
        ");
    }

    #[tokio::test]
    async fn missing_workspace_package() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dependencies = [
              "tqdm ==4.66.0",
            ]
            [tool.uv.sources]
            tqdm = { workspace = true }
        "#};

        assert_snapshot!(format_err(input).await, @"
        error: Failed to parse entry: `tqdm`
          Caused by: `tqdm` references a workspace in `tool.uv.sources` (e.g., `tqdm = { workspace = true }`), but is not a workspace member
        ");
    }

    #[tokio::test]
    async fn cant_be_dynamic() {
        let input = indoc! {r#"
            [project]
            name = "foo"
            version = "0.0.0"
            dynamic = [
              "dependencies"
            ]
            [tool.uv.sources]
            tqdm = { workspace = true }
        "#};

        assert_snapshot!(format_err(input).await, @"error: The following field was marked as dynamic: dependencies");
    }

    #[tokio::test]
    async fn missing_project_section() {
        let input = indoc! {"
            [tool.uv.sources]
            tqdm = { workspace = true }
        "};

        assert_snapshot!(format_err(input).await, @"error: metadata field project not found");
    }

    #[test]
    fn test_flat_requires_dist_noop() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev'")
                .unwrap()
                .into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }

    #[test]
    fn test_flat_requires_dist_basic() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("pkg[dev]; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev'")
                .unwrap()
                .into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'test'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }

    #[test]
    fn test_flat_requires_dist_with_markers() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = vec![
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("pkg[dev]; extra == 'test' and sys_platform == 'win32'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev' and sys_platform == 'win32'")
                .unwrap()
                .into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev' and sys_platform == 'win32'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'test' and sys_platform == 'win32'")
                    .unwrap()
                    .into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }

    #[test]
    fn test_flat_requires_dist_self_constraint() {
        let name = PackageName::from_str("pkg").unwrap();
        let requirements = [
            Requirement::from_str("requests>=2.0.0").unwrap().into(),
            Requirement::from_str("pytest; extra == 'test'")
                .unwrap()
                .into(),
            Requirement::from_str("black; extra == 'dev'")
                .unwrap()
                .into(),
            Requirement::from_str("pkg[async]==1.0.0").unwrap().into(),
        ];

        let expected = FlatRequiresDist(
            [
                Requirement::from_str("requests>=2.0.0").unwrap().into(),
                Requirement::from_str("pytest; extra == 'test'")
                    .unwrap()
                    .into(),
                Requirement::from_str("black; extra == 'dev'")
                    .unwrap()
                    .into(),
                Requirement::from_str("pkg==1.0.0").unwrap().into(),
            ]
            .into(),
        );

        let actual = FlatRequiresDist::from_requirements(requirements.into(), &name);

        assert_eq!(actual, expected);
    }
}