uv_resolver/lock/mod.rs

use std::borrow::Cow;
use std::collections::{BTreeMap, BTreeSet, VecDeque};
use std::error::Error;
use std::fmt::{Debug, Display, Formatter};
use std::io;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::{Arc, LazyLock};

use itertools::Itertools;
use jiff::Timestamp;
use owo_colors::OwoColorize;
use petgraph::graph::NodeIndex;
use petgraph::visit::EdgeRef;
use rustc_hash::{FxHashMap, FxHashSet};
use serde::Serializer;
use toml_edit::{Array, ArrayOfTables, InlineTable, Item, Table, Value, value};
use tracing::debug;
use url::Url;

use uv_cache_key::RepositoryUrl;
use uv_configuration::{BuildOptions, Constraints, InstallTarget};
use uv_distribution::{DistributionDatabase, FlatRequiresDist};
use uv_distribution_filename::{
    BuildTag, DistExtension, ExtensionError, SourceDistExtension, WheelFilename,
};
use uv_distribution_types::{
    BuiltDist, DependencyMetadata, DirectUrlBuiltDist, DirectUrlSourceDist, DirectorySourceDist,
    Dist, DistributionMetadata, FileLocation, GitSourceDist, IndexLocations, IndexMetadata,
    IndexUrl, Name, PathBuiltDist, PathSourceDist, RegistryBuiltDist, RegistryBuiltWheel,
    RegistrySourceDist, RemoteSource, Requirement, RequirementSource, RequiresPython, ResolvedDist,
    SimplifiedMarkerTree, StaticMetadata, ToUrlError, UrlString,
};
use uv_fs::{PortablePath, PortablePathBuf, relative_to};
use uv_git::{RepositoryReference, ResolvedRepositoryReference};
use uv_git_types::{GitLfs, GitOid, GitReference, GitUrl, GitUrlParseError};
use uv_normalize::{ExtraName, GroupName, PackageName};
use uv_pep440::Version;
use uv_pep508::{MarkerEnvironment, MarkerTree, VerbatimUrl, VerbatimUrlError, split_scheme};
use uv_platform_tags::{
    AbiTag, IncompatibleTag, LanguageTag, PlatformTag, TagCompatibility, TagPriority, Tags,
};
use uv_pypi_types::{
    ConflictKind, Conflicts, HashAlgorithm, HashDigest, HashDigests, Hashes, ParsedArchiveUrl,
    ParsedGitUrl, PyProjectToml,
};
use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};
use uv_small_str::SmallString;
use uv_types::{BuildContext, HashStrategy};
use uv_workspace::{Editability, WorkspaceMember};

use crate::exclude_newer::ExcludeNewerSpan;
use crate::fork_strategy::ForkStrategy;
pub(crate) use crate::lock::export::PylockTomlPackage;
pub use crate::lock::export::RequirementsTxtExport;
pub use crate::lock::export::{PylockToml, PylockTomlErrorKind, cyclonedx_json};
pub use crate::lock::installable::Installable;
pub use crate::lock::map::PackageMap;
pub use crate::lock::tree::TreeDisplay;
use crate::resolution::{AnnotatedDist, ResolutionGraphNode};
use crate::universal_marker::{ConflictMarker, UniversalMarker};
use crate::{
    ExcludeNewer, ExcludeNewerPackage, ExcludeNewerValue, InMemoryIndex, MetadataResponse,
    PrereleaseMode, ResolutionMode, ResolverOutput,
};

mod export;
mod installable;
mod map;
mod tree;

/// The current version of the lockfile format.
pub const VERSION: u32 = 1;

/// The current revision of the lockfile format.
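///
/// Readers gate optional fields on the `(version, revision)` pair; for example,
/// [`Lock::supports_provides_extra`] below requires `(1, 1)` or later.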
const REVISION: u32 = 3;

static LINUX_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("os_name == 'posix' and sys_platform == 'linux'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static WINDOWS_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("os_name == 'nt' and sys_platform == 'win32'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static MAC_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("os_name == 'posix' and sys_platform == 'darwin'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static ANDROID_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("sys_platform == 'android'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 =
        MarkerTree::from_str("platform_machine == 'aarch64' or platform_machine == 'arm64' or platform_machine == 'ARM64'")
            .unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 =
        MarkerTree::from_str("platform_machine == 'x86_64' or platform_machine == 'amd64' or platform_machine == 'AMD64'")
            .unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str(
        "platform_machine == 'i686' or platform_machine == 'i386' or platform_machine == 'win32' or platform_machine == 'x86'",
    )
    .unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static PPC64LE_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("platform_machine == 'ppc64le'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static PPC64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("platform_machine == 'ppc64'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static S390X_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("platform_machine == 's390x'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static RISCV64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("platform_machine == 'riscv64'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static LOONGARCH64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("platform_machine == 'loongarch64'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static ARMV7L_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 =
        MarkerTree::from_str("platform_machine == 'armv7l' or platform_machine == 'armv8l'")
            .unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static ARMV6L_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("platform_machine == 'armv6l'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
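// The remaining constants are intersections of the OS and architecture markers above.
// For example (purely illustrative), `LINUX_ARM_MARKERS` is equivalent to
// `os_name == 'posix' and sys_platform == 'linux' and (platform_machine == 'aarch64'
// or platform_machine == 'arm64' or platform_machine == 'ARM64')`.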
static LINUX_ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*ARM_MARKERS);
    marker
});
static LINUX_X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*X86_64_MARKERS);
    marker
});
static LINUX_X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*X86_MARKERS);
    marker
});
static LINUX_PPC64LE_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*PPC64LE_MARKERS);
    marker
});
static LINUX_PPC64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*PPC64_MARKERS);
    marker
});
static LINUX_S390X_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*S390X_MARKERS);
    marker
});
static LINUX_RISCV64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*RISCV64_MARKERS);
    marker
});
static LINUX_LOONGARCH64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*LOONGARCH64_MARKERS);
    marker
});
static LINUX_ARMV7L_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*ARMV7L_MARKERS);
    marker
});
static LINUX_ARMV6L_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*ARMV6L_MARKERS);
    marker
});
static WINDOWS_ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *WINDOWS_MARKERS;
    marker.and(*ARM_MARKERS);
    marker
});
static WINDOWS_X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *WINDOWS_MARKERS;
    marker.and(*X86_64_MARKERS);
    marker
});
static WINDOWS_X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *WINDOWS_MARKERS;
    marker.and(*X86_MARKERS);
    marker
});
static MAC_ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *MAC_MARKERS;
    marker.and(*ARM_MARKERS);
    marker
});
static MAC_X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *MAC_MARKERS;
    marker.and(*X86_64_MARKERS);
    marker
});
static MAC_X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *MAC_MARKERS;
    marker.and(*X86_MARKERS);
    marker
});
static ANDROID_ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *ANDROID_MARKERS;
    marker.and(*ARM_MARKERS);
    marker
});
static ANDROID_X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *ANDROID_MARKERS;
    marker.and(*X86_64_MARKERS);
    marker
});
static ANDROID_X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *ANDROID_MARKERS;
    marker.and(*X86_MARKERS);
    marker
});

/// A distribution with its associated hash.
///
/// This pairs a [`Dist`] with the [`HashDigests`] for the specific wheel or
/// sdist that would be installed.
pub(crate) struct HashedDist {
    pub(crate) dist: Dist,
    pub(crate) hashes: HashDigests,
}

#[derive(Clone, Debug, PartialEq, Eq, serde::Deserialize)]
#[serde(try_from = "LockWire")]
pub struct Lock {
    /// The (major) version of the lockfile format.
    ///
    /// Changes to the major version indicate backwards- and forwards-incompatible changes to the
    /// lockfile format. A given uv version only supports a single major version of the lockfile
    /// format.
    ///
    /// In other words, a version of uv that supports version 2 of the lockfile format will not be
    /// able to read lockfiles generated under version 1 or 3.
    version: u32,
    /// The revision of the lockfile format.
    ///
    /// Changes to the revision indicate backwards-compatible changes to the lockfile format.
    /// In other words, versions of uv that only support revision 1 _will_ be able to read lockfiles
    /// with a revision greater than 1 (though they may ignore newer fields).
    revision: u32,
    /// If this lockfile was built from a forking resolution with non-identical forks, store the
    /// forks in the lockfile so we can recreate them in subsequent resolutions.
    fork_markers: Vec<UniversalMarker>,
    /// The conflicting groups/extras specified by the user.
    conflicts: Conflicts,
    /// The list of supported environments specified by the user.
    supported_environments: Vec<MarkerTree>,
    /// The list of required platforms specified by the user.
    required_environments: Vec<MarkerTree>,
    /// The range of supported Python versions.
    requires_python: RequiresPython,
    /// We discard the lockfile if these options don't match.
    options: ResolverOptions,
    /// The actual locked versions and their metadata.
    packages: Vec<Package>,
    /// A map from package ID to index in `packages`.
    ///
    /// This can be used to quickly lookup the full package for any ID
    /// in this lock. For example, the dependencies for each package are
    /// listed as package IDs. This map can be used to find the full
    /// package for each such dependency.
    ///
    /// It is guaranteed that every package in this lock has an entry in
    /// this map, and that every dependency for every package has an ID
    /// that exists in this map. That is, there are no dependencies that don't
    /// have a corresponding locked package entry in the same lockfile.
    by_id: FxHashMap<PackageId, usize>,
    /// The input requirements to the resolution.
    manifest: ResolverManifest,
}

impl Lock {
    /// Initialize a [`Lock`] from a [`ResolverOutput`].
    pub fn from_resolution(resolution: &ResolverOutput, root: &Path) -> Result<Self, LockError> {
        let mut packages = BTreeMap::new();
        let requires_python = resolution.requires_python.clone();

        // Determine the set of packages included at multiple versions.
        let mut seen = FxHashSet::default();
        let mut duplicates = FxHashSet::default();
        for node_index in resolution.graph.node_indices() {
            let ResolutionGraphNode::Dist(dist) = &resolution.graph[node_index] else {
                continue;
            };
            if !dist.is_base() {
                continue;
            }
            if !seen.insert(dist.name()) {
                duplicates.insert(dist.name());
            }
        }

        // Lock all base packages.
        for node_index in resolution.graph.node_indices() {
            let ResolutionGraphNode::Dist(dist) = &resolution.graph[node_index] else {
                continue;
            };
            if !dist.is_base() {
                continue;
            }

            // If there are multiple distributions for the same package, include the markers of all
            // forks that included the current distribution.
            let fork_markers = if duplicates.contains(dist.name()) {
                resolution
                    .fork_markers
                    .iter()
                    .filter(|fork_markers| !fork_markers.is_disjoint(dist.marker))
                    .copied()
                    .collect()
            } else {
                vec![]
            };

            let mut package = Package::from_annotated_dist(dist, fork_markers, root)?;
            Self::remove_unreachable_wheels(resolution, &requires_python, node_index, &mut package);

            // Add all dependencies
            for edge in resolution.graph.edges(node_index) {
                let ResolutionGraphNode::Dist(dependency_dist) = &resolution.graph[edge.target()]
                else {
                    continue;
                };
                let marker = *edge.weight();
                package.add_dependency(&requires_python, dependency_dist, marker, root)?;
            }

            let id = package.id.clone();
            if let Some(locked_dist) = packages.insert(id, package) {
                return Err(LockErrorKind::DuplicatePackage {
                    id: locked_dist.id.clone(),
                }
                .into());
            }
        }

        // Lock all extras and development dependencies.
        for node_index in resolution.graph.node_indices() {
            let ResolutionGraphNode::Dist(dist) = &resolution.graph[node_index] else {
                continue;
            };
            if let Some(extra) = dist.extra.as_ref() {
                let id = PackageId::from_annotated_dist(dist, root)?;
                let Some(package) = packages.get_mut(&id) else {
                    return Err(LockErrorKind::MissingExtraBase {
                        id,
                        extra: extra.clone(),
                    }
                    .into());
                };
                for edge in resolution.graph.edges(node_index) {
                    let ResolutionGraphNode::Dist(dependency_dist) =
                        &resolution.graph[edge.target()]
                    else {
                        continue;
                    };
                    let marker = *edge.weight();
                    package.add_optional_dependency(
                        &requires_python,
                        extra.clone(),
                        dependency_dist,
                        marker,
                        root,
                    )?;
                }
            }
            if let Some(group) = dist.group.as_ref() {
                let id = PackageId::from_annotated_dist(dist, root)?;
                let Some(package) = packages.get_mut(&id) else {
                    return Err(LockErrorKind::MissingDevBase {
                        id,
                        group: group.clone(),
                    }
                    .into());
                };
                for edge in resolution.graph.edges(node_index) {
                    let ResolutionGraphNode::Dist(dependency_dist) =
                        &resolution.graph[edge.target()]
                    else {
                        continue;
                    };
                    let marker = *edge.weight();
                    package.add_group_dependency(
                        &requires_python,
                        group.clone(),
                        dependency_dist,
                        marker,
                        root,
                    )?;
                }
            }
        }

        let packages = packages.into_values().collect();

        let options = ResolverOptions {
            resolution_mode: resolution.options.resolution_mode,
            prerelease_mode: resolution.options.prerelease_mode,
            fork_strategy: resolution.options.fork_strategy,
            exclude_newer: resolution.options.exclude_newer.clone().into(),
        };
        let lock = Self::new(
            VERSION,
            REVISION,
            packages,
            requires_python,
            options,
            ResolverManifest::default(),
            Conflicts::empty(),
            vec![],
            vec![],
            resolution.fork_markers.clone(),
        )?;
        Ok(lock)
    }

    /// Remove wheels that can't be selected for installation due to environment markers.
    ///
    /// For example, a package included under `sys_platform == 'win32'` does not need Linux
    /// wheels.
    fn remove_unreachable_wheels(
        graph: &ResolverOutput,
        requires_python: &RequiresPython,
        node_index: NodeIndex,
        locked_dist: &mut Package,
    ) {
        // Remove wheels that don't match `requires-python` and can't be selected for installation.
        locked_dist
            .wheels
            .retain(|wheel| requires_python.matches_wheel_tag(&wheel.filename));

        // Filter by platform tags.
        locked_dist.wheels.retain(|wheel| {
            // Naively, we'd check whether `platform_system == 'Linux'` is disjoint, or
            // `os_name == 'posix'` is disjoint, or `sys_platform == 'linux'` is disjoint (each on its
            // own sufficient to exclude linux wheels), but due to
            // `((A ∩ B = ∅) or (A ∩ C = ∅)) => (A ∩ (B ∩ C) = ∅)`
            // a single disjointness check with the intersection is sufficient, so we have one
            // constant per platform.
            let platform_tags = wheel.filename.platform_tags();

            if platform_tags.iter().all(PlatformTag::is_any) {
                return true;
            }

            if platform_tags.iter().all(PlatformTag::is_linux) {
                if platform_tags.iter().all(PlatformTag::is_arm) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_ARM_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_x86_64) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_X86_64_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_x86) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_X86_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_ppc64le) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_PPC64LE_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_ppc64) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_PPC64_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_s390x) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_S390X_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_riscv64) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_RISCV64_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_loongarch64) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_LOONGARCH64_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_armv7l) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_ARMV7L_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_armv6l) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_ARMV6L_MARKERS)
                    {
                        return false;
                    }
                } else if graph.graph[node_index].marker().is_disjoint(*LINUX_MARKERS) {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_windows) {
                if platform_tags.iter().all(PlatformTag::is_arm) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*WINDOWS_ARM_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_x86_64) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*WINDOWS_X86_64_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_x86) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*WINDOWS_X86_MARKERS)
                    {
                        return false;
                    }
                } else if graph.graph[node_index]
                    .marker()
                    .is_disjoint(*WINDOWS_MARKERS)
                {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_macos) {
                if platform_tags.iter().all(PlatformTag::is_arm) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*MAC_ARM_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_x86_64) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*MAC_X86_64_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_x86) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*MAC_X86_MARKERS)
                    {
                        return false;
                    }
                } else if graph.graph[node_index].marker().is_disjoint(*MAC_MARKERS) {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_android) {
                if platform_tags.iter().all(PlatformTag::is_arm) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*ANDROID_ARM_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_x86_64) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*ANDROID_X86_64_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_x86) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*ANDROID_X86_MARKERS)
                    {
                        return false;
                    }
                } else if graph.graph[node_index]
                    .marker()
                    .is_disjoint(*ANDROID_MARKERS)
                {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_arm) {
                if graph.graph[node_index].marker().is_disjoint(*ARM_MARKERS) {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_x86_64) {
                if graph.graph[node_index]
                    .marker()
                    .is_disjoint(*X86_64_MARKERS)
                {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_x86) {
                if graph.graph[node_index].marker().is_disjoint(*X86_MARKERS) {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_ppc64le) {
                if graph.graph[node_index]
                    .marker()
                    .is_disjoint(*PPC64LE_MARKERS)
                {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_ppc64) {
                if graph.graph[node_index].marker().is_disjoint(*PPC64_MARKERS) {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_s390x) {
                if graph.graph[node_index].marker().is_disjoint(*S390X_MARKERS) {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_riscv64) {
                if graph.graph[node_index]
                    .marker()
                    .is_disjoint(*RISCV64_MARKERS)
                {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_loongarch64) {
                if graph.graph[node_index]
                    .marker()
                    .is_disjoint(*LOONGARCH64_MARKERS)
                {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_armv7l) {
                if graph.graph[node_index]
                    .marker()
                    .is_disjoint(*ARMV7L_MARKERS)
                {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_armv6l) {
                if graph.graph[node_index]
                    .marker()
                    .is_disjoint(*ARMV6L_MARKERS)
                {
                    return false;
                }
            }

            true
        });
    }

    /// Initialize a [`Lock`] from a list of [`Package`] entries.
    fn new(
        version: u32,
        revision: u32,
        mut packages: Vec<Package>,
        requires_python: RequiresPython,
        options: ResolverOptions,
        manifest: ResolverManifest,
        conflicts: Conflicts,
        supported_environments: Vec<MarkerTree>,
        required_environments: Vec<MarkerTree>,
        fork_markers: Vec<UniversalMarker>,
    ) -> Result<Self, LockError> {
        // Put all dependencies for each package in a canonical order and
        // check for duplicates.
        for package in &mut packages {
            package.dependencies.sort();
            for windows in package.dependencies.windows(2) {
                let (dep1, dep2) = (&windows[0], &windows[1]);
                if dep1 == dep2 {
                    return Err(LockErrorKind::DuplicateDependency {
                        id: package.id.clone(),
                        dependency: dep1.clone(),
                    }
                    .into());
                }
            }

            // Perform the same validation for optional dependencies.
            for (extra, dependencies) in &mut package.optional_dependencies {
                dependencies.sort();
                for windows in dependencies.windows(2) {
                    let (dep1, dep2) = (&windows[0], &windows[1]);
                    if dep1 == dep2 {
                        return Err(LockErrorKind::DuplicateOptionalDependency {
                            id: package.id.clone(),
                            extra: extra.clone(),
                            dependency: dep1.clone(),
                        }
                        .into());
                    }
                }
            }

            // Perform the same validation for dev dependencies.
            for (group, dependencies) in &mut package.dependency_groups {
                dependencies.sort();
                for windows in dependencies.windows(2) {
                    let (dep1, dep2) = (&windows[0], &windows[1]);
                    if dep1 == dep2 {
                        return Err(LockErrorKind::DuplicateDevDependency {
                            id: package.id.clone(),
                            group: group.clone(),
                            dependency: dep1.clone(),
                        }
                        .into());
                    }
                }
            }
        }
        packages.sort_by(|dist1, dist2| dist1.id.cmp(&dist2.id));

        // Check for duplicate package IDs and also build up the map for
        // packages keyed by their ID.
        let mut by_id = FxHashMap::default();
        for (i, dist) in packages.iter().enumerate() {
            if by_id.insert(dist.id.clone(), i).is_some() {
                return Err(LockErrorKind::DuplicatePackage {
                    id: dist.id.clone(),
                }
                .into());
            }
        }

        // Build up a map from ID to extras.
        let mut extras_by_id = FxHashMap::default();
        for dist in &packages {
            for extra in dist.optional_dependencies.keys() {
                extras_by_id
                    .entry(dist.id.clone())
                    .or_insert_with(FxHashSet::default)
                    .insert(extra.clone());
            }
        }

        // Remove any non-existent extras (e.g., extras that were requested but don't exist).
        for dist in &mut packages {
            for dep in dist
                .dependencies
                .iter_mut()
                .chain(dist.optional_dependencies.values_mut().flatten())
                .chain(dist.dependency_groups.values_mut().flatten())
            {
                dep.extra.retain(|extra| {
                    extras_by_id
                        .get(&dep.package_id)
                        .is_some_and(|extras| extras.contains(extra))
                });
            }
        }

        // Check that every dependency has an entry in `by_id`. If any don't,
        // it implies we somehow have a dependency with no corresponding locked
        // package.
        for dist in &packages {
            for dep in &dist.dependencies {
                if !by_id.contains_key(&dep.package_id) {
                    return Err(LockErrorKind::UnrecognizedDependency {
                        id: dist.id.clone(),
                        dependency: dep.clone(),
                    }
                    .into());
                }
            }

            // Perform the same validation for optional dependencies.
            for dependencies in dist.optional_dependencies.values() {
                for dep in dependencies {
                    if !by_id.contains_key(&dep.package_id) {
                        return Err(LockErrorKind::UnrecognizedDependency {
                            id: dist.id.clone(),
                            dependency: dep.clone(),
                        }
                        .into());
                    }
                }
            }

            // Perform the same validation for dev dependencies.
            for dependencies in dist.dependency_groups.values() {
                for dep in dependencies {
                    if !by_id.contains_key(&dep.package_id) {
                        return Err(LockErrorKind::UnrecognizedDependency {
                            id: dist.id.clone(),
                            dependency: dep.clone(),
                        }
                        .into());
                    }
                }
            }

            // Also check that our sources are consistent with whether we have
            // hashes or not.
            if let Some(requires_hash) = dist.id.source.requires_hash() {
                for wheel in &dist.wheels {
                    if requires_hash != wheel.hash.is_some() {
                        return Err(LockErrorKind::Hash {
                            id: dist.id.clone(),
                            artifact_type: "wheel",
                            expected: requires_hash,
                        }
                        .into());
                    }
                }
            }
        }
        let lock = Self {
            version,
            revision,
            fork_markers,
            conflicts,
            supported_environments,
            required_environments,
            requires_python,
            options,
            packages,
            by_id,
            manifest,
        };
        Ok(lock)
    }

    /// Record the requirements that were used to generate this lock.
    #[must_use]
    pub fn with_manifest(mut self, manifest: ResolverManifest) -> Self {
        self.manifest = manifest;
        self
    }

    /// Record the conflicting groups that were used to generate this lock.
    #[must_use]
    pub fn with_conflicts(mut self, conflicts: Conflicts) -> Self {
        self.conflicts = conflicts;
        self
    }

    /// Record the supported environments that were used to generate this lock.
    #[must_use]
    pub fn with_supported_environments(mut self, supported_environments: Vec<MarkerTree>) -> Self {
        // We "complexify" the markers given, since the supported
        // environments given might be coming directly from what's written in
        // `pyproject.toml`, and those are assumed to be simplified (i.e.,
        // they assume `requires-python` is true). But a `Lock` always uses
        // non-simplified markers internally, so we need to re-complexify them
        // here.
        //
        // The nice thing about complexifying is that it's a no-op if the
        // markers given have already been complexified.
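        //
        // Illustrative example (hypothetical values): with `requires-python = ">=3.9"`,
        // a simplified `sys_platform == 'linux'` complexifies to
        // `python_full_version >= '3.9' and sys_platform == 'linux'`.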
        self.supported_environments = supported_environments
            .into_iter()
            .map(|marker| self.requires_python.complexify_markers(marker))
            .collect();
        self
    }

    /// Record the required platforms that were used to generate this lock.
    #[must_use]
    pub fn with_required_environments(mut self, required_environments: Vec<MarkerTree>) -> Self {
        self.required_environments = required_environments
            .into_iter()
            .map(|marker| self.requires_python.complexify_markers(marker))
            .collect();
        self
    }

    /// Returns `true` if this [`Lock`] includes `provides-extra` metadata.
    pub fn supports_provides_extra(&self) -> bool {
        // `provides-extra` was added in Version 1 Revision 1.
        (self.version(), self.revision()) >= (1, 1)
    }

    /// Returns `true` if this [`Lock`] includes entries for empty `dependency-group` metadata.
    pub fn includes_empty_groups(&self) -> bool {
        // Empty dependency groups are included as of https://github.com/astral-sh/uv/pull/8598,
        // but Version 1 Revision 1 is the first revision published after that change.
        (self.version(), self.revision()) >= (1, 1)
    }

    /// Returns the lockfile version.
    pub fn version(&self) -> u32 {
        self.version
    }

    /// Returns the lockfile revision.
    pub fn revision(&self) -> u32 {
        self.revision
    }

    /// Returns the number of packages in the lockfile.
    pub fn len(&self) -> usize {
        self.packages.len()
    }

    /// Returns `true` if the lockfile contains no packages.
    pub fn is_empty(&self) -> bool {
        self.packages.is_empty()
    }

    /// Returns the [`Package`] entries in this lock.
    pub fn packages(&self) -> &[Package] {
        &self.packages
    }

    /// Returns the supported Python version range for the lockfile, if present.
    pub fn requires_python(&self) -> &RequiresPython {
        &self.requires_python
    }

    /// Returns the resolution mode used to generate this lock.
    pub fn resolution_mode(&self) -> ResolutionMode {
        self.options.resolution_mode
    }

    /// Returns the pre-release mode used to generate this lock.
    pub fn prerelease_mode(&self) -> PrereleaseMode {
        self.options.prerelease_mode
    }

    /// Returns the multi-version mode used to generate this lock.
    pub fn fork_strategy(&self) -> ForkStrategy {
        self.options.fork_strategy
    }

    /// Returns the exclude newer setting used to generate this lock.
    pub fn exclude_newer(&self) -> ExcludeNewer {
        // TODO(zanieb): It'd be nice not to hide this clone here, but I am hesitant to introduce
        // a whole new `ExcludeNewerRef` type just for this
        self.options.exclude_newer.clone().into()
    }

    /// Returns the conflicting groups that were used to generate this lock.
    pub fn conflicts(&self) -> &Conflicts {
        &self.conflicts
    }

    /// Returns the supported environments that were used to generate this lock.
    pub fn supported_environments(&self) -> &[MarkerTree] {
        &self.supported_environments
    }

    /// Returns the required platforms that were used to generate this lock.
    pub fn required_environments(&self) -> &[MarkerTree] {
        &self.required_environments
    }

    /// Returns the workspace members that were used to generate this lock.
    pub fn members(&self) -> &BTreeSet<PackageName> {
        &self.manifest.members
    }

    /// Returns the requirements that were used to generate this lock.
    pub fn requirements(&self) -> &BTreeSet<Requirement> {
        &self.manifest.requirements
    }

    /// Returns the dependency groups that were used to generate this lock.
    pub fn dependency_groups(&self) -> &BTreeMap<GroupName, BTreeSet<Requirement>> {
        &self.manifest.dependency_groups
    }

    /// Returns the build constraints that were used to generate this lock.
    pub fn build_constraints(&self, root: &Path) -> Constraints {
        Constraints::from_requirements(
            self.manifest
                .build_constraints
                .iter()
                .cloned()
                .map(|requirement| requirement.to_absolute(root)),
        )
    }

    /// Return the workspace root used to generate this lock.
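    ///
    /// The root is identified as the editable or virtual package whose source path is
    /// empty, i.e. the member that lives at the workspace root itself.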
    pub fn root(&self) -> Option<&Package> {
        self.packages.iter().find(|package| {
            let (Source::Editable(path) | Source::Virtual(path)) = &package.id.source else {
                return false;
            };
            path.as_ref() == Path::new("")
        })
    }

    /// Returns the supported environments that were used to generate this
    /// lock.
    ///
    /// The markers returned here are "simplified" with respect to the lock
    /// file's `requires-python` setting. This means these should only be used
    /// for direct comparison purposes with the supported environments written
    /// by a human in `pyproject.toml`. (Think of "supported environments" in
    /// `pyproject.toml` as having an implicit `and python_full_version >=
    /// '{requires-python-bound}'` attached to each one.)
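    ///
    /// For example (illustrative values): with `requires-python = ">=3.9"`, a stored
    /// marker `python_full_version >= '3.9' and sys_platform == 'linux'` simplifies
    /// back to `sys_platform == 'linux'`.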
    pub fn simplified_supported_environments(&self) -> Vec<MarkerTree> {
        self.supported_environments()
            .iter()
            .copied()
            .map(|marker| self.simplify_environment(marker))
            .collect()
    }

    /// Returns the required platforms that were used to generate this
    /// lock.
    pub fn simplified_required_environments(&self) -> Vec<MarkerTree> {
        self.required_environments()
            .iter()
            .copied()
            .map(|marker| self.simplify_environment(marker))
            .collect()
    }

    /// Simplify the given marker environment with respect to the lockfile's
    /// `requires-python` setting.
    pub fn simplify_environment(&self, marker: MarkerTree) -> MarkerTree {
        self.requires_python.simplify_markers(marker)
    }

    /// If this lockfile was built from a forking resolution with non-identical forks, return the
    /// markers of those forks, otherwise an empty slice.
    pub fn fork_markers(&self) -> &[UniversalMarker] {
        self.fork_markers.as_slice()
    }

    /// Checks whether the fork markers cover the entire supported marker space.
    ///
    /// On validation error, returns the actually covered and the expected marker space.
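    ///
    /// For example (illustrative): a lockfile that forked on `sys_platform == 'linux'`
    /// and `sys_platform != 'linux'` has a fork-marker union of `TRUE`, which covers any
    /// supported environments; a gap in the forks would be reported as an error pair.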
    pub fn check_marker_coverage(&self) -> Result<(), (MarkerTree, MarkerTree)> {
        let fork_markers_union = if self.fork_markers().is_empty() {
            self.requires_python.to_marker_tree()
        } else {
            let mut fork_markers_union = MarkerTree::FALSE;
            for fork_marker in self.fork_markers() {
                fork_markers_union.or(fork_marker.pep508());
            }
            fork_markers_union
        };
        let mut environments_union = if !self.supported_environments.is_empty() {
            let mut environments_union = MarkerTree::FALSE;
            for fork_marker in &self.supported_environments {
                environments_union.or(*fork_marker);
            }
            environments_union
        } else {
            MarkerTree::TRUE
        };
        // When a user defines environments, they are implicitly constrained by requires-python.
        environments_union.and(self.requires_python.to_marker_tree());
        if fork_markers_union.negate().is_disjoint(environments_union) {
            Ok(())
        } else {
            Err((fork_markers_union, environments_union))
        }
    }

    /// Checks whether the new requires-python specification is disjoint with
    /// the fork markers in this lock file.
    ///
    /// If they are disjoint, then the union of the fork markers along with the
    /// given requires-python specification (converted to a marker tree) are
    /// returned.
    ///
    /// When disjoint, the fork markers in the lock file should be dropped and
    /// not used.
    pub fn requires_python_coverage(
        &self,
        new_requires_python: &RequiresPython,
    ) -> Result<(), (MarkerTree, MarkerTree)> {
        let fork_markers_union = if self.fork_markers().is_empty() {
            self.requires_python.to_marker_tree()
        } else {
            let mut fork_markers_union = MarkerTree::FALSE;
            for fork_marker in self.fork_markers() {
                fork_markers_union.or(fork_marker.pep508());
            }
            fork_markers_union
        };
        let new_requires_python = new_requires_python.to_marker_tree();
        if fork_markers_union.is_disjoint(new_requires_python) {
            Err((fork_markers_union, new_requires_python))
        } else {
            Ok(())
        }
    }

    /// Returns the TOML representation of this lockfile.
    pub fn to_toml(&self) -> Result<String, toml_edit::ser::Error> {
        // Catch a lockfile where the union of fork markers doesn't cover the supported
        // environments.
        debug_assert!(self.check_marker_coverage().is_ok());

        // We construct a TOML document manually instead of going through Serde to enable
        // the use of inline tables.
        let mut doc = toml_edit::DocumentMut::new();
        doc.insert("version", value(i64::from(self.version)));

        if self.revision > 0 {
            doc.insert("revision", value(i64::from(self.revision)));
        }

        doc.insert("requires-python", value(self.requires_python.to_string()));

        if !self.fork_markers.is_empty() {
            let fork_markers = each_element_on_its_line_array(
                simplified_universal_markers(&self.fork_markers, &self.requires_python).into_iter(),
            );
            if !fork_markers.is_empty() {
                doc.insert("resolution-markers", value(fork_markers));
            }
        }

        if !self.supported_environments.is_empty() {
            let supported_environments = each_element_on_its_line_array(
                self.supported_environments
                    .iter()
                    .copied()
                    .map(|marker| SimplifiedMarkerTree::new(&self.requires_python, marker))
                    .filter_map(SimplifiedMarkerTree::try_to_string),
            );
            doc.insert("supported-markers", value(supported_environments));
        }

        if !self.required_environments.is_empty() {
            let required_environments = each_element_on_its_line_array(
                self.required_environments
                    .iter()
                    .copied()
                    .map(|marker| SimplifiedMarkerTree::new(&self.requires_python, marker))
                    .filter_map(SimplifiedMarkerTree::try_to_string),
            );
            doc.insert("required-markers", value(required_environments));
        }

        if !self.conflicts.is_empty() {
            let mut list = Array::new();
            for set in self.conflicts.iter() {
                list.push(each_element_on_its_line_array(set.iter().map(|item| {
                    let mut table = InlineTable::new();
                    table.insert("package", Value::from(item.package().to_string()));
                    match item.kind() {
                        ConflictKind::Project => {}
                        ConflictKind::Extra(extra) => {
                            table.insert("extra", Value::from(extra.to_string()));
                        }
                        ConflictKind::Group(group) => {
                            table.insert("group", Value::from(group.to_string()));
                        }
                    }
                    table
                })));
            }
            doc.insert("conflicts", value(list));
        }

        // Write the settings that were used to generate the resolution.
        // This enables us to invalidate the lockfile if the user changes
        // their settings.
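        //
        // Illustrative output (values are hypothetical):
        //
        //     [options]
        //     resolution-mode = "lowest-direct"
        //     exclude-newer = "2024-01-01T00:00:00Z"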
        {
            let mut options_table = Table::new();

            if self.options.resolution_mode != ResolutionMode::default() {
                options_table.insert(
                    "resolution-mode",
                    value(self.options.resolution_mode.to_string()),
                );
            }
            if self.options.prerelease_mode != PrereleaseMode::default() {
                options_table.insert(
                    "prerelease-mode",
                    value(self.options.prerelease_mode.to_string()),
                );
            }
            if self.options.fork_strategy != ForkStrategy::default() {
                options_table.insert(
                    "fork-strategy",
                    value(self.options.fork_strategy.to_string()),
                );
            }
            let exclude_newer = ExcludeNewer::from(self.options.exclude_newer.clone());
            if !exclude_newer.is_empty() {
                // Always serialize global exclude-newer as a string
                if let Some(global) = &exclude_newer.global {
                    options_table.insert("exclude-newer", value(global.to_string()));
                    // Serialize the original span if present
                    if let Some(span) = global.span() {
                        options_table.insert("exclude-newer-span", value(span.to_string()));
                    }
                }

                // Serialize package-specific exclusions as a separate field
                if !exclude_newer.package.is_empty() {
                    let mut package_table = toml_edit::Table::new();
                    for (name, exclude_newer_value) in &exclude_newer.package {
                        if let Some(span) = exclude_newer_value.span() {
                            // Serialize as inline table with timestamp and span
                            let mut inline = toml_edit::InlineTable::new();
                            inline.insert(
                                "timestamp",
                                exclude_newer_value.timestamp().to_string().into(),
                            );
                            inline.insert("span", span.to_string().into());
                            package_table.insert(name.as_ref(), Item::Value(inline.into()));
                        } else {
                            // Serialize as simple string
                            package_table
                                .insert(name.as_ref(), value(exclude_newer_value.to_string()));
                        }
                    }
                    options_table.insert("exclude-newer-package", Item::Table(package_table));
                }
            }

            if !options_table.is_empty() {
                doc.insert("options", Item::Table(options_table));
            }
        }

        // Write the manifest that was used to generate the resolution.
        {
            let mut manifest_table = Table::new();

            if !self.manifest.members.is_empty() {
                manifest_table.insert(
                    "members",
                    value(each_element_on_its_line_array(
                        self.manifest
                            .members
                            .iter()
                            .map(std::string::ToString::to_string),
                    )),
                );
            }

            if !self.manifest.requirements.is_empty() {
                let requirements = self
                    .manifest
                    .requirements
                    .iter()
                    .map(|requirement| {
                        serde::Serialize::serialize(
                            &requirement,
                            toml_edit::ser::ValueSerializer::new(),
                        )
                    })
                    .collect::<Result<Vec<_>, _>>()?;
                let requirements = match requirements.as_slice() {
                    [] => Array::new(),
                    [requirement] => Array::from_iter([requirement]),
                    requirements => each_element_on_its_line_array(requirements.iter()),
                };
                manifest_table.insert("requirements", value(requirements));
            }

            if !self.manifest.constraints.is_empty() {
                let constraints = self
                    .manifest
                    .constraints
                    .iter()
                    .map(|requirement| {
                        serde::Serialize::serialize(
                            &requirement,
                            toml_edit::ser::ValueSerializer::new(),
                        )
                    })
                    .collect::<Result<Vec<_>, _>>()?;
                let constraints = match constraints.as_slice() {
                    [] => Array::new(),
                    [requirement] => Array::from_iter([requirement]),
                    constraints => each_element_on_its_line_array(constraints.iter()),
                };
                manifest_table.insert("constraints", value(constraints));
            }

            if !self.manifest.overrides.is_empty() {
                let overrides = self
                    .manifest
                    .overrides
                    .iter()
                    .map(|requirement| {
                        serde::Serialize::serialize(
                            &requirement,
                            toml_edit::ser::ValueSerializer::new(),
                        )
                    })
                    .collect::<Result<Vec<_>, _>>()?;
                let overrides = match overrides.as_slice() {
                    [] => Array::new(),
                    [requirement] => Array::from_iter([requirement]),
                    overrides => each_element_on_its_line_array(overrides.iter()),
                };
                manifest_table.insert("overrides", value(overrides));
            }

            if !self.manifest.excludes.is_empty() {
                let excludes = self
                    .manifest
                    .excludes
                    .iter()
                    .map(|name| {
                        serde::Serialize::serialize(&name, toml_edit::ser::ValueSerializer::new())
                    })
                    .collect::<Result<Vec<_>, _>>()?;
                let excludes = match excludes.as_slice() {
                    [] => Array::new(),
                    [name] => Array::from_iter([name]),
                    excludes => each_element_on_its_line_array(excludes.iter()),
                };
                manifest_table.insert("excludes", value(excludes));
            }

            if !self.manifest.build_constraints.is_empty() {
                let build_constraints = self
                    .manifest
                    .build_constraints
                    .iter()
                    .map(|requirement| {
                        serde::Serialize::serialize(
                            &requirement,
                            toml_edit::ser::ValueSerializer::new(),
                        )
                    })
                    .collect::<Result<Vec<_>, _>>()?;
                let build_constraints = match build_constraints.as_slice() {
                    [] => Array::new(),
                    [requirement] => Array::from_iter([requirement]),
                    build_constraints => each_element_on_its_line_array(build_constraints.iter()),
                };
                manifest_table.insert("build-constraints", value(build_constraints));
            }

            if !self.manifest.dependency_groups.is_empty() {
                let mut dependency_groups = Table::new();
                for (extra, requirements) in &self.manifest.dependency_groups {
                    let requirements = requirements
                        .iter()
                        .map(|requirement| {
                            serde::Serialize::serialize(
                                &requirement,
                                toml_edit::ser::ValueSerializer::new(),
                            )
                        })
                        .collect::<Result<Vec<_>, _>>()?;
                    let requirements = match requirements.as_slice() {
1405                        [] => Array::new(),
1406                        [requirement] => Array::from_iter([requirement]),
1407                        requirements => each_element_on_its_line_array(requirements.iter()),
1408                    };
1409                    if !requirements.is_empty() {
1410                        dependency_groups.insert(extra.as_ref(), value(requirements));
1411                    }
1412                }
1413                if !dependency_groups.is_empty() {
1414                    manifest_table.insert("dependency-groups", Item::Table(dependency_groups));
1415                }
1416            }
1417
1418            if !self.manifest.dependency_metadata.is_empty() {
1419                let mut tables = ArrayOfTables::new();
1420                for metadata in &self.manifest.dependency_metadata {
1421                    let mut table = Table::new();
1422                    table.insert("name", value(metadata.name.to_string()));
1423                    if let Some(version) = metadata.version.as_ref() {
1424                        table.insert("version", value(version.to_string()));
1425                    }
1426                    if !metadata.requires_dist.is_empty() {
1427                        table.insert(
1428                            "requires-dist",
1429                            value(serde::Serialize::serialize(
1430                                &metadata.requires_dist,
1431                                toml_edit::ser::ValueSerializer::new(),
1432                            )?),
1433                        );
1434                    }
1435                    if let Some(requires_python) = metadata.requires_python.as_ref() {
1436                        table.insert("requires-python", value(requires_python.to_string()));
1437                    }
1438                    if !metadata.provides_extra.is_empty() {
1439                        table.insert(
1440                            "provides-extras",
1441                            value(serde::Serialize::serialize(
1442                                &metadata.provides_extra,
1443                                toml_edit::ser::ValueSerializer::new(),
1444                            )?),
1445                        );
1446                    }
1447                    tables.push(table);
1448                }
1449                manifest_table.insert("dependency-metadata", Item::ArrayOfTables(tables));
1450            }
1451
1452            if !manifest_table.is_empty() {
1453                doc.insert("manifest", Item::Table(manifest_table));
1454            }
1455        }
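        // A hedged sketch of the resulting `[manifest]` table (keys as written above; member
        // names and requirement entries are hypothetical):
        //
        //     [manifest]
        //     members = [
        //         "foo",
        //         "bar",
        //     ]
        //     requirements = [{ name = "requests" }]
        //
        //     [manifest.dependency-groups]
        //     dev = [{ name = "pytest" }]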
1456
1457        // Count the number of packages for each package name. When
1458        // there's only one package for a particular package name (the
1459        // overwhelmingly common case), we can omit some data (like source and
1460        // version) on dependency edges since it is strictly redundant.
1461        let mut dist_count_by_name: FxHashMap<PackageName, u64> = FxHashMap::default();
1462        for dist in &self.packages {
1463            *dist_count_by_name.entry(dist.id.name.clone()).or_default() += 1;
1464        }
1465
1466        let mut packages = ArrayOfTables::new();
1467        for dist in &self.packages {
1468            packages.push(dist.to_toml(&self.requires_python, &dist_count_by_name)?);
1469        }
1470
1471        doc.insert("package", Item::ArrayOfTables(packages));
1472        Ok(doc.to_string())
1473    }
1474
1475    /// Returns the package with the given name. If there are multiple
1476    /// matching packages, then an error is returned. If there are no
1477    /// matching packages, then `Ok(None)` is returned.
1478    pub fn find_by_name(&self, name: &PackageName) -> Result<Option<&Package>, String> {
1479        let mut found_dist = None;
1480        for dist in &self.packages {
1481            if &dist.id.name == name {
1482                if found_dist.is_some() {
1483                    return Err(format!("found multiple packages matching `{name}`"));
1484                }
1485                found_dist = Some(dist);
1486            }
1487        }
1488        Ok(found_dist)
1489    }
1490
1491    /// Returns the package with the given name.
1492    ///
1493    /// If there are multiple matching packages, returns the package that
1494    /// corresponds to the given marker tree.
1495    ///
1496    /// If there are multiple packages that are relevant to the current
1497    /// markers, then an error is returned.
1498    ///
1499    /// If there are no matching packages, then `Ok(None)` is returned.
1500    fn find_by_markers(
1501        &self,
1502        name: &PackageName,
1503        marker_env: &MarkerEnvironment,
1504    ) -> Result<Option<&Package>, String> {
1505        let mut found_dist = None;
1506        for dist in &self.packages {
1507            if &dist.id.name == name {
1508                if dist.fork_markers.is_empty()
1509                    || dist
1510                        .fork_markers
1511                        .iter()
1512                        .any(|marker| marker.evaluate_no_extras(marker_env))
1513                {
1514                    if found_dist.is_some() {
1515                        return Err(format!("found multiple packages matching `{name}`"));
1516                    }
1517                    found_dist = Some(dist);
1518                }
1519            }
1520        }
1521        Ok(found_dist)
1522    }
1523
1524    fn find_by_id(&self, id: &PackageId) -> &Package {
1525        let index = *self.by_id.get(id).expect("locked package for ID");
1526
1527        self.packages.get(index).expect("valid index for package")
1528    }
1529
1530    /// Returns a [`SatisfiesResult`] indicating whether the given extras match the [`Package`] metadata.
1531    fn satisfies_provides_extra<'lock>(
1532        &self,
1533        provides_extra: Box<[ExtraName]>,
1534        package: &'lock Package,
1535    ) -> SatisfiesResult<'lock> {
1536        if !self.supports_provides_extra() {
1537            return SatisfiesResult::Satisfied;
1538        }
1539
1540        let expected: BTreeSet<_> = provides_extra.iter().collect();
1541        let actual: BTreeSet<_> = package.metadata.provides_extra.iter().collect();
1542
1543        if expected != actual {
1544            let expected = Box::into_iter(provides_extra).collect();
1545            return SatisfiesResult::MismatchedPackageProvidesExtra(
1546                &package.id.name,
1547                package.id.version.as_ref(),
1548                expected,
1549                actual,
1550            );
1551        }
1552
1553        SatisfiesResult::Satisfied
1554    }
1555
1556    /// Returns a [`SatisfiesResult`] indicating whether the given `requires-dist` and dependency groups match the [`Package`] metadata.
1557    #[allow(clippy::unused_self)]
1558    fn satisfies_requires_dist<'lock>(
1559        &self,
1560        requires_dist: Box<[Requirement]>,
1561        dependency_groups: BTreeMap<GroupName, Box<[Requirement]>>,
1562        package: &'lock Package,
1563        root: &Path,
1564    ) -> Result<SatisfiesResult<'lock>, LockError> {
1565        // Special-case: if the version is dynamic, compare the flattened requirements.
1566        let flattened = if package.is_dynamic() {
1567            Some(
1568                FlatRequiresDist::from_requirements(requires_dist.clone(), &package.id.name)
1569                    .into_iter()
1570                    .map(|requirement| {
1571                        normalize_requirement(requirement, root, &self.requires_python)
1572                    })
1573                    .collect::<Result<BTreeSet<_>, _>>()?,
1574            )
1575        } else {
1576            None
1577        };
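        // Hedged illustration of why the flattened form is kept: some build backends (e.g.,
        // `hatchling`) flatten recursive or self-referential extras in `requires-dist`, so
        // when the version is dynamic we also accept a match against the flattened
        // requirements in the comparison below.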
1578
1579        // Validate the `requires-dist` metadata.
1580        let expected: BTreeSet<_> = Box::into_iter(requires_dist)
1581            .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1582            .collect::<Result<_, _>>()?;
1583        let actual: BTreeSet<_> = package
1584            .metadata
1585            .requires_dist
1586            .iter()
1587            .cloned()
1588            .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1589            .collect::<Result<_, _>>()?;
1590
1591        if expected != actual && flattened.is_none_or(|expected| expected != actual) {
1592            return Ok(SatisfiesResult::MismatchedPackageRequirements(
1593                &package.id.name,
1594                package.id.version.as_ref(),
1595                expected,
1596                actual,
1597            ));
1598        }
1599
1600        // Validate the `dependency-groups` metadata.
1601        let expected: BTreeMap<GroupName, BTreeSet<Requirement>> = dependency_groups
1602            .into_iter()
1603            .filter(|(_, requirements)| self.includes_empty_groups() || !requirements.is_empty())
1604            .map(|(group, requirements)| {
1605                Ok::<_, LockError>((
1606                    group,
1607                    Box::into_iter(requirements)
1608                        .map(|requirement| {
1609                            normalize_requirement(requirement, root, &self.requires_python)
1610                        })
1611                        .collect::<Result<_, _>>()?,
1612                ))
1613            })
1614            .collect::<Result<_, _>>()?;
1615        let actual: BTreeMap<GroupName, BTreeSet<Requirement>> = package
1616            .metadata
1617            .dependency_groups
1618            .iter()
1619            .filter(|(_, requirements)| self.includes_empty_groups() || !requirements.is_empty())
1620            .map(|(group, requirements)| {
1621                Ok::<_, LockError>((
1622                    group.clone(),
1623                    requirements
1624                        .iter()
1625                        .cloned()
1626                        .map(|requirement| {
1627                            normalize_requirement(requirement, root, &self.requires_python)
1628                        })
1629                        .collect::<Result<_, _>>()?,
1630                ))
1631            })
1632            .collect::<Result<_, _>>()?;
1633
1634        if expected != actual {
1635            return Ok(SatisfiesResult::MismatchedPackageDependencyGroups(
1636                &package.id.name,
1637                package.id.version.as_ref(),
1638                expected,
1639                actual,
1640            ));
1641        }
1642
1643        Ok(SatisfiesResult::Satisfied)
1644    }
1645
1646    /// Check whether the lock matches the project structure, requirements, and configuration.
1647    pub async fn satisfies<Context: BuildContext>(
1648        &self,
1649        root: &Path,
1650        packages: &BTreeMap<PackageName, WorkspaceMember>,
1651        members: &[PackageName],
1652        required_members: &BTreeMap<PackageName, Editability>,
1653        requirements: &[Requirement],
1654        constraints: &[Requirement],
1655        overrides: &[Requirement],
1656        excludes: &[PackageName],
1657        build_constraints: &[Requirement],
1658        dependency_groups: &BTreeMap<GroupName, Vec<Requirement>>,
1659        dependency_metadata: &DependencyMetadata,
1660        indexes: Option<&IndexLocations>,
1661        tags: &Tags,
1662        markers: &MarkerEnvironment,
1663        hasher: &HashStrategy,
1664        index: &InMemoryIndex,
1665        database: &DistributionDatabase<'_, Context>,
1666    ) -> Result<SatisfiesResult<'_>, LockError> {
1667        let mut queue: VecDeque<&Package> = VecDeque::new();
1668        let mut seen = FxHashSet::default();
1669
1670        // Validate that the lockfile was generated with the same root members.
1671        {
1672            let expected = members.iter().cloned().collect::<BTreeSet<_>>();
1673            let actual = &self.manifest.members;
1674            if expected != *actual {
1675                return Ok(SatisfiesResult::MismatchedMembers(expected, actual));
1676            }
1677        }
1678
1679        // Validate that the member sources have not changed (e.g., that they've switched from
1680        // virtual to non-virtual or vice versa).
1681        for (name, member) in packages {
1682            let source = self.find_by_name(name).ok().flatten();
1683
1684            // Determine whether the member was required by any other member.
1685            let value = required_members.get(name);
1686            let is_required_member = value.is_some();
1687            let editability = value.copied().flatten();
1688
1689            // Verify that the member is virtual (or not).
1690            let expected_virtual = !member.pyproject_toml().is_package(!is_required_member);
1691            let actual_virtual =
1692                source.map(|package| matches!(package.id.source, Source::Virtual(..)));
1693            if actual_virtual != Some(expected_virtual) {
1694                return Ok(SatisfiesResult::MismatchedVirtual(
1695                    name.clone(),
1696                    expected_virtual,
1697                ));
1698            }
1699
1700            // Verify that the member is editable (or not).
1701            let expected_editable = if expected_virtual {
1702                false
1703            } else {
1704                editability.unwrap_or(true)
1705            };
1706            let actual_editable =
1707                source.map(|package| matches!(package.id.source, Source::Editable(..)));
1708            if actual_editable != Some(expected_editable) {
1709                return Ok(SatisfiesResult::MismatchedEditable(
1710                    name.clone(),
1711                    expected_editable,
1712                ));
1713            }
1714        }
1715
1716        // Validate that the lockfile was generated with the same requirements.
1717        {
1718            let expected: BTreeSet<_> = requirements
1719                .iter()
1720                .cloned()
1721                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1722                .collect::<Result<_, _>>()?;
1723            let actual: BTreeSet<_> = self
1724                .manifest
1725                .requirements
1726                .iter()
1727                .cloned()
1728                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1729                .collect::<Result<_, _>>()?;
1730            if expected != actual {
1731                return Ok(SatisfiesResult::MismatchedRequirements(expected, actual));
1732            }
1733        }
1734
1735        // Validate that the lockfile was generated with the same constraints.
1736        {
1737            let expected: BTreeSet<_> = constraints
1738                .iter()
1739                .cloned()
1740                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1741                .collect::<Result<_, _>>()?;
1742            let actual: BTreeSet<_> = self
1743                .manifest
1744                .constraints
1745                .iter()
1746                .cloned()
1747                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1748                .collect::<Result<_, _>>()?;
1749            if expected != actual {
1750                return Ok(SatisfiesResult::MismatchedConstraints(expected, actual));
1751            }
1752        }
1753
1754        // Validate that the lockfile was generated with the same overrides.
1755        {
1756            let expected: BTreeSet<_> = overrides
1757                .iter()
1758                .cloned()
1759                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1760                .collect::<Result<_, _>>()?;
1761            let actual: BTreeSet<_> = self
1762                .manifest
1763                .overrides
1764                .iter()
1765                .cloned()
1766                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1767                .collect::<Result<_, _>>()?;
1768            if expected != actual {
1769                return Ok(SatisfiesResult::MismatchedOverrides(expected, actual));
1770            }
1771        }
1772
1773        // Validate that the lockfile was generated with the same excludes.
1774        {
1775            let expected: BTreeSet<_> = excludes.iter().cloned().collect();
1776            let actual: BTreeSet<_> = self.manifest.excludes.iter().cloned().collect();
1777            if expected != actual {
1778                return Ok(SatisfiesResult::MismatchedExcludes(expected, actual));
1779            }
1780        }
1781
1782        // Validate that the lockfile was generated with the same build constraints.
1783        {
1784            let expected: BTreeSet<_> = build_constraints
1785                .iter()
1786                .cloned()
1787                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1788                .collect::<Result<_, _>>()?;
1789            let actual: BTreeSet<_> = self
1790                .manifest
1791                .build_constraints
1792                .iter()
1793                .cloned()
1794                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1795                .collect::<Result<_, _>>()?;
1796            if expected != actual {
1797                return Ok(SatisfiesResult::MismatchedBuildConstraints(
1798                    expected, actual,
1799                ));
1800            }
1801        }
1802
1803        // Validate that the lockfile was generated with the same dependency groups.
1804        {
1805            let expected: BTreeMap<GroupName, BTreeSet<Requirement>> = dependency_groups
1806                .iter()
1807                .filter(|(_, requirements)| !requirements.is_empty())
1808                .map(|(group, requirements)| {
1809                    Ok::<_, LockError>((
1810                        group.clone(),
1811                        requirements
1812                            .iter()
1813                            .cloned()
1814                            .map(|requirement| {
1815                                normalize_requirement(requirement, root, &self.requires_python)
1816                            })
1817                            .collect::<Result<_, _>>()?,
1818                    ))
1819                })
1820                .collect::<Result<_, _>>()?;
1821            let actual: BTreeMap<GroupName, BTreeSet<Requirement>> = self
1822                .manifest
1823                .dependency_groups
1824                .iter()
1825                .filter(|(_, requirements)| !requirements.is_empty())
1826                .map(|(group, requirements)| {
1827                    Ok::<_, LockError>((
1828                        group.clone(),
1829                        requirements
1830                            .iter()
1831                            .cloned()
1832                            .map(|requirement| {
1833                                normalize_requirement(requirement, root, &self.requires_python)
1834                            })
1835                            .collect::<Result<_, _>>()?,
1836                    ))
1837                })
1838                .collect::<Result<_, _>>()?;
1839            if expected != actual {
1840                return Ok(SatisfiesResult::MismatchedDependencyGroups(
1841                    expected, actual,
1842                ));
1843            }
1844        }
1845
1846        // Validate that the lockfile was generated with the same static metadata.
1847        {
1848            let expected = dependency_metadata
1849                .values()
1850                .cloned()
1851                .collect::<BTreeSet<_>>();
1852            let actual = &self.manifest.dependency_metadata;
1853            if expected != *actual {
1854                return Ok(SatisfiesResult::MismatchedStaticMetadata(expected, actual));
1855            }
1856        }
1857
1858        // Collect the set of available indexes (both `--index-url` and `--find-links` entries).
1859        let mut remotes = indexes.map(|locations| {
1860            locations
1861                .allowed_indexes()
1862                .into_iter()
1863                .filter_map(|index| match index.url() {
1864                    IndexUrl::Pypi(_) | IndexUrl::Url(_) => {
1865                        Some(UrlString::from(index.url().without_credentials().as_ref()))
1866                    }
1867                    IndexUrl::Path(_) => None,
1868                })
1869                .collect::<BTreeSet<_>>()
1870        });
1871
1872        let mut locals = indexes.map(|locations| {
1873            locations
1874                .allowed_indexes()
1875                .into_iter()
1876                .filter_map(|index| match index.url() {
1877                    IndexUrl::Pypi(_) | IndexUrl::Url(_) => None,
1878                    IndexUrl::Path(url) => {
1879                        let path = url.to_file_path().ok()?;
1880                        let path = relative_to(&path, root)
1881                            .or_else(|_| std::path::absolute(path))
1882                            .ok()?
1883                            .into_boxed_path();
1884                        Some(path)
1885                    }
1886                })
1887                .collect::<BTreeSet<_>>()
1888        });
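        // At this point (illustrative): `remotes` holds credential-stripped index URLs (e.g.,
        // `https://pypi.org/simple`), `locals` holds workspace-relative paths to file-based
        // indexes, and both are `None` when no index configuration was provided.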
1889
1890        // Add the workspace packages to the queue.
1891        for root_name in packages.keys() {
1892            let root = self
1893                .find_by_name(root_name)
1894                .expect("found too many packages matching root");
1895
1896            let Some(root) = root else {
1897                // The package is not in the lockfile, so it can't be satisfied.
1898                return Ok(SatisfiesResult::MissingRoot(root_name.clone()));
1899            };
1900
1901            // Add the base package.
1902            queue.push_back(root);
1903        }
1904
1905        while let Some(package) = queue.pop_front() {
1906            // If the lockfile references an index that was not provided, we can't validate it.
1907            if let Source::Registry(index) = &package.id.source {
1908                match index {
1909                    RegistrySource::Url(url) => {
1910                        if remotes
1911                            .as_ref()
1912                            .is_some_and(|remotes| !remotes.contains(url))
1913                        {
1914                            let name = &package.id.name;
1915                            let version = &package
1916                                .id
1917                                .version
1918                                .as_ref()
1919                                .expect("version for registry source");
1920                            return Ok(SatisfiesResult::MissingRemoteIndex(name, version, url));
1921                        }
1922                    }
1923                    RegistrySource::Path(path) => {
1924                        if locals.as_ref().is_some_and(|locals| !locals.contains(path)) {
1925                            let name = &package.id.name;
1926                            let version = &package
1927                                .id
1928                                .version
1929                                .as_ref()
1930                                .expect("version for registry source");
1931                            return Ok(SatisfiesResult::MissingLocalIndex(name, version, path));
1932                        }
1933                    }
1934                }
1935            }
1936
1937            // If the package is immutable, we don't need to validate it (or its dependencies).
1938            if package.id.source.is_immutable() {
1939                continue;
1940            }
1941
1942            if let Some(version) = package.id.version.as_ref() {
1943                // For a non-dynamic package, fetch the metadata from the distribution database.
1944                let HashedDist { dist, .. } = package.to_dist(
1945                    root,
1946                    TagPolicy::Preferred(tags),
1947                    &BuildOptions::default(),
1948                    markers,
1949                )?;
1950
1951                let metadata = {
1952                    let id = dist.version_id();
1953                    if let Some(archive) =
1954                        index
1955                            .distributions()
1956                            .get(&id)
1957                            .as_deref()
1958                            .and_then(|response| {
1959                                if let MetadataResponse::Found(archive, ..) = response {
1960                                    Some(archive)
1961                                } else {
1962                                    None
1963                                }
1964                            })
1965                    {
1966                        // If the metadata is already in the index, return it.
1967                        archive.metadata.clone()
1968                    } else {
1969                        // Run the PEP 517 build process to extract metadata from the source distribution.
1970                        let archive = database
1971                            .get_or_build_wheel_metadata(&dist, hasher.get(&dist))
1972                            .await
1973                            .map_err(|err| LockErrorKind::Resolution {
1974                                id: package.id.clone(),
1975                                err,
1976                            })?;
1977
1978                        let metadata = archive.metadata.clone();
1979
1980                        // Insert the metadata into the index.
1981                        index
1982                            .distributions()
1983                            .done(id, Arc::new(MetadataResponse::Found(archive)));
1984
1985                        metadata
1986                    }
1987                };
1988
1989                // If this is a local package, validate that it hasn't become dynamic (in which
1990                // case, we'd expect the version to be omitted).
1991                if package.id.source.is_source_tree() {
1992                    if metadata.dynamic {
1993                        return Ok(SatisfiesResult::MismatchedDynamic(&package.id.name, false));
1994                    }
1995                }
1996
1997                // Validate the `version` metadata.
1998                if metadata.version != *version {
1999                    return Ok(SatisfiesResult::MismatchedVersion(
2000                        &package.id.name,
2001                        version.clone(),
2002                        Some(metadata.version.clone()),
2003                    ));
2004                }
2005
2006                // Validate the `provides-extras` metadata.
2007                match self.satisfies_provides_extra(metadata.provides_extra, package) {
2008                    SatisfiesResult::Satisfied => {}
2009                    result => return Ok(result),
2010                }
2011
2012                // Validate that the requirements are unchanged.
2013                match self.satisfies_requires_dist(
2014                    metadata.requires_dist,
2015                    metadata.dependency_groups,
2016                    package,
2017                    root,
2018                )? {
2019                    SatisfiesResult::Satisfied => {}
2020                    result => return Ok(result),
2021                }
2022            } else if let Some(source_tree) = package.id.source.as_source_tree() {
2023                // For dynamic packages, we don't need the version. We only need to know that the
2024                // package is still dynamic, and that the requirements are unchanged.
2025                //
2026                // If the distribution is a source tree, attempt to extract the requirements from the
2027                // `pyproject.toml` directly. The distribution database will do this too, but we can be
2028                // even more aggressive here since we _only_ need the requirements. So, for example,
2029                // even if the version is dynamic, we can still extract the requirements without
2030                // performing a build, unlike in the database where we typically construct a "complete"
2031                // metadata object.
2032                let parent = root.join(source_tree);
2033                let path = parent.join("pyproject.toml");
2034                let metadata =
2035                    match fs_err::tokio::read_to_string(&path).await {
2036                        Ok(contents) => {
2037                            let pyproject_toml = toml::from_str::<PyProjectToml>(&contents)
2038                                .map_err(|err| LockErrorKind::InvalidPyprojectToml {
2039                                    path: path.clone(),
2040                                    err,
2041                                })?;
2042                            database
2043                                .requires_dist(&parent, &pyproject_toml)
2044                                .await
2045                                .map_err(|err| LockErrorKind::Resolution {
2046                                    id: package.id.clone(),
2047                                    err,
2048                                })?
2049                        }
2050                        Err(err) if err.kind() == io::ErrorKind::NotFound => None,
2051                        Err(err) => {
2052                            return Err(LockErrorKind::UnreadablePyprojectToml { path, err }.into());
2053                        }
2054                    };
2055
2056                let satisfied = metadata.is_some_and(|metadata| {
2057                    // Validate that the package is still dynamic.
2058                    if !metadata.dynamic {
2059                        debug!("Static `requires-dist` for `{}` is out-of-date; falling back to distribution database", package.id);
2060                        return false;
2061                    }
2062
2063                    // Validate that the extras are unchanged.
2064                    if let SatisfiesResult::Satisfied = self.satisfies_provides_extra(metadata.provides_extra, package) {
2065                        debug!("Static `provides-extra` for `{}` is up-to-date", package.id);
2066                    } else {
2067                        debug!("Static `provides-extra` for `{}` is out-of-date; falling back to distribution database", package.id);
2068                        return false;
2069                    }
2070
2071                    // Validate that the requirements are unchanged.
2072                    match self.satisfies_requires_dist(metadata.requires_dist, metadata.dependency_groups, package, root) {
2073                        Ok(SatisfiesResult::Satisfied) => {
2074                            debug!("Static `requires-dist` for `{}` is up-to-date", package.id);
2075                        },
2076                        Ok(..) => {
2077                            debug!("Static `requires-dist` for `{}` is out-of-date; falling back to distribution database", package.id);
2078                            return false;
2079                        },
2080                        Err(..) => {
2081                            debug!("Static `requires-dist` for `{}` is invalid; falling back to distribution database", package.id);
2082                            return false;
2083                        },
2084                    }
2085
2086                    true
2087                });
2088
2089                // If the `requires-dist` metadata matches the requirements, we're done; otherwise,
2090                // fetch the "full" metadata, which may involve invoking the build system. In some
2091                // cases, build backends return metadata that does _not_ match the `pyproject.toml`
2092                // exactly. For example, `hatchling` will flatten any recursive (or self-referential)
2093                // extras, while `setuptools` will not.
2094                if !satisfied {
2095                    let HashedDist { dist, .. } = package.to_dist(
2096                        root,
2097                        TagPolicy::Preferred(tags),
2098                        &BuildOptions::default(),
2099                        markers,
2100                    )?;
2101
2102                    let metadata = {
2103                        let id = dist.version_id();
2104                        if let Some(archive) =
2105                            index
2106                                .distributions()
2107                                .get(&id)
2108                                .as_deref()
2109                                .and_then(|response| {
2110                                    if let MetadataResponse::Found(archive, ..) = response {
2111                                        Some(archive)
2112                                    } else {
2113                                        None
2114                                    }
2115                                })
2116                        {
2117                            // If the metadata is already in the index, return it.
2118                            archive.metadata.clone()
2119                        } else {
2120                            // Run the PEP 517 build process to extract metadata from the source distribution.
2121                            let archive = database
2122                                .get_or_build_wheel_metadata(&dist, hasher.get(&dist))
2123                                .await
2124                                .map_err(|err| LockErrorKind::Resolution {
2125                                    id: package.id.clone(),
2126                                    err,
2127                                })?;
2128
2129                            let metadata = archive.metadata.clone();
2130
2131                            // Insert the metadata into the index.
2132                            index
2133                                .distributions()
2134                                .done(id, Arc::new(MetadataResponse::Found(archive)));
2135
2136                            metadata
2137                        }
2138                    };
2139
2140                    // Validate that the package is still dynamic.
2141                    if !metadata.dynamic {
2142                        return Ok(SatisfiesResult::MismatchedDynamic(&package.id.name, true));
2143                    }
2144
2145                    // Validate that the extras are unchanged.
2146                    match self.satisfies_provides_extra(metadata.provides_extra, package) {
2147                        SatisfiesResult::Satisfied => {}
2148                        result => return Ok(result),
2149                    }
2150
2151                    // Validate that the requirements are unchanged.
2152                    match self.satisfies_requires_dist(
2153                        metadata.requires_dist,
2154                        metadata.dependency_groups,
2155                        package,
2156                        root,
2157                    )? {
2158                        SatisfiesResult::Satisfied => {}
2159                        result => return Ok(result),
2160                    }
2161                }
2162            } else {
2163                return Ok(SatisfiesResult::MissingVersion(&package.id.name));
2164            }
2165
2166            // Add any explicit indexes to the list of known locals or remotes. These indexes may
2167            // not be available as top-level configuration (i.e., if they're defined within a
2168            // workspace member), but we already validated that the dependencies are up-to-date, so
2169            // we can consider them "available".
2170            for requirement in package
2171                .metadata
2172                .requires_dist
2173                .iter()
2174                .chain(package.metadata.dependency_groups.values().flatten())
2175            {
2176                if let RequirementSource::Registry {
2177                    index: Some(index), ..
2178                } = &requirement.source
2179                {
2180                    match &index.url {
2181                        IndexUrl::Pypi(_) | IndexUrl::Url(_) => {
2182                            if let Some(remotes) = remotes.as_mut() {
2183                                remotes.insert(UrlString::from(
2184                                    index.url().without_credentials().as_ref(),
2185                                ));
2186                            }
2187                        }
2188                        IndexUrl::Path(url) => {
2189                            if let Some(locals) = locals.as_mut() {
2190                                if let Some(path) = url.to_file_path().ok().and_then(|path| {
2191                                    relative_to(&path, root)
2192                                        .or_else(|_| std::path::absolute(path))
2193                                        .ok()
2194                                }) {
2195                                    locals.insert(path.into_boxed_path());
2196                                }
2197                            }
2198                        }
2199                    }
2200                }
2201            }
2202
2203            // Recurse.
2204            for dep in &package.dependencies {
2205                if seen.insert(&dep.package_id) {
2206                    let dep_dist = self.find_by_id(&dep.package_id);
2207                    queue.push_back(dep_dist);
2208                }
2209            }
2210
2211            for dependencies in package.optional_dependencies.values() {
2212                for dep in dependencies {
2213                    if seen.insert(&dep.package_id) {
2214                        let dep_dist = self.find_by_id(&dep.package_id);
2215                        queue.push_back(dep_dist);
2216                    }
2217                }
2218            }
2219
2220            for dependencies in package.dependency_groups.values() {
2221                for dep in dependencies {
2222                    if seen.insert(&dep.package_id) {
2223                        let dep_dist = self.find_by_id(&dep.package_id);
2224                        queue.push_back(dep_dist);
2225                    }
2226                }
2227            }
2228        }
2229
2230        Ok(SatisfiesResult::Satisfied)
2231    }
2232}
2233
2234#[derive(Debug, Copy, Clone)]
2235enum TagPolicy<'tags> {
2236    /// Exclusively consider wheels that match the specified platform tags.
2237    Required(&'tags Tags),
2238    /// Prefer wheels that match the specified platform tags, but fall back to incompatible wheels
2239    /// if necessary.
2240    Preferred(&'tags Tags),
2241}
2242
2243impl<'tags> TagPolicy<'tags> {
2244    /// Returns the platform tags to consider.
2245    fn tags(&self) -> &'tags Tags {
2246        match self {
2247            Self::Required(tags) | Self::Preferred(tags) => tags,
2248        }
2249    }
2250}
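// Note: `satisfies` above passes `TagPolicy::Preferred`, so a lockfile is not rejected merely
// because no wheel matches the current platform; `Required` is the stricter variant for
// callers that must select a compatible wheel.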
2251
2252/// The result of checking if a lockfile satisfies a set of requirements.
2253#[derive(Debug)]
2254pub enum SatisfiesResult<'lock> {
2255    /// The lockfile satisfies the requirements.
2256    Satisfied,
2257    /// The lockfile uses a different set of workspace members.
2258    MismatchedMembers(BTreeSet<PackageName>, &'lock BTreeSet<PackageName>),
2259    /// A workspace member switched from virtual to non-virtual or vice versa.
2260    MismatchedVirtual(PackageName, bool),
2261    /// A workspace member switched from editable to non-editable or vice versa.
2262    MismatchedEditable(PackageName, bool),
2263    /// A source tree switched from dynamic to non-dynamic or vice versa.
2264    MismatchedDynamic(&'lock PackageName, bool),
2265    /// The lockfile uses a different version for one of its packages.
2266    MismatchedVersion(&'lock PackageName, Version, Option<Version>),
2267    /// The lockfile uses a different set of requirements.
2268    MismatchedRequirements(BTreeSet<Requirement>, BTreeSet<Requirement>),
2269    /// The lockfile uses a different set of constraints.
2270    MismatchedConstraints(BTreeSet<Requirement>, BTreeSet<Requirement>),
2271    /// The lockfile uses a different set of overrides.
2272    MismatchedOverrides(BTreeSet<Requirement>, BTreeSet<Requirement>),
2273    /// The lockfile uses a different set of excludes.
2274    MismatchedExcludes(BTreeSet<PackageName>, BTreeSet<PackageName>),
2275    /// The lockfile uses a different set of build constraints.
2276    MismatchedBuildConstraints(BTreeSet<Requirement>, BTreeSet<Requirement>),
2277    /// The lockfile uses a different set of dependency groups.
2278    MismatchedDependencyGroups(
2279        BTreeMap<GroupName, BTreeSet<Requirement>>,
2280        BTreeMap<GroupName, BTreeSet<Requirement>>,
2281    ),
2282    /// The lockfile uses different static metadata.
2283    MismatchedStaticMetadata(BTreeSet<StaticMetadata>, &'lock BTreeSet<StaticMetadata>),
2284    /// The lockfile is missing a workspace member.
2285    MissingRoot(PackageName),
2286    /// The lockfile referenced a remote index that was not provided.
2287    MissingRemoteIndex(&'lock PackageName, &'lock Version, &'lock UrlString),
2288    /// The lockfile referenced a local index that was not provided.
2289    MissingLocalIndex(&'lock PackageName, &'lock Version, &'lock Path),
2290    /// A package in the lockfile contains different `requires-dist` metadata than expected.
2291    MismatchedPackageRequirements(
2292        &'lock PackageName,
2293        Option<&'lock Version>,
2294        BTreeSet<Requirement>,
2295        BTreeSet<Requirement>,
2296    ),
2297    /// A package in the lockfile contains different `provides-extra` metadata than expected.
2298    MismatchedPackageProvidesExtra(
2299        &'lock PackageName,
2300        Option<&'lock Version>,
2301        BTreeSet<ExtraName>,
2302        BTreeSet<&'lock ExtraName>,
2303    ),
2304    /// A package in the lockfile contains different `dependency-groups` metadata than expected.
2305    MismatchedPackageDependencyGroups(
2306        &'lock PackageName,
2307        Option<&'lock Version>,
2308        BTreeMap<GroupName, BTreeSet<Requirement>>,
2309        BTreeMap<GroupName, BTreeSet<Requirement>>,
2310    ),
2311    /// The lockfile is missing a version for a package.
2312    MissingVersion(&'lock PackageName),
2313}
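// A minimal consumption sketch (hypothetical caller; arguments and error handling elided):
//
//     match lock.satisfies(/* ... */).await? {
//         SatisfiesResult::Satisfied => { /* reuse the existing lockfile */ }
//         _mismatch => { /* fall back to a fresh resolution */ }
//     }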
2314
2315/// We discard the lockfile if these options don't match.
2316#[derive(Clone, Debug, Default, serde::Deserialize, PartialEq, Eq)]
2317#[serde(rename_all = "kebab-case")]
2318struct ResolverOptions {
2319    /// The [`ResolutionMode`] used to generate this lock.
2320    #[serde(default)]
2321    resolution_mode: ResolutionMode,
2322    /// The [`PrereleaseMode`] used to generate this lock.
2323    #[serde(default)]
2324    prerelease_mode: PrereleaseMode,
2325    /// The [`ForkStrategy`] used to generate this lock.
2326    #[serde(default)]
2327    fork_strategy: ForkStrategy,
2328    /// The [`ExcludeNewer`] setting used to generate this lock.
2329    #[serde(flatten)]
2330    exclude_newer: ExcludeNewerWire,
2331}
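// A hedged sketch of the corresponding `[options]` table (keys follow the kebab-case renames
// above; the values shown are hypothetical):
//
//     [options]
//     resolution-mode = "lowest-direct"
//     prerelease-mode = "allow"
//     fork-strategy = "fewest"
//     exclude-newer = "2025-01-01T00:00:00Z"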
2332
2333#[allow(clippy::struct_field_names)]
2334#[derive(Clone, Debug, Default, serde::Deserialize, PartialEq, Eq)]
2335#[serde(rename_all = "kebab-case")]
2336struct ExcludeNewerWire {
2337    exclude_newer: Option<Timestamp>,
2338    exclude_newer_span: Option<ExcludeNewerSpan>,
2339    #[serde(default, skip_serializing_if = "ExcludeNewerPackage::is_empty")]
2340    exclude_newer_package: ExcludeNewerPackage,
2341}
2342
2343impl From<ExcludeNewerWire> for ExcludeNewer {
2344    fn from(wire: ExcludeNewerWire) -> Self {
2345        Self {
2346            global: wire
2347                .exclude_newer
2348                .map(|timestamp| ExcludeNewerValue::new(timestamp, wire.exclude_newer_span)),
2349            package: wire.exclude_newer_package,
2350        }
2351    }
2352}
2353
2354impl From<ExcludeNewer> for ExcludeNewerWire {
2355    fn from(exclude_newer: ExcludeNewer) -> Self {
2356        let (timestamp, span) = exclude_newer
2357            .global
2358            .map(ExcludeNewerValue::into_parts)
2359            .map_or((None, None), |(t, s)| (Some(t), s));
2360        Self {
2361            exclude_newer: timestamp,
2362            exclude_newer_span: span,
2363            exclude_newer_package: exclude_newer.package,
2364        }
2365    }
2366}
2367
2368#[derive(Clone, Debug, Default, serde::Deserialize, PartialEq, Eq)]
2369#[serde(rename_all = "kebab-case")]
2370pub struct ResolverManifest {
2371    /// The workspace members included in the lockfile.
2372    #[serde(default)]
2373    members: BTreeSet<PackageName>,
2374    /// The requirements provided to the resolver, exclusive of the workspace members.
2375    ///
2376    /// These are requirements that are attached to the project, but not to any of its
2377    /// workspace members. For example, the requirements in a PEP 723 script would be included here.
2378    #[serde(default)]
2379    requirements: BTreeSet<Requirement>,
2380    /// The dependency groups provided to the resolver, exclusive of the workspace members.
2381    ///
2382    /// These are dependency groups that are attached to the project, but not to any of its
2383    /// workspace members. For example, the dependency groups in a `pyproject.toml` without a
2384    /// `[project]` table would be included here.
2385    #[serde(default)]
2386    dependency_groups: BTreeMap<GroupName, BTreeSet<Requirement>>,
2387    /// The constraints provided to the resolver.
2388    #[serde(default)]
2389    constraints: BTreeSet<Requirement>,
2390    /// The overrides provided to the resolver.
2391    #[serde(default)]
2392    overrides: BTreeSet<Requirement>,
2393    /// The excludes provided to the resolver.
2394    #[serde(default)]
2395    excludes: BTreeSet<PackageName>,
2396    /// The build constraints provided to the resolver.
2397    #[serde(default)]
2398    build_constraints: BTreeSet<Requirement>,
2399    /// The static metadata provided to the resolver.
2400    #[serde(default)]
2401    dependency_metadata: BTreeSet<StaticMetadata>,
2402}
2403
2404impl ResolverManifest {
2405    /// Initialize a [`ResolverManifest`] with the given members, requirements, constraints,
2406    /// overrides, excludes, build constraints, dependency groups, and static metadata.
2407    pub fn new(
2408        members: impl IntoIterator<Item = PackageName>,
2409        requirements: impl IntoIterator<Item = Requirement>,
2410        constraints: impl IntoIterator<Item = Requirement>,
2411        overrides: impl IntoIterator<Item = Requirement>,
2412        excludes: impl IntoIterator<Item = PackageName>,
2413        build_constraints: impl IntoIterator<Item = Requirement>,
2414        dependency_groups: impl IntoIterator<Item = (GroupName, Vec<Requirement>)>,
2415        dependency_metadata: impl IntoIterator<Item = StaticMetadata>,
2416    ) -> Self {
2417        Self {
2418            members: members.into_iter().collect(),
2419            requirements: requirements.into_iter().collect(),
2420            constraints: constraints.into_iter().collect(),
2421            overrides: overrides.into_iter().collect(),
2422            excludes: excludes.into_iter().collect(),
2423            build_constraints: build_constraints.into_iter().collect(),
2424            dependency_groups: dependency_groups
2425                .into_iter()
2426                .map(|(group, requirements)| (group, requirements.into_iter().collect()))
2427                .collect(),
2428            dependency_metadata: dependency_metadata.into_iter().collect(),
2429        }
2430    }
2431
2432    /// Convert the manifest to a relative form using the given workspace root.
2433    pub fn relative_to(self, root: &Path) -> Result<Self, io::Error> {
2434        Ok(Self {
2435            members: self.members,
2436            requirements: self
2437                .requirements
2438                .into_iter()
2439                .map(|requirement| requirement.relative_to(root))
2440                .collect::<Result<BTreeSet<_>, _>>()?,
2441            constraints: self
2442                .constraints
2443                .into_iter()
2444                .map(|requirement| requirement.relative_to(root))
2445                .collect::<Result<BTreeSet<_>, _>>()?,
2446            overrides: self
2447                .overrides
2448                .into_iter()
2449                .map(|requirement| requirement.relative_to(root))
2450                .collect::<Result<BTreeSet<_>, _>>()?,
2451            excludes: self.excludes,
2452            build_constraints: self
2453                .build_constraints
2454                .into_iter()
2455                .map(|requirement| requirement.relative_to(root))
2456                .collect::<Result<BTreeSet<_>, _>>()?,
2457            dependency_groups: self
2458                .dependency_groups
2459                .into_iter()
2460                .map(|(group, requirements)| {
2461                    Ok::<_, io::Error>((
2462                        group,
2463                        requirements
2464                            .into_iter()
2465                            .map(|requirement| requirement.relative_to(root))
2466                            .collect::<Result<BTreeSet<_>, _>>()?,
2467                    ))
2468                })
2469                .collect::<Result<BTreeMap<_, _>, _>>()?,
2470            dependency_metadata: self.dependency_metadata,
2471        })
2472    }
2473}
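// Design note: relativizing requirement paths against the workspace root presumably keeps the
// serialized manifest portable when the project is checked out at a different location.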
2474
2475#[derive(Clone, Debug, serde::Deserialize)]
2476#[serde(rename_all = "kebab-case")]
2477struct LockWire {
2478    version: u32,
2479    revision: Option<u32>,
2480    requires_python: RequiresPython,
2481    /// If this lockfile was built from a forking resolution with non-identical forks, store the
2482    /// forks in the lockfile so we can recreate them in subsequent resolutions.
2483    #[serde(rename = "resolution-markers", default)]
2484    fork_markers: Vec<SimplifiedMarkerTree>,
2485    #[serde(rename = "supported-markers", default)]
2486    supported_environments: Vec<SimplifiedMarkerTree>,
2487    #[serde(rename = "required-markers", default)]
2488    required_environments: Vec<SimplifiedMarkerTree>,
2489    #[serde(rename = "conflicts", default)]
2490    conflicts: Option<Conflicts>,
2491    /// We discard the lockfile if these options don't match.
2492    #[serde(default)]
2493    options: ResolverOptions,
2494    #[serde(default)]
2495    manifest: ResolverManifest,
2496    #[serde(rename = "package", alias = "distribution", default)]
2497    packages: Vec<PackageWire>,
2498}
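// A hedged sketch of the top-level lockfile shape this deserializes (keys taken from the serde
// attributes above; the values and the package entry are hypothetical):
//
//     version = 1
//     revision = 3
//     requires-python = ">=3.9"
//
//     [[package]]
//     name = "anyio"
//     version = "4.3.0"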
2499
2500impl TryFrom<LockWire> for Lock {
2501    type Error = LockError;
2502
2503    fn try_from(wire: LockWire) -> Result<Self, LockError> {
2504        // Count the number of sources for each package name. When
2505        // there's only one source for a particular package name (the
2506        // overwhelmingly common case), we can omit some data (like source and
2507        // version) on dependency edges since it is strictly redundant.
2508        let mut unambiguous_package_ids: FxHashMap<PackageName, PackageId> = FxHashMap::default();
2509        let mut ambiguous = FxHashSet::default();
2510        for dist in &wire.packages {
2511            if ambiguous.contains(&dist.id.name) {
2512                continue;
2513            }
2514            if let Some(id) = unambiguous_package_ids.remove(&dist.id.name) {
2515                ambiguous.insert(id.name);
2516                continue;
2517            }
2518            unambiguous_package_ids.insert(dist.id.name.clone(), dist.id.clone());
2519        }
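        // Worked example (hypothetical): if `numpy` appears once but `torch` appears twice
        // (say, with different sources across forks), `numpy` stays in
        // `unambiguous_package_ids` while `torch` is moved to `ambiguous`, so its dependency
        // edges must spell out source and version when unwired below.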
2520
2521        let packages = wire
2522            .packages
2523            .into_iter()
2524            .map(|dist| dist.unwire(&wire.requires_python, &unambiguous_package_ids))
2525            .collect::<Result<Vec<_>, _>>()?;
2526        let supported_environments = wire
2527            .supported_environments
2528            .into_iter()
2529            .map(|simplified_marker| simplified_marker.into_marker(&wire.requires_python))
2530            .collect();
2531        let required_environments = wire
2532            .required_environments
2533            .into_iter()
2534            .map(|simplified_marker| simplified_marker.into_marker(&wire.requires_python))
2535            .collect();
2536        let fork_markers = wire
2537            .fork_markers
2538            .into_iter()
2539            .map(|simplified_marker| simplified_marker.into_marker(&wire.requires_python))
2540            .map(UniversalMarker::from_combined)
2541            .collect();
2542        let lock = Self::new(
2543            wire.version,
2544            wire.revision.unwrap_or(0),
2545            packages,
2546            wire.requires_python,
2547            wire.options,
2548            wire.manifest,
2549            wire.conflicts.unwrap_or_else(Conflicts::empty),
2550            supported_environments,
2551            required_environments,
2552            fork_markers,
2553        )?;
2554
2555        Ok(lock)
2556    }
2557}
2558
2559/// Like [`Lock`], but limited to the version field. Used for error reporting: by limiting parsing
2560/// to the version field, we can verify compatibility for lockfiles that may otherwise be
2561/// unparsable.
2562#[derive(Clone, Debug, serde::Deserialize)]
2563#[serde(rename_all = "kebab-case")]
2564pub struct LockVersion {
2565    version: u32,
2566}
2567
2568impl LockVersion {
2569    /// Returns the lockfile version.
2570    pub fn version(&self) -> u32 {
2571        self.version
2572    }
2573}
2574
2575#[derive(Clone, Debug, PartialEq, Eq)]
2576pub struct Package {
2577    pub(crate) id: PackageId,
2578    sdist: Option<SourceDist>,
2579    wheels: Vec<Wheel>,
2580    /// If there are multiple versions or sources for the same package name, we add the markers of
2581    /// the fork(s) that contained this version or source, so we can set the correct preferences in
2582    /// the next resolution.
2583    ///
2584    /// Named `resolution-markers` in `uv.lock`.
2585    fork_markers: Vec<UniversalMarker>,
2586    /// The resolved dependencies of the package.
2587    dependencies: Vec<Dependency>,
2588    /// The resolved optional dependencies of the package.
2589    optional_dependencies: BTreeMap<ExtraName, Vec<Dependency>>,
2590    /// The resolved PEP 735 dependency groups of the package.
2591    dependency_groups: BTreeMap<GroupName, Vec<Dependency>>,
2592    /// The exact requirements from the package metadata.
2593    metadata: PackageMetadata,
2594}
2595
2596impl Package {
2597    fn from_annotated_dist(
2598        annotated_dist: &AnnotatedDist,
2599        fork_markers: Vec<UniversalMarker>,
2600        root: &Path,
2601    ) -> Result<Self, LockError> {
2602        let id = PackageId::from_annotated_dist(annotated_dist, root)?;
2603        let sdist = SourceDist::from_annotated_dist(&id, annotated_dist)?;
2604        let wheels = Wheel::from_annotated_dist(annotated_dist)?;
2605        let requires_dist = if id.source.is_immutable() {
2606            BTreeSet::default()
2607        } else {
2608            annotated_dist
2609                .metadata
2610                .as_ref()
2611                .expect("metadata is present")
2612                .requires_dist
2613                .iter()
2614                .cloned()
2615                .map(|requirement| requirement.relative_to(root))
2616                .collect::<Result<_, _>>()
2617                .map_err(LockErrorKind::RequirementRelativePath)?
2618        };
2619        let provides_extra = if id.source.is_immutable() {
2620            Box::default()
2621        } else {
2622            annotated_dist
2623                .metadata
2624                .as_ref()
2625                .expect("metadata is present")
2626                .provides_extra
2627                .clone()
2628        };
2629        let dependency_groups = if id.source.is_immutable() {
2630            BTreeMap::default()
2631        } else {
2632            annotated_dist
2633                .metadata
2634                .as_ref()
2635                .expect("metadata is present")
2636                .dependency_groups
2637                .iter()
2638                .map(|(group, requirements)| {
2639                    let requirements = requirements
2640                        .iter()
2641                        .cloned()
2642                        .map(|requirement| requirement.relative_to(root))
2643                        .collect::<Result<_, _>>()
2644                        .map_err(LockErrorKind::RequirementRelativePath)?;
2645                    Ok::<_, LockError>((group.clone(), requirements))
2646                })
2647                .collect::<Result<_, _>>()?
2648        };
2649        Ok(Self {
2650            id,
2651            sdist,
2652            wheels,
2653            fork_markers,
2654            dependencies: vec![],
2655            optional_dependencies: BTreeMap::default(),
2656            dependency_groups: BTreeMap::default(),
2657            metadata: PackageMetadata {
2658                requires_dist,
2659                provides_extra,
2660                dependency_groups,
2661            },
2662        })
2663    }
2664
2665    /// Add the [`AnnotatedDist`] as a dependency of the [`Package`].
2666    fn add_dependency(
2667        &mut self,
2668        requires_python: &RequiresPython,
2669        annotated_dist: &AnnotatedDist,
2670        marker: UniversalMarker,
2671        root: &Path,
2672    ) -> Result<(), LockError> {
2673        let new_dep =
2674            Dependency::from_annotated_dist(requires_python, annotated_dist, marker, root)?;
2675        for existing_dep in &mut self.dependencies {
2676            if existing_dep.package_id == new_dep.package_id
2677                // It's important that we do a comparison on
2678                // *simplified* markers here. In particular, when
2679                // we write markers out to the lock file, we use
2680                // "simplified" markers, or markers that are simplified
2681                // *given* that `requires-python` is satisfied. So if
2682                // we don't do equality based on what the simplified
2683                // marker is, we might wind up not merging dependencies
2684                // that ought to be merged and thus writing out extra
2685                // entries.
2686                //
2687                // For example, if `requires-python = '>=3.8'` and we
2688                // have `foo==1` and
2689                // `foo==1 ; python_version >= '3.8'` dependencies,
2690                // then they don't have equivalent complexified
2691                // markers, but their simplified markers are identical.
2692                //
2693                // NOTE: It does seem like perhaps this should
2694                // be implemented semantically/algebraically on
2695                // `MarkerTree` itself, but it wasn't totally clear
2696                // how to do that. I think `pep508` would need to
2697                // grow a concept of "requires python" and provide an
2698                // operation specifically for that.
2699                && existing_dep.simplified_marker == new_dep.simplified_marker
2700            {
2701                existing_dep.extra.extend(new_dep.extra);
2702                return Ok(());
2703            }
2704        }
2705
2706        self.dependencies.push(new_dep);
2707        Ok(())
2708    }
2709
2710    /// Add the [`AnnotatedDist`] as an optional dependency of the [`Package`].
2711    fn add_optional_dependency(
2712        &mut self,
2713        requires_python: &RequiresPython,
2714        extra: ExtraName,
2715        annotated_dist: &AnnotatedDist,
2716        marker: UniversalMarker,
2717        root: &Path,
2718    ) -> Result<(), LockError> {
2719        let dep = Dependency::from_annotated_dist(requires_python, annotated_dist, marker, root)?;
2720        let optional_deps = self.optional_dependencies.entry(extra).or_default();
2721        for existing_dep in &mut *optional_deps {
2722            if existing_dep.package_id == dep.package_id
2723                // See note in add_dependency for why we use
2724                // simplified markers here.
2725                && existing_dep.simplified_marker == dep.simplified_marker
2726            {
2727                existing_dep.extra.extend(dep.extra);
2728                return Ok(());
2729            }
2730        }
2731
2732        optional_deps.push(dep);
2733        Ok(())
2734    }
2735
2736    /// Add the [`AnnotatedDist`] to a dependency group of the [`Package`].
2737    fn add_group_dependency(
2738        &mut self,
2739        requires_python: &RequiresPython,
2740        group: GroupName,
2741        annotated_dist: &AnnotatedDist,
2742        marker: UniversalMarker,
2743        root: &Path,
2744    ) -> Result<(), LockError> {
2745        let dep = Dependency::from_annotated_dist(requires_python, annotated_dist, marker, root)?;
2746        let deps = self.dependency_groups.entry(group).or_default();
2747        for existing_dep in &mut *deps {
2748            if existing_dep.package_id == dep.package_id
2749                // See note in add_dependency for why we use
2750                // simplified markers here.
2751                && existing_dep.simplified_marker == dep.simplified_marker
2752            {
2753                existing_dep.extra.extend(dep.extra);
2754                return Ok(());
2755            }
2756        }
2757
2758        deps.push(dep);
2759        Ok(())
2760    }
2761
2762    /// Convert the [`Package`] to a [`Dist`] that can be used in installation, along with its hash.
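    ///
    /// Prefers a compatible wheel when binaries are allowed for this package; otherwise falls
    /// back to the source distribution, and surfaces a wheel-tag hint if neither is usable.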
2763    fn to_dist(
2764        &self,
2765        workspace_root: &Path,
2766        tag_policy: TagPolicy<'_>,
2767        build_options: &BuildOptions,
2768        markers: &MarkerEnvironment,
2769    ) -> Result<HashedDist, LockError> {
2770        let no_binary = build_options.no_binary_package(&self.id.name);
2771        let no_build = build_options.no_build_package(&self.id.name);
2772
2773        if !no_binary {
2774            if let Some(best_wheel_index) = self.find_best_wheel(tag_policy) {
2775                let hashes = {
2776                    let wheel = &self.wheels[best_wheel_index];
2777                    HashDigests::from(
2778                        wheel
2779                            .hash
2780                            .iter()
2781                            .chain(wheel.zstd.iter().flat_map(|z| z.hash.iter()))
2782                            .map(|h| h.0.clone())
2783                            .collect::<Vec<_>>(),
2784                    )
2785                };
2786
2787                let dist = match &self.id.source {
2788                    Source::Registry(source) => {
2789                        let wheels = self
2790                            .wheels
2791                            .iter()
2792                            .map(|wheel| wheel.to_registry_wheel(source, workspace_root))
2793                            .collect::<Result<_, LockError>>()?;
2794                        let reg_built_dist = RegistryBuiltDist {
2795                            wheels,
2796                            best_wheel_index,
2797                            sdist: None,
2798                        };
2799                        Dist::Built(BuiltDist::Registry(reg_built_dist))
2800                    }
2801                    Source::Path(path) => {
2802                        let filename: WheelFilename =
2803                            self.wheels[best_wheel_index].filename.clone();
2804                        let install_path = absolute_path(workspace_root, path)?;
2805                        let path_dist = PathBuiltDist {
2806                            filename,
2807                            url: verbatim_url(&install_path, &self.id)?,
2808                            install_path: install_path.into_boxed_path(),
2809                        };
2810                        let built_dist = BuiltDist::Path(path_dist);
2811                        Dist::Built(built_dist)
2812                    }
2813                    Source::Direct(url, direct) => {
2814                        let filename: WheelFilename =
2815                            self.wheels[best_wheel_index].filename.clone();
2816                        let url = DisplaySafeUrl::from(ParsedArchiveUrl {
2817                            url: url.to_url().map_err(LockErrorKind::InvalidUrl)?,
2818                            subdirectory: direct.subdirectory.clone(),
2819                            ext: DistExtension::Wheel,
2820                        });
2821                        let direct_dist = DirectUrlBuiltDist {
2822                            filename,
2823                            location: Box::new(url.clone()),
2824                            url: VerbatimUrl::from_url(url),
2825                        };
2826                        let built_dist = BuiltDist::DirectUrl(direct_dist);
2827                        Dist::Built(built_dist)
2828                    }
2829                    Source::Git(_, _) => {
2830                        return Err(LockErrorKind::InvalidWheelSource {
2831                            id: self.id.clone(),
2832                            source_type: "Git",
2833                        }
2834                        .into());
2835                    }
2836                    Source::Directory(_) => {
2837                        return Err(LockErrorKind::InvalidWheelSource {
2838                            id: self.id.clone(),
2839                            source_type: "directory",
2840                        }
2841                        .into());
2842                    }
2843                    Source::Editable(_) => {
2844                        return Err(LockErrorKind::InvalidWheelSource {
2845                            id: self.id.clone(),
2846                            source_type: "editable",
2847                        }
2848                        .into());
2849                    }
2850                    Source::Virtual(_) => {
2851                        return Err(LockErrorKind::InvalidWheelSource {
2852                            id: self.id.clone(),
2853                            source_type: "virtual",
2854                        }
2855                        .into());
2856                    }
2857                };
2858
2859                return Ok(HashedDist { dist, hashes });
2860            }
2861        }
2862
2863        if let Some(sdist) = self.to_source_dist(workspace_root)? {
2864            // Even with `--no-build`, allow virtual packages. (In the future, we may want to allow
2865            // any local source tree, or at least editable source trees, which we allow in
2866            // `uv pip`.)
2867            if !no_build || sdist.is_virtual() {
2868                let hashes = self
2869                    .sdist
2870                    .as_ref()
2871                    .and_then(|s| s.hash())
2872                    .map(|hash| HashDigests::from(vec![hash.0.clone()]))
2873                    .unwrap_or_else(|| HashDigests::from(vec![]));
2874                return Ok(HashedDist {
2875                    dist: Dist::Source(sdist),
2876                    hashes,
2877                });
2878            }
2879        }
2880
2881        match (no_binary, no_build) {
2882            (true, true) => Err(LockErrorKind::NoBinaryNoBuild {
2883                id: self.id.clone(),
2884            }
2885            .into()),
2886            (true, false) if self.id.source.is_wheel() => Err(LockErrorKind::NoBinaryWheelOnly {
2887                id: self.id.clone(),
2888            }
2889            .into()),
2890            (true, false) => Err(LockErrorKind::NoBinary {
2891                id: self.id.clone(),
2892            }
2893            .into()),
2894            (false, true) => Err(LockErrorKind::NoBuild {
2895                id: self.id.clone(),
2896            }
2897            .into()),
2898            (false, false) if self.id.source.is_wheel() => Err(LockError {
2899                kind: Box::new(LockErrorKind::IncompatibleWheelOnly {
2900                    id: self.id.clone(),
2901                }),
2902                hint: self.tag_hint(tag_policy, markers),
2903            }),
2904            (false, false) => Err(LockError {
2905                kind: Box::new(LockErrorKind::NeitherSourceDistNorWheel {
2906                    id: self.id.clone(),
2907                }),
2908                hint: self.tag_hint(tag_policy, markers),
2909            }),
2910        }
2911    }
2912
2913    /// Generate a [`WheelTagHint`] based on wheel-tag incompatibilities.
2914    fn tag_hint(
2915        &self,
2916        tag_policy: TagPolicy<'_>,
2917        markers: &MarkerEnvironment,
2918    ) -> Option<WheelTagHint> {
2919        let filenames = self
2920            .wheels
2921            .iter()
2922            .map(|wheel| &wheel.filename)
2923            .collect::<Vec<_>>();
2924        WheelTagHint::from_wheels(
2925            &self.id.name,
2926            self.id.version.as_ref(),
2927            &filenames,
2928            tag_policy.tags(),
2929            markers,
2930        )
2931    }
2932
2933    /// Convert the source of this [`Package`] to a [`SourceDist`] that can be used in installation.
2934    ///
2935    /// Returns `Ok(None)` if the source cannot be converted: either the source is a registry
2936    /// source with no `sdist` entry, or a path or direct URL source that points at a wheel.
2937    fn to_source_dist(
2938        &self,
2939        workspace_root: &Path,
2940    ) -> Result<Option<uv_distribution_types::SourceDist>, LockError> {
2941        let sdist = match &self.id.source {
2942            Source::Path(path) => {
2943                // A direct path source can also be a wheel, so validate the extension.
2944                let DistExtension::Source(ext) = DistExtension::from_path(path).map_err(|err| {
2945                    LockErrorKind::MissingExtension {
2946                        id: self.id.clone(),
2947                        err,
2948                    }
2949                })?
2950                else {
2951                    return Ok(None);
2952                };
2953                let install_path = absolute_path(workspace_root, path)?;
2954                let path_dist = PathSourceDist {
2955                    name: self.id.name.clone(),
2956                    version: self.id.version.clone(),
2957                    url: verbatim_url(&install_path, &self.id)?,
2958                    install_path: install_path.into_boxed_path(),
2959                    ext,
2960                };
2961                uv_distribution_types::SourceDist::Path(path_dist)
2962            }
2963            Source::Directory(path) => {
2964                let install_path = absolute_path(workspace_root, path)?;
2965                let dir_dist = DirectorySourceDist {
2966                    name: self.id.name.clone(),
2967                    url: verbatim_url(&install_path, &self.id)?,
2968                    install_path: install_path.into_boxed_path(),
2969                    editable: Some(false),
2970                    r#virtual: Some(false),
2971                };
2972                uv_distribution_types::SourceDist::Directory(dir_dist)
2973            }
2974            Source::Editable(path) => {
2975                let install_path = absolute_path(workspace_root, path)?;
2976                let dir_dist = DirectorySourceDist {
2977                    name: self.id.name.clone(),
2978                    url: verbatim_url(&install_path, &self.id)?,
2979                    install_path: install_path.into_boxed_path(),
2980                    editable: Some(true),
2981                    r#virtual: Some(false),
2982                };
2983                uv_distribution_types::SourceDist::Directory(dir_dist)
2984            }
2985            Source::Virtual(path) => {
2986                let install_path = absolute_path(workspace_root, path)?;
2987                let dir_dist = DirectorySourceDist {
2988                    name: self.id.name.clone(),
2989                    url: verbatim_url(&install_path, &self.id)?,
2990                    install_path: install_path.into_boxed_path(),
2991                    editable: Some(false),
2992                    r#virtual: Some(true),
2993                };
2994                uv_distribution_types::SourceDist::Directory(dir_dist)
2995            }
2996            Source::Git(url, git) => {
2997                // Remove the fragment and query from the URL; they're already present in the
2998                // `GitSource`.
2999                let mut url = url.to_url().map_err(LockErrorKind::InvalidUrl)?;
3000                url.set_fragment(None);
3001                url.set_query(None);
3002
3003                // Reconstruct the `GitUrl` from the `GitSource`.
3004                let git_url = GitUrl::from_commit(
3005                    url,
3006                    GitReference::from(git.kind.clone()),
3007                    git.precise,
3008                    git.lfs,
3009                )?;
3010
3011                // Reconstruct the PEP 508-compatible URL from the `GitSource`.
3012                let url = DisplaySafeUrl::from(ParsedGitUrl {
3013                    url: git_url.clone(),
3014                    subdirectory: git.subdirectory.clone(),
3015                });
3016
3017                let git_dist = GitSourceDist {
3018                    name: self.id.name.clone(),
3019                    url: VerbatimUrl::from_url(url),
3020                    git: Box::new(git_url),
3021                    subdirectory: git.subdirectory.clone(),
3022                };
3023                uv_distribution_types::SourceDist::Git(git_dist)
3024            }
3025            Source::Direct(url, direct) => {
3026                // A direct URL source can also be a wheel, so validate the extension.
3027                let DistExtension::Source(ext) =
3028                    DistExtension::from_path(url.base_str()).map_err(|err| {
3029                        LockErrorKind::MissingExtension {
3030                            id: self.id.clone(),
3031                            err,
3032                        }
3033                    })?
3034                else {
3035                    return Ok(None);
3036                };
3037                let location = url.to_url().map_err(LockErrorKind::InvalidUrl)?;
3038                let url = DisplaySafeUrl::from(ParsedArchiveUrl {
3039                    url: location.clone(),
3040                    subdirectory: direct.subdirectory.clone(),
3041                    ext: DistExtension::Source(ext),
3042                });
3043                let direct_dist = DirectUrlSourceDist {
3044                    name: self.id.name.clone(),
3045                    location: Box::new(location),
3046                    subdirectory: direct.subdirectory.clone(),
3047                    ext,
3048                    url: VerbatimUrl::from_url(url),
3049                };
3050                uv_distribution_types::SourceDist::DirectUrl(direct_dist)
3051            }
3052            Source::Registry(RegistrySource::Url(url)) => {
3053                let Some(ref sdist) = self.sdist else {
3054                    return Ok(None);
3055                };
3056
3057                let name = &self.id.name;
3058                let version = self
3059                    .id
3060                    .version
3061                    .as_ref()
3062                    .expect("version for registry source");
3063
3064                let file_url = sdist.url().ok_or_else(|| LockErrorKind::MissingUrl {
3065                    name: name.clone(),
3066                    version: version.clone(),
3067                })?;
3068                let filename = sdist
3069                    .filename()
3070                    .ok_or_else(|| LockErrorKind::MissingFilename {
3071                        id: self.id.clone(),
3072                    })?;
3073                let ext = SourceDistExtension::from_path(filename.as_ref()).map_err(|err| {
3074                    LockErrorKind::MissingExtension {
3075                        id: self.id.clone(),
3076                        err,
3077                    }
3078                })?;
3079                let file = Box::new(uv_distribution_types::File {
3080                    dist_info_metadata: false,
3081                    filename: SmallString::from(filename),
3082                    hashes: sdist.hash().map_or(HashDigests::empty(), |hash| {
3083                        HashDigests::from(hash.0.clone())
3084                    }),
3085                    requires_python: None,
3086                    size: sdist.size(),
3087                    upload_time_utc_ms: sdist.upload_time().map(Timestamp::as_millisecond),
3088                    url: FileLocation::AbsoluteUrl(file_url.clone()),
3089                    yanked: None,
3090                    zstd: None,
3091                });
3092
3093                let index = IndexUrl::from(VerbatimUrl::from_url(
3094                    url.to_url().map_err(LockErrorKind::InvalidUrl)?,
3095                ));
3096
3097                let reg_dist = RegistrySourceDist {
3098                    name: name.clone(),
3099                    version: version.clone(),
3100                    file,
3101                    ext,
3102                    index,
3103                    wheels: vec![],
3104                };
3105                uv_distribution_types::SourceDist::Registry(reg_dist)
3106            }
3107            Source::Registry(RegistrySource::Path(path)) => {
3108                let Some(ref sdist) = self.sdist else {
3109                    return Ok(None);
3110                };
3111
3112                let name = &self.id.name;
3113                let version = self
3114                    .id
3115                    .version
3116                    .as_ref()
3117                    .expect("version for registry source");
3118
3119                let file_url = match sdist {
3120                    SourceDist::Url { url: file_url, .. } => {
3121                        FileLocation::AbsoluteUrl(file_url.clone())
3122                    }
3123                    SourceDist::Path {
3124                        path: file_path, ..
3125                    } => {
3126                        let file_path = workspace_root.join(path).join(file_path);
3127                        let file_url =
3128                            DisplaySafeUrl::from_file_path(&file_path).map_err(|()| {
3129                                LockErrorKind::PathToUrl {
3130                                    path: file_path.into_boxed_path(),
3131                                }
3132                            })?;
3133                        FileLocation::AbsoluteUrl(UrlString::from(file_url))
3134                    }
3135                    SourceDist::Metadata { .. } => {
3136                        return Err(LockErrorKind::MissingPath {
3137                            name: name.clone(),
3138                            version: version.clone(),
3139                        }
3140                        .into());
3141                    }
3142                };
3143                let filename = sdist
3144                    .filename()
3145                    .ok_or_else(|| LockErrorKind::MissingFilename {
3146                        id: self.id.clone(),
3147                    })?;
3148                let ext = SourceDistExtension::from_path(filename.as_ref()).map_err(|err| {
3149                    LockErrorKind::MissingExtension {
3150                        id: self.id.clone(),
3151                        err,
3152                    }
3153                })?;
3154                let file = Box::new(uv_distribution_types::File {
3155                    dist_info_metadata: false,
3156                    filename: SmallString::from(filename),
3157                    hashes: sdist.hash().map_or(HashDigests::empty(), |hash| {
3158                        HashDigests::from(hash.0.clone())
3159                    }),
3160                    requires_python: None,
3161                    size: sdist.size(),
3162                    upload_time_utc_ms: sdist.upload_time().map(Timestamp::as_millisecond),
3163                    url: file_url,
3164                    yanked: None,
3165                    zstd: None,
3166                });
3167
3168                let index = IndexUrl::from(
3169                    VerbatimUrl::from_absolute_path(workspace_root.join(path))
3170                        .map_err(LockErrorKind::RegistryVerbatimUrl)?,
3171                );
3172
3173                let reg_dist = RegistrySourceDist {
3174                    name: name.clone(),
3175                    version: version.clone(),
3176                    file,
3177                    ext,
3178                    index,
3179                    wheels: vec![],
3180                };
3181                uv_distribution_types::SourceDist::Registry(reg_dist)
3182            }
3183        };
3184
3185        Ok(Some(sdist))
3186    }
3187
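    /// Renders this package as a TOML table for the lockfile's `package` array, omitting any
    /// sections that are empty.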
3188    fn to_toml(
3189        &self,
3190        requires_python: &RequiresPython,
3191        dist_count_by_name: &FxHashMap<PackageName, u64>,
3192    ) -> Result<Table, toml_edit::ser::Error> {
3193        let mut table = Table::new();
3194
3195        self.id.to_toml(None, &mut table);
3196
3197        if !self.fork_markers.is_empty() {
3198            let fork_markers = each_element_on_its_line_array(
3199                simplified_universal_markers(&self.fork_markers, requires_python).into_iter(),
3200            );
3201            if !fork_markers.is_empty() {
3202                table.insert("resolution-markers", value(fork_markers));
3203            }
3204        }
3205
3206        if !self.dependencies.is_empty() {
3207            let deps = each_element_on_its_line_array(self.dependencies.iter().map(|dep| {
3208                dep.to_toml(requires_python, dist_count_by_name)
3209                    .into_inline_table()
3210            }));
3211            table.insert("dependencies", value(deps));
3212        }
3213
3214        if !self.optional_dependencies.is_empty() {
3215            let mut optional_deps = Table::new();
3216            for (extra, deps) in &self.optional_dependencies {
3217                let deps = each_element_on_its_line_array(deps.iter().map(|dep| {
3218                    dep.to_toml(requires_python, dist_count_by_name)
3219                        .into_inline_table()
3220                }));
3221                if !deps.is_empty() {
3222                    optional_deps.insert(extra.as_ref(), value(deps));
3223                }
3224            }
3225            if !optional_deps.is_empty() {
3226                table.insert("optional-dependencies", Item::Table(optional_deps));
3227            }
3228        }
3229
3230        if !self.dependency_groups.is_empty() {
3231            let mut dependency_groups = Table::new();
3232            for (extra, deps) in &self.dependency_groups {
3233                let deps = each_element_on_its_line_array(deps.iter().map(|dep| {
3234                    dep.to_toml(requires_python, dist_count_by_name)
3235                        .into_inline_table()
3236                }));
3237                if !deps.is_empty() {
3238                    dependency_groups.insert(extra.as_ref(), value(deps));
3239                }
3240            }
3241            if !dependency_groups.is_empty() {
3242                table.insert("dev-dependencies", Item::Table(dependency_groups));
3243            }
3244        }
3245
3246        if let Some(ref sdist) = self.sdist {
3247            table.insert("sdist", value(sdist.to_toml()?));
3248        }
3249
3250        if !self.wheels.is_empty() {
3251            let wheels = each_element_on_its_line_array(
3252                self.wheels
3253                    .iter()
3254                    .map(Wheel::to_toml)
3255                    .collect::<Result<Vec<_>, _>>()?
3256                    .into_iter(),
3257            );
3258            table.insert("wheels", value(wheels));
3259        }
3260
3261        // Write the package metadata, if non-empty.
3262        {
3263            let mut metadata_table = Table::new();
3264
3265            if !self.metadata.requires_dist.is_empty() {
3266                let requires_dist = self
3267                    .metadata
3268                    .requires_dist
3269                    .iter()
3270                    .map(|requirement| {
3271                        serde::Serialize::serialize(
3272                            &requirement,
3273                            toml_edit::ser::ValueSerializer::new(),
3274                        )
3275                    })
3276                    .collect::<Result<Vec<_>, _>>()?;
3277                let requires_dist = match requires_dist.as_slice() {
3278                    [] => Array::new(),
3279                    [requirement] => Array::from_iter([requirement]),
3280                    requires_dist => each_element_on_its_line_array(requires_dist.iter()),
3281                };
3282                metadata_table.insert("requires-dist", value(requires_dist));
3283            }
3284
3285            if !self.metadata.dependency_groups.is_empty() {
3286                let mut dependency_groups = Table::new();
3287                for (extra, deps) in &self.metadata.dependency_groups {
3288                    let deps = deps
3289                        .iter()
3290                        .map(|requirement| {
3291                            serde::Serialize::serialize(
3292                                &requirement,
3293                                toml_edit::ser::ValueSerializer::new(),
3294                            )
3295                        })
3296                        .collect::<Result<Vec<_>, _>>()?;
3297                    let deps = match deps.as_slice() {
3298                        [] => Array::new(),
3299                        [requirement] => Array::from_iter([requirement]),
3300                        deps => each_element_on_its_line_array(deps.iter()),
3301                    };
3302                    dependency_groups.insert(extra.as_ref(), value(deps));
3303                }
3304                if !dependency_groups.is_empty() {
3305                    metadata_table.insert("requires-dev", Item::Table(dependency_groups));
3306                }
3307            }
3308
3309            if !self.metadata.provides_extra.is_empty() {
3310                let provides_extras = self
3311                    .metadata
3312                    .provides_extra
3313                    .iter()
3314                    .map(|extra| {
3315                        serde::Serialize::serialize(&extra, toml_edit::ser::ValueSerializer::new())
3316                    })
3317                    .collect::<Result<Vec<_>, _>>()?;
3318                // This is just a list of names, so linebreaking it is excessive.
3319                let provides_extras = Array::from_iter(provides_extras);
3320                metadata_table.insert("provides-extras", value(provides_extras));
3321            }
3322
3323            if !metadata_table.is_empty() {
3324                table.insert("metadata", Item::Table(metadata_table));
3325            }
3326        }
3327
3328        Ok(table)
3329    }
3330
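    /// Picks the index of the most preferable compatible wheel, ranking by tag priority and then
    /// by build tag. Under [`TagPolicy::Preferred`], falls back to the first listed wheel when
    /// none are compatible.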
3331    fn find_best_wheel(&self, tag_policy: TagPolicy<'_>) -> Option<usize> {
3332        type WheelPriority<'lock> = (TagPriority, Option<&'lock BuildTag>);
3333
3334        let mut best: Option<(WheelPriority, usize)> = None;
3335        for (i, wheel) in self.wheels.iter().enumerate() {
3336            let TagCompatibility::Compatible(tag_priority) =
3337                wheel.filename.compatibility(tag_policy.tags())
3338            else {
3339                continue;
3340            };
3341            let build_tag = wheel.filename.build_tag();
3342            let wheel_priority = (tag_priority, build_tag);
3343            match best {
3344                None => {
3345                    best = Some((wheel_priority, i));
3346                }
3347                Some((best_priority, _)) => {
3348                    if wheel_priority > best_priority {
3349                        best = Some((wheel_priority, i));
3350                    }
3351                }
3352            }
3353        }
3354
3355        let best = best.map(|(_, i)| i);
3356        match tag_policy {
3357            TagPolicy::Required(_) => best,
3358            TagPolicy::Preferred(_) => best.or_else(|| self.wheels.first().map(|_| 0)),
3359        }
3360    }
3361
3362    /// Returns the [`PackageName`] of the package.
3363    pub fn name(&self) -> &PackageName {
3364        &self.id.name
3365    }
3366
3367    /// Returns the [`Version`] of the package.
3368    pub fn version(&self) -> Option<&Version> {
3369        self.id.version.as_ref()
3370    }
3371
3372    /// Returns the Git SHA of the package, if it is a Git source.
3373    pub fn git_sha(&self) -> Option<&GitOid> {
3374        match &self.id.source {
3375            Source::Git(_, git) => Some(&git.precise),
3376            _ => None,
3377        }
3378    }
3379
3380    /// Return the fork markers for this package, if any.
3381    pub fn fork_markers(&self) -> &[UniversalMarker] {
3382        self.fork_markers.as_slice()
3383    }
3384
3385    /// Returns the [`IndexUrl`] for the package, if it is a registry source.
3386    pub fn index(&self, root: &Path) -> Result<Option<IndexUrl>, LockError> {
3387        match &self.id.source {
3388            Source::Registry(RegistrySource::Url(url)) => {
3389                let index = IndexUrl::from(VerbatimUrl::from_url(
3390                    url.to_url().map_err(LockErrorKind::InvalidUrl)?,
3391                ));
3392                Ok(Some(index))
3393            }
3394            Source::Registry(RegistrySource::Path(path)) => {
3395                let index = IndexUrl::from(
3396                    VerbatimUrl::from_absolute_path(root.join(path))
3397                        .map_err(LockErrorKind::RegistryVerbatimUrl)?,
3398                );
3399                Ok(Some(index))
3400            }
3401            _ => Ok(None),
3402        }
3403    }
3404
3405    /// Returns all the hashes associated with this [`Package`].
3406    fn hashes(&self) -> HashDigests {
3407        let mut hashes = Vec::with_capacity(
3408            usize::from(self.sdist.as_ref().and_then(|sdist| sdist.hash()).is_some())
3409                + self
3410                    .wheels
3411                    .iter()
3412                    .map(|wheel| usize::from(wheel.hash.is_some()))
3413                    .sum::<usize>(),
3414        );
3415        if let Some(ref sdist) = self.sdist {
3416            if let Some(hash) = sdist.hash() {
3417                hashes.push(hash.0.clone());
3418            }
3419        }
3420        for wheel in &self.wheels {
3421            hashes.extend(wheel.hash.as_ref().map(|h| h.0.clone()));
3422            if let Some(zstd) = wheel.zstd.as_ref() {
3423                hashes.extend(zstd.hash.as_ref().map(|h| h.0.clone()));
3424            }
3425        }
3426        HashDigests::from(hashes)
3427    }
3428
3429    /// Returns the [`ResolvedRepositoryReference`] for the package, if it is a Git source.
3430    pub fn as_git_ref(&self) -> Result<Option<ResolvedRepositoryReference>, LockError> {
3431        match &self.id.source {
3432            Source::Git(url, git) => Ok(Some(ResolvedRepositoryReference {
3433                reference: RepositoryReference {
3434                    url: RepositoryUrl::new(&url.to_url().map_err(LockErrorKind::InvalidUrl)?),
3435                    reference: GitReference::from(git.kind.clone()),
3436                },
3437                sha: git.precise,
3438            })),
3439            _ => Ok(None),
3440        }
3441    }
3442
3443    /// Returns `true` if the package is a dynamic source tree.
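    ///
    /// Dynamic source trees are recorded without a version (see
    /// [`PackageId::from_annotated_dist`]), so the absence of a version identifies them here.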
3444    fn is_dynamic(&self) -> bool {
3445        self.id.version.is_none()
3446    }
3447
3448    /// Returns the extras the package provides, if any.
3449    pub fn provides_extras(&self) -> &[ExtraName] {
3450        &self.metadata.provides_extra
3451    }
3452
3453    /// Returns the dependency groups the package provides, if any.
3454    pub fn dependency_groups(&self) -> &BTreeMap<GroupName, BTreeSet<Requirement>> {
3455        &self.metadata.dependency_groups
3456    }
3457
3458    /// Returns the dependencies of the package.
3459    pub fn dependencies(&self) -> &[Dependency] {
3460        &self.dependencies
3461    }
3462
3463    /// Returns the optional dependencies of the package.
3464    pub fn optional_dependencies(&self) -> &BTreeMap<ExtraName, Vec<Dependency>> {
3465        &self.optional_dependencies
3466    }
3467
3468    /// Returns the resolved PEP 735 dependency groups of the package.
3469    pub fn resolved_dependency_groups(&self) -> &BTreeMap<GroupName, Vec<Dependency>> {
3470        &self.dependency_groups
3471    }
3472
3473    /// Returns an [`InstallTarget`] view for filtering decisions.
3474    pub fn as_install_target(&self) -> InstallTarget<'_> {
3475        InstallTarget {
3476            name: self.name(),
3477            is_local: self.id.source.is_local(),
3478        }
3479    }
3480}
3481
3482/// Attempts to construct a `VerbatimUrl` from the given normalized `Path`.
3483fn verbatim_url(path: &Path, id: &PackageId) -> Result<VerbatimUrl, LockError> {
3484    let url =
3485        VerbatimUrl::from_normalized_path(path).map_err(|err| LockErrorKind::VerbatimUrl {
3486            id: id.clone(),
3487            err,
3488        })?;
3489    Ok(url)
3490}
3491
3492/// Attempts to construct an absolute path from the given `Path`.
3493fn absolute_path(workspace_root: &Path, path: &Path) -> Result<PathBuf, LockError> {
3494    let path = uv_fs::normalize_absolute_path(&workspace_root.join(path))
3495        .map_err(LockErrorKind::AbsolutePath)?;
3496    Ok(path)
3497}
3498
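/// The TOML-facing representation of a [`Package`], as deserialized from the lockfile and prior
/// to resolving its dependency keys against the rest of the lock (see [`PackageWire::unwire`]).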
3499#[derive(Clone, Debug, serde::Deserialize)]
3500#[serde(rename_all = "kebab-case")]
3501struct PackageWire {
3502    #[serde(flatten)]
3503    id: PackageId,
3504    #[serde(default)]
3505    metadata: PackageMetadata,
3506    #[serde(default)]
3507    sdist: Option<SourceDist>,
3508    #[serde(default)]
3509    wheels: Vec<Wheel>,
3510    #[serde(default, rename = "resolution-markers")]
3511    fork_markers: Vec<SimplifiedMarkerTree>,
3512    #[serde(default)]
3513    dependencies: Vec<DependencyWire>,
3514    #[serde(default)]
3515    optional_dependencies: BTreeMap<ExtraName, Vec<DependencyWire>>,
3516    #[serde(default, rename = "dev-dependencies", alias = "dependency-groups")]
3517    dependency_groups: BTreeMap<GroupName, Vec<DependencyWire>>,
3518}
3519
3520#[derive(Clone, Default, Debug, Eq, PartialEq, serde::Deserialize)]
3521#[serde(rename_all = "kebab-case")]
3522struct PackageMetadata {
3523    #[serde(default)]
3524    requires_dist: BTreeSet<Requirement>,
3525    #[serde(default, rename = "provides-extras")]
3526    provides_extra: Box<[ExtraName]>,
3527    #[serde(default, rename = "requires-dev", alias = "dependency-groups")]
3528    dependency_groups: BTreeMap<GroupName, BTreeSet<Requirement>>,
3529}
3530
3531impl PackageWire {
3532    fn unwire(
3533        self,
3534        requires_python: &RequiresPython,
3535        unambiguous_package_ids: &FxHashMap<PackageName, PackageId>,
3536    ) -> Result<Package, LockError> {
3537        // Consistency check
3538        if !uv_flags::contains(uv_flags::EnvironmentFlags::SKIP_WHEEL_FILENAME_CHECK) {
3539            if let Some(version) = &self.id.version {
3540                for wheel in &self.wheels {
3541                    if *version != wheel.filename.version
3542                        && *version != wheel.filename.version.clone().without_local()
3543                    {
3544                        return Err(LockError::from(LockErrorKind::InconsistentVersions {
3545                            name: self.id.name,
3546                            version: version.clone(),
3547                            wheel: wheel.clone(),
3548                        }));
3549                    }
3550                }
3551                // We can't check the source dist version since it does not need to contain the version
3552                // in the filename.
3553            }
3554        }
3555
3556        let unwire_deps = |deps: Vec<DependencyWire>| -> Result<Vec<Dependency>, LockError> {
3557            deps.into_iter()
3558                .map(|dep| dep.unwire(requires_python, unambiguous_package_ids))
3559                .collect()
3560        };
3561
3562        Ok(Package {
3563            id: self.id,
3564            metadata: self.metadata,
3565            sdist: self.sdist,
3566            wheels: self.wheels,
3567            fork_markers: self
3568                .fork_markers
3569                .into_iter()
3570                .map(|simplified_marker| simplified_marker.into_marker(requires_python))
3571                .map(UniversalMarker::from_combined)
3572                .collect(),
3573            dependencies: unwire_deps(self.dependencies)?,
3574            optional_dependencies: self
3575                .optional_dependencies
3576                .into_iter()
3577                .map(|(extra, deps)| Ok((extra, unwire_deps(deps)?)))
3578                .collect::<Result<_, LockError>>()?,
3579            dependency_groups: self
3580                .dependency_groups
3581                .into_iter()
3582                .map(|(group, deps)| Ok((group, unwire_deps(deps)?)))
3583                .collect::<Result<_, LockError>>()?,
3584        })
3585    }
3586}
3587
3588/// Inside the lockfile, we match a dependency entry to a package entry through a key made up
3589/// of the name, the version, and the source URL.
3590#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, serde::Deserialize)]
3591#[serde(rename_all = "kebab-case")]
3592pub(crate) struct PackageId {
3593    pub(crate) name: PackageName,
3594    pub(crate) version: Option<Version>,
3595    source: Source,
3596}
3597
3598impl PackageId {
3599    fn from_annotated_dist(annotated_dist: &AnnotatedDist, root: &Path) -> Result<Self, LockError> {
3600        // Identify the source of the package.
3601        let source = Source::from_resolved_dist(&annotated_dist.dist, root)?;
3602        // Omit versions for dynamic source trees.
3603        let version = if source.is_source_tree()
3604            && annotated_dist
3605                .metadata
3606                .as_ref()
3607                .is_some_and(|metadata| metadata.dynamic)
3608        {
3609            None
3610        } else {
3611            Some(annotated_dist.version.clone())
3612        };
3613        let name = annotated_dist.name.clone();
3614        Ok(Self {
3615            name,
3616            version,
3617            source,
3618        })
3619    }
3620
3621    /// Writes this package ID inline into the table given.
3622    ///
3623    /// When a map is given, and if the package name in this ID is unambiguous
3624    /// (i.e., it has a count of 1 in the map), then the `version` and `source`
3625    /// fields are omitted. In all other cases, including when a map is not
3626    /// given, the `version` and `source` fields are written.
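    ///
    /// For illustration (the entry is made up): an unambiguous name may be written as just
    /// `name = "idna"`, while an ambiguous one also carries `version = "3.6"` and
    /// `source = { registry = "https://pypi.org/simple" }`.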
3627    fn to_toml(&self, dist_count_by_name: Option<&FxHashMap<PackageName, u64>>, table: &mut Table) {
3628        let count = dist_count_by_name.and_then(|map| map.get(&self.name).copied());
3629        table.insert("name", value(self.name.to_string()));
3630        if count.map(|count| count > 1).unwrap_or(true) {
3631            if let Some(version) = &self.version {
3632                table.insert("version", value(version.to_string()));
3633            }
3634            self.source.to_toml(table);
3635        }
3636    }
3637}
3638
3639impl Display for PackageId {
3640    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
3641        if let Some(version) = &self.version {
3642            write!(f, "{}=={} @ {}", self.name, version, self.source)
3643        } else {
3644            write!(f, "{} @ {}", self.name, self.source)
3645        }
3646    }
3647}
3648
3649#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, serde::Deserialize)]
3650#[serde(rename_all = "kebab-case")]
3651struct PackageIdForDependency {
3652    name: PackageName,
3653    version: Option<Version>,
3654    source: Option<Source>,
3655}
3656
3657impl PackageIdForDependency {
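    /// Resolves this dependency key into a full [`PackageId`], filling in an omitted `source` or
    /// `version` from the unambiguous-package map where possible.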
3658    fn unwire(
3659        self,
3660        unambiguous_package_ids: &FxHashMap<PackageName, PackageId>,
3661    ) -> Result<PackageId, LockError> {
3662        let unambiguous_package_id = unambiguous_package_ids.get(&self.name);
3663        let source = self.source.map(Ok::<_, LockError>).unwrap_or_else(|| {
3664            let Some(package_id) = unambiguous_package_id else {
3665                return Err(LockErrorKind::MissingDependencySource {
3666                    name: self.name.clone(),
3667                }
3668                .into());
3669            };
3670            Ok(package_id.source.clone())
3671        })?;
3672        let version = if let Some(version) = self.version {
3673            Some(version)
3674        } else if let Some(package_id) = unambiguous_package_id {
3675            package_id.version.clone()
3676        } else if source.is_source_tree() {
3677            // If the package is a source tree, assume that the missing `self.version` field is
3678            // indicative of a dynamic version.
3679            None
3680        } else {
3681            return Err(LockErrorKind::MissingDependencyVersion {
3682                name: self.name.clone(),
3683            }
3684            .into());
3685        };
3690        Ok(PackageId {
3691            name: self.name,
3692            version,
3693            source,
3694        })
3695    }
3696}
3697
3698impl From<PackageId> for PackageIdForDependency {
3699    fn from(id: PackageId) -> Self {
3700        Self {
3701            name: id.name,
3702            version: id.version,
3703            source: Some(id.source),
3704        }
3705    }
3706}
3707
3708/// A unique identifier to differentiate between different sources for the same version of a
3709/// package.
3710///
3711/// NOTE: Care should be taken when adding variants to this enum. Namely, new
3712/// variants should be added without changing the relative ordering of other
3713/// variants. Otherwise, this could cause the lockfile to have a different
3714/// canonical ordering of sources.
3715#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, serde::Deserialize)]
3716#[serde(try_from = "SourceWire")]
3717enum Source {
3718    /// A registry or `--find-links` index.
3719    Registry(RegistrySource),
3720    /// A Git repository.
3721    Git(UrlString, GitSource),
3722    /// A direct HTTP(S) URL.
3723    Direct(UrlString, DirectSource),
3724    /// A path to a local source or built archive.
3725    Path(Box<Path>),
3726    /// A path to a local directory.
3727    Directory(Box<Path>),
3728    /// A path to a local directory that should be installed as editable.
3729    Editable(Box<Path>),
3730    /// A path to a local directory that should not be built or installed.
3731    Virtual(Box<Path>),
3732}
3733
3734impl Source {
3735    fn from_resolved_dist(resolved_dist: &ResolvedDist, root: &Path) -> Result<Self, LockError> {
3736        match *resolved_dist {
3737            // We pass empty installed packages for locking.
3738            ResolvedDist::Installed { .. } => unreachable!(),
3739            ResolvedDist::Installable { ref dist, .. } => Self::from_dist(dist, root),
3740        }
3741    }
3742
3743    fn from_dist(dist: &Dist, root: &Path) -> Result<Self, LockError> {
3744        match *dist {
3745            Dist::Built(ref built_dist) => Self::from_built_dist(built_dist, root),
3746            Dist::Source(ref source_dist) => Self::from_source_dist(source_dist, root),
3747        }
3748    }
3749
3750    fn from_built_dist(built_dist: &BuiltDist, root: &Path) -> Result<Self, LockError> {
3751        match *built_dist {
3752            BuiltDist::Registry(ref reg_dist) => Self::from_registry_built_dist(reg_dist, root),
3753            BuiltDist::DirectUrl(ref direct_dist) => Ok(Self::from_direct_built_dist(direct_dist)),
3754            BuiltDist::Path(ref path_dist) => Self::from_path_built_dist(path_dist, root),
3755        }
3756    }
3757
3758    fn from_source_dist(
3759        source_dist: &uv_distribution_types::SourceDist,
3760        root: &Path,
3761    ) -> Result<Self, LockError> {
3762        match *source_dist {
3763            uv_distribution_types::SourceDist::Registry(ref reg_dist) => {
3764                Self::from_registry_source_dist(reg_dist, root)
3765            }
3766            uv_distribution_types::SourceDist::DirectUrl(ref direct_dist) => {
3767                Ok(Self::from_direct_source_dist(direct_dist))
3768            }
3769            uv_distribution_types::SourceDist::Git(ref git_dist) => {
3770                Ok(Self::from_git_dist(git_dist))
3771            }
3772            uv_distribution_types::SourceDist::Path(ref path_dist) => {
3773                Self::from_path_source_dist(path_dist, root)
3774            }
3775            uv_distribution_types::SourceDist::Directory(ref directory) => {
3776                Self::from_directory_source_dist(directory, root)
3777            }
3778        }
3779    }
3780
3781    fn from_registry_built_dist(
3782        reg_dist: &RegistryBuiltDist,
3783        root: &Path,
3784    ) -> Result<Self, LockError> {
3785        Self::from_index_url(&reg_dist.best_wheel().index, root)
3786    }
3787
3788    fn from_registry_source_dist(
3789        reg_dist: &RegistrySourceDist,
3790        root: &Path,
3791    ) -> Result<Self, LockError> {
3792        Self::from_index_url(&reg_dist.index, root)
3793    }
3794
3795    fn from_direct_built_dist(direct_dist: &DirectUrlBuiltDist) -> Self {
3796        Self::Direct(
3797            normalize_url(direct_dist.url.to_url()),
3798            DirectSource { subdirectory: None },
3799        )
3800    }
3801
3802    fn from_direct_source_dist(direct_dist: &DirectUrlSourceDist) -> Self {
3803        Self::Direct(
3804            normalize_url(direct_dist.url.to_url()),
3805            DirectSource {
3806                subdirectory: direct_dist.subdirectory.clone(),
3807            },
3808        )
3809    }
3810
3811    fn from_path_built_dist(path_dist: &PathBuiltDist, root: &Path) -> Result<Self, LockError> {
3812        let path = relative_to(&path_dist.install_path, root)
3813            .or_else(|_| std::path::absolute(&path_dist.install_path))
3814            .map_err(LockErrorKind::DistributionRelativePath)?;
3815        Ok(Self::Path(path.into_boxed_path()))
3816    }
3817
3818    fn from_path_source_dist(path_dist: &PathSourceDist, root: &Path) -> Result<Self, LockError> {
3819        let path = relative_to(&path_dist.install_path, root)
3820            .or_else(|_| std::path::absolute(&path_dist.install_path))
3821            .map_err(LockErrorKind::DistributionRelativePath)?;
3822        Ok(Self::Path(path.into_boxed_path()))
3823    }
3824
3825    fn from_directory_source_dist(
3826        directory_dist: &DirectorySourceDist,
3827        root: &Path,
3828    ) -> Result<Self, LockError> {
3829        let path = relative_to(&directory_dist.install_path, root)
3830            .or_else(|_| std::path::absolute(&directory_dist.install_path))
3831            .map_err(LockErrorKind::DistributionRelativePath)?;
3832        if directory_dist.editable.unwrap_or(false) {
3833            Ok(Self::Editable(path.into_boxed_path()))
3834        } else if directory_dist.r#virtual.unwrap_or(false) {
3835            Ok(Self::Virtual(path.into_boxed_path()))
3836        } else {
3837            Ok(Self::Directory(path.into_boxed_path()))
3838        }
3839    }
3840
3841    fn from_index_url(index_url: &IndexUrl, root: &Path) -> Result<Self, LockError> {
3842        match index_url {
3843            IndexUrl::Pypi(_) | IndexUrl::Url(_) => {
3844                // Remove any sensitive credentials from the index URL.
3845                let redacted = index_url.without_credentials();
3846                let source = RegistrySource::Url(UrlString::from(redacted.as_ref()));
3847                Ok(Self::Registry(source))
3848            }
3849            IndexUrl::Path(url) => {
3850                let path = url
3851                    .to_file_path()
3852                    .map_err(|()| LockErrorKind::UrlToPath { url: url.to_url() })?;
3853                let path = relative_to(&path, root)
3854                    .or_else(|_| std::path::absolute(&path))
3855                    .map_err(LockErrorKind::IndexRelativePath)?;
3856                let source = RegistrySource::Path(path.into_boxed_path());
3857                Ok(Self::Registry(source))
3858            }
3859        }
3860    }
3861
3862    fn from_git_dist(git_dist: &GitSourceDist) -> Self {
3863        Self::Git(
3864            UrlString::from(locked_git_url(git_dist)),
3865            GitSource {
3866                kind: GitSourceKind::from(git_dist.git.reference().clone()),
3867                precise: git_dist.git.precise().unwrap_or_else(|| {
3868                    panic!("Git distribution is missing a precise hash: {git_dist}")
3869                }),
3870                subdirectory: git_dist.subdirectory.clone(),
3871                lfs: git_dist.git.lfs(),
3872            },
3873        )
3874    }
3875
3876    /// Returns `true` if the source should be considered immutable.
3877    ///
3878    /// We assume that registry sources are immutable. In other words, we expect that once a
3879    /// package-version is published to a registry, its metadata will not change.
3880    ///
3881    /// We also assume that Git sources are immutable, since a Git source encodes a specific commit.
3882    fn is_immutable(&self) -> bool {
3883        matches!(self, Self::Registry(..) | Self::Git(_, _))
3884    }
3885
3886    /// Returns `true` if the source is that of a wheel.
3887    fn is_wheel(&self) -> bool {
3888        match self {
3889            Self::Path(path) => {
3890                matches!(
3891                    DistExtension::from_path(path).ok(),
3892                    Some(DistExtension::Wheel)
3893                )
3894            }
3895            Self::Direct(url, _) => {
3896                matches!(
3897                    DistExtension::from_path(url.as_ref()).ok(),
3898                    Some(DistExtension::Wheel)
3899                )
3900            }
3901            Self::Directory(..) => false,
3902            Self::Editable(..) => false,
3903            Self::Virtual(..) => false,
3904            Self::Git(..) => false,
3905            Self::Registry(..) => false,
3906        }
3907    }
3908
3909    /// Returns `true` if the source is that of a source tree.
3910    fn is_source_tree(&self) -> bool {
3911        match self {
3912            Self::Directory(..) | Self::Editable(..) | Self::Virtual(..) => true,
3913            Self::Path(..) | Self::Git(..) | Self::Registry(..) | Self::Direct(..) => false,
3914        }
3915    }
3916
3917    /// Returns the path to the source tree, if the source is a source tree.
3918    fn as_source_tree(&self) -> Option<&Path> {
3919        match self {
3920            Self::Directory(path) | Self::Editable(path) | Self::Virtual(path) => Some(path),
3921            Self::Path(..) | Self::Git(..) | Self::Registry(..) | Self::Direct(..) => None,
3922        }
3923    }
3924
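    /// Renders this source as an inline `source` table on the given package table.
    ///
    /// As a rough illustration (the URLs, paths, and commit below are
    /// hypothetical, not taken from a real lockfile), the emitted forms look
    /// like:
    ///
    /// ```text
    /// source = { registry = "https://pypi.org/simple" }
    /// source = { git = "https://github.com/example/project?branch=main#0123abc..." }
    /// source = { url = "https://example.com/archive.tar.gz", subdirectory = "python" }
    /// source = { editable = "packages/example" }
    /// ```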
3925    fn to_toml(&self, table: &mut Table) {
3926        let mut source_table = InlineTable::new();
3927        match self {
3928            Self::Registry(source) => match source {
3929                RegistrySource::Url(url) => {
3930                    source_table.insert("registry", Value::from(url.as_ref()));
3931                }
3932                RegistrySource::Path(path) => {
3933                    source_table.insert(
3934                        "registry",
3935                        Value::from(PortablePath::from(path).to_string()),
3936                    );
3937                }
3938            },
3939            Self::Git(url, _) => {
3940                source_table.insert("git", Value::from(url.as_ref()));
3941            }
3942            Self::Direct(url, DirectSource { subdirectory }) => {
3943                source_table.insert("url", Value::from(url.as_ref()));
3944                if let Some(ref subdirectory) = *subdirectory {
3945                    source_table.insert(
3946                        "subdirectory",
3947                        Value::from(PortablePath::from(subdirectory).to_string()),
3948                    );
3949                }
3950            }
3951            Self::Path(path) => {
3952                source_table.insert("path", Value::from(PortablePath::from(path).to_string()));
3953            }
3954            Self::Directory(path) => {
3955                source_table.insert(
3956                    "directory",
3957                    Value::from(PortablePath::from(path).to_string()),
3958                );
3959            }
3960            Self::Editable(path) => {
3961                source_table.insert(
3962                    "editable",
3963                    Value::from(PortablePath::from(path).to_string()),
3964                );
3965            }
3966            Self::Virtual(path) => {
3967                source_table.insert("virtual", Value::from(PortablePath::from(path).to_string()));
3968            }
3969        }
3970        table.insert("source", value(source_table));
3971    }
3972
    /// Returns `true` if the package is local, as determined by its source.
3974    pub(crate) fn is_local(&self) -> bool {
3975        matches!(
3976            self,
3977            Self::Path(_) | Self::Directory(_) | Self::Editable(_) | Self::Virtual(_)
3978        )
3979    }
3980}
3981
3982impl Display for Source {
3983    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
3984        match self {
3985            Self::Registry(RegistrySource::Url(url)) | Self::Git(url, _) | Self::Direct(url, _) => {
3986                write!(f, "{}+{}", self.name(), url)
3987            }
3988            Self::Registry(RegistrySource::Path(path))
3989            | Self::Path(path)
3990            | Self::Directory(path)
3991            | Self::Editable(path)
3992            | Self::Virtual(path) => {
3993                write!(f, "{}+{}", self.name(), PortablePath::from(path))
3994            }
3995        }
3996    }
3997}
3998
3999impl Source {
4000    fn name(&self) -> &str {
4001        match self {
4002            Self::Registry(..) => "registry",
4003            Self::Git(..) => "git",
4004            Self::Direct(..) => "direct",
4005            Self::Path(..) => "path",
4006            Self::Directory(..) => "directory",
4007            Self::Editable(..) => "editable",
4008            Self::Virtual(..) => "virtual",
4009        }
4010    }
4011
4012    /// Returns `Some(true)` to indicate that the source kind _must_ include a
4013    /// hash.
4014    ///
4015    /// Returns `Some(false)` to indicate that the source kind _must not_
4016    /// include a hash.
4017    ///
4018    /// Returns `None` to indicate that the source kind _may_ include a hash.
4019    fn requires_hash(&self) -> Option<bool> {
4020        match self {
4021            Self::Registry(..) => None,
4022            Self::Direct(..) | Self::Path(..) => Some(true),
4023            Self::Git(..) | Self::Directory(..) | Self::Editable(..) | Self::Virtual(..) => {
4024                Some(false)
4025            }
4026        }
4027    }
4028}
4029
4030#[derive(Clone, Debug, serde::Deserialize)]
4031#[serde(untagged, rename_all = "kebab-case")]
4032enum SourceWire {
4033    Registry {
4034        registry: RegistrySourceWire,
4035    },
4036    Git {
4037        git: String,
4038    },
4039    Direct {
4040        url: UrlString,
4041        subdirectory: Option<PortablePathBuf>,
4042    },
4043    Path {
4044        path: PortablePathBuf,
4045    },
4046    Directory {
4047        directory: PortablePathBuf,
4048    },
4049    Editable {
4050        editable: PortablePathBuf,
4051    },
4052    Virtual {
4053        r#virtual: PortablePathBuf,
4054    },
4055}
4056
4057impl TryFrom<SourceWire> for Source {
4058    type Error = LockError;
4059
4060    fn try_from(wire: SourceWire) -> Result<Self, LockError> {
4061        #[allow(clippy::enum_glob_use)]
4062        use self::SourceWire::*;
4063
4064        match wire {
4065            Registry { registry } => Ok(Self::Registry(registry.into())),
4066            Git { git } => {
4067                let url = DisplaySafeUrl::parse(&git)
4068                    .map_err(|err| SourceParseError::InvalidUrl {
4069                        given: git.clone(),
4070                        err,
4071                    })
4072                    .map_err(LockErrorKind::InvalidGitSourceUrl)?;
4073
4074                let git_source = GitSource::from_url(&url)
4075                    .map_err(|err| match err {
4076                        GitSourceError::InvalidSha => SourceParseError::InvalidSha { given: git },
4077                        GitSourceError::MissingSha => SourceParseError::MissingSha { given: git },
4078                    })
4079                    .map_err(LockErrorKind::InvalidGitSourceUrl)?;
4080
4081                Ok(Self::Git(UrlString::from(url), git_source))
4082            }
4083            Direct { url, subdirectory } => Ok(Self::Direct(
4084                url,
4085                DirectSource {
4086                    subdirectory: subdirectory.map(Box::<std::path::Path>::from),
4087                },
4088            )),
4089            Path { path } => Ok(Self::Path(path.into())),
4090            Directory { directory } => Ok(Self::Directory(directory.into())),
4091            Editable { editable } => Ok(Self::Editable(editable.into())),
4092            Virtual { r#virtual } => Ok(Self::Virtual(r#virtual.into())),
4093        }
4094    }
4095}
4096
4097/// The source for a registry, which could be a URL or a relative path.
4098#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
4099enum RegistrySource {
4100    /// Ex) `https://pypi.org/simple`
4101    Url(UrlString),
4102    /// Ex) `../path/to/local/index`
4103    Path(Box<Path>),
4104}
4105
4106impl Display for RegistrySource {
4107    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
4108        match self {
4109            Self::Url(url) => write!(f, "{url}"),
4110            Self::Path(path) => write!(f, "{}", path.display()),
4111        }
4112    }
4113}
4114
4115#[derive(Clone, Debug)]
4116enum RegistrySourceWire {
4117    /// Ex) `https://pypi.org/simple`
4118    Url(UrlString),
4119    /// Ex) `../path/to/local/index`
4120    Path(PortablePathBuf),
4121}
4122
4123impl<'de> serde::de::Deserialize<'de> for RegistrySourceWire {
4124    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
4125    where
4126        D: serde::de::Deserializer<'de>,
4127    {
4128        struct Visitor;
4129
4130        impl serde::de::Visitor<'_> for Visitor {
4131            type Value = RegistrySourceWire;
4132
4133            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
4134                formatter.write_str("a valid URL or a file path")
4135            }
4136
4137            fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
4138            where
4139                E: serde::de::Error,
4140            {
4141                if split_scheme(value).is_some() {
4142                    Ok(
4143                        serde::Deserialize::deserialize(serde::de::value::StrDeserializer::new(
4144                            value,
4145                        ))
4146                        .map(RegistrySourceWire::Url)?,
4147                    )
4148                } else {
4149                    Ok(
4150                        serde::Deserialize::deserialize(serde::de::value::StrDeserializer::new(
4151                            value,
4152                        ))
4153                        .map(RegistrySourceWire::Path)?,
4154                    )
4155                }
4156            }
4157        }
4158
4159        deserializer.deserialize_str(Visitor)
4160    }
4161}
4162
4163impl From<RegistrySourceWire> for RegistrySource {
4164    fn from(wire: RegistrySourceWire) -> Self {
4165        match wire {
4166            RegistrySourceWire::Url(url) => Self::Url(url),
4167            RegistrySourceWire::Path(path) => Self::Path(path.into()),
4168        }
4169    }
4170}
4171
4172#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, serde::Deserialize)]
4173#[serde(rename_all = "kebab-case")]
4174struct DirectSource {
4175    subdirectory: Option<Box<Path>>,
4176}
4177
/// NOTE: Care should be taken when adding fields to this struct. Namely, new
/// fields should be added without changing the relative ordering of the
/// existing fields, since the derived `Ord` implementation compares fields in
/// declaration order. Otherwise, this could cause the lockfile to have a
/// different canonical ordering of package entries.
4182#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
4183struct GitSource {
4184    precise: GitOid,
4185    subdirectory: Option<Box<Path>>,
4186    kind: GitSourceKind,
4187    lfs: GitLfs,
4188}
4189
4190/// An error that occurs when a source string could not be parsed.
4191#[derive(Clone, Debug, Eq, PartialEq)]
4192enum GitSourceError {
4193    InvalidSha,
4194    MissingSha,
4195}
4196
4197impl GitSource {
4198    /// Extracts a Git source reference from the query pairs and the hash
4199    /// fragment in the given URL.
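    ///
    /// As a rough sketch (the repository and commit below are hypothetical), a
    /// URL of the form:
    ///
    /// ```text
    /// https://github.com/example/project?tag=v1.2.3&subdirectory=python#0123456789abcdef0123456789abcdef01234567
    /// ```
    ///
    /// would yield `kind = Tag("v1.2.3")`, `subdirectory = Some("python")`, and
    /// `precise` set to the commit SHA from the fragment, with `lfs` left
    /// disabled because no `lfs=true` pair is present.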
4200    fn from_url(url: &Url) -> Result<Self, GitSourceError> {
4201        let mut kind = GitSourceKind::DefaultBranch;
4202        let mut subdirectory = None;
4203        let mut lfs = GitLfs::Disabled;
4204        for (key, val) in url.query_pairs() {
4205            match &*key {
4206                "tag" => kind = GitSourceKind::Tag(val.into_owned()),
4207                "branch" => kind = GitSourceKind::Branch(val.into_owned()),
4208                "rev" => kind = GitSourceKind::Rev(val.into_owned()),
4209                "subdirectory" => subdirectory = Some(PortablePathBuf::from(val.as_ref()).into()),
4210                "lfs" => lfs = GitLfs::from(val.eq_ignore_ascii_case("true")),
4211                _ => {}
4212            }
4213        }
4214
4215        let precise = GitOid::from_str(url.fragment().ok_or(GitSourceError::MissingSha)?)
4216            .map_err(|_| GitSourceError::InvalidSha)?;
4217
4218        Ok(Self {
4219            precise,
4220            subdirectory,
4221            kind,
4222            lfs,
4223        })
4224    }
4225}
4226
4227#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, serde::Deserialize)]
4228#[serde(rename_all = "kebab-case")]
4229enum GitSourceKind {
4230    Tag(String),
4231    Branch(String),
4232    Rev(String),
4233    DefaultBranch,
4234}
4235
4236/// Inspired by: <https://discuss.python.org/t/lock-files-again-but-this-time-w-sdists/46593>
4237#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
4238#[serde(rename_all = "kebab-case")]
4239struct SourceDistMetadata {
4240    /// A hash of the source distribution.
4241    hash: Option<Hash>,
4242    /// The size of the source distribution in bytes.
4243    ///
4244    /// This is only present for source distributions that come from registries.
4245    size: Option<u64>,
4246    /// The upload time of the source distribution.
4247    #[serde(alias = "upload_time")]
4248    upload_time: Option<Timestamp>,
4249}
4250
/// A URL or file path where the source distribution that was locked against
/// was found. The location does not need to exist in the future, so this
/// should be treated only as a hint for where to look and/or a record of
/// where the source distribution file originally came from.
4255#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
4256#[serde(from = "SourceDistWire")]
4257enum SourceDist {
4258    Url {
4259        url: UrlString,
4260        #[serde(flatten)]
4261        metadata: SourceDistMetadata,
4262    },
4263    Path {
4264        path: Box<Path>,
4265        #[serde(flatten)]
4266        metadata: SourceDistMetadata,
4267    },
4268    Metadata {
4269        #[serde(flatten)]
4270        metadata: SourceDistMetadata,
4271    },
4272}
4273
4274impl SourceDist {
4275    fn filename(&self) -> Option<Cow<'_, str>> {
4276        match self {
4277            Self::Metadata { .. } => None,
4278            Self::Url { url, .. } => url.filename().ok(),
4279            Self::Path { path, .. } => path.file_name().map(|filename| filename.to_string_lossy()),
4280        }
4281    }
4282
4283    fn url(&self) -> Option<&UrlString> {
4284        match self {
4285            Self::Metadata { .. } => None,
4286            Self::Url { url, .. } => Some(url),
4287            Self::Path { .. } => None,
4288        }
4289    }
4290
4291    pub(crate) fn hash(&self) -> Option<&Hash> {
4292        match self {
4293            Self::Metadata { metadata } => metadata.hash.as_ref(),
4294            Self::Url { metadata, .. } => metadata.hash.as_ref(),
4295            Self::Path { metadata, .. } => metadata.hash.as_ref(),
4296        }
4297    }
4298
4299    pub(crate) fn size(&self) -> Option<u64> {
4300        match self {
4301            Self::Metadata { metadata } => metadata.size,
4302            Self::Url { metadata, .. } => metadata.size,
4303            Self::Path { metadata, .. } => metadata.size,
4304        }
4305    }
4306
4307    pub(crate) fn upload_time(&self) -> Option<Timestamp> {
4308        match self {
4309            Self::Metadata { metadata } => metadata.upload_time,
4310            Self::Url { metadata, .. } => metadata.upload_time,
4311            Self::Path { metadata, .. } => metadata.upload_time,
4312        }
4313    }
4314}
4315
4316impl SourceDist {
4317    fn from_annotated_dist(
4318        id: &PackageId,
4319        annotated_dist: &AnnotatedDist,
4320    ) -> Result<Option<Self>, LockError> {
4321        match annotated_dist.dist {
4322            // We pass empty installed packages for locking.
4323            ResolvedDist::Installed { .. } => unreachable!(),
4324            ResolvedDist::Installable { ref dist, .. } => Self::from_dist(
4325                id,
4326                dist,
4327                annotated_dist.hashes.as_slice(),
4328                annotated_dist.index(),
4329            ),
4330        }
4331    }
4332
4333    fn from_dist(
4334        id: &PackageId,
4335        dist: &Dist,
4336        hashes: &[HashDigest],
4337        index: Option<&IndexUrl>,
4338    ) -> Result<Option<Self>, LockError> {
4339        match *dist {
4340            Dist::Built(BuiltDist::Registry(ref built_dist)) => {
4341                let Some(sdist) = built_dist.sdist.as_ref() else {
4342                    return Ok(None);
4343                };
4344                Self::from_registry_dist(sdist, index)
4345            }
4346            Dist::Built(_) => Ok(None),
4347            Dist::Source(ref source_dist) => Self::from_source_dist(id, source_dist, hashes, index),
4348        }
4349    }
4350
4351    fn from_source_dist(
4352        id: &PackageId,
4353        source_dist: &uv_distribution_types::SourceDist,
4354        hashes: &[HashDigest],
4355        index: Option<&IndexUrl>,
4356    ) -> Result<Option<Self>, LockError> {
4357        match *source_dist {
4358            uv_distribution_types::SourceDist::Registry(ref reg_dist) => {
4359                Self::from_registry_dist(reg_dist, index)
4360            }
4361            uv_distribution_types::SourceDist::DirectUrl(_) => {
4362                Self::from_direct_dist(id, hashes).map(Some)
4363            }
4364            uv_distribution_types::SourceDist::Path(_) => {
4365                Self::from_path_dist(id, hashes).map(Some)
4366            }
            // An actual sdist entry in the lockfile is only required when it
            // comes from a registry, a direct URL, or a local archive path.
            // Otherwise, it's strictly redundant with the information in all
            // other kinds of `source`.
4370            uv_distribution_types::SourceDist::Git(_)
4371            | uv_distribution_types::SourceDist::Directory(_) => Ok(None),
4372        }
4373    }
4374
4375    fn from_registry_dist(
4376        reg_dist: &RegistrySourceDist,
4377        index: Option<&IndexUrl>,
4378    ) -> Result<Option<Self>, LockError> {
4379        // Reject distributions from registries that don't match the index URL, as can occur with
4380        // `--find-links`.
4381        if index.is_none_or(|index| *index != reg_dist.index) {
4382            return Ok(None);
4383        }
4384
4385        match &reg_dist.index {
4386            IndexUrl::Pypi(_) | IndexUrl::Url(_) => {
4387                let url = normalize_file_location(&reg_dist.file.url)
4388                    .map_err(LockErrorKind::InvalidUrl)
4389                    .map_err(LockError::from)?;
4390                let hash = reg_dist.file.hashes.iter().max().cloned().map(Hash::from);
4391                let size = reg_dist.file.size;
4392                let upload_time = reg_dist
4393                    .file
4394                    .upload_time_utc_ms
4395                    .map(Timestamp::from_millisecond)
4396                    .transpose()
4397                    .map_err(LockErrorKind::InvalidTimestamp)?;
4398                Ok(Some(Self::Url {
4399                    url,
4400                    metadata: SourceDistMetadata {
4401                        hash,
4402                        size,
4403                        upload_time,
4404                    },
4405                }))
4406            }
4407            IndexUrl::Path(path) => {
4408                let index_path = path
4409                    .to_file_path()
4410                    .map_err(|()| LockErrorKind::UrlToPath { url: path.to_url() })?;
4411                let url = reg_dist
4412                    .file
4413                    .url
4414                    .to_url()
4415                    .map_err(LockErrorKind::InvalidUrl)?;
4416
4417                if url.scheme() == "file" {
4418                    let reg_dist_path = url
4419                        .to_file_path()
4420                        .map_err(|()| LockErrorKind::UrlToPath { url })?;
4421                    let path = relative_to(&reg_dist_path, index_path)
4422                        .or_else(|_| std::path::absolute(&reg_dist_path))
4423                        .map_err(LockErrorKind::DistributionRelativePath)?
4424                        .into_boxed_path();
4425                    let hash = reg_dist.file.hashes.iter().max().cloned().map(Hash::from);
4426                    let size = reg_dist.file.size;
4427                    let upload_time = reg_dist
4428                        .file
4429                        .upload_time_utc_ms
4430                        .map(Timestamp::from_millisecond)
4431                        .transpose()
4432                        .map_err(LockErrorKind::InvalidTimestamp)?;
4433                    Ok(Some(Self::Path {
4434                        path,
4435                        metadata: SourceDistMetadata {
4436                            hash,
4437                            size,
4438                            upload_time,
4439                        },
4440                    }))
4441                } else {
4442                    let url = normalize_file_location(&reg_dist.file.url)
4443                        .map_err(LockErrorKind::InvalidUrl)
4444                        .map_err(LockError::from)?;
4445                    let hash = reg_dist.file.hashes.iter().max().cloned().map(Hash::from);
4446                    let size = reg_dist.file.size;
4447                    let upload_time = reg_dist
4448                        .file
4449                        .upload_time_utc_ms
4450                        .map(Timestamp::from_millisecond)
4451                        .transpose()
4452                        .map_err(LockErrorKind::InvalidTimestamp)?;
4453                    Ok(Some(Self::Url {
4454                        url,
4455                        metadata: SourceDistMetadata {
4456                            hash,
4457                            size,
4458                            upload_time,
4459                        },
4460                    }))
4461                }
4462            }
4463        }
4464    }
4465
4466    fn from_direct_dist(id: &PackageId, hashes: &[HashDigest]) -> Result<Self, LockError> {
4467        let Some(hash) = hashes.iter().max().cloned().map(Hash::from) else {
4468            let kind = LockErrorKind::Hash {
4469                id: id.clone(),
4470                artifact_type: "direct URL source distribution",
4471                expected: true,
4472            };
4473            return Err(kind.into());
4474        };
4475        Ok(Self::Metadata {
4476            metadata: SourceDistMetadata {
4477                hash: Some(hash),
4478                size: None,
4479                upload_time: None,
4480            },
4481        })
4482    }
4483
4484    fn from_path_dist(id: &PackageId, hashes: &[HashDigest]) -> Result<Self, LockError> {
4485        let Some(hash) = hashes.iter().max().cloned().map(Hash::from) else {
4486            let kind = LockErrorKind::Hash {
4487                id: id.clone(),
4488                artifact_type: "path source distribution",
4489                expected: true,
4490            };
4491            return Err(kind.into());
4492        };
4493        Ok(Self::Metadata {
4494            metadata: SourceDistMetadata {
4495                hash: Some(hash),
4496                size: None,
4497                upload_time: None,
4498            },
4499        })
4500    }
4501}
4502
4503#[derive(Clone, Debug, serde::Deserialize)]
4504#[serde(untagged, rename_all = "kebab-case")]
4505enum SourceDistWire {
4506    Url {
4507        url: UrlString,
4508        #[serde(flatten)]
4509        metadata: SourceDistMetadata,
4510    },
4511    Path {
4512        path: PortablePathBuf,
4513        #[serde(flatten)]
4514        metadata: SourceDistMetadata,
4515    },
4516    Metadata {
4517        #[serde(flatten)]
4518        metadata: SourceDistMetadata,
4519    },
4520}
4521
4522impl SourceDist {
4523    /// Returns the TOML representation of this source distribution.
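    ///
    /// As a rough illustration (the URL, hash, and size are hypothetical, not
    /// taken from a real lockfile), a registry source distribution is rendered
    /// along the lines of:
    ///
    /// ```text
    /// { url = "https://example.org/packages/example-1.0.0.tar.gz", hash = "sha256:0123abcd...", size = 10240, upload-time = "2024-01-01T00:00:00Z" }
    /// ```
    ///
    /// A `Metadata`-only entry contributes just the `hash` (and optional
    /// `size`/`upload-time`) keys.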
4524    fn to_toml(&self) -> Result<InlineTable, toml_edit::ser::Error> {
4525        let mut table = InlineTable::new();
4526        match self {
4527            Self::Metadata { .. } => {}
4528            Self::Url { url, .. } => {
4529                table.insert("url", Value::from(url.as_ref()));
4530            }
4531            Self::Path { path, .. } => {
4532                table.insert("path", Value::from(PortablePath::from(path).to_string()));
4533            }
4534        }
4535        if let Some(hash) = self.hash() {
4536            table.insert("hash", Value::from(hash.to_string()));
4537        }
4538        if let Some(size) = self.size() {
4539            table.insert(
4540                "size",
4541                toml_edit::ser::ValueSerializer::new().serialize_u64(size)?,
4542            );
4543        }
4544        if let Some(upload_time) = self.upload_time() {
4545            table.insert("upload-time", Value::from(upload_time.to_string()));
4546        }
4547        Ok(table)
4548    }
4549}
4550
4551impl From<SourceDistWire> for SourceDist {
4552    fn from(wire: SourceDistWire) -> Self {
4553        match wire {
4554            SourceDistWire::Url { url, metadata } => Self::Url { url, metadata },
4555            SourceDistWire::Path { path, metadata } => Self::Path {
4556                path: path.into(),
4557                metadata,
4558            },
4559            SourceDistWire::Metadata { metadata } => Self::Metadata { metadata },
4560        }
4561    }
4562}
4563
4564impl From<GitReference> for GitSourceKind {
4565    fn from(value: GitReference) -> Self {
4566        match value {
4567            GitReference::Branch(branch) => Self::Branch(branch),
4568            GitReference::Tag(tag) => Self::Tag(tag),
4569            GitReference::BranchOrTag(rev) => Self::Rev(rev),
4570            GitReference::BranchOrTagOrCommit(rev) => Self::Rev(rev),
4571            GitReference::NamedRef(rev) => Self::Rev(rev),
4572            GitReference::DefaultBranch => Self::DefaultBranch,
4573        }
4574    }
4575}
4576
4577impl From<GitSourceKind> for GitReference {
4578    fn from(value: GitSourceKind) -> Self {
4579        match value {
4580            GitSourceKind::Branch(branch) => Self::Branch(branch),
4581            GitSourceKind::Tag(tag) => Self::Tag(tag),
4582            GitSourceKind::Rev(rev) => Self::from_rev(rev),
4583            GitSourceKind::DefaultBranch => Self::DefaultBranch,
4584        }
4585    }
4586}
4587
4588/// Construct the lockfile-compatible [`DisplaySafeUrl`] for a [`GitSourceDist`].
4589fn locked_git_url(git_dist: &GitSourceDist) -> DisplaySafeUrl {
4590    let mut url = git_dist.git.repository().clone();
4591
4592    // Remove the credentials.
4593    url.remove_credentials();
4594
4595    // Clear out any existing state.
4596    url.set_fragment(None);
4597    url.set_query(None);
4598
4599    // Put the subdirectory in the query.
4600    if let Some(subdirectory) = git_dist
4601        .subdirectory
4602        .as_deref()
4603        .map(PortablePath::from)
4604        .as_ref()
4605        .map(PortablePath::to_string)
4606    {
4607        url.query_pairs_mut()
4608            .append_pair("subdirectory", &subdirectory);
4609    }
4610
    // Include `lfs=true` in the locked Git URL only when LFS is explicitly enabled.
4612    if git_dist.git.lfs().enabled() {
4613        url.query_pairs_mut().append_pair("lfs", "true");
4614    }
4615
4616    // Put the requested reference in the query.
4617    match git_dist.git.reference() {
4618        GitReference::Branch(branch) => {
4619            url.query_pairs_mut().append_pair("branch", branch.as_str());
4620        }
4621        GitReference::Tag(tag) => {
4622            url.query_pairs_mut().append_pair("tag", tag.as_str());
4623        }
4624        GitReference::BranchOrTag(rev)
4625        | GitReference::BranchOrTagOrCommit(rev)
4626        | GitReference::NamedRef(rev) => {
4627            url.query_pairs_mut().append_pair("rev", rev.as_str());
4628        }
4629        GitReference::DefaultBranch => {}
4630    }
4631
4632    // Put the precise commit in the fragment.
4633    url.set_fragment(
4634        git_dist
4635            .git
4636            .precise()
4637            .as_ref()
4638            .map(GitOid::to_string)
4639            .as_deref(),
4640    );
4641
4642    url
4643}
4644
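/// Hash and size metadata for a zstandard-compressed wheel, as recorded in the
/// lockfile.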
4645#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
4646struct ZstdWheel {
4647    hash: Option<Hash>,
4648    size: Option<u64>,
4649}
4650
4651/// Inspired by: <https://discuss.python.org/t/lock-files-again-but-this-time-w-sdists/46593>
4652#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
4653#[serde(try_from = "WheelWire")]
4654struct Wheel {
    /// A URL or file path (via `file://`) where the wheel that was locked
    /// against was found. The location does not need to exist in the future,
    /// so this should be treated only as a hint for where to look and/or a
    /// record of where the wheel file originally came from.
4659    url: WheelWireSource,
4660    /// A hash of the built distribution.
4661    ///
4662    /// This is only present for wheels that come from registries and direct
4663    /// URLs. Wheels from git or path dependencies do not have hashes
4664    /// associated with them.
4665    hash: Option<Hash>,
4666    /// The size of the built distribution in bytes.
4667    ///
4668    /// This is only present for wheels that come from registries.
4669    size: Option<u64>,
4670    /// The upload time of the built distribution.
4671    ///
4672    /// This is only present for wheels that come from registries.
4673    upload_time: Option<Timestamp>,
4674    /// The filename of the wheel.
4675    ///
4676    /// This isn't part of the wire format since it's redundant with the
4677    /// URL. But we do use it for various things, and thus compute it at
4678    /// deserialization time. Not being able to extract a wheel filename from a
4679    /// wheel URL is thus a deserialization error.
4680    filename: WheelFilename,
    /// Metadata (hash and size) for the zstandard-compressed wheel, if any.
4682    zstd: Option<ZstdWheel>,
4683}
4684
4685impl Wheel {
4686    fn from_annotated_dist(annotated_dist: &AnnotatedDist) -> Result<Vec<Self>, LockError> {
4687        match annotated_dist.dist {
4688            // We pass empty installed packages for locking.
4689            ResolvedDist::Installed { .. } => unreachable!(),
4690            ResolvedDist::Installable { ref dist, .. } => Self::from_dist(
4691                dist,
4692                annotated_dist.hashes.as_slice(),
4693                annotated_dist.index(),
4694            ),
4695        }
4696    }
4697
4698    fn from_dist(
4699        dist: &Dist,
4700        hashes: &[HashDigest],
4701        index: Option<&IndexUrl>,
4702    ) -> Result<Vec<Self>, LockError> {
4703        match *dist {
4704            Dist::Built(ref built_dist) => Self::from_built_dist(built_dist, hashes, index),
4705            Dist::Source(uv_distribution_types::SourceDist::Registry(ref source_dist)) => {
4706                source_dist
4707                    .wheels
4708                    .iter()
4709                    .filter(|wheel| {
4710                        // Reject distributions from registries that don't match the index URL, as can occur with
4711                        // `--find-links`.
4712                        index.is_some_and(|index| *index == wheel.index)
4713                    })
4714                    .map(Self::from_registry_wheel)
4715                    .collect()
4716            }
4717            Dist::Source(_) => Ok(vec![]),
4718        }
4719    }
4720
4721    fn from_built_dist(
4722        built_dist: &BuiltDist,
4723        hashes: &[HashDigest],
4724        index: Option<&IndexUrl>,
4725    ) -> Result<Vec<Self>, LockError> {
4726        match *built_dist {
4727            BuiltDist::Registry(ref reg_dist) => Self::from_registry_dist(reg_dist, index),
4728            BuiltDist::DirectUrl(ref direct_dist) => {
4729                Ok(vec![Self::from_direct_dist(direct_dist, hashes)])
4730            }
4731            BuiltDist::Path(ref path_dist) => Ok(vec![Self::from_path_dist(path_dist, hashes)]),
4732        }
4733    }
4734
4735    fn from_registry_dist(
4736        reg_dist: &RegistryBuiltDist,
4737        index: Option<&IndexUrl>,
4738    ) -> Result<Vec<Self>, LockError> {
4739        reg_dist
4740            .wheels
4741            .iter()
4742            .filter(|wheel| {
4743                // Reject distributions from registries that don't match the index URL, as can occur with
4744                // `--find-links`.
4745                index.is_some_and(|index| *index == wheel.index)
4746            })
4747            .map(Self::from_registry_wheel)
4748            .collect()
4749    }
4750
4751    fn from_registry_wheel(wheel: &RegistryBuiltWheel) -> Result<Self, LockError> {
4752        let url = match &wheel.index {
4753            IndexUrl::Pypi(_) | IndexUrl::Url(_) => {
4754                let url = normalize_file_location(&wheel.file.url)
4755                    .map_err(LockErrorKind::InvalidUrl)
4756                    .map_err(LockError::from)?;
4757                WheelWireSource::Url { url }
4758            }
4759            IndexUrl::Path(path) => {
4760                let index_path = path
4761                    .to_file_path()
4762                    .map_err(|()| LockErrorKind::UrlToPath { url: path.to_url() })?;
4763                let wheel_url = wheel.file.url.to_url().map_err(LockErrorKind::InvalidUrl)?;
4764
4765                if wheel_url.scheme() == "file" {
4766                    let wheel_path = wheel_url
4767                        .to_file_path()
4768                        .map_err(|()| LockErrorKind::UrlToPath { url: wheel_url })?;
4769                    let path = relative_to(&wheel_path, index_path)
4770                        .or_else(|_| std::path::absolute(&wheel_path))
4771                        .map_err(LockErrorKind::DistributionRelativePath)?
4772                        .into_boxed_path();
4773                    WheelWireSource::Path { path }
4774                } else {
4775                    let url = normalize_file_location(&wheel.file.url)
4776                        .map_err(LockErrorKind::InvalidUrl)
4777                        .map_err(LockError::from)?;
4778                    WheelWireSource::Url { url }
4779                }
4780            }
4781        };
4782        let filename = wheel.filename.clone();
4783        let hash = wheel.file.hashes.iter().max().cloned().map(Hash::from);
4784        let size = wheel.file.size;
4785        let upload_time = wheel
4786            .file
4787            .upload_time_utc_ms
4788            .map(Timestamp::from_millisecond)
4789            .transpose()
4790            .map_err(LockErrorKind::InvalidTimestamp)?;
4791        let zstd = wheel.file.zstd.as_ref().map(|zstd| ZstdWheel {
4792            hash: zstd.hashes.iter().max().cloned().map(Hash::from),
4793            size: zstd.size,
4794        });
4795        Ok(Self {
4796            url,
4797            hash,
4798            size,
4799            upload_time,
4800            filename,
4801            zstd,
4802        })
4803    }
4804
4805    fn from_direct_dist(direct_dist: &DirectUrlBuiltDist, hashes: &[HashDigest]) -> Self {
4806        Self {
4807            url: WheelWireSource::Url {
4808                url: normalize_url(direct_dist.url.to_url()),
4809            },
4810            hash: hashes.iter().max().cloned().map(Hash::from),
4811            size: None,
4812            upload_time: None,
4813            filename: direct_dist.filename.clone(),
4814            zstd: None,
4815        }
4816    }
4817
4818    fn from_path_dist(path_dist: &PathBuiltDist, hashes: &[HashDigest]) -> Self {
4819        Self {
4820            url: WheelWireSource::Filename {
4821                filename: path_dist.filename.clone(),
4822            },
4823            hash: hashes.iter().max().cloned().map(Hash::from),
4824            size: None,
4825            upload_time: None,
4826            filename: path_dist.filename.clone(),
4827            zstd: None,
4828        }
4829    }
4830
4831    pub(crate) fn to_registry_wheel(
4832        &self,
4833        source: &RegistrySource,
4834        root: &Path,
4835    ) -> Result<RegistryBuiltWheel, LockError> {
4836        let filename: WheelFilename = self.filename.clone();
4837
4838        match source {
4839            RegistrySource::Url(url) => {
4840                let file_location = match &self.url {
4841                    WheelWireSource::Url { url: file_url } => {
4842                        FileLocation::AbsoluteUrl(file_url.clone())
4843                    }
4844                    WheelWireSource::Path { .. } | WheelWireSource::Filename { .. } => {
4845                        return Err(LockErrorKind::MissingUrl {
4846                            name: filename.name,
4847                            version: filename.version,
4848                        }
4849                        .into());
4850                    }
4851                };
4852                let file = Box::new(uv_distribution_types::File {
4853                    dist_info_metadata: false,
4854                    filename: SmallString::from(filename.to_string()),
4855                    hashes: self.hash.iter().map(|h| h.0.clone()).collect(),
4856                    requires_python: None,
4857                    size: self.size,
4858                    upload_time_utc_ms: self.upload_time.map(Timestamp::as_millisecond),
4859                    url: file_location,
4860                    yanked: None,
4861                    zstd: self
4862                        .zstd
4863                        .as_ref()
4864                        .map(|zstd| uv_distribution_types::Zstd {
4865                            hashes: zstd.hash.iter().map(|h| h.0.clone()).collect(),
4866                            size: zstd.size,
4867                        })
4868                        .map(Box::new),
4869                });
4870                let index = IndexUrl::from(VerbatimUrl::from_url(
4871                    url.to_url().map_err(LockErrorKind::InvalidUrl)?,
4872                ));
4873                Ok(RegistryBuiltWheel {
4874                    filename,
4875                    file,
4876                    index,
4877                })
4878            }
4879            RegistrySource::Path(index_path) => {
4880                let file_location = match &self.url {
4881                    WheelWireSource::Url { url: file_url } => {
4882                        FileLocation::AbsoluteUrl(file_url.clone())
4883                    }
4884                    WheelWireSource::Path { path: file_path } => {
4885                        let file_path = root.join(index_path).join(file_path);
4886                        let file_url =
4887                            DisplaySafeUrl::from_file_path(&file_path).map_err(|()| {
4888                                LockErrorKind::PathToUrl {
4889                                    path: file_path.into_boxed_path(),
4890                                }
4891                            })?;
4892                        FileLocation::AbsoluteUrl(UrlString::from(file_url))
4893                    }
4894                    WheelWireSource::Filename { .. } => {
4895                        return Err(LockErrorKind::MissingPath {
4896                            name: filename.name,
4897                            version: filename.version,
4898                        }
4899                        .into());
4900                    }
4901                };
4902                let file = Box::new(uv_distribution_types::File {
4903                    dist_info_metadata: false,
4904                    filename: SmallString::from(filename.to_string()),
4905                    hashes: self.hash.iter().map(|h| h.0.clone()).collect(),
4906                    requires_python: None,
4907                    size: self.size,
4908                    upload_time_utc_ms: self.upload_time.map(Timestamp::as_millisecond),
4909                    url: file_location,
4910                    yanked: None,
4911                    zstd: self
4912                        .zstd
4913                        .as_ref()
4914                        .map(|zstd| uv_distribution_types::Zstd {
4915                            hashes: zstd.hash.iter().map(|h| h.0.clone()).collect(),
4916                            size: zstd.size,
4917                        })
4918                        .map(Box::new),
4919                });
4920                let index = IndexUrl::from(
4921                    VerbatimUrl::from_absolute_path(root.join(index_path))
4922                        .map_err(LockErrorKind::RegistryVerbatimUrl)?,
4923                );
4924                Ok(RegistryBuiltWheel {
4925                    filename,
4926                    file,
4927                    index,
4928                })
4929            }
4930        }
4931    }
4932}
4933
4934#[derive(Clone, Debug, serde::Deserialize)]
4935#[serde(rename_all = "kebab-case")]
4936struct WheelWire {
4937    #[serde(flatten)]
4938    url: WheelWireSource,
4939    /// A hash of the built distribution.
4940    ///
4941    /// This is only present for wheels that come from registries and direct
4942    /// URLs. Wheels from git or path dependencies do not have hashes
4943    /// associated with them.
4944    hash: Option<Hash>,
4945    /// The size of the built distribution in bytes.
4946    ///
4947    /// This is only present for wheels that come from registries.
4948    size: Option<u64>,
4949    /// The upload time of the built distribution.
4950    ///
4951    /// This is only present for wheels that come from registries.
4952    #[serde(alias = "upload_time")]
4953    upload_time: Option<Timestamp>,
    /// Metadata (hash and size) for the zstandard-compressed wheel, if any.
4955    #[serde(alias = "zstd")]
4956    zstd: Option<ZstdWheel>,
4957}
4958
4959#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
4960#[serde(untagged, rename_all = "kebab-case")]
4961enum WheelWireSource {
4962    /// Used for all wheels that come from remote sources.
4963    Url {
        /// A URL where the wheel that was locked against was found. The location
        /// does not need to exist in the future, so this should be treated only
        /// as a hint for where to look and/or a record of where the wheel file
        /// originally came from.
4968        url: UrlString,
4969    },
4970    /// Used for wheels that come from local registries (like `--find-links`).
4971    Path {
4972        /// The path to the wheel, relative to the index.
4973        path: Box<Path>,
4974    },
4975    /// Used for path wheels.
4976    ///
    /// We only store the filename for path wheels, since we can't store a
    /// relative path in the URL.
4978    Filename {
4979        /// We duplicate the filename since a lot of code relies on having the filename on the
4980        /// wheel entry.
4981        filename: WheelFilename,
4982    },
4983}
4984
4985impl Wheel {
4986    /// Returns the TOML representation of this wheel.
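    ///
    /// As a rough illustration (the values are hypothetical), a registry wheel
    /// with a zstandard-compressed counterpart is rendered along the lines of:
    ///
    /// ```text
    /// { url = "https://example.org/wheels/example-1.0.0-py3-none-any.whl", hash = "sha256:0123abcd...", size = 20480, upload-time = "2024-01-01T00:00:00Z", zstd = { hash = "sha256:4567cdef...", size = 16384 } }
    /// ```
    ///
    /// A `--find-links` wheel uses `path = "..."` and a local path wheel uses
    /// `filename = "..."` in place of `url`.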
4987    fn to_toml(&self) -> Result<InlineTable, toml_edit::ser::Error> {
4988        let mut table = InlineTable::new();
4989        match &self.url {
4990            WheelWireSource::Url { url } => {
4991                table.insert("url", Value::from(url.as_ref()));
4992            }
4993            WheelWireSource::Path { path } => {
4994                table.insert("path", Value::from(PortablePath::from(path).to_string()));
4995            }
4996            WheelWireSource::Filename { filename } => {
4997                table.insert("filename", Value::from(filename.to_string()));
4998            }
4999        }
5000        if let Some(ref hash) = self.hash {
5001            table.insert("hash", Value::from(hash.to_string()));
5002        }
5003        if let Some(size) = self.size {
5004            table.insert(
5005                "size",
5006                toml_edit::ser::ValueSerializer::new().serialize_u64(size)?,
5007            );
5008        }
5009        if let Some(upload_time) = self.upload_time {
5010            table.insert("upload-time", Value::from(upload_time.to_string()));
5011        }
5012        if let Some(zstd) = &self.zstd {
5013            let mut inner = InlineTable::new();
5014            if let Some(ref hash) = zstd.hash {
5015                inner.insert("hash", Value::from(hash.to_string()));
5016            }
5017            if let Some(size) = zstd.size {
5018                inner.insert(
5019                    "size",
5020                    toml_edit::ser::ValueSerializer::new().serialize_u64(size)?,
5021                );
5022            }
5023            table.insert("zstd", Value::from(inner));
5024        }
5025        Ok(table)
5026    }
5027}
5028
5029impl TryFrom<WheelWire> for Wheel {
5030    type Error = String;
5031
5032    fn try_from(wire: WheelWire) -> Result<Self, String> {
5033        let filename = match &wire.url {
5034            WheelWireSource::Url { url } => {
5035                let filename = url.filename().map_err(|err| err.to_string())?;
5036                filename.parse::<WheelFilename>().map_err(|err| {
5037                    format!("failed to parse `{filename}` as wheel filename: {err}")
5038                })?
5039            }
5040            WheelWireSource::Path { path } => {
5041                let filename = path
5042                    .file_name()
5043                    .and_then(|file_name| file_name.to_str())
5044                    .ok_or_else(|| {
5045                        format!("path `{}` has no filename component", path.display())
5046                    })?;
5047                filename.parse::<WheelFilename>().map_err(|err| {
5048                    format!("failed to parse `{filename}` as wheel filename: {err}")
5049                })?
5050            }
5051            WheelWireSource::Filename { filename } => filename.clone(),
5052        };
5053
5054        Ok(Self {
5055            url: wire.url,
5056            hash: wire.hash,
5057            size: wire.size,
5058            upload_time: wire.upload_time,
5059            zstd: wire.zstd,
5060            filename,
5061        })
5062    }
5063}
5064
5065/// A single dependency of a package in a lockfile.
5066#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
5067pub struct Dependency {
5068    package_id: PackageId,
5069    extra: BTreeSet<ExtraName>,
5070    /// A marker simplified from the PEP 508 marker in `complexified_marker`
5071    /// by assuming `requires-python` is satisfied. So if
5072    /// `requires-python = '>=3.8'`, then
5073    /// `python_version >= '3.8' and python_version < '3.12'`
5074    /// gets simplified to `python_version < '3.12'`.
5075    ///
5076    /// Generally speaking, this marker should not be exposed to
5077    /// anything outside this module unless it's for a specialized use
5078    /// case. But specifically, it should never be used to evaluate
5079    /// against a marker environment or for disjointness checks or any
5080    /// other kind of marker algebra.
5081    ///
5082    /// It exists because there are some cases where we do actually
5083    /// want to compare markers in their "simplified" form. For
5084    /// example, when collapsing the extras on duplicate dependencies.
5085    /// Even if a dependency has different complexified markers,
5086    /// they might have identical markers once simplified. And since
5087    /// `requires-python` applies to the entire lock file, it's
5088    /// acceptable to do comparisons on the simplified form.
5089    simplified_marker: SimplifiedMarkerTree,
5090    /// The "complexified" marker is a universal marker whose PEP 508
5091    /// marker can stand on its own independent of `requires-python`.
5092    /// It can be safely used for any kind of marker algebra.
5093    complexified_marker: UniversalMarker,
5094}
5095
5096impl Dependency {
5097    fn new(
5098        requires_python: &RequiresPython,
5099        package_id: PackageId,
5100        extra: BTreeSet<ExtraName>,
5101        complexified_marker: UniversalMarker,
5102    ) -> Self {
5103        let simplified_marker =
5104            SimplifiedMarkerTree::new(requires_python, complexified_marker.combined());
5105        let complexified_marker = simplified_marker.into_marker(requires_python);
5106        Self {
5107            package_id,
5108            extra,
5109            simplified_marker,
5110            complexified_marker: UniversalMarker::from_combined(complexified_marker),
5111        }
5112    }
5113
5114    fn from_annotated_dist(
5115        requires_python: &RequiresPython,
5116        annotated_dist: &AnnotatedDist,
5117        complexified_marker: UniversalMarker,
5118        root: &Path,
5119    ) -> Result<Self, LockError> {
5120        let package_id = PackageId::from_annotated_dist(annotated_dist, root)?;
5121        let extra = annotated_dist.extra.iter().cloned().collect();
5122        Ok(Self::new(
5123            requires_python,
5124            package_id,
5125            extra,
5126            complexified_marker,
5127        ))
5128    }
5129
5130    /// Returns the TOML representation of this dependency.
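    ///
    /// As a rough sketch (the values are hypothetical, and the identifying keys
    /// come from `PackageId::to_toml`), the table carries entries along the
    /// lines of:
    ///
    /// ```text
    /// extra = ["cli"]
    /// marker = "python_full_version < '3.12'"
    /// ```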
5131    fn to_toml(
5132        &self,
5133        _requires_python: &RequiresPython,
5134        dist_count_by_name: &FxHashMap<PackageName, u64>,
5135    ) -> Table {
5136        let mut table = Table::new();
5137        self.package_id
5138            .to_toml(Some(dist_count_by_name), &mut table);
5139        if !self.extra.is_empty() {
5140            let extra_array = self
5141                .extra
5142                .iter()
5143                .map(ToString::to_string)
5144                .collect::<Array>();
5145            table.insert("extra", value(extra_array));
5146        }
5147        if let Some(marker) = self.simplified_marker.try_to_string() {
5148            table.insert("marker", value(marker));
5149        }
5150
5151        table
5152    }
5153
5154    /// Returns the package name of this dependency.
5155    pub fn package_name(&self) -> &PackageName {
5156        &self.package_id.name
5157    }
5158
5159    /// Returns the extras specified on this dependency.
5160    pub fn extra(&self) -> &BTreeSet<ExtraName> {
5161        &self.extra
5162    }
5163}
5164
5165impl Display for Dependency {
5166    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
5167        match (self.extra.is_empty(), self.package_id.version.as_ref()) {
5168            (true, Some(version)) => write!(f, "{}=={}", self.package_id.name, version),
5169            (true, None) => write!(f, "{}", self.package_id.name),
5170            (false, Some(version)) => write!(
5171                f,
5172                "{}[{}]=={}",
5173                self.package_id.name,
5174                self.extra.iter().join(","),
5175                version
5176            ),
5177            (false, None) => write!(
5178                f,
5179                "{}[{}]",
5180                self.package_id.name,
5181                self.extra.iter().join(",")
5182            ),
5183        }
5184    }
5185}
5186
5187/// A single dependency of a package in a lockfile.
5188#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, serde::Deserialize)]
5189#[serde(rename_all = "kebab-case")]
5190struct DependencyWire {
5191    #[serde(flatten)]
5192    package_id: PackageIdForDependency,
5193    #[serde(default)]
5194    extra: BTreeSet<ExtraName>,
5195    #[serde(default)]
5196    marker: SimplifiedMarkerTree,
5197}
5198
5199impl DependencyWire {
5200    fn unwire(
5201        self,
5202        requires_python: &RequiresPython,
5203        unambiguous_package_ids: &FxHashMap<PackageName, PackageId>,
5204    ) -> Result<Dependency, LockError> {
5205        let complexified_marker = self.marker.into_marker(requires_python);
5206        Ok(Dependency {
5207            package_id: self.package_id.unwire(unambiguous_package_ids)?,
5208            extra: self.extra,
5209            simplified_marker: self.marker,
5210            complexified_marker: UniversalMarker::from_combined(complexified_marker),
5211        })
5212    }
5213}
5214
5215/// A single hash for a distribution artifact in a lockfile.
5216///
5217/// A hash is encoded as a single TOML string in the format
5218/// `{algorithm}:{digest}`.
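///
/// For example (the digest below is hypothetical and truncated), a SHA-256
/// hash is written as:
///
/// ```text
/// sha256:0123456789abcdef...
/// ```
///
/// Parsing splits on the first `:` and rejects unrecognized algorithm names.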
5219#[derive(Clone, Debug, PartialEq, Eq)]
5220struct Hash(HashDigest);
5221
5222impl From<HashDigest> for Hash {
5223    fn from(hd: HashDigest) -> Self {
5224        Self(hd)
5225    }
5226}
5227
5228impl FromStr for Hash {
5229    type Err = HashParseError;
5230
5231    fn from_str(s: &str) -> Result<Self, HashParseError> {
5232        let (algorithm, digest) = s.split_once(':').ok_or(HashParseError(
5233            "expected '{algorithm}:{digest}', but found no ':' in hash digest",
5234        ))?;
5235        let algorithm = algorithm
5236            .parse()
5237            .map_err(|_| HashParseError("unrecognized hash algorithm"))?;
5238        Ok(Self(HashDigest {
5239            algorithm,
5240            digest: digest.into(),
5241        }))
5242    }
5243}
5244
5245impl Display for Hash {
5246    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
5247        write!(f, "{}:{}", self.0.algorithm, self.0.digest)
5248    }
5249}
5250
5251impl<'de> serde::Deserialize<'de> for Hash {
5252    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
5253    where
5254        D: serde::de::Deserializer<'de>,
5255    {
5256        struct Visitor;
5257
5258        impl serde::de::Visitor<'_> for Visitor {
5259            type Value = Hash;
5260
5261            fn expecting(&self, f: &mut Formatter) -> std::fmt::Result {
5262                f.write_str("a string")
5263            }
5264
5265            fn visit_str<E: serde::de::Error>(self, v: &str) -> Result<Self::Value, E> {
5266                Hash::from_str(v).map_err(serde::de::Error::custom)
5267            }
5268        }
5269
5270        deserializer.deserialize_str(Visitor)
5271    }
5272}
5273
5274impl From<Hash> for Hashes {
5275    fn from(value: Hash) -> Self {
5276        match value.0.algorithm {
5277            HashAlgorithm::Md5 => Self {
5278                md5: Some(value.0.digest),
5279                sha256: None,
5280                sha384: None,
5281                sha512: None,
5282                blake2b: None,
5283            },
5284            HashAlgorithm::Sha256 => Self {
5285                md5: None,
5286                sha256: Some(value.0.digest),
5287                sha384: None,
5288                sha512: None,
5289                blake2b: None,
5290            },
5291            HashAlgorithm::Sha384 => Self {
5292                md5: None,
5293                sha256: None,
5294                sha384: Some(value.0.digest),
5295                sha512: None,
5296                blake2b: None,
5297            },
5298            HashAlgorithm::Sha512 => Self {
5299                md5: None,
5300                sha256: None,
5301                sha384: None,
5302                sha512: Some(value.0.digest),
5303                blake2b: None,
5304            },
5305            HashAlgorithm::Blake2b => Self {
5306                md5: None,
5307                sha256: None,
5308                sha384: None,
5309                sha512: None,
5310                blake2b: Some(value.0.digest),
5311            },
5312        }
5313    }
5314}
5315
5316/// Convert a [`FileLocation`] into a normalized [`UrlString`].
5317fn normalize_file_location(location: &FileLocation) -> Result<UrlString, ToUrlError> {
5318    match location {
5319        FileLocation::AbsoluteUrl(absolute) => Ok(absolute.without_fragment().into_owned()),
5320        FileLocation::RelativeUrl(_, _) => Ok(normalize_url(location.to_url()?)),
5321    }
5322}
5323
5324/// Convert a [`DisplaySafeUrl`] into a normalized [`UrlString`] by removing the fragment.
5325fn normalize_url(mut url: DisplaySafeUrl) -> UrlString {
5326    url.set_fragment(None);
5327    UrlString::from(url)
5328}
5329
5330/// Normalize a [`Requirement`], which could come from a lockfile, a `pyproject.toml`, etc.
5331///
5332/// Performs the following steps:
5333///
5334/// 1. Removes any sensitive credentials.
5335/// 2. Ensures that the lock and install paths are appropriately framed with respect to the
5336///    current [`Workspace`].
5337/// 3. Removes the `origin` field, which is only used in `requirements.txt`.
5338/// 4. Simplifies the markers using the provided [`RequiresPython`] instance.
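///
/// For example (illustrative), a Git source whose repository URL embeds
/// credentials, such as `https://user:token@github.com/org/repo`, is
/// re-written to `https://github.com/org/repo` before being stored, and
/// relative path sources are re-anchored against the given `root` so that the
/// resulting requirement is stable regardless of where it was declared.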
5339fn normalize_requirement(
5340    mut requirement: Requirement,
5341    root: &Path,
5342    requires_python: &RequiresPython,
5343) -> Result<Requirement, LockError> {
5344    // Sort the extras and groups for consistency.
5345    requirement.extras.sort();
5346    requirement.groups.sort();
5347
5348    // Normalize the requirement source.
5349    match requirement.source {
5350        RequirementSource::Git {
5351            git,
5352            subdirectory,
5353            url: _,
5354        } => {
5355            // Reconstruct the Git URL.
5356            let git = {
5357                let mut repository = git.repository().clone();
5358
5359                // Remove the credentials.
5360                repository.remove_credentials();
5361
5362                // Remove the fragment and query from the URL; they're already present in the source.
5363                repository.set_fragment(None);
5364                repository.set_query(None);
5365
5366                GitUrl::from_fields(
5367                    repository,
5368                    git.reference().clone(),
5369                    git.precise(),
5370                    git.lfs(),
5371                )?
5372            };
5373
5374            // Reconstruct the PEP 508 URL from the underlying data.
5375            let url = DisplaySafeUrl::from(ParsedGitUrl {
5376                url: git.clone(),
5377                subdirectory: subdirectory.clone(),
5378            });
5379
5380            Ok(Requirement {
5381                name: requirement.name,
5382                extras: requirement.extras,
5383                groups: requirement.groups,
5384                marker: requires_python.simplify_markers(requirement.marker),
5385                source: RequirementSource::Git {
5386                    git,
5387                    subdirectory,
5388                    url: VerbatimUrl::from_url(url),
5389                },
5390                origin: None,
5391            })
5392        }
5393        RequirementSource::Path {
5394            install_path,
5395            ext,
5396            url: _,
5397        } => {
5398            let install_path =
5399                uv_fs::normalize_path_buf(root.join(&install_path)).into_boxed_path();
5400            let url = VerbatimUrl::from_normalized_path(&install_path)
5401                .map_err(LockErrorKind::RequirementVerbatimUrl)?;
5402
5403            Ok(Requirement {
5404                name: requirement.name,
5405                extras: requirement.extras,
5406                groups: requirement.groups,
5407                marker: requires_python.simplify_markers(requirement.marker),
5408                source: RequirementSource::Path {
5409                    install_path,
5410                    ext,
5411                    url,
5412                },
5413                origin: None,
5414            })
5415        }
5416        RequirementSource::Directory {
5417            install_path,
5418            editable,
5419            r#virtual,
5420            url: _,
5421        } => {
5422            let install_path =
5423                uv_fs::normalize_path_buf(root.join(&install_path)).into_boxed_path();
5424            let url = VerbatimUrl::from_normalized_path(&install_path)
5425                .map_err(LockErrorKind::RequirementVerbatimUrl)?;
5426
5427            Ok(Requirement {
5428                name: requirement.name,
5429                extras: requirement.extras,
5430                groups: requirement.groups,
5431                marker: requires_python.simplify_markers(requirement.marker),
5432                source: RequirementSource::Directory {
5433                    install_path,
5434                    editable: Some(editable.unwrap_or(false)),
5435                    r#virtual: Some(r#virtual.unwrap_or(false)),
5436                    url,
5437                },
5438                origin: None,
5439            })
5440        }
5441        RequirementSource::Registry {
5442            specifier,
5443            index,
5444            conflict,
5445        } => {
5446            // Round-trip the index to remove anything apart from the URL.
5447            let index = index
5448                .map(|index| index.url.into_url())
5449                .map(|mut index| {
5450                    index.remove_credentials();
5451                    index
5452                })
5453                .map(|index| IndexMetadata::from(IndexUrl::from(VerbatimUrl::from_url(index))));
5454            Ok(Requirement {
5455                name: requirement.name,
5456                extras: requirement.extras,
5457                groups: requirement.groups,
5458                marker: requires_python.simplify_markers(requirement.marker),
5459                source: RequirementSource::Registry {
5460                    specifier,
5461                    index,
5462                    conflict,
5463                },
5464                origin: None,
5465            })
5466        }
5467        RequirementSource::Url {
5468            mut location,
5469            subdirectory,
5470            ext,
5471            url: _,
5472        } => {
5473            // Remove the credentials.
5474            location.remove_credentials();
5475
5476            // Remove the fragment from the URL; it's already present in the source.
5477            location.set_fragment(None);
5478
5479            // Reconstruct the PEP 508 URL from the underlying data.
5480            let url = DisplaySafeUrl::from(ParsedArchiveUrl {
5481                url: location.clone(),
5482                subdirectory: subdirectory.clone(),
5483                ext,
5484            });
5485
5486            Ok(Requirement {
5487                name: requirement.name,
5488                extras: requirement.extras,
5489                groups: requirement.groups,
5490                marker: requires_python.simplify_markers(requirement.marker),
5491                source: RequirementSource::Url {
5492                    location,
5493                    subdirectory,
5494                    ext,
5495                    url: VerbatimUrl::from_url(url),
5496                },
5497                origin: None,
5498            })
5499        }
5500    }
5501}
5502
5503#[derive(Debug)]
5504pub struct LockError {
5505    kind: Box<LockErrorKind>,
5506    hint: Option<WheelTagHint>,
5507}
5508
5509impl std::error::Error for LockError {
5510    fn source(&self) -> Option<&(dyn Error + 'static)> {
5511        self.kind.source()
5512    }
5513}
5514
5515impl std::fmt::Display for LockError {
5516    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
5517        write!(f, "{}", self.kind)?;
5518        if let Some(hint) = &self.hint {
5519            write!(f, "\n\n{hint}")?;
5520        }
5521        Ok(())
5522    }
5523}
5524
5525impl LockError {
5526    /// Returns true if the [`LockError`] is a resolver error.
5527    pub fn is_resolution(&self) -> bool {
5528        matches!(&*self.kind, LockErrorKind::Resolution { .. })
5529    }
5530}
5531
5532impl<E> From<E> for LockError
5533where
5534    LockErrorKind: From<E>,
5535{
5536    fn from(err: E) -> Self {
5537        Self {
5538            kind: Box::new(LockErrorKind::from(err)),
5539            hint: None,
5540        }
5541    }
5542}
5543
5544#[derive(Debug, Clone, PartialEq, Eq)]
5545#[allow(clippy::enum_variant_names)]
5546enum WheelTagHint {
5547    /// None of the available wheels for a package have a compatible Python language tag (e.g.,
5548    /// `cp310` in `cp310-abi3-manylinux_2_17_x86_64.whl`).
5549    LanguageTags {
5550        package: PackageName,
5551        version: Option<Version>,
5552        tags: BTreeSet<LanguageTag>,
5553        best: Option<LanguageTag>,
5554    },
5555    /// None of the available wheels for a package have a compatible ABI tag (e.g., `abi3` in
5556    /// `cp310-abi3-manylinux_2_17_x86_64.whl`).
5557    AbiTags {
5558        package: PackageName,
5559        version: Option<Version>,
5560        tags: BTreeSet<AbiTag>,
5561        best: Option<AbiTag>,
5562    },
5563    /// None of the available wheels for a package have a compatible platform tag (e.g.,
5564    /// `manylinux_2_17_x86_64` in `cp310-abi3-manylinux_2_17_x86_64.whl`).
5565    PlatformTags {
5566        package: PackageName,
5567        version: Option<Version>,
5568        tags: BTreeSet<PlatformTag>,
5569        best: Option<PlatformTag>,
5570        markers: MarkerEnvironment,
5571    },
5572}
5573
5574impl WheelTagHint {
5575    /// Generate a [`WheelTagHint`] from the given (incompatible) wheels.
5576    fn from_wheels(
5577        name: &PackageName,
5578        version: Option<&Version>,
5579        filenames: &[&WheelFilename],
5580        tags: &Tags,
5581        markers: &MarkerEnvironment,
5582    ) -> Option<Self> {
5583        let incompatibility = filenames
5584            .iter()
5585            .map(|filename| {
5586                tags.compatibility(
5587                    filename.python_tags(),
5588                    filename.abi_tags(),
5589                    filename.platform_tags(),
5590                )
5591            })
5592            .max()?;
5593        match incompatibility {
5594            TagCompatibility::Incompatible(IncompatibleTag::Python) => {
5595                let best = tags.python_tag();
5596                let tags = Self::python_tags(filenames.iter().copied()).collect::<BTreeSet<_>>();
5597                if tags.is_empty() {
5598                    None
5599                } else {
5600                    Some(Self::LanguageTags {
5601                        package: name.clone(),
5602                        version: version.cloned(),
5603                        tags,
5604                        best,
5605                    })
5606                }
5607            }
5608            TagCompatibility::Incompatible(IncompatibleTag::Abi) => {
5609                let best = tags.abi_tag();
5610                let tags = Self::abi_tags(filenames.iter().copied())
5611                    // Ignore `none`, which is universally compatible.
5612                    //
5613                    // As an example, `none` can appear here if we're solving for Python 3.13, and
5614                    // the distribution includes a wheel for `cp312-none-macosx_11_0_arm64`.
5615                    //
5616                    // In that case, the wheel isn't compatible, but when solving for Python 3.13,
5617                    // the `cp312` Python tag _can_ be compatible (e.g., for `cp312-abi3-macosx_11_0_arm64.whl`),
5618                    // so this is considered an ABI incompatibility rather than Python incompatibility.
5619                    .filter(|tag| *tag != AbiTag::None)
5620                    .collect::<BTreeSet<_>>();
5621                if tags.is_empty() {
5622                    None
5623                } else {
5624                    Some(Self::AbiTags {
5625                        package: name.clone(),
5626                        version: version.cloned(),
5627                        tags,
5628                        best,
5629                    })
5630                }
5631            }
5632            TagCompatibility::Incompatible(IncompatibleTag::Platform) => {
5633                let best = tags.platform_tag().cloned();
5634                let incompatible_tags = Self::platform_tags(filenames.iter().copied(), tags)
5635                    .cloned()
5636                    .collect::<BTreeSet<_>>();
5637                if incompatible_tags.is_empty() {
5638                    None
5639                } else {
5640                    Some(Self::PlatformTags {
5641                        package: name.clone(),
5642                        version: version.cloned(),
5643                        tags: incompatible_tags,
5644                        best,
5645                        markers: markers.clone(),
5646                    })
5647                }
5648            }
5649            _ => None,
5650        }
5651    }
5652
5653    /// Returns an iterator over the compatible Python tags of the available wheels.
5654    fn python_tags<'a>(
5655        filenames: impl Iterator<Item = &'a WheelFilename> + 'a,
5656    ) -> impl Iterator<Item = LanguageTag> + 'a {
5657        filenames.flat_map(WheelFilename::python_tags).copied()
5658    }
5659
5660    /// Returns an iterator over the compatible ABI tags of the available wheels.
5661    fn abi_tags<'a>(
5662        filenames: impl Iterator<Item = &'a WheelFilename> + 'a,
5663    ) -> impl Iterator<Item = AbiTag> + 'a {
5664        filenames.flat_map(WheelFilename::abi_tags).copied()
5665    }
5666
5667    /// Returns the set of platform tags for the distribution that are ABI-compatible with the given
5668    /// tags.
5669    fn platform_tags<'a>(
5670        filenames: impl Iterator<Item = &'a WheelFilename> + 'a,
5671        tags: &'a Tags,
5672    ) -> impl Iterator<Item = &'a PlatformTag> + 'a {
5673        filenames.flat_map(move |filename| {
5674            if filename.python_tags().iter().any(|wheel_py| {
5675                filename
5676                    .abi_tags()
5677                    .iter()
5678                    .any(|wheel_abi| tags.is_compatible_abi(*wheel_py, *wheel_abi))
5679            }) {
5680                filename.platform_tags().iter()
5681            } else {
5682                [].iter()
5683            }
5684        })
5685    }
5686
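    /// Build a `tool.uv.required-environments` marker string from the current
    /// environment's `sys_platform` and (when available) `platform_machine`
    /// values.
    ///
    /// For example (illustrative), on an arm64 macOS interpreter this yields
    /// `sys_platform == 'darwin' and platform_machine == 'arm64'`.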
5687    fn suggest_environment_marker(markers: &MarkerEnvironment) -> String {
5688        let sys_platform = markers.sys_platform();
5689        let platform_machine = markers.platform_machine();
5690
5691        // Generate the marker string based on actual environment values
5692        if platform_machine.is_empty() {
5693            format!("sys_platform == '{sys_platform}'")
5694        } else {
5695            format!("sys_platform == '{sys_platform}' and platform_machine == '{platform_machine}'")
5696        }
5697    }
5698}
5699
5700impl std::fmt::Display for WheelTagHint {
5701    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
5702        match self {
5703            Self::LanguageTags {
5704                package,
5705                version,
5706                tags,
5707                best,
5708            } => {
5709                if let Some(best) = best {
5710                    let s = if tags.len() == 1 { "" } else { "s" };
5711                    let best = if let Some(pretty) = best.pretty() {
5712                        format!("{} (`{}`)", pretty.cyan(), best.cyan())
5713                    } else {
5714                        format!("{}", best.cyan())
5715                    };
5716                    if let Some(version) = version {
5717                        write!(
5718                            f,
5719                            "{}{} You're using {}, but `{}` ({}) only has wheels with the following Python implementation tag{s}: {}",
5720                            "hint".bold().cyan(),
5721                            ":".bold(),
5722                            best,
5723                            package.cyan(),
5724                            format!("v{version}").cyan(),
5725                            tags.iter()
5726                                .map(|tag| format!("`{}`", tag.cyan()))
5727                                .join(", "),
5728                        )
5729                    } else {
5730                        write!(
5731                            f,
5732                            "{}{} You're using {}, but `{}` only has wheels with the following Python implementation tag{s}: {}",
5733                            "hint".bold().cyan(),
5734                            ":".bold(),
5735                            best,
5736                            package.cyan(),
5737                            tags.iter()
5738                                .map(|tag| format!("`{}`", tag.cyan()))
5739                                .join(", "),
5740                        )
5741                    }
5742                } else {
5743                    let s = if tags.len() == 1 { "" } else { "s" };
5744                    if let Some(version) = version {
5745                        write!(
5746                            f,
5747                            "{}{} Wheels are available for `{}` ({}) with the following Python implementation tag{s}: {}",
5748                            "hint".bold().cyan(),
5749                            ":".bold(),
5750                            package.cyan(),
5751                            format!("v{version}").cyan(),
5752                            tags.iter()
5753                                .map(|tag| format!("`{}`", tag.cyan()))
5754                                .join(", "),
5755                        )
5756                    } else {
5757                        write!(
5758                            f,
5759                            "{}{} Wheels are available for `{}` with the following Python implementation tag{s}: {}",
5760                            "hint".bold().cyan(),
5761                            ":".bold(),
5762                            package.cyan(),
5763                            tags.iter()
5764                                .map(|tag| format!("`{}`", tag.cyan()))
5765                                .join(", "),
5766                        )
5767                    }
5768                }
5769            }
5770            Self::AbiTags {
5771                package,
5772                version,
5773                tags,
5774                best,
5775            } => {
5776                if let Some(best) = best {
5777                    let s = if tags.len() == 1 { "" } else { "s" };
5778                    let best = if let Some(pretty) = best.pretty() {
5779                        format!("{} (`{}`)", pretty.cyan(), best.cyan())
5780                    } else {
5781                        format!("{}", best.cyan())
5782                    };
5783                    if let Some(version) = version {
5784                        write!(
5785                            f,
5786                            "{}{} You're using {}, but `{}` ({}) only has wheels with the following Python ABI tag{s}: {}",
5787                            "hint".bold().cyan(),
5788                            ":".bold(),
5789                            best,
5790                            package.cyan(),
5791                            format!("v{version}").cyan(),
5792                            tags.iter()
5793                                .map(|tag| format!("`{}`", tag.cyan()))
5794                                .join(", "),
5795                        )
5796                    } else {
5797                        write!(
5798                            f,
5799                            "{}{} You're using {}, but `{}` only has wheels with the following Python ABI tag{s}: {}",
5800                            "hint".bold().cyan(),
5801                            ":".bold(),
5802                            best,
5803                            package.cyan(),
5804                            tags.iter()
5805                                .map(|tag| format!("`{}`", tag.cyan()))
5806                                .join(", "),
5807                        )
5808                    }
5809                } else {
5810                    let s = if tags.len() == 1 { "" } else { "s" };
5811                    if let Some(version) = version {
5812                        write!(
5813                            f,
5814                            "{}{} Wheels are available for `{}` ({}) with the following Python ABI tag{s}: {}",
5815                            "hint".bold().cyan(),
5816                            ":".bold(),
5817                            package.cyan(),
5818                            format!("v{version}").cyan(),
5819                            tags.iter()
5820                                .map(|tag| format!("`{}`", tag.cyan()))
5821                                .join(", "),
5822                        )
5823                    } else {
5824                        write!(
5825                            f,
5826                            "{}{} Wheels are available for `{}` with the following Python ABI tag{s}: {}",
5827                            "hint".bold().cyan(),
5828                            ":".bold(),
5829                            package.cyan(),
5830                            tags.iter()
5831                                .map(|tag| format!("`{}`", tag.cyan()))
5832                                .join(", "),
5833                        )
5834                    }
5835                }
5836            }
5837            Self::PlatformTags {
5838                package,
5839                version,
5840                tags,
5841                best,
5842                markers,
5843            } => {
5844                let s = if tags.len() == 1 { "" } else { "s" };
5845                if let Some(best) = best {
5846                    let example_marker = Self::suggest_environment_marker(markers);
5847                    let best = if let Some(pretty) = best.pretty() {
5848                        format!("{} (`{}`)", pretty.cyan(), best.cyan())
5849                    } else {
5850                        format!("`{}`", best.cyan())
5851                    };
5852                    let package_ref = if let Some(version) = version {
5853                        format!("`{}` ({})", package.cyan(), format!("v{version}").cyan())
5854                    } else {
5855                        format!("`{}`", package.cyan())
5856                    };
5857                    write!(
5858                        f,
5859                        "{}{} You're on {}, but {} only has wheels for the following platform{s}: {}; consider adding {} to `{}` to ensure uv resolves to a version with compatible wheels",
5860                        "hint".bold().cyan(),
5861                        ":".bold(),
5862                        best,
5863                        package_ref,
5864                        tags.iter()
5865                            .map(|tag| format!("`{}`", tag.cyan()))
5866                            .join(", "),
5867                        format!("\"{example_marker}\"").cyan(),
5868                        "tool.uv.required-environments".green()
5869                    )
5870                } else {
5871                    if let Some(version) = version {
5872                        write!(
5873                            f,
5874                            "{}{} Wheels are available for `{}` ({}) on the following platform{s}: {}",
5875                            "hint".bold().cyan(),
5876                            ":".bold(),
5877                            package.cyan(),
5878                            format!("v{version}").cyan(),
5879                            tags.iter()
5880                                .map(|tag| format!("`{}`", tag.cyan()))
5881                                .join(", "),
5882                        )
5883                    } else {
5884                        write!(
5885                            f,
5886                            "{}{} Wheels are available for `{}` on the following platform{s}: {}",
5887                            "hint".bold().cyan(),
5888                            ":".bold(),
5889                            package.cyan(),
5890                            tags.iter()
5891                                .map(|tag| format!("`{}`", tag.cyan()))
5892                                .join(", "),
5893                        )
5894                    }
5895                }
5896            }
5897        }
5898    }
5899}
5900
5901/// An error that occurs when generating a `Lock` data structure.
5902///
5903/// These errors sometimes indicate a programming bug. For example, if there
5904/// are two or more duplicate distributions given to `Lock::new`, then an
5905/// error is returned. It's likely that the fault lies with the caller in
5906/// such cases.
5907#[derive(Debug, thiserror::Error)]
5908enum LockErrorKind {
5909    /// An error that occurs when multiple packages with the same
5910    /// ID were found.
5911    #[error("Found duplicate package `{id}`", id = id.cyan())]
5912    DuplicatePackage {
5913        /// The ID of the conflicting package.
5914        id: PackageId,
5915    },
5916    /// An error that occurs when there are multiple dependencies for the
5917    /// same package that have identical identifiers.
5918    #[error("For package `{id}`, found duplicate dependency `{dependency}`", id = id.cyan(), dependency = dependency.cyan())]
5919    DuplicateDependency {
5920        /// The ID of the package for which a duplicate dependency was
5921        /// found.
5922        id: PackageId,
5923        /// The ID of the conflicting dependency.
5924        dependency: Dependency,
5925    },
5926    /// An error that occurs when there are multiple dependencies for the
5927    /// same package that have identical identifiers, as part of
5928    /// that package's optional dependencies.
5929    #[error("For package `{id}`, found duplicate dependency `{dependency}`", id = format!("{id}[{extra}]").cyan(), dependency = dependency.cyan())]
5930    DuplicateOptionalDependency {
5931        /// The ID of the package for which a duplicate dependency was
5932        /// found.
5933        id: PackageId,
5934        /// The name of the extra.
5935        extra: ExtraName,
5936        /// The ID of the conflicting dependency.
5937        dependency: Dependency,
5938    },
5939    /// An error that occurs when there are multiple dependencies for the
5940    /// same package that have identical identifiers, as part of
5941    /// that package's development dependencies.
5942    #[error("For package `{id}`, found duplicate dependency `{dependency}`", id = format!("{id}:{group}").cyan(), dependency = dependency.cyan())]
5943    DuplicateDevDependency {
5944        /// The ID of the package for which a duplicate dependency was
5945        /// found.
5946        id: PackageId,
5947        /// The name of the dev dependency group.
5948        group: GroupName,
5949        /// The ID of the conflicting dependency.
5950        dependency: Dependency,
5951    },
5952    /// An error that occurs when the URL to a file for a wheel or
5953    /// source dist could not be converted to a structured `url::Url`.
5954    #[error(transparent)]
5955    InvalidUrl(
5956        /// The underlying error that occurred. This includes the
5957        /// errant URL in its error message.
5958        #[from]
5959        ToUrlError,
5960    ),
5961    /// An error that occurs when the extension can't be determined
5962    /// for a given wheel or source distribution.
5963    #[error("Failed to parse file extension for `{id}`; expected one of: {err}", id = id.cyan())]
5964    MissingExtension {
5965        /// The filename that was expected to have an extension.
5966        id: PackageId,
5967        /// The list of valid extensions that were expected.
5968        err: ExtensionError,
5969    },
5970    /// Failed to parse a Git source URL.
5971    #[error("Failed to parse Git URL")]
5972    InvalidGitSourceUrl(
5973        /// The underlying error that occurred. This includes the
5974        /// errant URL in the message.
5975        #[source]
5976        SourceParseError,
5977    ),
5978    #[error("Failed to parse timestamp")]
5979    InvalidTimestamp(
5980        /// The underlying error that occurred. This includes the
5981        /// errant timestamp in the message.
5982        #[source]
5983        jiff::Error,
5984    ),
5985    /// An error that occurs when there's an unrecognized dependency.
5986    ///
5987    /// That is, a dependency for a package that isn't in the lockfile.
5988    #[error("For package `{id}`, found dependency `{dependency}` with no locked package", id = id.cyan(), dependency = dependency.cyan())]
5989    UnrecognizedDependency {
5990        /// The ID of the package that has an unrecognized dependency.
5991        id: PackageId,
5992        /// The ID of the dependency that doesn't have a corresponding package
5993        /// entry.
5994        dependency: Dependency,
5995    },
5996    /// An error that occurs when a hash is expected (or not) for a particular
5997    /// artifact, but one was not found (or was).
5998    #[error("Since the package `{id}` comes from a {source} dependency, a hash was {expected} but one was not found for {artifact_type}", id = id.cyan(), source = id.source.name(), expected = if *expected { "expected" } else { "not expected" })]
5999    Hash {
6000        /// The ID of the package that has a missing hash.
6001        id: PackageId,
6002        /// The specific type of artifact, e.g., "source package"
6003        /// or "wheel".
6004        artifact_type: &'static str,
6005        /// When true, a hash is expected to be present.
6006        expected: bool,
6007    },
6008    /// An error that occurs when a package is included with an extra name,
6009    /// but no corresponding base package (i.e., without the extra) exists.
6010    #[error("Found package `{id}` with extra `{extra}` but no base package", id = id.cyan(), extra = extra.cyan())]
6011    MissingExtraBase {
6012        /// The ID of the package that has a missing base.
6013        id: PackageId,
6014        /// The extra name that was found.
6015        extra: ExtraName,
6016    },
6017    /// An error that occurs when a package is included with a development
6018    /// dependency group, but no corresponding base package (i.e., without
6019    /// the group) exists.
6020    #[error("Found package `{id}` with development dependency group `{group}` but no base package", id = id.cyan())]
6021    MissingDevBase {
6022        /// The ID of the package that has a missing base.
6023        id: PackageId,
6024        /// The development dependency group that was found.
6025        group: GroupName,
6026    },
6027    /// An error that occurs from an invalid lockfile where a wheel comes from a non-wheel source
6028    /// such as a directory.
6029    #[error("Wheels cannot come from {source_type} sources")]
6030    InvalidWheelSource {
6031        /// The ID of the distribution with an invalid wheel source.
6032        id: PackageId,
6033        /// The kind of the invalid source.
6034        source_type: &'static str,
6035    },
6036    /// An error that occurs when a distribution indicates that it is sourced from a remote
6037    /// registry, but is missing a URL.
6038    #[error("Found registry distribution `{name}` ({version}) without a valid URL", name = name.cyan(), version = format!("v{version}").cyan())]
6039    MissingUrl {
6040        /// The name of the distribution that is missing a URL.
6041        name: PackageName,
6042        /// The version of the distribution that is missing a URL.
6043        version: Version,
6044    },
6045    /// An error that occurs when a distribution indicates that it is sourced from a local registry,
6046    /// but is missing a path.
6047    #[error("Found registry distribution `{name}` ({version}) without a valid path", name = name.cyan(), version = format!("v{version}").cyan())]
6048    MissingPath {
6049        /// The name of the distribution that is missing a path.
6050        name: PackageName,
6051        /// The version of the distribution that is missing a path.
6052        version: Version,
6053    },
6054    /// An error that occurs when a distribution indicates that it is sourced from a registry, but
6055    /// is missing a filename.
6056    #[error("Found registry distribution `{id}` without a valid filename", id = id.cyan())]
6057    MissingFilename {
6058        /// The ID of the distribution that is missing a filename.
6059        id: PackageId,
6060    },
6061    /// An error that occurs when a distribution is included with neither wheels nor a source
6062    /// distribution.
6063    #[error("Distribution `{id}` can't be installed because it doesn't have a source distribution or wheel for the current platform", id = id.cyan())]
6064    NeitherSourceDistNorWheel {
6065        /// The ID of the distribution.
6066        id: PackageId,
6067    },
6068    /// An error that occurs when a distribution is marked as both `--no-binary` and `--no-build`.
6069    #[error("Distribution `{id}` can't be installed because it is marked as both `--no-binary` and `--no-build`", id = id.cyan())]
6070    NoBinaryNoBuild {
6071        /// The ID of the distribution.
6072        id: PackageId,
6073    },
6074    /// An error that occurs when a distribution is marked as `--no-binary`, but no source
6075    /// distribution is available.
6076    #[error("Distribution `{id}` can't be installed because it is marked as `--no-binary` but has no source distribution", id = id.cyan())]
6077    NoBinary {
6078        /// The ID of the distribution.
6079        id: PackageId,
6080    },
6081    /// An error that occurs when a distribution is marked as `--no-build`, but no binary
6082    /// distribution is available.
6083    #[error("Distribution `{id}` can't be installed because it is marked as `--no-build` but has no binary distribution", id = id.cyan())]
6084    NoBuild {
6085        /// The ID of the distribution.
6086        id: PackageId,
6087    },
6088    /// An error that occurs when a wheel-only distribution is incompatible with the current
6089    /// platform.
6090    #[error("Distribution `{id}` can't be installed because the binary distribution is incompatible with the current platform", id = id.cyan())]
6091    IncompatibleWheelOnly {
6092        /// The ID of the distribution.
6093        id: PackageId,
6094    },
6095    /// An error that occurs when a wheel-only source is marked as `--no-binary`.
6096    #[error("Distribution `{id}` can't be installed because it is marked as `--no-binary` but is itself a binary distribution", id = id.cyan())]
6097    NoBinaryWheelOnly {
6098        /// The ID of the distribution.
6099        id: PackageId,
6100    },
6101    /// An error that occurs when converting between URLs and paths.
6102    #[error("Found dependency `{id}` with no locked distribution", id = id.cyan())]
6103    VerbatimUrl {
6104        /// The ID of the distribution whose URL could not be converted.
6105        id: PackageId,
6106        /// The inner error we forward.
6107        #[source]
6108        err: VerbatimUrlError,
6109    },
6110    /// An error that occurs when computing the relative path between the workspace and a distribution.
6111    #[error("Could not compute relative path between workspace and distribution")]
6112    DistributionRelativePath(
6113        /// The inner error we forward.
6114        #[source]
6115        io::Error,
6116    ),
6117    /// An error that occurs when converting an index URL to a relative path.
6118    #[error("Could not compute relative path between workspace and index")]
6119    IndexRelativePath(
6120        /// The inner error we forward.
6121        #[source]
6122        io::Error,
6123    ),
6124    /// An error that occurs when converting a lockfile path from relative to absolute.
6125    #[error("Could not compute absolute path from workspace root and lockfile path")]
6126    AbsolutePath(
6127        /// The inner error we forward.
6128        #[source]
6129        io::Error,
6130    ),
6131    /// An error that occurs when an ambiguous `package.dependency` is
6132    /// missing a `version` field.
6133    #[error("Dependency `{name}` has missing `version` field but has more than one matching package", name = name.cyan())]
6134    MissingDependencyVersion {
6135        /// The name of the dependency that is missing a `version` field.
6136        name: PackageName,
6137    },
6138    /// An error that occurs when an ambiguous `package.dependency` is
6139    /// missing a `source` field.
6140    #[error("Dependency `{name}` has missing `source` field but has more than one matching package", name = name.cyan())]
6141    MissingDependencySource {
6142        /// The name of the dependency that is missing a `source` field.
6143        name: PackageName,
6144    },
6145    /// An error that occurs when parsing an existing requirement.
6146    #[error("Could not compute relative path between workspace and requirement")]
6147    RequirementRelativePath(
6148        /// The inner error we forward.
6149        #[source]
6150        io::Error,
6151    ),
6152    /// An error that occurs when parsing an existing requirement.
6153    #[error("Could not convert between URL and path")]
6154    RequirementVerbatimUrl(
6155        /// The inner error we forward.
6156        #[source]
6157        VerbatimUrlError,
6158    ),
6159    /// An error that occurs when parsing a registry's index URL.
6160    #[error("Could not convert between URL and path")]
6161    RegistryVerbatimUrl(
6162        /// The inner error we forward.
6163        #[source]
6164        VerbatimUrlError,
6165    ),
6166    /// An error that occurs when converting a path to a URL.
6167    #[error("Failed to convert path to URL: {path}", path = path.display().cyan())]
6168    PathToUrl { path: Box<Path> },
6169    /// An error that occurs when converting a URL to a path.
6170    #[error("Failed to convert URL to path: {url}", url = url.cyan())]
6171    UrlToPath { url: DisplaySafeUrl },
6172    /// An error that occurs when multiple packages with the same
6173    /// name were found when identifying the root packages.
6174    #[error("Found multiple packages matching `{name}`", name = name.cyan())]
6175    MultipleRootPackages {
6176        /// The ID of the package.
6177        name: PackageName,
6178    },
6179    /// An error that occurs when a root package can't be found.
6180    #[error("Could not find root package `{name}`", name = name.cyan())]
6181    MissingRootPackage {
6182        /// The ID of the package.
6183        name: PackageName,
6184    },
6185    /// An error that occurs when resolving metadata for a package.
6186    #[error("Failed to generate package metadata for `{id}`", id = id.cyan())]
6187    Resolution {
6188        /// The ID of the distribution that failed to resolve.
6189        id: PackageId,
6190        /// The inner error we forward.
6191        #[source]
6192        err: uv_distribution::Error,
6193    },
6194    /// A package has inconsistent versions in a single entry.
6195    // Using name instead of id since the version in the id is part of the conflict.
6196    #[error("The entry for package `{name}` ({version}) has wheel `{wheel_filename}` with inconsistent version ({wheel_version}), which indicates a malformed wheel. If this is intentional, set `{env_var}`.", name = name.cyan(), wheel_filename = wheel.filename, wheel_version = wheel.filename.version, env_var = "UV_SKIP_WHEEL_FILENAME_CHECK=1".green())]
6197    InconsistentVersions {
6198        /// The name of the package with the inconsistent entry.
6199        name: PackageName,
6200        /// The version of the package with the inconsistent entry.
6201        version: Version,
6202        /// The wheel with the inconsistent version.
6203        wheel: Wheel,
6204    },
6205    #[error(
6206        "Found conflicting extras `{package1}[{extra1}]` \
6207         and `{package2}[{extra2}]` enabled simultaneously"
6208    )]
6209    ConflictingExtra {
6210        package1: PackageName,
6211        extra1: ExtraName,
6212        package2: PackageName,
6213        extra2: ExtraName,
6214    },
6215    #[error(transparent)]
6216    GitUrlParse(#[from] GitUrlParseError),
6217    #[error("Failed to read `{path}`")]
6218    UnreadablePyprojectToml {
6219        path: PathBuf,
6220        #[source]
6221        err: std::io::Error,
6222    },
6223    #[error("Failed to parse `{path}`")]
6224    InvalidPyprojectToml {
6225        path: PathBuf,
6226        #[source]
6227        err: toml::de::Error,
6228    },
6229    /// An error that occurs when a workspace member has a non-local source.
6230    #[error("Workspace member `{id}` has non-local source", id = id.cyan())]
6231    NonLocalWorkspaceMember {
6232        /// The ID of the workspace member with an invalid source.
6233        id: PackageId,
6234    },
6235}
6236
6237/// An error that occurs when a source string could not be parsed.
6238#[derive(Debug, thiserror::Error)]
6239enum SourceParseError {
6240    /// An error that occurs when the URL in the source is invalid.
6241    #[error("Invalid URL in source `{given}`")]
6242    InvalidUrl {
6243        /// The source string given.
6244        given: String,
6245        /// The URL parse error.
6246        #[source]
6247        err: DisplaySafeUrlError,
6248    },
6249    /// An error that occurs when a Git URL is missing a precise commit SHA.
6250    #[error("Missing SHA in source `{given}`")]
6251    MissingSha {
6252        /// The source string given.
6253        given: String,
6254    },
6255    /// An error that occurs when a Git URL has an invalid SHA.
6256    #[error("Invalid SHA in source `{given}`")]
6257    InvalidSha {
6258        /// The source string given.
6259        given: String,
6260    },
6261}
6262
6263/// An error that occurs when a hash digest could not be parsed.
6264#[derive(Clone, Debug, Eq, PartialEq)]
6265struct HashParseError(&'static str);
6266
6267impl std::error::Error for HashParseError {}
6268
6269impl Display for HashParseError {
6270    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
6271        Display::fmt(self.0, f)
6272    }
6273}
6274
6275/// Format an array so that each element is on its own line and has a trailing comma.
6276///
6277/// Example:
6278///
6279/// ```toml
6280/// dependencies = [
6281///     { name = "idna" },
6282///     { name = "sniffio" },
6283/// ]
6284/// ```
6285fn each_element_on_its_line_array(elements: impl Iterator<Item = impl Into<Value>>) -> Array {
6286    let mut array = elements
6287        .map(|item| {
6288            let mut value = item.into();
6289            // Each dependency is on its own line and indented.
6290            value.decor_mut().set_prefix("\n    ");
6291            value
6292        })
6293        .collect::<Array>();
6294    // With a trailing comma, inserting another entry doesn't change the preceding line,
6295    // reducing the diff noise.
6296    array.set_trailing_comma(true);
6297    // The line break between the last element's comma and the closing square bracket.
6298    array.set_trailing("\n");
6299    array
6300}
6301
6302/// Returns the simplified string-ified version of each marker given.
6303///
6304/// Note that the marker strings returned will include conflict markers if they
6305/// are present.
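///
/// For example (illustrative), forks split on `sys_platform == 'linux'` versus
/// `sys_platform == 'win32'` are disjoint and can be emitted as plain PEP 508
/// markers, whereas forks that differ only in conflicting extras overlap after
/// simplification, so the combined, conflict-aware markers are emitted for all
/// of them instead.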
6306fn simplified_universal_markers(
6307    markers: &[UniversalMarker],
6308    requires_python: &RequiresPython,
6309) -> Vec<String> {
6310    let mut pep508_only = vec![];
6311    let mut seen = FxHashSet::default();
6312    for marker in markers {
6313        let simplified =
6314            SimplifiedMarkerTree::new(requires_python, marker.pep508()).as_simplified_marker_tree();
6315        if seen.insert(simplified) {
6316            pep508_only.push(simplified);
6317        }
6318    }
6319    let any_overlap = pep508_only
6320        .iter()
6321        .tuple_combinations()
6322        .any(|(&marker1, &marker2)| !marker1.is_disjoint(marker2));
6323    let markers = if !any_overlap {
6324        pep508_only
6325    } else {
6326        markers
6327            .iter()
6328            .map(|marker| {
6329                SimplifiedMarkerTree::new(requires_python, marker.combined())
6330                    .as_simplified_marker_tree()
6331            })
6332            .collect()
6333    };
6334    markers
6335        .into_iter()
6336        .filter_map(MarkerTree::try_to_string)
6337        .collect()
6338}
6339
6340#[cfg(test)]
6341mod tests {
6342    use uv_warnings::anstream;
6343
6344    use super::*;
6345
6346    /// Assert a given display snapshot, stripping ANSI color codes.
6347    macro_rules! assert_stripped_snapshot {
6348        ($expr:expr, @$snapshot:literal) => {{
6349            let expr = format!("{}", $expr);
6350            let expr = format!("{}", anstream::adapter::strip_str(&expr));
6351            insta::assert_snapshot!(expr, @$snapshot);
6352        }};
6353    }
6354
6355    #[test]
6356    fn missing_dependency_source_unambiguous() {
6357        let data = r#"
6358version = 1
6359requires-python = ">=3.12"
6360
6361[[package]]
6362name = "a"
6363version = "0.1.0"
6364source = { registry = "https://pypi.org/simple" }
6365sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6366
6367[[package]]
6368name = "b"
6369version = "0.1.0"
6370source = { registry = "https://pypi.org/simple" }
6371sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6372
6373[[package.dependencies]]
6374name = "a"
6375version = "0.1.0"
6376"#;
6377        let result: Result<Lock, _> = toml::from_str(data);
6378        insta::assert_debug_snapshot!(result);
6379    }
6380
6381    #[test]
6382    fn missing_dependency_version_unambiguous() {
6383        let data = r#"
6384version = 1
6385requires-python = ">=3.12"
6386
6387[[package]]
6388name = "a"
6389version = "0.1.0"
6390source = { registry = "https://pypi.org/simple" }
6391sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6392
6393[[package]]
6394name = "b"
6395version = "0.1.0"
6396source = { registry = "https://pypi.org/simple" }
6397sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6398
6399[[package.dependencies]]
6400name = "a"
6401source = { registry = "https://pypi.org/simple" }
6402"#;
6403        let result: Result<Lock, _> = toml::from_str(data);
6404        insta::assert_debug_snapshot!(result);
6405    }
6406
6407    #[test]
6408    fn missing_dependency_source_version_unambiguous() {
6409        let data = r#"
6410version = 1
6411requires-python = ">=3.12"
6412
6413[[package]]
6414name = "a"
6415version = "0.1.0"
6416source = { registry = "https://pypi.org/simple" }
6417sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6418
6419[[package]]
6420name = "b"
6421version = "0.1.0"
6422source = { registry = "https://pypi.org/simple" }
6423sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6424
6425[[package.dependencies]]
6426name = "a"
6427"#;
6428        let result: Result<Lock, _> = toml::from_str(data);
6429        insta::assert_debug_snapshot!(result);
6430    }
6431
6432    #[test]
6433    fn missing_dependency_source_ambiguous() {
6434        let data = r#"
6435version = 1
6436requires-python = ">=3.12"
6437
6438[[package]]
6439name = "a"
6440version = "0.1.0"
6441source = { registry = "https://pypi.org/simple" }
6442sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6443
6444[[package]]
6445name = "a"
6446version = "0.1.1"
6447source = { registry = "https://pypi.org/simple" }
6448sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6449
6450[[package]]
6451name = "b"
6452version = "0.1.0"
6453source = { registry = "https://pypi.org/simple" }
6454sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6455
6456[[package.dependencies]]
6457name = "a"
6458version = "0.1.0"
6459"#;
6460        let result = toml::from_str::<Lock>(data).unwrap_err();
6461        assert_stripped_snapshot!(result, @"Dependency `a` has missing `source` field but has more than one matching package");
6462    }
6463
6464    #[test]
6465    fn missing_dependency_version_ambiguous() {
6466        let data = r#"
6467version = 1
6468requires-python = ">=3.12"
6469
6470[[package]]
6471name = "a"
6472version = "0.1.0"
6473source = { registry = "https://pypi.org/simple" }
6474sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6475
6476[[package]]
6477name = "a"
6478version = "0.1.1"
6479source = { registry = "https://pypi.org/simple" }
6480sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6481
6482[[package]]
6483name = "b"
6484version = "0.1.0"
6485source = { registry = "https://pypi.org/simple" }
6486sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6487
6488[[package.dependencies]]
6489name = "a"
6490source = { registry = "https://pypi.org/simple" }
6491"#;
6492        let result = toml::from_str::<Lock>(data).unwrap_err();
6493        assert_stripped_snapshot!(result, @"Dependency `a` has missing `version` field but has more than one matching package");
6494    }
6495
6496    #[test]
6497    fn missing_dependency_source_version_ambiguous() {
6498        let data = r#"
6499version = 1
6500requires-python = ">=3.12"
6501
6502[[package]]
6503name = "a"
6504version = "0.1.0"
6505source = { registry = "https://pypi.org/simple" }
6506sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6507
6508[[package]]
6509name = "a"
6510version = "0.1.1"
6511source = { registry = "https://pypi.org/simple" }
6512sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6513
6514[[package]]
6515name = "b"
6516version = "0.1.0"
6517source = { registry = "https://pypi.org/simple" }
6518sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6519
6520[[package.dependencies]]
6521name = "a"
6522"#;
6523        let result = toml::from_str::<Lock>(data).unwrap_err();
6524        assert_stripped_snapshot!(result, @"Dependency `a` has missing `source` field but has more than one matching package");
6525    }
6526
6527    #[test]
6528    fn missing_dependency_version_dynamic() {
6529        let data = r#"
6530version = 1
6531requires-python = ">=3.12"
6532
6533[[package]]
6534name = "a"
6535source = { editable = "path/to/a" }
6536
6537[[package]]
6538name = "a"
6539version = "0.1.1"
6540source = { registry = "https://pypi.org/simple" }
6541sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6542
6543[[package]]
6544name = "b"
6545version = "0.1.0"
6546source = { registry = "https://pypi.org/simple" }
6547sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6548
6549[[package.dependencies]]
6550name = "a"
6551source = { editable = "path/to/a" }
6552"#;
6553        let result = toml::from_str::<Lock>(data);
6554        insta::assert_debug_snapshot!(result);
6555    }
6556
6557    #[test]
6558    fn hash_optional_missing() {
6559        let data = r#"
6560version = 1
6561requires-python = ">=3.12"
6562
6563[[package]]
6564name = "anyio"
6565version = "4.3.0"
6566source = { registry = "https://pypi.org/simple" }
6567wheels = [{ url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl" }]
6568"#;
6569        let result: Result<Lock, _> = toml::from_str(data);
6570        insta::assert_debug_snapshot!(result);
6571    }
6572
6573    #[test]
6574    fn hash_optional_present() {
6575        let data = r#"
6576version = 1
6577requires-python = ">=3.12"
6578
6579[[package]]
6580name = "anyio"
6581version = "4.3.0"
6582source = { registry = "https://pypi.org/simple" }
6583wheels = [{ url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8" }]
6584"#;
6585        let result: Result<Lock, _> = toml::from_str(data);
6586        insta::assert_debug_snapshot!(result);
6587    }
6588
6589    #[test]
6590    fn hash_required_present() {
6591        let data = r#"
6592version = 1
6593requires-python = ">=3.12"
6594
6595[[package]]
6596name = "anyio"
6597version = "4.3.0"
6598source = { path = "file:///foo/bar" }
6599wheels = [{ url = "file:///foo/bar/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8" }]
6600"#;
6601        let result: Result<Lock, _> = toml::from_str(data);
6602        insta::assert_debug_snapshot!(result);
6603    }
6604
6605    #[test]
6606    fn source_direct_no_subdir() {
6607        let data = r#"
6608version = 1
6609requires-python = ">=3.12"
6610
6611[[package]]
6612name = "anyio"
6613version = "4.3.0"
6614source = { url = "https://burntsushi.net" }
6615"#;
6616        let result: Result<Lock, _> = toml::from_str(data);
6617        insta::assert_debug_snapshot!(result);
6618    }
6619
6620    #[test]
6621    fn source_direct_has_subdir() {
6622        let data = r#"
6623version = 1
6624requires-python = ">=3.12"
6625
6626[[package]]
6627name = "anyio"
6628version = "4.3.0"
6629source = { url = "https://burntsushi.net", subdirectory = "wat/foo/bar" }
6630"#;
6631        let result: Result<Lock, _> = toml::from_str(data);
6632        insta::assert_debug_snapshot!(result);
6633    }
6634
6635    #[test]
6636    fn source_directory() {
6637        let data = r#"
6638version = 1
6639requires-python = ">=3.12"
6640
6641[[package]]
6642name = "anyio"
6643version = "4.3.0"
6644source = { directory = "path/to/dir" }
6645"#;
6646        let result: Result<Lock, _> = toml::from_str(data);
6647        insta::assert_debug_snapshot!(result);
6648    }
6649
6650    #[test]
6651    fn source_editable() {
6652        let data = r#"
6653version = 1
6654requires-python = ">=3.12"
6655
6656[[package]]
6657name = "anyio"
6658version = "4.3.0"
6659source = { editable = "path/to/dir" }
6660"#;
6661        let result: Result<Lock, _> = toml::from_str(data);
6662        insta::assert_debug_snapshot!(result);
6663    }
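
    // A minimal round-trip check for the `{algorithm}:{digest}` encoding
    // implemented by `Hash` above (added as an illustrative sketch; it only
    // exercises the `FromStr` and `Display` impls defined in this module).
    #[test]
    fn hash_roundtrip() {
        let digest = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8";
        let hash: Hash = digest.parse().unwrap();
        assert_eq!(hash.to_string(), digest);

        // A value without a `:` separator is rejected with a parse error.
        assert!("not-a-hash".parse::<Hash>().is_err());
    }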
6664}