uv_resolver/lock/mod.rs

use std::borrow::Cow;
use std::collections::{BTreeMap, BTreeSet, VecDeque};
use std::error::Error;
use std::fmt::{Debug, Display, Formatter};
use std::io;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::{Arc, LazyLock};

use itertools::Itertools;
use jiff::Timestamp;
use owo_colors::OwoColorize;
use petgraph::graph::NodeIndex;
use petgraph::visit::EdgeRef;
use rustc_hash::{FxHashMap, FxHashSet};
use serde::Serializer;
use toml_edit::{Array, ArrayOfTables, InlineTable, Item, Table, Value, value};
use tracing::debug;
use url::Url;

use uv_cache_key::RepositoryUrl;
use uv_configuration::{BuildOptions, Constraints, InstallTarget};
use uv_distribution::{DistributionDatabase, FlatRequiresDist};
use uv_distribution_filename::{
    BuildTag, DistExtension, ExtensionError, SourceDistExtension, WheelFilename,
};
use uv_distribution_types::{
    BuiltDist, DependencyMetadata, DirectUrlBuiltDist, DirectUrlSourceDist, DirectorySourceDist,
    Dist, DistributionMetadata, FileLocation, GitSourceDist, IndexLocations, IndexMetadata,
    IndexUrl, Name, PathBuiltDist, PathSourceDist, RegistryBuiltDist, RegistryBuiltWheel,
    RegistrySourceDist, RemoteSource, Requirement, RequirementSource, RequiresPython, ResolvedDist,
    SimplifiedMarkerTree, StaticMetadata, ToUrlError, UrlString,
};
use uv_fs::{PortablePath, PortablePathBuf, relative_to};
use uv_git::{RepositoryReference, ResolvedRepositoryReference};
use uv_git_types::{GitLfs, GitOid, GitReference, GitUrl, GitUrlParseError};
use uv_normalize::{ExtraName, GroupName, PackageName};
use uv_pep440::Version;
use uv_pep508::{MarkerEnvironment, MarkerTree, VerbatimUrl, VerbatimUrlError, split_scheme};
use uv_platform_tags::{
    AbiTag, IncompatibleTag, LanguageTag, PlatformTag, TagCompatibility, TagPriority, Tags,
};
use uv_pypi_types::{
    ConflictKind, Conflicts, HashAlgorithm, HashDigest, HashDigests, Hashes, ParsedArchiveUrl,
    ParsedGitUrl, PyProjectToml,
};
use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};
use uv_small_str::SmallString;
use uv_types::{BuildContext, HashStrategy};
use uv_workspace::{Editability, WorkspaceMember};

use crate::exclude_newer::ExcludeNewerSpan;
use crate::fork_strategy::ForkStrategy;
pub(crate) use crate::lock::export::PylockTomlPackage;
pub use crate::lock::export::RequirementsTxtExport;
pub use crate::lock::export::{PylockToml, PylockTomlErrorKind, cyclonedx_json};
pub use crate::lock::installable::Installable;
pub use crate::lock::map::PackageMap;
pub use crate::lock::tree::TreeDisplay;
use crate::resolution::{AnnotatedDist, ResolutionGraphNode};
use crate::universal_marker::{ConflictMarker, UniversalMarker};
use crate::{
    ExcludeNewer, ExcludeNewerPackage, ExcludeNewerValue, InMemoryIndex, MetadataResponse,
    PackageExcludeNewer, PrereleaseMode, ResolutionMode, ResolverOutput,
};

mod export;
mod installable;
mod map;
mod tree;

/// The current version of the lockfile format.
pub const VERSION: u32 = 1;

/// The current revision of the lockfile format.
const REVISION: u32 = 3;
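// How these two numbers are used (see the `Lock` docs and the `supports_provides_extra` and
// `includes_empty_groups` methods below): bumping `VERSION` is a hard compatibility break,
// while `REVISION` only gates backwards-compatible additions. Feature checks compare the pair
// lexicographically, e.g. `(lock.version(), lock.revision()) >= (1, 1)`.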

static LINUX_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("os_name == 'posix' and sys_platform == 'linux'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static WINDOWS_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("os_name == 'nt' and sys_platform == 'win32'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static MAC_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("os_name == 'posix' and sys_platform == 'darwin'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static ANDROID_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("sys_platform == 'android'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 =
        MarkerTree::from_str("platform_machine == 'aarch64' or platform_machine == 'arm64' or platform_machine == 'ARM64'")
            .unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 =
        MarkerTree::from_str("platform_machine == 'x86_64' or platform_machine == 'amd64' or platform_machine == 'AMD64'")
            .unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str(
        "platform_machine == 'i686' or platform_machine == 'i386' or platform_machine == 'win32' or platform_machine == 'x86'",
    )
    .unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static PPC64LE_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("platform_machine == 'ppc64le'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static PPC64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("platform_machine == 'ppc64'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static S390X_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("platform_machine == 's390x'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static RISCV64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("platform_machine == 'riscv64'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static LOONGARCH64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("platform_machine == 'loongarch64'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static ARMV7L_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 =
        MarkerTree::from_str("platform_machine == 'armv7l' or platform_machine == 'armv8l'")
            .unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static ARMV6L_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("platform_machine == 'armv6l'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static LINUX_ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*ARM_MARKERS);
    marker
});
static LINUX_X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*X86_64_MARKERS);
    marker
});
static LINUX_X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*X86_MARKERS);
    marker
});
static LINUX_PPC64LE_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*PPC64LE_MARKERS);
    marker
});
static LINUX_PPC64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*PPC64_MARKERS);
    marker
});
static LINUX_S390X_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*S390X_MARKERS);
    marker
});
static LINUX_RISCV64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*RISCV64_MARKERS);
    marker
});
static LINUX_LOONGARCH64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*LOONGARCH64_MARKERS);
    marker
});
static LINUX_ARMV7L_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*ARMV7L_MARKERS);
    marker
});
static LINUX_ARMV6L_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*ARMV6L_MARKERS);
    marker
});
static WINDOWS_ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *WINDOWS_MARKERS;
    marker.and(*ARM_MARKERS);
    marker
});
static WINDOWS_X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *WINDOWS_MARKERS;
    marker.and(*X86_64_MARKERS);
    marker
});
static WINDOWS_X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *WINDOWS_MARKERS;
    marker.and(*X86_MARKERS);
    marker
});
static MAC_ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *MAC_MARKERS;
    marker.and(*ARM_MARKERS);
    marker
});
static MAC_X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *MAC_MARKERS;
    marker.and(*X86_64_MARKERS);
    marker
});
static MAC_X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *MAC_MARKERS;
    marker.and(*X86_MARKERS);
    marker
});
static ANDROID_ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *ANDROID_MARKERS;
    marker.and(*ARM_MARKERS);
    marker
});
static ANDROID_X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *ANDROID_MARKERS;
    marker.and(*X86_64_MARKERS);
    marker
});
static ANDROID_X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *ANDROID_MARKERS;
    marker.and(*X86_MARKERS);
    marker
});

/// A distribution with its associated hash.
///
/// This pairs a [`Dist`] with the [`HashDigests`] for the specific wheel or
/// sdist that would be installed.
pub(crate) struct HashedDist {
    pub(crate) dist: Dist,
    pub(crate) hashes: HashDigests,
}

#[derive(Clone, Debug, PartialEq, Eq, serde::Deserialize)]
#[serde(try_from = "LockWire")]
pub struct Lock {
    /// The (major) version of the lockfile format.
    ///
    /// Changes to the major version indicate backwards- and forwards-incompatible changes to the
    /// lockfile format. A given uv version only supports a single major version of the lockfile
    /// format.
    ///
    /// In other words, a version of uv that supports version 2 of the lockfile format will not be
    /// able to read lockfiles generated under version 1 or 3.
    version: u32,
    /// The revision of the lockfile format.
    ///
    /// Changes to the revision indicate backwards-compatible changes to the lockfile format.
    /// In other words, versions of uv that only support revision 1 _will_ be able to read lockfiles
    /// with a revision greater than 1 (though they may ignore newer fields).
    revision: u32,
    /// If this lockfile was built from a forking resolution with non-identical forks, store the
    /// forks in the lockfile so we can recreate them in subsequent resolutions.
    fork_markers: Vec<UniversalMarker>,
    /// The conflicting groups/extras specified by the user.
    conflicts: Conflicts,
    /// The list of supported environments specified by the user.
    supported_environments: Vec<MarkerTree>,
    /// The list of required platforms specified by the user.
    required_environments: Vec<MarkerTree>,
    /// The range of supported Python versions.
    requires_python: RequiresPython,
    /// We discard the lockfile if these options don't match.
    options: ResolverOptions,
    /// The actual locked versions and their metadata.
    packages: Vec<Package>,
    /// A map from package ID to index in `packages`.
    ///
    /// This can be used to quickly look up the full package for any ID
    /// in this lock. For example, the dependencies for each package are
    /// listed as package IDs. This map can be used to find the full
    /// package for each such dependency.
    ///
    /// It is guaranteed that every package in this lock has an entry in
    /// this map, and that every dependency for every package has an ID
    /// that exists in this map. That is, there are no dependencies that don't
    /// have a corresponding locked package entry in the same lockfile.
    by_id: FxHashMap<PackageId, usize>,
    /// The input requirements to the resolution.
    manifest: ResolverManifest,
}

impl Lock {
    /// Initialize a [`Lock`] from a [`ResolverOutput`].
    pub fn from_resolution(resolution: &ResolverOutput, root: &Path) -> Result<Self, LockError> {
        let mut packages = BTreeMap::new();
        let requires_python = resolution.requires_python.clone();

        // Determine the set of packages included at multiple versions.
        let mut seen = FxHashSet::default();
        let mut duplicates = FxHashSet::default();
        for node_index in resolution.graph.node_indices() {
            let ResolutionGraphNode::Dist(dist) = &resolution.graph[node_index] else {
                continue;
            };
            if !dist.is_base() {
                continue;
            }
            if !seen.insert(dist.name()) {
                duplicates.insert(dist.name());
            }
        }

        // Lock all base packages.
        for node_index in resolution.graph.node_indices() {
            let ResolutionGraphNode::Dist(dist) = &resolution.graph[node_index] else {
                continue;
            };
            if !dist.is_base() {
                continue;
            }

            // If there are multiple distributions for the same package, include the markers of all
            // forks that included the current distribution.
            let fork_markers = if duplicates.contains(dist.name()) {
                resolution
                    .fork_markers
                    .iter()
                    .filter(|fork_markers| !fork_markers.is_disjoint(dist.marker))
                    .copied()
                    .collect()
            } else {
                vec![]
            };
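            // For example (hypothetical): if forking produced one resolution for
            // `python_full_version >= '3.13'` and another for `python_full_version < '3.13'`,
            // and the two forks picked different versions of the same package, each locked
            // version records only the fork markers under which it was chosen.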

            let mut package = Package::from_annotated_dist(dist, fork_markers, root)?;
            Self::remove_unreachable_wheels(resolution, &requires_python, node_index, &mut package);

            // Add all dependencies
            for edge in resolution.graph.edges(node_index) {
                let ResolutionGraphNode::Dist(dependency_dist) = &resolution.graph[edge.target()]
                else {
                    continue;
                };
                let marker = *edge.weight();
                package.add_dependency(&requires_python, dependency_dist, marker, root)?;
            }

            let id = package.id.clone();
            if let Some(locked_dist) = packages.insert(id, package) {
                return Err(LockErrorKind::DuplicatePackage {
                    id: locked_dist.id.clone(),
                }
                .into());
            }
        }

        // Lock all extras and development dependencies.
        for node_index in resolution.graph.node_indices() {
            let ResolutionGraphNode::Dist(dist) = &resolution.graph[node_index] else {
                continue;
            };
            if let Some(extra) = dist.extra.as_ref() {
                let id = PackageId::from_annotated_dist(dist, root)?;
                let Some(package) = packages.get_mut(&id) else {
                    return Err(LockErrorKind::MissingExtraBase {
                        id,
                        extra: extra.clone(),
                    }
                    .into());
                };
                for edge in resolution.graph.edges(node_index) {
                    let ResolutionGraphNode::Dist(dependency_dist) =
                        &resolution.graph[edge.target()]
                    else {
                        continue;
                    };
                    let marker = *edge.weight();
                    package.add_optional_dependency(
                        &requires_python,
                        extra.clone(),
                        dependency_dist,
                        marker,
                        root,
                    )?;
                }
            }
            if let Some(group) = dist.group.as_ref() {
                let id = PackageId::from_annotated_dist(dist, root)?;
                let Some(package) = packages.get_mut(&id) else {
                    return Err(LockErrorKind::MissingDevBase {
                        id,
                        group: group.clone(),
                    }
                    .into());
                };
                for edge in resolution.graph.edges(node_index) {
                    let ResolutionGraphNode::Dist(dependency_dist) =
                        &resolution.graph[edge.target()]
                    else {
                        continue;
                    };
                    let marker = *edge.weight();
                    package.add_group_dependency(
                        &requires_python,
                        group.clone(),
                        dependency_dist,
                        marker,
                        root,
                    )?;
                }
            }
        }

        let packages = packages.into_values().collect();

        let options = ResolverOptions {
            resolution_mode: resolution.options.resolution_mode,
            prerelease_mode: resolution.options.prerelease_mode,
            fork_strategy: resolution.options.fork_strategy,
            exclude_newer: resolution.options.exclude_newer.clone().into(),
        };
        let lock = Self::new(
            VERSION,
            REVISION,
            packages,
            requires_python,
            options,
            ResolverManifest::default(),
            Conflicts::empty(),
            vec![],
            vec![],
            resolution.fork_markers.clone(),
        )?;
        Ok(lock)
    }

    /// Remove wheels that can't be selected for installation due to environment markers.
    ///
    /// For example, a package included under `sys_platform == 'win32'` does not need Linux
    /// wheels.
    fn remove_unreachable_wheels(
        graph: &ResolverOutput,
        requires_python: &RequiresPython,
        node_index: NodeIndex,
        locked_dist: &mut Package,
    ) {
        // Remove wheels that don't match `requires-python` and can't be selected for installation.
        locked_dist
            .wheels
            .retain(|wheel| requires_python.matches_wheel_tag(&wheel.filename));

        // Filter by platform tags.
        locked_dist.wheels.retain(|wheel| {
            // Naively, we'd check whether `platform_system == 'Linux'` is disjoint, or
            // `os_name == 'posix'` is disjoint, or `sys_platform == 'linux'` is disjoint (each on its
            // own sufficient to exclude linux wheels), but due to
            // `((A ∩ B = ∅) or (A ∩ C = ∅)) => (A ∩ (B ∩ C) = ∅)`
            // a single disjointness check with the intersection is sufficient, so we have one
            // constant per platform.
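            // Worked example (illustrative): `LINUX_ARM_MARKERS` is the intersection
            // `os_name == 'posix' and sys_platform == 'linux' and (platform_machine == 'aarch64' or ...)`.
            // A package that is only reachable under `sys_platform == 'win32'` is disjoint from
            // that intersection, so its Linux/aarch64-only wheels can be dropped below.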
            let platform_tags = wheel.filename.platform_tags();

            if platform_tags.iter().all(PlatformTag::is_any) {
                return true;
            }

            if platform_tags.iter().all(PlatformTag::is_linux) {
                if platform_tags.iter().all(PlatformTag::is_arm) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_ARM_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_x86_64) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_X86_64_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_x86) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_X86_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_ppc64le) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_PPC64LE_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_ppc64) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_PPC64_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_s390x) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_S390X_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_riscv64) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_RISCV64_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_loongarch64) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_LOONGARCH64_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_armv7l) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_ARMV7L_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_armv6l) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*LINUX_ARMV6L_MARKERS)
                    {
                        return false;
                    }
                } else if graph.graph[node_index].marker().is_disjoint(*LINUX_MARKERS) {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_windows) {
                if platform_tags.iter().all(PlatformTag::is_arm) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*WINDOWS_ARM_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_x86_64) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*WINDOWS_X86_64_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_x86) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*WINDOWS_X86_MARKERS)
                    {
                        return false;
                    }
                } else if graph.graph[node_index]
                    .marker()
                    .is_disjoint(*WINDOWS_MARKERS)
                {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_macos) {
                if platform_tags.iter().all(PlatformTag::is_arm) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*MAC_ARM_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_x86_64) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*MAC_X86_64_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_x86) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*MAC_X86_MARKERS)
                    {
                        return false;
                    }
                } else if graph.graph[node_index].marker().is_disjoint(*MAC_MARKERS) {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_android) {
                if platform_tags.iter().all(PlatformTag::is_arm) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*ANDROID_ARM_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_x86_64) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*ANDROID_X86_64_MARKERS)
                    {
                        return false;
                    }
                } else if platform_tags.iter().all(PlatformTag::is_x86) {
                    if graph.graph[node_index]
                        .marker()
                        .is_disjoint(*ANDROID_X86_MARKERS)
                    {
                        return false;
                    }
                } else if graph.graph[node_index]
                    .marker()
                    .is_disjoint(*ANDROID_MARKERS)
                {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_arm) {
                if graph.graph[node_index].marker().is_disjoint(*ARM_MARKERS) {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_x86_64) {
                if graph.graph[node_index]
                    .marker()
                    .is_disjoint(*X86_64_MARKERS)
                {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_x86) {
                if graph.graph[node_index].marker().is_disjoint(*X86_MARKERS) {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_ppc64le) {
                if graph.graph[node_index]
                    .marker()
                    .is_disjoint(*PPC64LE_MARKERS)
                {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_ppc64) {
                if graph.graph[node_index].marker().is_disjoint(*PPC64_MARKERS) {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_s390x) {
                if graph.graph[node_index].marker().is_disjoint(*S390X_MARKERS) {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_riscv64) {
                if graph.graph[node_index]
                    .marker()
                    .is_disjoint(*RISCV64_MARKERS)
                {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_loongarch64) {
                if graph.graph[node_index]
                    .marker()
                    .is_disjoint(*LOONGARCH64_MARKERS)
                {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_armv7l) {
                if graph.graph[node_index]
                    .marker()
                    .is_disjoint(*ARMV7L_MARKERS)
                {
                    return false;
                }
            }

            if platform_tags.iter().all(PlatformTag::is_armv6l) {
                if graph.graph[node_index]
                    .marker()
                    .is_disjoint(*ARMV6L_MARKERS)
                {
                    return false;
                }
            }

            true
        });
    }

    /// Initialize a [`Lock`] from a list of [`Package`] entries.
    fn new(
        version: u32,
        revision: u32,
        mut packages: Vec<Package>,
        requires_python: RequiresPython,
        options: ResolverOptions,
        manifest: ResolverManifest,
        conflicts: Conflicts,
        supported_environments: Vec<MarkerTree>,
        required_environments: Vec<MarkerTree>,
        fork_markers: Vec<UniversalMarker>,
    ) -> Result<Self, LockError> {
        // Put all dependencies for each package in a canonical order and
        // check for duplicates.
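        // Sorting first guarantees that equal dependencies end up adjacent, so checking each
        // `windows(2)` pair below is enough to detect duplicates in a single pass.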
        for package in &mut packages {
            package.dependencies.sort();
            for windows in package.dependencies.windows(2) {
                let (dep1, dep2) = (&windows[0], &windows[1]);
                if dep1 == dep2 {
                    return Err(LockErrorKind::DuplicateDependency {
                        id: package.id.clone(),
                        dependency: dep1.clone(),
                    }
                    .into());
                }
            }

            // Perform the same validation for optional dependencies.
            for (extra, dependencies) in &mut package.optional_dependencies {
                dependencies.sort();
                for windows in dependencies.windows(2) {
                    let (dep1, dep2) = (&windows[0], &windows[1]);
                    if dep1 == dep2 {
                        return Err(LockErrorKind::DuplicateOptionalDependency {
                            id: package.id.clone(),
                            extra: extra.clone(),
                            dependency: dep1.clone(),
                        }
                        .into());
                    }
                }
            }

            // Perform the same validation for dev dependencies.
            for (group, dependencies) in &mut package.dependency_groups {
                dependencies.sort();
                for windows in dependencies.windows(2) {
                    let (dep1, dep2) = (&windows[0], &windows[1]);
                    if dep1 == dep2 {
                        return Err(LockErrorKind::DuplicateDevDependency {
                            id: package.id.clone(),
                            group: group.clone(),
                            dependency: dep1.clone(),
                        }
                        .into());
                    }
                }
            }
        }
        packages.sort_by(|dist1, dist2| dist1.id.cmp(&dist2.id));

        // Check for duplicate package IDs and also build up the map for
        // packages keyed by their ID.
        let mut by_id = FxHashMap::default();
        for (i, dist) in packages.iter().enumerate() {
            if by_id.insert(dist.id.clone(), i).is_some() {
                return Err(LockErrorKind::DuplicatePackage {
                    id: dist.id.clone(),
                }
                .into());
            }
        }

        // Build up a map from ID to extras.
        let mut extras_by_id = FxHashMap::default();
        for dist in &packages {
            for extra in dist.optional_dependencies.keys() {
                extras_by_id
                    .entry(dist.id.clone())
                    .or_insert_with(FxHashSet::default)
                    .insert(extra.clone());
            }
        }

        // Remove any non-existent extras (e.g., extras that were requested but don't exist).
        for dist in &mut packages {
            for dep in dist
                .dependencies
                .iter_mut()
                .chain(dist.optional_dependencies.values_mut().flatten())
                .chain(dist.dependency_groups.values_mut().flatten())
            {
                dep.extra.retain(|extra| {
                    extras_by_id
                        .get(&dep.package_id)
                        .is_some_and(|extras| extras.contains(extra))
                });
            }
        }

        // Check that every dependency has an entry in `by_id`. If any don't,
        // it implies we somehow have a dependency with no corresponding locked
        // package.
        for dist in &packages {
            for dep in &dist.dependencies {
                if !by_id.contains_key(&dep.package_id) {
                    return Err(LockErrorKind::UnrecognizedDependency {
                        id: dist.id.clone(),
                        dependency: dep.clone(),
                    }
                    .into());
                }
            }

            // Perform the same validation for optional dependencies.
            for dependencies in dist.optional_dependencies.values() {
                for dep in dependencies {
                    if !by_id.contains_key(&dep.package_id) {
                        return Err(LockErrorKind::UnrecognizedDependency {
                            id: dist.id.clone(),
                            dependency: dep.clone(),
                        }
                        .into());
                    }
                }
            }

            // Perform the same validation for dev dependencies.
            for dependencies in dist.dependency_groups.values() {
                for dep in dependencies {
                    if !by_id.contains_key(&dep.package_id) {
                        return Err(LockErrorKind::UnrecognizedDependency {
                            id: dist.id.clone(),
                            dependency: dep.clone(),
                        }
                        .into());
                    }
                }
            }

            // Also check that our sources are consistent with whether we have
            // hashes or not.
            if let Some(requires_hash) = dist.id.source.requires_hash() {
                for wheel in &dist.wheels {
                    if requires_hash != wheel.hash.is_some() {
                        return Err(LockErrorKind::Hash {
                            id: dist.id.clone(),
                            artifact_type: "wheel",
                            expected: requires_hash,
                        }
                        .into());
                    }
                }
            }
        }
        let lock = Self {
            version,
            revision,
            fork_markers,
            conflicts,
            supported_environments,
            required_environments,
            requires_python,
            options,
            packages,
            by_id,
            manifest,
        };
        Ok(lock)
    }

    /// Record the requirements that were used to generate this lock.
    #[must_use]
    pub fn with_manifest(mut self, manifest: ResolverManifest) -> Self {
        self.manifest = manifest;
        self
    }

    /// Record the conflicting groups that were used to generate this lock.
    #[must_use]
    pub fn with_conflicts(mut self, conflicts: Conflicts) -> Self {
        self.conflicts = conflicts;
        self
    }

    /// Record the supported environments that were used to generate this lock.
    #[must_use]
    pub fn with_supported_environments(mut self, supported_environments: Vec<MarkerTree>) -> Self {
        // We "complexify" the markers given, since the supported
        // environments given might be coming directly from what's written in
        // `pyproject.toml`, and those are assumed to be simplified (i.e.,
        // they assume `requires-python` is true). But a `Lock` always uses
        // non-simplified markers internally, so we need to re-complexify them
        // here.
        //
        // The nice thing about complexifying is that it's a no-op if the
        // markers given have already been complexified.
        self.supported_environments = supported_environments
            .into_iter()
            .map(|marker| self.requires_python.complexify_markers(marker))
            .collect();
        self
    }

    /// Record the required platforms that were used to generate this lock.
    #[must_use]
    pub fn with_required_environments(mut self, required_environments: Vec<MarkerTree>) -> Self {
        self.required_environments = required_environments
            .into_iter()
            .map(|marker| self.requires_python.complexify_markers(marker))
            .collect();
        self
    }

    /// Returns `true` if this [`Lock`] includes `provides-extra` metadata.
    pub fn supports_provides_extra(&self) -> bool {
        // `provides-extra` was added in Version 1 Revision 1.
        (self.version(), self.revision()) >= (1, 1)
    }

    /// Returns `true` if this [`Lock`] includes entries for empty `dependency-group` metadata.
    pub fn includes_empty_groups(&self) -> bool {
        // Empty dependency groups are included as of https://github.com/astral-sh/uv/pull/8598,
        // but Version 1 Revision 1 is the first revision published after that change.
        (self.version(), self.revision()) >= (1, 1)
    }

    /// Returns the lockfile version.
    pub fn version(&self) -> u32 {
        self.version
    }

    /// Returns the lockfile revision.
    pub fn revision(&self) -> u32 {
        self.revision
    }

    /// Returns the number of packages in the lockfile.
    pub fn len(&self) -> usize {
        self.packages.len()
    }

    /// Returns `true` if the lockfile contains no packages.
    pub fn is_empty(&self) -> bool {
        self.packages.is_empty()
    }

    /// Returns the [`Package`] entries in this lock.
    pub fn packages(&self) -> &[Package] {
        &self.packages
    }

    /// Returns the supported Python version range for the lockfile.
    pub fn requires_python(&self) -> &RequiresPython {
        &self.requires_python
    }

    /// Returns the resolution mode used to generate this lock.
    pub fn resolution_mode(&self) -> ResolutionMode {
        self.options.resolution_mode
    }

    /// Returns the pre-release mode used to generate this lock.
    pub fn prerelease_mode(&self) -> PrereleaseMode {
        self.options.prerelease_mode
    }

    /// Returns the multi-version mode used to generate this lock.
    pub fn fork_strategy(&self) -> ForkStrategy {
        self.options.fork_strategy
    }

    /// Returns the exclude newer setting used to generate this lock.
    pub fn exclude_newer(&self) -> ExcludeNewer {
        // TODO(zanieb): It'd be nice not to hide this clone here, but I am hesitant to introduce
        // a whole new `ExcludeNewerRef` type just for this
        self.options.exclude_newer.clone().into()
    }

    /// Returns the conflicting groups that were used to generate this lock.
    pub fn conflicts(&self) -> &Conflicts {
        &self.conflicts
    }

    /// Returns the supported environments that were used to generate this lock.
    pub fn supported_environments(&self) -> &[MarkerTree] {
        &self.supported_environments
    }

    /// Returns the required platforms that were used to generate this lock.
    pub fn required_environments(&self) -> &[MarkerTree] {
        &self.required_environments
    }

    /// Returns the workspace members that were used to generate this lock.
    pub fn members(&self) -> &BTreeSet<PackageName> {
        &self.manifest.members
    }

    /// Returns the requirements that were used to generate this lock.
    pub fn requirements(&self) -> &BTreeSet<Requirement> {
        &self.manifest.requirements
    }

    /// Returns the dependency groups that were used to generate this lock.
    pub fn dependency_groups(&self) -> &BTreeMap<GroupName, BTreeSet<Requirement>> {
        &self.manifest.dependency_groups
    }

    /// Returns the build constraints that were used to generate this lock.
    pub fn build_constraints(&self, root: &Path) -> Constraints {
        Constraints::from_requirements(
            self.manifest
                .build_constraints
                .iter()
                .cloned()
                .map(|requirement| requirement.to_absolute(root)),
        )
    }

    /// Return the workspace root used to generate this lock.
    pub fn root(&self) -> Option<&Package> {
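        // The root is identified as the editable or virtual member whose workspace-relative
        // path is empty, i.e. the member that lives at the workspace root itself.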
        self.packages.iter().find(|package| {
            let (Source::Editable(path) | Source::Virtual(path)) = &package.id.source else {
                return false;
            };
            path.as_ref() == Path::new("")
        })
    }

    /// Returns the supported environments that were used to generate this
    /// lock.
    ///
    /// The markers returned here are "simplified" with respect to the lock
    /// file's `requires-python` setting. This means these should only be used
    /// for direct comparison purposes with the supported environments written
    /// by a human in `pyproject.toml`. (Think of "supported environments" in
    /// `pyproject.toml` as having an implicit `and python_full_version >=
    /// '{requires-python-bound}'` attached to each one.)
    pub fn simplified_supported_environments(&self) -> Vec<MarkerTree> {
        self.supported_environments()
            .iter()
            .copied()
            .map(|marker| self.simplify_environment(marker))
            .collect()
    }

    /// Returns the required platforms that were used to generate this
    /// lock.
    pub fn simplified_required_environments(&self) -> Vec<MarkerTree> {
        self.required_environments()
            .iter()
            .copied()
            .map(|marker| self.simplify_environment(marker))
            .collect()
    }

    /// Simplify the given marker environment with respect to the lockfile's
    /// `requires-python` setting.
    pub fn simplify_environment(&self, marker: MarkerTree) -> MarkerTree {
        self.requires_python.simplify_markers(marker)
    }

    /// If this lockfile was built from a forking resolution with non-identical forks, return the
    /// markers of those forks; otherwise, an empty slice.
    pub fn fork_markers(&self) -> &[UniversalMarker] {
        self.fork_markers.as_slice()
    }

    /// Checks whether the fork markers cover the entire supported marker space.
    ///
    /// On validation error, returns the marker space that is actually covered along with the
    /// expected marker space.
    pub fn check_marker_coverage(&self) -> Result<(), (MarkerTree, MarkerTree)> {
        let fork_markers_union = if self.fork_markers().is_empty() {
            self.requires_python.to_marker_tree()
        } else {
            let mut fork_markers_union = MarkerTree::FALSE;
            for fork_marker in self.fork_markers() {
                fork_markers_union.or(fork_marker.pep508());
            }
            fork_markers_union
        };
        let mut environments_union = if !self.supported_environments.is_empty() {
            let mut environments_union = MarkerTree::FALSE;
            for fork_marker in &self.supported_environments {
                environments_union.or(*fork_marker);
            }
            environments_union
        } else {
            MarkerTree::TRUE
        };
        // When a user defines environments, they are implicitly constrained by requires-python.
        environments_union.and(self.requires_python.to_marker_tree());
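        // The forks cover the supported environments iff no supported environment lies outside
        // their union, i.e. iff `!fork_markers_union` and `environments_union` are disjoint.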
        if fork_markers_union.negate().is_disjoint(environments_union) {
            Ok(())
        } else {
            Err((fork_markers_union, environments_union))
        }
    }

    /// Checks whether the new requires-python specification is disjoint with
    /// the fork markers in this lock file.
    ///
    /// If they are disjoint, then the union of the fork markers along with the
    /// given requires-python specification (converted to a marker tree) are
    /// returned.
    ///
    /// When disjoint, the fork markers in the lock file should be dropped and
    /// not used.
    pub fn requires_python_coverage(
        &self,
        new_requires_python: &RequiresPython,
    ) -> Result<(), (MarkerTree, MarkerTree)> {
        let fork_markers_union = if self.fork_markers().is_empty() {
            self.requires_python.to_marker_tree()
        } else {
            let mut fork_markers_union = MarkerTree::FALSE;
            for fork_marker in self.fork_markers() {
                fork_markers_union.or(fork_marker.pep508());
            }
            fork_markers_union
        };
        let new_requires_python = new_requires_python.to_marker_tree();
        if fork_markers_union.is_disjoint(new_requires_python) {
            Err((fork_markers_union, new_requires_python))
        } else {
            Ok(())
        }
    }

    /// Returns the TOML representation of this lockfile.
    pub fn to_toml(&self) -> Result<String, toml_edit::ser::Error> {
        // Catch a lockfile where the union of fork markers doesn't cover the supported
        // environments.
        debug_assert!(self.check_marker_coverage().is_ok());

        // We construct a TOML document manually instead of going through Serde to enable
        // the use of inline tables.
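        //
        // For orientation, the preamble emitted below looks roughly like (illustrative values):
        //
        //   version = 1
        //   revision = 3
        //   requires-python = ">=3.12"
        //   resolution-markers = [...]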
        let mut doc = toml_edit::DocumentMut::new();
        doc.insert("version", value(i64::from(self.version)));

        if self.revision > 0 {
            doc.insert("revision", value(i64::from(self.revision)));
        }

        doc.insert("requires-python", value(self.requires_python.to_string()));

        if !self.fork_markers.is_empty() {
            let fork_markers = each_element_on_its_line_array(
                simplified_universal_markers(&self.fork_markers, &self.requires_python).into_iter(),
            );
            if !fork_markers.is_empty() {
                doc.insert("resolution-markers", value(fork_markers));
            }
        }

        if !self.supported_environments.is_empty() {
            let supported_environments = each_element_on_its_line_array(
                self.supported_environments
                    .iter()
                    .copied()
                    .map(|marker| SimplifiedMarkerTree::new(&self.requires_python, marker))
                    .filter_map(SimplifiedMarkerTree::try_to_string),
            );
            doc.insert("supported-markers", value(supported_environments));
        }

        if !self.required_environments.is_empty() {
            let required_environments = each_element_on_its_line_array(
                self.required_environments
                    .iter()
                    .copied()
                    .map(|marker| SimplifiedMarkerTree::new(&self.requires_python, marker))
                    .filter_map(SimplifiedMarkerTree::try_to_string),
            );
            doc.insert("required-markers", value(required_environments));
        }

        if !self.conflicts.is_empty() {
            let mut list = Array::new();
            for set in self.conflicts.iter() {
                list.push(each_element_on_its_line_array(set.iter().map(|item| {
                    let mut table = InlineTable::new();
                    table.insert("package", Value::from(item.package().to_string()));
                    match item.kind() {
                        ConflictKind::Project => {}
                        ConflictKind::Extra(extra) => {
                            table.insert("extra", Value::from(extra.to_string()));
                        }
                        ConflictKind::Group(group) => {
                            table.insert("group", Value::from(group.to_string()));
                        }
                    }
                    table
                })));
            }
            doc.insert("conflicts", value(list));
        }

        // Write the settings that were used to generate the resolution.
        // This enables us to invalidate the lockfile if the user changes
        // their settings.
        {
            let mut options_table = Table::new();

            if self.options.resolution_mode != ResolutionMode::default() {
                options_table.insert(
                    "resolution-mode",
                    value(self.options.resolution_mode.to_string()),
                );
            }
            if self.options.prerelease_mode != PrereleaseMode::default() {
                options_table.insert(
                    "prerelease-mode",
                    value(self.options.prerelease_mode.to_string()),
                );
            }
            if self.options.fork_strategy != ForkStrategy::default() {
                options_table.insert(
                    "fork-strategy",
                    value(self.options.fork_strategy.to_string()),
                );
            }
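            // The block below serializes the exclude-newer settings; the result looks roughly
            // like (illustrative values and package names):
            //
            //   exclude-newer = "2025-01-01T00:00:00Z"
            //
            //   [options.exclude-newer-package]
            //   some-package = "2024-06-01T00:00:00Z"
            //   other-package = false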
            let exclude_newer = ExcludeNewer::from(self.options.exclude_newer.clone());
            if !exclude_newer.is_empty() {
                // Always serialize global exclude-newer as a string
                if let Some(global) = &exclude_newer.global {
                    options_table.insert("exclude-newer", value(global.to_string()));
                    // Serialize the original span if present
                    if let Some(span) = global.span() {
                        options_table.insert("exclude-newer-span", value(span.to_string()));
                    }
                }

                // Serialize package-specific exclusions as a separate field
                if !exclude_newer.package.is_empty() {
                    let mut package_table = toml_edit::Table::new();
                    for (name, setting) in &exclude_newer.package {
                        match setting {
                            PackageExcludeNewer::Enabled(exclude_newer_value) => {
                                if let Some(span) = exclude_newer_value.span() {
                                    // Serialize as inline table with timestamp and span
                                    let mut inline = toml_edit::InlineTable::new();
                                    inline.insert(
                                        "timestamp",
                                        exclude_newer_value.timestamp().to_string().into(),
                                    );
                                    inline.insert("span", span.to_string().into());
                                    package_table.insert(name.as_ref(), Item::Value(inline.into()));
                                } else {
                                    // Serialize as simple string
                                    package_table.insert(
                                        name.as_ref(),
                                        value(exclude_newer_value.to_string()),
                                    );
                                }
                            }
                            PackageExcludeNewer::Disabled => {
                                package_table.insert(name.as_ref(), value(false));
                            }
                        }
                    }
                    options_table.insert("exclude-newer-package", Item::Table(package_table));
                }
            }

            if !options_table.is_empty() {
                doc.insert("options", Item::Table(options_table));
            }
        }

        // Write the manifest that was used to generate the resolution.
        {
            let mut manifest_table = Table::new();

            if !self.manifest.members.is_empty() {
                manifest_table.insert(
                    "members",
                    value(each_element_on_its_line_array(
                        self.manifest
                            .members
                            .iter()
                            .map(std::string::ToString::to_string),
                    )),
                );
            }

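            // Each list-valued manifest field below follows the same formatting rule: an empty
            // slice maps to an empty array, a single entry stays on one line, and longer lists
            // are written one element per line via `each_element_on_its_line_array`.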
            if !self.manifest.requirements.is_empty() {
                let requirements = self
                    .manifest
                    .requirements
                    .iter()
                    .map(|requirement| {
                        serde::Serialize::serialize(
                            &requirement,
                            toml_edit::ser::ValueSerializer::new(),
                        )
                    })
                    .collect::<Result<Vec<_>, _>>()?;
                let requirements = match requirements.as_slice() {
                    [] => Array::new(),
                    [requirement] => Array::from_iter([requirement]),
                    requirements => each_element_on_its_line_array(requirements.iter()),
                };
                manifest_table.insert("requirements", value(requirements));
            }

            if !self.manifest.constraints.is_empty() {
                let constraints = self
                    .manifest
                    .constraints
                    .iter()
                    .map(|requirement| {
                        serde::Serialize::serialize(
                            &requirement,
                            toml_edit::ser::ValueSerializer::new(),
                        )
                    })
                    .collect::<Result<Vec<_>, _>>()?;
                let constraints = match constraints.as_slice() {
                    [] => Array::new(),
                    [requirement] => Array::from_iter([requirement]),
                    constraints => each_element_on_its_line_array(constraints.iter()),
                };
                manifest_table.insert("constraints", value(constraints));
            }

            if !self.manifest.overrides.is_empty() {
                let overrides = self
                    .manifest
                    .overrides
                    .iter()
                    .map(|requirement| {
                        serde::Serialize::serialize(
                            &requirement,
                            toml_edit::ser::ValueSerializer::new(),
                        )
                    })
                    .collect::<Result<Vec<_>, _>>()?;
                let overrides = match overrides.as_slice() {
                    [] => Array::new(),
                    [requirement] => Array::from_iter([requirement]),
                    overrides => each_element_on_its_line_array(overrides.iter()),
                };
                manifest_table.insert("overrides", value(overrides));
            }

            if !self.manifest.excludes.is_empty() {
                let excludes = self
                    .manifest
                    .excludes
                    .iter()
                    .map(|name| {
                        serde::Serialize::serialize(&name, toml_edit::ser::ValueSerializer::new())
                    })
                    .collect::<Result<Vec<_>, _>>()?;
                let excludes = match excludes.as_slice() {
                    [] => Array::new(),
                    [name] => Array::from_iter([name]),
                    excludes => each_element_on_its_line_array(excludes.iter()),
                };
                manifest_table.insert("excludes", value(excludes));
            }

            if !self.manifest.build_constraints.is_empty() {
                let build_constraints = self
                    .manifest
                    .build_constraints
                    .iter()
                    .map(|requirement| {
                        serde::Serialize::serialize(
                            &requirement,
                            toml_edit::ser::ValueSerializer::new(),
                        )
                    })
                    .collect::<Result<Vec<_>, _>>()?;
                let build_constraints = match build_constraints.as_slice() {
                    [] => Array::new(),
                    [requirement] => Array::from_iter([requirement]),
                    build_constraints => each_element_on_its_line_array(build_constraints.iter()),
                };
                manifest_table.insert("build-constraints", value(build_constraints));
            }

            if !self.manifest.dependency_groups.is_empty() {
1402                let mut dependency_groups = Table::new();
1403                for (extra, requirements) in &self.manifest.dependency_groups {
1404                    let requirements = requirements
1405                        .iter()
1406                        .map(|requirement| {
1407                            serde::Serialize::serialize(
1408                                &requirement,
1409                                toml_edit::ser::ValueSerializer::new(),
1410                            )
1411                        })
1412                        .collect::<Result<Vec<_>, _>>()?;
1413                    let requirements = match requirements.as_slice() {
1414                        [] => Array::new(),
1415                        [requirement] => Array::from_iter([requirement]),
1416                        requirements => each_element_on_its_line_array(requirements.iter()),
1417                    };
1418                    if !requirements.is_empty() {
1419                        dependency_groups.insert(extra.as_ref(), value(requirements));
1420                    }
1421                }
1422                if !dependency_groups.is_empty() {
1423                    manifest_table.insert("dependency-groups", Item::Table(dependency_groups));
1424                }
1425            }
1426
1427            if !self.manifest.dependency_metadata.is_empty() {
1428                let mut tables = ArrayOfTables::new();
1429                for metadata in &self.manifest.dependency_metadata {
1430                    let mut table = Table::new();
1431                    table.insert("name", value(metadata.name.to_string()));
1432                    if let Some(version) = metadata.version.as_ref() {
1433                        table.insert("version", value(version.to_string()));
1434                    }
1435                    if !metadata.requires_dist.is_empty() {
1436                        table.insert(
1437                            "requires-dist",
1438                            value(serde::Serialize::serialize(
1439                                &metadata.requires_dist,
1440                                toml_edit::ser::ValueSerializer::new(),
1441                            )?),
1442                        );
1443                    }
1444                    if let Some(requires_python) = metadata.requires_python.as_ref() {
1445                        table.insert("requires-python", value(requires_python.to_string()));
1446                    }
1447                    if !metadata.provides_extra.is_empty() {
1448                        table.insert(
1449                            "provides-extras",
1450                            value(serde::Serialize::serialize(
1451                                &metadata.provides_extra,
1452                                toml_edit::ser::ValueSerializer::new(),
1453                            )?),
1454                        );
1455                    }
1456                    tables.push(table);
1457                }
1458                manifest_table.insert("dependency-metadata", Item::ArrayOfTables(tables));
1459            }
1460
1461            if !manifest_table.is_empty() {
1462                doc.insert("manifest", Item::Table(manifest_table));
1463            }
1464        }
1465
1466        // Count the number of packages for each package name. When
1467        // there's only one package for a particular package name (the
1468        // overwhelmingly common case), we can omit some data (like source and
1469        // version) on dependency edges since it is strictly redundant.
1470        let mut dist_count_by_name: FxHashMap<PackageName, u64> = FxHashMap::default();
1471        for dist in &self.packages {
1472            *dist_count_by_name.entry(dist.id.name.clone()).or_default() += 1;
1473        }
1474
1475        let mut packages = ArrayOfTables::new();
1476        for dist in &self.packages {
1477            packages.push(dist.to_toml(&self.requires_python, &dist_count_by_name)?);
1478        }
1479
1480        doc.insert("package", Item::ArrayOfTables(packages));
1481        Ok(doc.to_string())
1482    }
1483
1484    /// Returns the package with the given name. If there are multiple
1485    /// matching packages, then an error is returned. If there are no
1486    /// matching packages, then `Ok(None)` is returned.
1487    pub fn find_by_name(&self, name: &PackageName) -> Result<Option<&Package>, String> {
1488        let mut found_dist = None;
1489        for dist in &self.packages {
1490            if &dist.id.name == name {
1491                if found_dist.is_some() {
1492                    return Err(format!("found multiple packages matching `{name}`"));
1493                }
1494                found_dist = Some(dist);
1495            }
1496        }
1497        Ok(found_dist)
1498    }
1499
1500    /// Returns the package with the given name.
1501    ///
1502    /// If there are multiple matching packages, returns the package that
1503    /// corresponds to the given marker tree.
1504    ///
1505    /// If there are multiple packages that are relevant to the current
1506    /// markers, then an error is returned.
1507    ///
1508    /// If there are no matching packages, then `Ok(None)` is returned.
1509    fn find_by_markers(
1510        &self,
1511        name: &PackageName,
1512        marker_env: &MarkerEnvironment,
1513    ) -> Result<Option<&Package>, String> {
1514        let mut found_dist = None;
1515        for dist in &self.packages {
1516            if &dist.id.name == name {
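                // An empty set of fork markers means the package applies to every environment;
                // otherwise, at least one fork marker must match the given marker environment.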
1517                if dist.fork_markers.is_empty()
1518                    || dist
1519                        .fork_markers
1520                        .iter()
1521                        .any(|marker| marker.evaluate_no_extras(marker_env))
1522                {
1523                    if found_dist.is_some() {
1524                        return Err(format!("found multiple packages matching `{name}`"));
1525                    }
1526                    found_dist = Some(dist);
1527                }
1528            }
1529        }
1530        Ok(found_dist)
1531    }
1532
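    /// Returns the package with the given ID, panicking if the ID is not present in the lockfile.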
1533    fn find_by_id(&self, id: &PackageId) -> &Package {
1534        let index = *self.by_id.get(id).expect("locked package for ID");
1535
1536        self.packages.get(index).expect("valid index for package")
1537    }
1538
1539    /// Return a [`SatisfiesResult`] if the given extras do not match the [`Package`] metadata.
1540    fn satisfies_provides_extra<'lock>(
1541        &self,
1542        provides_extra: Box<[ExtraName]>,
1543        package: &'lock Package,
1544    ) -> SatisfiesResult<'lock> {
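        // If this lockfile doesn't record `provides-extra` metadata, there's nothing to validate.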
1545        if !self.supports_provides_extra() {
1546            return SatisfiesResult::Satisfied;
1547        }
1548
1549        let expected: BTreeSet<_> = provides_extra.iter().collect();
1550        let actual: BTreeSet<_> = package.metadata.provides_extra.iter().collect();
1551
1552        if expected != actual {
1553            let expected = Box::into_iter(provides_extra).collect();
1554            return SatisfiesResult::MismatchedPackageProvidesExtra(
1555                &package.id.name,
1556                package.id.version.as_ref(),
1557                expected,
1558                actual,
1559            );
1560        }
1561
1562        SatisfiesResult::Satisfied
1563    }
1564
1565    /// Return a [`SatisfiesResult`] if the given requirements do not match the [`Package`] metadata.
1566    fn satisfies_requires_dist<'lock>(
1567        &self,
1568        requires_dist: Box<[Requirement]>,
1569        dependency_groups: BTreeMap<GroupName, Box<[Requirement]>>,
1570        package: &'lock Package,
1571        root: &Path,
1572    ) -> Result<SatisfiesResult<'lock>, LockError> {
1573        // Special-case: if the version is dynamic, compare the flattened requirements.
1574        let flattened = if package.is_dynamic() {
1575            Some(
1576                FlatRequiresDist::from_requirements(requires_dist.clone(), &package.id.name)
1577                    .into_iter()
1578                    .map(|requirement| {
1579                        normalize_requirement(requirement, root, &self.requires_python)
1580                    })
1581                    .collect::<Result<BTreeSet<_>, _>>()?,
1582            )
1583        } else {
1584            None
1585        };
1586
1587        // Validate the `requires-dist` metadata.
1588        let expected: BTreeSet<_> = Box::into_iter(requires_dist)
1589            .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1590            .collect::<Result<_, _>>()?;
1591        let actual: BTreeSet<_> = package
1592            .metadata
1593            .requires_dist
1594            .iter()
1595            .cloned()
1596            .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1597            .collect::<Result<_, _>>()?;
1598
1599        if expected != actual && flattened.is_none_or(|expected| expected != actual) {
1600            return Ok(SatisfiesResult::MismatchedPackageRequirements(
1601                &package.id.name,
1602                package.id.version.as_ref(),
1603                expected,
1604                actual,
1605            ));
1606        }
1607
1608        // Validate the `dependency-groups` metadata.
1609        let expected: BTreeMap<GroupName, BTreeSet<Requirement>> = dependency_groups
1610            .into_iter()
1611            .filter(|(_, requirements)| self.includes_empty_groups() || !requirements.is_empty())
1612            .map(|(group, requirements)| {
1613                Ok::<_, LockError>((
1614                    group,
1615                    Box::into_iter(requirements)
1616                        .map(|requirement| {
1617                            normalize_requirement(requirement, root, &self.requires_python)
1618                        })
1619                        .collect::<Result<_, _>>()?,
1620                ))
1621            })
1622            .collect::<Result<_, _>>()?;
1623        let actual: BTreeMap<GroupName, BTreeSet<Requirement>> = package
1624            .metadata
1625            .dependency_groups
1626            .iter()
1627            .filter(|(_, requirements)| self.includes_empty_groups() || !requirements.is_empty())
1628            .map(|(group, requirements)| {
1629                Ok::<_, LockError>((
1630                    group.clone(),
1631                    requirements
1632                        .iter()
1633                        .cloned()
1634                        .map(|requirement| {
1635                            normalize_requirement(requirement, root, &self.requires_python)
1636                        })
1637                        .collect::<Result<_, _>>()?,
1638                ))
1639            })
1640            .collect::<Result<_, _>>()?;
1641
1642        if expected != actual {
1643            return Ok(SatisfiesResult::MismatchedPackageDependencyGroups(
1644                &package.id.name,
1645                package.id.version.as_ref(),
1646                expected,
1647                actual,
1648            ));
1649        }
1650
1651        Ok(SatisfiesResult::Satisfied)
1652    }
1653
1654    /// Check whether the lock matches the project structure, requirements and configuration.
1655    pub async fn satisfies<Context: BuildContext>(
1656        &self,
1657        root: &Path,
1658        packages: &BTreeMap<PackageName, WorkspaceMember>,
1659        members: &[PackageName],
1660        required_members: &BTreeMap<PackageName, Editability>,
1661        requirements: &[Requirement],
1662        constraints: &[Requirement],
1663        overrides: &[Requirement],
1664        excludes: &[PackageName],
1665        build_constraints: &[Requirement],
1666        dependency_groups: &BTreeMap<GroupName, Vec<Requirement>>,
1667        dependency_metadata: &DependencyMetadata,
1668        indexes: Option<&IndexLocations>,
1669        tags: &Tags,
1670        markers: &MarkerEnvironment,
1671        hasher: &HashStrategy,
1672        index: &InMemoryIndex,
1673        database: &DistributionDatabase<'_, Context>,
1674    ) -> Result<SatisfiesResult<'_>, LockError> {
1675        let mut queue: VecDeque<&Package> = VecDeque::new();
1676        let mut seen = FxHashSet::default();
1677
1678        // Validate that the lockfile was generated with the same root members.
1679        {
1680            let expected = members.iter().cloned().collect::<BTreeSet<_>>();
1681            let actual = &self.manifest.members;
1682            if expected != *actual {
1683                return Ok(SatisfiesResult::MismatchedMembers(expected, actual));
1684            }
1685        }
1686
1687        // Validate that the member sources have not changed (e.g., that they've switched from
1688        // virtual to non-virtual or vice versa).
1689        for (name, member) in packages {
1690            let source = self.find_by_name(name).ok().flatten();
1691
1692            // Determine whether the member was required by any other member.
1693            let value = required_members.get(name);
1694            let is_required_member = value.is_some();
1695            let editability = value.copied().flatten();
1696
1697            // Verify that the member is virtual (or not).
1698            let expected_virtual = !member.pyproject_toml().is_package(!is_required_member);
1699            let actual_virtual =
1700                source.map(|package| matches!(package.id.source, Source::Virtual(..)));
1701            if actual_virtual != Some(expected_virtual) {
1702                return Ok(SatisfiesResult::MismatchedVirtual(
1703                    name.clone(),
1704                    expected_virtual,
1705                ));
1706            }
1707
1708            // Verify that the member is editable (or not).
1709            let expected_editable = if expected_virtual {
1710                false
1711            } else {
1712                editability.unwrap_or(true)
1713            };
1714            let actual_editable =
1715                source.map(|package| matches!(package.id.source, Source::Editable(..)));
1716            if actual_editable != Some(expected_editable) {
1717                return Ok(SatisfiesResult::MismatchedEditable(
1718                    name.clone(),
1719                    expected_editable,
1720                ));
1721            }
1722        }
1723
1724        // Validate that the lockfile was generated with the same requirements.
1725        {
1726            let expected: BTreeSet<_> = requirements
1727                .iter()
1728                .cloned()
1729                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1730                .collect::<Result<_, _>>()?;
1731            let actual: BTreeSet<_> = self
1732                .manifest
1733                .requirements
1734                .iter()
1735                .cloned()
1736                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1737                .collect::<Result<_, _>>()?;
1738            if expected != actual {
1739                return Ok(SatisfiesResult::MismatchedRequirements(expected, actual));
1740            }
1741        }
1742
1743        // Validate that the lockfile was generated with the same constraints.
1744        {
1745            let expected: BTreeSet<_> = constraints
1746                .iter()
1747                .cloned()
1748                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1749                .collect::<Result<_, _>>()?;
1750            let actual: BTreeSet<_> = self
1751                .manifest
1752                .constraints
1753                .iter()
1754                .cloned()
1755                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1756                .collect::<Result<_, _>>()?;
1757            if expected != actual {
1758                return Ok(SatisfiesResult::MismatchedConstraints(expected, actual));
1759            }
1760        }
1761
1762        // Validate that the lockfile was generated with the same overrides.
1763        {
1764            let expected: BTreeSet<_> = overrides
1765                .iter()
1766                .cloned()
1767                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1768                .collect::<Result<_, _>>()?;
1769            let actual: BTreeSet<_> = self
1770                .manifest
1771                .overrides
1772                .iter()
1773                .cloned()
1774                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1775                .collect::<Result<_, _>>()?;
1776            if expected != actual {
1777                return Ok(SatisfiesResult::MismatchedOverrides(expected, actual));
1778            }
1779        }
1780
1781        // Validate that the lockfile was generated with the same excludes.
1782        {
1783            let expected: BTreeSet<_> = excludes.iter().cloned().collect();
1784            let actual: BTreeSet<_> = self.manifest.excludes.iter().cloned().collect();
1785            if expected != actual {
1786                return Ok(SatisfiesResult::MismatchedExcludes(expected, actual));
1787            }
1788        }
1789
1790        // Validate that the lockfile was generated with the same build constraints.
1791        {
1792            let expected: BTreeSet<_> = build_constraints
1793                .iter()
1794                .cloned()
1795                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1796                .collect::<Result<_, _>>()?;
1797            let actual: BTreeSet<_> = self
1798                .manifest
1799                .build_constraints
1800                .iter()
1801                .cloned()
1802                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1803                .collect::<Result<_, _>>()?;
1804            if expected != actual {
1805                return Ok(SatisfiesResult::MismatchedBuildConstraints(
1806                    expected, actual,
1807                ));
1808            }
1809        }
1810
1811        // Validate that the lockfile was generated with the same dependency groups.
1812        {
1813            let expected: BTreeMap<GroupName, BTreeSet<Requirement>> = dependency_groups
1814                .iter()
1815                .filter(|(_, requirements)| !requirements.is_empty())
1816                .map(|(group, requirements)| {
1817                    Ok::<_, LockError>((
1818                        group.clone(),
1819                        requirements
1820                            .iter()
1821                            .cloned()
1822                            .map(|requirement| {
1823                                normalize_requirement(requirement, root, &self.requires_python)
1824                            })
1825                            .collect::<Result<_, _>>()?,
1826                    ))
1827                })
1828                .collect::<Result<_, _>>()?;
1829            let actual: BTreeMap<GroupName, BTreeSet<Requirement>> = self
1830                .manifest
1831                .dependency_groups
1832                .iter()
1833                .filter(|(_, requirements)| !requirements.is_empty())
1834                .map(|(group, requirements)| {
1835                    Ok::<_, LockError>((
1836                        group.clone(),
1837                        requirements
1838                            .iter()
1839                            .cloned()
1840                            .map(|requirement| {
1841                                normalize_requirement(requirement, root, &self.requires_python)
1842                            })
1843                            .collect::<Result<_, _>>()?,
1844                    ))
1845                })
1846                .collect::<Result<_, _>>()?;
1847            if expected != actual {
1848                return Ok(SatisfiesResult::MismatchedDependencyGroups(
1849                    expected, actual,
1850                ));
1851            }
1852        }
1853
1854        // Validate that the lockfile was generated with the same static metadata.
1855        {
1856            let expected = dependency_metadata
1857                .values()
1858                .cloned()
1859                .collect::<BTreeSet<_>>();
1860            let actual = &self.manifest.dependency_metadata;
1861            if expected != *actual {
1862                return Ok(SatisfiesResult::MismatchedStaticMetadata(expected, actual));
1863            }
1864        }
1865
1866        // Collect the set of available indexes (both `--index-url` and `--find-links` entries).
1867        let mut remotes = indexes.map(|locations| {
1868            locations
1869                .allowed_indexes()
1870                .into_iter()
1871                .filter_map(|index| match index.url() {
1872                    IndexUrl::Pypi(_) | IndexUrl::Url(_) => {
1873                        Some(UrlString::from(index.url().without_credentials().as_ref()))
1874                    }
1875                    IndexUrl::Path(_) => None,
1876                })
1877                .collect::<BTreeSet<_>>()
1878        });
1879
1880        let mut locals = indexes.map(|locations| {
1881            locations
1882                .allowed_indexes()
1883                .into_iter()
1884                .filter_map(|index| match index.url() {
1885                    IndexUrl::Pypi(_) | IndexUrl::Url(_) => None,
1886                    IndexUrl::Path(url) => {
1887                        let path = url.to_file_path().ok()?;
1888                        let path = relative_to(&path, root)
1889                            .or_else(|_| std::path::absolute(path))
1890                            .ok()?
1891                            .into_boxed_path();
1892                        Some(path)
1893                    }
1894                })
1895                .collect::<BTreeSet<_>>()
1896        });
1897
1898        // Add the workspace packages to the queue.
1899        for root_name in packages.keys() {
1900            let root = self
1901                .find_by_name(root_name)
1902                .expect("found too many packages matching root");
1903
1904            let Some(root) = root else {
1905                // The package is not in the lockfile, so it can't be satisfied.
1906                return Ok(SatisfiesResult::MissingRoot(root_name.clone()));
1907            };
1908
1909            // Add the base package.
1910            queue.push_back(root);
1911        }
1912
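        // Walk the locked packages breadth-first from the workspace roots, validating each
        // reachable package against its current metadata.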
1913        while let Some(package) = queue.pop_front() {
1914            // If the lockfile references an index that was not provided, we can't validate it.
1915            if let Source::Registry(index) = &package.id.source {
1916                match index {
1917                    RegistrySource::Url(url) => {
1918                        if remotes
1919                            .as_ref()
1920                            .is_some_and(|remotes| !remotes.contains(url))
1921                        {
1922                            let name = &package.id.name;
1923                            let version = &package
1924                                .id
1925                                .version
1926                                .as_ref()
1927                                .expect("version for registry source");
1928                            return Ok(SatisfiesResult::MissingRemoteIndex(name, version, url));
1929                        }
1930                    }
1931                    RegistrySource::Path(path) => {
1932                        if locals.as_ref().is_some_and(|locals| !locals.contains(path)) {
1933                            let name = &package.id.name;
1934                            let version = &package
1935                                .id
1936                                .version
1937                                .as_ref()
1938                                .expect("version for registry source");
1939                            return Ok(SatisfiesResult::MissingLocalIndex(name, version, path));
1940                        }
1941                    }
1942                }
1943            }
1944
1945            // If the package is immutable, we don't need to validate it (or its dependencies).
1946            if package.id.source.is_immutable() {
1947                continue;
1948            }
1949
1950            if let Some(version) = package.id.version.as_ref() {
1951                // For a non-dynamic package, fetch the metadata from the distribution database.
1952                let HashedDist { dist, .. } = package.to_dist(
1953                    root,
1954                    TagPolicy::Preferred(tags),
1955                    &BuildOptions::default(),
1956                    markers,
1957                )?;
1958
1959                let metadata = {
1960                    let id = dist.version_id();
1961                    if let Some(archive) =
1962                        index
1963                            .distributions()
1964                            .get(&id)
1965                            .as_deref()
1966                            .and_then(|response| {
1967                                if let MetadataResponse::Found(archive, ..) = response {
1968                                    Some(archive)
1969                                } else {
1970                                    None
1971                                }
1972                            })
1973                    {
1974                        // If the metadata is already in the index, return it.
1975                        archive.metadata.clone()
1976                    } else {
1977                        // Run the PEP 517 build process to extract metadata from the source distribution.
1978                        let archive = database
1979                            .get_or_build_wheel_metadata(&dist, hasher.get(&dist))
1980                            .await
1981                            .map_err(|err| LockErrorKind::Resolution {
1982                                id: package.id.clone(),
1983                                err,
1984                            })?;
1985
1986                        let metadata = archive.metadata.clone();
1987
1988                        // Insert the metadata into the index.
1989                        index
1990                            .distributions()
1991                            .done(id, Arc::new(MetadataResponse::Found(archive)));
1992
1993                        metadata
1994                    }
1995                };
1996
1997                // If this is a local package, validate that it hasn't become dynamic (in which
1998                // case, we'd expect the version to be omitted).
1999                if package.id.source.is_source_tree() {
2000                    if metadata.dynamic {
2001                        return Ok(SatisfiesResult::MismatchedDynamic(&package.id.name, false));
2002                    }
2003                }
2004
2005                // Validate the `version` metadata.
2006                if metadata.version != *version {
2007                    return Ok(SatisfiesResult::MismatchedVersion(
2008                        &package.id.name,
2009                        version.clone(),
2010                        Some(metadata.version.clone()),
2011                    ));
2012                }
2013
2014                // Validate the `provides-extras` metadata.
2015                match self.satisfies_provides_extra(metadata.provides_extra, package) {
2016                    SatisfiesResult::Satisfied => {}
2017                    result => return Ok(result),
2018                }
2019
2020                // Validate that the requirements are unchanged.
2021                match self.satisfies_requires_dist(
2022                    metadata.requires_dist,
2023                    metadata.dependency_groups,
2024                    package,
2025                    root,
2026                )? {
2027                    SatisfiesResult::Satisfied => {}
2028                    result => return Ok(result),
2029                }
2030            } else if let Some(source_tree) = package.id.source.as_source_tree() {
2031                // For dynamic packages, we don't need the version. We only need to know that the
2032                // package is still dynamic, and that the requirements are unchanged.
2033                //
2034                // If the distribution is a source tree, attempt to extract the requirements from the
2035                // `pyproject.toml` directly. The distribution database will do this too, but we can be
2036                // even more aggressive here since we _only_ need the requirements. So, for example,
2037                // even if the version is dynamic, we can still extract the requirements without
2038                // performing a build, unlike in the database where we typically construct a "complete"
2039                // metadata object.
2040                let parent = root.join(source_tree);
2041                let path = parent.join("pyproject.toml");
2042                let metadata =
2043                    match fs_err::tokio::read_to_string(&path).await {
2044                        Ok(contents) => {
2045                            let pyproject_toml = toml::from_str::<PyProjectToml>(&contents)
2046                                .map_err(|err| LockErrorKind::InvalidPyprojectToml {
2047                                    path: path.clone(),
2048                                    err,
2049                                })?;
2050                            database
2051                                .requires_dist(&parent, &pyproject_toml)
2052                                .await
2053                                .map_err(|err| LockErrorKind::Resolution {
2054                                    id: package.id.clone(),
2055                                    err,
2056                                })?
2057                        }
2058                        Err(err) if err.kind() == io::ErrorKind::NotFound => None,
2059                        Err(err) => {
2060                            return Err(LockErrorKind::UnreadablePyprojectToml { path, err }.into());
2061                        }
2062                    };
2063
2064                let satisfied = metadata.is_some_and(|metadata| {
2065                    // Validate that the package is still dynamic.
2066                    if !metadata.dynamic {
2067                        debug!("Static `requires-dist` for `{}` is out-of-date; falling back to distribution database", package.id);
2068                        return false;
2069                    }
2070
2071                    // Validate that the extras are unchanged.
2072                    if let SatisfiesResult::Satisfied = self.satisfies_provides_extra(metadata.provides_extra, package) {
2073                        debug!("Static `provides-extra` for `{}` is up-to-date", package.id);
2074                    } else {
2075                        debug!("Static `provides-extra` for `{}` is out-of-date; falling back to distribution database", package.id);
2076                        return false;
2077                    }
2078
2079                    // Validate that the requirements are unchanged.
2080                    match self.satisfies_requires_dist(metadata.requires_dist, metadata.dependency_groups, package, root) {
2081                        Ok(SatisfiesResult::Satisfied) => {
2082                            debug!("Static `requires-dist` for `{}` is up-to-date", package.id);
2083                        },
2084                        Ok(..) => {
2085                            debug!("Static `requires-dist` for `{}` is out-of-date; falling back to distribution database", package.id);
2086                            return false;
2087                        },
2088                        Err(..) => {
2089                            debug!("Static `requires-dist` for `{}` is invalid; falling back to distribution database", package.id);
2090                            return false;
2091                        },
2092                    }
2093
2094                    true
2095                });
2096
2097                // If the `requires-dist` metadata matches the requirements, we're done; otherwise,
2098                // fetch the "full" metadata, which may involve invoking the build system. In some
2099                // cases, build backends return metadata that does _not_ match the `pyproject.toml`
2100                // exactly. For example, `hatchling` will flatten any recursive (or self-referential)
2101                // extras, while `setuptools` will not.
2102                if !satisfied {
2103                    let HashedDist { dist, .. } = package.to_dist(
2104                        root,
2105                        TagPolicy::Preferred(tags),
2106                        &BuildOptions::default(),
2107                        markers,
2108                    )?;
2109
2110                    let metadata = {
2111                        let id = dist.version_id();
2112                        if let Some(archive) =
2113                            index
2114                                .distributions()
2115                                .get(&id)
2116                                .as_deref()
2117                                .and_then(|response| {
2118                                    if let MetadataResponse::Found(archive, ..) = response {
2119                                        Some(archive)
2120                                    } else {
2121                                        None
2122                                    }
2123                                })
2124                        {
2125                            // If the metadata is already in the index, return it.
2126                            archive.metadata.clone()
2127                        } else {
2128                            // Run the PEP 517 build process to extract metadata from the source distribution.
2129                            let archive = database
2130                                .get_or_build_wheel_metadata(&dist, hasher.get(&dist))
2131                                .await
2132                                .map_err(|err| LockErrorKind::Resolution {
2133                                    id: package.id.clone(),
2134                                    err,
2135                                })?;
2136
2137                            let metadata = archive.metadata.clone();
2138
2139                            // Insert the metadata into the index.
2140                            index
2141                                .distributions()
2142                                .done(id, Arc::new(MetadataResponse::Found(archive)));
2143
2144                            metadata
2145                        }
2146                    };
2147
2148                    // Validate that the package is still dynamic.
2149                    if !metadata.dynamic {
2150                        return Ok(SatisfiesResult::MismatchedDynamic(&package.id.name, true));
2151                    }
2152
2153                    // Validate that the extras are unchanged.
2154                    match self.satisfies_provides_extra(metadata.provides_extra, package) {
2155                        SatisfiesResult::Satisfied => {}
2156                        result => return Ok(result),
2157                    }
2158
2159                    // Validate that the requirements are unchanged.
2160                    match self.satisfies_requires_dist(
2161                        metadata.requires_dist,
2162                        metadata.dependency_groups,
2163                        package,
2164                        root,
2165                    )? {
2166                        SatisfiesResult::Satisfied => {}
2167                        result => return Ok(result),
2168                    }
2169                }
2170            } else {
2171                return Ok(SatisfiesResult::MissingVersion(&package.id.name));
2172            }
2173
2174            // Add any explicit indexes to the list of known locals or remotes. These indexes may
2175            // not be available as top-level configuration (i.e., if they're defined within a
2176            // workspace member), but we already validated that the dependencies are up-to-date, so
2177            // we can consider them "available".
2178            for requirement in package
2179                .metadata
2180                .requires_dist
2181                .iter()
2182                .chain(package.metadata.dependency_groups.values().flatten())
2183            {
2184                if let RequirementSource::Registry {
2185                    index: Some(index), ..
2186                } = &requirement.source
2187                {
2188                    match &index.url {
2189                        IndexUrl::Pypi(_) | IndexUrl::Url(_) => {
2190                            if let Some(remotes) = remotes.as_mut() {
2191                                remotes.insert(UrlString::from(
2192                                    index.url().without_credentials().as_ref(),
2193                                ));
2194                            }
2195                        }
2196                        IndexUrl::Path(url) => {
2197                            if let Some(locals) = locals.as_mut() {
2198                                if let Some(path) = url.to_file_path().ok().and_then(|path| {
2199                                    relative_to(&path, root)
2200                                        .or_else(|_| std::path::absolute(path))
2201                                        .ok()
2202                                }) {
2203                                    locals.insert(path.into_boxed_path());
2204                                }
2205                            }
2206                        }
2207                    }
2208                }
2209            }
2210
2211            // Recurse.
2212            for dep in &package.dependencies {
2213                if seen.insert(&dep.package_id) {
2214                    let dep_dist = self.find_by_id(&dep.package_id);
2215                    queue.push_back(dep_dist);
2216                }
2217            }
2218
2219            for dependencies in package.optional_dependencies.values() {
2220                for dep in dependencies {
2221                    if seen.insert(&dep.package_id) {
2222                        let dep_dist = self.find_by_id(&dep.package_id);
2223                        queue.push_back(dep_dist);
2224                    }
2225                }
2226            }
2227
2228            for dependencies in package.dependency_groups.values() {
2229                for dep in dependencies {
2230                    if seen.insert(&dep.package_id) {
2231                        let dep_dist = self.find_by_id(&dep.package_id);
2232                        queue.push_back(dep_dist);
2233                    }
2234                }
2235            }
2236        }
2237
2238        Ok(SatisfiesResult::Satisfied)
2239    }
2240}
2241
2242#[derive(Debug, Copy, Clone)]
2243enum TagPolicy<'tags> {
2244    /// Exclusively consider wheels that match the specified platform tags.
2245    Required(&'tags Tags),
2246    /// Prefer wheels that match the specified platform tags, but fall back to incompatible wheels
2247    /// if necessary.
2248    Preferred(&'tags Tags),
2249}
2250
2251impl<'tags> TagPolicy<'tags> {
2252    /// Returns the platform tags to consider.
2253    fn tags(&self) -> &'tags Tags {
2254        match self {
2255            Self::Required(tags) | Self::Preferred(tags) => tags,
2256        }
2257    }
2258}
2259
2260/// The result of checking if a lockfile satisfies a set of requirements.
2261#[derive(Debug)]
2262pub enum SatisfiesResult<'lock> {
2263    /// The lockfile satisfies the requirements.
2264    Satisfied,
2265    /// The lockfile uses a different set of workspace members.
2266    MismatchedMembers(BTreeSet<PackageName>, &'lock BTreeSet<PackageName>),
2267    /// A workspace member switched from virtual to non-virtual or vice versa.
2268    MismatchedVirtual(PackageName, bool),
2269    /// A workspace member switched from editable to non-editable or vice versa.
2270    MismatchedEditable(PackageName, bool),
2271    /// A source tree switched from dynamic to non-dynamic or vice versa.
2272    MismatchedDynamic(&'lock PackageName, bool),
2273    /// The lockfile uses a different version for one of its workspace members.
2274    MismatchedVersion(&'lock PackageName, Version, Option<Version>),
2275    /// The lockfile uses a different set of requirements.
2276    MismatchedRequirements(BTreeSet<Requirement>, BTreeSet<Requirement>),
2277    /// The lockfile uses a different set of constraints.
2278    MismatchedConstraints(BTreeSet<Requirement>, BTreeSet<Requirement>),
2279    /// The lockfile uses a different set of overrides.
2280    MismatchedOverrides(BTreeSet<Requirement>, BTreeSet<Requirement>),
2281    /// The lockfile uses a different set of excludes.
2282    MismatchedExcludes(BTreeSet<PackageName>, BTreeSet<PackageName>),
2283    /// The lockfile uses a different set of build constraints.
2284    MismatchedBuildConstraints(BTreeSet<Requirement>, BTreeSet<Requirement>),
2285    /// The lockfile uses a different set of dependency groups.
2286    MismatchedDependencyGroups(
2287        BTreeMap<GroupName, BTreeSet<Requirement>>,
2288        BTreeMap<GroupName, BTreeSet<Requirement>>,
2289    ),
2290    /// The lockfile uses different static metadata.
2291    MismatchedStaticMetadata(BTreeSet<StaticMetadata>, &'lock BTreeSet<StaticMetadata>),
2292    /// The lockfile is missing a workspace member.
2293    MissingRoot(PackageName),
2294    /// The lockfile referenced a remote index that was not provided.
2295    MissingRemoteIndex(&'lock PackageName, &'lock Version, &'lock UrlString),
2296    /// The lockfile referenced a local index that was not provided.
2297    MissingLocalIndex(&'lock PackageName, &'lock Version, &'lock Path),
2298    /// A package in the lockfile contains different `requires-dist` metadata than expected.
2299    MismatchedPackageRequirements(
2300        &'lock PackageName,
2301        Option<&'lock Version>,
2302        BTreeSet<Requirement>,
2303        BTreeSet<Requirement>,
2304    ),
2305    /// A package in the lockfile contains different `provides-extra` metadata than expected.
2306    MismatchedPackageProvidesExtra(
2307        &'lock PackageName,
2308        Option<&'lock Version>,
2309        BTreeSet<ExtraName>,
2310        BTreeSet<&'lock ExtraName>,
2311    ),
2312    /// A package in the lockfile contains different `dependency-groups` metadata than expected.
2313    MismatchedPackageDependencyGroups(
2314        &'lock PackageName,
2315        Option<&'lock Version>,
2316        BTreeMap<GroupName, BTreeSet<Requirement>>,
2317        BTreeMap<GroupName, BTreeSet<Requirement>>,
2318    ),
2319    /// The lockfile is missing a version.
2320    MissingVersion(&'lock PackageName),
2321}
2322
2323/// We discard the lockfile if these options don't match.
2324#[derive(Clone, Debug, Default, serde::Deserialize, PartialEq, Eq)]
2325#[serde(rename_all = "kebab-case")]
2326struct ResolverOptions {
2327    /// The [`ResolutionMode`] used to generate this lock.
2328    #[serde(default)]
2329    resolution_mode: ResolutionMode,
2330    /// The [`PrereleaseMode`] used to generate this lock.
2331    #[serde(default)]
2332    prerelease_mode: PrereleaseMode,
2333    /// The [`ForkStrategy`] used to generate this lock.
2334    #[serde(default)]
2335    fork_strategy: ForkStrategy,
2336    /// The [`ExcludeNewer`] setting used to generate this lock.
2337    #[serde(flatten)]
2338    exclude_newer: ExcludeNewerWire,
2339}
2340
2341#[expect(clippy::struct_field_names)]
2342#[derive(Clone, Debug, Default, serde::Deserialize, PartialEq, Eq)]
2343#[serde(rename_all = "kebab-case")]
2344struct ExcludeNewerWire {
2345    exclude_newer: Option<Timestamp>,
2346    exclude_newer_span: Option<ExcludeNewerSpan>,
2347    #[serde(default, skip_serializing_if = "ExcludeNewerPackage::is_empty")]
2348    exclude_newer_package: ExcludeNewerPackage,
2349}
2350
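// Conversions between the in-memory `ExcludeNewer` value and its wire form, which stores the
// global timestamp and optional span under the separate `exclude-newer` and `exclude-newer-span`
// keys.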
2351impl From<ExcludeNewerWire> for ExcludeNewer {
2352    fn from(wire: ExcludeNewerWire) -> Self {
2353        Self {
2354            global: wire
2355                .exclude_newer
2356                .map(|timestamp| ExcludeNewerValue::new(timestamp, wire.exclude_newer_span)),
2357            package: wire.exclude_newer_package,
2358        }
2359    }
2360}
2361
2362impl From<ExcludeNewer> for ExcludeNewerWire {
2363    fn from(exclude_newer: ExcludeNewer) -> Self {
2364        let (timestamp, span) = exclude_newer
2365            .global
2366            .map(ExcludeNewerValue::into_parts)
2367            .map_or((None, None), |(t, s)| (Some(t), s));
2368        Self {
2369            exclude_newer: timestamp,
2370            exclude_newer_span: span,
2371            exclude_newer_package: exclude_newer.package,
2372        }
2373    }
2374}
2375
2376#[derive(Clone, Debug, Default, serde::Deserialize, PartialEq, Eq)]
2377#[serde(rename_all = "kebab-case")]
2378pub struct ResolverManifest {
2379    /// The workspace members included in the lockfile.
2380    #[serde(default)]
2381    members: BTreeSet<PackageName>,
2382    /// The requirements provided to the resolver, exclusive of the workspace members.
2383    ///
2384    /// These are requirements that are attached to the project, but not to any of its
2385    /// workspace members. For example, the requirements in a PEP 723 script would be included here.
2386    #[serde(default)]
2387    requirements: BTreeSet<Requirement>,
2388    /// The dependency groups provided to the resolver, exclusive of the workspace members.
2389    ///
2390    /// These are dependency groups that are attached to the project, but not to any of its
2391    /// workspace members. For example, the dependency groups in a `pyproject.toml` without a
2392    /// `[project]` table would be included here.
2393    #[serde(default)]
2394    dependency_groups: BTreeMap<GroupName, BTreeSet<Requirement>>,
2395    /// The constraints provided to the resolver.
2396    #[serde(default)]
2397    constraints: BTreeSet<Requirement>,
2398    /// The overrides provided to the resolver.
2399    #[serde(default)]
2400    overrides: BTreeSet<Requirement>,
2401    /// The excludes provided to the resolver.
2402    #[serde(default)]
2403    excludes: BTreeSet<PackageName>,
2404    /// The build constraints provided to the resolver.
2405    #[serde(default)]
2406    build_constraints: BTreeSet<Requirement>,
2407    /// The static metadata provided to the resolver.
2408    #[serde(default)]
2409    dependency_metadata: BTreeSet<StaticMetadata>,
2410}
2411
2412impl ResolverManifest {
2413    /// Initialize a [`ResolverManifest`] with the given members, requirements, constraints, and
2414    /// overrides.
2415    pub fn new(
2416        members: impl IntoIterator<Item = PackageName>,
2417        requirements: impl IntoIterator<Item = Requirement>,
2418        constraints: impl IntoIterator<Item = Requirement>,
2419        overrides: impl IntoIterator<Item = Requirement>,
2420        excludes: impl IntoIterator<Item = PackageName>,
2421        build_constraints: impl IntoIterator<Item = Requirement>,
2422        dependency_groups: impl IntoIterator<Item = (GroupName, Vec<Requirement>)>,
2423        dependency_metadata: impl IntoIterator<Item = StaticMetadata>,
2424    ) -> Self {
2425        Self {
2426            members: members.into_iter().collect(),
2427            requirements: requirements.into_iter().collect(),
2428            constraints: constraints.into_iter().collect(),
2429            overrides: overrides.into_iter().collect(),
2430            excludes: excludes.into_iter().collect(),
2431            build_constraints: build_constraints.into_iter().collect(),
2432            dependency_groups: dependency_groups
2433                .into_iter()
2434                .map(|(group, requirements)| (group, requirements.into_iter().collect()))
2435                .collect(),
2436            dependency_metadata: dependency_metadata.into_iter().collect(),
2437        }
2438    }
2439
2440    /// Convert the manifest to a relative form, with paths expressed relative to the given root.
2441    pub fn relative_to(self, root: &Path) -> Result<Self, io::Error> {
2442        Ok(Self {
2443            members: self.members,
2444            requirements: self
2445                .requirements
2446                .into_iter()
2447                .map(|requirement| requirement.relative_to(root))
2448                .collect::<Result<BTreeSet<_>, _>>()?,
2449            constraints: self
2450                .constraints
2451                .into_iter()
2452                .map(|requirement| requirement.relative_to(root))
2453                .collect::<Result<BTreeSet<_>, _>>()?,
2454            overrides: self
2455                .overrides
2456                .into_iter()
2457                .map(|requirement| requirement.relative_to(root))
2458                .collect::<Result<BTreeSet<_>, _>>()?,
2459            excludes: self.excludes,
2460            build_constraints: self
2461                .build_constraints
2462                .into_iter()
2463                .map(|requirement| requirement.relative_to(root))
2464                .collect::<Result<BTreeSet<_>, _>>()?,
2465            dependency_groups: self
2466                .dependency_groups
2467                .into_iter()
2468                .map(|(group, requirements)| {
2469                    Ok::<_, io::Error>((
2470                        group,
2471                        requirements
2472                            .into_iter()
2473                            .map(|requirement| requirement.relative_to(root))
2474                            .collect::<Result<BTreeSet<_>, _>>()?,
2475                    ))
2476                })
2477                .collect::<Result<BTreeMap<_, _>, _>>()?,
2478            dependency_metadata: self.dependency_metadata,
2479        })
2480    }
2481}
2482
2483#[derive(Clone, Debug, serde::Deserialize)]
2484#[serde(rename_all = "kebab-case")]
2485struct LockWire {
2486    version: u32,
2487    revision: Option<u32>,
2488    requires_python: RequiresPython,
2489    /// If this lockfile was built from a forking resolution with non-identical forks, store the
2490    /// forks in the lockfile so we can recreate them in subsequent resolutions.
2491    #[serde(rename = "resolution-markers", default)]
2492    fork_markers: Vec<SimplifiedMarkerTree>,
2493    #[serde(rename = "supported-markers", default)]
2494    supported_environments: Vec<SimplifiedMarkerTree>,
2495    #[serde(rename = "required-markers", default)]
2496    required_environments: Vec<SimplifiedMarkerTree>,
2497    #[serde(rename = "conflicts", default)]
2498    conflicts: Option<Conflicts>,
2499    /// We discard the lockfile if these options don't match.
2500    #[serde(default)]
2501    options: ResolverOptions,
2502    #[serde(default)]
2503    manifest: ResolverManifest,
2504    #[serde(rename = "package", alias = "distribution", default)]
2505    packages: Vec<PackageWire>,
2506}
2507
2508impl TryFrom<LockWire> for Lock {
2509    type Error = LockError;
2510
2511    fn try_from(wire: LockWire) -> Result<Self, LockError> {
2512        // Count the number of sources for each package name. When
2513        // there's only one source for a particular package name (the
2514        // overwhelmingly common case), we can omit some data (like source and
2515        // version) on dependency edges since it is strictly redundant.
2516        let mut unambiguous_package_ids: FxHashMap<PackageName, PackageId> = FxHashMap::default();
2517        let mut ambiguous = FxHashSet::default();
2518        for dist in &wire.packages {
2519            if ambiguous.contains(&dist.id.name) {
2520                continue;
2521            }
2522            if let Some(id) = unambiguous_package_ids.remove(&dist.id.name) {
2523                ambiguous.insert(id.name);
2524                continue;
2525            }
2526            unambiguous_package_ids.insert(dist.id.name.clone(), dist.id.clone());
2527        }
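        // With the ambiguity map built, unwire each package, re-expanding any dependency edges
        // that omitted the source and version for unambiguous package names.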
2528
2529        let packages = wire
2530            .packages
2531            .into_iter()
2532            .map(|dist| dist.unwire(&wire.requires_python, &unambiguous_package_ids))
2533            .collect::<Result<Vec<_>, _>>()?;
2534        let supported_environments = wire
2535            .supported_environments
2536            .into_iter()
2537            .map(|simplified_marker| simplified_marker.into_marker(&wire.requires_python))
2538            .collect();
2539        let required_environments = wire
2540            .required_environments
2541            .into_iter()
2542            .map(|simplified_marker| simplified_marker.into_marker(&wire.requires_python))
2543            .collect();
2544        let fork_markers = wire
2545            .fork_markers
2546            .into_iter()
2547            .map(|simplified_marker| simplified_marker.into_marker(&wire.requires_python))
2548            .map(UniversalMarker::from_combined)
2549            .collect();
2550        let lock = Self::new(
2551            wire.version,
2552            wire.revision.unwrap_or(0),
2553            packages,
2554            wire.requires_python,
2555            wire.options,
2556            wire.manifest,
2557            wire.conflicts.unwrap_or_else(Conflicts::empty),
2558            supported_environments,
2559            required_environments,
2560            fork_markers,
2561        )?;
2562
2563        Ok(lock)
2564    }
2565}
2566
2567/// Like [`Lock`], but limited to the version field. Used for error reporting: by limiting parsing
2568/// to the version field, we can verify compatibility for lockfiles that may otherwise be
2569/// unparsable.
2570#[derive(Clone, Debug, serde::Deserialize)]
2571#[serde(rename_all = "kebab-case")]
2572pub struct LockVersion {
2573    version: u32,
2574}
2575
2576impl LockVersion {
2577    /// Returns the lockfile version.
2578    pub fn version(&self) -> u32 {
2579        self.version
2580    }
2581}
2582
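/// A single locked package: a resolved `name`/`version`/`source` triple, along with its
/// distributions and resolved dependencies.
///
/// An abridged, illustrative `uv.lock` entry (package names and URL are placeholders):
///
/// ```toml
/// [[package]]
/// name = "anyio"
/// version = "4.3.0"
/// source = { registry = "https://pypi.org/simple" }
/// dependencies = [
///     { name = "idna" },
///     { name = "sniffio" },
/// ]
/// ```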
2583#[derive(Clone, Debug, PartialEq, Eq)]
2584pub struct Package {
2585    pub(crate) id: PackageId,
2586    sdist: Option<SourceDist>,
2587    wheels: Vec<Wheel>,
2588    /// If there are multiple versions or sources for the same package name, we add the markers of
2589    /// the fork(s) that contained this version or source, so we can set the correct preferences in
2590    /// the next resolution.
2591    ///
2592    /// Named `resolution-markers` in `uv.lock`.
2593    fork_markers: Vec<UniversalMarker>,
2594    /// The resolved dependencies of the package.
2595    dependencies: Vec<Dependency>,
2596    /// The resolved optional dependencies of the package.
2597    optional_dependencies: BTreeMap<ExtraName, Vec<Dependency>>,
2598    /// The resolved PEP 735 dependency groups of the package.
2599    dependency_groups: BTreeMap<GroupName, Vec<Dependency>>,
2600    /// The exact requirements from the package metadata.
2601    metadata: PackageMetadata,
2602}
2603
2604impl Package {
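    /// Creates a [`Package`] from an [`AnnotatedDist`], relativizing any local paths against the
    /// workspace root. Dependencies start out empty and are filled in afterwards via
    /// `add_dependency` and related methods.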
2605    fn from_annotated_dist(
2606        annotated_dist: &AnnotatedDist,
2607        fork_markers: Vec<UniversalMarker>,
2608        root: &Path,
2609    ) -> Result<Self, LockError> {
2610        let id = PackageId::from_annotated_dist(annotated_dist, root)?;
2611        let sdist = SourceDist::from_annotated_dist(&id, annotated_dist)?;
2612        let wheels = Wheel::from_annotated_dist(annotated_dist)?;
2613        let requires_dist = if id.source.is_immutable() {
2614            BTreeSet::default()
2615        } else {
2616            annotated_dist
2617                .metadata
2618                .as_ref()
2619                .expect("metadata is present")
2620                .requires_dist
2621                .iter()
2622                .cloned()
2623                .map(|requirement| requirement.relative_to(root))
2624                .collect::<Result<_, _>>()
2625                .map_err(LockErrorKind::RequirementRelativePath)?
2626        };
2627        let provides_extra = if id.source.is_immutable() {
2628            Box::default()
2629        } else {
2630            annotated_dist
2631                .metadata
2632                .as_ref()
2633                .expect("metadata is present")
2634                .provides_extra
2635                .clone()
2636        };
2637        let dependency_groups = if id.source.is_immutable() {
2638            BTreeMap::default()
2639        } else {
2640            annotated_dist
2641                .metadata
2642                .as_ref()
2643                .expect("metadata is present")
2644                .dependency_groups
2645                .iter()
2646                .map(|(group, requirements)| {
2647                    let requirements = requirements
2648                        .iter()
2649                        .cloned()
2650                        .map(|requirement| requirement.relative_to(root))
2651                        .collect::<Result<_, _>>()
2652                        .map_err(LockErrorKind::RequirementRelativePath)?;
2653                    Ok::<_, LockError>((group.clone(), requirements))
2654                })
2655                .collect::<Result<_, _>>()?
2656        };
2657        Ok(Self {
2658            id,
2659            sdist,
2660            wheels,
2661            fork_markers,
2662            dependencies: vec![],
2663            optional_dependencies: BTreeMap::default(),
2664            dependency_groups: BTreeMap::default(),
2665            metadata: PackageMetadata {
2666                requires_dist,
2667                provides_extra,
2668                dependency_groups,
2669            },
2670        })
2671    }
2672
2673    /// Add the [`AnnotatedDist`] as a dependency of the [`Package`].
2674    fn add_dependency(
2675        &mut self,
2676        requires_python: &RequiresPython,
2677        annotated_dist: &AnnotatedDist,
2678        marker: UniversalMarker,
2679        root: &Path,
2680    ) -> Result<(), LockError> {
2681        let new_dep =
2682            Dependency::from_annotated_dist(requires_python, annotated_dist, marker, root)?;
2683        for existing_dep in &mut self.dependencies {
2684            if existing_dep.package_id == new_dep.package_id
2685                // It's important that we do a comparison on
2686                // *simplified* markers here. In particular, when
2687                // we write markers out to the lock file, we use
2688                // "simplified" markers, or markers that are simplified
2689                // *given* that `requires-python` is satisfied. So if
2690                // we don't do equality based on what the simplified
2691                // marker is, we might wind up not merging dependencies
2692                // that ought to be merged and thus writing out extra
2693                // entries.
2694                //
2695                // For example, if `requires-python = '>=3.8'` and we
2696                // have `foo==1` and
2697                // `foo==1 ; python_version >= '3.8'` dependencies,
2698                // then they don't have equivalent complexified
2699                // markers, but their simplified markers are identical.
2700                //
2701                // NOTE: It does seem like perhaps this should
2702                // be implemented semantically/algebraically on
2703                // `MarkerTree` itself, but it wasn't totally clear
2704                // how to do that. I think `pep508` would need to
2705                // grow a concept of "requires python" and provide an
2706                // operation specifically for that.
2707                && existing_dep.simplified_marker == new_dep.simplified_marker
2708            {
2709                existing_dep.extra.extend(new_dep.extra);
2710                return Ok(());
2711            }
2712        }
2713
2714        self.dependencies.push(new_dep);
2715        Ok(())
2716    }
2717
2718    /// Add the [`AnnotatedDist`] as an optional dependency of the [`Package`].
2719    fn add_optional_dependency(
2720        &mut self,
2721        requires_python: &RequiresPython,
2722        extra: ExtraName,
2723        annotated_dist: &AnnotatedDist,
2724        marker: UniversalMarker,
2725        root: &Path,
2726    ) -> Result<(), LockError> {
2727        let dep = Dependency::from_annotated_dist(requires_python, annotated_dist, marker, root)?;
2728        let optional_deps = self.optional_dependencies.entry(extra).or_default();
2729        for existing_dep in &mut *optional_deps {
2730            if existing_dep.package_id == dep.package_id
2731                // See note in add_dependency for why we use
2732                // simplified markers here.
2733                && existing_dep.simplified_marker == dep.simplified_marker
2734            {
2735                existing_dep.extra.extend(dep.extra);
2736                return Ok(());
2737            }
2738        }
2739
2740        optional_deps.push(dep);
2741        Ok(())
2742    }
2743
2744    /// Add the [`AnnotatedDist`] to a dependency group of the [`Package`].
2745    fn add_group_dependency(
2746        &mut self,
2747        requires_python: &RequiresPython,
2748        group: GroupName,
2749        annotated_dist: &AnnotatedDist,
2750        marker: UniversalMarker,
2751        root: &Path,
2752    ) -> Result<(), LockError> {
2753        let dep = Dependency::from_annotated_dist(requires_python, annotated_dist, marker, root)?;
2754        let deps = self.dependency_groups.entry(group).or_default();
2755        for existing_dep in &mut *deps {
2756            if existing_dep.package_id == dep.package_id
2757                // See note in add_dependency for why we use
2758                // simplified markers here.
2759                && existing_dep.simplified_marker == dep.simplified_marker
2760            {
2761                existing_dep.extra.extend(dep.extra);
2762                return Ok(());
2763            }
2764        }
2765
2766        deps.push(dep);
2767        Ok(())
2768    }
2769
2770    /// Convert the [`Package`] to a [`Dist`] that can be used in installation, along with its hashes.
2771    fn to_dist(
2772        &self,
2773        workspace_root: &Path,
2774        tag_policy: TagPolicy<'_>,
2775        build_options: &BuildOptions,
2776        markers: &MarkerEnvironment,
2777    ) -> Result<HashedDist, LockError> {
2778        let no_binary = build_options.no_binary_package(&self.id.name);
2779        let no_build = build_options.no_build_package(&self.id.name);
2780
2781        if !no_binary {
2782            if let Some(best_wheel_index) = self.find_best_wheel(tag_policy) {
2783                let hashes = {
2784                    let wheel = &self.wheels[best_wheel_index];
2785                    HashDigests::from(
2786                        wheel
2787                            .hash
2788                            .iter()
2789                            .chain(wheel.zstd.iter().flat_map(|z| z.hash.iter()))
2790                            .map(|h| h.0.clone())
2791                            .collect::<Vec<_>>(),
2792                    )
2793                };
2794
2795                let dist = match &self.id.source {
2796                    Source::Registry(source) => {
2797                        let wheels = self
2798                            .wheels
2799                            .iter()
2800                            .map(|wheel| wheel.to_registry_wheel(source, workspace_root))
2801                            .collect::<Result<_, LockError>>()?;
2802                        let reg_built_dist = RegistryBuiltDist {
2803                            wheels,
2804                            best_wheel_index,
2805                            sdist: None,
2806                        };
2807                        Dist::Built(BuiltDist::Registry(reg_built_dist))
2808                    }
2809                    Source::Path(path) => {
2810                        let filename: WheelFilename =
2811                            self.wheels[best_wheel_index].filename.clone();
2812                        let install_path = absolute_path(workspace_root, path)?;
2813                        let path_dist = PathBuiltDist {
2814                            filename,
2815                            url: verbatim_url(&install_path, &self.id)?,
2816                            install_path: absolute_path(workspace_root, path)?.into_boxed_path(),
2817                        };
2818                        let built_dist = BuiltDist::Path(path_dist);
2819                        Dist::Built(built_dist)
2820                    }
2821                    Source::Direct(url, direct) => {
2822                        let filename: WheelFilename =
2823                            self.wheels[best_wheel_index].filename.clone();
2824                        let url = DisplaySafeUrl::from(ParsedArchiveUrl {
2825                            url: url.to_url().map_err(LockErrorKind::InvalidUrl)?,
2826                            subdirectory: direct.subdirectory.clone(),
2827                            ext: DistExtension::Wheel,
2828                        });
2829                        let direct_dist = DirectUrlBuiltDist {
2830                            filename,
2831                            location: Box::new(url.clone()),
2832                            url: VerbatimUrl::from_url(url),
2833                        };
2834                        let built_dist = BuiltDist::DirectUrl(direct_dist);
2835                        Dist::Built(built_dist)
2836                    }
2837                    Source::Git(_, _) => {
2838                        return Err(LockErrorKind::InvalidWheelSource {
2839                            id: self.id.clone(),
2840                            source_type: "Git",
2841                        }
2842                        .into());
2843                    }
2844                    Source::Directory(_) => {
2845                        return Err(LockErrorKind::InvalidWheelSource {
2846                            id: self.id.clone(),
2847                            source_type: "directory",
2848                        }
2849                        .into());
2850                    }
2851                    Source::Editable(_) => {
2852                        return Err(LockErrorKind::InvalidWheelSource {
2853                            id: self.id.clone(),
2854                            source_type: "editable",
2855                        }
2856                        .into());
2857                    }
2858                    Source::Virtual(_) => {
2859                        return Err(LockErrorKind::InvalidWheelSource {
2860                            id: self.id.clone(),
2861                            source_type: "virtual",
2862                        }
2863                        .into());
2864                    }
2865                };
2866
2867                return Ok(HashedDist { dist, hashes });
2868            }
2869        }
2870
2871        if let Some(sdist) = self.to_source_dist(workspace_root)? {
2872            // Even with `--no-build`, allow virtual packages. (In the future, we may want to allow
2873            // any local source tree, or at least editable source trees, which we allow in
2874            // `uv pip`.)
2875            if !no_build || sdist.is_virtual() {
2876                let hashes = self
2877                    .sdist
2878                    .as_ref()
2879                    .and_then(|s| s.hash())
2880                    .map(|hash| HashDigests::from(vec![hash.0.clone()]))
2881                    .unwrap_or_else(|| HashDigests::from(vec![]));
2882                return Ok(HashedDist {
2883                    dist: Dist::Source(sdist),
2884                    hashes,
2885                });
2886            }
2887        }
2888
2889        match (no_binary, no_build) {
2890            (true, true) => Err(LockErrorKind::NoBinaryNoBuild {
2891                id: self.id.clone(),
2892            }
2893            .into()),
2894            (true, false) if self.id.source.is_wheel() => Err(LockErrorKind::NoBinaryWheelOnly {
2895                id: self.id.clone(),
2896            }
2897            .into()),
2898            (true, false) => Err(LockErrorKind::NoBinary {
2899                id: self.id.clone(),
2900            }
2901            .into()),
2902            (false, true) => Err(LockErrorKind::NoBuild {
2903                id: self.id.clone(),
2904            }
2905            .into()),
2906            (false, false) if self.id.source.is_wheel() => Err(LockError {
2907                kind: Box::new(LockErrorKind::IncompatibleWheelOnly {
2908                    id: self.id.clone(),
2909                }),
2910                hint: self.tag_hint(tag_policy, markers),
2911            }),
2912            (false, false) => Err(LockError {
2913                kind: Box::new(LockErrorKind::NeitherSourceDistNorWheel {
2914                    id: self.id.clone(),
2915                }),
2916                hint: self.tag_hint(tag_policy, markers),
2917            }),
2918        }
2919    }
2920
2921    /// Generate a [`WheelTagHint`] based on wheel-tag incompatibilities.
2922    fn tag_hint(
2923        &self,
2924        tag_policy: TagPolicy<'_>,
2925        markers: &MarkerEnvironment,
2926    ) -> Option<WheelTagHint> {
2927        let filenames = self
2928            .wheels
2929            .iter()
2930            .map(|wheel| &wheel.filename)
2931            .collect::<Vec<_>>();
2932        WheelTagHint::from_wheels(
2933            &self.id.name,
2934            self.id.version.as_ref(),
2935            &filenames,
2936            tag_policy.tags(),
2937            markers,
2938        )
2939    }
2940
2941    /// Convert the source of this [`Package`] to a [`SourceDist`] that can be used in installation.
2942    ///
2943    /// Returns `Ok(None)` if the source cannot be converted: either the source refers to a wheel rather
2944    /// than a source distribution, or it is a registry source that is missing its (required) `sdist` entry.
2945    fn to_source_dist(
2946        &self,
2947        workspace_root: &Path,
2948    ) -> Result<Option<uv_distribution_types::SourceDist>, LockError> {
2949        let sdist = match &self.id.source {
2950            Source::Path(path) => {
2951                // A direct path source can also be a wheel, so validate the extension.
2952                let DistExtension::Source(ext) = DistExtension::from_path(path).map_err(|err| {
2953                    LockErrorKind::MissingExtension {
2954                        id: self.id.clone(),
2955                        err,
2956                    }
2957                })?
2958                else {
2959                    return Ok(None);
2960                };
2961                let install_path = absolute_path(workspace_root, path)?;
2962                let path_dist = PathSourceDist {
2963                    name: self.id.name.clone(),
2964                    version: self.id.version.clone(),
2965                    url: verbatim_url(&install_path, &self.id)?,
2966                    install_path: install_path.into_boxed_path(),
2967                    ext,
2968                };
2969                uv_distribution_types::SourceDist::Path(path_dist)
2970            }
2971            Source::Directory(path) => {
2972                let install_path = absolute_path(workspace_root, path)?;
2973                let dir_dist = DirectorySourceDist {
2974                    name: self.id.name.clone(),
2975                    url: verbatim_url(&install_path, &self.id)?,
2976                    install_path: install_path.into_boxed_path(),
2977                    editable: Some(false),
2978                    r#virtual: Some(false),
2979                };
2980                uv_distribution_types::SourceDist::Directory(dir_dist)
2981            }
2982            Source::Editable(path) => {
2983                let install_path = absolute_path(workspace_root, path)?;
2984                let dir_dist = DirectorySourceDist {
2985                    name: self.id.name.clone(),
2986                    url: verbatim_url(&install_path, &self.id)?,
2987                    install_path: install_path.into_boxed_path(),
2988                    editable: Some(true),
2989                    r#virtual: Some(false),
2990                };
2991                uv_distribution_types::SourceDist::Directory(dir_dist)
2992            }
2993            Source::Virtual(path) => {
2994                let install_path = absolute_path(workspace_root, path)?;
2995                let dir_dist = DirectorySourceDist {
2996                    name: self.id.name.clone(),
2997                    url: verbatim_url(&install_path, &self.id)?,
2998                    install_path: install_path.into_boxed_path(),
2999                    editable: Some(false),
3000                    r#virtual: Some(true),
3001                };
3002                uv_distribution_types::SourceDist::Directory(dir_dist)
3003            }
3004            Source::Git(url, git) => {
3005                // Remove the fragment and query from the URL; they're already present in the
3006                // `GitSource`.
3007                let mut url = url.to_url().map_err(LockErrorKind::InvalidUrl)?;
3008                url.set_fragment(None);
3009                url.set_query(None);
3010
3011                // Reconstruct the `GitUrl` from the `GitSource`.
3012                let git_url = GitUrl::from_commit(
3013                    url,
3014                    GitReference::from(git.kind.clone()),
3015                    git.precise,
3016                    git.lfs,
3017                )?;
3018
3019                // Reconstruct the PEP 508-compatible URL from the `GitSource`.
3020                let url = DisplaySafeUrl::from(ParsedGitUrl {
3021                    url: git_url.clone(),
3022                    subdirectory: git.subdirectory.clone(),
3023                });
3024
3025                let git_dist = GitSourceDist {
3026                    name: self.id.name.clone(),
3027                    url: VerbatimUrl::from_url(url),
3028                    git: Box::new(git_url),
3029                    subdirectory: git.subdirectory.clone(),
3030                };
3031                uv_distribution_types::SourceDist::Git(git_dist)
3032            }
3033            Source::Direct(url, direct) => {
3034                // A direct URL source can also be a wheel, so validate the extension.
3035                let DistExtension::Source(ext) =
3036                    DistExtension::from_path(url.base_str()).map_err(|err| {
3037                        LockErrorKind::MissingExtension {
3038                            id: self.id.clone(),
3039                            err,
3040                        }
3041                    })?
3042                else {
3043                    return Ok(None);
3044                };
3045                let location = url.to_url().map_err(LockErrorKind::InvalidUrl)?;
3046                let url = DisplaySafeUrl::from(ParsedArchiveUrl {
3047                    url: location.clone(),
3048                    subdirectory: direct.subdirectory.clone(),
3049                    ext: DistExtension::Source(ext),
3050                });
3051                let direct_dist = DirectUrlSourceDist {
3052                    name: self.id.name.clone(),
3053                    location: Box::new(location),
3054                    subdirectory: direct.subdirectory.clone(),
3055                    ext,
3056                    url: VerbatimUrl::from_url(url),
3057                };
3058                uv_distribution_types::SourceDist::DirectUrl(direct_dist)
3059            }
3060            Source::Registry(RegistrySource::Url(url)) => {
3061                let Some(ref sdist) = self.sdist else {
3062                    return Ok(None);
3063                };
3064
3065                let name = &self.id.name;
3066                let version = self
3067                    .id
3068                    .version
3069                    .as_ref()
3070                    .expect("version for registry source");
3071
3072                let file_url = sdist.url().ok_or_else(|| LockErrorKind::MissingUrl {
3073                    name: name.clone(),
3074                    version: version.clone(),
3075                })?;
3076                let filename = sdist
3077                    .filename()
3078                    .ok_or_else(|| LockErrorKind::MissingFilename {
3079                        id: self.id.clone(),
3080                    })?;
3081                let ext = SourceDistExtension::from_path(filename.as_ref()).map_err(|err| {
3082                    LockErrorKind::MissingExtension {
3083                        id: self.id.clone(),
3084                        err,
3085                    }
3086                })?;
3087                let file = Box::new(uv_distribution_types::File {
3088                    dist_info_metadata: false,
3089                    filename: SmallString::from(filename),
3090                    hashes: sdist.hash().map_or(HashDigests::empty(), |hash| {
3091                        HashDigests::from(hash.0.clone())
3092                    }),
3093                    requires_python: None,
3094                    size: sdist.size(),
3095                    upload_time_utc_ms: sdist.upload_time().map(Timestamp::as_millisecond),
3096                    url: FileLocation::AbsoluteUrl(file_url.clone()),
3097                    yanked: None,
3098                    zstd: None,
3099                });
3100
3101                let index = IndexUrl::from(VerbatimUrl::from_url(
3102                    url.to_url().map_err(LockErrorKind::InvalidUrl)?,
3103                ));
3104
3105                let reg_dist = RegistrySourceDist {
3106                    name: name.clone(),
3107                    version: version.clone(),
3108                    file,
3109                    ext,
3110                    index,
3111                    wheels: vec![],
3112                };
3113                uv_distribution_types::SourceDist::Registry(reg_dist)
3114            }
3115            Source::Registry(RegistrySource::Path(path)) => {
3116                let Some(ref sdist) = self.sdist else {
3117                    return Ok(None);
3118                };
3119
3120                let name = &self.id.name;
3121                let version = self
3122                    .id
3123                    .version
3124                    .as_ref()
3125                    .expect("version for registry source");
3126
3127                let file_url = match sdist {
3128                    SourceDist::Url { url: file_url, .. } => {
3129                        FileLocation::AbsoluteUrl(file_url.clone())
3130                    }
3131                    SourceDist::Path {
3132                        path: file_path, ..
3133                    } => {
3134                        let file_path = workspace_root.join(path).join(file_path);
3135                        let file_url =
3136                            DisplaySafeUrl::from_file_path(&file_path).map_err(|()| {
3137                                LockErrorKind::PathToUrl {
3138                                    path: file_path.into_boxed_path(),
3139                                }
3140                            })?;
3141                        FileLocation::AbsoluteUrl(UrlString::from(file_url))
3142                    }
3143                    SourceDist::Metadata { .. } => {
3144                        return Err(LockErrorKind::MissingPath {
3145                            name: name.clone(),
3146                            version: version.clone(),
3147                        }
3148                        .into());
3149                    }
3150                };
3151                let filename = sdist
3152                    .filename()
3153                    .ok_or_else(|| LockErrorKind::MissingFilename {
3154                        id: self.id.clone(),
3155                    })?;
3156                let ext = SourceDistExtension::from_path(filename.as_ref()).map_err(|err| {
3157                    LockErrorKind::MissingExtension {
3158                        id: self.id.clone(),
3159                        err,
3160                    }
3161                })?;
3162                let file = Box::new(uv_distribution_types::File {
3163                    dist_info_metadata: false,
3164                    filename: SmallString::from(filename),
3165                    hashes: sdist.hash().map_or(HashDigests::empty(), |hash| {
3166                        HashDigests::from(hash.0.clone())
3167                    }),
3168                    requires_python: None,
3169                    size: sdist.size(),
3170                    upload_time_utc_ms: sdist.upload_time().map(Timestamp::as_millisecond),
3171                    url: file_url,
3172                    yanked: None,
3173                    zstd: None,
3174                });
3175
3176                let index = IndexUrl::from(
3177                    VerbatimUrl::from_absolute_path(workspace_root.join(path))
3178                        .map_err(LockErrorKind::RegistryVerbatimUrl)?,
3179                );
3180
3181                let reg_dist = RegistrySourceDist {
3182                    name: name.clone(),
3183                    version: version.clone(),
3184                    file,
3185                    ext,
3186                    index,
3187                    wheels: vec![],
3188                };
3189                uv_distribution_types::SourceDist::Registry(reg_dist)
3190            }
3191        };
3192
3193        Ok(Some(sdist))
3194    }
3195
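    /// Serializes this package as a TOML table for the lockfile's `[[package]]` array.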
3196    fn to_toml(
3197        &self,
3198        requires_python: &RequiresPython,
3199        dist_count_by_name: &FxHashMap<PackageName, u64>,
3200    ) -> Result<Table, toml_edit::ser::Error> {
3201        let mut table = Table::new();
3202
3203        self.id.to_toml(None, &mut table);
3204
3205        if !self.fork_markers.is_empty() {
3206            let fork_markers = each_element_on_its_line_array(
3207                simplified_universal_markers(&self.fork_markers, requires_python).into_iter(),
3208            );
3209            if !fork_markers.is_empty() {
3210                table.insert("resolution-markers", value(fork_markers));
3211            }
3212        }
3213
3214        if !self.dependencies.is_empty() {
3215            let deps = each_element_on_its_line_array(self.dependencies.iter().map(|dep| {
3216                dep.to_toml(requires_python, dist_count_by_name)
3217                    .into_inline_table()
3218            }));
3219            table.insert("dependencies", value(deps));
3220        }
3221
3222        if !self.optional_dependencies.is_empty() {
3223            let mut optional_deps = Table::new();
3224            for (extra, deps) in &self.optional_dependencies {
3225                let deps = each_element_on_its_line_array(deps.iter().map(|dep| {
3226                    dep.to_toml(requires_python, dist_count_by_name)
3227                        .into_inline_table()
3228                }));
3229                if !deps.is_empty() {
3230                    optional_deps.insert(extra.as_ref(), value(deps));
3231                }
3232            }
3233            if !optional_deps.is_empty() {
3234                table.insert("optional-dependencies", Item::Table(optional_deps));
3235            }
3236        }
3237
3238        if !self.dependency_groups.is_empty() {
3239            let mut dependency_groups = Table::new();
3240            for (extra, deps) in &self.dependency_groups {
3241                let deps = each_element_on_its_line_array(deps.iter().map(|dep| {
3242                    dep.to_toml(requires_python, dist_count_by_name)
3243                        .into_inline_table()
3244                }));
3245                if !deps.is_empty() {
3246                    dependency_groups.insert(extra.as_ref(), value(deps));
3247                }
3248            }
3249            if !dependency_groups.is_empty() {
3250                table.insert("dev-dependencies", Item::Table(dependency_groups));
3251            }
3252        }
3253
3254        if let Some(ref sdist) = self.sdist {
3255            table.insert("sdist", value(sdist.to_toml()?));
3256        }
3257
3258        if !self.wheels.is_empty() {
3259            let wheels = each_element_on_its_line_array(
3260                self.wheels
3261                    .iter()
3262                    .map(Wheel::to_toml)
3263                    .collect::<Result<Vec<_>, _>>()?
3264                    .into_iter(),
3265            );
3266            table.insert("wheels", value(wheels));
3267        }
3268
3269        // Write the package metadata, if non-empty.
3270        {
3271            let mut metadata_table = Table::new();
3272
3273            if !self.metadata.requires_dist.is_empty() {
3274                let requires_dist = self
3275                    .metadata
3276                    .requires_dist
3277                    .iter()
3278                    .map(|requirement| {
3279                        serde::Serialize::serialize(
3280                            &requirement,
3281                            toml_edit::ser::ValueSerializer::new(),
3282                        )
3283                    })
3284                    .collect::<Result<Vec<_>, _>>()?;
3285                let requires_dist = match requires_dist.as_slice() {
3286                    [] => Array::new(),
3287                    [requirement] => Array::from_iter([requirement]),
3288                    requires_dist => each_element_on_its_line_array(requires_dist.iter()),
3289                };
3290                metadata_table.insert("requires-dist", value(requires_dist));
3291            }
3292
3293            if !self.metadata.dependency_groups.is_empty() {
3294                let mut dependency_groups = Table::new();
3295                for (extra, deps) in &self.metadata.dependency_groups {
3296                    let deps = deps
3297                        .iter()
3298                        .map(|requirement| {
3299                            serde::Serialize::serialize(
3300                                &requirement,
3301                                toml_edit::ser::ValueSerializer::new(),
3302                            )
3303                        })
3304                        .collect::<Result<Vec<_>, _>>()?;
3305                    let deps = match deps.as_slice() {
3306                        [] => Array::new(),
3307                        [requirement] => Array::from_iter([requirement]),
3308                        deps => each_element_on_its_line_array(deps.iter()),
3309                    };
3310                    dependency_groups.insert(extra.as_ref(), value(deps));
3311                }
3312                if !dependency_groups.is_empty() {
3313                    metadata_table.insert("requires-dev", Item::Table(dependency_groups));
3314                }
3315            }
3316
3317            if !self.metadata.provides_extra.is_empty() {
3318                let provides_extras = self
3319                    .metadata
3320                    .provides_extra
3321                    .iter()
3322                    .map(|extra| {
3323                        serde::Serialize::serialize(&extra, toml_edit::ser::ValueSerializer::new())
3324                    })
3325                    .collect::<Result<Vec<_>, _>>()?;
3326                // This is just a list of names, so linebreaking it is excessive.
3327                let provides_extras = Array::from_iter(provides_extras);
3328                metadata_table.insert("provides-extras", value(provides_extras));
3329            }
3330
3331            if !metadata_table.is_empty() {
3332                table.insert("metadata", Item::Table(metadata_table));
3333            }
3334        }
3335
3336        Ok(table)
3337    }
3338
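    /// Returns the index of the best compatible wheel for the given tag policy: highest tag
    /// priority, with the build tag as a tie-breaker. Under `TagPolicy::Preferred`, falls back to
    /// the first wheel when none is compatible.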
3339    fn find_best_wheel(&self, tag_policy: TagPolicy<'_>) -> Option<usize> {
3340        type WheelPriority<'lock> = (TagPriority, Option<&'lock BuildTag>);
3341
3342        let mut best: Option<(WheelPriority, usize)> = None;
3343        for (i, wheel) in self.wheels.iter().enumerate() {
3344            let TagCompatibility::Compatible(tag_priority) =
3345                wheel.filename.compatibility(tag_policy.tags())
3346            else {
3347                continue;
3348            };
3349            let build_tag = wheel.filename.build_tag();
3350            let wheel_priority = (tag_priority, build_tag);
3351            match best {
3352                None => {
3353                    best = Some((wheel_priority, i));
3354                }
3355                Some((best_priority, _)) => {
3356                    if wheel_priority > best_priority {
3357                        best = Some((wheel_priority, i));
3358                    }
3359                }
3360            }
3361        }
3362
3363        let best = best.map(|(_, i)| i);
3364        match tag_policy {
3365            TagPolicy::Required(_) => best,
3366            TagPolicy::Preferred(_) => best.or_else(|| self.wheels.first().map(|_| 0)),
3367        }
3368    }
3369
3370    /// Returns the [`PackageName`] of the package.
3371    pub fn name(&self) -> &PackageName {
3372        &self.id.name
3373    }
3374
3375    /// Returns the [`Version`] of the package.
3376    pub fn version(&self) -> Option<&Version> {
3377        self.id.version.as_ref()
3378    }
3379
3380    /// Returns the Git SHA of the package, if it is a Git source.
3381    pub fn git_sha(&self) -> Option<&GitOid> {
3382        match &self.id.source {
3383            Source::Git(_, git) => Some(&git.precise),
3384            _ => None,
3385        }
3386    }
3387
3388    /// Return the fork markers for this package, if any.
3389    pub fn fork_markers(&self) -> &[UniversalMarker] {
3390        self.fork_markers.as_slice()
3391    }
3392
3393    /// Returns the [`IndexUrl`] for the package, if it is a registry source.
3394    pub fn index(&self, root: &Path) -> Result<Option<IndexUrl>, LockError> {
3395        match &self.id.source {
3396            Source::Registry(RegistrySource::Url(url)) => {
3397                let index = IndexUrl::from(VerbatimUrl::from_url(
3398                    url.to_url().map_err(LockErrorKind::InvalidUrl)?,
3399                ));
3400                Ok(Some(index))
3401            }
3402            Source::Registry(RegistrySource::Path(path)) => {
3403                let index = IndexUrl::from(
3404                    VerbatimUrl::from_absolute_path(root.join(path))
3405                        .map_err(LockErrorKind::RegistryVerbatimUrl)?,
3406                );
3407                Ok(Some(index))
3408            }
3409            _ => Ok(None),
3410        }
3411    }
3412
3413    /// Returns all the hashes associated with this [`Package`].
3414    fn hashes(&self) -> HashDigests {
3415        let mut hashes = Vec::with_capacity(
3416            usize::from(self.sdist.as_ref().and_then(|sdist| sdist.hash()).is_some())
3417                + self
3418                    .wheels
3419                    .iter()
3420                    .map(|wheel| usize::from(wheel.hash.is_some()))
3421                    .sum::<usize>(),
3422        );
3423        if let Some(ref sdist) = self.sdist {
3424            if let Some(hash) = sdist.hash() {
3425                hashes.push(hash.0.clone());
3426            }
3427        }
3428        for wheel in &self.wheels {
3429            hashes.extend(wheel.hash.as_ref().map(|h| h.0.clone()));
3430            if let Some(zstd) = wheel.zstd.as_ref() {
3431                hashes.extend(zstd.hash.as_ref().map(|h| h.0.clone()));
3432            }
3433        }
3434        HashDigests::from(hashes)
3435    }
3436
3437    /// Returns the [`ResolvedRepositoryReference`] for the package, if it is a Git source.
3438    pub fn as_git_ref(&self) -> Result<Option<ResolvedRepositoryReference>, LockError> {
3439        match &self.id.source {
3440            Source::Git(url, git) => Ok(Some(ResolvedRepositoryReference {
3441                reference: RepositoryReference {
3442                    url: RepositoryUrl::new(&url.to_url().map_err(LockErrorKind::InvalidUrl)?),
3443                    reference: GitReference::from(git.kind.clone()),
3444                },
3445                sha: git.precise,
3446            })),
3447            _ => Ok(None),
3448        }
3449    }
3450
3451    /// Returns `true` if the package is a dynamic source tree.
3452    fn is_dynamic(&self) -> bool {
3453        self.id.version.is_none()
3454    }
3455
3456    /// Returns the extras the package provides, if any.
3457    pub fn provides_extras(&self) -> &[ExtraName] {
3458        &self.metadata.provides_extra
3459    }
3460
3461    /// Returns the dependency groups the package provides, if any.
3462    pub fn dependency_groups(&self) -> &BTreeMap<GroupName, BTreeSet<Requirement>> {
3463        &self.metadata.dependency_groups
3464    }
3465
3466    /// Returns the dependencies of the package.
3467    pub fn dependencies(&self) -> &[Dependency] {
3468        &self.dependencies
3469    }
3470
3471    /// Returns the optional dependencies of the package.
3472    pub fn optional_dependencies(&self) -> &BTreeMap<ExtraName, Vec<Dependency>> {
3473        &self.optional_dependencies
3474    }
3475
3476    /// Returns the resolved PEP 735 dependency groups of the package.
3477    pub fn resolved_dependency_groups(&self) -> &BTreeMap<GroupName, Vec<Dependency>> {
3478        &self.dependency_groups
3479    }
3480
3481    /// Returns an [`InstallTarget`] view for filtering decisions.
3482    pub fn as_install_target(&self) -> InstallTarget<'_> {
3483        InstallTarget {
3484            name: self.name(),
3485            is_local: self.id.source.is_local(),
3486        }
3487    }
3488}
3489
3490/// Attempts to construct a `VerbatimUrl` from the given normalized `Path`.
3491fn verbatim_url(path: &Path, id: &PackageId) -> Result<VerbatimUrl, LockError> {
3492    let url =
3493        VerbatimUrl::from_normalized_path(path).map_err(|err| LockErrorKind::VerbatimUrl {
3494            id: id.clone(),
3495            err,
3496        })?;
3497    Ok(url)
3498}
3499
3500/// Attempts to construct an absolute path from the given `Path`.
3501fn absolute_path(workspace_root: &Path, path: &Path) -> Result<PathBuf, LockError> {
3502    let path = uv_fs::normalize_absolute_path(&workspace_root.join(path))
3503        .map_err(LockErrorKind::AbsolutePath)?;
3504    Ok(path)
3505}
3506
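/// The TOML-deserializable form of a [`Package`], with dependencies and markers still in their
/// simplified wire representation.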
3507#[derive(Clone, Debug, serde::Deserialize)]
3508#[serde(rename_all = "kebab-case")]
3509struct PackageWire {
3510    #[serde(flatten)]
3511    id: PackageId,
3512    #[serde(default)]
3513    metadata: PackageMetadata,
3514    #[serde(default)]
3515    sdist: Option<SourceDist>,
3516    #[serde(default)]
3517    wheels: Vec<Wheel>,
3518    #[serde(default, rename = "resolution-markers")]
3519    fork_markers: Vec<SimplifiedMarkerTree>,
3520    #[serde(default)]
3521    dependencies: Vec<DependencyWire>,
3522    #[serde(default)]
3523    optional_dependencies: BTreeMap<ExtraName, Vec<DependencyWire>>,
3524    #[serde(default, rename = "dev-dependencies", alias = "dependency-groups")]
3525    dependency_groups: BTreeMap<GroupName, Vec<DependencyWire>>,
3526}
3527
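/// The package metadata recorded in the lockfile: the exact `requires-dist`, `provides-extras`,
/// and `requires-dev` entries from the package's own metadata.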
3528#[derive(Clone, Default, Debug, Eq, PartialEq, serde::Deserialize)]
3529#[serde(rename_all = "kebab-case")]
3530struct PackageMetadata {
3531    #[serde(default)]
3532    requires_dist: BTreeSet<Requirement>,
3533    #[serde(default, rename = "provides-extras")]
3534    provides_extra: Box<[ExtraName]>,
3535    #[serde(default, rename = "requires-dev", alias = "dependency-groups")]
3536    dependency_groups: BTreeMap<GroupName, BTreeSet<Requirement>>,
3537}
3538
3539impl PackageWire {
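    /// Converts the wire representation into a [`Package`], expanding simplified markers against
    /// `requires-python` and resolving dependency edges that omit `version`/`source` via the map
    /// of unambiguous package IDs.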
3540    fn unwire(
3541        self,
3542        requires_python: &RequiresPython,
3543        unambiguous_package_ids: &FxHashMap<PackageName, PackageId>,
3544    ) -> Result<Package, LockError> {
3545        // Consistency check: the locked version must agree with the version encoded in each wheel filename.
3546        if !uv_flags::contains(uv_flags::EnvironmentFlags::SKIP_WHEEL_FILENAME_CHECK) {
3547            if let Some(version) = &self.id.version {
3548                for wheel in &self.wheels {
3549                    if *version != wheel.filename.version
3550                        && *version != wheel.filename.version.clone().without_local()
3551                    {
3552                        return Err(LockError::from(LockErrorKind::InconsistentVersions {
3553                            name: self.id.name,
3554                            version: version.clone(),
3555                            wheel: wheel.clone(),
3556                        }));
3557                    }
3558                }
3559                // We can't check the source dist version since it does not need to contain the version
3560                // in the filename.
3561            }
3562        }
3563
3564        let unwire_deps = |deps: Vec<DependencyWire>| -> Result<Vec<Dependency>, LockError> {
3565            deps.into_iter()
3566                .map(|dep| dep.unwire(requires_python, unambiguous_package_ids))
3567                .collect()
3568        };
3569
3570        Ok(Package {
3571            id: self.id,
3572            metadata: self.metadata,
3573            sdist: self.sdist,
3574            wheels: self.wheels,
3575            fork_markers: self
3576                .fork_markers
3577                .into_iter()
3578                .map(|simplified_marker| simplified_marker.into_marker(requires_python))
3579                .map(UniversalMarker::from_combined)
3580                .collect(),
3581            dependencies: unwire_deps(self.dependencies)?,
3582            optional_dependencies: self
3583                .optional_dependencies
3584                .into_iter()
3585                .map(|(extra, deps)| Ok((extra, unwire_deps(deps)?)))
3586                .collect::<Result<_, LockError>>()?,
3587            dependency_groups: self
3588                .dependency_groups
3589                .into_iter()
3590                .map(|(group, deps)| Ok((group, unwire_deps(deps)?)))
3591                .collect::<Result<_, LockError>>()?,
3592        })
3593    }
3594}
3595
3596/// Inside the lockfile, we match a dependency entry to a package entry through a key made up
3597/// of the name, the version, and the source URL.
3598#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, serde::Deserialize)]
3599#[serde(rename_all = "kebab-case")]
3600pub(crate) struct PackageId {
3601    pub(crate) name: PackageName,
3602    pub(crate) version: Option<Version>,
3603    source: Source,
3604}
3605
3606impl PackageId {
3607    fn from_annotated_dist(annotated_dist: &AnnotatedDist, root: &Path) -> Result<Self, LockError> {
3608        // Identify the source of the package.
3609        let source = Source::from_resolved_dist(&annotated_dist.dist, root)?;
3610        // Omit versions for dynamic source trees.
3611        let version = if source.is_source_tree()
3612            && annotated_dist
3613                .metadata
3614                .as_ref()
3615                .is_some_and(|metadata| metadata.dynamic)
3616        {
3617            None
3618        } else {
3619            Some(annotated_dist.version.clone())
3620        };
3621        let name = annotated_dist.name.clone();
3622        Ok(Self {
3623            name,
3624            version,
3625            source,
3626        })
3627    }
3628
3629    /// Writes this package ID inline into the table given.
3630    ///
3631    /// When a map is given, and if the package name in this ID is unambiguous
3632    /// (i.e., it has a count of 1 in the map), then the `version` and `source`
3633    /// fields are omitted. In all other cases, including when a map is not
3634    /// given, the `version` and `source` fields are written.
3635    fn to_toml(&self, dist_count_by_name: Option<&FxHashMap<PackageName, u64>>, table: &mut Table) {
3636        let count = dist_count_by_name.and_then(|map| map.get(&self.name).copied());
3637        table.insert("name", value(self.name.to_string()));
3638        if count.map(|count| count > 1).unwrap_or(true) {
3639            if let Some(version) = &self.version {
3640                table.insert("version", value(version.to_string()));
3641            }
3642            self.source.to_toml(table);
3643        }
3644    }
3645}
3646
3647impl Display for PackageId {
3648    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
3649        if let Some(version) = &self.version {
3650            write!(f, "{}=={} @ {}", self.name, version, self.source)
3651        } else {
3652            write!(f, "{} @ {}", self.name, self.source)
3653        }
3654    }
3655}
3656
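/// A [`PackageId`] as it appears on a dependency edge, where `version` and `source` may be
/// omitted when the package name is unambiguous within the lockfile.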
3657#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, serde::Deserialize)]
3658#[serde(rename_all = "kebab-case")]
3659struct PackageIdForDependency {
3660    name: PackageName,
3661    version: Option<Version>,
3662    source: Option<Source>,
3663}
3664
3665impl PackageIdForDependency {
3666    fn unwire(
3667        self,
3668        unambiguous_package_ids: &FxHashMap<PackageName, PackageId>,
3669    ) -> Result<PackageId, LockError> {
3670        let unambiguous_package_id = unambiguous_package_ids.get(&self.name);
3671        let source = self.source.map(Ok::<_, LockError>).unwrap_or_else(|| {
3672            let Some(package_id) = unambiguous_package_id else {
3673                return Err(LockErrorKind::MissingDependencySource {
3674                    name: self.name.clone(),
3675                }
3676                .into());
3677            };
3678            Ok(package_id.source.clone())
3679        })?;
3680        let version = if let Some(version) = self.version {
3681            Some(version)
3682        } else {
3683            if let Some(package_id) = unambiguous_package_id {
3684                package_id.version.clone()
3685            } else {
3686                // If the package is a source tree, assume that the missing `self.version` field is
3687                // indicative of a dynamic version.
3688                if source.is_source_tree() {
3689                    None
3690                } else {
3691                    return Err(LockErrorKind::MissingDependencyVersion {
3692                        name: self.name.clone(),
3693                    }
3694                    .into());
3695                }
3696            }
3697        };
3698        Ok(PackageId {
3699            name: self.name,
3700            version,
3701            source,
3702        })
3703    }
3704}
3705
3706impl From<PackageId> for PackageIdForDependency {
3707    fn from(id: PackageId) -> Self {
3708        Self {
3709            name: id.name,
3710            version: id.version,
3711            source: Some(id.source),
3712        }
3713    }
3714}
3715
3716/// A unique identifier to differentiate between different sources for the same version of a
3717/// package.
3718///
3719/// NOTE: Care should be taken when adding variants to this enum. Namely, new
3720/// variants should be added without changing the relative ordering of other
3721/// variants. Otherwise, this could cause the lockfile to have a different
3722/// canonical ordering of sources.
3723#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, serde::Deserialize)]
3724#[serde(try_from = "SourceWire")]
3725enum Source {
3726    /// A registry or `--find-links` index.
3727    Registry(RegistrySource),
3728    /// A Git repository.
3729    Git(UrlString, GitSource),
3730    /// A direct HTTP(S) URL.
3731    Direct(UrlString, DirectSource),
3732    /// A path to a local source or built archive.
3733    Path(Box<Path>),
3734    /// A path to a local directory.
3735    Directory(Box<Path>),
3736    /// A path to a local directory that should be installed as editable.
3737    Editable(Box<Path>),
3738    /// A path to a local directory that should not be built or installed.
3739    Virtual(Box<Path>),
3740}
3741
3742impl Source {
3743    fn from_resolved_dist(resolved_dist: &ResolvedDist, root: &Path) -> Result<Self, LockError> {
3744        match *resolved_dist {
3745            // We pass empty installed packages for locking.
3746            ResolvedDist::Installed { .. } => unreachable!(),
3747            ResolvedDist::Installable { ref dist, .. } => Self::from_dist(dist, root),
3748        }
3749    }
3750
3751    fn from_dist(dist: &Dist, root: &Path) -> Result<Self, LockError> {
3752        match *dist {
3753            Dist::Built(ref built_dist) => Self::from_built_dist(built_dist, root),
3754            Dist::Source(ref source_dist) => Self::from_source_dist(source_dist, root),
3755        }
3756    }
3757
3758    fn from_built_dist(built_dist: &BuiltDist, root: &Path) -> Result<Self, LockError> {
3759        match *built_dist {
3760            BuiltDist::Registry(ref reg_dist) => Self::from_registry_built_dist(reg_dist, root),
3761            BuiltDist::DirectUrl(ref direct_dist) => Ok(Self::from_direct_built_dist(direct_dist)),
3762            BuiltDist::Path(ref path_dist) => Self::from_path_built_dist(path_dist, root),
3763        }
3764    }
3765
3766    fn from_source_dist(
3767        source_dist: &uv_distribution_types::SourceDist,
3768        root: &Path,
3769    ) -> Result<Self, LockError> {
3770        match *source_dist {
3771            uv_distribution_types::SourceDist::Registry(ref reg_dist) => {
3772                Self::from_registry_source_dist(reg_dist, root)
3773            }
3774            uv_distribution_types::SourceDist::DirectUrl(ref direct_dist) => {
3775                Ok(Self::from_direct_source_dist(direct_dist))
3776            }
3777            uv_distribution_types::SourceDist::Git(ref git_dist) => {
3778                Ok(Self::from_git_dist(git_dist))
3779            }
3780            uv_distribution_types::SourceDist::Path(ref path_dist) => {
3781                Self::from_path_source_dist(path_dist, root)
3782            }
3783            uv_distribution_types::SourceDist::Directory(ref directory) => {
3784                Self::from_directory_source_dist(directory, root)
3785            }
3786        }
3787    }
3788
3789    fn from_registry_built_dist(
3790        reg_dist: &RegistryBuiltDist,
3791        root: &Path,
3792    ) -> Result<Self, LockError> {
3793        Self::from_index_url(&reg_dist.best_wheel().index, root)
3794    }
3795
3796    fn from_registry_source_dist(
3797        reg_dist: &RegistrySourceDist,
3798        root: &Path,
3799    ) -> Result<Self, LockError> {
3800        Self::from_index_url(&reg_dist.index, root)
3801    }
3802
3803    fn from_direct_built_dist(direct_dist: &DirectUrlBuiltDist) -> Self {
3804        Self::Direct(
3805            normalize_url(direct_dist.url.to_url()),
3806            DirectSource { subdirectory: None },
3807        )
3808    }
3809
3810    fn from_direct_source_dist(direct_dist: &DirectUrlSourceDist) -> Self {
3811        Self::Direct(
3812            normalize_url(direct_dist.url.to_url()),
3813            DirectSource {
3814                subdirectory: direct_dist.subdirectory.clone(),
3815            },
3816        )
3817    }
3818
3819    fn from_path_built_dist(path_dist: &PathBuiltDist, root: &Path) -> Result<Self, LockError> {
3820        let path = relative_to(&path_dist.install_path, root)
3821            .or_else(|_| std::path::absolute(&path_dist.install_path))
3822            .map_err(LockErrorKind::DistributionRelativePath)?;
3823        Ok(Self::Path(path.into_boxed_path()))
3824    }
3825
3826    fn from_path_source_dist(path_dist: &PathSourceDist, root: &Path) -> Result<Self, LockError> {
3827        let path = relative_to(&path_dist.install_path, root)
3828            .or_else(|_| std::path::absolute(&path_dist.install_path))
3829            .map_err(LockErrorKind::DistributionRelativePath)?;
3830        Ok(Self::Path(path.into_boxed_path()))
3831    }
3832
3833    fn from_directory_source_dist(
3834        directory_dist: &DirectorySourceDist,
3835        root: &Path,
3836    ) -> Result<Self, LockError> {
3837        let path = relative_to(&directory_dist.install_path, root)
3838            .or_else(|_| std::path::absolute(&directory_dist.install_path))
3839            .map_err(LockErrorKind::DistributionRelativePath)?;
3840        if directory_dist.editable.unwrap_or(false) {
3841            Ok(Self::Editable(path.into_boxed_path()))
3842        } else if directory_dist.r#virtual.unwrap_or(false) {
3843            Ok(Self::Virtual(path.into_boxed_path()))
3844        } else {
3845            Ok(Self::Directory(path.into_boxed_path()))
3846        }
3847    }
3848
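    /// Derives a lockfile [`Source`] from an index URL: remote indexes are stored with credentials
    /// stripped, while local (file) indexes are stored relative to the workspace root where possible.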
3849    fn from_index_url(index_url: &IndexUrl, root: &Path) -> Result<Self, LockError> {
3850        match index_url {
3851            IndexUrl::Pypi(_) | IndexUrl::Url(_) => {
3852                // Remove any sensitive credentials from the index URL.
3853                let redacted = index_url.without_credentials();
3854                let source = RegistrySource::Url(UrlString::from(redacted.as_ref()));
3855                Ok(Self::Registry(source))
3856            }
3857            IndexUrl::Path(url) => {
3858                let path = url
3859                    .to_file_path()
3860                    .map_err(|()| LockErrorKind::UrlToPath { url: url.to_url() })?;
3861                let path = relative_to(&path, root)
3862                    .or_else(|_| std::path::absolute(&path))
3863                    .map_err(LockErrorKind::IndexRelativePath)?;
3864                let source = RegistrySource::Path(path.into_boxed_path());
3865                Ok(Self::Registry(source))
3866            }
3867        }
3868    }
3869
3870    fn from_git_dist(git_dist: &GitSourceDist) -> Self {
3871        Self::Git(
3872            UrlString::from(locked_git_url(git_dist)),
3873            GitSource {
3874                kind: GitSourceKind::from(git_dist.git.reference().clone()),
3875                precise: git_dist.git.precise().unwrap_or_else(|| {
3876                    panic!("Git distribution is missing a precise hash: {git_dist}")
3877                }),
3878                subdirectory: git_dist.subdirectory.clone(),
3879                lfs: git_dist.git.lfs(),
3880            },
3881        )
3882    }
3883
3884    /// Returns `true` if the source should be considered immutable.
3885    ///
3886    /// We assume that registry sources are immutable. In other words, we expect that once a
3887    /// package-version is published to a registry, its metadata will not change.
3888    ///
3889    /// We also assume that Git sources are immutable, since a Git source encodes a specific commit.
3890    fn is_immutable(&self) -> bool {
3891        matches!(self, Self::Registry(..) | Self::Git(_, _))
3892    }
3893
3894    /// Returns `true` if the source is that of a wheel.
3895    fn is_wheel(&self) -> bool {
3896        match self {
3897            Self::Path(path) => {
3898                matches!(
3899                    DistExtension::from_path(path).ok(),
3900                    Some(DistExtension::Wheel)
3901                )
3902            }
3903            Self::Direct(url, _) => {
3904                matches!(
3905                    DistExtension::from_path(url.as_ref()).ok(),
3906                    Some(DistExtension::Wheel)
3907                )
3908            }
3909            Self::Directory(..) => false,
3910            Self::Editable(..) => false,
3911            Self::Virtual(..) => false,
3912            Self::Git(..) => false,
3913            Self::Registry(..) => false,
3914        }
3915    }
3916
3917    /// Returns `true` if the source is that of a source tree.
3918    fn is_source_tree(&self) -> bool {
3919        match self {
3920            Self::Directory(..) | Self::Editable(..) | Self::Virtual(..) => true,
3921            Self::Path(..) | Self::Git(..) | Self::Registry(..) | Self::Direct(..) => false,
3922        }
3923    }
3924
3925    /// Returns the path to the source tree, if the source is a source tree.
3926    fn as_source_tree(&self) -> Option<&Path> {
3927        match self {
3928            Self::Directory(path) | Self::Editable(path) | Self::Virtual(path) => Some(path),
3929            Self::Path(..) | Self::Git(..) | Self::Registry(..) | Self::Direct(..) => None,
3930        }
3931    }
3932
3933    fn to_toml(&self, table: &mut Table) {
3934        let mut source_table = InlineTable::new();
3935        match self {
3936            Self::Registry(source) => match source {
3937                RegistrySource::Url(url) => {
3938                    source_table.insert("registry", Value::from(url.as_ref()));
3939                }
3940                RegistrySource::Path(path) => {
3941                    source_table.insert(
3942                        "registry",
3943                        Value::from(PortablePath::from(path).to_string()),
3944                    );
3945                }
3946            },
3947            Self::Git(url, _) => {
3948                source_table.insert("git", Value::from(url.as_ref()));
3949            }
3950            Self::Direct(url, DirectSource { subdirectory }) => {
3951                source_table.insert("url", Value::from(url.as_ref()));
3952                if let Some(ref subdirectory) = *subdirectory {
3953                    source_table.insert(
3954                        "subdirectory",
3955                        Value::from(PortablePath::from(subdirectory).to_string()),
3956                    );
3957                }
3958            }
3959            Self::Path(path) => {
3960                source_table.insert("path", Value::from(PortablePath::from(path).to_string()));
3961            }
3962            Self::Directory(path) => {
3963                source_table.insert(
3964                    "directory",
3965                    Value::from(PortablePath::from(path).to_string()),
3966                );
3967            }
3968            Self::Editable(path) => {
3969                source_table.insert(
3970                    "editable",
3971                    Value::from(PortablePath::from(path).to_string()),
3972                );
3973            }
3974            Self::Virtual(path) => {
3975                source_table.insert("virtual", Value::from(PortablePath::from(path).to_string()));
3976            }
3977        }
3978        table.insert("source", value(source_table));
3979    }
3980
3981    /// Check if a package is local by examining its source.
3982    pub(crate) fn is_local(&self) -> bool {
3983        matches!(
3984            self,
3985            Self::Path(_) | Self::Directory(_) | Self::Editable(_) | Self::Virtual(_)
3986        )
3987    }
3988}
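
// Illustrative sketch, not part of the original source: `Source::to_toml` above
// writes a single-key inline table whose key names the source kind (`registry`,
// `git`, `url`, `path`, `directory`, `editable`, or `virtual`). The helper below
// is hypothetical and only demonstrates the registry-by-URL shape, i.e.
// `source = { registry = "https://pypi.org/simple" }`.
#[cfg(test)]
#[allow(dead_code)]
fn example_registry_source_table() -> InlineTable {
    let mut source_table = InlineTable::new();
    // Mirrors the `RegistrySource::Url` arm of `Source::to_toml`.
    source_table.insert("registry", Value::from("https://pypi.org/simple"));
    source_table
}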
3989
3990impl Display for Source {
3991    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
3992        match self {
3993            Self::Registry(RegistrySource::Url(url)) | Self::Git(url, _) | Self::Direct(url, _) => {
3994                write!(f, "{}+{}", self.name(), url)
3995            }
3996            Self::Registry(RegistrySource::Path(path))
3997            | Self::Path(path)
3998            | Self::Directory(path)
3999            | Self::Editable(path)
4000            | Self::Virtual(path) => {
4001                write!(f, "{}+{}", self.name(), PortablePath::from(path))
4002            }
4003        }
4004    }
4005}
4006
4007impl Source {
4008    fn name(&self) -> &str {
4009        match self {
4010            Self::Registry(..) => "registry",
4011            Self::Git(..) => "git",
4012            Self::Direct(..) => "direct",
4013            Self::Path(..) => "path",
4014            Self::Directory(..) => "directory",
4015            Self::Editable(..) => "editable",
4016            Self::Virtual(..) => "virtual",
4017        }
4018    }
4019
4020    /// Returns `Some(true)` to indicate that the source kind _must_ include a
4021    /// hash.
4022    ///
4023    /// Returns `Some(false)` to indicate that the source kind _must not_
4024    /// include a hash.
4025    ///
4026    /// Returns `None` to indicate that the source kind _may_ include a hash.
4027    fn requires_hash(&self) -> Option<bool> {
4028        match self {
4029            Self::Registry(..) => None,
4030            Self::Direct(..) | Self::Path(..) => Some(true),
4031            Self::Git(..) | Self::Directory(..) | Self::Editable(..) | Self::Virtual(..) => {
4032                Some(false)
4033            }
4034        }
4035    }
4036}
4037
4038#[derive(Clone, Debug, serde::Deserialize)]
4039#[serde(untagged, rename_all = "kebab-case")]
4040enum SourceWire {
4041    Registry {
4042        registry: RegistrySourceWire,
4043    },
4044    Git {
4045        git: String,
4046    },
4047    Direct {
4048        url: UrlString,
4049        subdirectory: Option<PortablePathBuf>,
4050    },
4051    Path {
4052        path: PortablePathBuf,
4053    },
4054    Directory {
4055        directory: PortablePathBuf,
4056    },
4057    Editable {
4058        editable: PortablePathBuf,
4059    },
4060    Virtual {
4061        r#virtual: PortablePathBuf,
4062    },
4063}
4064
4065impl TryFrom<SourceWire> for Source {
4066    type Error = LockError;
4067
4068    fn try_from(wire: SourceWire) -> Result<Self, LockError> {
4069        #[allow(clippy::enum_glob_use)]
4070        use self::SourceWire::*;
4071
4072        match wire {
4073            Registry { registry } => Ok(Self::Registry(registry.into())),
4074            Git { git } => {
4075                let url = DisplaySafeUrl::parse(&git)
4076                    .map_err(|err| SourceParseError::InvalidUrl {
4077                        given: git.clone(),
4078                        err,
4079                    })
4080                    .map_err(LockErrorKind::InvalidGitSourceUrl)?;
4081
4082                let git_source = GitSource::from_url(&url)
4083                    .map_err(|err| match err {
4084                        GitSourceError::InvalidSha => SourceParseError::InvalidSha { given: git },
4085                        GitSourceError::MissingSha => SourceParseError::MissingSha { given: git },
4086                    })
4087                    .map_err(LockErrorKind::InvalidGitSourceUrl)?;
4088
4089                Ok(Self::Git(UrlString::from(url), git_source))
4090            }
4091            Direct { url, subdirectory } => Ok(Self::Direct(
4092                url,
4093                DirectSource {
4094                    subdirectory: subdirectory.map(Box::<std::path::Path>::from),
4095                },
4096            )),
4097            Path { path } => Ok(Self::Path(path.into())),
4098            Directory { directory } => Ok(Self::Directory(directory.into())),
4099            Editable { editable } => Ok(Self::Editable(editable.into())),
4100            Virtual { r#virtual } => Ok(Self::Virtual(r#virtual.into())),
4101        }
4102    }
4103}
4104
4105/// The source for a registry, which could be a URL or a relative path.
4106#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
4107enum RegistrySource {
4108    /// Ex) `https://pypi.org/simple`
4109    Url(UrlString),
4110    /// Ex) `../path/to/local/index`
4111    Path(Box<Path>),
4112}
4113
4114impl Display for RegistrySource {
4115    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
4116        match self {
4117            Self::Url(url) => write!(f, "{url}"),
4118            Self::Path(path) => write!(f, "{}", path.display()),
4119        }
4120    }
4121}
4122
4123#[derive(Clone, Debug)]
4124enum RegistrySourceWire {
4125    /// Ex) `https://pypi.org/simple`
4126    Url(UrlString),
4127    /// Ex) `../path/to/local/index`
4128    Path(PortablePathBuf),
4129}
4130
4131impl<'de> serde::de::Deserialize<'de> for RegistrySourceWire {
4132    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
4133    where
4134        D: serde::de::Deserializer<'de>,
4135    {
4136        struct Visitor;
4137
4138        impl serde::de::Visitor<'_> for Visitor {
4139            type Value = RegistrySourceWire;
4140
4141            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
4142                formatter.write_str("a valid URL or a file path")
4143            }
4144
4145            fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
4146            where
4147                E: serde::de::Error,
4148            {
4149                if split_scheme(value).is_some() {
4150                    Ok(
4151                        serde::Deserialize::deserialize(serde::de::value::StrDeserializer::new(
4152                            value,
4153                        ))
4154                        .map(RegistrySourceWire::Url)?,
4155                    )
4156                } else {
4157                    Ok(
4158                        serde::Deserialize::deserialize(serde::de::value::StrDeserializer::new(
4159                            value,
4160                        ))
4161                        .map(RegistrySourceWire::Path)?,
4162                    )
4163                }
4164            }
4165        }
4166
4167        deserializer.deserialize_str(Visitor)
4168    }
4169}
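
// Illustrative note, not part of the original source: the visitor above keys off
// `split_scheme`, so a value with a scheme such as `registry = "https://pypi.org/simple"`
// deserializes as `RegistrySourceWire::Url`, while a scheme-less value such as
// `registry = "../path/to/local/index"` deserializes as `RegistrySourceWire::Path`
// (both example values are hypothetical).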
4170
4171impl From<RegistrySourceWire> for RegistrySource {
4172    fn from(wire: RegistrySourceWire) -> Self {
4173        match wire {
4174            RegistrySourceWire::Url(url) => Self::Url(url),
4175            RegistrySourceWire::Path(path) => Self::Path(path.into()),
4176        }
4177    }
4178}
4179
4180#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, serde::Deserialize)]
4181#[serde(rename_all = "kebab-case")]
4182struct DirectSource {
4183    subdirectory: Option<Box<Path>>,
4184}
4185
4186/// NOTE: Care should be taken when adding fields to this struct. Namely, new
4187/// fields should be added without changing the relative ordering of the existing
4188/// fields, since the derived `Ord` compares fields in declaration order. Otherwise,
4189/// this could cause the lockfile to have a different canonical ordering of package entries.
4190#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
4191struct GitSource {
4192    precise: GitOid,
4193    subdirectory: Option<Box<Path>>,
4194    kind: GitSourceKind,
4195    lfs: GitLfs,
4196}
4197
4198/// An error that occurs when a Git source could not be parsed from a locked Git URL.
4199#[derive(Clone, Debug, Eq, PartialEq)]
4200enum GitSourceError {
4201    InvalidSha,
4202    MissingSha,
4203}
4204
4205impl GitSource {
4206    /// Extracts a Git source reference from the query pairs and the hash
4207    /// fragment in the given URL.
4208    fn from_url(url: &Url) -> Result<Self, GitSourceError> {
4209        let mut kind = GitSourceKind::DefaultBranch;
4210        let mut subdirectory = None;
4211        let mut lfs = GitLfs::Disabled;
4212        for (key, val) in url.query_pairs() {
4213            match &*key {
4214                "tag" => kind = GitSourceKind::Tag(val.into_owned()),
4215                "branch" => kind = GitSourceKind::Branch(val.into_owned()),
4216                "rev" => kind = GitSourceKind::Rev(val.into_owned()),
4217                "subdirectory" => subdirectory = Some(PortablePathBuf::from(val.as_ref()).into()),
4218                "lfs" => lfs = GitLfs::from(val.eq_ignore_ascii_case("true")),
4219                _ => {}
4220            }
4221        }
4222
4223        let precise = GitOid::from_str(url.fragment().ok_or(GitSourceError::MissingSha)?)
4224            .map_err(|_| GitSourceError::InvalidSha)?;
4225
4226        Ok(Self {
4227            precise,
4228            subdirectory,
4229            kind,
4230            lfs,
4231        })
4232    }
4233}
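
// Illustrative sketch, not part of the original source: the shape of a locked Git
// URL that `GitSource::from_url` expects. The query pairs select the reference,
// subdirectory, and LFS flag, and the fragment carries the precise commit. The
// repository URL, tag, and commit below are hypothetical.
#[cfg(test)]
#[allow(dead_code)]
fn example_locked_git_url() -> Result<GitSource, GitSourceError> {
    let url = Url::parse(
        "https://github.com/example/project?tag=v1.0&subdirectory=crates/foo#0123456789abcdef0123456789abcdef01234567",
    )
    .expect("static URL is valid");
    GitSource::from_url(&url)
}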
4234
4235#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, serde::Deserialize)]
4236#[serde(rename_all = "kebab-case")]
4237enum GitSourceKind {
4238    Tag(String),
4239    Branch(String),
4240    Rev(String),
4241    DefaultBranch,
4242}
4243
4244/// Inspired by: <https://discuss.python.org/t/lock-files-again-but-this-time-w-sdists/46593>
4245#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
4246#[serde(rename_all = "kebab-case")]
4247struct SourceDistMetadata {
4248    /// A hash of the source distribution.
4249    hash: Option<Hash>,
4250    /// The size of the source distribution in bytes.
4251    ///
4252    /// This is only present for source distributions that come from registries.
4253    size: Option<u64>,
4254    /// The upload time of the source distribution.
4255    #[serde(alias = "upload_time")]
4256    upload_time: Option<Timestamp>,
4257}
4258
4259/// A URL or file path where the source dist that was
4260/// locked against was found. The location does not need to exist in the
4261/// future, so this should be treated as only a hint to where to look
4262/// and/or recording where the source dist file originally came from.
4263#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
4264#[serde(from = "SourceDistWire")]
4265enum SourceDist {
4266    Url {
4267        url: UrlString,
4268        #[serde(flatten)]
4269        metadata: SourceDistMetadata,
4270    },
4271    Path {
4272        path: Box<Path>,
4273        #[serde(flatten)]
4274        metadata: SourceDistMetadata,
4275    },
4276    Metadata {
4277        #[serde(flatten)]
4278        metadata: SourceDistMetadata,
4279    },
4280}
4281
4282impl SourceDist {
4283    fn filename(&self) -> Option<Cow<'_, str>> {
4284        match self {
4285            Self::Metadata { .. } => None,
4286            Self::Url { url, .. } => url.filename().ok(),
4287            Self::Path { path, .. } => path.file_name().map(|filename| filename.to_string_lossy()),
4288        }
4289    }
4290
4291    fn url(&self) -> Option<&UrlString> {
4292        match self {
4293            Self::Metadata { .. } => None,
4294            Self::Url { url, .. } => Some(url),
4295            Self::Path { .. } => None,
4296        }
4297    }
4298
4299    pub(crate) fn hash(&self) -> Option<&Hash> {
4300        match self {
4301            Self::Metadata { metadata } => metadata.hash.as_ref(),
4302            Self::Url { metadata, .. } => metadata.hash.as_ref(),
4303            Self::Path { metadata, .. } => metadata.hash.as_ref(),
4304        }
4305    }
4306
4307    pub(crate) fn size(&self) -> Option<u64> {
4308        match self {
4309            Self::Metadata { metadata } => metadata.size,
4310            Self::Url { metadata, .. } => metadata.size,
4311            Self::Path { metadata, .. } => metadata.size,
4312        }
4313    }
4314
4315    pub(crate) fn upload_time(&self) -> Option<Timestamp> {
4316        match self {
4317            Self::Metadata { metadata } => metadata.upload_time,
4318            Self::Url { metadata, .. } => metadata.upload_time,
4319            Self::Path { metadata, .. } => metadata.upload_time,
4320        }
4321    }
4322}
4323
4324impl SourceDist {
4325    fn from_annotated_dist(
4326        id: &PackageId,
4327        annotated_dist: &AnnotatedDist,
4328    ) -> Result<Option<Self>, LockError> {
4329        match annotated_dist.dist {
4330            // We pass empty installed packages for locking.
4331            ResolvedDist::Installed { .. } => unreachable!(),
4332            ResolvedDist::Installable { ref dist, .. } => Self::from_dist(
4333                id,
4334                dist,
4335                annotated_dist.hashes.as_slice(),
4336                annotated_dist.index(),
4337            ),
4338        }
4339    }
4340
4341    fn from_dist(
4342        id: &PackageId,
4343        dist: &Dist,
4344        hashes: &[HashDigest],
4345        index: Option<&IndexUrl>,
4346    ) -> Result<Option<Self>, LockError> {
4347        match *dist {
4348            Dist::Built(BuiltDist::Registry(ref built_dist)) => {
4349                let Some(sdist) = built_dist.sdist.as_ref() else {
4350                    return Ok(None);
4351                };
4352                Self::from_registry_dist(sdist, index)
4353            }
4354            Dist::Built(_) => Ok(None),
4355            Dist::Source(ref source_dist) => Self::from_source_dist(id, source_dist, hashes, index),
4356        }
4357    }
4358
4359    fn from_source_dist(
4360        id: &PackageId,
4361        source_dist: &uv_distribution_types::SourceDist,
4362        hashes: &[HashDigest],
4363        index: Option<&IndexUrl>,
4364    ) -> Result<Option<Self>, LockError> {
4365        match *source_dist {
4366            uv_distribution_types::SourceDist::Registry(ref reg_dist) => {
4367                Self::from_registry_dist(reg_dist, index)
4368            }
4369            uv_distribution_types::SourceDist::DirectUrl(_) => {
4370                Self::from_direct_dist(id, hashes).map(Some)
4371            }
4372            uv_distribution_types::SourceDist::Path(_) => {
4373                Self::from_path_dist(id, hashes).map(Some)
4374            }
4375            // An actual sdist entry in the lockfile is only required when it's
4376            // from a registry, a direct URL, or a local path. Otherwise, it's
4377            // strictly redundant with the information in all other kinds of `source`.
4378            uv_distribution_types::SourceDist::Git(_)
4379            | uv_distribution_types::SourceDist::Directory(_) => Ok(None),
4380        }
4381    }
4382
4383    fn from_registry_dist(
4384        reg_dist: &RegistrySourceDist,
4385        index: Option<&IndexUrl>,
4386    ) -> Result<Option<Self>, LockError> {
4387        // Reject distributions from registries that don't match the index URL, as can occur with
4388        // `--find-links`.
4389        if index.is_none_or(|index| *index != reg_dist.index) {
4390            return Ok(None);
4391        }
4392
4393        match &reg_dist.index {
4394            IndexUrl::Pypi(_) | IndexUrl::Url(_) => {
4395                let url = normalize_file_location(&reg_dist.file.url)
4396                    .map_err(LockErrorKind::InvalidUrl)
4397                    .map_err(LockError::from)?;
4398                let hash = reg_dist.file.hashes.iter().max().cloned().map(Hash::from);
4399                let size = reg_dist.file.size;
4400                let upload_time = reg_dist
4401                    .file
4402                    .upload_time_utc_ms
4403                    .map(Timestamp::from_millisecond)
4404                    .transpose()
4405                    .map_err(LockErrorKind::InvalidTimestamp)?;
4406                Ok(Some(Self::Url {
4407                    url,
4408                    metadata: SourceDistMetadata {
4409                        hash,
4410                        size,
4411                        upload_time,
4412                    },
4413                }))
4414            }
4415            IndexUrl::Path(path) => {
4416                let index_path = path
4417                    .to_file_path()
4418                    .map_err(|()| LockErrorKind::UrlToPath { url: path.to_url() })?;
4419                let url = reg_dist
4420                    .file
4421                    .url
4422                    .to_url()
4423                    .map_err(LockErrorKind::InvalidUrl)?;
4424
4425                if url.scheme() == "file" {
4426                    let reg_dist_path = url
4427                        .to_file_path()
4428                        .map_err(|()| LockErrorKind::UrlToPath { url })?;
4429                    let path = relative_to(&reg_dist_path, index_path)
4430                        .or_else(|_| std::path::absolute(&reg_dist_path))
4431                        .map_err(LockErrorKind::DistributionRelativePath)?
4432                        .into_boxed_path();
4433                    let hash = reg_dist.file.hashes.iter().max().cloned().map(Hash::from);
4434                    let size = reg_dist.file.size;
4435                    let upload_time = reg_dist
4436                        .file
4437                        .upload_time_utc_ms
4438                        .map(Timestamp::from_millisecond)
4439                        .transpose()
4440                        .map_err(LockErrorKind::InvalidTimestamp)?;
4441                    Ok(Some(Self::Path {
4442                        path,
4443                        metadata: SourceDistMetadata {
4444                            hash,
4445                            size,
4446                            upload_time,
4447                        },
4448                    }))
4449                } else {
4450                    let url = normalize_file_location(&reg_dist.file.url)
4451                        .map_err(LockErrorKind::InvalidUrl)
4452                        .map_err(LockError::from)?;
4453                    let hash = reg_dist.file.hashes.iter().max().cloned().map(Hash::from);
4454                    let size = reg_dist.file.size;
4455                    let upload_time = reg_dist
4456                        .file
4457                        .upload_time_utc_ms
4458                        .map(Timestamp::from_millisecond)
4459                        .transpose()
4460                        .map_err(LockErrorKind::InvalidTimestamp)?;
4461                    Ok(Some(Self::Url {
4462                        url,
4463                        metadata: SourceDistMetadata {
4464                            hash,
4465                            size,
4466                            upload_time,
4467                        },
4468                    }))
4469                }
4470            }
4471        }
4472    }
4473
4474    fn from_direct_dist(id: &PackageId, hashes: &[HashDigest]) -> Result<Self, LockError> {
4475        let Some(hash) = hashes.iter().max().cloned().map(Hash::from) else {
4476            let kind = LockErrorKind::Hash {
4477                id: id.clone(),
4478                artifact_type: "direct URL source distribution",
4479                expected: true,
4480            };
4481            return Err(kind.into());
4482        };
4483        Ok(Self::Metadata {
4484            metadata: SourceDistMetadata {
4485                hash: Some(hash),
4486                size: None,
4487                upload_time: None,
4488            },
4489        })
4490    }
4491
4492    fn from_path_dist(id: &PackageId, hashes: &[HashDigest]) -> Result<Self, LockError> {
4493        let Some(hash) = hashes.iter().max().cloned().map(Hash::from) else {
4494            let kind = LockErrorKind::Hash {
4495                id: id.clone(),
4496                artifact_type: "path source distribution",
4497                expected: true,
4498            };
4499            return Err(kind.into());
4500        };
4501        Ok(Self::Metadata {
4502            metadata: SourceDistMetadata {
4503                hash: Some(hash),
4504                size: None,
4505                upload_time: None,
4506            },
4507        })
4508    }
4509}
4510
4511#[derive(Clone, Debug, serde::Deserialize)]
4512#[serde(untagged, rename_all = "kebab-case")]
4513enum SourceDistWire {
4514    Url {
4515        url: UrlString,
4516        #[serde(flatten)]
4517        metadata: SourceDistMetadata,
4518    },
4519    Path {
4520        path: PortablePathBuf,
4521        #[serde(flatten)]
4522        metadata: SourceDistMetadata,
4523    },
4524    Metadata {
4525        #[serde(flatten)]
4526        metadata: SourceDistMetadata,
4527    },
4528}
4529
4530impl SourceDist {
4531    /// Returns the TOML representation of this source distribution.
4532    fn to_toml(&self) -> Result<InlineTable, toml_edit::ser::Error> {
4533        let mut table = InlineTable::new();
4534        match self {
4535            Self::Metadata { .. } => {}
4536            Self::Url { url, .. } => {
4537                table.insert("url", Value::from(url.as_ref()));
4538            }
4539            Self::Path { path, .. } => {
4540                table.insert("path", Value::from(PortablePath::from(path).to_string()));
4541            }
4542        }
4543        if let Some(hash) = self.hash() {
4544            table.insert("hash", Value::from(hash.to_string()));
4545        }
4546        if let Some(size) = self.size() {
4547            table.insert(
4548                "size",
4549                toml_edit::ser::ValueSerializer::new().serialize_u64(size)?,
4550            );
4551        }
4552        if let Some(upload_time) = self.upload_time() {
4553            table.insert("upload-time", Value::from(upload_time.to_string()));
4554        }
4555        Ok(table)
4556    }
4557}
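
// Illustrative note, not part of the original source: for a registry sdist,
// `SourceDist::to_toml` above yields an inline table along the lines of
// `sdist = { url = "https://…/example-1.0.0.tar.gz", hash = "sha256:…", size = 10240, upload-time = "…" }`,
// while path sdists use a `path` key instead of `url` (all values here are hypothetical).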
4558
4559impl From<SourceDistWire> for SourceDist {
4560    fn from(wire: SourceDistWire) -> Self {
4561        match wire {
4562            SourceDistWire::Url { url, metadata } => Self::Url { url, metadata },
4563            SourceDistWire::Path { path, metadata } => Self::Path {
4564                path: path.into(),
4565                metadata,
4566            },
4567            SourceDistWire::Metadata { metadata } => Self::Metadata { metadata },
4568        }
4569    }
4570}
4571
4572impl From<GitReference> for GitSourceKind {
4573    fn from(value: GitReference) -> Self {
4574        match value {
4575            GitReference::Branch(branch) => Self::Branch(branch),
4576            GitReference::Tag(tag) => Self::Tag(tag),
4577            GitReference::BranchOrTag(rev) => Self::Rev(rev),
4578            GitReference::BranchOrTagOrCommit(rev) => Self::Rev(rev),
4579            GitReference::NamedRef(rev) => Self::Rev(rev),
4580            GitReference::DefaultBranch => Self::DefaultBranch,
4581        }
4582    }
4583}
4584
4585impl From<GitSourceKind> for GitReference {
4586    fn from(value: GitSourceKind) -> Self {
4587        match value {
4588            GitSourceKind::Branch(branch) => Self::Branch(branch),
4589            GitSourceKind::Tag(tag) => Self::Tag(tag),
4590            GitSourceKind::Rev(rev) => Self::from_rev(rev),
4591            GitSourceKind::DefaultBranch => Self::DefaultBranch,
4592        }
4593    }
4594}
4595
4596/// Construct the lockfile-compatible [`DisplaySafeUrl`] for a [`GitSourceDist`].
4597fn locked_git_url(git_dist: &GitSourceDist) -> DisplaySafeUrl {
4598    let mut url = git_dist.git.repository().clone();
4599
4600    // Remove the credentials.
4601    url.remove_credentials();
4602
4603    // Clear out any existing state.
4604    url.set_fragment(None);
4605    url.set_query(None);
4606
4607    // Put the subdirectory in the query.
4608    if let Some(subdirectory) = git_dist
4609        .subdirectory
4610        .as_deref()
4611        .map(PortablePath::from)
4612        .as_ref()
4613        .map(PortablePath::to_string)
4614    {
4615        url.query_pairs_mut()
4616            .append_pair("subdirectory", &subdirectory);
4617    }
4618
4619    // Put `lfs=true` in the package source Git URL only when LFS is explicitly enabled.
4620    if git_dist.git.lfs().enabled() {
4621        url.query_pairs_mut().append_pair("lfs", "true");
4622    }
4623
4624    // Put the requested reference in the query.
4625    match git_dist.git.reference() {
4626        GitReference::Branch(branch) => {
4627            url.query_pairs_mut().append_pair("branch", branch.as_str());
4628        }
4629        GitReference::Tag(tag) => {
4630            url.query_pairs_mut().append_pair("tag", tag.as_str());
4631        }
4632        GitReference::BranchOrTag(rev)
4633        | GitReference::BranchOrTagOrCommit(rev)
4634        | GitReference::NamedRef(rev) => {
4635            url.query_pairs_mut().append_pair("rev", rev.as_str());
4636        }
4637        GitReference::DefaultBranch => {}
4638    }
4639
4640    // Put the precise commit in the fragment.
4641    url.set_fragment(
4642        git_dist
4643            .git
4644            .precise()
4645            .as_ref()
4646            .map(GitOid::to_string)
4647            .as_deref(),
4648    );
4649
4650    url
4651}
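
// Illustrative note, not part of the original source: the URL built here is the
// inverse of `GitSource::from_url` above, e.g. a branch checkout might serialize
// as `https://github.com/example/project?branch=main#<precise commit sha>`
// (repository and branch are hypothetical).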
4652
4653#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
4654struct ZstdWheel {
4655    hash: Option<Hash>,
4656    size: Option<u64>,
4657}
4658
4659/// Inspired by: <https://discuss.python.org/t/lock-files-again-but-this-time-w-sdists/46593>
4660#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
4661#[serde(try_from = "WheelWire")]
4662struct Wheel {
4663    /// A URL or file path (via `file://`) where the wheel that was locked
4664    /// against was found. The location does not need to exist in the future,
4665    /// so this should be treated as only a hint to where to look and/or
4666    /// recording where the wheel file originally came from.
4667    url: WheelWireSource,
4668    /// A hash of the built distribution.
4669    ///
4670    /// This is only present for wheels that come from registries and direct
4671    /// URLs. Wheels from git or path dependencies do not have hashes
4672    /// associated with them.
4673    hash: Option<Hash>,
4674    /// The size of the built distribution in bytes.
4675    ///
4676    /// This is only present for wheels that come from registries.
4677    size: Option<u64>,
4678    /// The upload time of the built distribution.
4679    ///
4680    /// This is only present for wheels that come from registries.
4681    upload_time: Option<Timestamp>,
4682    /// The filename of the wheel.
4683    ///
4684    /// This isn't part of the wire format since it's redundant with the
4685    /// URL. But we do use it for various things, and thus compute it at
4686    /// deserialization time. Not being able to extract a wheel filename from a
4687    /// wheel URL is thus a deserialization error.
4688    filename: WheelFilename,
4689    /// The zstandard-compressed wheel metadata, if any.
4690    zstd: Option<ZstdWheel>,
4691}
4692
4693impl Wheel {
4694    fn from_annotated_dist(annotated_dist: &AnnotatedDist) -> Result<Vec<Self>, LockError> {
4695        match annotated_dist.dist {
4696            // We pass empty installed packages for locking.
4697            ResolvedDist::Installed { .. } => unreachable!(),
4698            ResolvedDist::Installable { ref dist, .. } => Self::from_dist(
4699                dist,
4700                annotated_dist.hashes.as_slice(),
4701                annotated_dist.index(),
4702            ),
4703        }
4704    }
4705
4706    fn from_dist(
4707        dist: &Dist,
4708        hashes: &[HashDigest],
4709        index: Option<&IndexUrl>,
4710    ) -> Result<Vec<Self>, LockError> {
4711        match *dist {
4712            Dist::Built(ref built_dist) => Self::from_built_dist(built_dist, hashes, index),
4713            Dist::Source(uv_distribution_types::SourceDist::Registry(ref source_dist)) => {
4714                source_dist
4715                    .wheels
4716                    .iter()
4717                    .filter(|wheel| {
4718                        // Reject distributions from registries that don't match the index URL, as can occur with
4719                        // `--find-links`.
4720                        index.is_some_and(|index| *index == wheel.index)
4721                    })
4722                    .map(Self::from_registry_wheel)
4723                    .collect()
4724            }
4725            Dist::Source(_) => Ok(vec![]),
4726        }
4727    }
4728
4729    fn from_built_dist(
4730        built_dist: &BuiltDist,
4731        hashes: &[HashDigest],
4732        index: Option<&IndexUrl>,
4733    ) -> Result<Vec<Self>, LockError> {
4734        match *built_dist {
4735            BuiltDist::Registry(ref reg_dist) => Self::from_registry_dist(reg_dist, index),
4736            BuiltDist::DirectUrl(ref direct_dist) => {
4737                Ok(vec![Self::from_direct_dist(direct_dist, hashes)])
4738            }
4739            BuiltDist::Path(ref path_dist) => Ok(vec![Self::from_path_dist(path_dist, hashes)]),
4740        }
4741    }
4742
4743    fn from_registry_dist(
4744        reg_dist: &RegistryBuiltDist,
4745        index: Option<&IndexUrl>,
4746    ) -> Result<Vec<Self>, LockError> {
4747        reg_dist
4748            .wheels
4749            .iter()
4750            .filter(|wheel| {
4751                // Reject distributions from registries that don't match the index URL, as can occur with
4752                // `--find-links`.
4753                index.is_some_and(|index| *index == wheel.index)
4754            })
4755            .map(Self::from_registry_wheel)
4756            .collect()
4757    }
4758
4759    fn from_registry_wheel(wheel: &RegistryBuiltWheel) -> Result<Self, LockError> {
4760        let url = match &wheel.index {
4761            IndexUrl::Pypi(_) | IndexUrl::Url(_) => {
4762                let url = normalize_file_location(&wheel.file.url)
4763                    .map_err(LockErrorKind::InvalidUrl)
4764                    .map_err(LockError::from)?;
4765                WheelWireSource::Url { url }
4766            }
4767            IndexUrl::Path(path) => {
4768                let index_path = path
4769                    .to_file_path()
4770                    .map_err(|()| LockErrorKind::UrlToPath { url: path.to_url() })?;
4771                let wheel_url = wheel.file.url.to_url().map_err(LockErrorKind::InvalidUrl)?;
4772
4773                if wheel_url.scheme() == "file" {
4774                    let wheel_path = wheel_url
4775                        .to_file_path()
4776                        .map_err(|()| LockErrorKind::UrlToPath { url: wheel_url })?;
4777                    let path = relative_to(&wheel_path, index_path)
4778                        .or_else(|_| std::path::absolute(&wheel_path))
4779                        .map_err(LockErrorKind::DistributionRelativePath)?
4780                        .into_boxed_path();
4781                    WheelWireSource::Path { path }
4782                } else {
4783                    let url = normalize_file_location(&wheel.file.url)
4784                        .map_err(LockErrorKind::InvalidUrl)
4785                        .map_err(LockError::from)?;
4786                    WheelWireSource::Url { url }
4787                }
4788            }
4789        };
4790        let filename = wheel.filename.clone();
4791        let hash = wheel.file.hashes.iter().max().cloned().map(Hash::from);
4792        let size = wheel.file.size;
4793        let upload_time = wheel
4794            .file
4795            .upload_time_utc_ms
4796            .map(Timestamp::from_millisecond)
4797            .transpose()
4798            .map_err(LockErrorKind::InvalidTimestamp)?;
4799        let zstd = wheel.file.zstd.as_ref().map(|zstd| ZstdWheel {
4800            hash: zstd.hashes.iter().max().cloned().map(Hash::from),
4801            size: zstd.size,
4802        });
4803        Ok(Self {
4804            url,
4805            hash,
4806            size,
4807            upload_time,
4808            filename,
4809            zstd,
4810        })
4811    }
4812
4813    fn from_direct_dist(direct_dist: &DirectUrlBuiltDist, hashes: &[HashDigest]) -> Self {
4814        Self {
4815            url: WheelWireSource::Url {
4816                url: normalize_url(direct_dist.url.to_url()),
4817            },
4818            hash: hashes.iter().max().cloned().map(Hash::from),
4819            size: None,
4820            upload_time: None,
4821            filename: direct_dist.filename.clone(),
4822            zstd: None,
4823        }
4824    }
4825
4826    fn from_path_dist(path_dist: &PathBuiltDist, hashes: &[HashDigest]) -> Self {
4827        Self {
4828            url: WheelWireSource::Filename {
4829                filename: path_dist.filename.clone(),
4830            },
4831            hash: hashes.iter().max().cloned().map(Hash::from),
4832            size: None,
4833            upload_time: None,
4834            filename: path_dist.filename.clone(),
4835            zstd: None,
4836        }
4837    }
4838
4839    pub(crate) fn to_registry_wheel(
4840        &self,
4841        source: &RegistrySource,
4842        root: &Path,
4843    ) -> Result<RegistryBuiltWheel, LockError> {
4844        let filename: WheelFilename = self.filename.clone();
4845
4846        match source {
4847            RegistrySource::Url(url) => {
4848                let file_location = match &self.url {
4849                    WheelWireSource::Url { url: file_url } => {
4850                        FileLocation::AbsoluteUrl(file_url.clone())
4851                    }
4852                    WheelWireSource::Path { .. } | WheelWireSource::Filename { .. } => {
4853                        return Err(LockErrorKind::MissingUrl {
4854                            name: filename.name,
4855                            version: filename.version,
4856                        }
4857                        .into());
4858                    }
4859                };
4860                let file = Box::new(uv_distribution_types::File {
4861                    dist_info_metadata: false,
4862                    filename: SmallString::from(filename.to_string()),
4863                    hashes: self.hash.iter().map(|h| h.0.clone()).collect(),
4864                    requires_python: None,
4865                    size: self.size,
4866                    upload_time_utc_ms: self.upload_time.map(Timestamp::as_millisecond),
4867                    url: file_location,
4868                    yanked: None,
4869                    zstd: self
4870                        .zstd
4871                        .as_ref()
4872                        .map(|zstd| uv_distribution_types::Zstd {
4873                            hashes: zstd.hash.iter().map(|h| h.0.clone()).collect(),
4874                            size: zstd.size,
4875                        })
4876                        .map(Box::new),
4877                });
4878                let index = IndexUrl::from(VerbatimUrl::from_url(
4879                    url.to_url().map_err(LockErrorKind::InvalidUrl)?,
4880                ));
4881                Ok(RegistryBuiltWheel {
4882                    filename,
4883                    file,
4884                    index,
4885                })
4886            }
4887            RegistrySource::Path(index_path) => {
4888                let file_location = match &self.url {
4889                    WheelWireSource::Url { url: file_url } => {
4890                        FileLocation::AbsoluteUrl(file_url.clone())
4891                    }
4892                    WheelWireSource::Path { path: file_path } => {
4893                        let file_path = root.join(index_path).join(file_path);
4894                        let file_url =
4895                            DisplaySafeUrl::from_file_path(&file_path).map_err(|()| {
4896                                LockErrorKind::PathToUrl {
4897                                    path: file_path.into_boxed_path(),
4898                                }
4899                            })?;
4900                        FileLocation::AbsoluteUrl(UrlString::from(file_url))
4901                    }
4902                    WheelWireSource::Filename { .. } => {
4903                        return Err(LockErrorKind::MissingPath {
4904                            name: filename.name,
4905                            version: filename.version,
4906                        }
4907                        .into());
4908                    }
4909                };
4910                let file = Box::new(uv_distribution_types::File {
4911                    dist_info_metadata: false,
4912                    filename: SmallString::from(filename.to_string()),
4913                    hashes: self.hash.iter().map(|h| h.0.clone()).collect(),
4914                    requires_python: None,
4915                    size: self.size,
4916                    upload_time_utc_ms: self.upload_time.map(Timestamp::as_millisecond),
4917                    url: file_location,
4918                    yanked: None,
4919                    zstd: self
4920                        .zstd
4921                        .as_ref()
4922                        .map(|zstd| uv_distribution_types::Zstd {
4923                            hashes: zstd.hash.iter().map(|h| h.0.clone()).collect(),
4924                            size: zstd.size,
4925                        })
4926                        .map(Box::new),
4927                });
4928                let index = IndexUrl::from(
4929                    VerbatimUrl::from_absolute_path(root.join(index_path))
4930                        .map_err(LockErrorKind::RegistryVerbatimUrl)?,
4931                );
4932                Ok(RegistryBuiltWheel {
4933                    filename,
4934                    file,
4935                    index,
4936                })
4937            }
4938        }
4939    }
4940}
4941
4942#[derive(Clone, Debug, serde::Deserialize)]
4943#[serde(rename_all = "kebab-case")]
4944struct WheelWire {
4945    #[serde(flatten)]
4946    url: WheelWireSource,
4947    /// A hash of the built distribution.
4948    ///
4949    /// This is only present for wheels that come from registries and direct
4950    /// URLs. Wheels from git or path dependencies do not have hashes
4951    /// associated with them.
4952    hash: Option<Hash>,
4953    /// The size of the built distribution in bytes.
4954    ///
4955    /// This is only present for wheels that come from registries.
4956    size: Option<u64>,
4957    /// The upload time of the built distribution.
4958    ///
4959    /// This is only present for wheels that come from registries.
4960    #[serde(alias = "upload_time")]
4961    upload_time: Option<Timestamp>,
4962    /// The zstandard-compressed wheel metadata, if any.
4963    #[serde(alias = "zstd")]
4964    zstd: Option<ZstdWheel>,
4965}
4966
4967#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
4968#[serde(untagged, rename_all = "kebab-case")]
4969enum WheelWireSource {
4970    /// Used for all wheels that come from remote sources.
4971    Url {
4972        /// A URL where the wheel that was locked against was found. The location
4973        /// does not need to exist in the future, so this should be treated as
4974        /// only a hint to where to look and/or recording where the wheel file
4975        /// originally came from.
4976        url: UrlString,
4977    },
4978    /// Used for wheels that come from local registries (like `--find-links`).
4979    Path {
4980        /// The path to the wheel, relative to the index.
4981        path: Box<Path>,
4982    },
4983    /// Used for path wheels.
4984    ///
4985    /// We only store the filename for path wheels, since we can't store a relative path in the URL.
4986    Filename {
4987        /// We duplicate the filename since a lot of code relies on having the filename on the
4988        /// wheel entry.
4989        filename: WheelFilename,
4990    },
4991}
4992
4993impl Wheel {
4994    /// Returns the TOML representation of this wheel.
4995    fn to_toml(&self) -> Result<InlineTable, toml_edit::ser::Error> {
4996        let mut table = InlineTable::new();
4997        match &self.url {
4998            WheelWireSource::Url { url } => {
4999                table.insert("url", Value::from(url.as_ref()));
5000            }
5001            WheelWireSource::Path { path } => {
5002                table.insert("path", Value::from(PortablePath::from(path).to_string()));
5003            }
5004            WheelWireSource::Filename { filename } => {
5005                table.insert("filename", Value::from(filename.to_string()));
5006            }
5007        }
5008        if let Some(ref hash) = self.hash {
5009            table.insert("hash", Value::from(hash.to_string()));
5010        }
5011        if let Some(size) = self.size {
5012            table.insert(
5013                "size",
5014                toml_edit::ser::ValueSerializer::new().serialize_u64(size)?,
5015            );
5016        }
5017        if let Some(upload_time) = self.upload_time {
5018            table.insert("upload-time", Value::from(upload_time.to_string()));
5019        }
5020        if let Some(zstd) = &self.zstd {
5021            let mut inner = InlineTable::new();
5022            if let Some(ref hash) = zstd.hash {
5023                inner.insert("hash", Value::from(hash.to_string()));
5024            }
5025            if let Some(size) = zstd.size {
5026                inner.insert(
5027                    "size",
5028                    toml_edit::ser::ValueSerializer::new().serialize_u64(size)?,
5029                );
5030            }
5031            table.insert("zstd", Value::from(inner));
5032        }
5033        Ok(table)
5034    }
5035}
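
// Illustrative note, not part of the original source: depending on the wire
// source, `Wheel::to_toml` above yields entries along the lines of
// `{ url = "https://…/example-1.0.0-py3-none-any.whl", hash = "sha256:…", size = 20480 }`,
// `{ path = "wheels/example-1.0.0-py3-none-any.whl", … }` for local registries, or
// `{ filename = "example-1.0.0-py3-none-any.whl", hash = "sha256:…" }` for path wheels
// (all values here are hypothetical).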
5036
5037impl TryFrom<WheelWire> for Wheel {
5038    type Error = String;
5039
5040    fn try_from(wire: WheelWire) -> Result<Self, String> {
5041        let filename = match &wire.url {
5042            WheelWireSource::Url { url } => {
5043                let filename = url.filename().map_err(|err| err.to_string())?;
5044                filename.parse::<WheelFilename>().map_err(|err| {
5045                    format!("failed to parse `{filename}` as wheel filename: {err}")
5046                })?
5047            }
5048            WheelWireSource::Path { path } => {
5049                let filename = path
5050                    .file_name()
5051                    .and_then(|file_name| file_name.to_str())
5052                    .ok_or_else(|| {
5053                        format!("path `{}` has no filename component", path.display())
5054                    })?;
5055                filename.parse::<WheelFilename>().map_err(|err| {
5056                    format!("failed to parse `{filename}` as wheel filename: {err}")
5057                })?
5058            }
5059            WheelWireSource::Filename { filename } => filename.clone(),
5060        };
5061
5062        Ok(Self {
5063            url: wire.url,
5064            hash: wire.hash,
5065            size: wire.size,
5066            upload_time: wire.upload_time,
5067            zstd: wire.zstd,
5068            filename,
5069        })
5070    }
5071}
5072
5073/// A single dependency of a package in a lockfile.
5074#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
5075pub struct Dependency {
5076    package_id: PackageId,
5077    extra: BTreeSet<ExtraName>,
5078    /// A marker simplified from the PEP 508 marker in `complexified_marker`
5079    /// by assuming `requires-python` is satisfied. So if
5080    /// `requires-python = '>=3.8'`, then
5081    /// `python_version >= '3.8' and python_version < '3.12'`
5082    /// gets simplified to `python_version < '3.12'`.
5083    ///
5084    /// Generally speaking, this marker should not be exposed to
5085    /// anything outside this module unless it's for a specialized use
5086    /// case. But specifically, it should never be used to evaluate
5087    /// against a marker environment or for disjointness checks or any
5088    /// other kind of marker algebra.
5089    ///
5090    /// It exists because there are some cases where we do actually
5091    /// want to compare markers in their "simplified" form. For
5092    /// example, when collapsing the extras on duplicate dependencies.
5093    /// Even if a dependency has different complexified markers,
5094    /// they might have identical markers once simplified. And since
5095    /// `requires-python` applies to the entire lock file, it's
5096    /// acceptable to do comparisons on the simplified form.
5097    simplified_marker: SimplifiedMarkerTree,
5098    /// The "complexified" marker is a universal marker whose PEP 508
5099    /// marker can stand on its own independent of `requires-python`.
5100    /// It can be safely used for any kind of marker algebra.
5101    complexified_marker: UniversalMarker,
5102}
5103
5104impl Dependency {
5105    fn new(
5106        requires_python: &RequiresPython,
5107        package_id: PackageId,
5108        extra: BTreeSet<ExtraName>,
5109        complexified_marker: UniversalMarker,
5110    ) -> Self {
5111        let simplified_marker =
5112            SimplifiedMarkerTree::new(requires_python, complexified_marker.combined());
5113        let complexified_marker = simplified_marker.into_marker(requires_python);
5114        Self {
5115            package_id,
5116            extra,
5117            simplified_marker,
5118            complexified_marker: UniversalMarker::from_combined(complexified_marker),
5119        }
5120    }
5121
5122    fn from_annotated_dist(
5123        requires_python: &RequiresPython,
5124        annotated_dist: &AnnotatedDist,
5125        complexified_marker: UniversalMarker,
5126        root: &Path,
5127    ) -> Result<Self, LockError> {
5128        let package_id = PackageId::from_annotated_dist(annotated_dist, root)?;
5129        let extra = annotated_dist.extra.iter().cloned().collect();
5130        Ok(Self::new(
5131            requires_python,
5132            package_id,
5133            extra,
5134            complexified_marker,
5135        ))
5136    }
5137
5138    /// Returns the TOML representation of this dependency.
5139    fn to_toml(
5140        &self,
5141        _requires_python: &RequiresPython,
5142        dist_count_by_name: &FxHashMap<PackageName, u64>,
5143    ) -> Table {
5144        let mut table = Table::new();
5145        self.package_id
5146            .to_toml(Some(dist_count_by_name), &mut table);
5147        if !self.extra.is_empty() {
5148            let extra_array = self
5149                .extra
5150                .iter()
5151                .map(ToString::to_string)
5152                .collect::<Array>();
5153            table.insert("extra", value(extra_array));
5154        }
5155        if let Some(marker) = self.simplified_marker.try_to_string() {
5156            table.insert("marker", value(marker));
5157        }
5158
5159        table
5160    }
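
    // Illustrative note, not part of the original source: a rendered dependency
    // table looks roughly like
    // `{ name = "example", extra = ["cli"], marker = "python_full_version < '3.10'" }`,
    // with any additional identifying fields written by `PackageId::to_toml`
    // (all values here are hypothetical).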
5161
5162    /// Returns the package name of this dependency.
5163    pub fn package_name(&self) -> &PackageName {
5164        &self.package_id.name
5165    }
5166
5167    /// Returns the extras specified on this dependency.
5168    pub fn extra(&self) -> &BTreeSet<ExtraName> {
5169        &self.extra
5170    }
5171}
5172
5173impl Display for Dependency {
5174    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
5175        match (self.extra.is_empty(), self.package_id.version.as_ref()) {
5176            (true, Some(version)) => write!(f, "{}=={}", self.package_id.name, version),
5177            (true, None) => write!(f, "{}", self.package_id.name),
5178            (false, Some(version)) => write!(
5179                f,
5180                "{}[{}]=={}",
5181                self.package_id.name,
5182                self.extra.iter().join(","),
5183                version
5184            ),
5185            (false, None) => write!(
5186                f,
5187                "{}[{}]",
5188                self.package_id.name,
5189                self.extra.iter().join(",")
5190            ),
5191        }
5192    }
5193}
5194
5195/// A single dependency of a package in a lockfile.
5196#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, serde::Deserialize)]
5197#[serde(rename_all = "kebab-case")]
5198struct DependencyWire {
5199    #[serde(flatten)]
5200    package_id: PackageIdForDependency,
5201    #[serde(default)]
5202    extra: BTreeSet<ExtraName>,
5203    #[serde(default)]
5204    marker: SimplifiedMarkerTree,
5205}
5206
5207impl DependencyWire {
5208    fn unwire(
5209        self,
5210        requires_python: &RequiresPython,
5211        unambiguous_package_ids: &FxHashMap<PackageName, PackageId>,
5212    ) -> Result<Dependency, LockError> {
5213        let complexified_marker = self.marker.into_marker(requires_python);
5214        Ok(Dependency {
5215            package_id: self.package_id.unwire(unambiguous_package_ids)?,
5216            extra: self.extra,
5217            simplified_marker: self.marker,
5218            complexified_marker: UniversalMarker::from_combined(complexified_marker),
5219        })
5220    }
5221}
5222
5223/// A single hash for a distribution artifact in a lockfile.
5224///
5225/// A hash is encoded as a single TOML string in the format
5226/// `{algorithm}:{digest}`.
5227#[derive(Clone, Debug, PartialEq, Eq)]
5228struct Hash(HashDigest);
5229
5230impl From<HashDigest> for Hash {
5231    fn from(hd: HashDigest) -> Self {
5232        Self(hd)
5233    }
5234}
5235
5236impl FromStr for Hash {
5237    type Err = HashParseError;
5238
5239    fn from_str(s: &str) -> Result<Self, HashParseError> {
5240        let (algorithm, digest) = s.split_once(':').ok_or(HashParseError(
5241            "expected '{algorithm}:{digest}', but found no ':' in hash digest",
5242        ))?;
5243        let algorithm = algorithm
5244            .parse()
5245            .map_err(|_| HashParseError("unrecognized hash algorithm"))?;
5246        Ok(Self(HashDigest {
5247            algorithm,
5248            digest: digest.into(),
5249        }))
5250    }
5251}
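
// Illustrative sketch, not part of the original source: `Hash` round-trips through
// the `{algorithm}:{digest}` string form via `FromStr` above and `Display` below.
// The digest value is hypothetical.
#[cfg(test)]
#[allow(dead_code)]
fn example_hash_round_trip() -> Result<String, HashParseError> {
    let hash = Hash::from_str(
        "sha256:0000000000000000000000000000000000000000000000000000000000000000",
    )?;
    Ok(hash.to_string())
}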
5252
5253impl Display for Hash {
5254    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
5255        write!(f, "{}:{}", self.0.algorithm, self.0.digest)
5256    }
5257}
5258
5259impl<'de> serde::Deserialize<'de> for Hash {
5260    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
5261    where
5262        D: serde::de::Deserializer<'de>,
5263    {
5264        struct Visitor;
5265
5266        impl serde::de::Visitor<'_> for Visitor {
5267            type Value = Hash;
5268
5269            fn expecting(&self, f: &mut Formatter) -> std::fmt::Result {
5270                f.write_str("a string")
5271            }
5272
5273            fn visit_str<E: serde::de::Error>(self, v: &str) -> Result<Self::Value, E> {
5274                Hash::from_str(v).map_err(serde::de::Error::custom)
5275            }
5276        }
5277
5278        deserializer.deserialize_str(Visitor)
5279    }
5280}
5281
5282impl From<Hash> for Hashes {
5283    fn from(value: Hash) -> Self {
5284        match value.0.algorithm {
5285            HashAlgorithm::Md5 => Self {
5286                md5: Some(value.0.digest),
5287                sha256: None,
5288                sha384: None,
5289                sha512: None,
5290                blake2b: None,
5291            },
5292            HashAlgorithm::Sha256 => Self {
5293                md5: None,
5294                sha256: Some(value.0.digest),
5295                sha384: None,
5296                sha512: None,
5297                blake2b: None,
5298            },
5299            HashAlgorithm::Sha384 => Self {
5300                md5: None,
5301                sha256: None,
5302                sha384: Some(value.0.digest),
5303                sha512: None,
5304                blake2b: None,
5305            },
5306            HashAlgorithm::Sha512 => Self {
5307                md5: None,
5308                sha256: None,
5309                sha384: None,
5310                sha512: Some(value.0.digest),
5311                blake2b: None,
5312            },
5313            HashAlgorithm::Blake2b => Self {
5314                md5: None,
5315                sha256: None,
5316                sha384: None,
5317                sha512: None,
5318                blake2b: Some(value.0.digest),
5319            },
5320        }
5321    }
5322}
5323
5324/// Convert a [`FileLocation`] into a normalized [`UrlString`].
5325fn normalize_file_location(location: &FileLocation) -> Result<UrlString, ToUrlError> {
5326    match location {
5327        FileLocation::AbsoluteUrl(absolute) => Ok(absolute.without_fragment().into_owned()),
5328        FileLocation::RelativeUrl(_, _) => Ok(normalize_url(location.to_url()?)),
5329    }
5330}
5331
5332/// Convert a [`DisplaySafeUrl`] into a normalized [`UrlString`] by removing the fragment.
5333fn normalize_url(mut url: DisplaySafeUrl) -> UrlString {
5334    url.set_fragment(None);
5335    UrlString::from(url)
5336}
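// For example (hypothetical URL), any fragment, such as an embedded hash, is dropped:
//
//     https://example.com/anyio-4.3.0-py3-none-any.whl#sha256=abc123
//       -> https://example.com/anyio-4.3.0-py3-none-any.whl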
5337
5338/// Normalize a [`Requirement`], which could come from a lockfile, a `pyproject.toml`, etc.
5339///
5340/// Performs the following steps:
5341///
5342/// 1. Removes any sensitive credentials.
5343/// 2. Ensures that the lock and install paths are appropriately framed with respect to the
5344///    current [`Workspace`].
5345/// 3. Removes the `origin` field, which is only used in `requirements.txt`.
5346/// 4. Simplifies the markers using the provided [`RequiresPython`] instance.
5347fn normalize_requirement(
5348    mut requirement: Requirement,
5349    root: &Path,
5350    requires_python: &RequiresPython,
5351) -> Result<Requirement, LockError> {
5352    // Sort the extras and groups for consistency.
5353    requirement.extras.sort();
5354    requirement.groups.sort();
5355
5356    // Normalize the requirement source.
5357    match requirement.source {
5358        RequirementSource::Git {
5359            git,
5360            subdirectory,
5361            url: _,
5362        } => {
5363            // Reconstruct the Git URL.
5364            let git = {
5365                let mut repository = git.repository().clone();
5366
5367                // Remove the credentials.
5368                repository.remove_credentials();
5369
5370                // Remove the fragment and query from the URL; they're already present in the source.
5371                repository.set_fragment(None);
5372                repository.set_query(None);
5373
5374                GitUrl::from_fields(
5375                    repository,
5376                    git.reference().clone(),
5377                    git.precise(),
5378                    git.lfs(),
5379                )?
5380            };
5381
5382            // Reconstruct the PEP 508 URL from the underlying data.
5383            let url = DisplaySafeUrl::from(ParsedGitUrl {
5384                url: git.clone(),
5385                subdirectory: subdirectory.clone(),
5386            });
5387
5388            Ok(Requirement {
5389                name: requirement.name,
5390                extras: requirement.extras,
5391                groups: requirement.groups,
5392                marker: requires_python.simplify_markers(requirement.marker),
5393                source: RequirementSource::Git {
5394                    git,
5395                    subdirectory,
5396                    url: VerbatimUrl::from_url(url),
5397                },
5398                origin: None,
5399            })
5400        }
5401        RequirementSource::Path {
5402            install_path,
5403            ext,
5404            url: _,
5405        } => {
5406            let install_path =
5407                uv_fs::normalize_path_buf(root.join(&install_path)).into_boxed_path();
5408            let url = VerbatimUrl::from_normalized_path(&install_path)
5409                .map_err(LockErrorKind::RequirementVerbatimUrl)?;
5410
5411            Ok(Requirement {
5412                name: requirement.name,
5413                extras: requirement.extras,
5414                groups: requirement.groups,
5415                marker: requires_python.simplify_markers(requirement.marker),
5416                source: RequirementSource::Path {
5417                    install_path,
5418                    ext,
5419                    url,
5420                },
5421                origin: None,
5422            })
5423        }
5424        RequirementSource::Directory {
5425            install_path,
5426            editable,
5427            r#virtual,
5428            url: _,
5429        } => {
5430            let install_path =
5431                uv_fs::normalize_path_buf(root.join(&install_path)).into_boxed_path();
5432            let url = VerbatimUrl::from_normalized_path(&install_path)
5433                .map_err(LockErrorKind::RequirementVerbatimUrl)?;
5434
5435            Ok(Requirement {
5436                name: requirement.name,
5437                extras: requirement.extras,
5438                groups: requirement.groups,
5439                marker: requires_python.simplify_markers(requirement.marker),
5440                source: RequirementSource::Directory {
5441                    install_path,
5442                    editable: Some(editable.unwrap_or(false)),
5443                    r#virtual: Some(r#virtual.unwrap_or(false)),
5444                    url,
5445                },
5446                origin: None,
5447            })
5448        }
5449        RequirementSource::Registry {
5450            specifier,
5451            index,
5452            conflict,
5453        } => {
5454            // Round-trip the index to remove anything apart from the URL.
5455            let index = index
5456                .map(|index| index.url.into_url())
5457                .map(|mut index| {
5458                    index.remove_credentials();
5459                    index
5460                })
5461                .map(|index| IndexMetadata::from(IndexUrl::from(VerbatimUrl::from_url(index))));
5462            Ok(Requirement {
5463                name: requirement.name,
5464                extras: requirement.extras,
5465                groups: requirement.groups,
5466                marker: requires_python.simplify_markers(requirement.marker),
5467                source: RequirementSource::Registry {
5468                    specifier,
5469                    index,
5470                    conflict,
5471                },
5472                origin: None,
5473            })
5474        }
5475        RequirementSource::Url {
5476            mut location,
5477            subdirectory,
5478            ext,
5479            url: _,
5480        } => {
5481            // Remove the credentials.
5482            location.remove_credentials();
5483
5484            // Remove the fragment from the URL; it's already present in the source.
5485            location.set_fragment(None);
5486
5487            // Reconstruct the PEP 508 URL from the underlying data.
5488            let url = DisplaySafeUrl::from(ParsedArchiveUrl {
5489                url: location.clone(),
5490                subdirectory: subdirectory.clone(),
5491                ext,
5492            });
5493
5494            Ok(Requirement {
5495                name: requirement.name,
5496                extras: requirement.extras,
5497                groups: requirement.groups,
5498                marker: requires_python.simplify_markers(requirement.marker),
5499                source: RequirementSource::Url {
5500                    location,
5501                    subdirectory,
5502                    ext,
5503                    url: VerbatimUrl::from_url(url),
5504                },
5505                origin: None,
5506            })
5507        }
5508    }
5509}
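// As a sketch of step 2 above (hypothetical paths): with a workspace root of
// `/workspace` and a `Path` requirement whose `install_path` is `libs/foo-1.0.tar.gz`,
// the normalized requirement stores the absolute, normalized path
// `/workspace/libs/foo-1.0.tar.gz` along with a `file://` URL derived from it.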
5510
5511#[derive(Debug)]
5512pub struct LockError {
5513    kind: Box<LockErrorKind>,
5514    hint: Option<WheelTagHint>,
5515}
5516
5517impl std::error::Error for LockError {
5518    fn source(&self) -> Option<&(dyn Error + 'static)> {
5519        self.kind.source()
5520    }
5521}
5522
5523impl std::fmt::Display for LockError {
5524    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
5525        write!(f, "{}", self.kind)?;
5526        if let Some(hint) = &self.hint {
5527            write!(f, "\n\n{hint}")?;
5528        }
5529        Ok(())
5530    }
5531}
5532
5533impl LockError {
5534    /// Returns true if the [`LockError`] is a resolver error.
5535    pub fn is_resolution(&self) -> bool {
5536        matches!(&*self.kind, LockErrorKind::Resolution { .. })
5537    }
5538}
5539
5540impl<E> From<E> for LockError
5541where
5542    LockErrorKind: From<E>,
5543{
5544    fn from(err: E) -> Self {
5545        Self {
5546            kind: Box::new(LockErrorKind::from(err)),
5547            hint: None,
5548        }
5549    }
5550}
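// Note that this blanket conversion attaches no hint: any error convertible into
// `LockErrorKind` becomes a bare `LockError` via `?`, and a `WheelTagHint` is
// attached separately where wheel-tag incompatibilities are detected.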
5551
5552#[derive(Debug, Clone, PartialEq, Eq)]
5553#[expect(clippy::enum_variant_names)]
5554enum WheelTagHint {
5555    /// None of the available wheels for a package have a compatible Python language tag (e.g.,
5556    /// `cp310` in `cp310-abi3-manylinux_2_17_x86_64.whl`).
5557    LanguageTags {
5558        package: PackageName,
5559        version: Option<Version>,
5560        tags: BTreeSet<LanguageTag>,
5561        best: Option<LanguageTag>,
5562    },
5563    /// None of the available wheels for a package have a compatible ABI tag (e.g., `abi3` in
5564    /// `cp310-abi3-manylinux_2_17_x86_64.whl`).
5565    AbiTags {
5566        package: PackageName,
5567        version: Option<Version>,
5568        tags: BTreeSet<AbiTag>,
5569        best: Option<AbiTag>,
5570    },
5571    /// None of the available wheels for a package have a compatible platform tag (e.g.,
5572    /// `manylinux_2_17_x86_64` in `cp310-abi3-manylinux_2_17_x86_64.whl`).
5573    PlatformTags {
5574        package: PackageName,
5575        version: Option<Version>,
5576        tags: BTreeSet<PlatformTag>,
5577        best: Option<PlatformTag>,
5578        markers: MarkerEnvironment,
5579    },
5580}
5581
5582impl WheelTagHint {
5583    /// Generate a [`WheelTagHint`] from the given (incompatible) wheels.
5584    fn from_wheels(
5585        name: &PackageName,
5586        version: Option<&Version>,
5587        filenames: &[&WheelFilename],
5588        tags: &Tags,
5589        markers: &MarkerEnvironment,
5590    ) -> Option<Self> {
5591        let incompatibility = filenames
5592            .iter()
5593            .map(|filename| {
5594                tags.compatibility(
5595                    filename.python_tags(),
5596                    filename.abi_tags(),
5597                    filename.platform_tags(),
5598                )
5599            })
5600            .max()?;
5601        match incompatibility {
5602            TagCompatibility::Incompatible(IncompatibleTag::Python) => {
5603                let best = tags.python_tag();
5604                let tags = Self::python_tags(filenames.iter().copied()).collect::<BTreeSet<_>>();
5605                if tags.is_empty() {
5606                    None
5607                } else {
5608                    Some(Self::LanguageTags {
5609                        package: name.clone(),
5610                        version: version.cloned(),
5611                        tags,
5612                        best,
5613                    })
5614                }
5615            }
5616            TagCompatibility::Incompatible(IncompatibleTag::Abi) => {
5617                let best = tags.abi_tag();
5618                let tags = Self::abi_tags(filenames.iter().copied())
5619                    // Ignore `none`, which is universally compatible.
5620                    //
5621                    // As an example, `none` can appear here if we're solving for Python 3.13, and
5622                    // the distribution includes a wheel for `cp312-none-macosx_11_0_arm64`.
5623                    //
5624                    // In that case, the wheel isn't compatible, but when solving for Python 3.13,
5625                    // the `cp312` Python tag _can_ be compatible (e.g., for `cp312-abi3-macosx_11_0_arm64.whl`),
5626                    // so this is considered an ABI incompatibility rather than Python incompatibility.
5627                    .filter(|tag| *tag != AbiTag::None)
5628                    .collect::<BTreeSet<_>>();
5629                if tags.is_empty() {
5630                    None
5631                } else {
5632                    Some(Self::AbiTags {
5633                        package: name.clone(),
5634                        version: version.cloned(),
5635                        tags,
5636                        best,
5637                    })
5638                }
5639            }
5640            TagCompatibility::Incompatible(IncompatibleTag::Platform) => {
5641                let best = tags.platform_tag().cloned();
5642                let incompatible_tags = Self::platform_tags(filenames.iter().copied(), tags)
5643                    .cloned()
5644                    .collect::<BTreeSet<_>>();
5645                if incompatible_tags.is_empty() {
5646                    None
5647                } else {
5648                    Some(Self::PlatformTags {
5649                        package: name.clone(),
5650                        version: version.cloned(),
5651                        tags: incompatible_tags,
5652                        best,
5653                        markers: markers.clone(),
5654                    })
5655                }
5656            }
5657            _ => None,
5658        }
5659    }
5660
5661    /// Returns an iterator over the compatible Python tags of the available wheels.
5662    fn python_tags<'a>(
5663        filenames: impl Iterator<Item = &'a WheelFilename> + 'a,
5664    ) -> impl Iterator<Item = LanguageTag> + 'a {
5665        filenames.flat_map(WheelFilename::python_tags).copied()
5666    }
5667
5668    /// Returns an iterator over the compatible ABI tags of the available wheels.
5669    fn abi_tags<'a>(
5670        filenames: impl Iterator<Item = &'a WheelFilename> + 'a,
5671    ) -> impl Iterator<Item = AbiTag> + 'a {
5672        filenames.flat_map(WheelFilename::abi_tags).copied()
5673    }
5674
5675    /// Returns the set of platform tags for the distribution that are ABI-compatible with the given
5676    /// tags.
5677    fn platform_tags<'a>(
5678        filenames: impl Iterator<Item = &'a WheelFilename> + 'a,
5679        tags: &'a Tags,
5680    ) -> impl Iterator<Item = &'a PlatformTag> + 'a {
5681        filenames.flat_map(move |filename| {
5682            if filename.python_tags().iter().any(|wheel_py| {
5683                filename
5684                    .abi_tags()
5685                    .iter()
5686                    .any(|wheel_abi| tags.is_compatible_abi(*wheel_py, *wheel_abi))
5687            }) {
5688                filename.platform_tags().iter()
5689            } else {
5690                [].iter()
5691            }
5692        })
5693    }
5694
5695    fn suggest_environment_marker(markers: &MarkerEnvironment) -> String {
5696        let sys_platform = markers.sys_platform();
5697        let platform_machine = markers.platform_machine();
5698
5699        // Generate the marker string based on actual environment values
5700        if platform_machine.is_empty() {
5701            format!("sys_platform == '{sys_platform}'")
5702        } else {
5703            format!("sys_platform == '{sys_platform}' and platform_machine == '{platform_machine}'")
5704        }
5705    }
5706}
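// For example (hypothetical environment), on Linux x86_64 the suggested marker is
//
//     sys_platform == 'linux' and platform_machine == 'x86_64'
//
// and it degrades to just `sys_platform == 'linux'` when `platform_machine` is empty.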
5707
5708impl std::fmt::Display for WheelTagHint {
5709    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
5710        match self {
5711            Self::LanguageTags {
5712                package,
5713                version,
5714                tags,
5715                best,
5716            } => {
5717                if let Some(best) = best {
5718                    let s = if tags.len() == 1 { "" } else { "s" };
5719                    let best = if let Some(pretty) = best.pretty() {
5720                        format!("{} (`{}`)", pretty.cyan(), best.cyan())
5721                    } else {
5722                        format!("{}", best.cyan())
5723                    };
5724                    if let Some(version) = version {
5725                        write!(
5726                            f,
5727                            "{}{} You're using {}, but `{}` ({}) only has wheels with the following Python implementation tag{s}: {}",
5728                            "hint".bold().cyan(),
5729                            ":".bold(),
5730                            best,
5731                            package.cyan(),
5732                            format!("v{version}").cyan(),
5733                            tags.iter()
5734                                .map(|tag| format!("`{}`", tag.cyan()))
5735                                .join(", "),
5736                        )
5737                    } else {
5738                        write!(
5739                            f,
5740                            "{}{} You're using {}, but `{}` only has wheels with the following Python implementation tag{s}: {}",
5741                            "hint".bold().cyan(),
5742                            ":".bold(),
5743                            best,
5744                            package.cyan(),
5745                            tags.iter()
5746                                .map(|tag| format!("`{}`", tag.cyan()))
5747                                .join(", "),
5748                        )
5749                    }
5750                } else {
5751                    let s = if tags.len() == 1 { "" } else { "s" };
5752                    if let Some(version) = version {
5753                        write!(
5754                            f,
5755                            "{}{} Wheels are available for `{}` ({}) with the following Python implementation tag{s}: {}",
5756                            "hint".bold().cyan(),
5757                            ":".bold(),
5758                            package.cyan(),
5759                            format!("v{version}").cyan(),
5760                            tags.iter()
5761                                .map(|tag| format!("`{}`", tag.cyan()))
5762                                .join(", "),
5763                        )
5764                    } else {
5765                        write!(
5766                            f,
5767                            "{}{} Wheels are available for `{}` with the following Python implementation tag{s}: {}",
5768                            "hint".bold().cyan(),
5769                            ":".bold(),
5770                            package.cyan(),
5771                            tags.iter()
5772                                .map(|tag| format!("`{}`", tag.cyan()))
5773                                .join(", "),
5774                        )
5775                    }
5776                }
5777            }
5778            Self::AbiTags {
5779                package,
5780                version,
5781                tags,
5782                best,
5783            } => {
5784                if let Some(best) = best {
5785                    let s = if tags.len() == 1 { "" } else { "s" };
5786                    let best = if let Some(pretty) = best.pretty() {
5787                        format!("{} (`{}`)", pretty.cyan(), best.cyan())
5788                    } else {
5789                        format!("{}", best.cyan())
5790                    };
5791                    if let Some(version) = version {
5792                        write!(
5793                            f,
5794                            "{}{} You're using {}, but `{}` ({}) only has wheels with the following Python ABI tag{s}: {}",
5795                            "hint".bold().cyan(),
5796                            ":".bold(),
5797                            best,
5798                            package.cyan(),
5799                            format!("v{version}").cyan(),
5800                            tags.iter()
5801                                .map(|tag| format!("`{}`", tag.cyan()))
5802                                .join(", "),
5803                        )
5804                    } else {
5805                        write!(
5806                            f,
5807                            "{}{} You're using {}, but `{}` only has wheels with the following Python ABI tag{s}: {}",
5808                            "hint".bold().cyan(),
5809                            ":".bold(),
5810                            best,
5811                            package.cyan(),
5812                            tags.iter()
5813                                .map(|tag| format!("`{}`", tag.cyan()))
5814                                .join(", "),
5815                        )
5816                    }
5817                } else {
5818                    let s = if tags.len() == 1 { "" } else { "s" };
5819                    if let Some(version) = version {
5820                        write!(
5821                            f,
5822                            "{}{} Wheels are available for `{}` ({}) with the following Python ABI tag{s}: {}",
5823                            "hint".bold().cyan(),
5824                            ":".bold(),
5825                            package.cyan(),
5826                            format!("v{version}").cyan(),
5827                            tags.iter()
5828                                .map(|tag| format!("`{}`", tag.cyan()))
5829                                .join(", "),
5830                        )
5831                    } else {
5832                        write!(
5833                            f,
5834                            "{}{} Wheels are available for `{}` with the following Python ABI tag{s}: {}",
5835                            "hint".bold().cyan(),
5836                            ":".bold(),
5837                            package.cyan(),
5838                            tags.iter()
5839                                .map(|tag| format!("`{}`", tag.cyan()))
5840                                .join(", "),
5841                        )
5842                    }
5843                }
5844            }
5845            Self::PlatformTags {
5846                package,
5847                version,
5848                tags,
5849                best,
5850                markers,
5851            } => {
5852                let s = if tags.len() == 1 { "" } else { "s" };
5853                if let Some(best) = best {
5854                    let example_marker = Self::suggest_environment_marker(markers);
5855                    let best = if let Some(pretty) = best.pretty() {
5856                        format!("{} (`{}`)", pretty.cyan(), best.cyan())
5857                    } else {
5858                        format!("`{}`", best.cyan())
5859                    };
5860                    let package_ref = if let Some(version) = version {
5861                        format!("`{}` ({})", package.cyan(), format!("v{version}").cyan())
5862                    } else {
5863                        format!("`{}`", package.cyan())
5864                    };
5865                    write!(
5866                        f,
5867                        "{}{} You're on {}, but {} only has wheels for the following platform{s}: {}; consider adding {} to `{}` to ensure uv resolves to a version with compatible wheels",
5868                        "hint".bold().cyan(),
5869                        ":".bold(),
5870                        best,
5871                        package_ref,
5872                        tags.iter()
5873                            .map(|tag| format!("`{}`", tag.cyan()))
5874                            .join(", "),
5875                        format!("\"{example_marker}\"").cyan(),
5876                        "tool.uv.required-environments".green()
5877                    )
5878                } else {
5879                    if let Some(version) = version {
5880                        write!(
5881                            f,
5882                            "{}{} Wheels are available for `{}` ({}) on the following platform{s}: {}",
5883                            "hint".bold().cyan(),
5884                            ":".bold(),
5885                            package.cyan(),
5886                            format!("v{version}").cyan(),
5887                            tags.iter()
5888                                .map(|tag| format!("`{}`", tag.cyan()))
5889                                .join(", "),
5890                        )
5891                    } else {
5892                        write!(
5893                            f,
5894                            "{}{} Wheels are available for `{}` on the following platform{s}: {}",
5895                            "hint".bold().cyan(),
5896                            ":".bold(),
5897                            package.cyan(),
5898                            tags.iter()
5899                                .map(|tag| format!("`{}`", tag.cyan()))
5900                                .join(", "),
5901                        )
5902                    }
5903                }
5904            }
5905        }
5906    }
5907}
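// Rendered, a platform hint reads roughly as follows (hypothetical package, tags,
// and environment):
//
//     hint: You're on macOS (`macosx_14_0_arm64`), but `foo` (v1.0.0) only has wheels
//     for the following platform: `manylinux_2_17_x86_64`; consider adding
//     "sys_platform == 'darwin' and platform_machine == 'arm64'" to
//     `tool.uv.required-environments` to ensure uv resolves to a version with
//     compatible wheels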
5908
5909/// An error that occurs when generating a `Lock` data structure.
5910///
5911/// These errors are sometimes the result of programming bugs. For
5912/// example, if two or more duplicate distributions are given to
5913/// `Lock::new`, then an error is returned; in such cases, the fault
5914/// likely lies with the caller.
5915#[derive(Debug, thiserror::Error)]
5916enum LockErrorKind {
5917    /// An error that occurs when multiple packages with the same
5918    /// ID are found.
5919    #[error("Found duplicate package `{id}`", id = id.cyan())]
5920    DuplicatePackage {
5921        /// The ID of the conflicting package.
5922        id: PackageId,
5923    },
5924    /// An error that occurs when there are multiple dependencies for the
5925    /// same package that have identical identifiers.
5926    #[error("For package `{id}`, found duplicate dependency `{dependency}`", id = id.cyan(), dependency = dependency.cyan())]
5927    DuplicateDependency {
5928        /// The ID of the package for which a duplicate dependency was
5929        /// found.
5930        id: PackageId,
5931        /// The ID of the conflicting dependency.
5932        dependency: Dependency,
5933    },
5934    /// An error that occurs when there are multiple dependencies for the
5935    /// same package that have identical identifiers, as part of
5936    /// that package's optional dependencies.
5937    #[error("For package `{id}`, found duplicate dependency `{dependency}`", id = format!("{id}[{extra}]").cyan(), dependency = dependency.cyan())]
5938    DuplicateOptionalDependency {
5939        /// The ID of the package for which a duplicate dependency was
5940        /// found.
5941        id: PackageId,
5942        /// The name of the extra.
5943        extra: ExtraName,
5944        /// The ID of the conflicting dependency.
5945        dependency: Dependency,
5946    },
5947    /// An error that occurs when there are multiple dependencies for the
5948    /// same package that have identical identifiers, as part of
5949    /// that package's development dependencies.
5950    #[error("For package `{id}`, found duplicate dependency `{dependency}`", id = format!("{id}:{group}").cyan(), dependency = dependency.cyan())]
5951    DuplicateDevDependency {
5952        /// The ID of the package for which a duplicate dependency was
5953        /// found.
5954        id: PackageId,
5955        /// The name of the dev dependency group.
5956        group: GroupName,
5957        /// The ID of the conflicting dependency.
5958        dependency: Dependency,
5959    },
5960    /// An error that occurs when the URL to a file for a wheel or
5961    /// source dist could not be converted to a structured `url::Url`.
5962    #[error(transparent)]
5963    InvalidUrl(
5964        /// The underlying error that occurred. This includes the
5965        /// errant URL in its error message.
5966        #[from]
5967        ToUrlError,
5968    ),
5969    /// An error that occurs when the extension can't be determined
5970    /// for a given wheel or source distribution.
5971    #[error("Failed to parse file extension for `{id}`; expected one of: {err}", id = id.cyan())]
5972    MissingExtension {
5973        /// The filename that was expected to have an extension.
5974        id: PackageId,
5975        /// The list of valid extensions that were expected.
5976        err: ExtensionError,
5977    },
5978    /// Failed to parse a Git source URL.
5979    #[error("Failed to parse Git URL")]
5980    InvalidGitSourceUrl(
5981        /// The underlying error that occurred. This includes the
5982        /// errant URL in the message.
5983        #[source]
5984        SourceParseError,
5985    ),
5986    #[error("Failed to parse timestamp")]
5987    InvalidTimestamp(
5988        /// The underlying error that occurred. This includes the
5989        /// errant timestamp in the message.
5990        #[source]
5991        jiff::Error,
5992    ),
5993    /// An error that occurs when there's an unrecognized dependency.
5994    ///
5995    /// That is, a dependency for a package that isn't in the lockfile.
5996    #[error("For package `{id}`, found dependency `{dependency}` with no locked package", id = id.cyan(), dependency = dependency.cyan())]
5997    UnrecognizedDependency {
5998        /// The ID of the package that has an unrecognized dependency.
5999        id: PackageId,
6000        /// The ID of the dependency that doesn't have a corresponding package
6001        /// entry.
6002        dependency: Dependency,
6003    },
6004    /// An error that occurs when a hash is expected (or not) for a particular
6005    /// artifact, but one was not found (or was).
6006    #[error("Since the package `{id}` comes from a {source} dependency, a hash was {expected} but one was not found for {artifact_type}", id = id.cyan(), source = id.source.name(), expected = if *expected { "expected" } else { "not expected" })]
6007    Hash {
6008        /// The ID of the package that has a missing hash.
6009        id: PackageId,
6010        /// The specific type of artifact, e.g., "source package"
6011        /// or "wheel".
6012        artifact_type: &'static str,
6013        /// When true, a hash is expected to be present.
6014        expected: bool,
6015    },
6016    /// An error that occurs when a package is included with an extra name,
6017    /// but no corresponding base package (i.e., without the extra) exists.
6018    #[error("Found package `{id}` with extra `{extra}` but no base package", id = id.cyan(), extra = extra.cyan())]
6019    MissingExtraBase {
6020        /// The ID of the package that has a missing base.
6021        id: PackageId,
6022        /// The extra name that was found.
6023        extra: ExtraName,
6024    },
6025    /// An error that occurs when a package is included with a development
6026    /// dependency group, but no corresponding base package (i.e., without
6027    /// the group) exists.
6028    #[error("Found package `{id}` with development dependency group `{group}` but no base package", id = id.cyan())]
6029    MissingDevBase {
6030        /// The ID of the package that has a missing base.
6031        id: PackageId,
6032        /// The development dependency group that was found.
6033        group: GroupName,
6034    },
6035    /// An error that occurs from an invalid lockfile where a wheel comes from a non-wheel source
6036    /// such as a directory.
6037    #[error("Wheels cannot come from {source_type} sources")]
6038    InvalidWheelSource {
6039        /// The ID of the distribution with an invalid wheel source.
6040        id: PackageId,
6041        /// The kind of the invalid source.
6042        source_type: &'static str,
6043    },
6044    /// An error that occurs when a distribution indicates that it is sourced from a remote
6045    /// registry, but is missing a URL.
6046    #[error("Found registry distribution `{name}` ({version}) without a valid URL", name = name.cyan(), version = format!("v{version}").cyan())]
6047    MissingUrl {
6048        /// The name of the distribution that is missing a URL.
6049        name: PackageName,
6050        /// The version of the distribution that is missing a URL.
6051        version: Version,
6052    },
6053    /// An error that occurs when a distribution indicates that it is sourced from a local registry,
6054    /// but is missing a path.
6055    #[error("Found registry distribution `{name}` ({version}) without a valid path", name = name.cyan(), version = format!("v{version}").cyan())]
6056    MissingPath {
6057        /// The name of the distribution that is missing a path.
6058        name: PackageName,
6059        /// The version of the distribution that is missing a path.
6060        version: Version,
6061    },
6062    /// An error that occurs when a distribution indicates that it is sourced from a registry, but
6063    /// is missing a filename.
6064    #[error("Found registry distribution `{id}` without a valid filename", id = id.cyan())]
6065    MissingFilename {
6066        /// The ID of the distribution that is missing a filename.
6067        id: PackageId,
6068    },
6069    /// An error that occurs when a distribution is included with neither wheels nor a source
6070    /// distribution.
6071    #[error("Distribution `{id}` can't be installed because it doesn't have a source distribution or wheel for the current platform", id = id.cyan())]
6072    NeitherSourceDistNorWheel {
6073        /// The ID of the distribution.
6074        id: PackageId,
6075    },
6076    /// An error that occurs when a distribution is marked as both `--no-binary` and `--no-build`.
6077    #[error("Distribution `{id}` can't be installed because it is marked as both `--no-binary` and `--no-build`", id = id.cyan())]
6078    NoBinaryNoBuild {
6079        /// The ID of the distribution.
6080        id: PackageId,
6081    },
6082    /// An error that occurs when a distribution is marked as `--no-binary`, but no source
6083    /// distribution is available.
6084    #[error("Distribution `{id}` can't be installed because it is marked as `--no-binary` but has no source distribution", id = id.cyan())]
6085    NoBinary {
6086        /// The ID of the distribution.
6087        id: PackageId,
6088    },
6089    /// An error that occurs when a distribution is marked as `--no-build`, but no binary
6090    /// distribution is available.
6091    #[error("Distribution `{id}` can't be installed because it is marked as `--no-build` but has no binary distribution", id = id.cyan())]
6092    NoBuild {
6093        /// The ID of the distribution.
6094        id: PackageId,
6095    },
6096    /// An error that occurs when a wheel-only distribution is incompatible with the current
6097    /// platform.
6098    #[error("Distribution `{id}` can't be installed because the binary distribution is incompatible with the current platform", id = id.cyan())]
6099    IncompatibleWheelOnly {
6100        /// The ID of the distribution.
6101        id: PackageId,
6102    },
6103    /// An error that occurs when a wheel-only source is marked as `--no-binary`.
6104    #[error("Distribution `{id}` can't be installed because it is marked as `--no-binary` but is itself a binary distribution", id = id.cyan())]
6105    NoBinaryWheelOnly {
6106        /// The ID of the distribution.
6107        id: PackageId,
6108    },
6109    /// An error that occurs when converting between URLs and paths.
6110    #[error("Found dependency `{id}` with no locked distribution", id = id.cyan())]
6111    VerbatimUrl {
6112        /// The ID of the distribution whose URL could not be converted.
6113        id: PackageId,
6114        /// The inner error we forward.
6115        #[source]
6116        err: VerbatimUrlError,
6117    },
6118    /// An error that occurs when computing the relative path between the workspace and a distribution.
6119    #[error("Could not compute relative path between workspace and distribution")]
6120    DistributionRelativePath(
6121        /// The inner error we forward.
6122        #[source]
6123        io::Error,
6124    ),
6125    /// An error that occurs when converting an index URL to a relative path.
6126    #[error("Could not compute relative path between workspace and index")]
6127    IndexRelativePath(
6128        /// The inner error we forward.
6129        #[source]
6130        io::Error,
6131    ),
6132    /// An error that occurs when converting a lockfile path from relative to absolute.
6133    #[error("Could not compute absolute path from workspace root and lockfile path")]
6134    AbsolutePath(
6135        /// The inner error we forward.
6136        #[source]
6137        io::Error,
6138    ),
6139    /// An error that occurs when an ambiguous `package.dependency` is
6140    /// missing a `version` field.
6141    #[error("Dependency `{name}` has missing `version` field but has more than one matching package", name = name.cyan())]
6142    MissingDependencyVersion {
6143        /// The name of the dependency that is missing a `version` field.
6144        name: PackageName,
6145    },
6146    /// An error that occurs when an ambiguous `package.dependency` is
6147    /// missing a `source` field.
6148    #[error("Dependency `{name}` has missing `source` field but has more than one matching package", name = name.cyan())]
6149    MissingDependencySource {
6150        /// The name of the dependency that is missing a `source` field.
6151        name: PackageName,
6152    },
6153    /// An error that occurs when computing the relative path between the workspace and a requirement.
6154    #[error("Could not compute relative path between workspace and requirement")]
6155    RequirementRelativePath(
6156        /// The inner error we forward.
6157        #[source]
6158        io::Error,
6159    ),
6160    /// An error that occurs when converting a requirement path to a URL.
6161    #[error("Could not convert between URL and path")]
6162    RequirementVerbatimUrl(
6163        /// The inner error we forward.
6164        #[source]
6165        VerbatimUrlError,
6166    ),
6167    /// An error that occurs when parsing a registry's index URL.
6168    #[error("Could not convert between URL and path")]
6169    RegistryVerbatimUrl(
6170        /// The inner error we forward.
6171        #[source]
6172        VerbatimUrlError,
6173    ),
6174    /// An error that occurs when converting a path to a URL.
6175    #[error("Failed to convert path to URL: {path}", path = path.display().cyan())]
6176    PathToUrl { path: Box<Path> },
6177    /// An error that occurs when converting a URL to a path.
6178    #[error("Failed to convert URL to path: {url}", url = url.cyan())]
6179    UrlToPath { url: DisplaySafeUrl },
6180    /// An error that occurs when multiple packages with the same
6181    /// name are found when identifying the root packages.
6182    #[error("Found multiple packages matching `{name}`", name = name.cyan())]
6183    MultipleRootPackages {
6184        /// The name of the package.
6185        name: PackageName,
6186    },
6187    /// An error that occurs when a root package can't be found.
6188    #[error("Could not find root package `{name}`", name = name.cyan())]
6189    MissingRootPackage {
6190        /// The name of the package.
6191        name: PackageName,
6192    },
6193    /// An error that occurs when resolving metadata for a package.
6194    #[error("Failed to generate package metadata for `{id}`", id = id.cyan())]
6195    Resolution {
6196        /// The ID of the distribution that failed to resolve.
6197        id: PackageId,
6198        /// The inner error we forward.
6199        #[source]
6200        err: uv_distribution::Error,
6201    },
6202    /// A package has inconsistent versions in a single entry.
6203    // Using name instead of id since the version in the id is part of the conflict.
6204    #[error("The entry for package `{name}` ({version}) has wheel `{wheel_filename}` with inconsistent version ({wheel_version}), which indicates a malformed wheel. If this is intentional, set `{env_var}`.", name = name.cyan(), wheel_filename = wheel.filename, wheel_version = wheel.filename.version, env_var = "UV_SKIP_WHEEL_FILENAME_CHECK=1".green())]
6205    InconsistentVersions {
6206        /// The name of the package with the inconsistent entry.
6207        name: PackageName,
6208        /// The version of the package with the inconsistent entry.
6209        version: Version,
6210        /// The wheel with the inconsistent version.
6211        wheel: Wheel,
6212    },
6213    #[error(
6214        "Found conflicting extras `{package1}[{extra1}]` \
6215         and `{package2}[{extra2}]` enabled simultaneously"
6216    )]
6217    ConflictingExtra {
6218        package1: PackageName,
6219        extra1: ExtraName,
6220        package2: PackageName,
6221        extra2: ExtraName,
6222    },
6223    #[error(transparent)]
6224    GitUrlParse(#[from] GitUrlParseError),
6225    #[error("Failed to read `{path}`")]
6226    UnreadablePyprojectToml {
6227        path: PathBuf,
6228        #[source]
6229        err: std::io::Error,
6230    },
6231    #[error("Failed to parse `{path}`")]
6232    InvalidPyprojectToml {
6233        path: PathBuf,
6234        #[source]
6235        err: toml::de::Error,
6236    },
6237    /// An error that occurs when a workspace member has a non-local source.
6238    #[error("Workspace member `{id}` has non-local source", id = id.cyan())]
6239    NonLocalWorkspaceMember {
6240        /// The ID of the workspace member with an invalid source.
6241        id: PackageId,
6242    },
6243}
6244
6245/// An error that occurs when a source string could not be parsed.
6246#[derive(Debug, thiserror::Error)]
6247enum SourceParseError {
6248    /// An error that occurs when the URL in the source is invalid.
6249    #[error("Invalid URL in source `{given}`")]
6250    InvalidUrl {
6251        /// The source string given.
6252        given: String,
6253        /// The URL parse error.
6254        #[source]
6255        err: DisplaySafeUrlError,
6256    },
6257    /// An error that occurs when a Git URL is missing a precise commit SHA.
6258    #[error("Missing SHA in source `{given}`")]
6259    MissingSha {
6260        /// The source string given.
6261        given: String,
6262    },
6263    /// An error that occurs when a Git URL has an invalid SHA.
6264    #[error("Invalid SHA in source `{given}`")]
6265    InvalidSha {
6266        /// The source string given.
6267        given: String,
6268    },
6269}
6270
6271/// An error that occurs when a hash digest could not be parsed.
6272#[derive(Clone, Debug, Eq, PartialEq)]
6273struct HashParseError(&'static str);
6274
6275impl std::error::Error for HashParseError {}
6276
6277impl Display for HashParseError {
6278    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
6279        Display::fmt(self.0, f)
6280    }
6281}
6282
6283/// Format an array so that each element is on its own line and has a trailing comma.
6284///
6285/// Example:
6286///
6287/// ```toml
6288/// dependencies = [
6289///     { name = "idna" },
6290///     { name = "sniffio" },
6291/// ]
6292/// ```
6293fn each_element_on_its_line_array(elements: impl Iterator<Item = impl Into<Value>>) -> Array {
6294    let mut array = elements
6295        .map(|item| {
6296            let mut value = item.into();
6297            // Each dependency is on its own line and indented.
6298            value.decor_mut().set_prefix("\n    ");
6299            value
6300        })
6301        .collect::<Array>();
6302    // With a trailing comma, inserting another entry doesn't change the preceding line,
6303    // reducing the diff noise.
6304    array.set_trailing_comma(true);
6305    // The line break between the last element's comma and the closing square bracket.
6306    array.set_trailing("\n");
6307    array
6308}
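// A sketch of a call site (hypothetical names and document structure):
//
//     let dependencies = each_element_on_its_line_array(["idna", "sniffio"].into_iter().map(|name| {
//         let mut entry = InlineTable::new();
//         entry.insert("name", Value::from(name));
//         Value::from(entry)
//     }));
//     package_table.insert("dependencies", value(dependencies));
//
// which serializes to the multi-line form shown in the doc comment above.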
6309
6310/// Returns the simplified, stringified version of each marker given.
6311///
6312/// Note that the returned marker strings include conflict markers only when the
6313/// simplified PEP 508 markers alone would overlap (and thus be ambiguous).
6314fn simplified_universal_markers(
6315    markers: &[UniversalMarker],
6316    requires_python: &RequiresPython,
6317) -> Vec<String> {
6318    let mut pep508_only = vec![];
6319    let mut seen = FxHashSet::default();
6320    for marker in markers {
6321        let simplified =
6322            SimplifiedMarkerTree::new(requires_python, marker.pep508()).as_simplified_marker_tree();
6323        if seen.insert(simplified) {
6324            pep508_only.push(simplified);
6325        }
6326    }
6327    let any_overlap = pep508_only
6328        .iter()
6329        .tuple_combinations()
6330        .any(|(&marker1, &marker2)| !marker1.is_disjoint(marker2));
6331    let markers = if !any_overlap {
6332        pep508_only
6333    } else {
6334        markers
6335            .iter()
6336            .map(|marker| {
6337                SimplifiedMarkerTree::new(requires_python, marker.combined())
6338                    .as_simplified_marker_tree()
6339            })
6340            .collect()
6341    };
6342    markers
6343        .into_iter()
6344        .filter_map(MarkerTree::try_to_string)
6345        .collect()
6346}
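// Concretely: if two forks both simplify to `sys_platform == 'linux'` once conflict
// markers are dropped, the PEP 508 projections overlap, so the combined markers
// (conflict markers included) are emitted instead to keep the entries distinguishable.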
6347
6348#[cfg(test)]
6349mod tests {
6350    use uv_warnings::anstream;
6351
6352    use super::*;
6353
6354    /// Assert a given display snapshot, stripping ANSI color codes.
6355    macro_rules! assert_stripped_snapshot {
6356        ($expr:expr, @$snapshot:literal) => {{
6357            let expr = format!("{}", $expr);
6358            let expr = format!("{}", anstream::adapter::strip_str(&expr));
6359            insta::assert_snapshot!(expr, @$snapshot);
6360        }};
6361    }
6362
6363    #[test]
6364    fn missing_dependency_source_unambiguous() {
6365        let data = r#"
6366version = 1
6367requires-python = ">=3.12"
6368
6369[[package]]
6370name = "a"
6371version = "0.1.0"
6372source = { registry = "https://pypi.org/simple" }
6373sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6374
6375[[package]]
6376name = "b"
6377version = "0.1.0"
6378source = { registry = "https://pypi.org/simple" }
6379sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6380
6381[[package.dependencies]]
6382name = "a"
6383version = "0.1.0"
6384"#;
6385        let result: Result<Lock, _> = toml::from_str(data);
6386        insta::assert_debug_snapshot!(result);
6387    }
6388
6389    #[test]
6390    fn missing_dependency_version_unambiguous() {
6391        let data = r#"
6392version = 1
6393requires-python = ">=3.12"
6394
6395[[package]]
6396name = "a"
6397version = "0.1.0"
6398source = { registry = "https://pypi.org/simple" }
6399sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6400
6401[[package]]
6402name = "b"
6403version = "0.1.0"
6404source = { registry = "https://pypi.org/simple" }
6405sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6406
6407[[package.dependencies]]
6408name = "a"
6409source = { registry = "https://pypi.org/simple" }
6410"#;
6411        let result: Result<Lock, _> = toml::from_str(data);
6412        insta::assert_debug_snapshot!(result);
6413    }
6414
6415    #[test]
6416    fn missing_dependency_source_version_unambiguous() {
6417        let data = r#"
6418version = 1
6419requires-python = ">=3.12"
6420
6421[[package]]
6422name = "a"
6423version = "0.1.0"
6424source = { registry = "https://pypi.org/simple" }
6425sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6426
6427[[package]]
6428name = "b"
6429version = "0.1.0"
6430source = { registry = "https://pypi.org/simple" }
6431sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6432
6433[[package.dependencies]]
6434name = "a"
6435"#;
6436        let result: Result<Lock, _> = toml::from_str(data);
6437        insta::assert_debug_snapshot!(result);
6438    }
6439
6440    #[test]
6441    fn missing_dependency_source_ambiguous() {
6442        let data = r#"
6443version = 1
6444requires-python = ">=3.12"
6445
6446[[package]]
6447name = "a"
6448version = "0.1.0"
6449source = { registry = "https://pypi.org/simple" }
6450sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6451
6452[[package]]
6453name = "a"
6454version = "0.1.1"
6455source = { registry = "https://pypi.org/simple" }
6456sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6457
6458[[package]]
6459name = "b"
6460version = "0.1.0"
6461source = { registry = "https://pypi.org/simple" }
6462sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6463
6464[[package.dependencies]]
6465name = "a"
6466version = "0.1.0"
6467"#;
6468        let result = toml::from_str::<Lock>(data).unwrap_err();
6469        assert_stripped_snapshot!(result, @"Dependency `a` has missing `source` field but has more than one matching package");
6470    }
6471
6472    #[test]
6473    fn missing_dependency_version_ambiguous() {
6474        let data = r#"
6475version = 1
6476requires-python = ">=3.12"
6477
6478[[package]]
6479name = "a"
6480version = "0.1.0"
6481source = { registry = "https://pypi.org/simple" }
6482sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6483
6484[[package]]
6485name = "a"
6486version = "0.1.1"
6487source = { registry = "https://pypi.org/simple" }
6488sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6489
6490[[package]]
6491name = "b"
6492version = "0.1.0"
6493source = { registry = "https://pypi.org/simple" }
6494sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6495
6496[[package.dependencies]]
6497name = "a"
6498source = { registry = "https://pypi.org/simple" }
6499"#;
6500        let result = toml::from_str::<Lock>(data).unwrap_err();
6501        assert_stripped_snapshot!(result, @"Dependency `a` has missing `version` field but has more than one matching package");
6502    }
6503
6504    #[test]
6505    fn missing_dependency_source_version_ambiguous() {
6506        let data = r#"
6507version = 1
6508requires-python = ">=3.12"
6509
6510[[package]]
6511name = "a"
6512version = "0.1.0"
6513source = { registry = "https://pypi.org/simple" }
6514sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6515
6516[[package]]
6517name = "a"
6518version = "0.1.1"
6519source = { registry = "https://pypi.org/simple" }
6520sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6521
6522[[package]]
6523name = "b"
6524version = "0.1.0"
6525source = { registry = "https://pypi.org/simple" }
6526sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6527
6528[[package.dependencies]]
6529name = "a"
6530"#;
6531        let result = toml::from_str::<Lock>(data).unwrap_err();
6532        assert_stripped_snapshot!(result, @"Dependency `a` has missing `source` field but has more than one matching package");
6533    }
6534
6535    #[test]
6536    fn missing_dependency_version_dynamic() {
6537        let data = r#"
6538version = 1
6539requires-python = ">=3.12"
6540
6541[[package]]
6542name = "a"
6543source = { editable = "path/to/a" }
6544
6545[[package]]
6546name = "a"
6547version = "0.1.1"
6548source = { registry = "https://pypi.org/simple" }
6549sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6550
6551[[package]]
6552name = "b"
6553version = "0.1.0"
6554source = { registry = "https://pypi.org/simple" }
6555sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6556
6557[[package.dependencies]]
6558name = "a"
6559source = { editable = "path/to/a" }
6560"#;
6561        let result = toml::from_str::<Lock>(data);
6562        insta::assert_debug_snapshot!(result);
6563    }
6564
6565    #[test]
6566    fn hash_optional_missing() {
6567        let data = r#"
6568version = 1
6569requires-python = ">=3.12"
6570
6571[[package]]
6572name = "anyio"
6573version = "4.3.0"
6574source = { registry = "https://pypi.org/simple" }
6575wheels = [{ url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl" }]
6576"#;
6577        let result: Result<Lock, _> = toml::from_str(data);
6578        insta::assert_debug_snapshot!(result);
6579    }
6580
6581    #[test]
6582    fn hash_optional_present() {
6583        let data = r#"
6584version = 1
6585requires-python = ">=3.12"
6586
6587[[package]]
6588name = "anyio"
6589version = "4.3.0"
6590source = { registry = "https://pypi.org/simple" }
6591wheels = [{ url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8" }]
6592"#;
6593        let result: Result<Lock, _> = toml::from_str(data);
6594        insta::assert_debug_snapshot!(result);
6595    }
6596
6597    #[test]
6598    fn hash_required_present() {
6599        let data = r#"
6600version = 1
6601requires-python = ">=3.12"
6602
6603[[package]]
6604name = "anyio"
6605version = "4.3.0"
6606source = { path = "file:///foo/bar" }
6607wheels = [{ url = "file:///foo/bar/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8" }]
6608"#;
6609        let result: Result<Lock, _> = toml::from_str(data);
6610        insta::assert_debug_snapshot!(result);
6611    }
6612
6613    #[test]
6614    fn source_direct_no_subdir() {
6615        let data = r#"
6616version = 1
6617requires-python = ">=3.12"
6618
6619[[package]]
6620name = "anyio"
6621version = "4.3.0"
6622source = { url = "https://burntsushi.net" }
6623"#;
6624        let result: Result<Lock, _> = toml::from_str(data);
6625        insta::assert_debug_snapshot!(result);
6626    }
6627
6628    #[test]
6629    fn source_direct_has_subdir() {
6630        let data = r#"
6631version = 1
6632requires-python = ">=3.12"
6633
6634[[package]]
6635name = "anyio"
6636version = "4.3.0"
6637source = { url = "https://burntsushi.net", subdirectory = "wat/foo/bar" }
6638"#;
6639        let result: Result<Lock, _> = toml::from_str(data);
6640        insta::assert_debug_snapshot!(result);
6641    }
6642
6643    #[test]
6644    fn source_directory() {
6645        let data = r#"
6646version = 1
6647requires-python = ">=3.12"
6648
6649[[package]]
6650name = "anyio"
6651version = "4.3.0"
6652source = { directory = "path/to/dir" }
6653"#;
6654        let result: Result<Lock, _> = toml::from_str(data);
6655        insta::assert_debug_snapshot!(result);
6656    }
6657
6658    #[test]
6659    fn source_editable() {
6660        let data = r#"
6661version = 1
6662requires-python = ">=3.12"
6663
6664[[package]]
6665name = "anyio"
6666version = "4.3.0"
6667source = { editable = "path/to/dir" }
6668"#;
6669        let result: Result<Lock, _> = toml::from_str(data);
6670        insta::assert_debug_snapshot!(result);
6671    }
6672}