uv_resolver/lock/mod.rs

1use std::borrow::Cow;
2use std::collections::{BTreeMap, BTreeSet, VecDeque};
3use std::error::Error;
4use std::fmt::{Debug, Display, Formatter};
5use std::io;
6use std::path::{Path, PathBuf};
7use std::str::FromStr;
8use std::sync::{Arc, LazyLock};
9
10use itertools::Itertools;
11use jiff::Timestamp;
12use owo_colors::OwoColorize;
13use petgraph::graph::NodeIndex;
14use petgraph::visit::EdgeRef;
15use rustc_hash::{FxHashMap, FxHashSet};
16use serde::Serializer;
17use toml_edit::{Array, ArrayOfTables, InlineTable, Item, Table, Value, value};
18use tracing::debug;
19use url::Url;
20
21use uv_cache_key::RepositoryUrl;
22use uv_configuration::{BuildOptions, Constraints, InstallTarget};
23use uv_distribution::{DistributionDatabase, FlatRequiresDist};
24use uv_distribution_filename::{
25    BuildTag, DistExtension, ExtensionError, SourceDistExtension, WheelFilename,
26};
27use uv_distribution_types::{
28    BuiltDist, DependencyMetadata, DirectUrlBuiltDist, DirectUrlSourceDist, DirectorySourceDist,
29    Dist, DistributionMetadata, FileLocation, GitSourceDist, IndexLocations, IndexMetadata,
30    IndexUrl, Name, PathBuiltDist, PathSourceDist, RegistryBuiltDist, RegistryBuiltWheel,
31    RegistrySourceDist, RemoteSource, Requirement, RequirementSource, RequiresPython, ResolvedDist,
32    SimplifiedMarkerTree, StaticMetadata, ToUrlError, UrlString,
33};
34use uv_fs::{PortablePath, PortablePathBuf, relative_to};
35use uv_git::{RepositoryReference, ResolvedRepositoryReference};
36use uv_git_types::{GitLfs, GitOid, GitReference, GitUrl, GitUrlParseError};
37use uv_normalize::{ExtraName, GroupName, PackageName};
38use uv_pep440::Version;
39use uv_pep508::{MarkerEnvironment, MarkerTree, VerbatimUrl, VerbatimUrlError, split_scheme};
40use uv_platform_tags::{
41    AbiTag, IncompatibleTag, LanguageTag, PlatformTag, TagCompatibility, TagPriority, Tags,
42};
43use uv_pypi_types::{
44    ConflictKind, Conflicts, HashAlgorithm, HashDigest, HashDigests, Hashes, ParsedArchiveUrl,
45    ParsedGitUrl, PyProjectToml,
46};
47use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};
48use uv_small_str::SmallString;
49use uv_types::{BuildContext, HashStrategy};
50use uv_workspace::{Editability, WorkspaceMember};
51
52use crate::fork_strategy::ForkStrategy;
53pub(crate) use crate::lock::export::PylockTomlPackage;
54pub use crate::lock::export::RequirementsTxtExport;
55pub use crate::lock::export::{PylockToml, PylockTomlErrorKind, cyclonedx_json};
56pub use crate::lock::installable::Installable;
57pub use crate::lock::map::PackageMap;
58pub use crate::lock::tree::TreeDisplay;
59use crate::resolution::{AnnotatedDist, ResolutionGraphNode};
60use crate::universal_marker::{ConflictMarker, UniversalMarker};
61use crate::{
62    ExcludeNewer, ExcludeNewerPackage, ExcludeNewerTimestamp, InMemoryIndex, MetadataResponse,
63    PrereleaseMode, ResolutionMode, ResolverOutput,
64};
65
66mod export;
67mod installable;
68mod map;
69mod tree;
70
71/// The current version of the lockfile format.
72pub const VERSION: u32 = 1;
73
74/// The current revision of the lockfile format.
75const REVISION: u32 = 3;
76
77static LINUX_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
78    let pep508 = MarkerTree::from_str("os_name == 'posix' and sys_platform == 'linux'").unwrap();
79    UniversalMarker::new(pep508, ConflictMarker::TRUE)
80});
81static WINDOWS_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
82    let pep508 = MarkerTree::from_str("os_name == 'nt' and sys_platform == 'win32'").unwrap();
83    UniversalMarker::new(pep508, ConflictMarker::TRUE)
84});
85static MAC_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
86    let pep508 = MarkerTree::from_str("os_name == 'posix' and sys_platform == 'darwin'").unwrap();
87    UniversalMarker::new(pep508, ConflictMarker::TRUE)
88});
89static ANDROID_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
90    let pep508 = MarkerTree::from_str("sys_platform == 'android'").unwrap();
91    UniversalMarker::new(pep508, ConflictMarker::TRUE)
92});
93static ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
94    let pep508 =
95        MarkerTree::from_str("platform_machine == 'aarch64' or platform_machine == 'arm64' or platform_machine == 'ARM64'")
96            .unwrap();
97    UniversalMarker::new(pep508, ConflictMarker::TRUE)
98});
99static X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
100    let pep508 =
101        MarkerTree::from_str("platform_machine == 'x86_64' or platform_machine == 'amd64' or platform_machine == 'AMD64'")
102            .unwrap();
103    UniversalMarker::new(pep508, ConflictMarker::TRUE)
104});
105static X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
106    let pep508 = MarkerTree::from_str(
107        "platform_machine == 'i686' or platform_machine == 'i386' or platform_machine == 'win32' or platform_machine == 'x86'",
108    )
109    .unwrap();
110    UniversalMarker::new(pep508, ConflictMarker::TRUE)
111});
112static LINUX_ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
113    let mut marker = *LINUX_MARKERS;
114    marker.and(*ARM_MARKERS);
115    marker
116});
117static LINUX_X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
118    let mut marker = *LINUX_MARKERS;
119    marker.and(*X86_64_MARKERS);
120    marker
121});
122static LINUX_X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
123    let mut marker = *LINUX_MARKERS;
124    marker.and(*X86_MARKERS);
125    marker
126});
127static WINDOWS_ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
128    let mut marker = *WINDOWS_MARKERS;
129    marker.and(*ARM_MARKERS);
130    marker
131});
132static WINDOWS_X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
133    let mut marker = *WINDOWS_MARKERS;
134    marker.and(*X86_64_MARKERS);
135    marker
136});
137static WINDOWS_X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
138    let mut marker = *WINDOWS_MARKERS;
139    marker.and(*X86_MARKERS);
140    marker
141});
142static MAC_ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
143    let mut marker = *MAC_MARKERS;
144    marker.and(*ARM_MARKERS);
145    marker
146});
147static MAC_X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
148    let mut marker = *MAC_MARKERS;
149    marker.and(*X86_64_MARKERS);
150    marker
151});
152static MAC_X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
153    let mut marker = *MAC_MARKERS;
154    marker.and(*X86_MARKERS);
155    marker
156});
157static ANDROID_ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
158    let mut marker = *ANDROID_MARKERS;
159    marker.and(*ARM_MARKERS);
160    marker
161});
162static ANDROID_X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
163    let mut marker = *ANDROID_MARKERS;
164    marker.and(*X86_64_MARKERS);
165    marker
166});
167static ANDROID_X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
168    let mut marker = *ANDROID_MARKERS;
169    marker.and(*X86_MARKERS);
170    marker
171});
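// A hedged sketch of how these marker intersections are used by
// `Lock::remove_unreachable_wheels` below: a wheel whose platform tags are all
// Windows-specific can be dropped for a package whose marker is, say,
// `sys_platform == 'linux'`, because that marker is disjoint from
// `WINDOWS_MARKERS` (`os_name == 'nt' and sys_platform == 'win32'`). The
// commented assertion is illustrative only:
//
//     let linux_only = UniversalMarker::new(
//         MarkerTree::from_str("sys_platform == 'linux'").unwrap(),
//         ConflictMarker::TRUE,
//     );
//     assert!(linux_only.is_disjoint(*WINDOWS_MARKERS));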
172
173#[derive(Clone, Debug, PartialEq, Eq, serde::Deserialize)]
174#[serde(try_from = "LockWire")]
175pub struct Lock {
176    /// The (major) version of the lockfile format.
177    ///
178    /// Changes to the major version indicate backwards- and forwards-incompatible changes to the
179    /// lockfile format. A given uv version only supports a single major version of the lockfile
180    /// format.
181    ///
182    /// In other words, a version of uv that supports version 2 of the lockfile format will not be
183    /// able to read lockfiles generated under version 1 or 3.
184    version: u32,
185    /// The revision of the lockfile format.
186    ///
187    /// Changes to the revision indicate backwards-compatible changes to the lockfile format.
188    /// In other words, versions of uv that only support revision 1 _will_ be able to read lockfiles
189    /// with a revision greater than 1 (though they may ignore newer fields).
190    revision: u32,
191    /// If this lockfile was built from a forking resolution with non-identical forks, store the
192    /// forks in the lockfile so we can recreate them in subsequent resolutions.
193    fork_markers: Vec<UniversalMarker>,
194    /// The conflicting groups/extras specified by the user.
195    conflicts: Conflicts,
196    /// The list of supported environments specified by the user.
197    supported_environments: Vec<MarkerTree>,
198    /// The list of required platforms specified by the user.
199    required_environments: Vec<MarkerTree>,
200    /// The range of supported Python versions.
201    requires_python: RequiresPython,
202    /// We discard the lockfile if these options don't match.
203    options: ResolverOptions,
205    /// The actual locked versions and their metadata.
205    packages: Vec<Package>,
206    /// A map from package ID to index in `packages`.
207    ///
208    /// This can be used to quickly lookup the full package for any ID
209    /// in this lock. For example, the dependencies for each package are
210    /// listed as package IDs. This map can be used to find the full
211    /// package for each such dependency.
212    ///
213    /// It is guaranteed that every package in this lock has an entry in
214    /// this map, and that every dependency for every package has an ID
215    /// that exists in this map. That is, there are no dependencies that don't
216    /// have a corresponding locked package entry in the same lockfile.
217    by_id: FxHashMap<PackageId, usize>,
218    /// The input requirements to the resolution.
219    manifest: ResolverManifest,
220}
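// A hedged usage sketch (paths illustrative, error handling elided): a `Lock`
// is typically produced from a resolver output and rendered to `uv.lock` via
// `to_toml`:
//
//     let lock = Lock::from_resolution(&resolver_output, workspace_root)?;
//     std::fs::write(workspace_root.join("uv.lock"), lock.to_toml()?)?;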
221
222impl Lock {
223    /// Initialize a [`Lock`] from a [`ResolverOutput`].
224    pub fn from_resolution(resolution: &ResolverOutput, root: &Path) -> Result<Self, LockError> {
225        let mut packages = BTreeMap::new();
226        let requires_python = resolution.requires_python.clone();
227
228        // Determine the set of packages included at multiple versions.
229        let mut seen = FxHashSet::default();
230        let mut duplicates = FxHashSet::default();
231        for node_index in resolution.graph.node_indices() {
232            let ResolutionGraphNode::Dist(dist) = &resolution.graph[node_index] else {
233                continue;
234            };
235            if !dist.is_base() {
236                continue;
237            }
238            if !seen.insert(dist.name()) {
239                duplicates.insert(dist.name());
240            }
241        }
242
243        // Lock all base packages.
244        for node_index in resolution.graph.node_indices() {
245            let ResolutionGraphNode::Dist(dist) = &resolution.graph[node_index] else {
246                continue;
247            };
248            if !dist.is_base() {
249                continue;
250            }
251
252            // If there are multiple distributions for the same package, include the markers of all
253            // forks that included the current distribution.
254            let fork_markers = if duplicates.contains(dist.name()) {
255                resolution
256                    .fork_markers
257                    .iter()
258                    .filter(|fork_markers| !fork_markers.is_disjoint(dist.marker))
259                    .copied()
260                    .collect()
261            } else {
262                vec![]
263            };
264
265            let mut package = Package::from_annotated_dist(dist, fork_markers, root)?;
266            Self::remove_unreachable_wheels(resolution, &requires_python, node_index, &mut package);
267
268            // Add all dependencies
269            for edge in resolution.graph.edges(node_index) {
270                let ResolutionGraphNode::Dist(dependency_dist) = &resolution.graph[edge.target()]
271                else {
272                    continue;
273                };
274                let marker = *edge.weight();
275                package.add_dependency(&requires_python, dependency_dist, marker, root)?;
276            }
277
278            let id = package.id.clone();
279            if let Some(locked_dist) = packages.insert(id, package) {
280                return Err(LockErrorKind::DuplicatePackage {
281                    id: locked_dist.id.clone(),
282                }
283                .into());
284            }
285        }
286
287        // Lock all extras and development dependencies.
288        for node_index in resolution.graph.node_indices() {
289            let ResolutionGraphNode::Dist(dist) = &resolution.graph[node_index] else {
290                continue;
291            };
292            if let Some(extra) = dist.extra.as_ref() {
293                let id = PackageId::from_annotated_dist(dist, root)?;
294                let Some(package) = packages.get_mut(&id) else {
295                    return Err(LockErrorKind::MissingExtraBase {
296                        id,
297                        extra: extra.clone(),
298                    }
299                    .into());
300                };
301                for edge in resolution.graph.edges(node_index) {
302                    let ResolutionGraphNode::Dist(dependency_dist) =
303                        &resolution.graph[edge.target()]
304                    else {
305                        continue;
306                    };
307                    let marker = *edge.weight();
308                    package.add_optional_dependency(
309                        &requires_python,
310                        extra.clone(),
311                        dependency_dist,
312                        marker,
313                        root,
314                    )?;
315                }
316            }
317            if let Some(group) = dist.group.as_ref() {
318                let id = PackageId::from_annotated_dist(dist, root)?;
319                let Some(package) = packages.get_mut(&id) else {
320                    return Err(LockErrorKind::MissingDevBase {
321                        id,
322                        group: group.clone(),
323                    }
324                    .into());
325                };
326                for edge in resolution.graph.edges(node_index) {
327                    let ResolutionGraphNode::Dist(dependency_dist) =
328                        &resolution.graph[edge.target()]
329                    else {
330                        continue;
331                    };
332                    let marker = *edge.weight();
333                    package.add_group_dependency(
334                        &requires_python,
335                        group.clone(),
336                        dependency_dist,
337                        marker,
338                        root,
339                    )?;
340                }
341            }
342        }
343
344        let packages = packages.into_values().collect();
345
346        let options = ResolverOptions {
347            resolution_mode: resolution.options.resolution_mode,
348            prerelease_mode: resolution.options.prerelease_mode,
349            fork_strategy: resolution.options.fork_strategy,
350            exclude_newer: resolution.options.exclude_newer.clone().into(),
351        };
352        let lock = Self::new(
353            VERSION,
354            REVISION,
355            packages,
356            requires_python,
357            options,
358            ResolverManifest::default(),
359            Conflicts::empty(),
360            vec![],
361            vec![],
362            resolution.fork_markers.clone(),
363        )?;
364        Ok(lock)
365    }
366
367    /// Remove wheels that can't be selected for installation due to environment markers.
368    ///
369    /// For example, a package included under `sys_platform == 'win32'` does not need Linux
370    /// wheels.
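    ///
    /// Likewise, a wheel tagged only for Windows on x86-64 (e.g., `win_amd64`) is removed when
    /// the package's marker is disjoint from the combined Windows x86-64 marker, i.e.
    /// `os_name == 'nt' and sys_platform == 'win32'` together with the x86-64
    /// `platform_machine` values (see the constants above).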
371    fn remove_unreachable_wheels(
372        graph: &ResolverOutput,
373        requires_python: &RequiresPython,
374        node_index: NodeIndex,
375        locked_dist: &mut Package,
376    ) {
377        // Remove wheels that don't match `requires-python` and can't be selected for installation.
378        locked_dist
379            .wheels
380            .retain(|wheel| requires_python.matches_wheel_tag(&wheel.filename));
381
382        // Filter by platform tags.
383        locked_dist.wheels.retain(|wheel| {
384            // Naively, we'd check whether `platform_system == 'Linux'` is disjoint, or
385            // `os_name == 'posix'` is disjoint, or `sys_platform == 'linux'` is disjoint (each on
386            // its own sufficient to exclude Linux wheels), but due to
387            // `((A ∩ B = ∅) or (A ∩ C = ∅)) => (A ∩ (B ∩ C) = ∅)`
388            // a single disjointness check with the intersection is sufficient, so we have one
389            // constant per platform.
390            let platform_tags = wheel.filename.platform_tags();
391
392            if platform_tags.iter().all(PlatformTag::is_any) {
393                return true;
394            }
395
396            if platform_tags.iter().all(PlatformTag::is_linux) {
397                if platform_tags.iter().all(PlatformTag::is_arm) {
398                    if graph.graph[node_index]
399                        .marker()
400                        .is_disjoint(*LINUX_ARM_MARKERS)
401                    {
402                        return false;
403                    }
404                } else if platform_tags.iter().all(PlatformTag::is_x86_64) {
405                    if graph.graph[node_index]
406                        .marker()
407                        .is_disjoint(*LINUX_X86_64_MARKERS)
408                    {
409                        return false;
410                    }
411                } else if platform_tags.iter().all(PlatformTag::is_x86) {
412                    if graph.graph[node_index]
413                        .marker()
414                        .is_disjoint(*LINUX_X86_MARKERS)
415                    {
416                        return false;
417                    }
418                } else if graph.graph[node_index].marker().is_disjoint(*LINUX_MARKERS) {
419                    return false;
420                }
421            }
422
423            if platform_tags.iter().all(PlatformTag::is_windows) {
424                if platform_tags.iter().all(PlatformTag::is_arm) {
425                    if graph.graph[node_index]
426                        .marker()
427                        .is_disjoint(*WINDOWS_ARM_MARKERS)
428                    {
429                        return false;
430                    }
431                } else if platform_tags.iter().all(PlatformTag::is_x86_64) {
432                    if graph.graph[node_index]
433                        .marker()
434                        .is_disjoint(*WINDOWS_X86_64_MARKERS)
435                    {
436                        return false;
437                    }
438                } else if platform_tags.iter().all(PlatformTag::is_x86) {
439                    if graph.graph[node_index]
440                        .marker()
441                        .is_disjoint(*WINDOWS_X86_MARKERS)
442                    {
443                        return false;
444                    }
445                } else if graph.graph[node_index]
446                    .marker()
447                    .is_disjoint(*WINDOWS_MARKERS)
448                {
449                    return false;
450                }
451            }
452
453            if platform_tags.iter().all(PlatformTag::is_macos) {
454                if platform_tags.iter().all(PlatformTag::is_arm) {
455                    if graph.graph[node_index]
456                        .marker()
457                        .is_disjoint(*MAC_ARM_MARKERS)
458                    {
459                        return false;
460                    }
461                } else if platform_tags.iter().all(PlatformTag::is_x86_64) {
462                    if graph.graph[node_index]
463                        .marker()
464                        .is_disjoint(*MAC_X86_64_MARKERS)
465                    {
466                        return false;
467                    }
468                } else if platform_tags.iter().all(PlatformTag::is_x86) {
469                    if graph.graph[node_index]
470                        .marker()
471                        .is_disjoint(*MAC_X86_MARKERS)
472                    {
473                        return false;
474                    }
475                } else if graph.graph[node_index].marker().is_disjoint(*MAC_MARKERS) {
476                    return false;
477                }
478            }
479
480            if platform_tags.iter().all(PlatformTag::is_android) {
481                if platform_tags.iter().all(PlatformTag::is_arm) {
482                    if graph.graph[node_index]
483                        .marker()
484                        .is_disjoint(*ANDROID_ARM_MARKERS)
485                    {
486                        return false;
487                    }
488                } else if platform_tags.iter().all(PlatformTag::is_x86_64) {
489                    if graph.graph[node_index]
490                        .marker()
491                        .is_disjoint(*ANDROID_X86_64_MARKERS)
492                    {
493                        return false;
494                    }
495                } else if platform_tags.iter().all(PlatformTag::is_x86) {
496                    if graph.graph[node_index]
497                        .marker()
498                        .is_disjoint(*ANDROID_X86_MARKERS)
499                    {
500                        return false;
501                    }
502                } else if graph.graph[node_index]
503                    .marker()
504                    .is_disjoint(*ANDROID_MARKERS)
505                {
506                    return false;
507                }
508            }
509
510            if platform_tags.iter().all(PlatformTag::is_arm) {
511                if graph.graph[node_index].marker().is_disjoint(*ARM_MARKERS) {
512                    return false;
513                }
514            }
515
516            if platform_tags.iter().all(PlatformTag::is_x86_64) {
517                if graph.graph[node_index]
518                    .marker()
519                    .is_disjoint(*X86_64_MARKERS)
520                {
521                    return false;
522                }
523            }
524
525            if platform_tags.iter().all(PlatformTag::is_x86) {
526                if graph.graph[node_index].marker().is_disjoint(*X86_MARKERS) {
527                    return false;
528                }
529            }
530
531            true
532        });
533    }
534
535    /// Initialize a [`Lock`] from a list of [`Package`] entries.
536    fn new(
537        version: u32,
538        revision: u32,
539        mut packages: Vec<Package>,
540        requires_python: RequiresPython,
541        options: ResolverOptions,
542        manifest: ResolverManifest,
543        conflicts: Conflicts,
544        supported_environments: Vec<MarkerTree>,
545        required_environments: Vec<MarkerTree>,
546        fork_markers: Vec<UniversalMarker>,
547    ) -> Result<Self, LockError> {
548        // Put all dependencies for each package in a canonical order and
549        // check for duplicates.
550        for package in &mut packages {
551            package.dependencies.sort();
552            for windows in package.dependencies.windows(2) {
553                let (dep1, dep2) = (&windows[0], &windows[1]);
554                if dep1 == dep2 {
555                    return Err(LockErrorKind::DuplicateDependency {
556                        id: package.id.clone(),
557                        dependency: dep1.clone(),
558                    }
559                    .into());
560                }
561            }
562
563            // Perform the same validation for optional dependencies.
564            for (extra, dependencies) in &mut package.optional_dependencies {
565                dependencies.sort();
566                for windows in dependencies.windows(2) {
567                    let (dep1, dep2) = (&windows[0], &windows[1]);
568                    if dep1 == dep2 {
569                        return Err(LockErrorKind::DuplicateOptionalDependency {
570                            id: package.id.clone(),
571                            extra: extra.clone(),
572                            dependency: dep1.clone(),
573                        }
574                        .into());
575                    }
576                }
577            }
578
579            // Perform the same validation for dev dependencies.
580            for (group, dependencies) in &mut package.dependency_groups {
581                dependencies.sort();
582                for windows in dependencies.windows(2) {
583                    let (dep1, dep2) = (&windows[0], &windows[1]);
584                    if dep1 == dep2 {
585                        return Err(LockErrorKind::DuplicateDevDependency {
586                            id: package.id.clone(),
587                            group: group.clone(),
588                            dependency: dep1.clone(),
589                        }
590                        .into());
591                    }
592                }
593            }
594        }
595        packages.sort_by(|dist1, dist2| dist1.id.cmp(&dist2.id));
596
597        // Check for duplicate package IDs and also build up the map for
598        // packages keyed by their ID.
599        let mut by_id = FxHashMap::default();
600        for (i, dist) in packages.iter().enumerate() {
601            if by_id.insert(dist.id.clone(), i).is_some() {
602                return Err(LockErrorKind::DuplicatePackage {
603                    id: dist.id.clone(),
604                }
605                .into());
606            }
607        }
608
609        // Build up a map from ID to extras.
610        let mut extras_by_id = FxHashMap::default();
611        for dist in &packages {
612            for extra in dist.optional_dependencies.keys() {
613                extras_by_id
614                    .entry(dist.id.clone())
615                    .or_insert_with(FxHashSet::default)
616                    .insert(extra.clone());
617            }
618        }
619
620        // Remove any non-existent extras (e.g., extras that were requested but don't exist).
621        for dist in &mut packages {
622            for dep in dist
623                .dependencies
624                .iter_mut()
625                .chain(dist.optional_dependencies.values_mut().flatten())
626                .chain(dist.dependency_groups.values_mut().flatten())
627            {
628                dep.extra.retain(|extra| {
629                    extras_by_id
630                        .get(&dep.package_id)
631                        .is_some_and(|extras| extras.contains(extra))
632                });
633            }
634        }
635
636        // Check that every dependency has an entry in `by_id`. If any don't,
637        // it implies we somehow have a dependency with no corresponding locked
638        // package.
639        for dist in &packages {
640            for dep in &dist.dependencies {
641                if !by_id.contains_key(&dep.package_id) {
642                    return Err(LockErrorKind::UnrecognizedDependency {
643                        id: dist.id.clone(),
644                        dependency: dep.clone(),
645                    }
646                    .into());
647                }
648            }
649
650            // Perform the same validation for optional dependencies.
651            for dependencies in dist.optional_dependencies.values() {
652                for dep in dependencies {
653                    if !by_id.contains_key(&dep.package_id) {
654                        return Err(LockErrorKind::UnrecognizedDependency {
655                            id: dist.id.clone(),
656                            dependency: dep.clone(),
657                        }
658                        .into());
659                    }
660                }
661            }
662
663            // Perform the same validation for dev dependencies.
664            for dependencies in dist.dependency_groups.values() {
665                for dep in dependencies {
666                    if !by_id.contains_key(&dep.package_id) {
667                        return Err(LockErrorKind::UnrecognizedDependency {
668                            id: dist.id.clone(),
669                            dependency: dep.clone(),
670                        }
671                        .into());
672                    }
673                }
674            }
675
676            // Also check that our sources are consistent with whether we have
677            // hashes or not.
678            if let Some(requires_hash) = dist.id.source.requires_hash() {
679                for wheel in &dist.wheels {
680                    if requires_hash != wheel.hash.is_some() {
681                        return Err(LockErrorKind::Hash {
682                            id: dist.id.clone(),
683                            artifact_type: "wheel",
684                            expected: requires_hash,
685                        }
686                        .into());
687                    }
688                }
689            }
690        }
691        let lock = Self {
692            version,
693            revision,
694            fork_markers,
695            conflicts,
696            supported_environments,
697            required_environments,
698            requires_python,
699            options,
700            packages,
701            by_id,
702            manifest,
703        };
704        Ok(lock)
705    }
706
707    /// Record the requirements that were used to generate this lock.
708    #[must_use]
709    pub fn with_manifest(mut self, manifest: ResolverManifest) -> Self {
710        self.manifest = manifest;
711        self
712    }
713
714    /// Record the conflicting groups that were used to generate this lock.
715    #[must_use]
716    pub fn with_conflicts(mut self, conflicts: Conflicts) -> Self {
717        self.conflicts = conflicts;
718        self
719    }
720
721    /// Record the supported environments that were used to generate this lock.
722    #[must_use]
723    pub fn with_supported_environments(mut self, supported_environments: Vec<MarkerTree>) -> Self {
724        // We "complexify" the markers given, since the supported
725        // environments given might be coming directly from what's written in
726        // `pyproject.toml`, and those are assumed to be simplified (i.e.,
727        // they assume `requires-python` is true). But a `Lock` always uses
728        // non-simplified markers internally, so we need to re-complexify them
729        // here.
730        //
731        // The nice thing about complexifying is that it's a no-op if the
732        // markers given have already been complexified.
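        //
        // For example (illustrative): with `requires-python = ">=3.9"`, the
        // simplified marker `sys_platform == 'linux'` complexifies to
        // `python_full_version >= '3.9' and sys_platform == 'linux'`.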
733        self.supported_environments = supported_environments
734            .into_iter()
735            .map(|marker| self.requires_python.complexify_markers(marker))
736            .collect();
737        self
738    }
739
740    /// Record the required platforms that were used to generate this lock.
741    #[must_use]
742    pub fn with_required_environments(mut self, required_environments: Vec<MarkerTree>) -> Self {
743        self.required_environments = required_environments
744            .into_iter()
745            .map(|marker| self.requires_python.complexify_markers(marker))
746            .collect();
747        self
748    }
749
750    /// Returns `true` if this [`Lock`] includes `provides-extra` metadata.
751    pub fn supports_provides_extra(&self) -> bool {
752        // `provides-extra` was added in Version 1 Revision 1.
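        // The `(version, revision)` pair compares lexicographically, so e.g.
        // `(1, 0) >= (1, 1)` is false while `(1, 3) >= (1, 1)` is true.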
753        (self.version(), self.revision()) >= (1, 1)
754    }
755
756    /// Returns `true` if this [`Lock`] includes entries for empty `dependency-group` metadata.
757    pub fn includes_empty_groups(&self) -> bool {
758        // Empty dependency groups are included as of https://github.com/astral-sh/uv/pull/8598,
759        // but Version 1 Revision 1 is the first revision published after that change.
760        (self.version(), self.revision()) >= (1, 1)
761    }
762
763    /// Returns the lockfile version.
764    pub fn version(&self) -> u32 {
765        self.version
766    }
767
768    /// Returns the lockfile revision.
769    pub fn revision(&self) -> u32 {
770        self.revision
771    }
772
773    /// Returns the number of packages in the lockfile.
774    pub fn len(&self) -> usize {
775        self.packages.len()
776    }
777
778    /// Returns `true` if the lockfile contains no packages.
779    pub fn is_empty(&self) -> bool {
780        self.packages.is_empty()
781    }
782
783    /// Returns the [`Package`] entries in this lock.
784    pub fn packages(&self) -> &[Package] {
785        &self.packages
786    }
787
788    /// Returns the supported Python version range for the lockfile.
789    pub fn requires_python(&self) -> &RequiresPython {
790        &self.requires_python
791    }
792
793    /// Returns the resolution mode used to generate this lock.
794    pub fn resolution_mode(&self) -> ResolutionMode {
795        self.options.resolution_mode
796    }
797
798    /// Returns the pre-release mode used to generate this lock.
799    pub fn prerelease_mode(&self) -> PrereleaseMode {
800        self.options.prerelease_mode
801    }
802
803    /// Returns the fork strategy used to generate this lock.
804    pub fn fork_strategy(&self) -> ForkStrategy {
805        self.options.fork_strategy
806    }
807
808    /// Returns the exclude newer setting used to generate this lock.
809    pub fn exclude_newer(&self) -> ExcludeNewer {
810        // TODO(zanieb): It'd be nice not to hide this clone here, but I am hesitant to introduce
811        // a whole new `ExcludeNewerRef` type just for this
812        self.options.exclude_newer.clone().into()
813    }
814
815    /// Returns the conflicting groups that were used to generate this lock.
816    pub fn conflicts(&self) -> &Conflicts {
817        &self.conflicts
818    }
819
820    /// Returns the supported environments that were used to generate this lock.
821    pub fn supported_environments(&self) -> &[MarkerTree] {
822        &self.supported_environments
823    }
824
825    /// Returns the required platforms that were used to generate this lock.
826    pub fn required_environments(&self) -> &[MarkerTree] {
827        &self.required_environments
828    }
829
830    /// Returns the workspace members that were used to generate this lock.
831    pub fn members(&self) -> &BTreeSet<PackageName> {
832        &self.manifest.members
833    }
834
835    /// Returns the dependency groups that were used to generate this lock.
836    pub fn requirements(&self) -> &BTreeSet<Requirement> {
837        &self.manifest.requirements
838    }
839
840    /// Returns the dependency groups that were used to generate this lock.
841    pub fn dependency_groups(&self) -> &BTreeMap<GroupName, BTreeSet<Requirement>> {
842        &self.manifest.dependency_groups
843    }
844
845    /// Returns the build constraints that were used to generate this lock.
846    pub fn build_constraints(&self, root: &Path) -> Constraints {
847        Constraints::from_requirements(
848            self.manifest
849                .build_constraints
850                .iter()
851                .cloned()
852                .map(|requirement| requirement.to_absolute(root)),
853        )
854    }
855
856    /// Return the package at the workspace root used to generate this lock, if any.
857    pub fn root(&self) -> Option<&Package> {
858        self.packages.iter().find(|package| {
859            let (Source::Editable(path) | Source::Virtual(path)) = &package.id.source else {
860                return false;
861            };
862            path.as_ref() == Path::new("")
863        })
864    }
865
866    /// Returns the supported environments that were used to generate this
867    /// lock.
868    ///
869    /// The markers returned here are "simplified" with respect to the lock
870    /// file's `requires-python` setting. This means these should only be used
871    /// for direct comparison purposes with the supported environments written
872    /// by a human in `pyproject.toml`. (Think of "supported environments" in
873    /// `pyproject.toml` as having an implicit `and python_full_version >=
874    /// '{requires-python-bound}'` attached to each one.)
875    pub fn simplified_supported_environments(&self) -> Vec<MarkerTree> {
876        self.supported_environments()
877            .iter()
878            .copied()
879            .map(|marker| self.simplify_environment(marker))
880            .collect()
881    }
882
883    /// Returns the required platforms that were used to generate this
884    /// lock.
885    pub fn simplified_required_environments(&self) -> Vec<MarkerTree> {
886        self.required_environments()
887            .iter()
888            .copied()
889            .map(|marker| self.simplify_environment(marker))
890            .collect()
891    }
892
893    /// Simplify the given marker environment with respect to the lockfile's
894    /// `requires-python` setting.
895    pub fn simplify_environment(&self, marker: MarkerTree) -> MarkerTree {
896        self.requires_python.simplify_markers(marker)
897    }
898
899    /// If this lockfile was built from a forking resolution with non-identical forks, return the
900    /// markers of those forks; otherwise, return an empty slice.
901    pub fn fork_markers(&self) -> &[UniversalMarker] {
902        self.fork_markers.as_slice()
903    }
904
905    /// Checks whether the fork markers cover the entire supported marker space.
906    ///
907    /// On validation error, returns the marker space that is actually covered and the marker
908    /// space that was expected to be covered.
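    ///
    /// For example (a sketch): if the fork markers only cover
    /// `sys_platform == 'linux'` while the supported environments (intersected
    /// with `requires-python`) also include `sys_platform == 'win32'`, the fork
    /// markers do not cover the expected space, and both marker trees are
    /// returned in the error.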
908    pub fn check_marker_coverage(&self) -> Result<(), (MarkerTree, MarkerTree)> {
909        let fork_markers_union = if self.fork_markers().is_empty() {
910            self.requires_python.to_marker_tree()
911        } else {
912            let mut fork_markers_union = MarkerTree::FALSE;
913            for fork_marker in self.fork_markers() {
914                fork_markers_union.or(fork_marker.pep508());
915            }
916            fork_markers_union
917        };
918        let mut environments_union = if !self.supported_environments.is_empty() {
919            let mut environments_union = MarkerTree::FALSE;
920            for fork_marker in &self.supported_environments {
921                environments_union.or(*fork_marker);
922            }
923            environments_union
924        } else {
925            MarkerTree::TRUE
926        };
927        // When a user defines environments, they are implicitly constrained by requires-python.
928        environments_union.and(self.requires_python.to_marker_tree());
929        if fork_markers_union.negate().is_disjoint(environments_union) {
930            Ok(())
931        } else {
932            Err((fork_markers_union, environments_union))
933        }
934    }
935
936    /// Checks whether the new requires-python specification is disjoint with
937    /// the fork markers in this lock file.
938    ///
939    /// If they are disjoint, then the union of the fork markers and the given
940    /// requires-python specification (converted to a marker tree) are returned
941    /// as an error.
942    ///
943    /// When disjoint, the fork markers in the lock file should be dropped and
944    /// not used.
945    pub fn requires_python_coverage(
946        &self,
947        new_requires_python: &RequiresPython,
948    ) -> Result<(), (MarkerTree, MarkerTree)> {
949        let fork_markers_union = if self.fork_markers().is_empty() {
950            self.requires_python.to_marker_tree()
951        } else {
952            let mut fork_markers_union = MarkerTree::FALSE;
953            for fork_marker in self.fork_markers() {
954                fork_markers_union.or(fork_marker.pep508());
955            }
956            fork_markers_union
957        };
958        let new_requires_python = new_requires_python.to_marker_tree();
959        if fork_markers_union.is_disjoint(new_requires_python) {
960            Err((fork_markers_union, new_requires_python))
961        } else {
962            Ok(())
963        }
964    }
965
966    /// Returns the TOML representation of this lockfile.
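    ///
    /// A lockfile produced at the current `VERSION` and `REVISION` begins roughly like the
    /// following (an illustrative sketch; the Python bound, options, and packages depend on the
    /// resolution being serialized):
    ///
    /// ```toml
    /// version = 1
    /// revision = 3
    /// requires-python = ">=3.12"
    ///
    /// [options]
    /// exclude-newer = "2024-01-01T00:00:00Z"
    ///
    /// [[package]]
    /// name = "example-package"
    /// version = "1.0.0"
    /// ```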
967    pub fn to_toml(&self) -> Result<String, toml_edit::ser::Error> {
968        // Catch a lockfile where the union of fork markers doesn't cover the supported
969        // environments.
970        debug_assert!(self.check_marker_coverage().is_ok());
971
972        // We construct a TOML document manually instead of going through Serde to enable
973        // the use of inline tables.
974        let mut doc = toml_edit::DocumentMut::new();
975        doc.insert("version", value(i64::from(self.version)));
976
977        if self.revision > 0 {
978            doc.insert("revision", value(i64::from(self.revision)));
979        }
980
981        doc.insert("requires-python", value(self.requires_python.to_string()));
982
983        if !self.fork_markers.is_empty() {
984            let fork_markers = each_element_on_its_line_array(
985                simplified_universal_markers(&self.fork_markers, &self.requires_python).into_iter(),
986            );
987            if !fork_markers.is_empty() {
988                doc.insert("resolution-markers", value(fork_markers));
989            }
990        }
991
992        if !self.supported_environments.is_empty() {
993            let supported_environments = each_element_on_its_line_array(
994                self.supported_environments
995                    .iter()
996                    .copied()
997                    .map(|marker| SimplifiedMarkerTree::new(&self.requires_python, marker))
998                    .filter_map(SimplifiedMarkerTree::try_to_string),
999            );
1000            doc.insert("supported-markers", value(supported_environments));
1001        }
1002
1003        if !self.required_environments.is_empty() {
1004            let required_environments = each_element_on_its_line_array(
1005                self.required_environments
1006                    .iter()
1007                    .copied()
1008                    .map(|marker| SimplifiedMarkerTree::new(&self.requires_python, marker))
1009                    .filter_map(SimplifiedMarkerTree::try_to_string),
1010            );
1011            doc.insert("required-markers", value(required_environments));
1012        }
1013
1014        if !self.conflicts.is_empty() {
1015            let mut list = Array::new();
1016            for set in self.conflicts.iter() {
1017                list.push(each_element_on_its_line_array(set.iter().map(|item| {
1018                    let mut table = InlineTable::new();
1019                    table.insert("package", Value::from(item.package().to_string()));
1020                    match item.kind() {
1021                        ConflictKind::Project => {}
1022                        ConflictKind::Extra(extra) => {
1023                            table.insert("extra", Value::from(extra.to_string()));
1024                        }
1025                        ConflictKind::Group(group) => {
1026                            table.insert("group", Value::from(group.to_string()));
1027                        }
1028                    }
1029                    table
1030                })));
1031            }
1032            doc.insert("conflicts", value(list));
1033        }
1034
1035        // Write the settings that were used to generate the resolution.
1036        // This enables us to invalidate the lockfile if the user changes
1037        // their settings.
1038        {
1039            let mut options_table = Table::new();
1040
1041            if self.options.resolution_mode != ResolutionMode::default() {
1042                options_table.insert(
1043                    "resolution-mode",
1044                    value(self.options.resolution_mode.to_string()),
1045                );
1046            }
1047            if self.options.prerelease_mode != PrereleaseMode::default() {
1048                options_table.insert(
1049                    "prerelease-mode",
1050                    value(self.options.prerelease_mode.to_string()),
1051                );
1052            }
1053            if self.options.fork_strategy != ForkStrategy::default() {
1054                options_table.insert(
1055                    "fork-strategy",
1056                    value(self.options.fork_strategy.to_string()),
1057                );
1058            }
1059            let exclude_newer = ExcludeNewer::from(self.options.exclude_newer.clone());
1060            if !exclude_newer.is_empty() {
1061                // Always serialize global exclude-newer as a string
1062                if let Some(global) = exclude_newer.global {
1063                    options_table.insert("exclude-newer", value(global.to_string()));
1064                }
1065
1066                // Serialize package-specific exclusions as a separate field
1067                if !exclude_newer.package.is_empty() {
1068                    let mut package_table = toml_edit::Table::new();
1069                    for (name, timestamp) in &exclude_newer.package {
1070                        package_table.insert(name.as_ref(), value(timestamp.to_string()));
1071                    }
1072                    options_table.insert("exclude-newer-package", Item::Table(package_table));
1073                }
1074            }
1075
1076            if !options_table.is_empty() {
1077                doc.insert("options", Item::Table(options_table));
1078            }
1079        }
1080
1081        // Write the manifest that was used to generate the resolution.
1082        {
1083            let mut manifest_table = Table::new();
1084
1085            if !self.manifest.members.is_empty() {
1086                manifest_table.insert(
1087                    "members",
1088                    value(each_element_on_its_line_array(
1089                        self.manifest
1090                            .members
1091                            .iter()
1092                            .map(std::string::ToString::to_string),
1093                    )),
1094                );
1095            }
1096
1097            if !self.manifest.requirements.is_empty() {
1098                let requirements = self
1099                    .manifest
1100                    .requirements
1101                    .iter()
1102                    .map(|requirement| {
1103                        serde::Serialize::serialize(
1104                            &requirement,
1105                            toml_edit::ser::ValueSerializer::new(),
1106                        )
1107                    })
1108                    .collect::<Result<Vec<_>, _>>()?;
1109                let requirements = match requirements.as_slice() {
1110                    [] => Array::new(),
1111                    [requirement] => Array::from_iter([requirement]),
1112                    requirements => each_element_on_its_line_array(requirements.iter()),
1113                };
1114                manifest_table.insert("requirements", value(requirements));
1115            }
1116
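            // The same formatting applies to the requirements above and the manifest arrays
            // below: empty and single-element arrays stay inline, while multiple elements are
            // rendered one per line via `each_element_on_its_line_array`, e.g. (an illustrative
            // shape; exact fields depend on the `Requirement` serialization):
            //
            //     requirements = [
            //         { name = "anyio" },
            //         { name = "idna" },
            //     ]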
1117            if !self.manifest.constraints.is_empty() {
1118                let constraints = self
1119                    .manifest
1120                    .constraints
1121                    .iter()
1122                    .map(|requirement| {
1123                        serde::Serialize::serialize(
1124                            &requirement,
1125                            toml_edit::ser::ValueSerializer::new(),
1126                        )
1127                    })
1128                    .collect::<Result<Vec<_>, _>>()?;
1129                let constraints = match constraints.as_slice() {
1130                    [] => Array::new(),
1131                    [requirement] => Array::from_iter([requirement]),
1132                    constraints => each_element_on_its_line_array(constraints.iter()),
1133                };
1134                manifest_table.insert("constraints", value(constraints));
1135            }
1136
1137            if !self.manifest.overrides.is_empty() {
1138                let overrides = self
1139                    .manifest
1140                    .overrides
1141                    .iter()
1142                    .map(|requirement| {
1143                        serde::Serialize::serialize(
1144                            &requirement,
1145                            toml_edit::ser::ValueSerializer::new(),
1146                        )
1147                    })
1148                    .collect::<Result<Vec<_>, _>>()?;
1149                let overrides = match overrides.as_slice() {
1150                    [] => Array::new(),
1151                    [requirement] => Array::from_iter([requirement]),
1152                    overrides => each_element_on_its_line_array(overrides.iter()),
1153                };
1154                manifest_table.insert("overrides", value(overrides));
1155            }
1156
1157            if !self.manifest.excludes.is_empty() {
1158                let excludes = self
1159                    .manifest
1160                    .excludes
1161                    .iter()
1162                    .map(|name| {
1163                        serde::Serialize::serialize(&name, toml_edit::ser::ValueSerializer::new())
1164                    })
1165                    .collect::<Result<Vec<_>, _>>()?;
1166                let excludes = match excludes.as_slice() {
1167                    [] => Array::new(),
1168                    [name] => Array::from_iter([name]),
1169                    excludes => each_element_on_its_line_array(excludes.iter()),
1170                };
1171                manifest_table.insert("excludes", value(excludes));
1172            }
1173
1174            if !self.manifest.build_constraints.is_empty() {
1175                let build_constraints = self
1176                    .manifest
1177                    .build_constraints
1178                    .iter()
1179                    .map(|requirement| {
1180                        serde::Serialize::serialize(
1181                            &requirement,
1182                            toml_edit::ser::ValueSerializer::new(),
1183                        )
1184                    })
1185                    .collect::<Result<Vec<_>, _>>()?;
1186                let build_constraints = match build_constraints.as_slice() {
1187                    [] => Array::new(),
1188                    [requirement] => Array::from_iter([requirement]),
1189                    build_constraints => each_element_on_its_line_array(build_constraints.iter()),
1190                };
1191                manifest_table.insert("build-constraints", value(build_constraints));
1192            }
1193
1194            if !self.manifest.dependency_groups.is_empty() {
1195                let mut dependency_groups = Table::new();
1196                for (extra, requirements) in &self.manifest.dependency_groups {
1197                    let requirements = requirements
1198                        .iter()
1199                        .map(|requirement| {
1200                            serde::Serialize::serialize(
1201                                &requirement,
1202                                toml_edit::ser::ValueSerializer::new(),
1203                            )
1204                        })
1205                        .collect::<Result<Vec<_>, _>>()?;
1206                    let requirements = match requirements.as_slice() {
1207                        [] => Array::new(),
1208                        [requirement] => Array::from_iter([requirement]),
1209                        requirements => each_element_on_its_line_array(requirements.iter()),
1210                    };
1211                    if !requirements.is_empty() {
1212                        dependency_groups.insert(extra.as_ref(), value(requirements));
1213                    }
1214                }
1215                if !dependency_groups.is_empty() {
1216                    manifest_table.insert("dependency-groups", Item::Table(dependency_groups));
1217                }
1218            }
1219
1220            if !self.manifest.dependency_metadata.is_empty() {
1221                let mut tables = ArrayOfTables::new();
1222                for metadata in &self.manifest.dependency_metadata {
1223                    let mut table = Table::new();
1224                    table.insert("name", value(metadata.name.to_string()));
1225                    if let Some(version) = metadata.version.as_ref() {
1226                        table.insert("version", value(version.to_string()));
1227                    }
1228                    if !metadata.requires_dist.is_empty() {
1229                        table.insert(
1230                            "requires-dist",
1231                            value(serde::Serialize::serialize(
1232                                &metadata.requires_dist,
1233                                toml_edit::ser::ValueSerializer::new(),
1234                            )?),
1235                        );
1236                    }
1237                    if let Some(requires_python) = metadata.requires_python.as_ref() {
1238                        table.insert("requires-python", value(requires_python.to_string()));
1239                    }
1240                    if !metadata.provides_extra.is_empty() {
1241                        table.insert(
1242                            "provides-extras",
1243                            value(serde::Serialize::serialize(
1244                                &metadata.provides_extra,
1245                                toml_edit::ser::ValueSerializer::new(),
1246                            )?),
1247                        );
1248                    }
1249                    tables.push(table);
1250                }
1251                manifest_table.insert("dependency-metadata", Item::ArrayOfTables(tables));
1252            }
1253
1254            if !manifest_table.is_empty() {
1255                doc.insert("manifest", Item::Table(manifest_table));
1256            }
1257        }
1258
1259        // Count the number of packages for each package name. When
1260        // there's only one package for a particular package name (the
1261        // overwhelmingly common case), we can omit some data (like source and
1262        // version) on dependency edges since it is strictly redundant.
1263        let mut dist_count_by_name: FxHashMap<PackageName, u64> = FxHashMap::default();
1264        for dist in &self.packages {
1265            *dist_count_by_name.entry(dist.id.name.clone()).or_default() += 1;
1266        }
1267
1268        let mut packages = ArrayOfTables::new();
1269        for dist in &self.packages {
1270            packages.push(dist.to_toml(&self.requires_python, &dist_count_by_name)?);
1271        }
1272
1273        doc.insert("package", Item::ArrayOfTables(packages));
1274        Ok(doc.to_string())
1275    }
1276
1277    /// Returns the package with the given name. If there are multiple
1278    /// matching packages, then an error is returned. If there are no
1279    /// matching packages, then `Ok(None)` is returned.
1280    pub fn find_by_name(&self, name: &PackageName) -> Result<Option<&Package>, String> {
1281        let mut found_dist = None;
1282        for dist in &self.packages {
1283            if &dist.id.name == name {
1284                if found_dist.is_some() {
1285                    return Err(format!("found multiple packages matching `{name}`"));
1286                }
1287                found_dist = Some(dist);
1288            }
1289        }
1290        Ok(found_dist)
1291    }
1292
1293    /// Returns the package with the given name.
1294    ///
1295    /// If there are multiple matching packages, returns the package that
1296    /// corresponds to the given marker tree.
1297    ///
1298    /// If there are multiple packages that are relevant to the current
1299    /// markers, then an error is returned.
1300    ///
1301    /// If there are no matching packages, then `Ok(None)` is returned.
1302    fn find_by_markers(
1303        &self,
1304        name: &PackageName,
1305        marker_env: &MarkerEnvironment,
1306    ) -> Result<Option<&Package>, String> {
1307        let mut found_dist = None;
1308        for dist in &self.packages {
1309            if &dist.id.name == name {
1310                if dist.fork_markers.is_empty()
1311                    || dist
1312                        .fork_markers
1313                        .iter()
1314                        .any(|marker| marker.evaluate_no_extras(marker_env))
1315                {
1316                    if found_dist.is_some() {
1317                        return Err(format!("found multiple packages matching `{name}`"));
1318                    }
1319                    found_dist = Some(dist);
1320                }
1321            }
1322        }
1323        Ok(found_dist)
1324    }
1325
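    /// Returns the package corresponding to the given [`PackageId`].
    ///
    /// Panics if no package with that ID exists in the lockfile.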
1326    fn find_by_id(&self, id: &PackageId) -> &Package {
1327        let index = *self.by_id.get(id).expect("locked package for ID");
1328
1329        self.packages.get(index).expect("valid index for package")
1330    }
1331
1332    /// Validate that the given extras match the [`Package`] metadata, returning a [`SatisfiesResult`].
1333    fn satisfies_provides_extra<'lock>(
1334        &self,
1335        provides_extra: Box<[ExtraName]>,
1336        package: &'lock Package,
1337    ) -> SatisfiesResult<'lock> {
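        // If the lockfile doesn't record `provides-extra` metadata, there's nothing to validate.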
1338        if !self.supports_provides_extra() {
1339            return SatisfiesResult::Satisfied;
1340        }
1341
1342        let expected: BTreeSet<_> = provides_extra.iter().collect();
1343        let actual: BTreeSet<_> = package.metadata.provides_extra.iter().collect();
1344
1345        if expected != actual {
1346            let expected = Box::into_iter(provides_extra).collect();
1347            return SatisfiesResult::MismatchedPackageProvidesExtra(
1348                &package.id.name,
1349                package.id.version.as_ref(),
1350                expected,
1351                actual,
1352            );
1353        }
1354
1355        SatisfiesResult::Satisfied
1356    }
1357
1358    /// Validate that the given requirements and dependency groups match the [`Package`] metadata, returning a [`SatisfiesResult`].
1359    #[allow(clippy::unused_self)]
1360    fn satisfies_requires_dist<'lock>(
1361        &self,
1362        requires_dist: Box<[Requirement]>,
1363        dependency_groups: BTreeMap<GroupName, Box<[Requirement]>>,
1364        package: &'lock Package,
1365        root: &Path,
1366    ) -> Result<SatisfiesResult<'lock>, LockError> {
1367        // Special-case: if the version is dynamic, compare the flattened requirements.
1368        let flattened = if package.is_dynamic() {
1369            Some(
1370                FlatRequiresDist::from_requirements(requires_dist.clone(), &package.id.name)
1371                    .into_iter()
1372                    .map(|requirement| {
1373                        normalize_requirement(requirement, root, &self.requires_python)
1374                    })
1375                    .collect::<Result<BTreeSet<_>, _>>()?,
1376            )
1377        } else {
1378            None
1379        };
1380
1381        // Validate the `requires-dist` metadata.
1382        let expected: BTreeSet<_> = Box::into_iter(requires_dist)
1383            .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1384            .collect::<Result<_, _>>()?;
1385        let actual: BTreeSet<_> = package
1386            .metadata
1387            .requires_dist
1388            .iter()
1389            .cloned()
1390            .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1391            .collect::<Result<_, _>>()?;
1392
1393        if expected != actual && flattened.is_none_or(|expected| expected != actual) {
1394            return Ok(SatisfiesResult::MismatchedPackageRequirements(
1395                &package.id.name,
1396                package.id.version.as_ref(),
1397                expected,
1398                actual,
1399            ));
1400        }
1401
1402        // Validate the `dependency-groups` metadata.
1403        let expected: BTreeMap<GroupName, BTreeSet<Requirement>> = dependency_groups
1404            .into_iter()
1405            .filter(|(_, requirements)| self.includes_empty_groups() || !requirements.is_empty())
1406            .map(|(group, requirements)| {
1407                Ok::<_, LockError>((
1408                    group,
1409                    Box::into_iter(requirements)
1410                        .map(|requirement| {
1411                            normalize_requirement(requirement, root, &self.requires_python)
1412                        })
1413                        .collect::<Result<_, _>>()?,
1414                ))
1415            })
1416            .collect::<Result<_, _>>()?;
1417        let actual: BTreeMap<GroupName, BTreeSet<Requirement>> = package
1418            .metadata
1419            .dependency_groups
1420            .iter()
1421            .filter(|(_, requirements)| self.includes_empty_groups() || !requirements.is_empty())
1422            .map(|(group, requirements)| {
1423                Ok::<_, LockError>((
1424                    group.clone(),
1425                    requirements
1426                        .iter()
1427                        .cloned()
1428                        .map(|requirement| {
1429                            normalize_requirement(requirement, root, &self.requires_python)
1430                        })
1431                        .collect::<Result<_, _>>()?,
1432                ))
1433            })
1434            .collect::<Result<_, _>>()?;
1435
1436        if expected != actual {
1437            return Ok(SatisfiesResult::MismatchedPackageDependencyGroups(
1438                &package.id.name,
1439                package.id.version.as_ref(),
1440                expected,
1441                actual,
1442            ));
1443        }
1444
1445        Ok(SatisfiesResult::Satisfied)
1446    }
1447
1448    /// Check whether the lockfile satisfies the given requirements, workspace members, and settings, returning a [`SatisfiesResult`] describing any mismatch.
1449    pub async fn satisfies<Context: BuildContext>(
1450        &self,
1451        root: &Path,
1452        packages: &BTreeMap<PackageName, WorkspaceMember>,
1453        members: &[PackageName],
1454        required_members: &BTreeMap<PackageName, Editability>,
1455        requirements: &[Requirement],
1456        constraints: &[Requirement],
1457        overrides: &[Requirement],
1458        excludes: &[PackageName],
1459        build_constraints: &[Requirement],
1460        dependency_groups: &BTreeMap<GroupName, Vec<Requirement>>,
1461        dependency_metadata: &DependencyMetadata,
1462        indexes: Option<&IndexLocations>,
1463        tags: &Tags,
1464        markers: &MarkerEnvironment,
1465        hasher: &HashStrategy,
1466        index: &InMemoryIndex,
1467        database: &DistributionDatabase<'_, Context>,
1468    ) -> Result<SatisfiesResult<'_>, LockError> {
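        // Track the packages that remain to be validated, and those already visited, for the
        // traversal over the dependency graph below.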
1469        let mut queue: VecDeque<&Package> = VecDeque::new();
1470        let mut seen = FxHashSet::default();
1471
1472        // Validate that the lockfile was generated with the same root members.
1473        {
1474            let expected = members.iter().cloned().collect::<BTreeSet<_>>();
1475            let actual = &self.manifest.members;
1476            if expected != *actual {
1477                return Ok(SatisfiesResult::MismatchedMembers(expected, actual));
1478            }
1479        }
1480
1481        // Validate that the member sources have not changed (e.g., that they've switched from
1482        // virtual to non-virtual or vice versa).
1483        for (name, member) in packages {
1484            let source = self.find_by_name(name).ok().flatten();
1485
1486            // Determine whether the member was required by any other member.
1487            let value = required_members.get(name);
1488            let is_required_member = value.is_some();
1489            let editability = value.copied().flatten();
1490
1491            // Verify that the member is virtual (or not).
1492            let expected_virtual = !member.pyproject_toml().is_package(!is_required_member);
1493            let actual_virtual =
1494                source.map(|package| matches!(package.id.source, Source::Virtual(..)));
1495            if actual_virtual != Some(expected_virtual) {
1496                return Ok(SatisfiesResult::MismatchedVirtual(
1497                    name.clone(),
1498                    expected_virtual,
1499                ));
1500            }
1501
1502            // Verify that the member is editable (or not).
1503            let expected_editable = if expected_virtual {
1504                false
1505            } else {
1506                editability.unwrap_or(true)
1507            };
1508            let actual_editable =
1509                source.map(|package| matches!(package.id.source, Source::Editable(..)));
1510            if actual_editable != Some(expected_editable) {
1511                return Ok(SatisfiesResult::MismatchedEditable(
1512                    name.clone(),
1513                    expected_editable,
1514                ));
1515            }
1516        }
1517
1518        // Validate that the lockfile was generated with the same requirements.
1519        {
1520            let expected: BTreeSet<_> = requirements
1521                .iter()
1522                .cloned()
1523                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1524                .collect::<Result<_, _>>()?;
1525            let actual: BTreeSet<_> = self
1526                .manifest
1527                .requirements
1528                .iter()
1529                .cloned()
1530                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1531                .collect::<Result<_, _>>()?;
1532            if expected != actual {
1533                return Ok(SatisfiesResult::MismatchedRequirements(expected, actual));
1534            }
1535        }
1536
1537        // Validate that the lockfile was generated with the same constraints.
1538        {
1539            let expected: BTreeSet<_> = constraints
1540                .iter()
1541                .cloned()
1542                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1543                .collect::<Result<_, _>>()?;
1544            let actual: BTreeSet<_> = self
1545                .manifest
1546                .constraints
1547                .iter()
1548                .cloned()
1549                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1550                .collect::<Result<_, _>>()?;
1551            if expected != actual {
1552                return Ok(SatisfiesResult::MismatchedConstraints(expected, actual));
1553            }
1554        }
1555
1556        // Validate that the lockfile was generated with the same overrides.
1557        {
1558            let expected: BTreeSet<_> = overrides
1559                .iter()
1560                .cloned()
1561                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1562                .collect::<Result<_, _>>()?;
1563            let actual: BTreeSet<_> = self
1564                .manifest
1565                .overrides
1566                .iter()
1567                .cloned()
1568                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1569                .collect::<Result<_, _>>()?;
1570            if expected != actual {
1571                return Ok(SatisfiesResult::MismatchedOverrides(expected, actual));
1572            }
1573        }
1574
1575        // Validate that the lockfile was generated with the same excludes.
1576        {
1577            let expected: BTreeSet<_> = excludes.iter().cloned().collect();
1578            let actual: BTreeSet<_> = self.manifest.excludes.iter().cloned().collect();
1579            if expected != actual {
1580                return Ok(SatisfiesResult::MismatchedExcludes(expected, actual));
1581            }
1582        }
1583
1584        // Validate that the lockfile was generated with the same build constraints.
1585        {
1586            let expected: BTreeSet<_> = build_constraints
1587                .iter()
1588                .cloned()
1589                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1590                .collect::<Result<_, _>>()?;
1591            let actual: BTreeSet<_> = self
1592                .manifest
1593                .build_constraints
1594                .iter()
1595                .cloned()
1596                .map(|requirement| normalize_requirement(requirement, root, &self.requires_python))
1597                .collect::<Result<_, _>>()?;
1598            if expected != actual {
1599                return Ok(SatisfiesResult::MismatchedBuildConstraints(
1600                    expected, actual,
1601                ));
1602            }
1603        }
1604
1605        // Validate that the lockfile was generated with the dependency groups.
1606        {
1607            let expected: BTreeMap<GroupName, BTreeSet<Requirement>> = dependency_groups
1608                .iter()
1609                .filter(|(_, requirements)| !requirements.is_empty())
1610                .map(|(group, requirements)| {
1611                    Ok::<_, LockError>((
1612                        group.clone(),
1613                        requirements
1614                            .iter()
1615                            .cloned()
1616                            .map(|requirement| {
1617                                normalize_requirement(requirement, root, &self.requires_python)
1618                            })
1619                            .collect::<Result<_, _>>()?,
1620                    ))
1621                })
1622                .collect::<Result<_, _>>()?;
1623            let actual: BTreeMap<GroupName, BTreeSet<Requirement>> = self
1624                .manifest
1625                .dependency_groups
1626                .iter()
1627                .filter(|(_, requirements)| !requirements.is_empty())
1628                .map(|(group, requirements)| {
1629                    Ok::<_, LockError>((
1630                        group.clone(),
1631                        requirements
1632                            .iter()
1633                            .cloned()
1634                            .map(|requirement| {
1635                                normalize_requirement(requirement, root, &self.requires_python)
1636                            })
1637                            .collect::<Result<_, _>>()?,
1638                    ))
1639                })
1640                .collect::<Result<_, _>>()?;
1641            if expected != actual {
1642                return Ok(SatisfiesResult::MismatchedDependencyGroups(
1643                    expected, actual,
1644                ));
1645            }
1646        }
1647
1648        // Validate that the lockfile was generated with the same static metadata.
1649        {
1650            let expected = dependency_metadata
1651                .values()
1652                .cloned()
1653                .collect::<BTreeSet<_>>();
1654            let actual = &self.manifest.dependency_metadata;
1655            if expected != *actual {
1656                return Ok(SatisfiesResult::MismatchedStaticMetadata(expected, actual));
1657            }
1658        }
1659
1660        // Collect the set of available indexes (both `--index-url` and `--find-links` entries).
1661        let mut remotes = indexes.map(|locations| {
1662            locations
1663                .allowed_indexes()
1664                .into_iter()
1665                .filter_map(|index| match index.url() {
1666                    IndexUrl::Pypi(_) | IndexUrl::Url(_) => {
1667                        Some(UrlString::from(index.url().without_credentials().as_ref()))
1668                    }
1669                    IndexUrl::Path(_) => None,
1670                })
1671                .collect::<BTreeSet<_>>()
1672        });
1673
1674        let mut locals = indexes.map(|locations| {
1675            locations
1676                .allowed_indexes()
1677                .into_iter()
1678                .filter_map(|index| match index.url() {
1679                    IndexUrl::Pypi(_) | IndexUrl::Url(_) => None,
1680                    IndexUrl::Path(url) => {
1681                        let path = url.to_file_path().ok()?;
1682                        let path = relative_to(&path, root)
1683                            .or_else(|_| std::path::absolute(path))
1684                            .ok()?
1685                            .into_boxed_path();
1686                        Some(path)
1687                    }
1688                })
1689                .collect::<BTreeSet<_>>()
1690        });
1691
1692        // Add the workspace packages to the queue.
1693        for root_name in packages.keys() {
1694            let root = self
1695                .find_by_name(root_name)
1696                .expect("found too many packages matching root");
1697
1698            let Some(root) = root else {
1699                // The package is not in the lockfile, so it can't be satisfied.
1700                return Ok(SatisfiesResult::MissingRoot(root_name.clone()));
1701            };
1702
1703            // Add the base package.
1704            queue.push_back(root);
1705        }
1706
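        // Validate each package reachable from the workspace roots, enqueueing its dependencies
        // as we go.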
1707        while let Some(package) = queue.pop_front() {
1708            // If the lockfile references an index that was not provided, we can't validate it.
1709            if let Source::Registry(index) = &package.id.source {
1710                match index {
1711                    RegistrySource::Url(url) => {
1712                        if remotes
1713                            .as_ref()
1714                            .is_some_and(|remotes| !remotes.contains(url))
1715                        {
1716                            let name = &package.id.name;
1717                            let version = &package
1718                                .id
1719                                .version
1720                                .as_ref()
1721                                .expect("version for registry source");
1722                            return Ok(SatisfiesResult::MissingRemoteIndex(name, version, url));
1723                        }
1724                    }
1725                    RegistrySource::Path(path) => {
1726                        if locals.as_ref().is_some_and(|locals| !locals.contains(path)) {
1727                            let name = &package.id.name;
1728                            let version = &package
1729                                .id
1730                                .version
1731                                .as_ref()
1732                                .expect("version for registry source");
1733                            return Ok(SatisfiesResult::MissingLocalIndex(name, version, path));
1734                        }
1735                    }
1736                }
1737            }
1738
1739            // If the package is immutable, we don't need to validate it (or its dependencies).
1740            if package.id.source.is_immutable() {
1741                continue;
1742            }
1743
1744            if let Some(version) = package.id.version.as_ref() {
1745                // For a non-dynamic package, fetch the metadata from the distribution database.
1746                let dist = package.to_dist(
1747                    root,
1748                    TagPolicy::Preferred(tags),
1749                    &BuildOptions::default(),
1750                    markers,
1751                )?;
1752
1753                let metadata = {
1754                    let id = dist.version_id();
1755                    if let Some(archive) =
1756                        index
1757                            .distributions()
1758                            .get(&id)
1759                            .as_deref()
1760                            .and_then(|response| {
1761                                if let MetadataResponse::Found(archive, ..) = response {
1762                                    Some(archive)
1763                                } else {
1764                                    None
1765                                }
1766                            })
1767                    {
1768                        // If the metadata is already in the index, return it.
1769                        archive.metadata.clone()
1770                    } else {
1771                        // Run the PEP 517 build process to extract metadata from the source distribution.
1772                        let archive = database
1773                            .get_or_build_wheel_metadata(&dist, hasher.get(&dist))
1774                            .await
1775                            .map_err(|err| LockErrorKind::Resolution {
1776                                id: package.id.clone(),
1777                                err,
1778                            })?;
1779
1780                        let metadata = archive.metadata.clone();
1781
1782                        // Insert the metadata into the index.
1783                        index
1784                            .distributions()
1785                            .done(id, Arc::new(MetadataResponse::Found(archive)));
1786
1787                        metadata
1788                    }
1789                };
1790
1791                // If this is a local package, validate that it hasn't become dynamic (in which
1792                // case, we'd expect the version to be omitted).
1793                if package.id.source.is_source_tree() {
1794                    if metadata.dynamic {
1795                        return Ok(SatisfiesResult::MismatchedDynamic(&package.id.name, false));
1796                    }
1797                }
1798
1799                // Validate the `version` metadata.
1800                if metadata.version != *version {
1801                    return Ok(SatisfiesResult::MismatchedVersion(
1802                        &package.id.name,
1803                        version.clone(),
1804                        Some(metadata.version.clone()),
1805                    ));
1806                }
1807
1808                // Validate the `provides-extras` metadata.
1809                match self.satisfies_provides_extra(metadata.provides_extra, package) {
1810                    SatisfiesResult::Satisfied => {}
1811                    result => return Ok(result),
1812                }
1813
1814                // Validate that the requirements are unchanged.
1815                match self.satisfies_requires_dist(
1816                    metadata.requires_dist,
1817                    metadata.dependency_groups,
1818                    package,
1819                    root,
1820                )? {
1821                    SatisfiesResult::Satisfied => {}
1822                    result => return Ok(result),
1823                }
1824            } else if let Some(source_tree) = package.id.source.as_source_tree() {
1825                // For dynamic packages, we don't need the version. We only need to know that the
1826                // package is still dynamic, and that the requirements are unchanged.
1827                //
1828                // If the distribution is a source tree, attempt to extract the requirements from the
1829                // `pyproject.toml` directly. The distribution database will do this too, but we can be
1830                // even more aggressive here since we _only_ need the requirements. So, for example,
1831                // even if the version is dynamic, we can still extract the requirements without
1832                // performing a build, unlike in the database where we typically construct a "complete"
1833                // metadata object.
1834                let parent = root.join(source_tree);
1835                let path = parent.join("pyproject.toml");
1836                let metadata =
1837                    match fs_err::tokio::read_to_string(&path).await {
1838                        Ok(contents) => {
1839                            let pyproject_toml = toml::from_str::<PyProjectToml>(&contents)
1840                                .map_err(|err| LockErrorKind::InvalidPyprojectToml {
1841                                    path: path.clone(),
1842                                    err,
1843                                })?;
1844                            database
1845                                .requires_dist(&parent, &pyproject_toml)
1846                                .await
1847                                .map_err(|err| LockErrorKind::Resolution {
1848                                    id: package.id.clone(),
1849                                    err,
1850                                })?
1851                        }
1852                        Err(err) if err.kind() == io::ErrorKind::NotFound => None,
1853                        Err(err) => {
1854                            return Err(LockErrorKind::UnreadablePyprojectToml { path, err }.into());
1855                        }
1856                    };
1857
1858                let satisfied = metadata.is_some_and(|metadata| {
1859                    // Validate that the package is still dynamic.
1860                    if !metadata.dynamic {
1861                        debug!("Static `requires-dist` for `{}` is out-of-date; falling back to distribution database", package.id);
1862                        return false;
1863                    }
1864
1865                    // Validate that the extras are unchanged.
1866                    if let SatisfiesResult::Satisfied = self.satisfies_provides_extra(metadata.provides_extra, package) {
1867                        debug!("Static `provides-extra` for `{}` is up-to-date", package.id);
1868                    } else {
1869                        debug!("Static `provides-extra` for `{}` is out-of-date; falling back to distribution database", package.id);
1870                        return false;
1871                    }
1872
1873                    // Validate that the requirements are unchanged.
1874                    match self.satisfies_requires_dist(metadata.requires_dist, metadata.dependency_groups, package, root) {
1875                        Ok(SatisfiesResult::Satisfied) => {
1876                            debug!("Static `requires-dist` for `{}` is up-to-date", package.id);
1877                        },
1878                        Ok(..) => {
1879                            debug!("Static `requires-dist` for `{}` is out-of-date; falling back to distribution database", package.id);
1880                            return false;
1881                        },
1882                        Err(..) => {
1883                            debug!("Static `requires-dist` for `{}` is invalid; falling back to distribution database", package.id);
1884                            return false;
1885                        },
1886                    }
1887
1888                    true
1889                });
1890
1891                // If the `requires-dist` metadata matches the requirements, we're done; otherwise,
1892                // fetch the "full" metadata, which may involve invoking the build system. In some
1893                // cases, build backends return metadata that does _not_ match the `pyproject.toml`
1894                // exactly. For example, `hatchling` will flatten any recursive (or self-referential)
1895                // extras, while `setuptools` will not.
1896                if !satisfied {
1897                    let dist = package.to_dist(
1898                        root,
1899                        TagPolicy::Preferred(tags),
1900                        &BuildOptions::default(),
1901                        markers,
1902                    )?;
1903
1904                    let metadata = {
1905                        let id = dist.version_id();
1906                        if let Some(archive) =
1907                            index
1908                                .distributions()
1909                                .get(&id)
1910                                .as_deref()
1911                                .and_then(|response| {
1912                                    if let MetadataResponse::Found(archive, ..) = response {
1913                                        Some(archive)
1914                                    } else {
1915                                        None
1916                                    }
1917                                })
1918                        {
1919                            // If the metadata is already in the index, return it.
1920                            archive.metadata.clone()
1921                        } else {
1922                            // Run the PEP 517 build process to extract metadata from the source distribution.
1923                            let archive = database
1924                                .get_or_build_wheel_metadata(&dist, hasher.get(&dist))
1925                                .await
1926                                .map_err(|err| LockErrorKind::Resolution {
1927                                    id: package.id.clone(),
1928                                    err,
1929                                })?;
1930
1931                            let metadata = archive.metadata.clone();
1932
1933                            // Insert the metadata into the index.
1934                            index
1935                                .distributions()
1936                                .done(id, Arc::new(MetadataResponse::Found(archive)));
1937
1938                            metadata
1939                        }
1940                    };
1941
1942                    // Validate that the package is still dynamic.
1943                    if !metadata.dynamic {
1944                        return Ok(SatisfiesResult::MismatchedDynamic(&package.id.name, true));
1945                    }
1946
1947                    // Validate that the extras are unchanged.
1948                    match self.satisfies_provides_extra(metadata.provides_extra, package) {
1949                        SatisfiesResult::Satisfied => {}
1950                        result => return Ok(result),
1951                    }
1952
1953                    // Validate that the requirements are unchanged.
1954                    match self.satisfies_requires_dist(
1955                        metadata.requires_dist,
1956                        metadata.dependency_groups,
1957                        package,
1958                        root,
1959                    )? {
1960                        SatisfiesResult::Satisfied => {}
1961                        result => return Ok(result),
1962                    }
1963                }
1964            } else {
1965                return Ok(SatisfiesResult::MissingVersion(&package.id.name));
1966            }
1967
1968            // Add any explicit indexes to the list of known locals or remotes. These indexes may
1969            // not be available as top-level configuration (i.e., if they're defined within a
1970            // workspace member), but we already validated that the dependencies are up-to-date, so
1971            // we can consider them "available".
1972            for requirement in &package.metadata.requires_dist {
1973                if let RequirementSource::Registry {
1974                    index: Some(index), ..
1975                } = &requirement.source
1976                {
1977                    match &index.url {
1978                        IndexUrl::Pypi(_) | IndexUrl::Url(_) => {
1979                            if let Some(remotes) = remotes.as_mut() {
1980                                remotes.insert(UrlString::from(
1981                                    index.url().without_credentials().as_ref(),
1982                                ));
1983                            }
1984                        }
1985                        IndexUrl::Path(url) => {
1986                            if let Some(locals) = locals.as_mut() {
1987                                if let Some(path) = url.to_file_path().ok().and_then(|path| {
1988                                    relative_to(&path, root)
1989                                        .or_else(|_| std::path::absolute(path))
1990                                        .ok()
1991                                }) {
1992                                    locals.insert(path.into_boxed_path());
1993                                }
1994                            }
1995                        }
1996                    }
1997                }
1998            }
1999
2000            // Recurse.
2001            for dep in &package.dependencies {
2002                if seen.insert(&dep.package_id) {
2003                    let dep_dist = self.find_by_id(&dep.package_id);
2004                    queue.push_back(dep_dist);
2005                }
2006            }
2007
2008            for dependencies in package.optional_dependencies.values() {
2009                for dep in dependencies {
2010                    if seen.insert(&dep.package_id) {
2011                        let dep_dist = self.find_by_id(&dep.package_id);
2012                        queue.push_back(dep_dist);
2013                    }
2014                }
2015            }
2016
2017            for dependencies in package.dependency_groups.values() {
2018                for dep in dependencies {
2019                    if seen.insert(&dep.package_id) {
2020                        let dep_dist = self.find_by_id(&dep.package_id);
2021                        queue.push_back(dep_dist);
2022                    }
2023                }
2024            }
2025        }
2026
2027        Ok(SatisfiesResult::Satisfied)
2028    }
2029}
2030
2031#[derive(Debug, Copy, Clone)]
2032enum TagPolicy<'tags> {
2033    /// Exclusively consider wheels that match the specified platform tags.
2034    Required(&'tags Tags),
2035    /// Prefer wheels that match the specified platform tags, but fall back to incompatible wheels
2036    /// if necessary.
2037    Preferred(&'tags Tags),
2038}
2039
2040impl<'tags> TagPolicy<'tags> {
2041    /// Returns the platform tags to consider.
2042    fn tags(&self) -> &'tags Tags {
2043        match self {
2044            Self::Required(tags) | Self::Preferred(tags) => tags,
2045        }
2046    }
2047}
2048
2049/// The result of checking if a lockfile satisfies a set of requirements.
2050#[derive(Debug)]
2051pub enum SatisfiesResult<'lock> {
2052    /// The lockfile satisfies the requirements.
2053    Satisfied,
2054    /// The lockfile uses a different set of workspace members.
2055    MismatchedMembers(BTreeSet<PackageName>, &'lock BTreeSet<PackageName>),
2056    /// A workspace member switched from virtual to non-virtual or vice versa.
2057    MismatchedVirtual(PackageName, bool),
2058    /// A workspace member switched from editable to non-editable or vice versa.
2059    MismatchedEditable(PackageName, bool),
2060    /// A source tree switched from dynamic to non-dynamic or vice versa.
2061    MismatchedDynamic(&'lock PackageName, bool),
2062    /// A package in the lockfile has a different version than its current metadata.
2063    MismatchedVersion(&'lock PackageName, Version, Option<Version>),
2064    /// The lockfile uses a different set of requirements.
2065    MismatchedRequirements(BTreeSet<Requirement>, BTreeSet<Requirement>),
2066    /// The lockfile uses a different set of constraints.
2067    MismatchedConstraints(BTreeSet<Requirement>, BTreeSet<Requirement>),
2068    /// The lockfile uses a different set of overrides.
2069    MismatchedOverrides(BTreeSet<Requirement>, BTreeSet<Requirement>),
2070    /// The lockfile uses a different set of excludes.
2071    MismatchedExcludes(BTreeSet<PackageName>, BTreeSet<PackageName>),
2072    /// The lockfile uses a different set of build constraints.
2073    MismatchedBuildConstraints(BTreeSet<Requirement>, BTreeSet<Requirement>),
2074    /// The lockfile uses a different set of dependency groups.
2075    MismatchedDependencyGroups(
2076        BTreeMap<GroupName, BTreeSet<Requirement>>,
2077        BTreeMap<GroupName, BTreeSet<Requirement>>,
2078    ),
2079    /// The lockfile uses different static metadata.
2080    MismatchedStaticMetadata(BTreeSet<StaticMetadata>, &'lock BTreeSet<StaticMetadata>),
2081    /// The lockfile is missing a workspace member.
2082    MissingRoot(PackageName),
2083    /// The lockfile referenced a remote index that was not provided.
2084    MissingRemoteIndex(&'lock PackageName, &'lock Version, &'lock UrlString),
2085    /// The lockfile referenced a local index that was not provided.
2086    MissingLocalIndex(&'lock PackageName, &'lock Version, &'lock Path),
2087    /// A package in the lockfile contains different `requires-dist` metadata than expected.
2088    MismatchedPackageRequirements(
2089        &'lock PackageName,
2090        Option<&'lock Version>,
2091        BTreeSet<Requirement>,
2092        BTreeSet<Requirement>,
2093    ),
2094    /// A package in the lockfile contains different `provides-extra` metadata than expected.
2095    MismatchedPackageProvidesExtra(
2096        &'lock PackageName,
2097        Option<&'lock Version>,
2098        BTreeSet<ExtraName>,
2099        BTreeSet<&'lock ExtraName>,
2100    ),
2101    /// A package in the lockfile contains different `dependency-groups` metadata than expected.
2102    MismatchedPackageDependencyGroups(
2103        &'lock PackageName,
2104        Option<&'lock Version>,
2105        BTreeMap<GroupName, BTreeSet<Requirement>>,
2106        BTreeMap<GroupName, BTreeSet<Requirement>>,
2107    ),
2108    /// The lockfile is missing a version.
2109    MissingVersion(&'lock PackageName),
2110}
2111
2112/// We discard the lockfile if these options don't match.
2113#[derive(Clone, Debug, Default, serde::Deserialize, PartialEq, Eq)]
2114#[serde(rename_all = "kebab-case")]
2115struct ResolverOptions {
2116    /// The [`ResolutionMode`] used to generate this lock.
2117    #[serde(default)]
2118    resolution_mode: ResolutionMode,
2119    /// The [`PrereleaseMode`] used to generate this lock.
2120    #[serde(default)]
2121    prerelease_mode: PrereleaseMode,
2122    /// The [`ForkStrategy`] used to generate this lock.
2123    #[serde(default)]
2124    fork_strategy: ForkStrategy,
2125    /// The [`ExcludeNewer`] setting used to generate this lock.
2126    #[serde(flatten)]
2127    exclude_newer: ExcludeNewerWire,
2128}
2129
2130#[derive(Clone, Debug, Default, serde::Deserialize, PartialEq, Eq)]
2131#[serde(rename_all = "kebab-case")]
2132struct ExcludeNewerWire {
2133    exclude_newer: Option<ExcludeNewerTimestamp>,
2134    #[serde(default, skip_serializing_if = "ExcludeNewerPackage::is_empty")]
2135    exclude_newer_package: ExcludeNewerPackage,
2136}
2137
2138impl From<ExcludeNewerWire> for ExcludeNewer {
2139    fn from(wire: ExcludeNewerWire) -> Self {
2140        Self {
2141            global: wire.exclude_newer,
2142            package: wire.exclude_newer_package,
2143        }
2144    }
2145}
2146
2147impl From<ExcludeNewer> for ExcludeNewerWire {
2148    fn from(exclude_newer: ExcludeNewer) -> Self {
2149        Self {
2150            exclude_newer: exclude_newer.global,
2151            exclude_newer_package: exclude_newer.package,
2152        }
2153    }
2154}
2155
2156#[derive(Clone, Debug, Default, serde::Deserialize, PartialEq, Eq)]
2157#[serde(rename_all = "kebab-case")]
2158pub struct ResolverManifest {
2159    /// The workspace members included in the lockfile.
2160    #[serde(default)]
2161    members: BTreeSet<PackageName>,
2162    /// The requirements provided to the resolver, exclusive of the workspace members.
2163    ///
2164    /// These are requirements that are attached to the project, but not to any of its
2165    /// workspace members. For example, the requirements in a PEP 723 script would be included here.
2166    #[serde(default)]
2167    requirements: BTreeSet<Requirement>,
2168    /// The dependency groups provided to the resolver, exclusive of the workspace members.
2169    ///
2170    /// These are dependency groups that are attached to the project, but not to any of its
2171    /// workspace members. For example, the dependency groups in a `pyproject.toml` without a
2172    /// `[project]` table would be included here.
2173    #[serde(default)]
2174    dependency_groups: BTreeMap<GroupName, BTreeSet<Requirement>>,
2175    /// The constraints provided to the resolver.
2176    #[serde(default)]
2177    constraints: BTreeSet<Requirement>,
2178    /// The overrides provided to the resolver.
2179    #[serde(default)]
2180    overrides: BTreeSet<Requirement>,
2181    /// The excludes provided to the resolver.
2182    #[serde(default)]
2183    excludes: BTreeSet<PackageName>,
2184    /// The build constraints provided to the resolver.
2185    #[serde(default)]
2186    build_constraints: BTreeSet<Requirement>,
2187    /// The static metadata provided to the resolver.
2188    #[serde(default)]
2189    dependency_metadata: BTreeSet<StaticMetadata>,
2190}
2191
2192impl ResolverManifest {
2193    /// Initialize a [`ResolverManifest`] with the given members, requirements, constraints, and
2194    /// overrides.
2195    pub fn new(
2196        members: impl IntoIterator<Item = PackageName>,
2197        requirements: impl IntoIterator<Item = Requirement>,
2198        constraints: impl IntoIterator<Item = Requirement>,
2199        overrides: impl IntoIterator<Item = Requirement>,
2200        excludes: impl IntoIterator<Item = PackageName>,
2201        build_constraints: impl IntoIterator<Item = Requirement>,
2202        dependency_groups: impl IntoIterator<Item = (GroupName, Vec<Requirement>)>,
2203        dependency_metadata: impl IntoIterator<Item = StaticMetadata>,
2204    ) -> Self {
2205        Self {
2206            members: members.into_iter().collect(),
2207            requirements: requirements.into_iter().collect(),
2208            constraints: constraints.into_iter().collect(),
2209            overrides: overrides.into_iter().collect(),
2210            excludes: excludes.into_iter().collect(),
2211            build_constraints: build_constraints.into_iter().collect(),
2212            dependency_groups: dependency_groups
2213                .into_iter()
2214                .map(|(group, requirements)| (group, requirements.into_iter().collect()))
2215                .collect(),
2216            dependency_metadata: dependency_metadata.into_iter().collect(),
2217        }
2218    }
2219
2220    /// Convert the manifest to a relative form using the given workspace.
2221    pub fn relative_to(self, root: &Path) -> Result<Self, io::Error> {
2222        Ok(Self {
2223            members: self.members,
2224            requirements: self
2225                .requirements
2226                .into_iter()
2227                .map(|requirement| requirement.relative_to(root))
2228                .collect::<Result<BTreeSet<_>, _>>()?,
2229            constraints: self
2230                .constraints
2231                .into_iter()
2232                .map(|requirement| requirement.relative_to(root))
2233                .collect::<Result<BTreeSet<_>, _>>()?,
2234            overrides: self
2235                .overrides
2236                .into_iter()
2237                .map(|requirement| requirement.relative_to(root))
2238                .collect::<Result<BTreeSet<_>, _>>()?,
2239            excludes: self.excludes,
2240            build_constraints: self
2241                .build_constraints
2242                .into_iter()
2243                .map(|requirement| requirement.relative_to(root))
2244                .collect::<Result<BTreeSet<_>, _>>()?,
2245            dependency_groups: self
2246                .dependency_groups
2247                .into_iter()
2248                .map(|(group, requirements)| {
2249                    Ok::<_, io::Error>((
2250                        group,
2251                        requirements
2252                            .into_iter()
2253                            .map(|requirement| requirement.relative_to(root))
2254                            .collect::<Result<BTreeSet<_>, _>>()?,
2255                    ))
2256                })
2257                .collect::<Result<BTreeMap<_, _>, _>>()?,
2258            dependency_metadata: self.dependency_metadata,
2259        })
2260    }
2261}
2262
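/// The wire (TOML) representation of a lockfile, as deserialized from `uv.lock` before being
/// validated and converted into a [`Lock`].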
2263#[derive(Clone, Debug, serde::Deserialize)]
2264#[serde(rename_all = "kebab-case")]
2265struct LockWire {
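    /// The lockfile format version.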
2266    version: u32,
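    /// The lockfile format revision, if present.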
2267    revision: Option<u32>,
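    /// The Python version requirement under which the resolution was produced.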
2268    requires_python: RequiresPython,
2269    /// If this lockfile was built from a forking resolution with non-identical forks, store the
2270    /// forks in the lockfile so we can recreate them in subsequent resolutions.
2271    #[serde(rename = "resolution-markers", default)]
2272    fork_markers: Vec<SimplifiedMarkerTree>,
2273    #[serde(rename = "supported-markers", default)]
2274    supported_environments: Vec<SimplifiedMarkerTree>,
2275    #[serde(rename = "required-markers", default)]
2276    required_environments: Vec<SimplifiedMarkerTree>,
2277    #[serde(rename = "conflicts", default)]
2278    conflicts: Option<Conflicts>,
2279    /// We discard the lockfile if these options don't match.
2280    #[serde(default)]
2281    options: ResolverOptions,
2282    #[serde(default)]
2283    manifest: ResolverManifest,
2284    #[serde(rename = "package", alias = "distribution", default)]
2285    packages: Vec<PackageWire>,
2286}
2287
2288impl TryFrom<LockWire> for Lock {
2289    type Error = LockError;
2290
2291    fn try_from(wire: LockWire) -> Result<Self, LockError> {
2292        // Count the number of sources for each package name. When
2293        // there's only one source for a particular package name (the
2294        // overwhelmingly common case), we can omit some data (like source and
2295        // version) on dependency edges since it is strictly redundant.
2296        let mut unambiguous_package_ids: FxHashMap<PackageName, PackageId> = FxHashMap::default();
2297        let mut ambiguous = FxHashSet::default();
2298        for dist in &wire.packages {
2299            if ambiguous.contains(&dist.id.name) {
2300                continue;
2301            }
2302            if let Some(id) = unambiguous_package_ids.remove(&dist.id.name) {
2303                ambiguous.insert(id.name);
2304                continue;
2305            }
2306            unambiguous_package_ids.insert(dist.id.name.clone(), dist.id.clone());
2307        }
2308
2309        let packages = wire
2310            .packages
2311            .into_iter()
2312            .map(|dist| dist.unwire(&wire.requires_python, &unambiguous_package_ids))
2313            .collect::<Result<Vec<_>, _>>()?;
2314        let supported_environments = wire
2315            .supported_environments
2316            .into_iter()
2317            .map(|simplified_marker| simplified_marker.into_marker(&wire.requires_python))
2318            .collect();
2319        let required_environments = wire
2320            .required_environments
2321            .into_iter()
2322            .map(|simplified_marker| simplified_marker.into_marker(&wire.requires_python))
2323            .collect();
2324        let fork_markers = wire
2325            .fork_markers
2326            .into_iter()
2327            .map(|simplified_marker| simplified_marker.into_marker(&wire.requires_python))
2328            .map(UniversalMarker::from_combined)
2329            .collect();
2330        let lock = Self::new(
2331            wire.version,
2332            wire.revision.unwrap_or(0),
2333            packages,
2334            wire.requires_python,
2335            wire.options,
2336            wire.manifest,
2337            wire.conflicts.unwrap_or_else(Conflicts::empty),
2338            supported_environments,
2339            required_environments,
2340            fork_markers,
2341        )?;
2342
2343        Ok(lock)
2344    }
2345}
2346
2347/// Like [`Lock`], but limited to the version field. Used for error reporting: by limiting parsing
2348/// to the version field, we can verify compatibility for lockfiles that may otherwise be
2349/// unparsable.
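///
/// A minimal usage sketch (marked `ignore`; it assumes the `toml` crate used elsewhere in this
/// module): parse only the `version` field, ignoring the rest of the document.
///
/// ```ignore
/// let lock_version: LockVersion = toml::from_str("version = 1").unwrap();
/// assert_eq!(lock_version.version(), 1);
/// ```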
2350#[derive(Clone, Debug, serde::Deserialize)]
2351#[serde(rename_all = "kebab-case")]
2352pub struct LockVersion {
2353    version: u32,
2354}
2355
2356impl LockVersion {
2357    /// Returns the lockfile version.
2358    pub fn version(&self) -> u32 {
2359        self.version
2360    }
2361}
2362
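/// A locked package: a single name, version, and source, along with the distributions and
/// resolved dependencies recorded for it.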
2363#[derive(Clone, Debug, PartialEq, Eq)]
2364pub struct Package {
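    /// The unique identifier of the package: its name, version, and source.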
2365    pub(crate) id: PackageId,
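    /// The source distribution for the package, if available.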
2366    sdist: Option<SourceDist>,
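    /// The built wheels available for the package.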
2367    wheels: Vec<Wheel>,
2368    /// If there are multiple versions or sources for the same package name, we add the markers of
2369    /// the fork(s) that contained this version or source, so we can set the correct preferences in
2370    /// the next resolution.
2371    ///
2372    /// Named `resolution-markers` in `uv.lock`.
2373    fork_markers: Vec<UniversalMarker>,
2374    /// The resolved dependencies of the package.
2375    dependencies: Vec<Dependency>,
2376    /// The resolved optional dependencies of the package.
2377    optional_dependencies: BTreeMap<ExtraName, Vec<Dependency>>,
2378    /// The resolved PEP 735 dependency groups of the package.
2379    dependency_groups: BTreeMap<GroupName, Vec<Dependency>>,
2380    /// The exact requirements from the package metadata.
2381    metadata: PackageMetadata,
2382}
2383
2384impl Package {
2385    fn from_annotated_dist(
2386        annotated_dist: &AnnotatedDist,
2387        fork_markers: Vec<UniversalMarker>,
2388        root: &Path,
2389    ) -> Result<Self, LockError> {
2390        let id = PackageId::from_annotated_dist(annotated_dist, root)?;
2391        let sdist = SourceDist::from_annotated_dist(&id, annotated_dist)?;
2392        let wheels = Wheel::from_annotated_dist(annotated_dist)?;
2393        let requires_dist = if id.source.is_immutable() {
2394            BTreeSet::default()
2395        } else {
2396            annotated_dist
2397                .metadata
2398                .as_ref()
2399                .expect("metadata is present")
2400                .requires_dist
2401                .iter()
2402                .cloned()
2403                .map(|requirement| requirement.relative_to(root))
2404                .collect::<Result<_, _>>()
2405                .map_err(LockErrorKind::RequirementRelativePath)?
2406        };
2407        let provides_extra = if id.source.is_immutable() {
2408            Box::default()
2409        } else {
2410            annotated_dist
2411                .metadata
2412                .as_ref()
2413                .expect("metadata is present")
2414                .provides_extra
2415                .clone()
2416        };
2417        let dependency_groups = if id.source.is_immutable() {
2418            BTreeMap::default()
2419        } else {
2420            annotated_dist
2421                .metadata
2422                .as_ref()
2423                .expect("metadata is present")
2424                .dependency_groups
2425                .iter()
2426                .map(|(group, requirements)| {
2427                    let requirements = requirements
2428                        .iter()
2429                        .cloned()
2430                        .map(|requirement| requirement.relative_to(root))
2431                        .collect::<Result<_, _>>()
2432                        .map_err(LockErrorKind::RequirementRelativePath)?;
2433                    Ok::<_, LockError>((group.clone(), requirements))
2434                })
2435                .collect::<Result<_, _>>()?
2436        };
2437        Ok(Self {
2438            id,
2439            sdist,
2440            wheels,
2441            fork_markers,
2442            dependencies: vec![],
2443            optional_dependencies: BTreeMap::default(),
2444            dependency_groups: BTreeMap::default(),
2445            metadata: PackageMetadata {
2446                requires_dist,
2447                provides_extra,
2448                dependency_groups,
2449            },
2450        })
2451    }
2452
2453    /// Add the [`AnnotatedDist`] as a dependency of the [`Package`].
2454    fn add_dependency(
2455        &mut self,
2456        requires_python: &RequiresPython,
2457        annotated_dist: &AnnotatedDist,
2458        marker: UniversalMarker,
2459        root: &Path,
2460    ) -> Result<(), LockError> {
2461        let new_dep =
2462            Dependency::from_annotated_dist(requires_python, annotated_dist, marker, root)?;
2463        for existing_dep in &mut self.dependencies {
2464            if existing_dep.package_id == new_dep.package_id
2465                // It's important that we do a comparison on
2466                // *simplified* markers here. In particular, when
2467                // we write markers out to the lock file, we use
2468                // "simplified" markers, or markers that are simplified
2469                // *given* that `requires-python` is satisfied. So if
2470                // we don't do equality based on what the simplified
2471                // marker is, we might wind up not merging dependencies
2472                // that ought to be merged and thus writing out extra
2473                // entries.
2474                //
2475                // For example, if `requires-python = '>=3.8'` and we
2476                // have `foo==1` and
2477                // `foo==1 ; python_version >= '3.8'` dependencies,
2478                // then they don't have equivalent complexified
2479                // markers, but their simplified markers are identical.
2480                //
2481                // NOTE: It does seem like perhaps this should
2482                // be implemented semantically/algebraically on
2483                // `MarkerTree` itself, but it wasn't totally clear
2484                // how to do that. I think `pep508` would need to
2485                // grow a concept of "requires python" and provide an
2486                // operation specifically for that.
2487                && existing_dep.simplified_marker == new_dep.simplified_marker
2488            {
2489                existing_dep.extra.extend(new_dep.extra);
2490                return Ok(());
2491            }
2492        }
2493
2494        self.dependencies.push(new_dep);
2495        Ok(())
2496    }
2497
2498    /// Add the [`AnnotatedDist`] as an optional dependency of the [`Package`].
2499    fn add_optional_dependency(
2500        &mut self,
2501        requires_python: &RequiresPython,
2502        extra: ExtraName,
2503        annotated_dist: &AnnotatedDist,
2504        marker: UniversalMarker,
2505        root: &Path,
2506    ) -> Result<(), LockError> {
2507        let dep = Dependency::from_annotated_dist(requires_python, annotated_dist, marker, root)?;
2508        let optional_deps = self.optional_dependencies.entry(extra).or_default();
2509        for existing_dep in &mut *optional_deps {
2510            if existing_dep.package_id == dep.package_id
2511                // See note in add_dependency for why we use
2512                // simplified markers here.
2513                && existing_dep.simplified_marker == dep.simplified_marker
2514            {
2515                existing_dep.extra.extend(dep.extra);
2516                return Ok(());
2517            }
2518        }
2519
2520        optional_deps.push(dep);
2521        Ok(())
2522    }
2523
2524    /// Add the [`AnnotatedDist`] to a dependency group of the [`Package`].
2525    fn add_group_dependency(
2526        &mut self,
2527        requires_python: &RequiresPython,
2528        group: GroupName,
2529        annotated_dist: &AnnotatedDist,
2530        marker: UniversalMarker,
2531        root: &Path,
2532    ) -> Result<(), LockError> {
2533        let dep = Dependency::from_annotated_dist(requires_python, annotated_dist, marker, root)?;
2534        let deps = self.dependency_groups.entry(group).or_default();
2535        for existing_dep in &mut *deps {
2536            if existing_dep.package_id == dep.package_id
2537                // See note in add_dependency for why we use
2538                // simplified markers here.
2539                && existing_dep.simplified_marker == dep.simplified_marker
2540            {
2541                existing_dep.extra.extend(dep.extra);
2542                return Ok(());
2543            }
2544        }
2545
2546        deps.push(dep);
2547        Ok(())
2548    }
2549
2550    /// Convert the [`Package`] to a [`Dist`] that can be used in installation.
2551    fn to_dist(
2552        &self,
2553        workspace_root: &Path,
2554        tag_policy: TagPolicy<'_>,
2555        build_options: &BuildOptions,
2556        markers: &MarkerEnvironment,
2557    ) -> Result<Dist, LockError> {
2558        let no_binary = build_options.no_binary_package(&self.id.name);
2559        let no_build = build_options.no_build_package(&self.id.name);
2560
2561        if !no_binary {
2562            if let Some(best_wheel_index) = self.find_best_wheel(tag_policy) {
2563                return match &self.id.source {
2564                    Source::Registry(source) => {
2565                        let wheels = self
2566                            .wheels
2567                            .iter()
2568                            .map(|wheel| wheel.to_registry_wheel(source, workspace_root))
2569                            .collect::<Result<_, LockError>>()?;
2570                        let reg_built_dist = RegistryBuiltDist {
2571                            wheels,
2572                            best_wheel_index,
2573                            sdist: None,
2574                        };
2575                        Ok(Dist::Built(BuiltDist::Registry(reg_built_dist)))
2576                    }
2577                    Source::Path(path) => {
2578                        let filename: WheelFilename =
2579                            self.wheels[best_wheel_index].filename.clone();
2580                        let install_path = absolute_path(workspace_root, path)?;
2581                        let path_dist = PathBuiltDist {
2582                            filename,
2583                            url: verbatim_url(&install_path, &self.id)?,
2584                            install_path: install_path.into_boxed_path(),
2585                        };
2586                        let built_dist = BuiltDist::Path(path_dist);
2587                        Ok(Dist::Built(built_dist))
2588                    }
2589                    Source::Direct(url, direct) => {
2590                        let filename: WheelFilename =
2591                            self.wheels[best_wheel_index].filename.clone();
2592                        let url = DisplaySafeUrl::from(ParsedArchiveUrl {
2593                            url: url.to_url().map_err(LockErrorKind::InvalidUrl)?,
2594                            subdirectory: direct.subdirectory.clone(),
2595                            ext: DistExtension::Wheel,
2596                        });
2597                        let direct_dist = DirectUrlBuiltDist {
2598                            filename,
2599                            location: Box::new(url.clone()),
2600                            url: VerbatimUrl::from_url(url),
2601                        };
2602                        let built_dist = BuiltDist::DirectUrl(direct_dist);
2603                        Ok(Dist::Built(built_dist))
2604                    }
2605                    Source::Git(_, _) => Err(LockErrorKind::InvalidWheelSource {
2606                        id: self.id.clone(),
2607                        source_type: "Git",
2608                    }
2609                    .into()),
2610                    Source::Directory(_) => Err(LockErrorKind::InvalidWheelSource {
2611                        id: self.id.clone(),
2612                        source_type: "directory",
2613                    }
2614                    .into()),
2615                    Source::Editable(_) => Err(LockErrorKind::InvalidWheelSource {
2616                        id: self.id.clone(),
2617                        source_type: "editable",
2618                    }
2619                    .into()),
2620                    Source::Virtual(_) => Err(LockErrorKind::InvalidWheelSource {
2621                        id: self.id.clone(),
2622                        source_type: "virtual",
2623                    }
2624                    .into()),
2625                };
2626            }
2627        }
2628
2629        if let Some(sdist) = self.to_source_dist(workspace_root)? {
2630            // Even with `--no-build`, allow virtual packages. (In the future, we may want to allow
2631            // any local source tree, or at least editable source trees, which we allow in
2632            // `uv pip`.)
2633            if !no_build || sdist.is_virtual() {
2634                return Ok(Dist::Source(sdist));
2635            }
2636        }
2637
2638        match (no_binary, no_build) {
2639            (true, true) => Err(LockErrorKind::NoBinaryNoBuild {
2640                id: self.id.clone(),
2641            }
2642            .into()),
2643            (true, false) if self.id.source.is_wheel() => Err(LockErrorKind::NoBinaryWheelOnly {
2644                id: self.id.clone(),
2645            }
2646            .into()),
2647            (true, false) => Err(LockErrorKind::NoBinary {
2648                id: self.id.clone(),
2649            }
2650            .into()),
2651            (false, true) => Err(LockErrorKind::NoBuild {
2652                id: self.id.clone(),
2653            }
2654            .into()),
2655            (false, false) if self.id.source.is_wheel() => Err(LockError {
2656                kind: Box::new(LockErrorKind::IncompatibleWheelOnly {
2657                    id: self.id.clone(),
2658                }),
2659                hint: self.tag_hint(tag_policy, markers),
2660            }),
2661            (false, false) => Err(LockError {
2662                kind: Box::new(LockErrorKind::NeitherSourceDistNorWheel {
2663                    id: self.id.clone(),
2664                }),
2665                hint: self.tag_hint(tag_policy, markers),
2666            }),
2667        }
2668    }
2669
2670    /// Generate a [`WheelTagHint`] based on wheel-tag incompatibilities.
2671    fn tag_hint(
2672        &self,
2673        tag_policy: TagPolicy<'_>,
2674        markers: &MarkerEnvironment,
2675    ) -> Option<WheelTagHint> {
2676        let filenames = self
2677            .wheels
2678            .iter()
2679            .map(|wheel| &wheel.filename)
2680            .collect::<Vec<_>>();
2681        WheelTagHint::from_wheels(
2682            &self.id.name,
2683            self.id.version.as_ref(),
2684            &filenames,
2685            tag_policy.tags(),
2686            markers,
2687        )
2688    }
2689
2690    /// Convert the source of this [`Package`] to a [`SourceDist`] that can be used in installation.
2691    ///
2692    /// Returns `Ok(None)` if there is no source distribution to convert: either the source points
2693    /// at a wheel (for path and direct URL sources), or the registry entry lists no `sdist`.
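    ///
    /// A minimal sketch of how a caller might handle the `None` case (illustrative only;
    /// `package` and `workspace_root` are assumed bindings):
    ///
    /// ```ignore
    /// if let Some(sdist) = package.to_source_dist(workspace_root)? {
    ///     // Build or install from source.
    /// } else {
    ///     // No source distribution is available (e.g., a wheel-only entry).
    /// }
    /// ```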
2694    fn to_source_dist(
2695        &self,
2696        workspace_root: &Path,
2697    ) -> Result<Option<uv_distribution_types::SourceDist>, LockError> {
2698        let sdist = match &self.id.source {
2699            Source::Path(path) => {
2700                // A direct path source can also be a wheel, so validate the extension.
2701                let DistExtension::Source(ext) = DistExtension::from_path(path).map_err(|err| {
2702                    LockErrorKind::MissingExtension {
2703                        id: self.id.clone(),
2704                        err,
2705                    }
2706                })?
2707                else {
2708                    return Ok(None);
2709                };
2710                let install_path = absolute_path(workspace_root, path)?;
2711                let path_dist = PathSourceDist {
2712                    name: self.id.name.clone(),
2713                    version: self.id.version.clone(),
2714                    url: verbatim_url(&install_path, &self.id)?,
2715                    install_path: install_path.into_boxed_path(),
2716                    ext,
2717                };
2718                uv_distribution_types::SourceDist::Path(path_dist)
2719            }
2720            Source::Directory(path) => {
2721                let install_path = absolute_path(workspace_root, path)?;
2722                let dir_dist = DirectorySourceDist {
2723                    name: self.id.name.clone(),
2724                    url: verbatim_url(&install_path, &self.id)?,
2725                    install_path: install_path.into_boxed_path(),
2726                    editable: Some(false),
2727                    r#virtual: Some(false),
2728                };
2729                uv_distribution_types::SourceDist::Directory(dir_dist)
2730            }
2731            Source::Editable(path) => {
2732                let install_path = absolute_path(workspace_root, path)?;
2733                let dir_dist = DirectorySourceDist {
2734                    name: self.id.name.clone(),
2735                    url: verbatim_url(&install_path, &self.id)?,
2736                    install_path: install_path.into_boxed_path(),
2737                    editable: Some(true),
2738                    r#virtual: Some(false),
2739                };
2740                uv_distribution_types::SourceDist::Directory(dir_dist)
2741            }
2742            Source::Virtual(path) => {
2743                let install_path = absolute_path(workspace_root, path)?;
2744                let dir_dist = DirectorySourceDist {
2745                    name: self.id.name.clone(),
2746                    url: verbatim_url(&install_path, &self.id)?,
2747                    install_path: install_path.into_boxed_path(),
2748                    editable: Some(false),
2749                    r#virtual: Some(true),
2750                };
2751                uv_distribution_types::SourceDist::Directory(dir_dist)
2752            }
2753            Source::Git(url, git) => {
2754                // Remove the fragment and query from the URL; they're already present in the
2755                // `GitSource`.
2756                let mut url = url.to_url().map_err(LockErrorKind::InvalidUrl)?;
2757                url.set_fragment(None);
2758                url.set_query(None);
2759
2760                // Reconstruct the `GitUrl` from the `GitSource`.
2761                let git_url = GitUrl::from_commit(
2762                    url,
2763                    GitReference::from(git.kind.clone()),
2764                    git.precise,
2765                    git.lfs,
2766                )?;
2767
2768                // Reconstruct the PEP 508-compatible URL from the `GitSource`.
2769                let url = DisplaySafeUrl::from(ParsedGitUrl {
2770                    url: git_url.clone(),
2771                    subdirectory: git.subdirectory.clone(),
2772                });
2773
2774                let git_dist = GitSourceDist {
2775                    name: self.id.name.clone(),
2776                    url: VerbatimUrl::from_url(url),
2777                    git: Box::new(git_url),
2778                    subdirectory: git.subdirectory.clone(),
2779                };
2780                uv_distribution_types::SourceDist::Git(git_dist)
2781            }
2782            Source::Direct(url, direct) => {
2783                // A direct URL source can also be a wheel, so validate the extension.
2784                let DistExtension::Source(ext) =
2785                    DistExtension::from_path(url.base_str()).map_err(|err| {
2786                        LockErrorKind::MissingExtension {
2787                            id: self.id.clone(),
2788                            err,
2789                        }
2790                    })?
2791                else {
2792                    return Ok(None);
2793                };
2794                let location = url.to_url().map_err(LockErrorKind::InvalidUrl)?;
2795                let url = DisplaySafeUrl::from(ParsedArchiveUrl {
2796                    url: location.clone(),
2797                    subdirectory: direct.subdirectory.clone(),
2798                    ext: DistExtension::Source(ext),
2799                });
2800                let direct_dist = DirectUrlSourceDist {
2801                    name: self.id.name.clone(),
2802                    location: Box::new(location),
2803                    subdirectory: direct.subdirectory.clone(),
2804                    ext,
2805                    url: VerbatimUrl::from_url(url),
2806                };
2807                uv_distribution_types::SourceDist::DirectUrl(direct_dist)
2808            }
2809            Source::Registry(RegistrySource::Url(url)) => {
2810                let Some(ref sdist) = self.sdist else {
2811                    return Ok(None);
2812                };
2813
2814                let name = &self.id.name;
2815                let version = self
2816                    .id
2817                    .version
2818                    .as_ref()
2819                    .expect("version for registry source");
2820
2821                let file_url = sdist.url().ok_or_else(|| LockErrorKind::MissingUrl {
2822                    name: name.clone(),
2823                    version: version.clone(),
2824                })?;
2825                let filename = sdist
2826                    .filename()
2827                    .ok_or_else(|| LockErrorKind::MissingFilename {
2828                        id: self.id.clone(),
2829                    })?;
2830                let ext = SourceDistExtension::from_path(filename.as_ref()).map_err(|err| {
2831                    LockErrorKind::MissingExtension {
2832                        id: self.id.clone(),
2833                        err,
2834                    }
2835                })?;
2836                let file = Box::new(uv_distribution_types::File {
2837                    dist_info_metadata: false,
2838                    filename: SmallString::from(filename),
2839                    hashes: sdist.hash().map_or(HashDigests::empty(), |hash| {
2840                        HashDigests::from(hash.0.clone())
2841                    }),
2842                    requires_python: None,
2843                    size: sdist.size(),
2844                    upload_time_utc_ms: sdist.upload_time().map(Timestamp::as_millisecond),
2845                    url: FileLocation::AbsoluteUrl(file_url.clone()),
2846                    yanked: None,
2847                    zstd: None,
2848                });
2849
2850                let index = IndexUrl::from(VerbatimUrl::from_url(
2851                    url.to_url().map_err(LockErrorKind::InvalidUrl)?,
2852                ));
2853
2854                let reg_dist = RegistrySourceDist {
2855                    name: name.clone(),
2856                    version: version.clone(),
2857                    file,
2858                    ext,
2859                    index,
2860                    wheels: vec![],
2861                };
2862                uv_distribution_types::SourceDist::Registry(reg_dist)
2863            }
2864            Source::Registry(RegistrySource::Path(path)) => {
2865                let Some(ref sdist) = self.sdist else {
2866                    return Ok(None);
2867                };
2868
2869                let name = &self.id.name;
2870                let version = self
2871                    .id
2872                    .version
2873                    .as_ref()
2874                    .expect("version for registry source");
2875
2876                let file_url = match sdist {
2877                    SourceDist::Url { url: file_url, .. } => {
2878                        FileLocation::AbsoluteUrl(file_url.clone())
2879                    }
2880                    SourceDist::Path {
2881                        path: file_path, ..
2882                    } => {
2883                        let file_path = workspace_root.join(path).join(file_path);
2884                        let file_url =
2885                            DisplaySafeUrl::from_file_path(&file_path).map_err(|()| {
2886                                LockErrorKind::PathToUrl {
2887                                    path: file_path.into_boxed_path(),
2888                                }
2889                            })?;
2890                        FileLocation::AbsoluteUrl(UrlString::from(file_url))
2891                    }
2892                    SourceDist::Metadata { .. } => {
2893                        return Err(LockErrorKind::MissingPath {
2894                            name: name.clone(),
2895                            version: version.clone(),
2896                        }
2897                        .into());
2898                    }
2899                };
2900                let filename = sdist
2901                    .filename()
2902                    .ok_or_else(|| LockErrorKind::MissingFilename {
2903                        id: self.id.clone(),
2904                    })?;
2905                let ext = SourceDistExtension::from_path(filename.as_ref()).map_err(|err| {
2906                    LockErrorKind::MissingExtension {
2907                        id: self.id.clone(),
2908                        err,
2909                    }
2910                })?;
2911                let file = Box::new(uv_distribution_types::File {
2912                    dist_info_metadata: false,
2913                    filename: SmallString::from(filename),
2914                    hashes: sdist.hash().map_or(HashDigests::empty(), |hash| {
2915                        HashDigests::from(hash.0.clone())
2916                    }),
2917                    requires_python: None,
2918                    size: sdist.size(),
2919                    upload_time_utc_ms: sdist.upload_time().map(Timestamp::as_millisecond),
2920                    url: file_url,
2921                    yanked: None,
2922                    zstd: None,
2923                });
2924
2925                let index = IndexUrl::from(
2926                    VerbatimUrl::from_absolute_path(workspace_root.join(path))
2927                        .map_err(LockErrorKind::RegistryVerbatimUrl)?,
2928                );
2929
2930                let reg_dist = RegistrySourceDist {
2931                    name: name.clone(),
2932                    version: version.clone(),
2933                    file,
2934                    ext,
2935                    index,
2936                    wheels: vec![],
2937                };
2938                uv_distribution_types::SourceDist::Registry(reg_dist)
2939            }
2940        };
2941
2942        Ok(Some(sdist))
2943    }
2944
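    /// Serialize this package as a `[[package]]` table for the lockfile, emitting the
    /// `resolution-markers`, `dependencies`, `optional-dependencies`, `dev-dependencies`,
    /// `sdist`, `wheels`, and `metadata` entries only when they are non-empty.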
2945    fn to_toml(
2946        &self,
2947        requires_python: &RequiresPython,
2948        dist_count_by_name: &FxHashMap<PackageName, u64>,
2949    ) -> Result<Table, toml_edit::ser::Error> {
2950        let mut table = Table::new();
2951
2952        self.id.to_toml(None, &mut table);
2953
2954        if !self.fork_markers.is_empty() {
2955            let fork_markers = each_element_on_its_line_array(
2956                simplified_universal_markers(&self.fork_markers, requires_python).into_iter(),
2957            );
2958            if !fork_markers.is_empty() {
2959                table.insert("resolution-markers", value(fork_markers));
2960            }
2961        }
2962
2963        if !self.dependencies.is_empty() {
2964            let deps = each_element_on_its_line_array(self.dependencies.iter().map(|dep| {
2965                dep.to_toml(requires_python, dist_count_by_name)
2966                    .into_inline_table()
2967            }));
2968            table.insert("dependencies", value(deps));
2969        }
2970
2971        if !self.optional_dependencies.is_empty() {
2972            let mut optional_deps = Table::new();
2973            for (extra, deps) in &self.optional_dependencies {
2974                let deps = each_element_on_its_line_array(deps.iter().map(|dep| {
2975                    dep.to_toml(requires_python, dist_count_by_name)
2976                        .into_inline_table()
2977                }));
2978                if !deps.is_empty() {
2979                    optional_deps.insert(extra.as_ref(), value(deps));
2980                }
2981            }
2982            if !optional_deps.is_empty() {
2983                table.insert("optional-dependencies", Item::Table(optional_deps));
2984            }
2985        }
2986
2987        if !self.dependency_groups.is_empty() {
2988            let mut dependency_groups = Table::new();
2989            for (extra, deps) in &self.dependency_groups {
2990                let deps = each_element_on_its_line_array(deps.iter().map(|dep| {
2991                    dep.to_toml(requires_python, dist_count_by_name)
2992                        .into_inline_table()
2993                }));
2994                if !deps.is_empty() {
2995                    dependency_groups.insert(extra.as_ref(), value(deps));
2996                }
2997            }
2998            if !dependency_groups.is_empty() {
2999                table.insert("dev-dependencies", Item::Table(dependency_groups));
3000            }
3001        }
3002
3003        if let Some(ref sdist) = self.sdist {
3004            table.insert("sdist", value(sdist.to_toml()?));
3005        }
3006
3007        if !self.wheels.is_empty() {
3008            let wheels = each_element_on_its_line_array(
3009                self.wheels
3010                    .iter()
3011                    .map(Wheel::to_toml)
3012                    .collect::<Result<Vec<_>, _>>()?
3013                    .into_iter(),
3014            );
3015            table.insert("wheels", value(wheels));
3016        }
3017
3018        // Write the package metadata, if non-empty.
3019        {
3020            let mut metadata_table = Table::new();
3021
3022            if !self.metadata.requires_dist.is_empty() {
3023                let requires_dist = self
3024                    .metadata
3025                    .requires_dist
3026                    .iter()
3027                    .map(|requirement| {
3028                        serde::Serialize::serialize(
3029                            &requirement,
3030                            toml_edit::ser::ValueSerializer::new(),
3031                        )
3032                    })
3033                    .collect::<Result<Vec<_>, _>>()?;
3034                let requires_dist = match requires_dist.as_slice() {
3035                    [] => Array::new(),
3036                    [requirement] => Array::from_iter([requirement]),
3037                    requires_dist => each_element_on_its_line_array(requires_dist.iter()),
3038                };
3039                metadata_table.insert("requires-dist", value(requires_dist));
3040            }
3041
3042            if !self.metadata.dependency_groups.is_empty() {
3043                let mut dependency_groups = Table::new();
3044                for (extra, deps) in &self.metadata.dependency_groups {
3045                    let deps = deps
3046                        .iter()
3047                        .map(|requirement| {
3048                            serde::Serialize::serialize(
3049                                &requirement,
3050                                toml_edit::ser::ValueSerializer::new(),
3051                            )
3052                        })
3053                        .collect::<Result<Vec<_>, _>>()?;
3054                    let deps = match deps.as_slice() {
3055                        [] => Array::new(),
3056                        [requirement] => Array::from_iter([requirement]),
3057                        deps => each_element_on_its_line_array(deps.iter()),
3058                    };
3059                    dependency_groups.insert(extra.as_ref(), value(deps));
3060                }
3061                if !dependency_groups.is_empty() {
3062                    metadata_table.insert("requires-dev", Item::Table(dependency_groups));
3063                }
3064            }
3065
3066            if !self.metadata.provides_extra.is_empty() {
3067                let provides_extras = self
3068                    .metadata
3069                    .provides_extra
3070                    .iter()
3071                    .map(|extra| {
3072                        serde::Serialize::serialize(&extra, toml_edit::ser::ValueSerializer::new())
3073                    })
3074                    .collect::<Result<Vec<_>, _>>()?;
3075                // This is just a list of names, so linebreaking it is excessive.
3076                let provides_extras = Array::from_iter(provides_extras);
3077                metadata_table.insert("provides-extras", value(provides_extras));
3078            }
3079
3080            if !metadata_table.is_empty() {
3081                table.insert("metadata", Item::Table(metadata_table));
3082            }
3083        }
3084
3085        Ok(table)
3086    }
3087
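    /// Returns the index of the most-preferred compatible wheel, ranking wheels by tag
    /// priority and breaking ties by build tag. Under [`TagPolicy::Preferred`], falls back
    /// to the first wheel when none are compatible; under [`TagPolicy::Required`], returns
    /// `None` instead.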
3088    fn find_best_wheel(&self, tag_policy: TagPolicy<'_>) -> Option<usize> {
3089        type WheelPriority<'lock> = (TagPriority, Option<&'lock BuildTag>);
3090
3091        let mut best: Option<(WheelPriority, usize)> = None;
3092        for (i, wheel) in self.wheels.iter().enumerate() {
3093            let TagCompatibility::Compatible(tag_priority) =
3094                wheel.filename.compatibility(tag_policy.tags())
3095            else {
3096                continue;
3097            };
3098            let build_tag = wheel.filename.build_tag();
3099            let wheel_priority = (tag_priority, build_tag);
3100            match best {
3101                None => {
3102                    best = Some((wheel_priority, i));
3103                }
3104                Some((best_priority, _)) => {
3105                    if wheel_priority > best_priority {
3106                        best = Some((wheel_priority, i));
3107                    }
3108                }
3109            }
3110        }
3111
3112        let best = best.map(|(_, i)| i);
3113        match tag_policy {
3114            TagPolicy::Required(_) => best,
3115            TagPolicy::Preferred(_) => best.or_else(|| self.wheels.first().map(|_| 0)),
3116        }
3117    }
3118
3119    /// Returns the [`PackageName`] of the package.
3120    pub fn name(&self) -> &PackageName {
3121        &self.id.name
3122    }
3123
3124    /// Returns the [`Version`] of the package.
3125    pub fn version(&self) -> Option<&Version> {
3126        self.id.version.as_ref()
3127    }
3128
3129    /// Returns the Git SHA of the package, if it is a Git source.
3130    pub fn git_sha(&self) -> Option<&GitOid> {
3131        match &self.id.source {
3132            Source::Git(_, git) => Some(&git.precise),
3133            _ => None,
3134        }
3135    }
3136
3137    /// Return the fork markers for this package, if any.
3138    pub fn fork_markers(&self) -> &[UniversalMarker] {
3139        self.fork_markers.as_slice()
3140    }
3141
3142    /// Returns the [`IndexUrl`] for the package, if it is a registry source.
3143    pub fn index(&self, root: &Path) -> Result<Option<IndexUrl>, LockError> {
3144        match &self.id.source {
3145            Source::Registry(RegistrySource::Url(url)) => {
3146                let index = IndexUrl::from(VerbatimUrl::from_url(
3147                    url.to_url().map_err(LockErrorKind::InvalidUrl)?,
3148                ));
3149                Ok(Some(index))
3150            }
3151            Source::Registry(RegistrySource::Path(path)) => {
3152                let index = IndexUrl::from(
3153                    VerbatimUrl::from_absolute_path(root.join(path))
3154                        .map_err(LockErrorKind::RegistryVerbatimUrl)?,
3155                );
3156                Ok(Some(index))
3157            }
3158            _ => Ok(None),
3159        }
3160    }
3161
3162    /// Returns all the hashes associated with this [`Package`].
3163    fn hashes(&self) -> HashDigests {
3164        let mut hashes = Vec::with_capacity(
3165            usize::from(self.sdist.as_ref().and_then(|sdist| sdist.hash()).is_some())
3166                + self
3167                    .wheels
3168                    .iter()
3169                    .map(|wheel| usize::from(wheel.hash.is_some()))
3170                    .sum::<usize>(),
3171        );
3172        if let Some(ref sdist) = self.sdist {
3173            if let Some(hash) = sdist.hash() {
3174                hashes.push(hash.0.clone());
3175            }
3176        }
3177        for wheel in &self.wheels {
3178            hashes.extend(wheel.hash.as_ref().map(|h| h.0.clone()));
3179            if let Some(zstd) = wheel.zstd.as_ref() {
3180                hashes.extend(zstd.hash.as_ref().map(|h| h.0.clone()));
3181            }
3182        }
3183        HashDigests::from(hashes)
3184    }
3185
3186    /// Returns the [`ResolvedRepositoryReference`] for the package, if it is a Git source.
3187    pub fn as_git_ref(&self) -> Result<Option<ResolvedRepositoryReference>, LockError> {
3188        match &self.id.source {
3189            Source::Git(url, git) => Ok(Some(ResolvedRepositoryReference {
3190                reference: RepositoryReference {
3191                    url: RepositoryUrl::new(&url.to_url().map_err(LockErrorKind::InvalidUrl)?),
3192                    reference: GitReference::from(git.kind.clone()),
3193                },
3194                sha: git.precise,
3195            })),
3196            _ => Ok(None),
3197        }
3198    }
3199
3200    /// Returns `true` if the package is a dynamic source tree.
3201    fn is_dynamic(&self) -> bool {
3202        self.id.version.is_none()
3203    }
3204
3205    /// Returns the extras the package provides, if any.
3206    pub fn provides_extras(&self) -> &[ExtraName] {
3207        &self.metadata.provides_extra
3208    }
3209
3210    /// Returns the dependency groups the package provides, if any.
3211    pub fn dependency_groups(&self) -> &BTreeMap<GroupName, BTreeSet<Requirement>> {
3212        &self.metadata.dependency_groups
3213    }
3214
3215    /// Returns the dependencies of the package.
3216    pub fn dependencies(&self) -> &[Dependency] {
3217        &self.dependencies
3218    }
3219
3220    /// Returns the optional dependencies of the package.
3221    pub fn optional_dependencies(&self) -> &BTreeMap<ExtraName, Vec<Dependency>> {
3222        &self.optional_dependencies
3223    }
3224
3225    /// Returns the resolved PEP 735 dependency groups of the package.
3226    pub fn resolved_dependency_groups(&self) -> &BTreeMap<GroupName, Vec<Dependency>> {
3227        &self.dependency_groups
3228    }
3229
3230    /// Returns an [`InstallTarget`] view for filtering decisions.
3231    pub fn as_install_target(&self) -> InstallTarget<'_> {
3232        InstallTarget {
3233            name: self.name(),
3234            is_local: self.id.source.is_local(),
3235        }
3236    }
3237}
3238
3239/// Attempts to construct a `VerbatimUrl` from the given normalized `Path`.
3240fn verbatim_url(path: &Path, id: &PackageId) -> Result<VerbatimUrl, LockError> {
3241    let url =
3242        VerbatimUrl::from_normalized_path(path).map_err(|err| LockErrorKind::VerbatimUrl {
3243            id: id.clone(),
3244            err,
3245        })?;
3246    Ok(url)
3247}
3248
3249/// Attempts to construct an absolute path from the given `Path`.
3250fn absolute_path(workspace_root: &Path, path: &Path) -> Result<PathBuf, LockError> {
3251    let path = uv_fs::normalize_absolute_path(&workspace_root.join(path))
3252        .map_err(LockErrorKind::AbsolutePath)?;
3253    Ok(path)
3254}
3255
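/// A [`Package`] as it appears on disk in the lockfile, prior to being "unwired" into the
/// in-memory representation (e.g., resolving abbreviated dependency entries against the set
/// of unambiguous package names and de-simplifying markers).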
3256#[derive(Clone, Debug, serde::Deserialize)]
3257#[serde(rename_all = "kebab-case")]
3258struct PackageWire {
3259    #[serde(flatten)]
3260    id: PackageId,
3261    #[serde(default)]
3262    metadata: PackageMetadata,
3263    #[serde(default)]
3264    sdist: Option<SourceDist>,
3265    #[serde(default)]
3266    wheels: Vec<Wheel>,
3267    #[serde(default, rename = "resolution-markers")]
3268    fork_markers: Vec<SimplifiedMarkerTree>,
3269    #[serde(default)]
3270    dependencies: Vec<DependencyWire>,
3271    #[serde(default)]
3272    optional_dependencies: BTreeMap<ExtraName, Vec<DependencyWire>>,
3273    #[serde(default, rename = "dev-dependencies", alias = "dependency-groups")]
3274    dependency_groups: BTreeMap<GroupName, Vec<DependencyWire>>,
3275}
3276
3277#[derive(Clone, Default, Debug, Eq, PartialEq, serde::Deserialize)]
3278#[serde(rename_all = "kebab-case")]
3279struct PackageMetadata {
3280    #[serde(default)]
3281    requires_dist: BTreeSet<Requirement>,
3282    #[serde(default, rename = "provides-extras")]
3283    provides_extra: Box<[ExtraName]>,
3284    #[serde(default, rename = "requires-dev", alias = "dependency-groups")]
3285    dependency_groups: BTreeMap<GroupName, BTreeSet<Requirement>>,
3286}
3287
3288impl PackageWire {
3289    fn unwire(
3290        self,
3291        requires_python: &RequiresPython,
3292        unambiguous_package_ids: &FxHashMap<PackageName, PackageId>,
3293    ) -> Result<Package, LockError> {
3294        // Consistency check: each wheel's filename version must match the locked package version
3295        if !uv_flags::contains(uv_flags::EnvironmentFlags::SKIP_WHEEL_FILENAME_CHECK) {
3296            if let Some(version) = &self.id.version {
3297                for wheel in &self.wheels {
3298                    if *version != wheel.filename.version
3299                        && *version != wheel.filename.version.clone().without_local()
3300                    {
3301                        return Err(LockError::from(LockErrorKind::InconsistentVersions {
3302                            name: self.id.name,
3303                            version: version.clone(),
3304                            wheel: wheel.clone(),
3305                        }));
3306                    }
3307                }
3308                // We can't perform the same check for the source dist, since sdist filenames
3309                // aren't required to embed the version.
3310            }
3311        }
3312
3313        let unwire_deps = |deps: Vec<DependencyWire>| -> Result<Vec<Dependency>, LockError> {
3314            deps.into_iter()
3315                .map(|dep| dep.unwire(requires_python, unambiguous_package_ids))
3316                .collect()
3317        };
3318
3319        Ok(Package {
3320            id: self.id,
3321            metadata: self.metadata,
3322            sdist: self.sdist,
3323            wheels: self.wheels,
3324            fork_markers: self
3325                .fork_markers
3326                .into_iter()
3327                .map(|simplified_marker| simplified_marker.into_marker(requires_python))
3328                .map(UniversalMarker::from_combined)
3329                .collect(),
3330            dependencies: unwire_deps(self.dependencies)?,
3331            optional_dependencies: self
3332                .optional_dependencies
3333                .into_iter()
3334                .map(|(extra, deps)| Ok((extra, unwire_deps(deps)?)))
3335                .collect::<Result<_, LockError>>()?,
3336            dependency_groups: self
3337                .dependency_groups
3338                .into_iter()
3339                .map(|(group, deps)| Ok((group, unwire_deps(deps)?)))
3340                .collect::<Result<_, LockError>>()?,
3341        })
3342    }
3343}
3344
3345/// Inside the lockfile, we match a dependency entry to a package entry through a key made up
3346/// of the name, the version, and the source.
3347#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, serde::Deserialize)]
3348#[serde(rename_all = "kebab-case")]
3349pub(crate) struct PackageId {
3350    pub(crate) name: PackageName,
3351    pub(crate) version: Option<Version>,
3352    source: Source,
3353}
3354
3355impl PackageId {
3356    fn from_annotated_dist(annotated_dist: &AnnotatedDist, root: &Path) -> Result<Self, LockError> {
3357        // Identify the source of the package.
3358        let source = Source::from_resolved_dist(&annotated_dist.dist, root)?;
3359        // Omit versions for dynamic source trees.
3360        let version = if source.is_source_tree()
3361            && annotated_dist
3362                .metadata
3363                .as_ref()
3364                .is_some_and(|metadata| metadata.dynamic)
3365        {
3366            None
3367        } else {
3368            Some(annotated_dist.version.clone())
3369        };
3370        let name = annotated_dist.name.clone();
3371        Ok(Self {
3372            name,
3373            version,
3374            source,
3375        })
3376    }
3377
3378    /// Writes this package ID inline into the table given.
3379    /// Writes this package ID inline into the given table.
3380    /// When a map is given, and if the package name in this ID is unambiguous
3381    /// (i.e., it has a count of 1 in the map), then the `version` and `source`
3382    /// fields are omitted. In all other cases, including when a map is not
3383    /// given, the `version` and `source` fields are written.
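    ///
    /// For example, a dependency on a package that appears exactly once in the lockfile can
    /// be written as just `{ name = "foo" }`, while an ambiguous one also carries `version`
    /// and `source`.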
3384    fn to_toml(&self, dist_count_by_name: Option<&FxHashMap<PackageName, u64>>, table: &mut Table) {
3385        let count = dist_count_by_name.and_then(|map| map.get(&self.name).copied());
3386        table.insert("name", value(self.name.to_string()));
3387        if count.map(|count| count > 1).unwrap_or(true) {
3388            if let Some(version) = &self.version {
3389                table.insert("version", value(version.to_string()));
3390            }
3391            self.source.to_toml(table);
3392        }
3393    }
3394}
3395
3396impl Display for PackageId {
3397    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
3398        if let Some(version) = &self.version {
3399            write!(f, "{}=={} @ {}", self.name, version, self.source)
3400        } else {
3401            write!(f, "{} @ {}", self.name, self.source)
3402        }
3403    }
3404}
3405
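/// A [`PackageId`] as written for a dependency entry, where `version` and `source` may be
/// omitted when the package name is unambiguous within the lockfile.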
3406#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, serde::Deserialize)]
3407#[serde(rename_all = "kebab-case")]
3408struct PackageIdForDependency {
3409    name: PackageName,
3410    version: Option<Version>,
3411    source: Option<Source>,
3412}
3413
3414impl PackageIdForDependency {
3415    fn unwire(
3416        self,
3417        unambiguous_package_ids: &FxHashMap<PackageName, PackageId>,
3418    ) -> Result<PackageId, LockError> {
3419        let unambiguous_package_id = unambiguous_package_ids.get(&self.name);
3420        let source = self.source.map(Ok::<_, LockError>).unwrap_or_else(|| {
3421            let Some(package_id) = unambiguous_package_id else {
3422                return Err(LockErrorKind::MissingDependencySource {
3423                    name: self.name.clone(),
3424                }
3425                .into());
3426            };
3427            Ok(package_id.source.clone())
3428        })?;
3429        let version = if let Some(version) = self.version {
3430            Some(version)
3431        } else {
3432            if let Some(package_id) = unambiguous_package_id {
3433                package_id.version.clone()
3434            } else {
3435                // If the package is a source tree, assume that the missing `self.version` field is
3436                // indicative of a dynamic version.
3437                if source.is_source_tree() {
3438                    None
3439                } else {
3440                    return Err(LockErrorKind::MissingDependencyVersion {
3441                        name: self.name.clone(),
3442                    }
3443                    .into());
3444                }
3445            }
3446        };
3447        Ok(PackageId {
3448            name: self.name,
3449            version,
3450            source,
3451        })
3452    }
3453}
3454
3455impl From<PackageId> for PackageIdForDependency {
3456    fn from(id: PackageId) -> Self {
3457        Self {
3458            name: id.name,
3459            version: id.version,
3460            source: Some(id.source),
3461        }
3462    }
3463}
3464
3465/// A unique identifier to differentiate between different sources for the same version of a
3466/// package.
3467///
3468/// NOTE: Care should be taken when adding variants to this enum. Namely, new
3469/// variants should be added without changing the relative ordering of other
3470/// variants. Otherwise, this could cause the lockfile to have a different
3471/// canonical ordering of sources.
3472#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, serde::Deserialize)]
3473#[serde(try_from = "SourceWire")]
3474enum Source {
3475    /// A registry or `--find-links` index.
3476    Registry(RegistrySource),
3477    /// A Git repository.
3478    Git(UrlString, GitSource),
3479    /// A direct HTTP(S) URL.
3480    Direct(UrlString, DirectSource),
3481    /// A path to a local source or built archive.
3482    Path(Box<Path>),
3483    /// A path to a local directory.
3484    Directory(Box<Path>),
3485    /// A path to a local directory that should be installed as editable.
3486    Editable(Box<Path>),
3487    /// A path to a local directory that should not be built or installed.
3488    Virtual(Box<Path>),
3489}
3490
3491impl Source {
3492    fn from_resolved_dist(resolved_dist: &ResolvedDist, root: &Path) -> Result<Self, LockError> {
3493        match *resolved_dist {
3494            // No installed packages are passed in when locking, so this variant is unreachable.
3495            ResolvedDist::Installed { .. } => unreachable!(),
3496            ResolvedDist::Installable { ref dist, .. } => Self::from_dist(dist, root),
3497        }
3498    }
3499
3500    fn from_dist(dist: &Dist, root: &Path) -> Result<Self, LockError> {
3501        match *dist {
3502            Dist::Built(ref built_dist) => Self::from_built_dist(built_dist, root),
3503            Dist::Source(ref source_dist) => Self::from_source_dist(source_dist, root),
3504        }
3505    }
3506
3507    fn from_built_dist(built_dist: &BuiltDist, root: &Path) -> Result<Self, LockError> {
3508        match *built_dist {
3509            BuiltDist::Registry(ref reg_dist) => Self::from_registry_built_dist(reg_dist, root),
3510            BuiltDist::DirectUrl(ref direct_dist) => Ok(Self::from_direct_built_dist(direct_dist)),
3511            BuiltDist::Path(ref path_dist) => Self::from_path_built_dist(path_dist, root),
3512        }
3513    }
3514
3515    fn from_source_dist(
3516        source_dist: &uv_distribution_types::SourceDist,
3517        root: &Path,
3518    ) -> Result<Self, LockError> {
3519        match *source_dist {
3520            uv_distribution_types::SourceDist::Registry(ref reg_dist) => {
3521                Self::from_registry_source_dist(reg_dist, root)
3522            }
3523            uv_distribution_types::SourceDist::DirectUrl(ref direct_dist) => {
3524                Ok(Self::from_direct_source_dist(direct_dist))
3525            }
3526            uv_distribution_types::SourceDist::Git(ref git_dist) => {
3527                Ok(Self::from_git_dist(git_dist))
3528            }
3529            uv_distribution_types::SourceDist::Path(ref path_dist) => {
3530                Self::from_path_source_dist(path_dist, root)
3531            }
3532            uv_distribution_types::SourceDist::Directory(ref directory) => {
3533                Self::from_directory_source_dist(directory, root)
3534            }
3535        }
3536    }
3537
3538    fn from_registry_built_dist(
3539        reg_dist: &RegistryBuiltDist,
3540        root: &Path,
3541    ) -> Result<Self, LockError> {
3542        Self::from_index_url(&reg_dist.best_wheel().index, root)
3543    }
3544
3545    fn from_registry_source_dist(
3546        reg_dist: &RegistrySourceDist,
3547        root: &Path,
3548    ) -> Result<Self, LockError> {
3549        Self::from_index_url(&reg_dist.index, root)
3550    }
3551
3552    fn from_direct_built_dist(direct_dist: &DirectUrlBuiltDist) -> Self {
3553        Self::Direct(
3554            normalize_url(direct_dist.url.to_url()),
3555            DirectSource { subdirectory: None },
3556        )
3557    }
3558
3559    fn from_direct_source_dist(direct_dist: &DirectUrlSourceDist) -> Self {
3560        Self::Direct(
3561            normalize_url(direct_dist.url.to_url()),
3562            DirectSource {
3563                subdirectory: direct_dist.subdirectory.clone(),
3564            },
3565        )
3566    }
3567
3568    fn from_path_built_dist(path_dist: &PathBuiltDist, root: &Path) -> Result<Self, LockError> {
3569        let path = relative_to(&path_dist.install_path, root)
3570            .or_else(|_| std::path::absolute(&path_dist.install_path))
3571            .map_err(LockErrorKind::DistributionRelativePath)?;
3572        Ok(Self::Path(path.into_boxed_path()))
3573    }
3574
3575    fn from_path_source_dist(path_dist: &PathSourceDist, root: &Path) -> Result<Self, LockError> {
3576        let path = relative_to(&path_dist.install_path, root)
3577            .or_else(|_| std::path::absolute(&path_dist.install_path))
3578            .map_err(LockErrorKind::DistributionRelativePath)?;
3579        Ok(Self::Path(path.into_boxed_path()))
3580    }
3581
3582    fn from_directory_source_dist(
3583        directory_dist: &DirectorySourceDist,
3584        root: &Path,
3585    ) -> Result<Self, LockError> {
3586        let path = relative_to(&directory_dist.install_path, root)
3587            .or_else(|_| std::path::absolute(&directory_dist.install_path))
3588            .map_err(LockErrorKind::DistributionRelativePath)?;
3589        if directory_dist.editable.unwrap_or(false) {
3590            Ok(Self::Editable(path.into_boxed_path()))
3591        } else if directory_dist.r#virtual.unwrap_or(false) {
3592            Ok(Self::Virtual(path.into_boxed_path()))
3593        } else {
3594            Ok(Self::Directory(path.into_boxed_path()))
3595        }
3596    }
3597
3598    fn from_index_url(index_url: &IndexUrl, root: &Path) -> Result<Self, LockError> {
3599        match index_url {
3600            IndexUrl::Pypi(_) | IndexUrl::Url(_) => {
3601                // Remove any sensitive credentials from the index URL.
3602                let redacted = index_url.without_credentials();
3603                let source = RegistrySource::Url(UrlString::from(redacted.as_ref()));
3604                Ok(Self::Registry(source))
3605            }
3606            IndexUrl::Path(url) => {
3607                let path = url
3608                    .to_file_path()
3609                    .map_err(|()| LockErrorKind::UrlToPath { url: url.to_url() })?;
3610                let path = relative_to(&path, root)
3611                    .or_else(|_| std::path::absolute(&path))
3612                    .map_err(LockErrorKind::IndexRelativePath)?;
3613                let source = RegistrySource::Path(path.into_boxed_path());
3614                Ok(Self::Registry(source))
3615            }
3616        }
3617    }
3618
3619    fn from_git_dist(git_dist: &GitSourceDist) -> Self {
3620        Self::Git(
3621            UrlString::from(locked_git_url(git_dist)),
3622            GitSource {
3623                kind: GitSourceKind::from(git_dist.git.reference().clone()),
3624                precise: git_dist.git.precise().unwrap_or_else(|| {
3625                    panic!("Git distribution is missing a precise hash: {git_dist}")
3626                }),
3627                subdirectory: git_dist.subdirectory.clone(),
3628                lfs: git_dist.git.lfs(),
3629            },
3630        )
3631    }
3632
3633    /// Returns `true` if the source should be considered immutable.
3634    ///
3635    /// We assume that registry sources are immutable. In other words, we expect that once a
3636    /// package-version is published to a registry, its metadata will not change.
3637    ///
3638    /// We also assume that Git sources are immutable, since a Git source encodes a specific commit.
3639    fn is_immutable(&self) -> bool {
3640        matches!(self, Self::Registry(..) | Self::Git(_, _))
3641    }
3642
3643    /// Returns `true` if the source is that of a wheel.
3644    fn is_wheel(&self) -> bool {
3645        match self {
3646            Self::Path(path) => {
3647                matches!(
3648                    DistExtension::from_path(path).ok(),
3649                    Some(DistExtension::Wheel)
3650                )
3651            }
3652            Self::Direct(url, _) => {
3653                matches!(
3654                    DistExtension::from_path(url.as_ref()).ok(),
3655                    Some(DistExtension::Wheel)
3656                )
3657            }
3658            Self::Directory(..) => false,
3659            Self::Editable(..) => false,
3660            Self::Virtual(..) => false,
3661            Self::Git(..) => false,
3662            Self::Registry(..) => false,
3663        }
3664    }
3665
3666    /// Returns `true` if the source is that of a source tree.
3667    fn is_source_tree(&self) -> bool {
3668        match self {
3669            Self::Directory(..) | Self::Editable(..) | Self::Virtual(..) => true,
3670            Self::Path(..) | Self::Git(..) | Self::Registry(..) | Self::Direct(..) => false,
3671        }
3672    }
3673
3674    /// Returns the path to the source tree, if the source is a source tree.
3675    fn as_source_tree(&self) -> Option<&Path> {
3676        match self {
3677            Self::Directory(path) | Self::Editable(path) | Self::Virtual(path) => Some(path),
3678            Self::Path(..) | Self::Git(..) | Self::Registry(..) | Self::Direct(..) => None,
3679        }
3680    }
3681
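    /// Write this source into the given package table as an inline `source` table, keyed by
    /// one of `registry`, `git`, `url` (with an optional `subdirectory`), `path`,
    /// `directory`, `editable`, or `virtual`.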
3682    fn to_toml(&self, table: &mut Table) {
3683        let mut source_table = InlineTable::new();
3684        match self {
3685            Self::Registry(source) => match source {
3686                RegistrySource::Url(url) => {
3687                    source_table.insert("registry", Value::from(url.as_ref()));
3688                }
3689                RegistrySource::Path(path) => {
3690                    source_table.insert(
3691                        "registry",
3692                        Value::from(PortablePath::from(path).to_string()),
3693                    );
3694                }
3695            },
3696            Self::Git(url, _) => {
3697                source_table.insert("git", Value::from(url.as_ref()));
3698            }
3699            Self::Direct(url, DirectSource { subdirectory }) => {
3700                source_table.insert("url", Value::from(url.as_ref()));
3701                if let Some(ref subdirectory) = *subdirectory {
3702                    source_table.insert(
3703                        "subdirectory",
3704                        Value::from(PortablePath::from(subdirectory).to_string()),
3705                    );
3706                }
3707            }
3708            Self::Path(path) => {
3709                source_table.insert("path", Value::from(PortablePath::from(path).to_string()));
3710            }
3711            Self::Directory(path) => {
3712                source_table.insert(
3713                    "directory",
3714                    Value::from(PortablePath::from(path).to_string()),
3715                );
3716            }
3717            Self::Editable(path) => {
3718                source_table.insert(
3719                    "editable",
3720                    Value::from(PortablePath::from(path).to_string()),
3721                );
3722            }
3723            Self::Virtual(path) => {
3724                source_table.insert("virtual", Value::from(PortablePath::from(path).to_string()));
3725            }
3726        }
3727        table.insert("source", value(source_table));
3728    }
3729
3730    /// Returns `true` if the package is local, i.e., backed by a path, directory, editable, or virtual source.
3731    pub(crate) fn is_local(&self) -> bool {
3732        matches!(
3733            self,
3734            Self::Path(_) | Self::Directory(_) | Self::Editable(_) | Self::Virtual(_)
3735        )
3736    }
3737}
3738
3739impl Display for Source {
3740    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
3741        match self {
3742            Self::Registry(RegistrySource::Url(url)) | Self::Git(url, _) | Self::Direct(url, _) => {
3743                write!(f, "{}+{}", self.name(), url)
3744            }
3745            Self::Registry(RegistrySource::Path(path))
3746            | Self::Path(path)
3747            | Self::Directory(path)
3748            | Self::Editable(path)
3749            | Self::Virtual(path) => {
3750                write!(f, "{}+{}", self.name(), PortablePath::from(path))
3751            }
3752        }
3753    }
3754}
3755
3756impl Source {
3757    fn name(&self) -> &str {
3758        match self {
3759            Self::Registry(..) => "registry",
3760            Self::Git(..) => "git",
3761            Self::Direct(..) => "direct",
3762            Self::Path(..) => "path",
3763            Self::Directory(..) => "directory",
3764            Self::Editable(..) => "editable",
3765            Self::Virtual(..) => "virtual",
3766        }
3767    }
3768
3769    /// Returns `Some(true)` to indicate that the source kind _must_ include a
3770    /// hash.
3771    ///
3772    /// Returns `Some(false)` to indicate that the source kind _must not_
3773    /// include a hash.
3774    ///
3775    /// Returns `None` to indicate that the source kind _may_ include a hash.
3776    fn requires_hash(&self) -> Option<bool> {
3777        match self {
3778            Self::Registry(..) => None,
3779            Self::Direct(..) | Self::Path(..) => Some(true),
3780            Self::Git(..) | Self::Directory(..) | Self::Editable(..) | Self::Virtual(..) => {
3781                Some(false)
3782            }
3783        }
3784    }
3785}
3786
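/// The wire (TOML) representation of a [`Source`], deserialized untagged from the inline
/// `source` table.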
3787#[derive(Clone, Debug, serde::Deserialize)]
3788#[serde(untagged, rename_all = "kebab-case")]
3789enum SourceWire {
3790    Registry {
3791        registry: RegistrySourceWire,
3792    },
3793    Git {
3794        git: String,
3795    },
3796    Direct {
3797        url: UrlString,
3798        subdirectory: Option<PortablePathBuf>,
3799    },
3800    Path {
3801        path: PortablePathBuf,
3802    },
3803    Directory {
3804        directory: PortablePathBuf,
3805    },
3806    Editable {
3807        editable: PortablePathBuf,
3808    },
3809    Virtual {
3810        r#virtual: PortablePathBuf,
3811    },
3812}
3813
3814impl TryFrom<SourceWire> for Source {
3815    type Error = LockError;
3816
3817    fn try_from(wire: SourceWire) -> Result<Self, LockError> {
3818        #[allow(clippy::enum_glob_use)]
3819        use self::SourceWire::*;
3820
3821        match wire {
3822            Registry { registry } => Ok(Self::Registry(registry.into())),
3823            Git { git } => {
3824                let url = DisplaySafeUrl::parse(&git)
3825                    .map_err(|err| SourceParseError::InvalidUrl {
3826                        given: git.clone(),
3827                        err,
3828                    })
3829                    .map_err(LockErrorKind::InvalidGitSourceUrl)?;
3830
3831                let git_source = GitSource::from_url(&url)
3832                    .map_err(|err| match err {
3833                        GitSourceError::InvalidSha => SourceParseError::InvalidSha { given: git },
3834                        GitSourceError::MissingSha => SourceParseError::MissingSha { given: git },
3835                    })
3836                    .map_err(LockErrorKind::InvalidGitSourceUrl)?;
3837
3838                Ok(Self::Git(UrlString::from(url), git_source))
3839            }
3840            Direct { url, subdirectory } => Ok(Self::Direct(
3841                url,
3842                DirectSource {
3843                    subdirectory: subdirectory.map(Box::<std::path::Path>::from),
3844                },
3845            )),
3846            Path { path } => Ok(Self::Path(path.into())),
3847            Directory { directory } => Ok(Self::Directory(directory.into())),
3848            Editable { editable } => Ok(Self::Editable(editable.into())),
3849            Virtual { r#virtual } => Ok(Self::Virtual(r#virtual.into())),
3850        }
3851    }
3852}
3853
3854/// The source for a registry, which could be a URL or a relative path.
3855#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
3856enum RegistrySource {
3857    /// Ex) `https://pypi.org/simple`
3858    Url(UrlString),
3859    /// Ex) `../path/to/local/index`
3860    Path(Box<Path>),
3861}
3862
3863impl Display for RegistrySource {
3864    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
3865        match self {
3866            Self::Url(url) => write!(f, "{url}"),
3867            Self::Path(path) => write!(f, "{}", path.display()),
3868        }
3869    }
3870}
3871
3872#[derive(Clone, Debug)]
3873enum RegistrySourceWire {
3874    /// Ex) `https://pypi.org/simple`
3875    Url(UrlString),
3876    /// Ex) `../path/to/local/index`
3877    Path(PortablePathBuf),
3878}
3879
3880impl<'de> serde::de::Deserialize<'de> for RegistrySourceWire {
3881    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
3882    where
3883        D: serde::de::Deserializer<'de>,
3884    {
3885        struct Visitor;
3886
3887        impl serde::de::Visitor<'_> for Visitor {
3888            type Value = RegistrySourceWire;
3889
3890            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
3891                formatter.write_str("a valid URL or a file path")
3892            }
3893
3894            fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
3895            where
3896                E: serde::de::Error,
3897            {
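                // Strings carrying a URL scheme (e.g., `https://`) are treated as registry
                // URLs; anything else is treated as a (possibly relative) local path.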
3898                if split_scheme(value).is_some() {
3899                    Ok(
3900                        serde::Deserialize::deserialize(serde::de::value::StrDeserializer::new(
3901                            value,
3902                        ))
3903                        .map(RegistrySourceWire::Url)?,
3904                    )
3905                } else {
3906                    Ok(
3907                        serde::Deserialize::deserialize(serde::de::value::StrDeserializer::new(
3908                            value,
3909                        ))
3910                        .map(RegistrySourceWire::Path)?,
3911                    )
3912                }
3913            }
3914        }
3915
3916        deserializer.deserialize_str(Visitor)
3917    }
3918}
3919
3920impl From<RegistrySourceWire> for RegistrySource {
3921    fn from(wire: RegistrySourceWire) -> Self {
3922        match wire {
3923            RegistrySourceWire::Url(url) => Self::Url(url),
3924            RegistrySourceWire::Path(path) => Self::Path(path.into()),
3925        }
3926    }
3927}
3928
3929#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, serde::Deserialize)]
3930#[serde(rename_all = "kebab-case")]
3931struct DirectSource {
3932    subdirectory: Option<Box<Path>>,
3933}
3934
3935/// NOTE: Care should be taken when adding fields to this struct. Namely, new
3936/// fields should be added without changing the relative ordering of existing
3937/// fields. Otherwise, this could cause the lockfile to have a different
3938/// canonical ordering of package entries.
3939#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
3940struct GitSource {
3941    precise: GitOid,
3942    subdirectory: Option<Box<Path>>,
3943    kind: GitSourceKind,
3944    lfs: GitLfs,
3945}
3946
3947/// An error that occurs when a Git source URL could not be parsed.
3948#[derive(Clone, Debug, Eq, PartialEq)]
3949enum GitSourceError {
3950    InvalidSha,
3951    MissingSha,
3952}
3953
3954impl GitSource {
3955    /// Extracts a Git source reference from the query pairs and the hash
3956    /// fragment in the given URL.
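    ///
    /// As a rough sketch (repository URL and commit SHA invented for
    /// illustration), a locked Git source URL carries the requested reference
    /// and subdirectory as query pairs and the precise commit as the fragment:
    ///
    /// ```text
    /// https://github.com/example/project?rev=main&subdirectory=crates/foo#0123456789abcdef0123456789abcdef01234567
    /// ```
    ///
    /// This parses to `kind = Rev("main")`, `subdirectory = Some("crates/foo")`,
    /// `lfs = Disabled`, and the fragment as the `precise` commit.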
3957    fn from_url(url: &Url) -> Result<Self, GitSourceError> {
3958        let mut kind = GitSourceKind::DefaultBranch;
3959        let mut subdirectory = None;
3960        let mut lfs = GitLfs::Disabled;
3961        for (key, val) in url.query_pairs() {
3962            match &*key {
3963                "tag" => kind = GitSourceKind::Tag(val.into_owned()),
3964                "branch" => kind = GitSourceKind::Branch(val.into_owned()),
3965                "rev" => kind = GitSourceKind::Rev(val.into_owned()),
3966                "subdirectory" => subdirectory = Some(PortablePathBuf::from(val.as_ref()).into()),
3967                "lfs" => lfs = GitLfs::from(val.eq_ignore_ascii_case("true")),
3968                _ => {}
3969            }
3970        }
3971
3972        let precise = GitOid::from_str(url.fragment().ok_or(GitSourceError::MissingSha)?)
3973            .map_err(|_| GitSourceError::InvalidSha)?;
3974
3975        Ok(Self {
3976            precise,
3977            subdirectory,
3978            kind,
3979            lfs,
3980        })
3981    }
3982}
3983
3984#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, serde::Deserialize)]
3985#[serde(rename_all = "kebab-case")]
3986enum GitSourceKind {
3987    Tag(String),
3988    Branch(String),
3989    Rev(String),
3990    DefaultBranch,
3991}
3992
3993/// Inspired by: <https://discuss.python.org/t/lock-files-again-but-this-time-w-sdists/46593>
3994#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
3995#[serde(rename_all = "kebab-case")]
3996struct SourceDistMetadata {
3997    /// A hash of the source distribution.
3998    hash: Option<Hash>,
3999    /// The size of the source distribution in bytes.
4000    ///
4001    /// This is only present for source distributions that come from registries.
4002    size: Option<u64>,
4003    /// The upload time of the source distribution.
4004    #[serde(alias = "upload_time")]
4005    upload_time: Option<Timestamp>,
4006}
4007
4008/// A URL or file path where the source dist that was locked against was
4009/// found. The location does not need to exist in the future, so this should
4010/// be treated only as a hint for where to look and/or as a record of where
4011/// the source dist file originally came from.
4012#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
4013#[serde(from = "SourceDistWire")]
4014enum SourceDist {
4015    Url {
4016        url: UrlString,
4017        #[serde(flatten)]
4018        metadata: SourceDistMetadata,
4019    },
4020    Path {
4021        path: Box<Path>,
4022        #[serde(flatten)]
4023        metadata: SourceDistMetadata,
4024    },
4025    Metadata {
4026        #[serde(flatten)]
4027        metadata: SourceDistMetadata,
4028    },
4029}
4030
4031impl SourceDist {
4032    fn filename(&self) -> Option<Cow<'_, str>> {
4033        match self {
4034            Self::Metadata { .. } => None,
4035            Self::Url { url, .. } => url.filename().ok(),
4036            Self::Path { path, .. } => path.file_name().map(|filename| filename.to_string_lossy()),
4037        }
4038    }
4039
4040    fn url(&self) -> Option<&UrlString> {
4041        match self {
4042            Self::Metadata { .. } => None,
4043            Self::Url { url, .. } => Some(url),
4044            Self::Path { .. } => None,
4045        }
4046    }
4047
4048    pub(crate) fn hash(&self) -> Option<&Hash> {
4049        match self {
4050            Self::Metadata { metadata } => metadata.hash.as_ref(),
4051            Self::Url { metadata, .. } => metadata.hash.as_ref(),
4052            Self::Path { metadata, .. } => metadata.hash.as_ref(),
4053        }
4054    }
4055
4056    pub(crate) fn size(&self) -> Option<u64> {
4057        match self {
4058            Self::Metadata { metadata } => metadata.size,
4059            Self::Url { metadata, .. } => metadata.size,
4060            Self::Path { metadata, .. } => metadata.size,
4061        }
4062    }
4063
4064    pub(crate) fn upload_time(&self) -> Option<Timestamp> {
4065        match self {
4066            Self::Metadata { metadata } => metadata.upload_time,
4067            Self::Url { metadata, .. } => metadata.upload_time,
4068            Self::Path { metadata, .. } => metadata.upload_time,
4069        }
4070    }
4071}
4072
4073impl SourceDist {
4074    fn from_annotated_dist(
4075        id: &PackageId,
4076        annotated_dist: &AnnotatedDist,
4077    ) -> Result<Option<Self>, LockError> {
4078        match annotated_dist.dist {
4079            // We pass an empty set of installed packages when locking, so this variant is unreachable.
4080            ResolvedDist::Installed { .. } => unreachable!(),
4081            ResolvedDist::Installable { ref dist, .. } => Self::from_dist(
4082                id,
4083                dist,
4084                annotated_dist.hashes.as_slice(),
4085                annotated_dist.index(),
4086            ),
4087        }
4088    }
4089
4090    fn from_dist(
4091        id: &PackageId,
4092        dist: &Dist,
4093        hashes: &[HashDigest],
4094        index: Option<&IndexUrl>,
4095    ) -> Result<Option<Self>, LockError> {
4096        match *dist {
4097            Dist::Built(BuiltDist::Registry(ref built_dist)) => {
4098                let Some(sdist) = built_dist.sdist.as_ref() else {
4099                    return Ok(None);
4100                };
4101                Self::from_registry_dist(sdist, index)
4102            }
4103            Dist::Built(_) => Ok(None),
4104            Dist::Source(ref source_dist) => Self::from_source_dist(id, source_dist, hashes, index),
4105        }
4106    }
4107
4108    fn from_source_dist(
4109        id: &PackageId,
4110        source_dist: &uv_distribution_types::SourceDist,
4111        hashes: &[HashDigest],
4112        index: Option<&IndexUrl>,
4113    ) -> Result<Option<Self>, LockError> {
4114        match *source_dist {
4115            uv_distribution_types::SourceDist::Registry(ref reg_dist) => {
4116                Self::from_registry_dist(reg_dist, index)
4117            }
4118            uv_distribution_types::SourceDist::DirectUrl(_) => {
4119                Self::from_direct_dist(id, hashes).map(Some)
4120            }
4121            uv_distribution_types::SourceDist::Path(_) => {
4122                Self::from_path_dist(id, hashes).map(Some)
4123            }
4124            // An actual sdist entry in the lockfile is only required when it's
4125            // from a registry, a direct URL, or a local path. Otherwise, it's
4126            // strictly redundant with the information in all other kinds of `source`.
4127            uv_distribution_types::SourceDist::Git(_)
4128            | uv_distribution_types::SourceDist::Directory(_) => Ok(None),
4129        }
4130    }
4131
4132    fn from_registry_dist(
4133        reg_dist: &RegistrySourceDist,
4134        index: Option<&IndexUrl>,
4135    ) -> Result<Option<Self>, LockError> {
4136        // Reject distributions from registries that don't match the index URL, as can occur with
4137        // `--find-links`.
4138        if index.is_none_or(|index| *index != reg_dist.index) {
4139            return Ok(None);
4140        }
4141
4142        match &reg_dist.index {
4143            IndexUrl::Pypi(_) | IndexUrl::Url(_) => {
4144                let url = normalize_file_location(&reg_dist.file.url)
4145                    .map_err(LockErrorKind::InvalidUrl)
4146                    .map_err(LockError::from)?;
4147                let hash = reg_dist.file.hashes.iter().max().cloned().map(Hash::from);
4148                let size = reg_dist.file.size;
4149                let upload_time = reg_dist
4150                    .file
4151                    .upload_time_utc_ms
4152                    .map(Timestamp::from_millisecond)
4153                    .transpose()
4154                    .map_err(LockErrorKind::InvalidTimestamp)?;
4155                Ok(Some(Self::Url {
4156                    url,
4157                    metadata: SourceDistMetadata {
4158                        hash,
4159                        size,
4160                        upload_time,
4161                    },
4162                }))
4163            }
4164            IndexUrl::Path(path) => {
4165                let index_path = path
4166                    .to_file_path()
4167                    .map_err(|()| LockErrorKind::UrlToPath { url: path.to_url() })?;
4168                let url = reg_dist
4169                    .file
4170                    .url
4171                    .to_url()
4172                    .map_err(LockErrorKind::InvalidUrl)?;
4173
4174                if url.scheme() == "file" {
4175                    let reg_dist_path = url
4176                        .to_file_path()
4177                        .map_err(|()| LockErrorKind::UrlToPath { url })?;
4178                    let path = relative_to(&reg_dist_path, index_path)
4179                        .or_else(|_| std::path::absolute(&reg_dist_path))
4180                        .map_err(LockErrorKind::DistributionRelativePath)?
4181                        .into_boxed_path();
4182                    let hash = reg_dist.file.hashes.iter().max().cloned().map(Hash::from);
4183                    let size = reg_dist.file.size;
4184                    let upload_time = reg_dist
4185                        .file
4186                        .upload_time_utc_ms
4187                        .map(Timestamp::from_millisecond)
4188                        .transpose()
4189                        .map_err(LockErrorKind::InvalidTimestamp)?;
4190                    Ok(Some(Self::Path {
4191                        path,
4192                        metadata: SourceDistMetadata {
4193                            hash,
4194                            size,
4195                            upload_time,
4196                        },
4197                    }))
4198                } else {
4199                    let url = normalize_file_location(&reg_dist.file.url)
4200                        .map_err(LockErrorKind::InvalidUrl)
4201                        .map_err(LockError::from)?;
4202                    let hash = reg_dist.file.hashes.iter().max().cloned().map(Hash::from);
4203                    let size = reg_dist.file.size;
4204                    let upload_time = reg_dist
4205                        .file
4206                        .upload_time_utc_ms
4207                        .map(Timestamp::from_millisecond)
4208                        .transpose()
4209                        .map_err(LockErrorKind::InvalidTimestamp)?;
4210                    Ok(Some(Self::Url {
4211                        url,
4212                        metadata: SourceDistMetadata {
4213                            hash,
4214                            size,
4215                            upload_time,
4216                        },
4217                    }))
4218                }
4219            }
4220        }
4221    }
4222
4223    fn from_direct_dist(id: &PackageId, hashes: &[HashDigest]) -> Result<Self, LockError> {
4224        let Some(hash) = hashes.iter().max().cloned().map(Hash::from) else {
4225            let kind = LockErrorKind::Hash {
4226                id: id.clone(),
4227                artifact_type: "direct URL source distribution",
4228                expected: true,
4229            };
4230            return Err(kind.into());
4231        };
4232        Ok(Self::Metadata {
4233            metadata: SourceDistMetadata {
4234                hash: Some(hash),
4235                size: None,
4236                upload_time: None,
4237            },
4238        })
4239    }
4240
4241    fn from_path_dist(id: &PackageId, hashes: &[HashDigest]) -> Result<Self, LockError> {
4242        let Some(hash) = hashes.iter().max().cloned().map(Hash::from) else {
4243            let kind = LockErrorKind::Hash {
4244                id: id.clone(),
4245                artifact_type: "path source distribution",
4246                expected: true,
4247            };
4248            return Err(kind.into());
4249        };
4250        Ok(Self::Metadata {
4251            metadata: SourceDistMetadata {
4252                hash: Some(hash),
4253                size: None,
4254                upload_time: None,
4255            },
4256        })
4257    }
4258}
4259
4260#[derive(Clone, Debug, serde::Deserialize)]
4261#[serde(untagged, rename_all = "kebab-case")]
4262enum SourceDistWire {
4263    Url {
4264        url: UrlString,
4265        #[serde(flatten)]
4266        metadata: SourceDistMetadata,
4267    },
4268    Path {
4269        path: PortablePathBuf,
4270        #[serde(flatten)]
4271        metadata: SourceDistMetadata,
4272    },
4273    Metadata {
4274        #[serde(flatten)]
4275        metadata: SourceDistMetadata,
4276    },
4277}
4278
4279impl SourceDist {
4280    /// Returns the TOML representation of this source distribution.
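    ///
    /// For a registry source distribution, the resulting inline table looks
    /// roughly like the following (URL, hash, size, and upload time invented
    /// for illustration):
    ///
    /// ```text
    /// { url = "https://files.pythonhosted.org/packages/example-1.0.tar.gz", hash = "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", size = 10240, upload-time = "2024-01-01T00:00:00Z" }
    /// ```
    ///
    /// Local-registry source distributions use a `path` key instead of `url`,
    /// while direct-URL and path source distributions emit only the metadata keys.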
4281    fn to_toml(&self) -> Result<InlineTable, toml_edit::ser::Error> {
4282        let mut table = InlineTable::new();
4283        match self {
4284            Self::Metadata { .. } => {}
4285            Self::Url { url, .. } => {
4286                table.insert("url", Value::from(url.as_ref()));
4287            }
4288            Self::Path { path, .. } => {
4289                table.insert("path", Value::from(PortablePath::from(path).to_string()));
4290            }
4291        }
4292        if let Some(hash) = self.hash() {
4293            table.insert("hash", Value::from(hash.to_string()));
4294        }
4295        if let Some(size) = self.size() {
4296            table.insert(
4297                "size",
4298                toml_edit::ser::ValueSerializer::new().serialize_u64(size)?,
4299            );
4300        }
4301        if let Some(upload_time) = self.upload_time() {
4302            table.insert("upload-time", Value::from(upload_time.to_string()));
4303        }
4304        Ok(table)
4305    }
4306}
4307
4308impl From<SourceDistWire> for SourceDist {
4309    fn from(wire: SourceDistWire) -> Self {
4310        match wire {
4311            SourceDistWire::Url { url, metadata } => Self::Url { url, metadata },
4312            SourceDistWire::Path { path, metadata } => Self::Path {
4313                path: path.into(),
4314                metadata,
4315            },
4316            SourceDistWire::Metadata { metadata } => Self::Metadata { metadata },
4317        }
4318    }
4319}
4320
4321impl From<GitReference> for GitSourceKind {
4322    fn from(value: GitReference) -> Self {
4323        match value {
4324            GitReference::Branch(branch) => Self::Branch(branch),
4325            GitReference::Tag(tag) => Self::Tag(tag),
4326            GitReference::BranchOrTag(rev) => Self::Rev(rev),
4327            GitReference::BranchOrTagOrCommit(rev) => Self::Rev(rev),
4328            GitReference::NamedRef(rev) => Self::Rev(rev),
4329            GitReference::DefaultBranch => Self::DefaultBranch,
4330        }
4331    }
4332}
4333
4334impl From<GitSourceKind> for GitReference {
4335    fn from(value: GitSourceKind) -> Self {
4336        match value {
4337            GitSourceKind::Branch(branch) => Self::Branch(branch),
4338            GitSourceKind::Tag(tag) => Self::Tag(tag),
4339            GitSourceKind::Rev(rev) => Self::from_rev(rev),
4340            GitSourceKind::DefaultBranch => Self::DefaultBranch,
4341        }
4342    }
4343}
4344
4345/// Construct the lockfile-compatible [`DisplaySafeUrl`] for a [`GitSourceDist`].
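///
/// This is roughly the counterpart of [`GitSource::from_url`]: credentials are
/// stripped, the requested reference and subdirectory are written as query
/// pairs, and the precise commit is written as the fragment. A hypothetical
/// result (values invented):
///
/// ```text
/// https://github.com/example/project?tag=v1.0&subdirectory=crates/foo#0123456789abcdef0123456789abcdef01234567
/// ```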
4346fn locked_git_url(git_dist: &GitSourceDist) -> DisplaySafeUrl {
4347    let mut url = git_dist.git.repository().clone();
4348
4349    // Remove the credentials.
4350    url.remove_credentials();
4351
4352    // Clear out any existing state.
4353    url.set_fragment(None);
4354    url.set_query(None);
4355
4356    // Put the subdirectory in the query.
4357    if let Some(subdirectory) = git_dist
4358        .subdirectory
4359        .as_deref()
4360        .map(PortablePath::from)
4361        .as_ref()
4362        .map(PortablePath::to_string)
4363    {
4364        url.query_pairs_mut()
4365            .append_pair("subdirectory", &subdirectory);
4366    }
4367
4368    // Put `lfs=true` in the package source Git URL only when LFS is explicitly enabled.
4369    if git_dist.git.lfs().enabled() {
4370        url.query_pairs_mut().append_pair("lfs", "true");
4371    }
4372
4373    // Put the requested reference in the query.
4374    match git_dist.git.reference() {
4375        GitReference::Branch(branch) => {
4376            url.query_pairs_mut().append_pair("branch", branch.as_str());
4377        }
4378        GitReference::Tag(tag) => {
4379            url.query_pairs_mut().append_pair("tag", tag.as_str());
4380        }
4381        GitReference::BranchOrTag(rev)
4382        | GitReference::BranchOrTagOrCommit(rev)
4383        | GitReference::NamedRef(rev) => {
4384            url.query_pairs_mut().append_pair("rev", rev.as_str());
4385        }
4386        GitReference::DefaultBranch => {}
4387    }
4388
4389    // Put the precise commit in the fragment.
4390    url.set_fragment(
4391        git_dist
4392            .git
4393            .precise()
4394            .as_ref()
4395            .map(GitOid::to_string)
4396            .as_deref(),
4397    );
4398
4399    url
4400}
4401
4402#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
4403struct ZstdWheel {
4404    hash: Option<Hash>,
4405    size: Option<u64>,
4406}
4407
4408/// Inspired by: <https://discuss.python.org/t/lock-files-again-but-this-time-w-sdists/46593>
4409#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
4410#[serde(try_from = "WheelWire")]
4411struct Wheel {
4412    /// A URL or file path (via `file://`) where the wheel that was locked
4413    /// against was found. The location does not need to exist in the future,
4414    /// so this should be treated only as a hint for where to look and/or as
4415    /// a record of where the wheel file originally came from.
4416    url: WheelWireSource,
4417    /// A hash of the built distribution.
4418    ///
4419    /// This is only present for wheels that come from registries and direct
4420    /// URLs. Wheels from git or path dependencies do not have hashes
4421    /// associated with them.
4422    hash: Option<Hash>,
4423    /// The size of the built distribution in bytes.
4424    ///
4425    /// This is only present for wheels that come from registries.
4426    size: Option<u64>,
4427    /// The upload time of the built distribution.
4428    ///
4429    /// This is only present for wheels that come from registries.
4430    upload_time: Option<Timestamp>,
4431    /// The filename of the wheel.
4432    ///
4433    /// This isn't part of the wire format since it's redundant with the
4434    /// URL. But we do use it for various things, and thus compute it at
4435    /// deserialization time. Not being able to extract a wheel filename from a
4436    /// wheel URL is thus a deserialization error.
4437    filename: WheelFilename,
4438    /// The zstandard-compressed wheel metadata, if any.
4439    zstd: Option<ZstdWheel>,
4440}
4441
4442impl Wheel {
4443    fn from_annotated_dist(annotated_dist: &AnnotatedDist) -> Result<Vec<Self>, LockError> {
4444        match annotated_dist.dist {
4445            // We pass an empty set of installed packages when locking, so this variant is unreachable.
4446            ResolvedDist::Installed { .. } => unreachable!(),
4447            ResolvedDist::Installable { ref dist, .. } => Self::from_dist(
4448                dist,
4449                annotated_dist.hashes.as_slice(),
4450                annotated_dist.index(),
4451            ),
4452        }
4453    }
4454
4455    fn from_dist(
4456        dist: &Dist,
4457        hashes: &[HashDigest],
4458        index: Option<&IndexUrl>,
4459    ) -> Result<Vec<Self>, LockError> {
4460        match *dist {
4461            Dist::Built(ref built_dist) => Self::from_built_dist(built_dist, hashes, index),
4462            Dist::Source(uv_distribution_types::SourceDist::Registry(ref source_dist)) => {
4463                source_dist
4464                    .wheels
4465                    .iter()
4466                    .filter(|wheel| {
4467                        // Reject distributions from registries that don't match the index URL, as can occur with
4468                        // `--find-links`.
4469                        index.is_some_and(|index| *index == wheel.index)
4470                    })
4471                    .map(Self::from_registry_wheel)
4472                    .collect()
4473            }
4474            Dist::Source(_) => Ok(vec![]),
4475        }
4476    }
4477
4478    fn from_built_dist(
4479        built_dist: &BuiltDist,
4480        hashes: &[HashDigest],
4481        index: Option<&IndexUrl>,
4482    ) -> Result<Vec<Self>, LockError> {
4483        match *built_dist {
4484            BuiltDist::Registry(ref reg_dist) => Self::from_registry_dist(reg_dist, index),
4485            BuiltDist::DirectUrl(ref direct_dist) => {
4486                Ok(vec![Self::from_direct_dist(direct_dist, hashes)])
4487            }
4488            BuiltDist::Path(ref path_dist) => Ok(vec![Self::from_path_dist(path_dist, hashes)]),
4489        }
4490    }
4491
4492    fn from_registry_dist(
4493        reg_dist: &RegistryBuiltDist,
4494        index: Option<&IndexUrl>,
4495    ) -> Result<Vec<Self>, LockError> {
4496        reg_dist
4497            .wheels
4498            .iter()
4499            .filter(|wheel| {
4500                // Reject distributions from registries that don't match the index URL, as can occur with
4501                // `--find-links`.
4502                index.is_some_and(|index| *index == wheel.index)
4503            })
4504            .map(Self::from_registry_wheel)
4505            .collect()
4506    }
4507
4508    fn from_registry_wheel(wheel: &RegistryBuiltWheel) -> Result<Self, LockError> {
4509        let url = match &wheel.index {
4510            IndexUrl::Pypi(_) | IndexUrl::Url(_) => {
4511                let url = normalize_file_location(&wheel.file.url)
4512                    .map_err(LockErrorKind::InvalidUrl)
4513                    .map_err(LockError::from)?;
4514                WheelWireSource::Url { url }
4515            }
4516            IndexUrl::Path(path) => {
4517                let index_path = path
4518                    .to_file_path()
4519                    .map_err(|()| LockErrorKind::UrlToPath { url: path.to_url() })?;
4520                let wheel_url = wheel.file.url.to_url().map_err(LockErrorKind::InvalidUrl)?;
4521
4522                if wheel_url.scheme() == "file" {
4523                    let wheel_path = wheel_url
4524                        .to_file_path()
4525                        .map_err(|()| LockErrorKind::UrlToPath { url: wheel_url })?;
4526                    let path = relative_to(&wheel_path, index_path)
4527                        .or_else(|_| std::path::absolute(&wheel_path))
4528                        .map_err(LockErrorKind::DistributionRelativePath)?
4529                        .into_boxed_path();
4530                    WheelWireSource::Path { path }
4531                } else {
4532                    let url = normalize_file_location(&wheel.file.url)
4533                        .map_err(LockErrorKind::InvalidUrl)
4534                        .map_err(LockError::from)?;
4535                    WheelWireSource::Url { url }
4536                }
4537            }
4538        };
4539        let filename = wheel.filename.clone();
4540        let hash = wheel.file.hashes.iter().max().cloned().map(Hash::from);
4541        let size = wheel.file.size;
4542        let upload_time = wheel
4543            .file
4544            .upload_time_utc_ms
4545            .map(Timestamp::from_millisecond)
4546            .transpose()
4547            .map_err(LockErrorKind::InvalidTimestamp)?;
4548        let zstd = wheel.file.zstd.as_ref().map(|zstd| ZstdWheel {
4549            hash: zstd.hashes.iter().max().cloned().map(Hash::from),
4550            size: zstd.size,
4551        });
4552        Ok(Self {
4553            url,
4554            hash,
4555            size,
4556            upload_time,
4557            filename,
4558            zstd,
4559        })
4560    }
4561
4562    fn from_direct_dist(direct_dist: &DirectUrlBuiltDist, hashes: &[HashDigest]) -> Self {
4563        Self {
4564            url: WheelWireSource::Url {
4565                url: normalize_url(direct_dist.url.to_url()),
4566            },
4567            hash: hashes.iter().max().cloned().map(Hash::from),
4568            size: None,
4569            upload_time: None,
4570            filename: direct_dist.filename.clone(),
4571            zstd: None,
4572        }
4573    }
4574
4575    fn from_path_dist(path_dist: &PathBuiltDist, hashes: &[HashDigest]) -> Self {
4576        Self {
4577            url: WheelWireSource::Filename {
4578                filename: path_dist.filename.clone(),
4579            },
4580            hash: hashes.iter().max().cloned().map(Hash::from),
4581            size: None,
4582            upload_time: None,
4583            filename: path_dist.filename.clone(),
4584            zstd: None,
4585        }
4586    }
4587
4588    pub(crate) fn to_registry_wheel(
4589        &self,
4590        source: &RegistrySource,
4591        root: &Path,
4592    ) -> Result<RegistryBuiltWheel, LockError> {
4593        let filename: WheelFilename = self.filename.clone();
4594
4595        match source {
4596            RegistrySource::Url(url) => {
4597                let file_location = match &self.url {
4598                    WheelWireSource::Url { url: file_url } => {
4599                        FileLocation::AbsoluteUrl(file_url.clone())
4600                    }
4601                    WheelWireSource::Path { .. } | WheelWireSource::Filename { .. } => {
4602                        return Err(LockErrorKind::MissingUrl {
4603                            name: filename.name,
4604                            version: filename.version,
4605                        }
4606                        .into());
4607                    }
4608                };
4609                let file = Box::new(uv_distribution_types::File {
4610                    dist_info_metadata: false,
4611                    filename: SmallString::from(filename.to_string()),
4612                    hashes: self.hash.iter().map(|h| h.0.clone()).collect(),
4613                    requires_python: None,
4614                    size: self.size,
4615                    upload_time_utc_ms: self.upload_time.map(Timestamp::as_millisecond),
4616                    url: file_location,
4617                    yanked: None,
4618                    zstd: self
4619                        .zstd
4620                        .as_ref()
4621                        .map(|zstd| uv_distribution_types::Zstd {
4622                            hashes: zstd.hash.iter().map(|h| h.0.clone()).collect(),
4623                            size: zstd.size,
4624                        })
4625                        .map(Box::new),
4626                });
4627                let index = IndexUrl::from(VerbatimUrl::from_url(
4628                    url.to_url().map_err(LockErrorKind::InvalidUrl)?,
4629                ));
4630                Ok(RegistryBuiltWheel {
4631                    filename,
4632                    file,
4633                    index,
4634                })
4635            }
4636            RegistrySource::Path(index_path) => {
4637                let file_location = match &self.url {
4638                    WheelWireSource::Url { url: file_url } => {
4639                        FileLocation::AbsoluteUrl(file_url.clone())
4640                    }
4641                    WheelWireSource::Path { path: file_path } => {
4642                        let file_path = root.join(index_path).join(file_path);
4643                        let file_url =
4644                            DisplaySafeUrl::from_file_path(&file_path).map_err(|()| {
4645                                LockErrorKind::PathToUrl {
4646                                    path: file_path.into_boxed_path(),
4647                                }
4648                            })?;
4649                        FileLocation::AbsoluteUrl(UrlString::from(file_url))
4650                    }
4651                    WheelWireSource::Filename { .. } => {
4652                        return Err(LockErrorKind::MissingPath {
4653                            name: filename.name,
4654                            version: filename.version,
4655                        }
4656                        .into());
4657                    }
4658                };
4659                let file = Box::new(uv_distribution_types::File {
4660                    dist_info_metadata: false,
4661                    filename: SmallString::from(filename.to_string()),
4662                    hashes: self.hash.iter().map(|h| h.0.clone()).collect(),
4663                    requires_python: None,
4664                    size: self.size,
4665                    upload_time_utc_ms: self.upload_time.map(Timestamp::as_millisecond),
4666                    url: file_location,
4667                    yanked: None,
4668                    zstd: self
4669                        .zstd
4670                        .as_ref()
4671                        .map(|zstd| uv_distribution_types::Zstd {
4672                            hashes: zstd.hash.iter().map(|h| h.0.clone()).collect(),
4673                            size: zstd.size,
4674                        })
4675                        .map(Box::new),
4676                });
4677                let index = IndexUrl::from(
4678                    VerbatimUrl::from_absolute_path(root.join(index_path))
4679                        .map_err(LockErrorKind::RegistryVerbatimUrl)?,
4680                );
4681                Ok(RegistryBuiltWheel {
4682                    filename,
4683                    file,
4684                    index,
4685                })
4686            }
4687        }
4688    }
4689}
4690
4691#[derive(Clone, Debug, serde::Deserialize)]
4692#[serde(rename_all = "kebab-case")]
4693struct WheelWire {
4694    #[serde(flatten)]
4695    url: WheelWireSource,
4696    /// A hash of the built distribution.
4697    ///
4698    /// This is only present for wheels that come from registries and direct
4699    /// URLs. Wheels from git or path dependencies do not have hashes
4700    /// associated with them.
4701    hash: Option<Hash>,
4702    /// The size of the built distribution in bytes.
4703    ///
4704    /// This is only present for wheels that come from registries.
4705    size: Option<u64>,
4706    /// The upload time of the built distribution.
4707    ///
4708    /// This is only present for wheels that come from registries.
4709    #[serde(alias = "upload_time")]
4710    upload_time: Option<Timestamp>,
4711    /// The zstandard-compressed wheel metadata, if any.
4712    #[serde(alias = "zstd")]
4713    zstd: Option<ZstdWheel>,
4714}
4715
4716#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
4717#[serde(untagged, rename_all = "kebab-case")]
4718enum WheelWireSource {
4719    /// Used for all wheels that come from remote sources.
4720    Url {
4721        /// A URL where the wheel that was locked against was found. The location
4722        /// does not need to exist in the future, so this should be treated only
4723        /// as a hint for where to look and/or as a record of where the wheel
4724        /// file originally came from.
4725        url: UrlString,
4726    },
4727    /// Used for wheels that come from local registries (like `--find-links`).
4728    Path {
4729        /// The path to the wheel, relative to the index.
4730        path: Box<Path>,
4731    },
4732    /// Used for path wheels.
4733    ///
4734    /// We only store the filename for path wheels, since we can't store a relative path in the URL.
4735    Filename {
4736        /// We duplicate the filename since a lot of code relies on having the filename on the
4737        /// wheel entry.
4738        filename: WheelFilename,
4739    },
4740}
4741
4742impl Wheel {
4743    /// Returns the TOML representation of this wheel.
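    ///
    /// For a registry wheel, the resulting inline table looks roughly like the
    /// following (URL, hash, and size invented for illustration):
    ///
    /// ```text
    /// { url = "https://files.pythonhosted.org/packages/example-1.0-py3-none-any.whl", hash = "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", size = 20480 }
    /// ```
    ///
    /// Local-registry wheels use a `path` key and path wheels a `filename` key
    /// in place of `url`; `upload-time` and `zstd` are only emitted when present.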
4744    fn to_toml(&self) -> Result<InlineTable, toml_edit::ser::Error> {
4745        let mut table = InlineTable::new();
4746        match &self.url {
4747            WheelWireSource::Url { url } => {
4748                table.insert("url", Value::from(url.as_ref()));
4749            }
4750            WheelWireSource::Path { path } => {
4751                table.insert("path", Value::from(PortablePath::from(path).to_string()));
4752            }
4753            WheelWireSource::Filename { filename } => {
4754                table.insert("filename", Value::from(filename.to_string()));
4755            }
4756        }
4757        if let Some(ref hash) = self.hash {
4758            table.insert("hash", Value::from(hash.to_string()));
4759        }
4760        if let Some(size) = self.size {
4761            table.insert(
4762                "size",
4763                toml_edit::ser::ValueSerializer::new().serialize_u64(size)?,
4764            );
4765        }
4766        if let Some(upload_time) = self.upload_time {
4767            table.insert("upload-time", Value::from(upload_time.to_string()));
4768        }
4769        if let Some(zstd) = &self.zstd {
4770            let mut inner = InlineTable::new();
4771            if let Some(ref hash) = zstd.hash {
4772                inner.insert("hash", Value::from(hash.to_string()));
4773            }
4774            if let Some(size) = zstd.size {
4775                inner.insert(
4776                    "size",
4777                    toml_edit::ser::ValueSerializer::new().serialize_u64(size)?,
4778                );
4779            }
4780            table.insert("zstd", Value::from(inner));
4781        }
4782        Ok(table)
4783    }
4784}
4785
4786impl TryFrom<WheelWire> for Wheel {
4787    type Error = String;
4788
4789    fn try_from(wire: WheelWire) -> Result<Self, String> {
4790        let filename = match &wire.url {
4791            WheelWireSource::Url { url } => {
4792                let filename = url.filename().map_err(|err| err.to_string())?;
4793                filename.parse::<WheelFilename>().map_err(|err| {
4794                    format!("failed to parse `{filename}` as wheel filename: {err}")
4795                })?
4796            }
4797            WheelWireSource::Path { path } => {
4798                let filename = path
4799                    .file_name()
4800                    .and_then(|file_name| file_name.to_str())
4801                    .ok_or_else(|| {
4802                        format!("path `{}` has no filename component", path.display())
4803                    })?;
4804                filename.parse::<WheelFilename>().map_err(|err| {
4805                    format!("failed to parse `{filename}` as wheel filename: {err}")
4806                })?
4807            }
4808            WheelWireSource::Filename { filename } => filename.clone(),
4809        };
4810
4811        Ok(Self {
4812            url: wire.url,
4813            hash: wire.hash,
4814            size: wire.size,
4815            upload_time: wire.upload_time,
4816            zstd: wire.zstd,
4817            filename,
4818        })
4819    }
4820}
4821
4822/// A single dependency of a package in a lockfile.
4823#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
4824pub struct Dependency {
4825    package_id: PackageId,
4826    extra: BTreeSet<ExtraName>,
4827    /// A marker simplified from the PEP 508 marker in `complexified_marker`
4828    /// by assuming `requires-python` is satisfied. So if
4829    /// `requires-python = '>=3.8'`, then
4830    /// `python_version >= '3.8' and python_version < '3.12'`
4831    /// gets simplified to `python_version < '3.12'`.
4832    ///
4833    /// Generally speaking, this marker should not be exposed to
4834    /// anything outside this module unless it's for a specialized use
4835    /// case. But specifically, it should never be used to evaluate
4836    /// against a marker environment or for disjointness checks or any
4837    /// other kind of marker algebra.
4838    ///
4839    /// It exists because there are some cases where we do actually
4840    /// want to compare markers in their "simplified" form. For
4841    /// example, when collapsing the extras on duplicate dependencies.
4842    /// Even if a dependency has different complexified markers,
4843    /// they might have identical markers once simplified. And since
4844    /// `requires-python` applies to the entire lock file, it's
4845    /// acceptable to do comparisons on the simplified form.
4846    simplified_marker: SimplifiedMarkerTree,
4847    /// The "complexified" marker is a universal marker whose PEP 508
4848    /// marker can stand on its own independent of `requires-python`.
4849    /// It can be safely used for any kind of marker algebra.
4850    complexified_marker: UniversalMarker,
4851}
4852
4853impl Dependency {
4854    fn new(
4855        requires_python: &RequiresPython,
4856        package_id: PackageId,
4857        extra: BTreeSet<ExtraName>,
4858        complexified_marker: UniversalMarker,
4859    ) -> Self {
4860        let simplified_marker =
4861            SimplifiedMarkerTree::new(requires_python, complexified_marker.combined());
4862        let complexified_marker = simplified_marker.into_marker(requires_python);
4863        Self {
4864            package_id,
4865            extra,
4866            simplified_marker,
4867            complexified_marker: UniversalMarker::from_combined(complexified_marker),
4868        }
4869    }
4870
4871    fn from_annotated_dist(
4872        requires_python: &RequiresPython,
4873        annotated_dist: &AnnotatedDist,
4874        complexified_marker: UniversalMarker,
4875        root: &Path,
4876    ) -> Result<Self, LockError> {
4877        let package_id = PackageId::from_annotated_dist(annotated_dist, root)?;
4878        let extra = annotated_dist.extra.iter().cloned().collect();
4879        Ok(Self::new(
4880            requires_python,
4881            package_id,
4882            extra,
4883            complexified_marker,
4884        ))
4885    }
4886
4887    /// Returns the TOML representation of this dependency.
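    ///
    /// In the lockfile, a dependency entry looks roughly like the following
    /// (package name, extra, and marker invented for illustration; a version
    /// may also be written when the package name alone is ambiguous):
    ///
    /// ```text
    /// { name = "example", extra = ["cli"], marker = "python_full_version < '3.12'" }
    /// ```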
4888    fn to_toml(
4889        &self,
4890        _requires_python: &RequiresPython,
4891        dist_count_by_name: &FxHashMap<PackageName, u64>,
4892    ) -> Table {
4893        let mut table = Table::new();
4894        self.package_id
4895            .to_toml(Some(dist_count_by_name), &mut table);
4896        if !self.extra.is_empty() {
4897            let extra_array = self
4898                .extra
4899                .iter()
4900                .map(ToString::to_string)
4901                .collect::<Array>();
4902            table.insert("extra", value(extra_array));
4903        }
4904        if let Some(marker) = self.simplified_marker.try_to_string() {
4905            table.insert("marker", value(marker));
4906        }
4907
4908        table
4909    }
4910
4911    /// Returns the package name of this dependency.
4912    pub fn package_name(&self) -> &PackageName {
4913        &self.package_id.name
4914    }
4915
4916    /// Returns the extras specified on this dependency.
4917    pub fn extra(&self) -> &BTreeSet<ExtraName> {
4918        &self.extra
4919    }
4920}
4921
4922impl Display for Dependency {
4923    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
4924        match (self.extra.is_empty(), self.package_id.version.as_ref()) {
4925            (true, Some(version)) => write!(f, "{}=={}", self.package_id.name, version),
4926            (true, None) => write!(f, "{}", self.package_id.name),
4927            (false, Some(version)) => write!(
4928                f,
4929                "{}[{}]=={}",
4930                self.package_id.name,
4931                self.extra.iter().join(","),
4932                version
4933            ),
4934            (false, None) => write!(
4935                f,
4936                "{}[{}]",
4937                self.package_id.name,
4938                self.extra.iter().join(",")
4939            ),
4940        }
4941    }
4942}
4943
4944/// The TOML wire representation of a single dependency of a package in a lockfile.
4945#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, serde::Deserialize)]
4946#[serde(rename_all = "kebab-case")]
4947struct DependencyWire {
4948    #[serde(flatten)]
4949    package_id: PackageIdForDependency,
4950    #[serde(default)]
4951    extra: BTreeSet<ExtraName>,
4952    #[serde(default)]
4953    marker: SimplifiedMarkerTree,
4954}
4955
4956impl DependencyWire {
4957    fn unwire(
4958        self,
4959        requires_python: &RequiresPython,
4960        unambiguous_package_ids: &FxHashMap<PackageName, PackageId>,
4961    ) -> Result<Dependency, LockError> {
4962        let complexified_marker = self.marker.into_marker(requires_python);
4963        Ok(Dependency {
4964            package_id: self.package_id.unwire(unambiguous_package_ids)?,
4965            extra: self.extra,
4966            simplified_marker: self.marker,
4967            complexified_marker: UniversalMarker::from_combined(complexified_marker),
4968        })
4969    }
4970}
4971
4972/// A single hash for a distribution artifact in a lockfile.
4973///
4974/// A hash is encoded as a single TOML string in the format
4975/// `{algorithm}:{digest}`.
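///
/// For example, a SHA-256 digest is written as (digest shown for illustration):
///
/// ```text
/// "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
/// ```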
4976#[derive(Clone, Debug, PartialEq, Eq)]
4977struct Hash(HashDigest);
4978
4979impl From<HashDigest> for Hash {
4980    fn from(hd: HashDigest) -> Self {
4981        Self(hd)
4982    }
4983}
4984
4985impl FromStr for Hash {
4986    type Err = HashParseError;
4987
4988    fn from_str(s: &str) -> Result<Self, HashParseError> {
4989        let (algorithm, digest) = s.split_once(':').ok_or(HashParseError(
4990            "expected '{algorithm}:{digest}', but found no ':' in hash digest",
4991        ))?;
4992        let algorithm = algorithm
4993            .parse()
4994            .map_err(|_| HashParseError("unrecognized hash algorithm"))?;
4995        Ok(Self(HashDigest {
4996            algorithm,
4997            digest: digest.into(),
4998        }))
4999    }
5000}
5001
5002impl Display for Hash {
5003    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
5004        write!(f, "{}:{}", self.0.algorithm, self.0.digest)
5005    }
5006}
5007
5008impl<'de> serde::Deserialize<'de> for Hash {
5009    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
5010    where
5011        D: serde::de::Deserializer<'de>,
5012    {
5013        struct Visitor;
5014
5015        impl serde::de::Visitor<'_> for Visitor {
5016            type Value = Hash;
5017
5018            fn expecting(&self, f: &mut Formatter) -> std::fmt::Result {
5019                f.write_str("a string")
5020            }
5021
5022            fn visit_str<E: serde::de::Error>(self, v: &str) -> Result<Self::Value, E> {
5023                Hash::from_str(v).map_err(serde::de::Error::custom)
5024            }
5025        }
5026
5027        deserializer.deserialize_str(Visitor)
5028    }
5029}
5030
5031impl From<Hash> for Hashes {
5032    fn from(value: Hash) -> Self {
5033        match value.0.algorithm {
5034            HashAlgorithm::Md5 => Self {
5035                md5: Some(value.0.digest),
5036                sha256: None,
5037                sha384: None,
5038                sha512: None,
5039                blake2b: None,
5040            },
5041            HashAlgorithm::Sha256 => Self {
5042                md5: None,
5043                sha256: Some(value.0.digest),
5044                sha384: None,
5045                sha512: None,
5046                blake2b: None,
5047            },
5048            HashAlgorithm::Sha384 => Self {
5049                md5: None,
5050                sha256: None,
5051                sha384: Some(value.0.digest),
5052                sha512: None,
5053                blake2b: None,
5054            },
5055            HashAlgorithm::Sha512 => Self {
5056                md5: None,
5057                sha256: None,
5058                sha384: None,
5059                sha512: Some(value.0.digest),
5060                blake2b: None,
5061            },
5062            HashAlgorithm::Blake2b => Self {
5063                md5: None,
5064                sha256: None,
5065                sha384: None,
5066                sha512: None,
5067                blake2b: Some(value.0.digest),
5068            },
5069        }
5070    }
5071}
5072
5073/// Convert a [`FileLocation`] into a normalized [`UrlString`].
5074fn normalize_file_location(location: &FileLocation) -> Result<UrlString, ToUrlError> {
5075    match location {
5076        FileLocation::AbsoluteUrl(absolute) => Ok(absolute.without_fragment().into_owned()),
5077        FileLocation::RelativeUrl(_, _) => Ok(normalize_url(location.to_url()?)),
5078    }
5079}
5080
5081/// Convert a [`DisplaySafeUrl`] into a normalized [`UrlString`] by removing the fragment.
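///
/// For example (URL invented):
///
/// ```text
/// https://example.com/example-1.0.tar.gz#sha256=abc123  ->  https://example.com/example-1.0.tar.gz
/// ```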
5082fn normalize_url(mut url: DisplaySafeUrl) -> UrlString {
5083    url.set_fragment(None);
5084    UrlString::from(url)
5085}
5086
5087/// Normalize a [`Requirement`], which could come from a lockfile, a `pyproject.toml`, etc.
5088///
5089/// Performs the following steps:
5090///
5091/// 1. Removes any sensitive credentials (see the sketch below).
5092/// 2. Ensures that the lock and install paths are appropriately framed with respect to the
5093///    current [`Workspace`].
5094/// 3. Removes the `origin` field, which is only used in `requirements.txt`.
5095/// 4. Simplifies the markers using the provided [`RequiresPython`] instance.
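///
/// As a sketch of step 1 (index URL invented for illustration):
///
/// ```text
/// https://user:secret@pypi.internal/simple  ->  https://pypi.internal/simple
/// ```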
5096fn normalize_requirement(
5097    mut requirement: Requirement,
5098    root: &Path,
5099    requires_python: &RequiresPython,
5100) -> Result<Requirement, LockError> {
5101    // Sort the extras and groups for consistency.
5102    requirement.extras.sort();
5103    requirement.groups.sort();
5104
5105    // Normalize the requirement source.
5106    match requirement.source {
5107        RequirementSource::Git {
5108            git,
5109            subdirectory,
5110            url: _,
5111        } => {
5112            // Reconstruct the Git URL.
5113            let git = {
5114                let mut repository = git.repository().clone();
5115
5116                // Remove the credentials.
5117                repository.remove_credentials();
5118
5119                // Remove the fragment and query from the URL; they're already present in the source.
5120                repository.set_fragment(None);
5121                repository.set_query(None);
5122
5123                GitUrl::from_fields(
5124                    repository,
5125                    git.reference().clone(),
5126                    git.precise(),
5127                    git.lfs(),
5128                )?
5129            };
5130
5131            // Reconstruct the PEP 508 URL from the underlying data.
5132            let url = DisplaySafeUrl::from(ParsedGitUrl {
5133                url: git.clone(),
5134                subdirectory: subdirectory.clone(),
5135            });
5136
5137            Ok(Requirement {
5138                name: requirement.name,
5139                extras: requirement.extras,
5140                groups: requirement.groups,
5141                marker: requires_python.simplify_markers(requirement.marker),
5142                source: RequirementSource::Git {
5143                    git,
5144                    subdirectory,
5145                    url: VerbatimUrl::from_url(url),
5146                },
5147                origin: None,
5148            })
5149        }
5150        RequirementSource::Path {
5151            install_path,
5152            ext,
5153            url: _,
5154        } => {
5155            let install_path =
5156                uv_fs::normalize_path_buf(root.join(&install_path)).into_boxed_path();
5157            let url = VerbatimUrl::from_normalized_path(&install_path)
5158                .map_err(LockErrorKind::RequirementVerbatimUrl)?;
5159
5160            Ok(Requirement {
5161                name: requirement.name,
5162                extras: requirement.extras,
5163                groups: requirement.groups,
5164                marker: requires_python.simplify_markers(requirement.marker),
5165                source: RequirementSource::Path {
5166                    install_path,
5167                    ext,
5168                    url,
5169                },
5170                origin: None,
5171            })
5172        }
5173        RequirementSource::Directory {
5174            install_path,
5175            editable,
5176            r#virtual,
5177            url: _,
5178        } => {
5179            let install_path =
5180                uv_fs::normalize_path_buf(root.join(&install_path)).into_boxed_path();
5181            let url = VerbatimUrl::from_normalized_path(&install_path)
5182                .map_err(LockErrorKind::RequirementVerbatimUrl)?;
5183
5184            Ok(Requirement {
5185                name: requirement.name,
5186                extras: requirement.extras,
5187                groups: requirement.groups,
5188                marker: requires_python.simplify_markers(requirement.marker),
5189                source: RequirementSource::Directory {
5190                    install_path,
5191                    editable: Some(editable.unwrap_or(false)),
5192                    r#virtual: Some(r#virtual.unwrap_or(false)),
5193                    url,
5194                },
5195                origin: None,
5196            })
5197        }
5198        RequirementSource::Registry {
5199            specifier,
5200            index,
5201            conflict,
5202        } => {
5203            // Round-trip the index to remove anything apart from the URL.
5204            let index = index
5205                .map(|index| index.url.into_url())
5206                .map(|mut index| {
5207                    index.remove_credentials();
5208                    index
5209                })
5210                .map(|index| IndexMetadata::from(IndexUrl::from(VerbatimUrl::from_url(index))));
5211            Ok(Requirement {
5212                name: requirement.name,
5213                extras: requirement.extras,
5214                groups: requirement.groups,
5215                marker: requires_python.simplify_markers(requirement.marker),
5216                source: RequirementSource::Registry {
5217                    specifier,
5218                    index,
5219                    conflict,
5220                },
5221                origin: None,
5222            })
5223        }
5224        RequirementSource::Url {
5225            mut location,
5226            subdirectory,
5227            ext,
5228            url: _,
5229        } => {
5230            // Remove the credentials.
5231            location.remove_credentials();
5232
5233            // Remove the fragment from the URL; it's already present in the source.
5234            location.set_fragment(None);
5235
5236            // Reconstruct the PEP 508 URL from the underlying data.
5237            let url = DisplaySafeUrl::from(ParsedArchiveUrl {
5238                url: location.clone(),
5239                subdirectory: subdirectory.clone(),
5240                ext,
5241            });
5242
5243            Ok(Requirement {
5244                name: requirement.name,
5245                extras: requirement.extras,
5246                groups: requirement.groups,
5247                marker: requires_python.simplify_markers(requirement.marker),
5248                source: RequirementSource::Url {
5249                    location,
5250                    subdirectory,
5251                    ext,
5252                    url: VerbatimUrl::from_url(url),
5253                },
5254                origin: None,
5255            })
5256        }
5257    }
5258}
5259
5260#[derive(Debug)]
5261pub struct LockError {
5262    kind: Box<LockErrorKind>,
5263    hint: Option<WheelTagHint>,
5264}
5265
5266impl std::error::Error for LockError {
5267    fn source(&self) -> Option<&(dyn Error + 'static)> {
5268        self.kind.source()
5269    }
5270}
5271
5272impl std::fmt::Display for LockError {
5273    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
5274        write!(f, "{}", self.kind)?;
5275        if let Some(hint) = &self.hint {
5276            write!(f, "\n\n{hint}")?;
5277        }
5278        Ok(())
5279    }
5280}
5281
5282impl LockError {
5283    /// Returns true if the [`LockError`] is a resolver error.
5284    pub fn is_resolution(&self) -> bool {
5285        matches!(&*self.kind, LockErrorKind::Resolution { .. })
5286    }
5287}
5288
5289impl<E> From<E> for LockError
5290where
5291    LockErrorKind: From<E>,
5292{
5293    fn from(err: E) -> Self {
5294        Self {
5295            kind: Box::new(LockErrorKind::from(err)),
5296            hint: None,
5297        }
5298    }
5299}
5300
5301#[derive(Debug, Clone, PartialEq, Eq)]
5302#[allow(clippy::enum_variant_names)]
5303enum WheelTagHint {
5304    /// None of the available wheels for a package have a compatible Python language tag (e.g.,
5305    /// `cp310` in `cp310-abi3-manylinux_2_17_x86_64.whl`).
5306    LanguageTags {
5307        package: PackageName,
5308        version: Option<Version>,
5309        tags: BTreeSet<LanguageTag>,
5310        best: Option<LanguageTag>,
5311    },
5312    /// None of the available wheels for a package have a compatible ABI tag (e.g., `abi3` in
5313    /// `cp310-abi3-manylinux_2_17_x86_64.whl`).
5314    AbiTags {
5315        package: PackageName,
5316        version: Option<Version>,
5317        tags: BTreeSet<AbiTag>,
5318        best: Option<AbiTag>,
5319    },
5320    /// None of the available wheels for a package have a compatible platform tag (e.g.,
5321    /// `manylinux_2_17_x86_64` in `cp310-abi3-manylinux_2_17_x86_64.whl`).
5322    PlatformTags {
5323        package: PackageName,
5324        version: Option<Version>,
5325        tags: BTreeSet<PlatformTag>,
5326        best: Option<PlatformTag>,
5327        markers: MarkerEnvironment,
5328    },
5329}
5330
5331impl WheelTagHint {
5332    /// Generate a [`WheelTagHint`] from the given (incompatible) wheels.
5333    fn from_wheels(
5334        name: &PackageName,
5335        version: Option<&Version>,
5336        filenames: &[&WheelFilename],
5337        tags: &Tags,
5338        markers: &MarkerEnvironment,
5339    ) -> Option<Self> {
5340        let incompatibility = filenames
5341            .iter()
5342            .map(|filename| {
5343                tags.compatibility(
5344                    filename.python_tags(),
5345                    filename.abi_tags(),
5346                    filename.platform_tags(),
5347                )
5348            })
5349            .max()?;
5350        match incompatibility {
5351            TagCompatibility::Incompatible(IncompatibleTag::Python) => {
5352                let best = tags.python_tag();
5353                let tags = Self::python_tags(filenames.iter().copied()).collect::<BTreeSet<_>>();
5354                if tags.is_empty() {
5355                    None
5356                } else {
5357                    Some(Self::LanguageTags {
5358                        package: name.clone(),
5359                        version: version.cloned(),
5360                        tags,
5361                        best,
5362                    })
5363                }
5364            }
5365            TagCompatibility::Incompatible(IncompatibleTag::Abi) => {
5366                let best = tags.abi_tag();
5367                let tags = Self::abi_tags(filenames.iter().copied())
5368                    // Ignore `none`, which is universally compatible.
5369                    //
5370                    // As an example, `none` can appear here if we're solving for Python 3.13, and
5371                    // the distribution includes a wheel for `cp312-none-macosx_11_0_arm64`.
5372                    //
5373                    // In that case, the wheel isn't compatible, but when solving for Python 3.13,
5374                    // the `cp312` Python tag _can_ be compatible (e.g., for `cp312-abi3-macosx_11_0_arm64.whl`),
5375                    // so this is considered an ABI incompatibility rather than Python incompatibility.
5376                    .filter(|tag| *tag != AbiTag::None)
5377                    .collect::<BTreeSet<_>>();
5378                if tags.is_empty() {
5379                    None
5380                } else {
5381                    Some(Self::AbiTags {
5382                        package: name.clone(),
5383                        version: version.cloned(),
5384                        tags,
5385                        best,
5386                    })
5387                }
5388            }
5389            TagCompatibility::Incompatible(IncompatibleTag::Platform) => {
5390                let best = tags.platform_tag().cloned();
5391                let incompatible_tags = Self::platform_tags(filenames.iter().copied(), tags)
5392                    .cloned()
5393                    .collect::<BTreeSet<_>>();
5394                if incompatible_tags.is_empty() {
5395                    None
5396                } else {
5397                    Some(Self::PlatformTags {
5398                        package: name.clone(),
5399                        version: version.cloned(),
5400                        tags: incompatible_tags,
5401                        best,
5402                        markers: markers.clone(),
5403                    })
5404                }
5405            }
5406            _ => None,
5407        }
5408    }
5409
5410    /// Returns an iterator over the Python tags of the available wheels.
5411    fn python_tags<'a>(
5412        filenames: impl Iterator<Item = &'a WheelFilename> + 'a,
5413    ) -> impl Iterator<Item = LanguageTag> + 'a {
5414        filenames.flat_map(WheelFilename::python_tags).copied()
5415    }
5416
5417    /// Returns an iterator over the ABI tags of the available wheels.
5418    fn abi_tags<'a>(
5419        filenames: impl Iterator<Item = &'a WheelFilename> + 'a,
5420    ) -> impl Iterator<Item = AbiTag> + 'a {
5421        filenames.flat_map(WheelFilename::abi_tags).copied()
5422    }
5423
5424    /// Returns the set of platform tags for the distribution that are ABI-compatible with the given
5425    /// tags.
5426    fn platform_tags<'a>(
5427        filenames: impl Iterator<Item = &'a WheelFilename> + 'a,
5428        tags: &'a Tags,
5429    ) -> impl Iterator<Item = &'a PlatformTag> + 'a {
5430        filenames.flat_map(move |filename| {
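            // Only surface platform tags from wheels whose Python/ABI tag pair is compatible
            // with the current interpreter; platform suggestions from wheels that would be
            // rejected on other grounds anyway would be misleading.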
5431            if filename.python_tags().iter().any(|wheel_py| {
5432                filename
5433                    .abi_tags()
5434                    .iter()
5435                    .any(|wheel_abi| tags.is_compatible_abi(*wheel_py, *wheel_abi))
5436            }) {
5437                filename.platform_tags().iter()
5438            } else {
5439                [].iter()
5440            }
5441        })
5442    }
5443
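    /// Builds a `tool.uv.required-environments` marker string from the current
    /// environment's `sys_platform` and (when set) `platform_machine` values.
    ///
    /// For example, on an x86_64 Linux host this would produce (illustrative values):
    ///
    /// ```text
    /// sys_platform == 'linux' and platform_machine == 'x86_64'
    /// ```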
5444    fn suggest_environment_marker(markers: &MarkerEnvironment) -> String {
5445        let sys_platform = markers.sys_platform();
5446        let platform_machine = markers.platform_machine();
5447
5448        // Generate the marker string based on actual environment values
5449        if platform_machine.is_empty() {
5450            format!("sys_platform == '{sys_platform}'")
5451        } else {
5452            format!("sys_platform == '{sys_platform}' and platform_machine == '{platform_machine}'")
5453        }
5454    }
5455}
5456
5457impl std::fmt::Display for WheelTagHint {
5458    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
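        // Each variant is rendered in a few shapes: with or without a known "best" tag for the
        // current interpreter, and with or without a resolved package version.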
5459        match self {
5460            Self::LanguageTags {
5461                package,
5462                version,
5463                tags,
5464                best,
5465            } => {
5466                if let Some(best) = best {
5467                    let s = if tags.len() == 1 { "" } else { "s" };
5468                    let best = if let Some(pretty) = best.pretty() {
5469                        format!("{} (`{}`)", pretty.cyan(), best.cyan())
5470                    } else {
5471                        format!("{}", best.cyan())
5472                    };
5473                    if let Some(version) = version {
5474                        write!(
5475                            f,
5476                            "{}{} You're using {}, but `{}` ({}) only has wheels with the following Python implementation tag{s}: {}",
5477                            "hint".bold().cyan(),
5478                            ":".bold(),
5479                            best,
5480                            package.cyan(),
5481                            format!("v{version}").cyan(),
5482                            tags.iter()
5483                                .map(|tag| format!("`{}`", tag.cyan()))
5484                                .join(", "),
5485                        )
5486                    } else {
5487                        write!(
5488                            f,
5489                            "{}{} You're using {}, but `{}` only has wheels with the following Python implementation tag{s}: {}",
5490                            "hint".bold().cyan(),
5491                            ":".bold(),
5492                            best,
5493                            package.cyan(),
5494                            tags.iter()
5495                                .map(|tag| format!("`{}`", tag.cyan()))
5496                                .join(", "),
5497                        )
5498                    }
5499                } else {
5500                    let s = if tags.len() == 1 { "" } else { "s" };
5501                    if let Some(version) = version {
5502                        write!(
5503                            f,
5504                            "{}{} Wheels are available for `{}` ({}) with the following Python implementation tag{s}: {}",
5505                            "hint".bold().cyan(),
5506                            ":".bold(),
5507                            package.cyan(),
5508                            format!("v{version}").cyan(),
5509                            tags.iter()
5510                                .map(|tag| format!("`{}`", tag.cyan()))
5511                                .join(", "),
5512                        )
5513                    } else {
5514                        write!(
5515                            f,
5516                            "{}{} Wheels are available for `{}` with the following Python implementation tag{s}: {}",
5517                            "hint".bold().cyan(),
5518                            ":".bold(),
5519                            package.cyan(),
5520                            tags.iter()
5521                                .map(|tag| format!("`{}`", tag.cyan()))
5522                                .join(", "),
5523                        )
5524                    }
5525                }
5526            }
5527            Self::AbiTags {
5528                package,
5529                version,
5530                tags,
5531                best,
5532            } => {
5533                if let Some(best) = best {
5534                    let s = if tags.len() == 1 { "" } else { "s" };
5535                    let best = if let Some(pretty) = best.pretty() {
5536                        format!("{} (`{}`)", pretty.cyan(), best.cyan())
5537                    } else {
5538                        format!("{}", best.cyan())
5539                    };
5540                    if let Some(version) = version {
5541                        write!(
5542                            f,
5543                            "{}{} You're using {}, but `{}` ({}) only has wheels with the following Python ABI tag{s}: {}",
5544                            "hint".bold().cyan(),
5545                            ":".bold(),
5546                            best,
5547                            package.cyan(),
5548                            format!("v{version}").cyan(),
5549                            tags.iter()
5550                                .map(|tag| format!("`{}`", tag.cyan()))
5551                                .join(", "),
5552                        )
5553                    } else {
5554                        write!(
5555                            f,
5556                            "{}{} You're using {}, but `{}` only has wheels with the following Python ABI tag{s}: {}",
5557                            "hint".bold().cyan(),
5558                            ":".bold(),
5559                            best,
5560                            package.cyan(),
5561                            tags.iter()
5562                                .map(|tag| format!("`{}`", tag.cyan()))
5563                                .join(", "),
5564                        )
5565                    }
5566                } else {
5567                    let s = if tags.len() == 1 { "" } else { "s" };
5568                    if let Some(version) = version {
5569                        write!(
5570                            f,
5571                            "{}{} Wheels are available for `{}` ({}) with the following Python ABI tag{s}: {}",
5572                            "hint".bold().cyan(),
5573                            ":".bold(),
5574                            package.cyan(),
5575                            format!("v{version}").cyan(),
5576                            tags.iter()
5577                                .map(|tag| format!("`{}`", tag.cyan()))
5578                                .join(", "),
5579                        )
5580                    } else {
5581                        write!(
5582                            f,
5583                            "{}{} Wheels are available for `{}` with the following Python ABI tag{s}: {}",
5584                            "hint".bold().cyan(),
5585                            ":".bold(),
5586                            package.cyan(),
5587                            tags.iter()
5588                                .map(|tag| format!("`{}`", tag.cyan()))
5589                                .join(", "),
5590                        )
5591                    }
5592                }
5593            }
5594            Self::PlatformTags {
5595                package,
5596                version,
5597                tags,
5598                best,
5599                markers,
5600            } => {
5601                let s = if tags.len() == 1 { "" } else { "s" };
5602                if let Some(best) = best {
5603                    let example_marker = Self::suggest_environment_marker(markers);
5604                    let best = if let Some(pretty) = best.pretty() {
5605                        format!("{} (`{}`)", pretty.cyan(), best.cyan())
5606                    } else {
5607                        format!("`{}`", best.cyan())
5608                    };
5609                    let package_ref = if let Some(version) = version {
5610                        format!("`{}` ({})", package.cyan(), format!("v{version}").cyan())
5611                    } else {
5612                        format!("`{}`", package.cyan())
5613                    };
5614                    write!(
5615                        f,
5616                        "{}{} You're on {}, but {} only has wheels for the following platform{s}: {}; consider adding {} to `{}` to ensure uv resolves to a version with compatible wheels",
5617                        "hint".bold().cyan(),
5618                        ":".bold(),
5619                        best,
5620                        package_ref,
5621                        tags.iter()
5622                            .map(|tag| format!("`{}`", tag.cyan()))
5623                            .join(", "),
5624                        format!("\"{example_marker}\"").cyan(),
5625                        "tool.uv.required-environments".green()
5626                    )
5627                } else {
5628                    if let Some(version) = version {
5629                        write!(
5630                            f,
5631                            "{}{} Wheels are available for `{}` ({}) on the following platform{s}: {}",
5632                            "hint".bold().cyan(),
5633                            ":".bold(),
5634                            package.cyan(),
5635                            format!("v{version}").cyan(),
5636                            tags.iter()
5637                                .map(|tag| format!("`{}`", tag.cyan()))
5638                                .join(", "),
5639                        )
5640                    } else {
5641                        write!(
5642                            f,
5643                            "{}{} Wheels are available for `{}` on the following platform{s}: {}",
5644                            "hint".bold().cyan(),
5645                            ":".bold(),
5646                            package.cyan(),
5647                            tags.iter()
5648                                .map(|tag| format!("`{}`", tag.cyan()))
5649                                .join(", "),
5650                        )
5651                    }
5652                }
5653            }
5654        }
5655    }
5656}
5657
5658/// An error that occurs when generating a `Lock` data structure.
5659///
5660/// These errors are sometimes the result of programming bugs. For example,
5661/// if two or more duplicate distributions are given to `Lock::new`, an
5662/// error is returned. In such cases, the fault most likely lies with the
5663/// caller.
5664#[derive(Debug, thiserror::Error)]
5665enum LockErrorKind {
5666    /// An error that occurs when multiple packages with the same
5667    /// ID were found.
5668    #[error("Found duplicate package `{id}`", id = id.cyan())]
5669    DuplicatePackage {
5670        /// The ID of the conflicting package.
5671        id: PackageId,
5672    },
5673    /// An error that occurs when there are multiple dependencies for the
5674    /// same package that have identical identifiers.
5675    #[error("For package `{id}`, found duplicate dependency `{dependency}`", id = id.cyan(), dependency = dependency.cyan())]
5676    DuplicateDependency {
5677        /// The ID of the package for which a duplicate dependency was
5678        /// found.
5679        id: PackageId,
5680        /// The ID of the conflicting dependency.
5681        dependency: Dependency,
5682    },
5683    /// An error that occurs when there are multiple dependencies for the
5684    /// same package that have identical identifiers, as part of that
5685    /// package's optional dependencies.
5686    #[error("For package `{id}`, found duplicate dependency `{dependency}`", id = format!("{id}[{extra}]").cyan(), dependency = dependency.cyan())]
5687    DuplicateOptionalDependency {
5688        /// The ID of the package for which a duplicate dependency was
5689        /// found.
5690        id: PackageId,
5691        /// The name of the extra.
5692        extra: ExtraName,
5693        /// The ID of the conflicting dependency.
5694        dependency: Dependency,
5695    },
5696    /// An error that occurs when there are multiple dependencies for the
5697    /// same package that have identical identifiers, as part of that
5698    /// package's development dependencies.
5699    #[error("For package `{id}`, found duplicate dependency `{dependency}`", id = format!("{id}:{group}").cyan(), dependency = dependency.cyan())]
5700    DuplicateDevDependency {
5701        /// The ID of the package for which a duplicate dependency was
5702        /// found.
5703        id: PackageId,
5704        /// The name of the dev dependency group.
5705        group: GroupName,
5706        /// The ID of the conflicting dependency.
5707        dependency: Dependency,
5708    },
5709    /// An error that occurs when the URL to a file for a wheel or
5710    /// source dist could not be converted to a structured `url::Url`.
5711    #[error(transparent)]
5712    InvalidUrl(
5713        /// The underlying error that occurred. This includes the
5714        /// errant URL in its error message.
5715        #[from]
5716        ToUrlError,
5717    ),
5718    /// An error that occurs when the extension can't be determined
5719    /// for a given wheel or source distribution.
5720    #[error("Failed to parse file extension for `{id}`; expected one of: {err}", id = id.cyan())]
5721    MissingExtension {
5722        /// The filename that was expected to have an extension.
5723        id: PackageId,
5724        /// The list of valid extensions that were expected.
5725        err: ExtensionError,
5726    },
5727    /// Failed to parse a Git source URL.
5728    #[error("Failed to parse Git URL")]
5729    InvalidGitSourceUrl(
5730        /// The underlying error that occurred. This includes the
5731        /// errant URL in the message.
5732        #[source]
5733        SourceParseError,
5734    ),
5735    #[error("Failed to parse timestamp")]
5736    InvalidTimestamp(
5737        /// The underlying error that occurred. This includes the
5738        /// errant timestamp in the message.
5739        #[source]
5740        jiff::Error,
5741    ),
5742    /// An error that occurs when there's an unrecognized dependency.
5743    ///
5744    /// That is, a dependency for a package that isn't in the lockfile.
5745    #[error("For package `{id}`, found dependency `{dependency}` with no locked package", id = id.cyan(), dependency = dependency.cyan())]
5746    UnrecognizedDependency {
5747        /// The ID of the package that has an unrecognized dependency.
5748        id: PackageId,
5749        /// The ID of the dependency that doesn't have a corresponding package
5750        /// entry.
5751        dependency: Dependency,
5752    },
5753    /// An error that occurs when a hash is expected (or not) for a particular
5754    /// artifact, but one was not found (or was).
5755    #[error("Since the package `{id}` comes from a {source} dependency, a hash was {expected} but one was {found} for {artifact_type}", id = id.cyan(), source = id.source.name(), expected = if *expected { "expected" } else { "not expected" }, found = if *expected { "not found" } else { "found" })]
5756    Hash {
5757        /// The ID of the package that has a missing hash.
5758        id: PackageId,
5759        /// The specific type of artifact, e.g., "source package"
5760        /// or "wheel".
5761        artifact_type: &'static str,
5762        /// When true, a hash is expected to be present.
5763        expected: bool,
5764    },
5765    /// An error that occurs when a package is included with an extra name,
5766    /// but no corresponding base package (i.e., without the extra) exists.
5767    #[error("Found package `{id}` with extra `{extra}` but no base package", id = id.cyan(), extra = extra.cyan())]
5768    MissingExtraBase {
5769        /// The ID of the package that has a missing base.
5770        id: PackageId,
5771        /// The extra name that was found.
5772        extra: ExtraName,
5773    },
5774    /// An error that occurs when a package is included with a development
5775    /// dependency group, but no corresponding base package (i.e., without
5776    /// the group) exists.
5777    #[error("Found package `{id}` with development dependency group `{group}` but no base package", id = id.cyan(), group = group.cyan())]
5778    MissingDevBase {
5779        /// The ID of the package that has a missing base.
5780        id: PackageId,
5781        /// The development dependency group that was found.
5782        group: GroupName,
5783    },
5784    /// An error that occurs from an invalid lockfile where a wheel comes from a non-wheel source
5785    /// such as a directory.
5786    #[error("Wheels cannot come from {source_type} sources")]
5787    InvalidWheelSource {
5788        /// The ID of the distribution with the invalid source.
5789        id: PackageId,
5790        /// The kind of the invalid source.
5791        source_type: &'static str,
5792    },
5793    /// An error that occurs when a distribution indicates that it is sourced from a remote
5794    /// registry, but is missing a URL.
5795    #[error("Found registry distribution `{name}` ({version}) without a valid URL", name = name.cyan(), version = format!("v{version}").cyan())]
5796    MissingUrl {
5797        /// The name of the distribution that is missing a URL.
5798        name: PackageName,
5799        /// The version of the distribution that is missing a URL.
5800        version: Version,
5801    },
5802    /// An error that occurs when a distribution indicates that it is sourced from a local registry,
5803    /// but is missing a path.
5804    #[error("Found registry distribution `{name}` ({version}) without a valid path", name = name.cyan(), version = format!("v{version}").cyan())]
5805    MissingPath {
5806        /// The name of the distribution that is missing a path.
5807        name: PackageName,
5808        /// The version of the distribution that is missing a path.
5809        version: Version,
5810    },
5811    /// An error that occurs when a distribution indicates that it is sourced from a registry, but
5812    /// is missing a filename.
5813    #[error("Found registry distribution `{id}` without a valid filename", id = id.cyan())]
5814    MissingFilename {
5815        /// The ID of the distribution that is missing a filename.
5816        id: PackageId,
5817    },
5818    /// An error that occurs when a distribution is included with neither wheels nor a source
5819    /// distribution.
5820    #[error("Distribution `{id}` can't be installed because it doesn't have a source distribution or wheel for the current platform", id = id.cyan())]
5821    NeitherSourceDistNorWheel {
5822        /// The ID of the distribution.
5823        id: PackageId,
5824    },
5825    /// An error that occurs when a distribution is marked as both `--no-binary` and `--no-build`.
5826    #[error("Distribution `{id}` can't be installed because it is marked as both `--no-binary` and `--no-build`", id = id.cyan())]
5827    NoBinaryNoBuild {
5828        /// The ID of the distribution.
5829        id: PackageId,
5830    },
5831    /// An error that occurs when a distribution is marked as `--no-binary`, but no source
5832    /// distribution is available.
5833    #[error("Distribution `{id}` can't be installed because it is marked as `--no-binary` but has no source distribution", id = id.cyan())]
5834    NoBinary {
5835        /// The ID of the distribution.
5836        id: PackageId,
5837    },
5838    /// An error that occurs when a distribution is marked as `--no-build`, but no binary
5839    /// distribution is available.
5840    #[error("Distribution `{id}` can't be installed because it is marked as `--no-build` but has no binary distribution", id = id.cyan())]
5841    NoBuild {
5842        /// The ID of the distribution.
5843        id: PackageId,
5844    },
5845    /// An error that occurs when a wheel-only distribution is incompatible with the current
5846    /// platform.
5847    #[error("Distribution `{id}` can't be installed because the binary distribution is incompatible with the current platform", id = id.cyan())]
5848    IncompatibleWheelOnly {
5849        /// The ID of the distribution.
5850        id: PackageId,
5851    },
5852    /// An error that occurs when a wheel-only source is marked as `--no-binary`.
5853    #[error("Distribution `{id}` can't be installed because it is marked as `--no-binary` but is itself a binary distribution", id = id.cyan())]
5854    NoBinaryWheelOnly {
5855        /// The ID of the distribution.
5856        id: PackageId,
5857    },
5858    /// An error that occurs when converting between URLs and paths.
5859    #[error("Found dependency `{id}` with no locked distribution", id = id.cyan())]
5860    VerbatimUrl {
5861        /// The ID of the distribution whose URL could not be converted.
5862        id: PackageId,
5863        /// The inner error we forward.
5864        #[source]
5865        err: VerbatimUrlError,
5866    },
5867    /// An error that occurs when computing a distribution path relative to the workspace.
5868    #[error("Could not compute relative path between workspace and distribution")]
5869    DistributionRelativePath(
5870        /// The inner error we forward.
5871        #[source]
5872        io::Error,
5873    ),
5874    /// An error that occurs when converting an index URL to a relative path.
5875    #[error("Could not compute relative path between workspace and index")]
5876    IndexRelativePath(
5877        /// The inner error we forward.
5878        #[source]
5879        io::Error,
5880    ),
5881    /// An error that occurs when converting a lockfile path from relative to absolute.
5882    #[error("Could not compute absolute path from workspace root and lockfile path")]
5883    AbsolutePath(
5884        /// The inner error we forward.
5885        #[source]
5886        io::Error,
5887    ),
5888    /// An error that occurs when an ambiguous `package.dependency` is
5889    /// missing a `version` field.
5890    #[error("Dependency `{name}` has missing `version` field but has more than one matching package", name = name.cyan())]
5891    MissingDependencyVersion {
5892        /// The name of the dependency that is missing a `version` field.
5893        name: PackageName,
5894    },
5895    /// An error that occurs when an ambiguous `package.dependency` is
5896    /// missing a `source` field.
5897    #[error("Dependency `{name}` has missing `source` field but has more than one matching package", name = name.cyan())]
5898    MissingDependencySource {
5899        /// The name of the dependency that is missing a `source` field.
5900        name: PackageName,
5901    },
5902    /// An error that occurs when parsing an existing requirement.
5903    #[error("Could not compute relative path between workspace and requirement")]
5904    RequirementRelativePath(
5905        /// The inner error we forward.
5906        #[source]
5907        io::Error,
5908    ),
5909    /// An error that occurs when parsing an existing requirement.
5910    #[error("Could not convert between URL and path")]
5911    RequirementVerbatimUrl(
5912        /// The inner error we forward.
5913        #[source]
5914        VerbatimUrlError,
5915    ),
5916    /// An error that occurs when parsing a registry's index URL.
5917    #[error("Could not convert between URL and path")]
5918    RegistryVerbatimUrl(
5919        /// The inner error we forward.
5920        #[source]
5921        VerbatimUrlError,
5922    ),
5923    /// An error that occurs when converting a path to a URL.
5924    #[error("Failed to convert path to URL: {path}", path = path.display().cyan())]
5925    PathToUrl { path: Box<Path> },
5926    /// An error that occurs when converting a URL to a path
5927    #[error("Failed to convert URL to path: {url}", url = url.cyan())]
5928    UrlToPath { url: DisplaySafeUrl },
5929    /// An error that occurs when multiple packages with the same
5930    /// name were found when identifying the root packages.
5931    #[error("Found multiple packages matching `{name}`", name = name.cyan())]
5932    MultipleRootPackages {
5933        /// The name of the package.
5934        name: PackageName,
5935    },
5936    /// An error that occurs when a root package can't be found.
5937    #[error("Could not find root package `{name}`", name = name.cyan())]
5938    MissingRootPackage {
5939        /// The name of the package.
5940        name: PackageName,
5941    },
5942    /// An error that occurs when resolving metadata for a package.
5943    #[error("Failed to generate package metadata for `{id}`", id = id.cyan())]
5944    Resolution {
5945        /// The ID of the distribution that failed to resolve.
5946        id: PackageId,
5947        /// The inner error we forward.
5948        #[source]
5949        err: uv_distribution::Error,
5950    },
5951    /// A package has inconsistent versions in a single entry.
5952    // Using name instead of id since the version in the id is part of the conflict.
5953    #[error("The entry for package `{name}` ({version}) has wheel `{wheel_filename}` with inconsistent version ({wheel_version}), which indicates a malformed wheel. If this is intentional, set `{env_var}`.", name = name.cyan(), wheel_filename = wheel.filename, wheel_version = wheel.filename.version, env_var = "UV_SKIP_WHEEL_FILENAME_CHECK=1".green())]
5954    InconsistentVersions {
5955        /// The name of the package with the inconsistent entry.
5956        name: PackageName,
5957        /// The version of the package with the inconsistent entry.
5958        version: Version,
5959        /// The wheel with the inconsistent version.
5960        wheel: Wheel,
5961    },
5962    #[error(
5963        "Found conflicting extras `{package1}[{extra1}]` \
5964         and `{package2}[{extra2}]` enabled simultaneously"
5965    )]
5966    ConflictingExtra {
5967        package1: PackageName,
5968        extra1: ExtraName,
5969        package2: PackageName,
5970        extra2: ExtraName,
5971    },
5972    #[error(transparent)]
5973    GitUrlParse(#[from] GitUrlParseError),
5974    #[error("Failed to read `{path}`")]
5975    UnreadablePyprojectToml {
5976        path: PathBuf,
5977        #[source]
5978        err: std::io::Error,
5979    },
5980    #[error("Failed to parse `{path}`")]
5981    InvalidPyprojectToml {
5982        path: PathBuf,
5983        #[source]
5984        err: toml::de::Error,
5985    },
5986    /// An error that occurs when a workspace member has a non-local source.
5987    #[error("Workspace member `{id}` has non-local source", id = id.cyan())]
5988    NonLocalWorkspaceMember {
5989        /// The ID of the workspace member with an invalid source.
5990        id: PackageId,
5991    },
5992}
5993
5994/// An error that occurs when a source string could not be parsed.
5995#[derive(Debug, thiserror::Error)]
5996enum SourceParseError {
5997    /// An error that occurs when the URL in the source is invalid.
5998    #[error("Invalid URL in source `{given}`")]
5999    InvalidUrl {
6000        /// The source string given.
6001        given: String,
6002        /// The URL parse error.
6003        #[source]
6004        err: DisplaySafeUrlError,
6005    },
6006    /// An error that occurs when a Git URL is missing a precise commit SHA.
6007    #[error("Missing SHA in source `{given}`")]
6008    MissingSha {
6009        /// The source string given.
6010        given: String,
6011    },
6012    /// An error that occurs when a Git URL has an invalid SHA.
6013    #[error("Invalid SHA in source `{given}`")]
6014    InvalidSha {
6015        /// The source string given.
6016        given: String,
6017    },
6018}
6019
6020/// An error that occurs when a hash digest could not be parsed.
6021#[derive(Clone, Debug, Eq, PartialEq)]
6022struct HashParseError(&'static str);
6023
6024impl std::error::Error for HashParseError {}
6025
6026impl Display for HashParseError {
6027    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
6028        Display::fmt(self.0, f)
6029    }
6030}
6031
6032/// Format an array so that each element is on its own line and has a trailing comma.
6033///
6034/// Example:
6035///
6036/// ```toml
6037/// dependencies = [
6038///     { name = "idna" },
6039///     { name = "sniffio" },
6040/// ]
6041/// ```
6042fn each_element_on_its_line_array(elements: impl Iterator<Item = impl Into<Value>>) -> Array {
6043    let mut array = elements
6044        .map(|item| {
6045            let mut value = item.into();
6046            // Each dependency is on its own line and indented.
6047            value.decor_mut().set_prefix("\n    ");
6048            value
6049        })
6050        .collect::<Array>();
6051    // With a trailing comma, inserting another entry doesn't change the preceding line,
6052    // reducing the diff noise.
6053    array.set_trailing_comma(true);
6054    // The line break between the last element's comma and the closing square bracket.
6055    array.set_trailing("\n");
6056    array
6057}
6058
6059/// Returns the simplified string-ified version of each marker given.
6060///
6061/// Note that conflict markers are only included when the PEP 508 projections of
6062/// the given markers would otherwise overlap.
6063fn simplified_universal_markers(
6064    markers: &[UniversalMarker],
6065    requires_python: &RequiresPython,
6066) -> Vec<String> {
6067    let mut pep508_only = vec![];
6068    let mut seen = FxHashSet::default();
6069    for marker in markers {
6070        let simplified =
6071            SimplifiedMarkerTree::new(requires_python, marker.pep508()).as_simplified_marker_tree();
6072        if seen.insert(simplified) {
6073            pep508_only.push(simplified);
6074        }
6075    }
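    // If the PEP 508 projections alone are pairwise disjoint, they already distinguish the
    // forks, so the conflict markers add no information and can be dropped.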
6076    let any_overlap = pep508_only
6077        .iter()
6078        .tuple_combinations()
6079        .any(|(&marker1, &marker2)| !marker1.is_disjoint(marker2));
6080    let markers = if !any_overlap {
6081        pep508_only
6082    } else {
6083        markers
6084            .iter()
6085            .map(|marker| {
6086                SimplifiedMarkerTree::new(requires_python, marker.combined())
6087                    .as_simplified_marker_tree()
6088            })
6089            .collect()
6090    };
6091    markers
6092        .into_iter()
6093        .filter_map(MarkerTree::try_to_string)
6094        .collect()
6095}
6096
6097#[cfg(test)]
6098mod tests {
6099    use uv_warnings::anstream;
6100
6101    use super::*;
6102
6103    /// Assert a given display snapshot, stripping ANSI color codes.
6104    macro_rules! assert_stripped_snapshot {
6105        ($expr:expr, @$snapshot:literal) => {{
6106            let expr = format!("{}", $expr);
6107            let expr = format!("{}", anstream::adapter::strip_str(&expr));
6108            insta::assert_snapshot!(expr, @$snapshot);
6109        }};
6110    }
6111
6112    #[test]
6113    fn missing_dependency_source_unambiguous() {
6114        let data = r#"
6115version = 1
6116requires-python = ">=3.12"
6117
6118[[package]]
6119name = "a"
6120version = "0.1.0"
6121source = { registry = "https://pypi.org/simple" }
6122sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6123
6124[[package]]
6125name = "b"
6126version = "0.1.0"
6127source = { registry = "https://pypi.org/simple" }
6128sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6129
6130[[package.dependencies]]
6131name = "a"
6132version = "0.1.0"
6133"#;
6134        let result: Result<Lock, _> = toml::from_str(data);
6135        insta::assert_debug_snapshot!(result);
6136    }
6137
6138    #[test]
6139    fn missing_dependency_version_unambiguous() {
6140        let data = r#"
6141version = 1
6142requires-python = ">=3.12"
6143
6144[[package]]
6145name = "a"
6146version = "0.1.0"
6147source = { registry = "https://pypi.org/simple" }
6148sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6149
6150[[package]]
6151name = "b"
6152version = "0.1.0"
6153source = { registry = "https://pypi.org/simple" }
6154sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6155
6156[[package.dependencies]]
6157name = "a"
6158source = { registry = "https://pypi.org/simple" }
6159"#;
6160        let result: Result<Lock, _> = toml::from_str(data);
6161        insta::assert_debug_snapshot!(result);
6162    }
6163
6164    #[test]
6165    fn missing_dependency_source_version_unambiguous() {
6166        let data = r#"
6167version = 1
6168requires-python = ">=3.12"
6169
6170[[package]]
6171name = "a"
6172version = "0.1.0"
6173source = { registry = "https://pypi.org/simple" }
6174sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6175
6176[[package]]
6177name = "b"
6178version = "0.1.0"
6179source = { registry = "https://pypi.org/simple" }
6180sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6181
6182[[package.dependencies]]
6183name = "a"
6184"#;
6185        let result: Result<Lock, _> = toml::from_str(data);
6186        insta::assert_debug_snapshot!(result);
6187    }
6188
6189    #[test]
6190    fn missing_dependency_source_ambiguous() {
6191        let data = r#"
6192version = 1
6193requires-python = ">=3.12"
6194
6195[[package]]
6196name = "a"
6197version = "0.1.0"
6198source = { registry = "https://pypi.org/simple" }
6199sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6200
6201[[package]]
6202name = "a"
6203version = "0.1.1"
6204source = { registry = "https://pypi.org/simple" }
6205sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6206
6207[[package]]
6208name = "b"
6209version = "0.1.0"
6210source = { registry = "https://pypi.org/simple" }
6211sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6212
6213[[package.dependencies]]
6214name = "a"
6215version = "0.1.0"
6216"#;
6217        let result = toml::from_str::<Lock>(data).unwrap_err();
6218        assert_stripped_snapshot!(result, @"Dependency `a` has missing `source` field but has more than one matching package");
6219    }
6220
6221    #[test]
6222    fn missing_dependency_version_ambiguous() {
6223        let data = r#"
6224version = 1
6225requires-python = ">=3.12"
6226
6227[[package]]
6228name = "a"
6229version = "0.1.0"
6230source = { registry = "https://pypi.org/simple" }
6231sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6232
6233[[package]]
6234name = "a"
6235version = "0.1.1"
6236source = { registry = "https://pypi.org/simple" }
6237sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6238
6239[[package]]
6240name = "b"
6241version = "0.1.0"
6242source = { registry = "https://pypi.org/simple" }
6243sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6244
6245[[package.dependencies]]
6246name = "a"
6247source = { registry = "https://pypi.org/simple" }
6248"#;
6249        let result = toml::from_str::<Lock>(data).unwrap_err();
6250        assert_stripped_snapshot!(result, @"Dependency `a` has missing `version` field but has more than one matching package");
6251    }
6252
6253    #[test]
6254    fn missing_dependency_source_version_ambiguous() {
6255        let data = r#"
6256version = 1
6257requires-python = ">=3.12"
6258
6259[[package]]
6260name = "a"
6261version = "0.1.0"
6262source = { registry = "https://pypi.org/simple" }
6263sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6264
6265[[package]]
6266name = "a"
6267version = "0.1.1"
6268source = { registry = "https://pypi.org/simple" }
6269sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6270
6271[[package]]
6272name = "b"
6273version = "0.1.0"
6274source = { registry = "https://pypi.org/simple" }
6275sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6276
6277[[package.dependencies]]
6278name = "a"
6279"#;
6280        let result = toml::from_str::<Lock>(data).unwrap_err();
6281        assert_stripped_snapshot!(result, @"Dependency `a` has missing `source` field but has more than one matching package");
6282    }
6283
6284    #[test]
6285    fn missing_dependency_version_dynamic() {
6286        let data = r#"
6287version = 1
6288requires-python = ">=3.12"
6289
6290[[package]]
6291name = "a"
6292source = { editable = "path/to/a" }
6293
6294[[package]]
6295name = "a"
6296version = "0.1.1"
6297source = { registry = "https://pypi.org/simple" }
6298sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6299
6300[[package]]
6301name = "b"
6302version = "0.1.0"
6303source = { registry = "https://pypi.org/simple" }
6304sdist = { url = "https://example.com", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 0 }
6305
6306[[package.dependencies]]
6307name = "a"
6308source = { editable = "path/to/a" }
6309"#;
6310        let result = toml::from_str::<Lock>(data);
6311        insta::assert_debug_snapshot!(result);
6312    }
6313
6314    #[test]
6315    fn hash_optional_missing() {
6316        let data = r#"
6317version = 1
6318requires-python = ">=3.12"
6319
6320[[package]]
6321name = "anyio"
6322version = "4.3.0"
6323source = { registry = "https://pypi.org/simple" }
6324wheels = [{ url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl" }]
6325"#;
6326        let result: Result<Lock, _> = toml::from_str(data);
6327        insta::assert_debug_snapshot!(result);
6328    }
6329
6330    #[test]
6331    fn hash_optional_present() {
6332        let data = r#"
6333version = 1
6334requires-python = ">=3.12"
6335
6336[[package]]
6337name = "anyio"
6338version = "4.3.0"
6339source = { registry = "https://pypi.org/simple" }
6340wheels = [{ url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8" }]
6341"#;
6342        let result: Result<Lock, _> = toml::from_str(data);
6343        insta::assert_debug_snapshot!(result);
6344    }
6345
6346    #[test]
6347    fn hash_required_present() {
6348        let data = r#"
6349version = 1
6350requires-python = ">=3.12"
6351
6352[[package]]
6353name = "anyio"
6354version = "4.3.0"
6355source = { path = "file:///foo/bar" }
6356wheels = [{ url = "file:///foo/bar/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8" }]
6357"#;
6358        let result: Result<Lock, _> = toml::from_str(data);
6359        insta::assert_debug_snapshot!(result);
6360    }
6361
6362    #[test]
6363    fn source_direct_no_subdir() {
6364        let data = r#"
6365version = 1
6366requires-python = ">=3.12"
6367
6368[[package]]
6369name = "anyio"
6370version = "4.3.0"
6371source = { url = "https://burntsushi.net" }
6372"#;
6373        let result: Result<Lock, _> = toml::from_str(data);
6374        insta::assert_debug_snapshot!(result);
6375    }
6376
6377    #[test]
6378    fn source_direct_has_subdir() {
6379        let data = r#"
6380version = 1
6381requires-python = ">=3.12"
6382
6383[[package]]
6384name = "anyio"
6385version = "4.3.0"
6386source = { url = "https://burntsushi.net", subdirectory = "wat/foo/bar" }
6387"#;
6388        let result: Result<Lock, _> = toml::from_str(data);
6389        insta::assert_debug_snapshot!(result);
6390    }
6391
6392    #[test]
6393    fn source_directory() {
6394        let data = r#"
6395version = 1
6396requires-python = ">=3.12"
6397
6398[[package]]
6399name = "anyio"
6400version = "4.3.0"
6401source = { directory = "path/to/dir" }
6402"#;
6403        let result: Result<Lock, _> = toml::from_str(data);
6404        insta::assert_debug_snapshot!(result);
6405    }
6406
6407    #[test]
6408    fn source_editable() {
6409        let data = r#"
6410version = 1
6411requires-python = ">=3.12"
6412
6413[[package]]
6414name = "anyio"
6415version = "4.3.0"
6416source = { editable = "path/to/dir" }
6417"#;
6418        let result: Result<Lock, _> = toml::from_str(data);
6419        insta::assert_debug_snapshot!(result);
6420    }
6421}