// cargo/core/compiler/context/mod.rs

1#![allow(deprecated)]
2use std::collections::{BTreeSet, HashMap, HashSet};
3use std::path::PathBuf;
4use std::sync::{Arc, Mutex};
5
6use filetime::FileTime;
7use jobserver::Client;
8
9use crate::core::compiler::{self, compilation, Unit};
10use crate::core::PackageId;
11use crate::util::errors::{CargoResult, CargoResultExt};
12use crate::util::{profile, Config};
13
14use super::build_plan::BuildPlan;
15use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
16use super::fingerprint::Fingerprint;
17use super::job_queue::JobQueue;
18use super::layout::Layout;
19use super::unit_graph::{UnitDep, UnitGraph};
20use super::{BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor};
21
22mod compilation_files;
23use self::compilation_files::CompilationFiles;
24pub use self::compilation_files::{Metadata, OutputFile};
25
26/// Collection of all the stuff that is needed to perform a build.
27pub struct Context<'a, 'cfg> {
28    /// Mostly static information about the build task.
29    pub bcx: &'a BuildContext<'a, 'cfg>,
30    /// A large collection of information about the result of the entire compilation.
31    pub compilation: Compilation<'cfg>,
32    /// Output from build scripts, updated after each build script runs.
33    pub build_script_outputs: Arc<Mutex<BuildScriptOutputs>>,
34    /// Dependencies (like rerun-if-changed) declared by a build script.
35    /// This is *only* populated from the output from previous runs.
36    /// If the build script hasn't ever been run, then it must be run.
37    pub build_explicit_deps: HashMap<Unit<'a>, BuildDeps>,
38    /// Fingerprints used to detect if a unit is out-of-date.
39    pub fingerprints: HashMap<Unit<'a>, Arc<Fingerprint>>,
40    /// Cache of file mtimes to reduce filesystem hits.
41    pub mtime_cache: HashMap<PathBuf, FileTime>,
42    /// A set used to track which units have been compiled.
43    /// A unit may appear in the job graph multiple times as a dependency of
44    /// multiple packages, but it only needs to run once.
45    pub compiled: HashSet<Unit<'a>>,
46    /// Linking information for each `Unit`.
47    /// See `build_map` for details.
48    pub build_scripts: HashMap<Unit<'a>, Arc<BuildScripts>>,
49    /// Job server client to manage concurrency with other processes.
50    pub jobserver: Client,
51    /// "Primary" packages are the ones the user selected on the command-line
52    /// with `-p` flags. If no flags are specified, then it is the defaults
53    /// based on the current directory and the default workspace members.
54    primary_packages: HashSet<PackageId>,
55    /// The dependency graph of units to compile.
56    unit_dependencies: UnitGraph<'a>,
57    /// An abstraction of the files and directories that will be generated by
58    /// the compilation. This is `None` until after `unit_dependencies` has
59    /// been computed.
60    files: Option<CompilationFiles<'a, 'cfg>>,
61
62    /// A flag indicating whether pipelining is enabled for this compilation
63    /// session. Pipelining largely only affects the edges of the dependency
64    /// graph that we generate at the end, and otherwise it's pretty
65    /// straightforward.
66    pipelining: bool,
67
68    /// A set of units which are compiling rlibs and are expected to produce
69    /// metadata files in addition to the rlib itself. This is only filled in
70    /// when `pipelining` above is enabled.
71    rmeta_required: HashSet<Unit<'a>>,
72
73    /// When we're in jobserver-per-rustc process mode, this keeps those
74    /// jobserver clients for each Unit (which eventually becomes a rustc
75    /// process).
76    pub rustc_clients: HashMap<Unit<'a>, Client>,
77}
78
79impl<'a, 'cfg> Context<'a, 'cfg> {
80    pub fn new(
81        config: &'cfg Config,
82        bcx: &'a BuildContext<'a, 'cfg>,
83        unit_dependencies: UnitGraph<'a>,
84        default_kind: CompileKind,
85    ) -> CargoResult<Self> {
86        // Load up the jobserver that we'll use to manage our parallelism. This
87        // is the same as the GNU make implementation of a jobserver, and
88        // intentionally so! It's hoped that we can interact with GNU make and
89        // all share the same jobserver.
90        //
91        // Note that if we don't have a jobserver in our environment then we
92        // create our own, and we create it with `n` tokens, but immediately
93        // acquire one, because one token is ourself, a running process.
94        let jobserver = match config.jobserver_from_env() {
95            Some(c) => c.clone(),
96            None => {
97                let client = Client::new(bcx.build_config.jobs as usize)
98                    .chain_err(|| "failed to create jobserver")?;
99                client.acquire_raw()?;
100                client
101            }
102        };
103
104        let pipelining = bcx.config.build_config()?.pipelining.unwrap_or(true);
105
106        Ok(Self {
107            bcx,
108            compilation: Compilation::new(bcx, default_kind)?,
109            build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())),
110            fingerprints: HashMap::new(),
111            mtime_cache: HashMap::new(),
112            compiled: HashSet::new(),
113            build_scripts: HashMap::new(),
114            build_explicit_deps: HashMap::new(),
115            jobserver,
116            primary_packages: HashSet::new(),
117            unit_dependencies,
118            files: None,
119            rmeta_required: HashSet::new(),
120            rustc_clients: HashMap::new(),
121            pipelining,
122        })
123    }
124
125    /// Starts compilation, waits for it to finish, and returns information
126    /// about the result of compilation.
127    pub fn compile(
128        mut self,
129        units: &[Unit<'a>],
130        export_dir: Option<PathBuf>,
131        exec: &Arc<dyn Executor>,
132    ) -> CargoResult<Compilation<'cfg>> {
133        let mut queue = JobQueue::new(self.bcx, units);
134        let mut plan = BuildPlan::new();
135        let build_plan = self.bcx.build_config.build_plan;
136        self.prepare_units(export_dir, units)?;
137        self.prepare()?;
138        custom_build::build_map(&mut self, units)?;
139        self.check_collistions()?;
140
141        for unit in units.iter() {
142            // Build up a list of pending jobs, each of which represent
143            // compiling a particular package. No actual work is executed as
144            // part of this, that's all done next as part of the `execute`
145            // function which will run everything in order with proper
146            // parallelism.
147            let force_rebuild = self.bcx.build_config.force_rebuild;
148            super::compile(&mut self, &mut queue, &mut plan, unit, exec, force_rebuild)?;
149        }
150
151        // Now that we've got the full job queue and we've done all our
152        // fingerprint analysis to determine what to run, bust all the memoized
153        // fingerprint hashes to ensure that during the build they all get the
154        // most up-to-date values. In theory we only need to bust hashes that
155        // transitively depend on a dirty build script, but it shouldn't matter
156        // that much for performance anyway.
157        for fingerprint in self.fingerprints.values() {
158            fingerprint.clear_memoized();
159        }
160
161        // Now that we've figured out everything that we're going to do, do it!
162        queue.execute(&mut self, &mut plan)?;
163
164        if build_plan {
165            plan.set_inputs(self.build_plan_inputs()?);
166            plan.output_plan();
167        }
168
169        // Collect the result of the build into `self.compilation`.
170        for unit in units.iter() {
171            // Collect tests and executables.
172            for output in self.outputs(unit)?.iter() {
173                if output.flavor == FileFlavor::DebugInfo || output.flavor == FileFlavor::Auxiliary
174                {
175                    continue;
176                }
177
178                let bindst = output.bin_dst();
179
180                if unit.mode == CompileMode::Test {
181                    self.compilation.tests.push((
182                        unit.pkg.clone(),
183                        unit.target.clone(),
184                        output.path.clone(),
185                    ));
186                } else if unit.target.is_executable() {
187                    self.compilation.binaries.push(bindst.clone());
188                }
189            }
190
191            // If the unit has a build script, add `OUT_DIR` to the
192            // environment variables.
193            if unit.target.is_lib() {
194                for dep in &self.unit_dependencies[unit] {
195                    if dep.unit.mode.is_run_custom_build() {
196                        let out_dir = self
197                            .files()
198                            .build_script_out_dir(&dep.unit)
199                            .display()
200                            .to_string();
201                        self.compilation
202                            .extra_env
203                            .entry(dep.unit.pkg.package_id())
204                            .or_insert_with(Vec::new)
205                            .push(("OUT_DIR".to_string(), out_dir));
206                    }
207                }
208            }
209
210            // Collect information for `rustdoc --test`.
211            if unit.mode.is_doc_test() {
212                let mut unstable_opts = false;
213                let args = compiler::extern_args(&self, unit, &mut unstable_opts)?;
214                self.compilation.to_doc_test.push(compilation::Doctest {
215                    package: unit.pkg.clone(),
216                    target: unit.target.clone(),
217                    args,
218                    unstable_opts,
219                });
220            }
221
222            // Collect the enabled features.
223            let feats = &unit.features;
224            if !feats.is_empty() {
225                self.compilation
226                    .cfgs
227                    .entry(unit.pkg.package_id())
228                    .or_insert_with(|| {
229                        feats
230                            .iter()
231                            .map(|feat| format!("feature=\"{}\"", feat))
232                            .collect()
233                    });
234            }
235
236            // Collect rustdocflags.
237            let rustdocflags = self.bcx.rustdocflags_args(unit);
238            if !rustdocflags.is_empty() {
239                self.compilation
240                    .rustdocflags
241                    .entry(unit.pkg.package_id())
242                    .or_insert_with(|| rustdocflags.to_vec());
243            }
244
245            super::output_depinfo(&mut self, unit)?;
246        }
247
248        for (pkg_id, output) in self.build_script_outputs.lock().unwrap().iter() {
249            self.compilation
250                .cfgs
251                .entry(pkg_id)
252                .or_insert_with(HashSet::new)
253                .extend(output.cfgs.iter().cloned());
254
255            self.compilation
256                .extra_env
257                .entry(pkg_id)
258                .or_insert_with(Vec::new)
259                .extend(output.env.iter().cloned());
260
261            for dir in output.library_paths.iter() {
262                self.compilation.native_dirs.insert(dir.clone());
263            }
264        }
265        Ok(self.compilation)
266    }
267
268    /// Returns the executable for the specified unit (if any).
269    pub fn get_executable(&mut self, unit: &Unit<'a>) -> CargoResult<Option<PathBuf>> {
270        for output in self.outputs(unit)?.iter() {
271            if output.flavor == FileFlavor::DebugInfo {
272                continue;
273            }
274
275            let is_binary = unit.target.is_executable();
276            let is_test = unit.mode.is_any_test() && !unit.mode.is_check();
277
278            if is_binary || is_test {
279                return Ok(Option::Some(output.bin_dst().clone()));
280            }
281        }
282        Ok(None)
283    }
284
285    pub fn prepare_units(
286        &mut self,
287        export_dir: Option<PathBuf>,
288        units: &[Unit<'a>],
289    ) -> CargoResult<()> {
290        let dest = self.bcx.profiles.get_dir_name();
291        let host_layout = Layout::new(self.bcx.ws, None, &dest)?;
292        let mut targets = HashMap::new();
293        if let CompileKind::Target(target) = self.bcx.build_config.requested_kind {
294            let layout = Layout::new(self.bcx.ws, Some(target), &dest)?;
295            targets.insert(target, layout);
296        }
297        self.primary_packages
298            .extend(units.iter().map(|u| u.pkg.package_id()));
299
300        self.record_units_requiring_metadata();
301
302        let files =
303            CompilationFiles::new(units, host_layout, targets, export_dir, self.bcx.ws, self);
304        self.files = Some(files);
305        Ok(())
306    }
307
308    /// Prepare this context, ensuring that all filesystem directories are in
309    /// place.
310    pub fn prepare(&mut self) -> CargoResult<()> {
311        let _p = profile::start("preparing layout");
312
313        self.files_mut()
314            .host
315            .prepare()
316            .chain_err(|| "couldn't prepare build directories")?;
317        for target in self.files.as_mut().unwrap().target.values_mut() {
318            target
319                .prepare()
320                .chain_err(|| "couldn't prepare build directories")?;
321        }
322
323        self.compilation.host_deps_output = self.files_mut().host.deps().to_path_buf();
324
325        let files = self.files.as_ref().unwrap();
326        let layout = files.layout(self.bcx.build_config.requested_kind);
327        self.compilation.root_output = layout.dest().to_path_buf();
328        self.compilation.deps_output = layout.deps().to_path_buf();
329        Ok(())
330    }
331
332    pub fn files(&self) -> &CompilationFiles<'a, 'cfg> {
333        self.files.as_ref().unwrap()
334    }
335
336    fn files_mut(&mut self) -> &mut CompilationFiles<'a, 'cfg> {
337        self.files.as_mut().unwrap()
338    }
339
340    /// Returns the filenames that the given unit will generate.
341    pub fn outputs(&self, unit: &Unit<'a>) -> CargoResult<Arc<Vec<OutputFile>>> {
342        self.files.as_ref().unwrap().outputs(unit, self.bcx)
343    }
344
345    /// Direct dependencies for the given unit.
346    pub fn unit_deps(&self, unit: &Unit<'a>) -> &[UnitDep<'a>] {
347        &self.unit_dependencies[unit]
348    }
349
350    /// Returns the RunCustomBuild Unit associated with the given Unit.
351    ///
352    /// If the package does not have a build script, this returns None.
353    pub fn find_build_script_unit(&self, unit: Unit<'a>) -> Option<Unit<'a>> {
354        if unit.mode.is_run_custom_build() {
355            return Some(unit);
356        }
357        self.unit_dependencies[&unit]
358            .iter()
359            .find(|unit_dep| {
360                unit_dep.unit.mode.is_run_custom_build()
361                    && unit_dep.unit.pkg.package_id() == unit.pkg.package_id()
362            })
363            .map(|unit_dep| unit_dep.unit)
364    }
365
366    /// Returns the metadata hash for the RunCustomBuild Unit associated with
367    /// the given unit.
368    ///
369    /// If the package does not have a build script, this returns None.
370    pub fn find_build_script_metadata(&self, unit: Unit<'a>) -> Option<Metadata> {
371        let script_unit = self.find_build_script_unit(unit)?;
372        Some(self.get_run_build_script_metadata(&script_unit))
373    }
374
375    /// Returns the metadata hash for a RunCustomBuild unit.
376    pub fn get_run_build_script_metadata(&self, unit: &Unit<'a>) -> Metadata {
377        assert!(unit.mode.is_run_custom_build());
378        self.files()
379            .metadata(unit)
380            .expect("build script should always have hash")
381    }
382
383    pub fn is_primary_package(&self, unit: &Unit<'a>) -> bool {
384        self.primary_packages.contains(&unit.pkg.package_id())
385    }
386
387    /// Returns the list of filenames read by cargo to generate the `BuildContext`
388    /// (all `Cargo.toml`, etc.).
389    pub fn build_plan_inputs(&self) -> CargoResult<Vec<PathBuf>> {
390        // Keep sorted for consistency.
391        let mut inputs = BTreeSet::new();
392        // Note: dev-deps are skipped if they are not present in the unit graph.
393        for unit in self.unit_dependencies.keys() {
394            inputs.insert(unit.pkg.manifest_path().to_path_buf());
395        }
396        Ok(inputs.into_iter().collect())
397    }
398
399    fn check_collistions(&self) -> CargoResult<()> {
400        let mut output_collisions = HashMap::new();
401        let describe_collision =
402            |unit: &Unit<'_>, other_unit: &Unit<'_>, path: &PathBuf| -> String {
403                format!(
404                    "The {} target `{}` in package `{}` has the same output \
405                     filename as the {} target `{}` in package `{}`.\n\
406                     Colliding filename is: {}\n",
407                    unit.target.kind().description(),
408                    unit.target.name(),
409                    unit.pkg.package_id(),
410                    other_unit.target.kind().description(),
411                    other_unit.target.name(),
412                    other_unit.pkg.package_id(),
413                    path.display()
414                )
415            };
416        let suggestion =
417            "Consider changing their names to be unique or compiling them separately.\n\
418             This may become a hard error in the future; see \
419             <https://github.com/rust-lang/cargo/issues/6313>.";
420        let rustdoc_suggestion =
421            "This is a known bug where multiple crates with the same name use\n\
422             the same path; see <https://github.com/rust-lang/cargo/issues/6313>.";
423        let report_collision = |unit: &Unit<'_>,
424                                other_unit: &Unit<'_>,
425                                path: &PathBuf,
426                                suggestion: &str|
427         -> CargoResult<()> {
428            if unit.target.name() == other_unit.target.name() {
429                self.bcx.config.shell().warn(format!(
430                    "output filename collision.\n\
431                     {}\
432                     The targets should have unique names.\n\
433                     {}",
434                    describe_collision(unit, other_unit, path),
435                    suggestion
436                ))
437            } else {
438                self.bcx.config.shell().warn(format!(
439                    "output filename collision.\n\
440                    {}\
441                    The output filenames should be unique.\n\
442                    {}\n\
443                    If this looks unexpected, it may be a bug in Cargo. Please file a bug report at\n\
444                    https://github.com/rust-lang/cargo/issues/ with as much information as you\n\
445                    can provide.\n\
446                    {} running on `{}` target `{}`\n\
447                    First unit: {:?}\n\
448                    Second unit: {:?}",
449                    describe_collision(unit, other_unit, path),
450                    suggestion,
451                    crate::version(),
452                    self.bcx.host_triple(),
453                    self.bcx.target_data.short_name(&unit.kind),
454                    unit,
455                    other_unit))
456            }
457        };
458
459        let mut keys = self
460            .unit_dependencies
461            .keys()
462            .filter(|unit| !unit.mode.is_run_custom_build())
463            .collect::<Vec<_>>();
464        // Sort for consistent error messages.
465        keys.sort_unstable();
466        for unit in keys {
467            for output in self.outputs(unit)?.iter() {
468                if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
469                    if unit.mode.is_doc() {
470                        // See https://github.com/rust-lang/rust/issues/56169
471                        // and https://github.com/rust-lang/rust/issues/61378
472                        report_collision(unit, other_unit, &output.path, rustdoc_suggestion)?;
473                    } else {
474                        report_collision(unit, other_unit, &output.path, suggestion)?;
475                    }
476                }
477                if let Some(hardlink) = output.hardlink.as_ref() {
478                    if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
479                        report_collision(unit, other_unit, hardlink, suggestion)?;
480                    }
481                }
482                if let Some(ref export_path) = output.export_path {
483                    if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) {
484                        self.bcx.config.shell().warn(format!(
485                            "`--out-dir` filename collision.\n\
486                             {}\
487                             The exported filenames should be unique.\n\
488                             {}",
489                            describe_collision(unit, other_unit, export_path),
490                            suggestion
491                        ))?;
492                    }
493                }
494            }
495        }
496        Ok(())
497    }
498
499    /// Records the list of units which are required to emit metadata.
500    ///
501    /// Units which depend only on the metadata of others requires the others to
502    /// actually produce metadata, so we'll record that here.
503    fn record_units_requiring_metadata(&mut self) {
504        for (key, deps) in self.unit_dependencies.iter() {
505            for dep in deps {
506                if self.only_requires_rmeta(key, &dep.unit) {
507                    self.rmeta_required.insert(dep.unit);
508                }
509            }
510        }
511    }
512
513    /// Returns whether when `parent` depends on `dep` if it only requires the
514    /// metadata file from `dep`.
515    pub fn only_requires_rmeta(&self, parent: &Unit<'a>, dep: &Unit<'a>) -> bool {
516        // this is only enabled when pipelining is enabled
517        self.pipelining
518            // We're only a candidate for requiring an `rmeta` file if we
519            // ourselves are building an rlib,
520            && !parent.requires_upstream_objects()
521            && parent.mode == CompileMode::Build
522            // Our dependency must also be built as an rlib, otherwise the
523            // object code must be useful in some fashion
524            && !dep.requires_upstream_objects()
525            && dep.mode == CompileMode::Build
526    }
527
528    /// Returns whether when `unit` is built whether it should emit metadata as
529    /// well because some compilations rely on that.
530    pub fn rmeta_required(&self, unit: &Unit<'a>) -> bool {
531        self.rmeta_required.contains(unit) || self.bcx.config.cli_unstable().timings.is_some()
532    }
533
534    pub fn new_jobserver(&mut self) -> CargoResult<Client> {
535        let tokens = self.bcx.build_config.jobs as usize;
536        let client = Client::new(tokens).chain_err(|| "failed to create jobserver")?;
537
538        // Drain the client fully
539        for i in 0..tokens {
540            client.acquire_raw().chain_err(|| {
541                format!(
542                    "failed to fully drain {}/{} token from jobserver at startup",
543                    i, tokens,
544                )
545            })?;
546        }
547
548        Ok(client)
549    }
550}