cargo/core/compiler/build_runner/mod.rs

1//! [`BuildRunner`] is the mutable state used during the build process.
2
3use std::collections::{BTreeSet, HashMap, HashSet};
4use std::path::{Path, PathBuf};
5use std::sync::{Arc, Mutex};
6
7use crate::core::PackageId;
8use crate::core::compiler::compilation::{self, UnitOutput};
9use crate::core::compiler::{self, Unit, artifact};
10use crate::util::cache_lock::CacheLockMode;
11use crate::util::errors::CargoResult;
12use anyhow::{Context as _, bail};
13use filetime::FileTime;
14use itertools::Itertools;
15use jobserver::Client;
16
17use super::build_plan::BuildPlan;
18use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
19use super::fingerprint::{Checksum, Fingerprint};
20use super::job_queue::JobQueue;
21use super::layout::Layout;
22use super::lto::Lto;
23use super::unit_graph::UnitDep;
24use super::{
25    BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor, RustDocFingerprint,
26};
27
28mod compilation_files;
29use self::compilation_files::CompilationFiles;
30pub use self::compilation_files::{Metadata, OutputFile, UnitHash};
31
/// Collection of all the stuff that is needed to perform a build.
///
/// Different from the [`BuildContext`], `BuildRunner` is a _mutable_ state used
/// throughout the entire build process. Everything is coordinated through this.
///
/// [`BuildContext`]: crate::core::compiler::BuildContext
pub struct BuildRunner<'a, 'gctx> {
    /// Mostly static information about the build task.
    pub bcx: &'a BuildContext<'a, 'gctx>,
    /// A large collection of information about the result of the entire compilation.
    pub compilation: Compilation<'gctx>,
    /// Output from build scripts, updated after each build script runs.
    pub build_script_outputs: Arc<Mutex<BuildScriptOutputs>>,
    /// Dependencies (like rerun-if-changed) declared by a build script.
    /// This is *only* populated from the output from previous runs.
    /// If the build script hasn't ever been run, then it must be run.
    pub build_explicit_deps: HashMap<Unit, BuildDeps>,
    /// Fingerprints used to detect if a unit is out-of-date.
    pub fingerprints: HashMap<Unit, Arc<Fingerprint>>,
    /// Cache of file mtimes to reduce filesystem hits.
    pub mtime_cache: HashMap<PathBuf, FileTime>,
    /// Cache of file checksums to reduce filesystem reads.
    pub checksum_cache: HashMap<PathBuf, Checksum>,
    /// A set used to track which units have been compiled.
    /// A unit may appear in the job graph multiple times as a dependency of
    /// multiple packages, but it only needs to run once.
    pub compiled: HashSet<Unit>,
    /// Linking information for each `Unit`.
    /// See `build_map` for details.
    pub build_scripts: HashMap<Unit, Arc<BuildScripts>>,
    /// Job server client to manage concurrency with other processes.
    pub jobserver: Client,
    /// "Primary" packages are the ones the user selected on the command-line
    /// with `-p` flags. If no flags are specified, then it is the defaults
    /// based on the current directory and the default workspace members.
    primary_packages: HashSet<PackageId>,
    /// An abstraction of the files and directories that will be generated by
    /// the compilation. This is `None` until after `unit_dependencies` has
    /// been computed. Access it via [`BuildRunner::files`], which panics if
    /// it has not been assigned yet.
    files: Option<CompilationFiles<'a, 'gctx>>,

    /// A set of units which are compiling rlibs and are expected to produce
    /// metadata files in addition to the rlib itself.
    rmeta_required: HashSet<Unit>,

    /// Map of the LTO-status of each unit. This indicates what sort of
    /// compilation is happening (only object, only bitcode, both, etc), and is
    /// precalculated early on.
    pub lto: HashMap<Unit, Lto>,

    /// Map of Doc/Docscrape units to metadata for their -Cmetadata flag.
    /// See [`BuildRunner::compute_metadata_for_doc_units`] for more details.
    pub metadata_for_doc_units: HashMap<Unit, Metadata>,

    /// Set of metadata of Docscrape units that fail before completion, e.g.
    /// because the target has a type error. This is in an Arc<Mutex<..>>
    /// because it is continuously updated as the job progresses.
    pub failed_scrape_units: Arc<Mutex<HashSet<UnitHash>>>,
}
91
92impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
93    pub fn new(bcx: &'a BuildContext<'a, 'gctx>) -> CargoResult<Self> {
94        // Load up the jobserver that we'll use to manage our parallelism. This
95        // is the same as the GNU make implementation of a jobserver, and
96        // intentionally so! It's hoped that we can interact with GNU make and
97        // all share the same jobserver.
98        //
99        // Note that if we don't have a jobserver in our environment then we
100        // create our own, and we create it with `n` tokens, but immediately
101        // acquire one, because one token is ourself, a running process.
102        let jobserver = match bcx.gctx.jobserver_from_env() {
103            Some(c) => c.clone(),
104            None => {
105                let client =
106                    Client::new(bcx.jobs() as usize).context("failed to create jobserver")?;
107                client.acquire_raw()?;
108                client
109            }
110        };
111
112        Ok(Self {
113            bcx,
114            compilation: Compilation::new(bcx)?,
115            build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())),
116            fingerprints: HashMap::new(),
117            mtime_cache: HashMap::new(),
118            checksum_cache: HashMap::new(),
119            compiled: HashSet::new(),
120            build_scripts: HashMap::new(),
121            build_explicit_deps: HashMap::new(),
122            jobserver,
123            primary_packages: HashSet::new(),
124            files: None,
125            rmeta_required: HashSet::new(),
126            lto: HashMap::new(),
127            metadata_for_doc_units: HashMap::new(),
128            failed_scrape_units: Arc::new(Mutex::new(HashSet::new())),
129        })
130    }
131
    /// Dry-run the compilation without actually running it.
    ///
    /// This is expected to collect information like the location of output artifacts.
    /// Please keep in sync with non-compilation part in [`BuildRunner::compile`].
    pub fn dry_run(mut self) -> CargoResult<Compilation<'gctx>> {
        // Same shared package-cache lock that `compile` takes; the
        // preparation steps below read from the cache.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        self.check_collisions()?;

        // Record where tests/binaries/cdylibs would be produced,
        // without running any compile jobs.
        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;
        }

        Ok(self.compilation)
    }
152
    /// Starts compilation, waits for it to finish, and returns information
    /// about the result of compilation.
    ///
    /// See [`ops::cargo_compile`] for a higher-level view of the compile process.
    ///
    /// [`ops::cargo_compile`]: crate::ops::cargo_compile
    #[tracing::instrument(skip_all)]
    pub fn compile(mut self, exec: &Arc<dyn Executor>) -> CargoResult<Compilation<'gctx>> {
        // A shared lock is held during the duration of the build since rustc
        // needs to read from the `src` cache, and we don't want other
        // commands modifying the `src` cache while it is running.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        let mut queue = JobQueue::new(self.bcx);
        let mut plan = BuildPlan::new();
        let build_plan = self.bcx.build_config.build_plan;
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        custom_build::build_map(&mut self)?;
        self.check_collisions()?;
        self.compute_metadata_for_doc_units();

        // We need to make sure that if there were any previous docs
        // already compiled, they were compiled with the same Rustc version that we're currently
        // using. Otherwise we must remove the `doc/` folder and compile again forcing a rebuild.
        //
        // This is important because the `.js`/`.html` & `.css` files that are generated by Rustc don't have
        // any versioning (See https://github.com/rust-lang/cargo/issues/8461).
        // Therefore, we can end up with weird bugs and behaviours if we mix different
        // versions of these files.
        if self.bcx.build_config.intent.is_doc() {
            RustDocFingerprint::check_rustdoc_fingerprint(&self)?
        }

        // Queue up compile work for every root unit.
        for unit in &self.bcx.roots {
            let force_rebuild = self.bcx.build_config.force_rebuild;
            super::compile(&mut self, &mut queue, &mut plan, unit, exec, force_rebuild)?;
        }

        // Now that we've got the full job queue and we've done all our
        // fingerprint analysis to determine what to run, bust all the memoized
        // fingerprint hashes to ensure that during the build they all get the
        // most up-to-date values. In theory we only need to bust hashes that
        // transitively depend on a dirty build script, but it shouldn't matter
        // that much for performance anyway.
        for fingerprint in self.fingerprints.values() {
            fingerprint.clear_memoized();
        }

        // Now that we've figured out everything that we're going to do, do it!
        queue.execute(&mut self, &mut plan)?;

        if build_plan {
            plan.set_inputs(self.build_plan_inputs()?);
            plan.output_plan(self.bcx.gctx);
        }

        // Add `OUT_DIR` to env vars if unit has a build script.
        let units_with_build_script = &self
            .bcx
            .roots
            .iter()
            .filter(|unit| self.build_scripts.contains_key(unit))
            .dedup_by(|x, y| x.pkg.package_id() == y.pkg.package_id())
            .collect::<Vec<_>>();
        for unit in units_with_build_script {
            for dep in &self.bcx.unit_graph[unit] {
                if dep.unit.mode.is_run_custom_build() {
                    let out_dir = self
                        .files()
                        .build_script_out_dir(&dep.unit)
                        .display()
                        .to_string();
                    let script_meta = self.get_run_build_script_metadata(&dep.unit);
                    self.compilation
                        .extra_env
                        .entry(script_meta)
                        .or_insert_with(Vec::new)
                        .push(("OUT_DIR".to_string(), out_dir));
                }
            }
        }

        // Collect the result of the build into `self.compilation`.
        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;

            // Collect information for `rustdoc --test`.
            if unit.mode.is_doc_test() {
                let mut unstable_opts = false;
                let mut args = compiler::extern_args(&self, unit, &mut unstable_opts)?;
                args.extend(compiler::lto_args(&self, unit));
                args.extend(compiler::features_args(unit));
                args.extend(compiler::check_cfg_args(unit));

                // Forward cfgs, check-cfgs, and applicable linker args that
                // this unit's build script(s) emitted to the doctest run.
                let script_metas = self.find_build_script_metadatas(unit);
                if let Some(meta_vec) = script_metas.clone() {
                    for meta in meta_vec {
                        if let Some(output) = self.build_script_outputs.lock().unwrap().get(meta) {
                            for cfg in &output.cfgs {
                                args.push("--cfg".into());
                                args.push(cfg.into());
                            }

                            for check_cfg in &output.check_cfgs {
                                args.push("--check-cfg".into());
                                args.push(check_cfg.into());
                            }

                            for (lt, arg) in &output.linker_args {
                                if lt.applies_to(&unit.target, unit.mode) {
                                    args.push("-C".into());
                                    args.push(format!("link-arg={}", arg).into());
                                }
                            }
                        }
                    }
                }
                args.extend(unit.rustdocflags.iter().map(Into::into));

                use super::MessageFormat;
                let format = match self.bcx.build_config.message_format {
                    MessageFormat::Short => "short",
                    MessageFormat::Human => "human",
                    MessageFormat::Json { .. } => "json",
                };
                args.push("--error-format".into());
                args.push(format.into());

                self.compilation.to_doc_test.push(compilation::Doctest {
                    unit: unit.clone(),
                    args,
                    unstable_opts,
                    linker: self.compilation.target_linker(unit.kind).clone(),
                    script_metas,
                    env: artifact::get_env(&self, self.unit_deps(unit))?,
                });
            }

            super::output_depinfo(&mut self, unit)?;
        }

        // Propagate env vars and native library search paths emitted by
        // build scripts into the final `Compilation`.
        for (script_meta, output) in self.build_script_outputs.lock().unwrap().iter() {
            self.compilation
                .extra_env
                .entry(*script_meta)
                .or_insert_with(Vec::new)
                .extend(output.env.iter().cloned());

            for dir in output.library_paths.iter() {
                self.compilation
                    .native_dirs
                    .insert(dir.clone().into_path_buf());
            }
        }
        Ok(self.compilation)
    }
313
314    fn collect_tests_and_executables(&mut self, unit: &Unit) -> CargoResult<()> {
315        for output in self.outputs(unit)?.iter() {
316            if matches!(
317                output.flavor,
318                FileFlavor::DebugInfo | FileFlavor::Auxiliary | FileFlavor::Sbom
319            ) {
320                continue;
321            }
322
323            let bindst = output.bin_dst();
324
325            if unit.mode == CompileMode::Test {
326                self.compilation
327                    .tests
328                    .push(self.unit_output(unit, &output.path));
329            } else if unit.target.is_executable() {
330                self.compilation
331                    .binaries
332                    .push(self.unit_output(unit, bindst));
333            } else if unit.target.is_cdylib()
334                && !self.compilation.cdylibs.iter().any(|uo| uo.unit == *unit)
335            {
336                self.compilation
337                    .cdylibs
338                    .push(self.unit_output(unit, bindst));
339            }
340        }
341        Ok(())
342    }
343
344    /// Returns the executable for the specified unit (if any).
345    pub fn get_executable(&mut self, unit: &Unit) -> CargoResult<Option<PathBuf>> {
346        let is_binary = unit.target.is_executable();
347        let is_test = unit.mode.is_any_test();
348        if !unit.mode.generates_executable() || !(is_binary || is_test) {
349            return Ok(None);
350        }
351        Ok(self
352            .outputs(unit)?
353            .iter()
354            .find(|o| o.flavor == FileFlavor::Normal)
355            .map(|output| output.bin_dst().clone()))
356    }
357
358    #[tracing::instrument(skip_all)]
359    pub fn prepare_units(&mut self) -> CargoResult<()> {
360        let dest = self.bcx.profiles.get_dir_name();
361        let host_layout = Layout::new(self.bcx.ws, None, &dest)?;
362        let mut targets = HashMap::new();
363        for kind in self.bcx.all_kinds.iter() {
364            if let CompileKind::Target(target) = *kind {
365                let layout = Layout::new(self.bcx.ws, Some(target), &dest)?;
366                targets.insert(target, layout);
367            }
368        }
369        self.primary_packages
370            .extend(self.bcx.roots.iter().map(|u| u.pkg.package_id()));
371        self.compilation
372            .root_crate_names
373            .extend(self.bcx.roots.iter().map(|u| u.target.crate_name()));
374
375        self.record_units_requiring_metadata();
376
377        let files = CompilationFiles::new(self, host_layout, targets);
378        self.files = Some(files);
379        Ok(())
380    }
381
382    /// Prepare this context, ensuring that all filesystem directories are in
383    /// place.
384    #[tracing::instrument(skip_all)]
385    pub fn prepare(&mut self) -> CargoResult<()> {
386        self.files
387            .as_mut()
388            .unwrap()
389            .host
390            .prepare()
391            .context("couldn't prepare build directories")?;
392        for target in self.files.as_mut().unwrap().target.values_mut() {
393            target
394                .prepare()
395                .context("couldn't prepare build directories")?;
396        }
397
398        let files = self.files.as_ref().unwrap();
399        for &kind in self.bcx.all_kinds.iter() {
400            let layout = files.layout(kind);
401            self.compilation
402                .root_output
403                .insert(kind, layout.dest().to_path_buf());
404            self.compilation
405                .deps_output
406                .insert(kind, layout.deps().to_path_buf());
407        }
408        Ok(())
409    }
410
411    pub fn files(&self) -> &CompilationFiles<'a, 'gctx> {
412        self.files.as_ref().unwrap()
413    }
414
415    /// Returns the filenames that the given unit will generate.
416    pub fn outputs(&self, unit: &Unit) -> CargoResult<Arc<Vec<OutputFile>>> {
417        self.files.as_ref().unwrap().outputs(unit, self.bcx)
418    }
419
420    /// Direct dependencies for the given unit.
421    pub fn unit_deps(&self, unit: &Unit) -> &[UnitDep] {
422        &self.bcx.unit_graph[unit]
423    }
424
425    /// Returns the `RunCustomBuild` Units associated with the given Unit.
426    ///
427    /// If the package does not have a build script, this returns None.
428    pub fn find_build_script_units(&self, unit: &Unit) -> Option<Vec<Unit>> {
429        if unit.mode.is_run_custom_build() {
430            return Some(vec![unit.clone()]);
431        }
432
433        let build_script_units: Vec<Unit> = self.bcx.unit_graph[unit]
434            .iter()
435            .filter(|unit_dep| {
436                unit_dep.unit.mode.is_run_custom_build()
437                    && unit_dep.unit.pkg.package_id() == unit.pkg.package_id()
438            })
439            .map(|unit_dep| unit_dep.unit.clone())
440            .collect();
441        if build_script_units.is_empty() {
442            None
443        } else {
444            Some(build_script_units)
445        }
446    }
447
448    /// Returns the metadata hash for the `RunCustomBuild` Unit associated with
449    /// the given unit.
450    ///
451    /// If the package does not have a build script, this returns None.
452    pub fn find_build_script_metadatas(&self, unit: &Unit) -> Option<Vec<UnitHash>> {
453        self.find_build_script_units(unit).map(|units| {
454            units
455                .iter()
456                .map(|u| self.get_run_build_script_metadata(u))
457                .collect()
458        })
459    }
460
461    /// Returns the metadata hash for a `RunCustomBuild` unit.
462    pub fn get_run_build_script_metadata(&self, unit: &Unit) -> UnitHash {
463        assert!(unit.mode.is_run_custom_build());
464        self.files().metadata(unit).unit_id()
465    }
466
467    /// Returns the list of SBOM output file paths for a given [`Unit`].
468    pub fn sbom_output_files(&self, unit: &Unit) -> CargoResult<Vec<PathBuf>> {
469        Ok(self
470            .outputs(unit)?
471            .iter()
472            .filter(|o| o.flavor == FileFlavor::Sbom)
473            .map(|o| o.path.clone())
474            .collect())
475    }
476
477    pub fn is_primary_package(&self, unit: &Unit) -> bool {
478        self.primary_packages.contains(&unit.pkg.package_id())
479    }
480
481    /// Returns the list of filenames read by cargo to generate the [`BuildContext`]
482    /// (all `Cargo.toml`, etc.).
483    pub fn build_plan_inputs(&self) -> CargoResult<Vec<PathBuf>> {
484        // Keep sorted for consistency.
485        let mut inputs = BTreeSet::new();
486        // Note: dev-deps are skipped if they are not present in the unit graph.
487        for unit in self.bcx.unit_graph.keys() {
488            inputs.insert(unit.pkg.manifest_path().to_path_buf());
489        }
490        Ok(inputs.into_iter().collect())
491    }
492
493    /// Returns a [`UnitOutput`] which represents some information about the
494    /// output of a unit.
495    pub fn unit_output(&self, unit: &Unit, path: &Path) -> UnitOutput {
496        let script_metas = self.find_build_script_metadatas(unit);
497        UnitOutput {
498            unit: unit.clone(),
499            path: path.to_path_buf(),
500            script_metas,
501        }
502    }
503
    /// Check if any output file name collision happens.
    /// See <https://github.com/rust-lang/cargo/issues/6313> for more.
    #[tracing::instrument(skip_all)]
    fn check_collisions(&self) -> CargoResult<()> {
        // Maps each claimed output path to the unit that claimed it first.
        let mut output_collisions = HashMap::new();
        let describe_collision = |unit: &Unit, other_unit: &Unit, path: &PathBuf| -> String {
            format!(
                "The {} target `{}` in package `{}` has the same output \
                     filename as the {} target `{}` in package `{}`.\n\
                     Colliding filename is: {}\n",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg.package_id(),
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg.package_id(),
                path.display()
            )
        };
        let suggestion = "Consider changing their names to be unique or compiling them separately.\n\
             This may become a hard error in the future; see \
             <https://github.com/rust-lang/cargo/issues/6313>.";
        let rustdoc_suggestion = "This is a known bug where multiple crates with the same name use\n\
             the same path; see <https://github.com/rust-lang/cargo/issues/6313>.";
        // Emits a warning (not an error); the wording differs depending on
        // whether the two target names themselves are identical.
        let report_collision = |unit: &Unit,
                                other_unit: &Unit,
                                path: &PathBuf,
                                suggestion: &str|
         -> CargoResult<()> {
            if unit.target.name() == other_unit.target.name() {
                self.bcx.gctx.shell().warn(format!(
                    "output filename collision.\n\
                     {}\
                     The targets should have unique names.\n\
                     {}",
                    describe_collision(unit, other_unit, path),
                    suggestion
                ))
            } else {
                self.bcx.gctx.shell().warn(format!(
                    "output filename collision.\n\
                    {}\
                    The output filenames should be unique.\n\
                    {}\n\
                    If this looks unexpected, it may be a bug in Cargo. Please file a bug report at\n\
                    https://github.com/rust-lang/cargo/issues/ with as much information as you\n\
                    can provide.\n\
                    cargo {} running on `{}` target `{}`\n\
                    First unit: {:?}\n\
                    Second unit: {:?}",
                    describe_collision(unit, other_unit, path),
                    suggestion,
                    crate::version(),
                    self.bcx.host_triple(),
                    self.bcx.target_data.short_name(&unit.kind),
                    unit,
                    other_unit))
            }
        };

        // Unlike the warnings above, duplicate doc outputs are a hard error.
        fn doc_collision_error(unit: &Unit, other_unit: &Unit) -> CargoResult<()> {
            bail!(
                "document output filename collision\n\
                 The {} `{}` in package `{}` has the same name as the {} `{}` in package `{}`.\n\
                 Only one may be documented at once since they output to the same path.\n\
                 Consider documenting only one, renaming one, \
                 or marking one with `doc = false` in Cargo.toml.",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg,
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg,
            );
        }

        let mut keys = self
            .bcx
            .unit_graph
            .keys()
            .filter(|unit| !unit.mode.is_run_custom_build())
            .collect::<Vec<_>>();
        // Sort for consistent error messages.
        keys.sort_unstable();
        // These are kept separate to retain compatibility with older
        // versions, which generated an error when there was a duplicate lib
        // or bin (but the old code did not check bin<->lib collisions). To
        // retain backwards compatibility, this only generates an error for
        // duplicate libs or duplicate bins (but not both). Ideally this
        // shouldn't be here, but since there isn't a complete workaround,
        // yet, this retains the old behavior.
        let mut doc_libs = HashMap::new();
        let mut doc_bins = HashMap::new();
        for unit in keys {
            if unit.mode.is_doc() && self.is_primary_package(unit) {
                // These situations have been an error since before 1.0, so it
                // is not a warning like the other situations.
                if unit.target.is_lib() {
                    if let Some(prev) = doc_libs.insert((unit.target.crate_name(), unit.kind), unit)
                    {
                        doc_collision_error(unit, prev)?;
                    }
                } else if let Some(prev) =
                    doc_bins.insert((unit.target.crate_name(), unit.kind), unit)
                {
                    doc_collision_error(unit, prev)?;
                }
            }
            // Check the primary output path, any hardlink destination, and
            // any `--artifact-dir` export path for collisions.
            for output in self.outputs(unit)?.iter() {
                if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
                    if unit.mode.is_doc() {
                        // See https://github.com/rust-lang/rust/issues/56169
                        // and https://github.com/rust-lang/rust/issues/61378
                        report_collision(unit, other_unit, &output.path, rustdoc_suggestion)?;
                    } else {
                        report_collision(unit, other_unit, &output.path, suggestion)?;
                    }
                }
                if let Some(hardlink) = output.hardlink.as_ref() {
                    if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
                        report_collision(unit, other_unit, hardlink, suggestion)?;
                    }
                }
                if let Some(ref export_path) = output.export_path {
                    if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) {
                        self.bcx.gctx.shell().warn(format!(
                            "`--artifact-dir` filename collision.\n\
                             {}\
                             The exported filenames should be unique.\n\
                             {}",
                            describe_collision(unit, other_unit, export_path),
                            suggestion
                        ))?;
                    }
                }
            }
        }
        Ok(())
    }
643
644    /// Records the list of units which are required to emit metadata.
645    ///
646    /// Units which depend only on the metadata of others requires the others to
647    /// actually produce metadata, so we'll record that here.
648    fn record_units_requiring_metadata(&mut self) {
649        for (key, deps) in self.bcx.unit_graph.iter() {
650            for dep in deps {
651                if self.only_requires_rmeta(key, &dep.unit) {
652                    self.rmeta_required.insert(dep.unit.clone());
653                }
654            }
655        }
656    }
657
658    /// Returns whether when `parent` depends on `dep` if it only requires the
659    /// metadata file from `dep`.
660    pub fn only_requires_rmeta(&self, parent: &Unit, dep: &Unit) -> bool {
661        // We're only a candidate for requiring an `rmeta` file if we
662        // ourselves are building an rlib,
663        !parent.requires_upstream_objects()
664            && parent.mode == CompileMode::Build
665            // Our dependency must also be built as an rlib, otherwise the
666            // object code must be useful in some fashion
667            && !dep.requires_upstream_objects()
668            && dep.mode == CompileMode::Build
669    }
670
671    /// Returns whether when `unit` is built whether it should emit metadata as
672    /// well because some compilations rely on that.
673    pub fn rmeta_required(&self, unit: &Unit) -> bool {
674        self.rmeta_required.contains(unit)
675    }
676
    /// Finds metadata for Doc/Docscrape units.
    ///
    /// rustdoc needs a -Cmetadata flag in order to recognize StableCrateIds that refer to
    /// items in the crate being documented. The -Cmetadata flag used by reverse-dependencies
    /// will be the metadata of the Cargo unit that generated the current library's rmeta file,
    /// which should be a Check unit.
    ///
    /// If the current crate has reverse-dependencies, such a Check unit should exist, and so
    /// we use that crate's metadata. If not, we use the crate's Doc unit so at least examples
    /// scraped from the current crate can be used when documenting the current crate.
    #[tracing::instrument(skip_all)]
    pub fn compute_metadata_for_doc_units(&mut self) {
        for unit in self.bcx.unit_graph.keys() {
            if !unit.mode.is_doc() && !unit.mode.is_doc_scrape() {
                continue;
            }

            // All non-scrape units for the same package and target as `unit`.
            let matching_units = self
                .bcx
                .unit_graph
                .keys()
                .filter(|other| {
                    unit.pkg == other.pkg
                        && unit.target == other.target
                        && !other.mode.is_doc_scrape()
                })
                .collect::<Vec<_>>();
            // Prefer a Check unit, then a Doc unit; otherwise fall back to
            // `unit` itself.
            let metadata_unit = matching_units
                .iter()
                .find(|other| other.mode.is_check())
                .or_else(|| matching_units.iter().find(|other| other.mode.is_doc()))
                .unwrap_or(&unit);
            self.metadata_for_doc_units
                .insert(unit.clone(), self.files().metadata(metadata_unit));
        }
    }
713}