cargo/ops/cargo_compile/mod.rs

//! The entry point for starting the compilation process for commands like
//! `build`, `test`, `doc`, `rustc`, etc.
//!
//! The [`compile`] function will do all the work to compile a workspace. A
//! rough outline is:
//!
//! 1. Resolve the dependency graph (see [`ops::resolve`]).
//! 2. Download any packages needed (see [`PackageSet`]).
//! 3. Generate a list of top-level "units" of work for the targets the user
//!   requested on the command-line. Each [`Unit`] corresponds to a compiler
//!   invocation. This is done in this module ([`UnitGenerator::generate_root_units`]).
//! 4. Starting from the root [`Unit`]s, generate the [`UnitGraph`] by walking the dependency graph
//!   from the resolver. See also [`unit_dependencies`].
//! 5. Construct the [`BuildContext`] with all of the information collected so
//!   far. This is the end of the "front end" of compilation.
//! 6. Create a [`BuildRunner`] which coordinates the compilation process
//!   and will perform the following steps:
//!     1. Prepare the `target` directory (see [`Layout`]).
//!     2. Create a [`JobQueue`]. The queue checks the
//!       fingerprint of each `Unit` to determine if it should run or be
//!       skipped.
//!     3. Execute the queue via [`drain_the_queue`]. Each leaf in the queue's dependency graph is
//!        executed, and then removed from the graph when finished. This repeats until the queue is
//!        empty. Note that this is the only point in cargo that currently uses threads.
//! 7. The result of the compilation is stored in the [`Compilation`] struct. This can be used for
//!    various things, such as running tests after the compilation has finished.
//!
//! **Note**: "target" inside this module generally refers to ["Cargo Target"],
//! which corresponds to an artifact that will be built in a package. It should
//! not be confused with the target triple or target architecture.
//!
//! [`unit_dependencies`]: crate::core::compiler::unit_dependencies
//! [`Layout`]: crate::core::compiler::Layout
//! [`JobQueue`]: crate::core::compiler::job_queue
//! [`drain_the_queue`]: crate::core::compiler::job_queue
//! ["Cargo Target"]: https://doc.rust-lang.org/nightly/cargo/reference/cargo-targets.html
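//!
//! As a rough, hedged sketch of how these pieces are driven from the outside, a
//! command like `cargo build` boils down to something close to the following
//! (the manifest path and the pre-existing [`GlobalContext`] are assumptions made
//! for illustration; this is not a drop-in snippet):
//!
//! ```ignore
//! use cargo::core::Workspace;
//! use cargo::core::compiler::UserIntent;
//! use cargo::ops::{self, CompileOptions};
//! use cargo::util::CargoResult;
//! use cargo::util::context::GlobalContext;
//!
//! fn build(gctx: &GlobalContext) -> CargoResult<()> {
//!     // Load the workspace from a manifest path (assumed to be in the cwd here).
//!     let ws = Workspace::new(&gctx.cwd().join("Cargo.toml"), gctx)?;
//!     // Defaults roughly matching a plain `cargo build`.
//!     let options = CompileOptions::new(gctx, UserIntent::Build)?;
//!     // Steps 1-7 above all happen inside `compile`.
//!     let _compilation = ops::compile(&ws, &options)?;
//!     Ok(())
//! }
//! ```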

use std::collections::{HashMap, HashSet};
use std::hash::{Hash, Hasher};
use std::sync::Arc;

use crate::core::compiler::UserIntent;
use crate::core::compiler::unit_dependencies::build_unit_dependencies;
use crate::core::compiler::unit_graph::{self, UnitDep, UnitGraph};
use crate::core::compiler::{BuildConfig, BuildContext, BuildRunner, Compilation};
use crate::core::compiler::{CompileKind, CompileTarget, RustcTargetData, Unit};
use crate::core::compiler::{CrateType, TargetInfo, apply_env_config, standard_lib};
use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner};
use crate::core::profiles::Profiles;
use crate::core::resolver::features::{self, CliFeatures, FeaturesFor};
use crate::core::resolver::{HasDevUnits, Resolve};
use crate::core::{PackageId, PackageSet, SourceId, TargetKind, Workspace};
use crate::drop_println;
use crate::ops;
use crate::ops::resolve::{SpecsAndResolvedFeatures, WorkspaceResolve};
use crate::util::context::{GlobalContext, WarningHandling};
use crate::util::interning::InternedString;
use crate::util::{CargoResult, StableHasher};

mod compile_filter;
pub use compile_filter::{CompileFilter, FilterRule, LibRule};

mod unit_generator;
use unit_generator::UnitGenerator;

mod packages;

pub use packages::Packages;

/// Contains information about how a package should be compiled.
///
/// Note on the distinction between `CompileOptions` and [`BuildConfig`]:
/// `BuildConfig` contains values that need to be retained after
/// [`BuildContext`] is created; the other fields are no longer necessary at
/// that point. Think of `CompileOptions` as the high-level settings requested
/// on the command-line, and `BuildConfig` as the low-level settings for
/// actually driving `rustc`.
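///
/// For example, a rough equivalent of the options behind a default `cargo build`
/// could be sketched like this (illustrative only; `gctx` is assumed to be an
/// existing [`GlobalContext`]):
///
/// ```ignore
/// use cargo::core::compiler::UserIntent;
/// use cargo::core::resolver::features::CliFeatures;
/// use cargo::ops::CompileOptions;
///
/// // `CompileOptions::new` fills in defaults roughly matching a plain `cargo build`.
/// let mut options = CompileOptions::new(gctx, UserIntent::Build)?;
/// // Individual fields can then be overridden, e.g. to enable all features.
/// options.cli_features = CliFeatures::new_all(true);
/// ```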
#[derive(Debug, Clone)]
pub struct CompileOptions {
    /// Configuration information for a rustc build.
    pub build_config: BuildConfig,
    /// Feature flags requested by the user.
    pub cli_features: CliFeatures,
    /// A set of packages to build.
    pub spec: Packages,
    /// Filter to apply to the root package to select which targets will be
    /// built.
    pub filter: CompileFilter,
    /// Extra arguments to be passed to rustdoc (single target only).
    pub target_rustdoc_args: Option<Vec<String>>,
    /// The specified target will be compiled with all the available arguments;
    /// note that this only accounts for the *final* invocation of rustc.
    pub target_rustc_args: Option<Vec<String>>,
    /// Crate types to be passed to rustc (single target only).
    pub target_rustc_crate_types: Option<Vec<String>>,
    /// Whether the `--document-private-items` flag was specified and should
    /// be forwarded to `rustdoc`.
    pub rustdoc_document_private_items: bool,
    /// Whether the build process should check the minimum Rust version
    /// defined in the cargo metadata for a crate.
    pub honor_rust_version: Option<bool>,
}

impl CompileOptions {
    pub fn new(gctx: &GlobalContext, intent: UserIntent) -> CargoResult<CompileOptions> {
        let jobs = None;
        let keep_going = false;
        Ok(CompileOptions {
            build_config: BuildConfig::new(gctx, jobs, keep_going, &[], intent)?,
            cli_features: CliFeatures::new_all(false),
            spec: ops::Packages::Packages(Vec::new()),
            filter: CompileFilter::Default {
                required_features_filterable: false,
            },
            target_rustdoc_args: None,
            target_rustc_args: None,
            target_rustc_crate_types: None,
            rustdoc_document_private_items: false,
            honor_rust_version: None,
        })
    }
}

/// Compiles!
///
/// This uses the [`DefaultExecutor`]. To use a custom [`Executor`], see [`compile_with_exec`].
pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions) -> CargoResult<Compilation<'a>> {
    let exec: Arc<dyn Executor> = Arc::new(DefaultExecutor);
    compile_with_exec(ws, options, &exec)
}

/// Like [`compile`] but allows specifying a custom [`Executor`]
/// that will be able to intercept build calls and add custom logic.
///
/// [`compile`] uses [`DefaultExecutor`] which just passes calls through.
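///
/// As a hedged sketch (not a drop-in snippet), a caller with its own [`Executor`]
/// implementation would wire it in roughly like this, where `LoggingExecutor` is a
/// hypothetical type that implements [`Executor`], and `ws`/`options` already exist:
///
/// ```ignore
/// use std::sync::Arc;
/// use cargo::core::compiler::Executor;
/// use cargo::ops;
///
/// // Wrap the custom executor in an `Arc<dyn Executor>` so it can be shared with
/// // the build jobs, then hand it to `compile_with_exec`.
/// let exec: Arc<dyn Executor> = Arc::new(LoggingExecutor::default());
/// let compilation = ops::compile_with_exec(&ws, &options, &exec)?;
/// ```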
pub fn compile_with_exec<'a>(
    ws: &Workspace<'a>,
    options: &CompileOptions,
    exec: &Arc<dyn Executor>,
) -> CargoResult<Compilation<'a>> {
    ws.emit_warnings()?;
    let compilation = compile_ws(ws, options, exec)?;
    if ws.gctx().warning_handling()? == WarningHandling::Deny && compilation.warning_count > 0 {
        anyhow::bail!("warnings are denied by `build.warnings` configuration")
    }
    Ok(compilation)
}

/// Like [`compile_with_exec`] but without warnings from manifest parsing.
#[tracing::instrument(skip_all)]
pub fn compile_ws<'a>(
    ws: &Workspace<'a>,
    options: &CompileOptions,
    exec: &Arc<dyn Executor>,
) -> CargoResult<Compilation<'a>> {
    let interner = UnitInterner::new();
    let bcx = create_bcx(ws, options, &interner)?;
    if options.build_config.unit_graph {
        unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.gctx())?;
        return Compilation::new(&bcx);
    }
    crate::core::gc::auto_gc(bcx.gctx);
    let build_runner = BuildRunner::new(&bcx)?;
    if options.build_config.dry_run {
        build_runner.dry_run()
    } else {
        build_runner.compile(exec)
    }
}

/// Executes `rustc --print <VALUE>`.
///
/// * `print_opt_value` is the VALUE passed through.
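///
/// A minimal sketch of a call that mirrors `cargo rustc --print cfg` (assuming `ws`
/// and `options` have already been built for the requested targets):
///
/// ```ignore
/// // Prints the `cfg` values rustc reports, once per requested target kind.
/// ops::print(&ws, &options, "cfg")?;
/// ```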
pub fn print<'a>(
    ws: &Workspace<'a>,
    options: &CompileOptions,
    print_opt_value: &str,
) -> CargoResult<()> {
    let CompileOptions {
        ref build_config,
        ref target_rustc_args,
        ..
    } = *options;
    let gctx = ws.gctx();
    let rustc = gctx.load_global_rustc(Some(ws))?;
    for (index, kind) in build_config.requested_kinds.iter().enumerate() {
        if index != 0 {
            drop_println!(gctx);
        }
        let target_info = TargetInfo::new(gctx, &build_config.requested_kinds, &rustc, *kind)?;
        let mut process = rustc.process();
        apply_env_config(gctx, &mut process)?;
        process.args(&target_info.rustflags);
        if let Some(args) = target_rustc_args {
            process.args(args);
        }
        if let CompileKind::Target(t) = kind {
            process.arg("--target").arg(t.rustc_target());
        }
        process.arg("--print").arg(print_opt_value);
        process.exec()?;
    }
    Ok(())
}

/// Prepares all required information for the actual compilation.
///
/// For how it works and what data it collects,
/// please see the [module-level documentation](self).
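///
/// A minimal sketch of how this is driven, mirroring what [`compile_ws`] does
/// (with `ws` and `options` assumed to already exist):
///
/// ```ignore
/// use cargo::core::compiler::UnitInterner;
///
/// // The interner must outlive the returned `BuildContext`.
/// let interner = UnitInterner::new();
/// let bcx = create_bcx(&ws, &options, &interner)?;
/// // At this point `bcx.roots` and `bcx.unit_graph` describe every compiler
/// // invocation that would be needed, before anything has been compiled.
/// ```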
#[tracing::instrument(skip_all)]
pub fn create_bcx<'a, 'gctx>(
    ws: &'a Workspace<'gctx>,
    options: &'a CompileOptions,
    interner: &'a UnitInterner,
) -> CargoResult<BuildContext<'a, 'gctx>> {
    let CompileOptions {
        ref build_config,
        ref spec,
        ref cli_features,
        ref filter,
        ref target_rustdoc_args,
        ref target_rustc_args,
        ref target_rustc_crate_types,
        rustdoc_document_private_items,
        honor_rust_version,
    } = *options;
    let gctx = ws.gctx();

    // Perform some pre-flight validation.
    match build_config.intent {
        UserIntent::Test | UserIntent::Build | UserIntent::Check { .. } | UserIntent::Bench => {
            if ws.gctx().get_env("RUST_FLAGS").is_ok() {
                gctx.shell().warn(
                    "Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?",
                )?;
            }
        }
        UserIntent::Doc { .. } | UserIntent::Doctest => {
            if ws.gctx().get_env("RUSTDOC_FLAGS").is_ok() {
                gctx.shell().warn(
                    "Cargo does not read `RUSTDOC_FLAGS` environment variable. Did you mean `RUSTDOCFLAGS`?"
                )?;
            }
        }
    }
    gctx.validate_term_config()?;

    let mut target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?;

    let specs = spec.to_package_id_specs(ws)?;
    let has_dev_units = {
        // Rustdoc itself doesn't need dev-dependencies. But to scrape examples from packages
        // in the workspace, if any of those packages need dev-dependencies, then we need to
        // include dev-dependencies in order to scrape those packages.
        let any_pkg_has_scrape_enabled = ws
            .members_with_features(&specs, cli_features)?
            .iter()
            .any(|(pkg, _)| {
                pkg.targets()
                    .iter()
                    .any(|target| target.is_example() && target.doc_scrape_examples().is_enabled())
            });

        if filter.need_dev_deps(build_config.intent)
            || (build_config.intent.is_doc() && any_pkg_has_scrape_enabled)
        {
            HasDevUnits::Yes
        } else {
            HasDevUnits::No
        }
    };
    let dry_run = false;
    let resolve = ops::resolve_ws_with_opts(
        ws,
        &mut target_data,
        &build_config.requested_kinds,
        cli_features,
        &specs,
        has_dev_units,
        crate::core::resolver::features::ForceAllTargets::No,
        dry_run,
    )?;
    let WorkspaceResolve {
        mut pkg_set,
        workspace_resolve,
        targeted_resolve: resolve,
        specs_and_features,
    } = resolve;

    let std_resolve_features = if let Some(crates) = &gctx.cli_unstable().build_std {
        let (std_package_set, std_resolve, std_features) = standard_lib::resolve_std(
            ws,
            &mut target_data,
            &build_config,
            crates,
            &build_config.requested_kinds,
        )?;
        pkg_set.add_set(std_package_set);
        Some((std_resolve, std_features))
    } else {
        None
    };

    // Find the packages in the resolver that the user wants to build (those
    // passed in with `-p` or the defaults from the workspace), and convert
    // Vec<PackageIdSpec> to a Vec<PackageId>.
    let to_build_ids = resolve.specs_to_ids(&specs)?;
    // Now get the `Package` for each `PackageId`. This may trigger a download
    // if the user specified `-p` for a dependency that is not downloaded.
    // Dependencies will be downloaded during build_unit_dependencies.
    let mut to_builds = pkg_set.get_many(to_build_ids)?;

    // The ordering here affects some error messages coming out of cargo, so
    // let's be test and CLI friendly by always printing in the same order if
    // there's an error.
    to_builds.sort_by_key(|p| p.package_id());

    for pkg in to_builds.iter() {
        pkg.manifest().print_teapot(gctx);

        if build_config.intent.is_any_test()
            && !ws.is_member(pkg)
            && pkg.dependencies().iter().any(|dep| !dep.is_transitive())
        {
            anyhow::bail!(
                "package `{}` cannot be tested because it requires dev-dependencies \
                 and is not a member of the workspace",
                pkg.name()
            );
        }
    }

    let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) {
        (Some(args), _) => (Some(args.clone()), "rustc"),
        (_, Some(args)) => (Some(args.clone()), "rustdoc"),
        _ => (None, ""),
    };

    if extra_args.is_some() && to_builds.len() != 1 {
        panic!(
            "`{}` should not accept multiple `-p` flags",
            extra_args_name
        );
    }

    let profiles = Profiles::new(ws, build_config.requested_profile)?;
    profiles.validate_packages(
        ws.profiles(),
        &mut gctx.shell(),
        workspace_resolve.as_ref().unwrap_or(&resolve),
    )?;

    // If `--target` has not been specified, then the unit graph is built
    // assuming `--target $HOST` was specified. See
    // `rebuild_unit_graph_shared` for more on why this is done.
    let explicit_host_kind = CompileKind::Target(CompileTarget::new(&target_data.rustc.host)?);
    let explicit_host_kinds: Vec<_> = build_config
        .requested_kinds
        .iter()
        .map(|kind| match kind {
            CompileKind::Host => explicit_host_kind,
            CompileKind::Target(t) => CompileKind::Target(*t),
        })
        .collect();

    let mut units = Vec::new();
    let mut unit_graph = HashMap::new();
    let mut scrape_units = Vec::new();

    for SpecsAndResolvedFeatures {
        specs,
        resolved_features,
    } in &specs_and_features
    {
        // Passing `build_config.requested_kinds` instead of
        // `explicit_host_kinds` here so that `generate_root_units` can do
        // its own special handling of `CompileKind::Host`. It will
        // internally replace the host kind with `explicit_host_kind`
        // before setting it on a unit.
        let spec_names = specs.iter().map(|spec| spec.name()).collect::<Vec<_>>();
        let packages = to_builds
            .iter()
            .filter(|package| spec_names.contains(&package.name().as_str()))
            .cloned()
            .collect::<Vec<_>>();
        let generator = UnitGenerator {
            ws,
            packages: &packages,
            spec,
            target_data: &target_data,
            filter,
            requested_kinds: &build_config.requested_kinds,
            explicit_host_kind,
            intent: build_config.intent,
            resolve: &resolve,
            workspace_resolve: &workspace_resolve,
            resolved_features: &resolved_features,
            package_set: &pkg_set,
            profiles: &profiles,
            interner,
            has_dev_units,
        };
        let mut targeted_root_units = generator.generate_root_units()?;

        if let Some(args) = target_rustc_crate_types {
            override_rustc_crate_types(&mut targeted_root_units, args, interner)?;
        }

        let should_scrape =
            build_config.intent.is_doc() && gctx.cli_unstable().rustdoc_scrape_examples;
        let targeted_scrape_units = if should_scrape {
            generator.generate_scrape_units(&targeted_root_units)?
        } else {
            Vec::new()
        };

        let std_roots = if let Some(crates) = gctx.cli_unstable().build_std.as_ref() {
            let (std_resolve, std_features) = std_resolve_features.as_ref().unwrap();
            standard_lib::generate_std_roots(
                &crates,
                &targeted_root_units,
                std_resolve,
                std_features,
                &explicit_host_kinds,
                &pkg_set,
                interner,
                &profiles,
                &target_data,
            )?
        } else {
            Default::default()
        };

        unit_graph.extend(build_unit_dependencies(
            ws,
            &pkg_set,
            &resolve,
            &resolved_features,
            std_resolve_features.as_ref(),
            &targeted_root_units,
            &targeted_scrape_units,
            &std_roots,
            build_config.intent,
            &target_data,
            &profiles,
            interner,
        )?);
        units.extend(targeted_root_units);
        scrape_units.extend(targeted_scrape_units);
    }

    // TODO: In theory, Cargo should also dedupe the roots, but I'm uncertain
    // what heuristics to use in that case.
    if build_config.intent.wants_deps_docs() {
        remove_duplicate_doc(build_config, &units, &mut unit_graph);
    }

    let host_kind_requested = build_config
        .requested_kinds
        .iter()
        .any(CompileKind::is_host);
    // Rebuild the unit graph, replacing the explicit host targets with
    // CompileKind::Host, removing `artifact_target_for_features` and merging any dependencies
    // shared with build and artifact dependencies.
    (units, scrape_units, unit_graph) = rebuild_unit_graph_shared(
        interner,
        unit_graph,
        &units,
        &scrape_units,
        host_kind_requested.then_some(explicit_host_kind),
        build_config.compile_time_deps_only,
    );

    let mut extra_compiler_args = HashMap::new();
    if let Some(args) = extra_args {
        if units.len() != 1 {
            anyhow::bail!(
                "extra arguments to `{}` can only be passed to one \
                 target, consider filtering\nthe package by passing, \
                 e.g., `--lib` or `--bin NAME` to specify a single target",
                extra_args_name
            );
        }
        extra_compiler_args.insert(units[0].clone(), args);
    }

    for unit in units
        .iter()
        .filter(|unit| unit.mode.is_doc() || unit.mode.is_doc_test())
        .filter(|unit| rustdoc_document_private_items || unit.target.is_bin())
    {
        // Add `--document-private-items` rustdoc flag if requested or if
        // the target is a binary. Binary crates get their private items
        // documented by default.
        let mut args = vec!["--document-private-items".into()];
        if unit.target.is_bin() {
            // This warning only makes sense if it's possible to document private items
            // sometimes and ignore them at other times. But cargo consistently passes
            // `--document-private-items`, so the warning isn't useful.
            args.push("-Arustdoc::private-intra-doc-links".into());
        }
        extra_compiler_args
            .entry(unit.clone())
            .or_default()
            .extend(args);
    }

    if honor_rust_version.unwrap_or(true) {
        let rustc_version = target_data.rustc.version.clone().into();

        let mut incompatible = Vec::new();
        let mut local_incompatible = false;
        for unit in unit_graph.keys() {
            let Some(pkg_msrv) = unit.pkg.rust_version() else {
                continue;
            };

            if pkg_msrv.is_compatible_with(&rustc_version) {
                continue;
            }

            local_incompatible |= unit.is_local();
            incompatible.push((unit, pkg_msrv));
        }
        if !incompatible.is_empty() {
            use std::fmt::Write as _;

            let plural = if incompatible.len() == 1 { "" } else { "s" };
            let mut message = format!(
                "rustc {rustc_version} is not supported by the following package{plural}:\n"
            );
            incompatible.sort_by_key(|(unit, _)| (unit.pkg.name(), unit.pkg.version()));
            for (unit, msrv) in incompatible {
                let name = &unit.pkg.name();
                let version = &unit.pkg.version();
                writeln!(&mut message, "  {name}@{version} requires rustc {msrv}").unwrap();
            }
            if ws.is_ephemeral() {
                if ws.ignore_lock() {
                    writeln!(
                        &mut message,
                        "Try re-running `cargo install` with `--locked`"
                    )
                    .unwrap();
                }
            } else if !local_incompatible {
                writeln!(
                    &mut message,
                    "Either upgrade rustc or select compatible dependency versions with
`cargo update <name>@<current-ver> --precise <compatible-ver>`
where `<compatible-ver>` is the latest version supporting rustc {rustc_version}",
                )
                .unwrap();
            }
            return Err(anyhow::Error::msg(message));
        }
    }

    let bcx = BuildContext::new(
        ws,
        pkg_set,
        build_config,
        profiles,
        extra_compiler_args,
        target_data,
        units,
        unit_graph,
        scrape_units,
    )?;

    Ok(bcx)
}

/// This is used to rebuild the unit graph, sharing host dependencies if possible,
/// and applying other unit adjustments based on the whole graph.
///
/// This will translate any unit's `CompileKind::Target(host)` to
/// `CompileKind::Host` if `to_host` is not `None` and the kind is equal to `to_host`.
/// This also handles generating the unit `dep_hash`, and merging shared units if possible.
///
/// This is necessary because if normal dependencies used `CompileKind::Host`,
/// there would be no way to distinguish those units from build-dependency
/// units or artifact dependency units.
/// This can cause a problem if a shared normal/build/artifact dependency needs
/// to link to another dependency whose features differ based on whether or
/// not it is a normal, build or artifact dependency. If all units used
/// `CompileKind::Host`, then they would end up being identical, causing a
/// collision in the `UnitGraph`, and Cargo would end up randomly choosing one
/// value or the other.
///
/// The solution is to keep normal, build and artifact dependencies separate when
/// building the unit graph, and then run this second pass which will try to
/// combine shared dependencies safely. By adding a hash of the dependencies
/// to the `Unit`, this allows the `CompileKind` to be changed back to `Host`
/// and `artifact_target_for_features` to be removed without fear of an unwanted
/// collision for build or artifact dependencies.
///
/// This is also responsible for adjusting the `strip` profile option to
/// opportunistically strip if debug is 0 for all dependencies. This helps
/// remove debuginfo added by the standard library.
///
/// This is also responsible for adjusting the `debug` setting for host
/// dependencies, turning off debug if the user has not explicitly enabled it,
/// and the unit is not shared with a target unit.
///
/// This is also responsible for adjusting whether each unit should be compiled
/// at all, based on the `--compile-time-deps` flag.
fn rebuild_unit_graph_shared(
    interner: &UnitInterner,
    unit_graph: UnitGraph,
    roots: &[Unit],
    scrape_units: &[Unit],
    to_host: Option<CompileKind>,
    compile_time_deps_only: bool,
) -> (Vec<Unit>, Vec<Unit>, UnitGraph) {
    let mut result = UnitGraph::new();
    // Map of the old unit to the new unit, used to avoid recursing into units
    // that have already been computed to improve performance.
    let mut memo = HashMap::new();
    let new_roots = roots
        .iter()
        .map(|root| {
            traverse_and_share(
                interner,
                &mut memo,
                &mut result,
                &unit_graph,
                root,
                true,
                false,
                to_host,
                compile_time_deps_only,
            )
        })
        .collect();
    // If no unit in the unit graph ended up having scrape units attached as dependencies,
    // then they won't have been discovered in traverse_and_share and hence won't be in
    // memo. So we filter out missing scrape units.
    let new_scrape_units = scrape_units
        .iter()
        .map(|unit| memo.get(unit).unwrap().clone())
        .collect();
    (new_roots, new_scrape_units, result)
}

/// Recursive function for rebuilding the graph.
///
/// This walks `unit_graph`, starting at the given `unit`. It inserts the new
/// units into `new_graph`, and returns a new updated version of the given
/// unit (`dep_hash` is filled in, and `kind` switched if necessary).
fn traverse_and_share(
    interner: &UnitInterner,
    memo: &mut HashMap<Unit, Unit>,
    new_graph: &mut UnitGraph,
    unit_graph: &UnitGraph,
    unit: &Unit,
    unit_is_root: bool,
    unit_is_for_host: bool,
    to_host: Option<CompileKind>,
    compile_time_deps_only: bool,
) -> Unit {
    if let Some(new_unit) = memo.get(unit) {
        // Already computed, no need to recompute.
        return new_unit.clone();
    }
    let mut dep_hash = StableHasher::new();
    let skip_non_compile_time_deps = compile_time_deps_only
        && (!unit.target.is_compile_time_dependency() ||
            // A root unit is not a dependency unless other units depend
            // on it.
            unit_is_root);
    let new_deps: Vec<_> = unit_graph[unit]
        .iter()
        .map(|dep| {
            let new_dep_unit = traverse_and_share(
                interner,
                memo,
                new_graph,
                unit_graph,
                &dep.unit,
                false,
                dep.unit_for.is_for_host(),
                to_host,
                // If we should compile the current unit, we should also compile
                // its dependencies. And if not, we should compile only its
                // compile-time dependencies.
                skip_non_compile_time_deps,
            );
            new_dep_unit.hash(&mut dep_hash);
            UnitDep {
                unit: new_dep_unit,
                ..dep.clone()
            }
        })
        .collect();
    // Here, we have recursively traversed this unit's dependencies, and hashed them: we can
    // finalize the dep hash.
    let new_dep_hash = Hasher::finish(&dep_hash);

    // This is the key part of the sharing process: if the unit is a runtime dependency whose
    // target is the same as the host, we canonicalize the compile kind to `CompileKind::Host`.
    // A possible host dependency counterpart to this unit would have that kind, and if such a unit
    // exists in the current `unit_graph`, they will unify in the new unit graph map `new_graph`.
    // The resulting unit graph will be optimized with fewer units, thanks to sharing these host
    // dependencies.
    let canonical_kind = match to_host {
        Some(to_host) if to_host == unit.kind => CompileKind::Host,
        _ => unit.kind,
    };

    let mut profile = unit.profile.clone();
    if profile.strip.is_deferred() {
        // If strip was not manually set, and all dependencies of this unit together
        // with this unit have debuginfo turned off, we enable debuginfo stripping.
        // This will remove pre-existing debug symbols coming from the standard library.
        if !profile.debuginfo.is_turned_on()
            && new_deps
                .iter()
                .all(|dep| !dep.unit.profile.debuginfo.is_turned_on())
        {
            profile.strip = profile.strip.strip_debuginfo();
        }
    }

    // If this is a build dependency, and it's not shared with runtime dependencies, we can weaken
    // its debuginfo level to optimize build times. We do nothing if it's an artifact dependency,
    // as it and its debuginfo may end up embedded in the main program.
    if unit_is_for_host
        && to_host.is_some()
        && profile.debuginfo.is_deferred()
        && !unit.artifact.is_true()
    {
        // We create a "probe" test to see if a unit with the same explicit debuginfo level exists
        // in the graph. This is the level we'd expect if it was set manually or the default value
        // set by a profile for a runtime dependency: its canonical value.
        let canonical_debuginfo = profile.debuginfo.finalize();
        let mut canonical_profile = profile.clone();
        canonical_profile.debuginfo = canonical_debuginfo;
        let unit_probe = interner.intern(
            &unit.pkg,
            &unit.target,
            canonical_profile,
            to_host.unwrap(),
            unit.mode,
            unit.features.clone(),
            unit.rustflags.clone(),
            unit.rustdocflags.clone(),
            unit.links_overrides.clone(),
            unit.is_std,
            unit.dep_hash,
            unit.artifact,
            unit.artifact_target_for_features,
            unit.skip_non_compile_time_dep,
        );

        // We can now turn the deferred value into its actual final value.
        profile.debuginfo = if unit_graph.contains_key(&unit_probe) {
            // The unit is present in both build time and runtime subgraphs: we canonicalize its
            // level to the other unit's, thus ensuring reuse between the two to optimize build times.
            canonical_debuginfo
        } else {
            // The unit is only present in the build time subgraph, so we can weaken its debuginfo
            // level to optimize build times.
            canonical_debuginfo.weaken()
        }
    }

    let new_unit = interner.intern(
        &unit.pkg,
        &unit.target,
        profile,
        canonical_kind,
        unit.mode,
        unit.features.clone(),
        unit.rustflags.clone(),
        unit.rustdocflags.clone(),
        unit.links_overrides.clone(),
        unit.is_std,
        new_dep_hash,
        unit.artifact,
        // Since `dep_hash` is now filled in, there's no need to specify the artifact target
        // for target-dependent feature resolution
        None,
        skip_non_compile_time_deps,
    );
    if !unit_is_root || !compile_time_deps_only {
        assert!(memo.insert(unit.clone(), new_unit.clone()).is_none());
    }
    new_graph.entry(new_unit.clone()).or_insert(new_deps);
    new_unit
}

/// Removes duplicate `CompileMode::Doc` units that would cause problems with
/// filename collisions.
///
/// Rustdoc only separates units by crate name in the file directory
/// structure. If any two units with the same crate name exist, this would
/// cause a filename collision, causing different rustdoc invocations to stomp
/// on one another's files.
///
/// Unfortunately this does not remove all duplicates, as some of them are
/// either user error, or difficult to remove. Cases that I can think of:
///
/// - Same target name in different packages. See the `collision_doc` test.
/// - Different sources. See `collision_doc_sources` test.
///
/// Ideally this would not be necessary.
fn remove_duplicate_doc(
    build_config: &BuildConfig,
    root_units: &[Unit],
    unit_graph: &mut UnitGraph,
) {
    // First, create a mapping of crate_name -> Unit so we can see where the
    // duplicates are.
    let mut all_docs: HashMap<String, Vec<Unit>> = HashMap::new();
    for unit in unit_graph.keys() {
        if unit.mode.is_doc() {
            all_docs
                .entry(unit.target.crate_name())
                .or_default()
                .push(unit.clone());
        }
    }
    // Keep track of units to remove so that they can be efficiently removed
    // from the unit_deps.
    let mut removed_units: HashSet<Unit> = HashSet::new();
    let mut remove = |units: Vec<Unit>, reason: &str, cb: &dyn Fn(&Unit) -> bool| -> Vec<Unit> {
        let (to_remove, remaining_units): (Vec<Unit>, Vec<Unit>) = units
            .into_iter()
            .partition(|unit| cb(unit) && !root_units.contains(unit));
        for unit in to_remove {
            tracing::debug!(
                "removing duplicate doc due to {} for package {} target `{}`",
                reason,
                unit.pkg,
                unit.target.name()
            );
            unit_graph.remove(&unit);
            removed_units.insert(unit);
        }
        remaining_units
    };
    // Iterate over the duplicates and try to remove them from unit_graph.
    for (_crate_name, mut units) in all_docs {
        if units.len() == 1 {
            continue;
        }
        // Prefer target over host if --target was not specified.
        if build_config
            .requested_kinds
            .iter()
            .all(CompileKind::is_host)
        {
            // Note these duplicates may not be real duplicates, since they
            // might get merged in rebuild_unit_graph_shared. Either way, it
            // shouldn't hurt to remove them early (although the report in the
            // log might be confusing).
            units = remove(units, "host/target merger", &|unit| unit.kind.is_host());
            if units.len() == 1 {
                continue;
            }
        }
        // Prefer newer versions over older.
        let mut source_map: HashMap<(InternedString, SourceId, CompileKind), Vec<Unit>> =
            HashMap::new();
        for unit in units {
            let pkg_id = unit.pkg.package_id();
            // Note, this does not detect duplicates from different sources.
            source_map
                .entry((pkg_id.name(), pkg_id.source_id(), unit.kind))
                .or_default()
                .push(unit);
        }
        let mut remaining_units = Vec::new();
        for (_key, mut units) in source_map {
            if units.len() > 1 {
                units.sort_by(|a, b| a.pkg.version().partial_cmp(b.pkg.version()).unwrap());
                // Remove any entries with version < newest.
                let newest_version = units.last().unwrap().pkg.version().clone();
                let keep_units = remove(units, "older version", &|unit| {
                    unit.pkg.version() < &newest_version
                });
                remaining_units.extend(keep_units);
            } else {
                remaining_units.extend(units);
            }
        }
        if remaining_units.len() == 1 {
            continue;
        }
        // Are there other heuristics to remove duplicates that would make
        // sense? Maybe prefer path sources over all others?
    }
    // Also remove units from the unit_deps so there aren't any dangling edges.
    for unit_deps in unit_graph.values_mut() {
        unit_deps.retain(|unit_dep| !removed_units.contains(&unit_dep.unit));
    }
    // Remove any orphan units that were detached from the graph.
    let mut visited = HashSet::new();
    fn visit(unit: &Unit, graph: &UnitGraph, visited: &mut HashSet<Unit>) {
        if !visited.insert(unit.clone()) {
            return;
        }
        for dep in &graph[unit] {
            visit(&dep.unit, graph, visited);
        }
    }
    for unit in root_units {
        visit(unit, unit_graph, &mut visited);
    }
    unit_graph.retain(|unit, _| visited.contains(unit));
}

/// Override crate types for given units.
///
/// This is primarily used by `cargo rustc --crate-type`.
fn override_rustc_crate_types(
    units: &mut [Unit],
    args: &[String],
    interner: &UnitInterner,
) -> CargoResult<()> {
    if units.len() != 1 {
        anyhow::bail!(
            "crate types to rustc can only be passed to one \
            target, consider filtering\nthe package by passing, \
            e.g., `--lib` or `--example` to specify a single target"
        );
    }

    let unit = &units[0];
    let override_unit = |f: fn(Vec<CrateType>) -> TargetKind| {
        let crate_types = args.iter().map(|s| s.into()).collect();
        let mut target = unit.target.clone();
        target.set_kind(f(crate_types));
        interner.intern(
            &unit.pkg,
            &target,
            unit.profile.clone(),
            unit.kind,
            unit.mode,
            unit.features.clone(),
            unit.rustflags.clone(),
            unit.rustdocflags.clone(),
            unit.links_overrides.clone(),
            unit.is_std,
            unit.dep_hash,
            unit.artifact,
            unit.artifact_target_for_features,
            unit.skip_non_compile_time_dep,
        )
    };
    units[0] = match unit.target.kind() {
        TargetKind::Lib(_) => override_unit(TargetKind::Lib),
        TargetKind::ExampleLib(_) => override_unit(TargetKind::ExampleLib),
        _ => {
            anyhow::bail!(
                "crate types can only be specified for libraries and example libraries.\n\
                Binaries, tests, and benchmarks are always the `bin` crate type"
            );
        }
    };

    Ok(())
}

/// Gets all of the features enabled for a package, plus its dependencies'
/// features.
///
/// Dependencies are added as `dep_name/feat_name` because `required-features`
/// wants to support that syntax.
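///
/// As an illustration (the bindings and feature names below are hypothetical,
/// chosen only to show the shape of the returned set):
///
/// ```ignore
/// let features = resolve_all_features(&resolve, &resolved_features, &pkg_set, pkg_id);
/// // Features activated directly on the package itself:
/// assert!(features.contains("std"));
/// // Plus features of its dependencies, in the `dep_name/feat_name` form that
/// // `required-features` accepts:
/// assert!(features.contains("serde/derive"));
/// ```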
pub fn resolve_all_features(
    resolve_with_overrides: &Resolve,
    resolved_features: &features::ResolvedFeatures,
    package_set: &PackageSet<'_>,
    package_id: PackageId,
) -> HashSet<String> {
    let mut features: HashSet<String> = resolved_features
        .activated_features(package_id, FeaturesFor::NormalOrDev)
        .iter()
        .map(|s| s.to_string())
        .collect();

    // Include features enabled for use by dependencies so targets can also use them with the
    // required-features field when deciding whether to be built or skipped.
    for (dep_id, deps) in resolve_with_overrides.deps(package_id) {
        let is_proc_macro = package_set
            .get_one(dep_id)
            .expect("packages downloaded")
            .proc_macro();
        for dep in deps {
            let features_for = FeaturesFor::from_for_host(is_proc_macro || dep.is_build());
            for feature in resolved_features
                .activated_features_unverified(dep_id, features_for)
                .unwrap_or_default()
            {
                features.insert(format!("{}/{}", dep.name_in_toml(), feature));
            }
        }
    }

    features
}