//! `cargo/core/compiler/build_runner/mod.rs`

use std::collections::{BTreeSet, HashMap, HashSet};
4use std::path::{Path, PathBuf};
5use std::sync::{Arc, Mutex};
6
7use crate::core::PackageId;
8use crate::core::compiler::compilation::{self, UnitOutput};
9use crate::core::compiler::{self, Unit, artifact};
10use crate::util::cache_lock::CacheLockMode;
11use crate::util::errors::CargoResult;
12use anyhow::{Context as _, bail};
13use filetime::FileTime;
14use itertools::Itertools;
15use jobserver::Client;
16
17use super::build_plan::BuildPlan;
18use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
19use super::fingerprint::{Checksum, Fingerprint};
20use super::job_queue::JobQueue;
21use super::layout::Layout;
22use super::lto::Lto;
23use super::unit_graph::UnitDep;
24use super::{
25 BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor, RustDocFingerprint,
26};
27
28mod compilation_files;
29use self::compilation_files::CompilationFiles;
30pub use self::compilation_files::{Metadata, OutputFile, UnitHash};
31
/// Central state for one build session: drives compilation of a unit graph
/// and accumulates the final [`Compilation`] result.
///
/// Created via [`BuildRunner::new`] and consumed by [`BuildRunner::compile`]
/// or [`BuildRunner::dry_run`].
pub struct BuildRunner<'a, 'gctx> {
    /// The build context describing the whole unit graph being built.
    pub bcx: &'a BuildContext<'a, 'gctx>,
    /// The accumulated result handed back to the caller when the build ends.
    pub compilation: Compilation<'gctx>,
    /// Outputs captured from executed build scripts; behind `Arc<Mutex>` so
    /// it can be shared with concurrently running jobs.
    pub build_script_outputs: Arc<Mutex<BuildScriptOutputs>>,
    /// Explicitly declared build-script dependencies, keyed by unit.
    pub build_explicit_deps: HashMap<Unit, BuildDeps>,
    /// Fingerprint computed for each unit (rebuild detection state).
    pub fingerprints: HashMap<Unit, Arc<Fingerprint>>,
    /// Cache of file modification times, to avoid repeated `stat` calls.
    pub mtime_cache: HashMap<PathBuf, FileTime>,
    /// Cache of file content checksums, keyed by path.
    pub checksum_cache: HashMap<PathBuf, Checksum>,
    /// Units that have already been built during this session.
    pub compiled: HashSet<Unit>,
    /// Build-script information recorded per unit.
    pub build_scripts: HashMap<Unit, Arc<BuildScripts>>,
    /// Jobserver client used to bound build parallelism.
    pub jobserver: Client,
    /// Package ids of the root units; see [`BuildRunner::is_primary_package`].
    primary_packages: HashSet<PackageId>,
    /// Per-unit output-file layout; `None` until
    /// [`BuildRunner::prepare_units`] populates it (accessors `unwrap`).
    files: Option<CompilationFiles<'a, 'gctx>>,
    /// Units whose dependants only need the `.rmeta` metadata file; filled by
    /// `record_units_requiring_metadata`.
    rmeta_required: HashSet<Unit>,
    /// Per-unit LTO configuration, computed before compilation starts.
    pub lto: HashMap<Unit, Lto>,
    /// Metadata hash chosen for each doc/doc-scrape unit; see
    /// [`BuildRunner::compute_metadata_for_doc_units`].
    pub metadata_for_doc_units: HashMap<Unit, Metadata>,
    /// Hashes of doc-scrape units that failed; shared (via `Arc<Mutex>`) with
    /// the job execution machinery.
    pub failed_scrape_units: Arc<Mutex<HashSet<UnitHash>>>,
}
91
92impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
93 pub fn new(bcx: &'a BuildContext<'a, 'gctx>) -> CargoResult<Self> {
94 let jobserver = match bcx.gctx.jobserver_from_env() {
103 Some(c) => c.clone(),
104 None => {
105 let client =
106 Client::new(bcx.jobs() as usize).context("failed to create jobserver")?;
107 client.acquire_raw()?;
108 client
109 }
110 };
111
112 Ok(Self {
113 bcx,
114 compilation: Compilation::new(bcx)?,
115 build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())),
116 fingerprints: HashMap::new(),
117 mtime_cache: HashMap::new(),
118 checksum_cache: HashMap::new(),
119 compiled: HashSet::new(),
120 build_scripts: HashMap::new(),
121 build_explicit_deps: HashMap::new(),
122 jobserver,
123 primary_packages: HashSet::new(),
124 files: None,
125 rmeta_required: HashSet::new(),
126 lto: HashMap::new(),
127 metadata_for_doc_units: HashMap::new(),
128 failed_scrape_units: Arc::new(Mutex::new(HashSet::new())),
129 })
130 }
131
    /// Runs the planning and bookkeeping phases of a build — LTO computation,
    /// unit/directory preparation, collision checks, and collection of
    /// test/executable outputs — without creating a job queue or executing
    /// any compile jobs (compare with [`BuildRunner::compile`]).
    ///
    /// Consumes the runner and returns the resulting [`Compilation`].
    pub fn dry_run(mut self) -> CargoResult<Compilation<'gctx>> {
        // Hold a shared package-cache lock for the whole run, mirroring
        // `compile`.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        self.check_collisions()?;

        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;
        }

        Ok(self.compilation)
    }
152
    /// Performs the full build: plans every unit, executes the job queue, and
    /// returns the finished [`Compilation`].
    ///
    /// `exec` is the executor actually invoked for each compile job.
    #[tracing::instrument(skip_all)]
    pub fn compile(mut self, exec: &Arc<dyn Executor>) -> CargoResult<Compilation<'gctx>> {
        // Keep a shared package-cache lock for the duration of the build.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        let mut queue = JobQueue::new(self.bcx);
        let mut plan = BuildPlan::new();
        let build_plan = self.bcx.build_config.build_plan;
        // Planning phase: LTO settings, output layout, build-script maps,
        // collision detection, and doc-unit metadata — all before any job
        // is enqueued.
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        custom_build::build_map(&mut self)?;
        self.check_collisions()?;
        self.compute_metadata_for_doc_units();

        if self.bcx.build_config.intent.is_doc() {
            // NOTE(review): presumably invalidates stale rustdoc output when
            // the rustdoc version changed — confirm in `RustDocFingerprint`.
            RustDocFingerprint::check_rustdoc_fingerprint(&self)?
        }

        // Enqueue every root unit (dependencies are handled transitively by
        // `super::compile`).
        for unit in &self.bcx.roots {
            let force_rebuild = self.bcx.build_config.force_rebuild;
            super::compile(&mut self, &mut queue, &mut plan, unit, exec, force_rebuild)?;
        }

        // Drop values memoized during planning so execution does not reuse
        // them; see `Fingerprint::clear_memoized` for the exact contract.
        for fingerprint in self.fingerprints.values() {
            fingerprint.clear_memoized();
        }

        // Execution phase: run the queued jobs to completion.
        queue.execute(&mut self, &mut plan)?;

        if build_plan {
            plan.set_inputs(self.build_plan_inputs()?);
            plan.output_plan(self.bcx.gctx);
        }

        // For each root package with a build script, expose its OUT_DIR via
        // the per-script extra environment. `dedup_by` only merges
        // consecutive roots of the same package (roots come pre-grouped).
        let units_with_build_script = &self
            .bcx
            .roots
            .iter()
            .filter(|unit| self.build_scripts.contains_key(unit))
            .dedup_by(|x, y| x.pkg.package_id() == y.pkg.package_id())
            .collect::<Vec<_>>();
        for unit in units_with_build_script {
            for dep in &self.bcx.unit_graph[unit] {
                if dep.unit.mode.is_run_custom_build() {
                    let out_dir = self
                        .files()
                        .build_script_out_dir(&dep.unit)
                        .display()
                        .to_string();
                    let script_meta = self.get_run_build_script_metadata(&dep.unit);
                    self.compilation
                        .extra_env
                        .entry(script_meta)
                        .or_insert_with(Vec::new)
                        .push(("OUT_DIR".to_string(), out_dir));
                }
            }
        }

        // Collect produced artifacts and assemble rustdoc invocations for
        // doc-test units.
        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;

            if unit.mode.is_doc_test() {
                let mut unstable_opts = false;
                let mut args = compiler::extern_args(&self, unit, &mut unstable_opts)?;
                args.extend(compiler::lto_args(&self, unit));
                args.extend(compiler::features_args(unit));
                args.extend(compiler::check_cfg_args(unit));

                // Forward `cfg`s, `check-cfg`s, and applicable linker args
                // emitted by this unit's build script(s) to rustdoc.
                let script_metas = self.find_build_script_metadatas(unit);
                if let Some(meta_vec) = script_metas.clone() {
                    for meta in meta_vec {
                        if let Some(output) = self.build_script_outputs.lock().unwrap().get(meta) {
                            for cfg in &output.cfgs {
                                args.push("--cfg".into());
                                args.push(cfg.into());
                            }

                            for check_cfg in &output.check_cfgs {
                                args.push("--check-cfg".into());
                                args.push(check_cfg.into());
                            }

                            for (lt, arg) in &output.linker_args {
                                if lt.applies_to(&unit.target, unit.mode) {
                                    args.push("-C".into());
                                    args.push(format!("link-arg={}", arg).into());
                                }
                            }
                        }
                    }
                }
                args.extend(unit.rustdocflags.iter().map(Into::into));

                use super::MessageFormat;
                let format = match self.bcx.build_config.message_format {
                    MessageFormat::Short => "short",
                    MessageFormat::Human => "human",
                    MessageFormat::Json { .. } => "json",
                };
                args.push("--error-format".into());
                args.push(format.into());

                self.compilation.to_doc_test.push(compilation::Doctest {
                    unit: unit.clone(),
                    args,
                    unstable_opts,
                    linker: self.compilation.target_linker(unit.kind).clone(),
                    script_metas,
                    env: artifact::get_env(&self, self.unit_deps(unit))?,
                });
            }

            super::output_depinfo(&mut self, unit)?;
        }

        // Propagate every build script's environment and native search paths
        // into the final compilation result.
        for (script_meta, output) in self.build_script_outputs.lock().unwrap().iter() {
            self.compilation
                .extra_env
                .entry(*script_meta)
                .or_insert_with(Vec::new)
                .extend(output.env.iter().cloned());

            for dir in output.library_paths.iter() {
                self.compilation
                    .native_dirs
                    .insert(dir.clone().into_path_buf());
            }
        }
        Ok(self.compilation)
    }
313
314 fn collect_tests_and_executables(&mut self, unit: &Unit) -> CargoResult<()> {
315 for output in self.outputs(unit)?.iter() {
316 if matches!(
317 output.flavor,
318 FileFlavor::DebugInfo | FileFlavor::Auxiliary | FileFlavor::Sbom
319 ) {
320 continue;
321 }
322
323 let bindst = output.bin_dst();
324
325 if unit.mode == CompileMode::Test {
326 self.compilation
327 .tests
328 .push(self.unit_output(unit, &output.path));
329 } else if unit.target.is_executable() {
330 self.compilation
331 .binaries
332 .push(self.unit_output(unit, bindst));
333 } else if unit.target.is_cdylib()
334 && !self.compilation.cdylibs.iter().any(|uo| uo.unit == *unit)
335 {
336 self.compilation
337 .cdylibs
338 .push(self.unit_output(unit, bindst));
339 }
340 }
341 Ok(())
342 }
343
344 pub fn get_executable(&mut self, unit: &Unit) -> CargoResult<Option<PathBuf>> {
346 let is_binary = unit.target.is_executable();
347 let is_test = unit.mode.is_any_test();
348 if !unit.mode.generates_executable() || !(is_binary || is_test) {
349 return Ok(None);
350 }
351 Ok(self
352 .outputs(unit)?
353 .iter()
354 .find(|o| o.flavor == FileFlavor::Normal)
355 .map(|output| output.bin_dst().clone()))
356 }
357
358 #[tracing::instrument(skip_all)]
359 pub fn prepare_units(&mut self) -> CargoResult<()> {
360 let dest = self.bcx.profiles.get_dir_name();
361 let host_layout = Layout::new(self.bcx.ws, None, &dest)?;
362 let mut targets = HashMap::new();
363 for kind in self.bcx.all_kinds.iter() {
364 if let CompileKind::Target(target) = *kind {
365 let layout = Layout::new(self.bcx.ws, Some(target), &dest)?;
366 targets.insert(target, layout);
367 }
368 }
369 self.primary_packages
370 .extend(self.bcx.roots.iter().map(|u| u.pkg.package_id()));
371 self.compilation
372 .root_crate_names
373 .extend(self.bcx.roots.iter().map(|u| u.target.crate_name()));
374
375 self.record_units_requiring_metadata();
376
377 let files = CompilationFiles::new(self, host_layout, targets);
378 self.files = Some(files);
379 Ok(())
380 }
381
382 #[tracing::instrument(skip_all)]
385 pub fn prepare(&mut self) -> CargoResult<()> {
386 self.files
387 .as_mut()
388 .unwrap()
389 .host
390 .prepare()
391 .context("couldn't prepare build directories")?;
392 for target in self.files.as_mut().unwrap().target.values_mut() {
393 target
394 .prepare()
395 .context("couldn't prepare build directories")?;
396 }
397
398 let files = self.files.as_ref().unwrap();
399 for &kind in self.bcx.all_kinds.iter() {
400 let layout = files.layout(kind);
401 self.compilation
402 .root_output
403 .insert(kind, layout.dest().to_path_buf());
404 self.compilation
405 .deps_output
406 .insert(kind, layout.deps().to_path_buf());
407 }
408 Ok(())
409 }
410
    /// Returns the per-unit output-file layout.
    ///
    /// # Panics
    ///
    /// Panics if called before [`BuildRunner::prepare_units`] has populated
    /// `self.files`.
    pub fn files(&self) -> &CompilationFiles<'a, 'gctx> {
        self.files.as_ref().unwrap()
    }
414
415 pub fn outputs(&self, unit: &Unit) -> CargoResult<Arc<Vec<OutputFile>>> {
417 self.files.as_ref().unwrap().outputs(unit, self.bcx)
418 }
419
    /// Returns the direct dependency edges of `unit` in the unit graph.
    ///
    /// # Panics
    ///
    /// Panics if `unit` is not a key in the unit graph (plain indexing).
    pub fn unit_deps(&self, unit: &Unit) -> &[UnitDep] {
        &self.bcx.unit_graph[unit]
    }
424
425 pub fn find_build_script_units(&self, unit: &Unit) -> Option<Vec<Unit>> {
429 if unit.mode.is_run_custom_build() {
430 return Some(vec![unit.clone()]);
431 }
432
433 let build_script_units: Vec<Unit> = self.bcx.unit_graph[unit]
434 .iter()
435 .filter(|unit_dep| {
436 unit_dep.unit.mode.is_run_custom_build()
437 && unit_dep.unit.pkg.package_id() == unit.pkg.package_id()
438 })
439 .map(|unit_dep| unit_dep.unit.clone())
440 .collect();
441 if build_script_units.is_empty() {
442 None
443 } else {
444 Some(build_script_units)
445 }
446 }
447
448 pub fn find_build_script_metadatas(&self, unit: &Unit) -> Option<Vec<UnitHash>> {
453 self.find_build_script_units(unit).map(|units| {
454 units
455 .iter()
456 .map(|u| self.get_run_build_script_metadata(u))
457 .collect()
458 })
459 }
460
    /// Returns the metadata hash for a "run custom build" unit.
    ///
    /// # Panics
    ///
    /// Panics if `unit` is not in run-custom-build mode, or if called before
    /// [`BuildRunner::prepare_units`].
    pub fn get_run_build_script_metadata(&self, unit: &Unit) -> UnitHash {
        assert!(unit.mode.is_run_custom_build());
        self.files().metadata(unit).unit_id()
    }
466
467 pub fn sbom_output_files(&self, unit: &Unit) -> CargoResult<Vec<PathBuf>> {
469 Ok(self
470 .outputs(unit)?
471 .iter()
472 .filter(|o| o.flavor == FileFlavor::Sbom)
473 .map(|o| o.path.clone())
474 .collect())
475 }
476
    /// Returns whether `unit`'s package is one of the root packages of this
    /// build (recorded by [`BuildRunner::prepare_units`]).
    pub fn is_primary_package(&self, unit: &Unit) -> bool {
        self.primary_packages.contains(&unit.pkg.package_id())
    }
480
481 pub fn build_plan_inputs(&self) -> CargoResult<Vec<PathBuf>> {
484 let mut inputs = BTreeSet::new();
486 for unit in self.bcx.unit_graph.keys() {
488 inputs.insert(unit.pkg.manifest_path().to_path_buf());
489 }
490 Ok(inputs.into_iter().collect())
491 }
492
493 pub fn unit_output(&self, unit: &Unit, path: &Path) -> UnitOutput {
496 let script_metas = self.find_build_script_metadatas(unit);
497 UnitOutput {
498 unit: unit.clone(),
499 path: path.to_path_buf(),
500 script_metas,
501 }
502 }
503
    /// Scans every unit's output paths (including hardlinks and export
    /// paths) for collisions: warns on ordinary filename collisions and
    /// hard-errors when two documented targets would write to the same path.
    #[tracing::instrument(skip_all)]
    fn check_collisions(&self) -> CargoResult<()> {
        // Map from output path to the first unit that claimed it.
        let mut output_collisions = HashMap::new();
        let describe_collision = |unit: &Unit, other_unit: &Unit, path: &PathBuf| -> String {
            format!(
                "The {} target `{}` in package `{}` has the same output \
                 filename as the {} target `{}` in package `{}`.\n\
                 Colliding filename is: {}\n",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg.package_id(),
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg.package_id(),
                path.display()
            )
        };
        let suggestion = "Consider changing their names to be unique or compiling them separately.\n\
             This may become a hard error in the future; see \
             <https://github.com/rust-lang/cargo/issues/6313>.";
        let rustdoc_suggestion = "This is a known bug where multiple crates with the same name use\n\
             the same path; see <https://github.com/rust-lang/cargo/issues/6313>.";
        // Emits a warning; the wording differs depending on whether the two
        // targets share a name (rename them) or merely share an output file
        // (likely a Cargo bug worth reporting).
        let report_collision = |unit: &Unit,
                                other_unit: &Unit,
                                path: &PathBuf,
                                suggestion: &str|
         -> CargoResult<()> {
            if unit.target.name() == other_unit.target.name() {
                self.bcx.gctx.shell().warn(format!(
                    "output filename collision.\n\
                     {}\
                     The targets should have unique names.\n\
                     {}",
                    describe_collision(unit, other_unit, path),
                    suggestion
                ))
            } else {
                self.bcx.gctx.shell().warn(format!(
                    "output filename collision.\n\
                     {}\
                     The output filenames should be unique.\n\
                     {}\n\
                     If this looks unexpected, it may be a bug in Cargo. Please file a bug report at\n\
                     https://github.com/rust-lang/cargo/issues/ with as much information as you\n\
                     can provide.\n\
                     cargo {} running on `{}` target `{}`\n\
                     First unit: {:?}\n\
                     Second unit: {:?}",
                    describe_collision(unit, other_unit, path),
                    suggestion,
                    crate::version(),
                    self.bcx.host_triple(),
                    self.bcx.target_data.short_name(&unit.kind),
                    unit,
                    other_unit))
            }
        };

        // Unlike filename collisions above, two documented targets writing
        // to the same path is a hard error.
        fn doc_collision_error(unit: &Unit, other_unit: &Unit) -> CargoResult<()> {
            bail!(
                "document output filename collision\n\
                 The {} `{}` in package `{}` has the same name as the {} `{}` in package `{}`.\n\
                 Only one may be documented at once since they output to the same path.\n\
                 Consider documenting only one, renaming one, \
                 or marking one with `doc = false` in Cargo.toml.",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg,
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg,
            );
        }

        // Run-custom-build units have no interesting outputs to compare;
        // sort the rest so collision reports are deterministic.
        let mut keys = self
            .bcx
            .unit_graph
            .keys()
            .filter(|unit| !unit.mode.is_run_custom_build())
            .collect::<Vec<_>>();
        keys.sort_unstable();
        // Doc collisions are tracked per (crate name, compile kind), with
        // libs and bins tracked separately.
        let mut doc_libs = HashMap::new();
        let mut doc_bins = HashMap::new();
        for unit in keys {
            if unit.mode.is_doc() && self.is_primary_package(unit) {
                if unit.target.is_lib() {
                    if let Some(prev) = doc_libs.insert((unit.target.crate_name(), unit.kind), unit)
                    {
                        doc_collision_error(unit, prev)?;
                    }
                } else if let Some(prev) =
                    doc_bins.insert((unit.target.crate_name(), unit.kind), unit)
                {
                    doc_collision_error(unit, prev)?;
                }
            }
            for output in self.outputs(unit)?.iter() {
                if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
                    if unit.mode.is_doc() {
                        report_collision(unit, other_unit, &output.path, rustdoc_suggestion)?;
                    } else {
                        report_collision(unit, other_unit, &output.path, suggestion)?;
                    }
                }
                if let Some(hardlink) = output.hardlink.as_ref() {
                    if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
                        report_collision(unit, other_unit, hardlink, suggestion)?;
                    }
                }
                if let Some(ref export_path) = output.export_path {
                    if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) {
                        self.bcx.gctx.shell().warn(format!(
                            "`--artifact-dir` filename collision.\n\
                             {}\
                             The exported filenames should be unique.\n\
                             {}",
                            describe_collision(unit, other_unit, export_path),
                            suggestion
                        ))?;
                    }
                }
            }
        }
        Ok(())
    }
643
644 fn record_units_requiring_metadata(&mut self) {
649 for (key, deps) in self.bcx.unit_graph.iter() {
650 for dep in deps {
651 if self.only_requires_rmeta(key, &dep.unit) {
652 self.rmeta_required.insert(dep.unit.clone());
653 }
654 }
655 }
656 }
657
658 pub fn only_requires_rmeta(&self, parent: &Unit, dep: &Unit) -> bool {
661 !parent.requires_upstream_objects()
664 && parent.mode == CompileMode::Build
665 && !dep.requires_upstream_objects()
668 && dep.mode == CompileMode::Build
669 }
670
    /// Returns whether `unit` was recorded (by
    /// `record_units_requiring_metadata`) as one whose dependants only need
    /// its `.rmeta` file.
    pub fn rmeta_required(&self, unit: &Unit) -> bool {
        self.rmeta_required.contains(unit)
    }
676
    /// For every doc / doc-scrape unit, chooses which unit's metadata hash
    /// to record in `metadata_for_doc_units`.
    ///
    /// Preference order for the donor unit: a `check` unit for the same
    /// package+target, then a `doc` unit, then the unit itself.
    ///
    /// NOTE(review): presumably this lets rustdoc share the metadata hash of
    /// the corresponding check/doc build of the same target — confirm
    /// against the consumers of `metadata_for_doc_units`.
    #[tracing::instrument(skip_all)]
    pub fn compute_metadata_for_doc_units(&mut self) {
        for unit in self.bcx.unit_graph.keys() {
            // Only documentation-related units get an entry.
            if !unit.mode.is_doc() && !unit.mode.is_doc_scrape() {
                continue;
            }

            // All non-scrape units building the same target of the same
            // package.
            let matching_units = self
                .bcx
                .unit_graph
                .keys()
                .filter(|other| {
                    unit.pkg == other.pkg
                        && unit.target == other.target
                        && !other.mode.is_doc_scrape()
                })
                .collect::<Vec<_>>();
            // check > doc > self, as documented above.
            let metadata_unit = matching_units
                .iter()
                .find(|other| other.mode.is_check())
                .or_else(|| matching_units.iter().find(|other| other.mode.is_doc()))
                .unwrap_or(&unit);
            self.metadata_for_doc_units
                .insert(unit.clone(), self.files().metadata(metadata_unit));
        }
    }
713}