1use std::collections::{HashMap, HashSet};
4use std::path::{Path, PathBuf};
5use std::sync::{Arc, Mutex};
6
7use crate::core::PackageId;
8use crate::core::compiler::compilation::{self, UnitOutput};
9use crate::core::compiler::{self, Unit, UserIntent, artifact};
10use crate::util::cache_lock::CacheLockMode;
11use crate::util::errors::CargoResult;
12use annotate_snippets::{Level, Message};
13use anyhow::{Context as _, bail};
14use cargo_util::paths;
15use filetime::FileTime;
16use itertools::Itertools;
17use jobserver::Client;
18
19use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
20use super::fingerprint::{Checksum, Fingerprint};
21use super::job_queue::JobQueue;
22use super::layout::Layout;
23use super::lto::Lto;
24use super::unit_graph::UnitDep;
25use super::{
26 BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor, RustDocFingerprint,
27};
28
29mod compilation_files;
30use self::compilation_files::CompilationFiles;
31pub use self::compilation_files::{Metadata, OutputFile, UnitHash};
32
/// Shared state for a single compilation session.
///
/// Created once per build (see [`BuildRunner::new`]) and threaded through
/// unit preparation, job-queue execution, and final [`Compilation`] assembly.
pub struct BuildRunner<'a, 'gctx> {
    /// The build context: unit graph, roots, configuration, and target data.
    pub bcx: &'a BuildContext<'a, 'gctx>,
    /// The accumulated result of the build, returned to the caller when
    /// `compile` or `dry_run` finishes.
    pub compilation: Compilation<'gctx>,
    /// Output captured from executed build scripts, shared across job threads.
    pub build_script_outputs: Arc<Mutex<BuildScriptOutputs>>,
    /// Dependencies declared by build scripts, keyed by unit
    /// (populated by the `custom_build` module — not visible in this view).
    pub build_explicit_deps: HashMap<Unit, BuildDeps>,
    /// Per-unit fingerprints; memoized state is cleared before job execution
    /// in `compile` (see the `fingerprint` module for their semantics).
    pub fingerprints: HashMap<Unit, Arc<Fingerprint>>,
    /// Cache of file modification times, keyed by path.
    pub mtime_cache: HashMap<PathBuf, FileTime>,
    /// Cache of file content checksums, keyed by path.
    pub checksum_cache: HashMap<PathBuf, Checksum>,
    /// Units compiled during this session (populated during job execution —
    /// not visible in this view).
    pub compiled: HashSet<Unit>,
    /// Build-script information per unit; filled in by
    /// `custom_build::build_map` during `compile`.
    pub build_scripts: HashMap<Unit, Arc<BuildScripts>>,
    /// The jobserver client limiting build parallelism — inherited from the
    /// environment when available, otherwise created in `new`.
    pub jobserver: Client,
    /// Package ids of the root units being built; see `is_primary_package`.
    primary_packages: HashSet<PackageId>,
    /// Per-unit output/path information; `None` until `prepare_units` runs.
    files: Option<CompilationFiles<'a, 'gctx>>,
    /// Units for which some dependent only needs the `.rmeta` metadata file;
    /// filled in by `record_units_requiring_metadata`.
    rmeta_required: HashSet<Unit>,
    /// LTO mode per unit, computed by `super::lto::generate`.
    pub lto: HashMap<Unit, Lto>,
    /// Metadata chosen for doc/doc-scrape units; see
    /// `compute_metadata_for_doc_units`.
    pub metadata_for_doc_units: HashMap<Unit, Metadata>,
    /// Hashes of doc-scrape units that failed — shared with job threads;
    /// presumably consulted elsewhere to skip or report them (not visible here).
    pub failed_scrape_units: Arc<Mutex<HashSet<UnitHash>>>,
}
92
93impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
94 pub fn new(bcx: &'a BuildContext<'a, 'gctx>) -> CargoResult<Self> {
95 let jobserver = match bcx.gctx.jobserver_from_env() {
104 Some(c) => c.clone(),
105 None => {
106 let client =
107 Client::new(bcx.jobs() as usize).context("failed to create jobserver")?;
108 client.acquire_raw()?;
109 client
110 }
111 };
112
113 Ok(Self {
114 bcx,
115 compilation: Compilation::new(bcx)?,
116 build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())),
117 fingerprints: HashMap::new(),
118 mtime_cache: HashMap::new(),
119 checksum_cache: HashMap::new(),
120 compiled: HashSet::new(),
121 build_scripts: HashMap::new(),
122 build_explicit_deps: HashMap::new(),
123 jobserver,
124 primary_packages: HashSet::new(),
125 files: None,
126 rmeta_required: HashSet::new(),
127 lto: HashMap::new(),
128 metadata_for_doc_units: HashMap::new(),
129 failed_scrape_units: Arc::new(Mutex::new(HashSet::new())),
130 })
131 }
132
    /// Performs the planning stages of a compilation — LTO computation, unit
    /// and directory preparation, collision checks, and expected-output
    /// collection — without executing any jobs, then returns the resulting
    /// `Compilation`.
    pub fn dry_run(mut self) -> CargoResult<Compilation<'gctx>> {
        // Same shared package-cache lock a real build takes.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        self.check_collisions()?;

        // Record where tests/binaries/cdylibs would end up.
        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;
        }

        Ok(self.compilation)
    }
153
    /// Starts the compilation, waits for it to finish, and returns the
    /// resulting `Compilation`.
    ///
    /// This drives the whole build: unit/directory preparation, build-script
    /// mapping, collision checks, job-queue execution, and post-build
    /// bookkeeping (`OUT_DIR` env vars, doctest arguments, build-script
    /// environment propagation).
    #[tracing::instrument(skip_all)]
    pub fn compile(mut self, exec: &Arc<dyn Executor>) -> CargoResult<Compilation<'gctx>> {
        // Hold a shared package-cache lock for the duration of the build.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        let mut queue = JobQueue::new(self.bcx);
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        custom_build::build_map(&mut self)?;
        self.check_collisions()?;
        self.compute_metadata_for_doc_units();

        // Rustdoc keeps its own fingerprint, checked only for doc intents.
        if self.bcx.build_config.intent.is_doc() {
            RustDocFingerprint::check_rustdoc_fingerprint(&self)?
        }

        // Enqueue a job for each root unit (dependencies are handled inside
        // `super::compile`).
        for unit in &self.bcx.roots {
            let force_rebuild = self.bcx.build_config.force_rebuild;
            super::compile(&mut self, &mut queue, unit, exec, force_rebuild)?;
        }

        // NOTE(review): presumably drops fingerprint state memoized during
        // enqueueing so it is recomputed during execution — confirm against
        // the `fingerprint` module.
        for fingerprint in self.fingerprints.values() {
            fingerprint.clear_memoized();
        }

        // Run all queued jobs to completion.
        queue.execute(&mut self)?;

        // Record `OUT_DIR` for root units with build scripts, deduplicated by
        // package id.
        let units_with_build_script = &self
            .bcx
            .roots
            .iter()
            .filter(|unit| self.build_scripts.contains_key(unit))
            .dedup_by(|x, y| x.pkg.package_id() == y.pkg.package_id())
            .collect::<Vec<_>>();
        for unit in units_with_build_script {
            for dep in &self.bcx.unit_graph[unit] {
                if dep.unit.mode.is_run_custom_build() {
                    let out_dir = self
                        .files()
                        .build_script_out_dir(&dep.unit)
                        .display()
                        .to_string();
                    let script_meta = self.get_run_build_script_metadata(&dep.unit);
                    self.compilation
                        .extra_env
                        .entry(script_meta)
                        .or_insert_with(Vec::new)
                        .push(("OUT_DIR".to_string(), out_dir));
                }
            }
        }

        // Collect per-root outputs and, for doctest units, assemble the
        // rustdoc invocation arguments.
        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;

            if unit.mode.is_doc_test() {
                let mut unstable_opts = false;
                let mut args = compiler::extern_args(&self, unit, &mut unstable_opts)?;
                args.extend(compiler::lto_args(&self, unit));
                args.extend(compiler::features_args(unit));
                args.extend(compiler::check_cfg_args(unit));

                // Forward cfgs, check-cfgs, and applicable linker args
                // captured from this unit's build script(s).
                let script_metas = self.find_build_script_metadatas(unit);
                if let Some(meta_vec) = script_metas.clone() {
                    for meta in meta_vec {
                        if let Some(output) = self.build_script_outputs.lock().unwrap().get(meta) {
                            for cfg in &output.cfgs {
                                args.push("--cfg".into());
                                args.push(cfg.into());
                            }

                            for check_cfg in &output.check_cfgs {
                                args.push("--check-cfg".into());
                                args.push(check_cfg.into());
                            }

                            // Only linker args applying to this target/mode.
                            for (lt, arg) in &output.linker_args {
                                if lt.applies_to(&unit.target, unit.mode) {
                                    args.push("-C".into());
                                    args.push(format!("link-arg={}", arg).into());
                                }
                            }
                        }
                    }
                }
                args.extend(unit.rustdocflags.iter().map(Into::into));

                use super::MessageFormat;
                let format = match self.bcx.build_config.message_format {
                    MessageFormat::Short => "short",
                    MessageFormat::Human => "human",
                    MessageFormat::Json { .. } => "json",
                };
                args.push("--error-format".into());
                args.push(format.into());

                self.compilation.to_doc_test.push(compilation::Doctest {
                    unit: unit.clone(),
                    args,
                    unstable_opts,
                    linker: self.compilation.target_linker(unit.kind).clone(),
                    script_metas,
                    env: artifact::get_env(&self, self.unit_deps(unit))?,
                });
            }

            super::output_depinfo(&mut self, unit)?;
        }

        // Propagate env vars and native library search paths emitted by
        // build scripts into the final `Compilation`.
        for (script_meta, output) in self.build_script_outputs.lock().unwrap().iter() {
            self.compilation
                .extra_env
                .entry(*script_meta)
                .or_insert_with(Vec::new)
                .extend(output.env.iter().cloned());

            for dir in output.library_paths.iter() {
                self.compilation
                    .native_dirs
                    .insert(dir.clone().into_path_buf());
            }
        }
        Ok(self.compilation)
    }
307
308 fn collect_tests_and_executables(&mut self, unit: &Unit) -> CargoResult<()> {
309 for output in self.outputs(unit)?.iter() {
310 if matches!(
311 output.flavor,
312 FileFlavor::DebugInfo | FileFlavor::Auxiliary | FileFlavor::Sbom
313 ) {
314 continue;
315 }
316
317 let bindst = output.bin_dst();
318
319 if unit.mode == CompileMode::Test {
320 self.compilation
321 .tests
322 .push(self.unit_output(unit, &output.path));
323 } else if unit.target.is_executable() {
324 self.compilation
325 .binaries
326 .push(self.unit_output(unit, bindst));
327 } else if unit.target.is_cdylib()
328 && !self.compilation.cdylibs.iter().any(|uo| uo.unit == *unit)
329 {
330 self.compilation
331 .cdylibs
332 .push(self.unit_output(unit, bindst));
333 }
334 }
335 Ok(())
336 }
337
338 pub fn get_executable(&mut self, unit: &Unit) -> CargoResult<Option<PathBuf>> {
340 let is_binary = unit.target.is_executable();
341 let is_test = unit.mode.is_any_test();
342 if !unit.mode.generates_executable() || !(is_binary || is_test) {
343 return Ok(None);
344 }
345 Ok(self
346 .outputs(unit)?
347 .iter()
348 .find(|o| o.flavor == FileFlavor::Normal)
349 .map(|output| output.bin_dst().clone()))
350 }
351
    /// Creates the output `Layout`s for the host and every requested target,
    /// records the root packages/crate names, and builds the
    /// `CompilationFiles` map used for all later path queries.
    #[tracing::instrument(skip_all)]
    pub fn prepare_units(&mut self) -> CargoResult<()> {
        let dest = self.bcx.profiles.get_dir_name();
        // `check` skips the artifact-dir lock unless timing outputs were
        // requested — presumably because those land in the artifact dir;
        // TODO confirm. All other intents always take the lock.
        let must_take_artifact_dir_lock = match self.bcx.build_config.intent {
            UserIntent::Check { .. } => {
                !self.bcx.build_config.timing_outputs.is_empty()
            }
            UserIntent::Build
            | UserIntent::Test
            | UserIntent::Doc { .. }
            | UserIntent::Doctest
            | UserIntent::Bench => true,
        };
        let host_layout = Layout::new(self.bcx.ws, None, &dest, must_take_artifact_dir_lock)?;
        // One layout per cross-compilation target; the host kind is covered
        // by `host_layout` above.
        let mut targets = HashMap::new();
        for kind in self.bcx.all_kinds.iter() {
            if let CompileKind::Target(target) = *kind {
                let layout = Layout::new(
                    self.bcx.ws,
                    Some(target),
                    &dest,
                    must_take_artifact_dir_lock,
                )?;
                targets.insert(target, layout);
            }
        }
        self.primary_packages
            .extend(self.bcx.roots.iter().map(|u| u.pkg.package_id()));
        self.compilation
            .root_crate_names
            .extend(self.bcx.roots.iter().map(|u| u.target.crate_name()));

        self.record_units_requiring_metadata();

        let files = CompilationFiles::new(self, host_layout, targets);
        self.files = Some(files);
        Ok(())
    }
396
    /// Creates the build directories on disk and records the root-output and
    /// deps-output directories in `self.compilation`.
    ///
    /// Must run after `prepare_units` (this unwraps `self.files`).
    #[tracing::instrument(skip_all)]
    pub fn prepare(&mut self) -> CargoResult<()> {
        // Materialize the host and per-target directory structures.
        self.files
            .as_mut()
            .unwrap()
            .host
            .prepare()
            .context("couldn't prepare build directories")?;
        for target in self.files.as_mut().unwrap().target.values_mut() {
            target
                .prepare()
                .context("couldn't prepare build directories")?;
        }

        let files = self.files.as_ref().unwrap();
        for &kind in self.bcx.all_kinds.iter() {
            let layout = files.layout(kind);
            if let Some(artifact_dir) = layout.artifact_dir() {
                self.compilation
                    .root_output
                    .insert(kind, artifact_dir.dest().to_path_buf());
            }
            if self.bcx.gctx.cli_unstable().build_dir_new_layout {
                // NOTE(review): this inserts one deps dir per *unit* under the
                // same `kind` key, so later units overwrite earlier ones in
                // `deps_output` — confirm this is intended for the new layout.
                for (unit, _) in self.bcx.unit_graph.iter() {
                    let dep_dir = self.files().deps_dir(unit);
                    paths::create_dir_all(&dep_dir)?;
                    self.compilation.deps_output.insert(kind, dep_dir);
                }
            } else {
                self.compilation
                    .deps_output
                    .insert(kind, layout.build_dir().legacy_deps().to_path_buf());
            }
        }
        Ok(())
    }
435
    /// Returns the `CompilationFiles` for this session.
    ///
    /// Panics if called before `prepare_units` has populated `self.files`.
    pub fn files(&self) -> &CompilationFiles<'a, 'gctx> {
        self.files.as_ref().unwrap()
    }
439
    /// Returns the expected output files for the given unit.
    ///
    /// Panics if called before `prepare_units` has populated `self.files`.
    pub fn outputs(&self, unit: &Unit) -> CargoResult<Arc<Vec<OutputFile>>> {
        self.files.as_ref().unwrap().outputs(unit, self.bcx)
    }
444
    /// Returns the direct dependencies of `unit` in the unit graph.
    pub fn unit_deps(&self, unit: &Unit) -> &[UnitDep] {
        &self.bcx.unit_graph[unit]
    }
449
450 pub fn find_build_script_units(&self, unit: &Unit) -> Option<Vec<Unit>> {
454 if unit.mode.is_run_custom_build() {
455 return Some(vec![unit.clone()]);
456 }
457
458 let build_script_units: Vec<Unit> = self.bcx.unit_graph[unit]
459 .iter()
460 .filter(|unit_dep| {
461 unit_dep.unit.mode.is_run_custom_build()
462 && unit_dep.unit.pkg.package_id() == unit.pkg.package_id()
463 })
464 .map(|unit_dep| unit_dep.unit.clone())
465 .collect();
466 if build_script_units.is_empty() {
467 None
468 } else {
469 Some(build_script_units)
470 }
471 }
472
473 pub fn find_build_script_metadatas(&self, unit: &Unit) -> Option<Vec<UnitHash>> {
478 self.find_build_script_units(unit).map(|units| {
479 units
480 .iter()
481 .map(|u| self.get_run_build_script_metadata(u))
482 .collect()
483 })
484 }
485
    /// Returns the unit hash for a `RunCustomBuild` unit.
    ///
    /// Panics (via the `assert!`) if `unit` is not a run-build-script unit.
    pub fn get_run_build_script_metadata(&self, unit: &Unit) -> UnitHash {
        assert!(unit.mode.is_run_custom_build());
        self.files().metadata(unit).unit_id()
    }
491
492 pub fn sbom_output_files(&self, unit: &Unit) -> CargoResult<Vec<PathBuf>> {
494 Ok(self
495 .outputs(unit)?
496 .iter()
497 .filter(|o| o.flavor == FileFlavor::Sbom)
498 .map(|o| o.path.clone())
499 .collect())
500 }
501
    /// Whether `unit`'s package is one of the roots of this build
    /// (recorded from `bcx.roots` in `prepare_units`).
    pub fn is_primary_package(&self, unit: &Unit) -> bool {
        self.primary_packages.contains(&unit.pkg.package_id())
    }
505
506 pub fn unit_output(&self, unit: &Unit, path: &Path) -> UnitOutput {
509 let script_metas = self.find_build_script_metadatas(unit);
510 UnitOutput {
511 unit: unit.clone(),
512 path: path.to_path_buf(),
513 script_metas,
514 }
515 }
516
    /// Warns (or errors) when two units would write to the same output path.
    ///
    /// Three categories are handled:
    /// - doc outputs of primary-package units with the same crate name —
    ///   hard error via `doc_collision_error`;
    /// - regular output-file and hardlink collisions — warning;
    /// - `--artifact-dir` export-path collisions — warning.
    #[tracing::instrument(skip_all)]
    fn check_collisions(&self) -> CargoResult<()> {
        // Maps each seen output path to the unit that claimed it first.
        let mut output_collisions = HashMap::new();
        // Human-readable description shared by all collision diagnostics.
        let describe_collision = |unit: &Unit, other_unit: &Unit| -> String {
            format!(
                "the {} target `{}` in package `{}` has the same output filename as the {} target `{}` in package `{}`",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg.package_id(),
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg.package_id(),
            )
        };
        let suggestion = [
            Level::NOTE.message("this may become a hard error in the future; see <https://github.com/rust-lang/cargo/issues/6313>"),
            Level::HELP.message("consider changing their names to be unique or compiling them separately")
        ];
        let rustdoc_suggestion = [
            Level::NOTE.message("this is a known bug where multiple crates with the same name use the same path; see <https://github.com/rust-lang/cargo/issues/6313>")
        ];
        // Emits a collision warning. The wording differs by case: targets
        // sharing a name are likely user-fixable; otherwise it's probably a
        // Cargo bug, so extra debugging detail is included.
        let report_collision = |unit: &Unit,
                                other_unit: &Unit,
                                path: &PathBuf,
                                messages: &[Message<'_>]|
         -> CargoResult<()> {
            if unit.target.name() == other_unit.target.name() {
                self.bcx.gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title(format!("output filename collision at {}", path.display()))
                        .elements(
                            [Level::NOTE.message(describe_collision(unit, other_unit))]
                                .into_iter()
                                .chain(messages.iter().cloned()),
                        )],
                    false,
                )
            } else {
                self.bcx.gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title(format!("output filename collision at {}", path.display()))
                        .elements([
                            Level::NOTE.message(describe_collision(unit, other_unit)),
                            Level::NOTE.message("if this looks unexpected, it may be a bug in Cargo. Please file a bug \
                            report at https://github.com/rust-lang/cargo/issues/ with as much information as you \
                            can provide."),
                            Level::NOTE.message(format!("cargo {} running on `{}` target `{}`",
                                crate::version(), self.bcx.host_triple(), self.bcx.target_data.short_name(&unit.kind))),
                            Level::NOTE.message(format!("first unit: {unit:?}")),
                            Level::NOTE.message(format!("second unit: {other_unit:?}")),
                        ])],
                    false,
                )
            }
        };

        // Doc collisions are fatal: both units would write to the same doc
        // path, so only one can be documented at a time.
        fn doc_collision_error(unit: &Unit, other_unit: &Unit) -> CargoResult<()> {
            bail!(
                "document output filename collision\n\
                 The {} `{}` in package `{}` has the same name as the {} `{}` in package `{}`.\n\
                 Only one may be documented at once since they output to the same path.\n\
                 Consider documenting only one, renaming one, \
                 or marking one with `doc = false` in Cargo.toml.",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg,
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg,
            );
        }

        // Build-script runs have no user-visible outputs to collide; sort the
        // rest so diagnostics come out in a deterministic order.
        let mut keys = self
            .bcx
            .unit_graph
            .keys()
            .filter(|unit| !unit.mode.is_run_custom_build())
            .collect::<Vec<_>>();
        keys.sort_unstable();
        // Doc collisions are tracked separately for libs and bins, keyed by
        // (crate name, compile kind).
        let mut doc_libs = HashMap::new();
        let mut doc_bins = HashMap::new();
        for unit in keys {
            if unit.mode.is_doc() && self.is_primary_package(unit) {
                if unit.target.is_lib() {
                    if let Some(prev) = doc_libs.insert((unit.target.crate_name(), unit.kind), unit)
                    {
                        doc_collision_error(unit, prev)?;
                    }
                } else if let Some(prev) =
                    doc_bins.insert((unit.target.crate_name(), unit.kind), unit)
                {
                    doc_collision_error(unit, prev)?;
                }
            }
            for output in self.outputs(unit)?.iter() {
                if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
                    if unit.mode.is_doc() {
                        report_collision(unit, other_unit, &output.path, &rustdoc_suggestion)?;
                    } else {
                        report_collision(unit, other_unit, &output.path, &suggestion)?;
                    }
                }
                if let Some(hardlink) = output.hardlink.as_ref() {
                    if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
                        report_collision(unit, other_unit, hardlink, &suggestion)?;
                    }
                }
                if let Some(ref export_path) = output.export_path {
                    if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) {
                        self.bcx.gctx.shell().print_report(
                            &[Level::WARNING
                                .secondary_title(format!(
                                    "`--artifact-dir` filename collision at {}",
                                    export_path.display()
                                ))
                                .elements(
                                    [Level::NOTE.message(describe_collision(unit, other_unit))]
                                        .into_iter()
                                        .chain(suggestion.iter().cloned()),
                                )],
                            false,
                        )?;
                    }
                }
            }
        }
        Ok(())
    }
659
660 fn record_units_requiring_metadata(&mut self) {
665 for (key, deps) in self.bcx.unit_graph.iter() {
666 for dep in deps {
667 if self.only_requires_rmeta(key, &dep.unit) {
668 self.rmeta_required.insert(dep.unit.clone());
669 }
670 }
671 }
672 }
673
674 pub fn only_requires_rmeta(&self, parent: &Unit, dep: &Unit) -> bool {
677 !parent.requires_upstream_objects()
680 && parent.mode == CompileMode::Build
681 && !dep.requires_upstream_objects()
684 && dep.mode == CompileMode::Build
685 }
686
    /// Whether some dependent of `unit` only needs its `.rmeta` metadata
    /// (recorded by `record_units_requiring_metadata`).
    pub fn rmeta_required(&self, unit: &Unit) -> bool {
        self.rmeta_required.contains(unit)
    }
692
    /// Precomputes the `Metadata` each doc/doc-scrape unit should use and
    /// stores it in `metadata_for_doc_units`.
    ///
    /// For each doc or scrape unit, a "matching" unit (same package and
    /// target, excluding scrape units) is preferred in this order: a check
    /// unit, then a doc unit, then the unit itself.
    #[tracing::instrument(skip_all)]
    pub fn compute_metadata_for_doc_units(&mut self) {
        for unit in self.bcx.unit_graph.keys() {
            if !unit.mode.is_doc() && !unit.mode.is_doc_scrape() {
                continue;
            }

            // Units producing the same package/target; scrape units are
            // excluded as candidates.
            let matching_units = self
                .bcx
                .unit_graph
                .keys()
                .filter(|other| {
                    unit.pkg == other.pkg
                        && unit.target == other.target
                        && !other.mode.is_doc_scrape()
                })
                .collect::<Vec<_>>();
            let metadata_unit = matching_units
                .iter()
                .find(|other| other.mode.is_check())
                .or_else(|| matching_units.iter().find(|other| other.mode.is_doc()))
                .unwrap_or(&unit);
            self.metadata_for_doc_units
                .insert(unit.clone(), self.files().metadata(metadata_unit));
        }
    }
729}