From 3d3534d179ecd1d1a8bbaa4aad5488db67c4242d Mon Sep 17 00:00:00 2001
From: bit-aloo
Date: Sun, 27 Jul 2025 12:13:45 +0530
Subject: [PATCH 1/7] add download context

---
 src/bootstrap/src/core/download.rs | 37 ++++++++++++++++++++++++++++++
 1 file changed, 37 insertions(+)

diff --git a/src/bootstrap/src/core/download.rs b/src/bootstrap/src/core/download.rs
index 373fcd52052e0..19adfab3f3675 100644
--- a/src/bootstrap/src/core/download.rs
+++ b/src/bootstrap/src/core/download.rs
@@ -852,6 +852,43 @@ download-rustc = false
     }
 }
 
+/// Should only be used for downloads that happen before config initialization.
+pub(crate) struct DownloadContext<'a> {
+    host_target: TargetSelection,
+    out: &'a Path,
+    patch_binaries_for_nix: Option<bool>,
+    exec_ctx: &'a ExecutionContext,
+    verbose: bool,
+    stage0_metadata: &'a build_helper::stage0_parser::Stage0,
+    llvm_assertions: bool,
+    bootstrap_cache_path: &'a Option<PathBuf>,
+    is_running_on_ci: bool,
+    dry_run: bool,
+}
+
+impl<'a> AsRef<DownloadContext<'a>> for DownloadContext<'a> {
+    fn as_ref(&self) -> &DownloadContext<'a> {
+        self
+    }
+}
+
+impl<'a> From<&'a Config> for DownloadContext<'a> {
+    fn from(value: &'a Config) -> Self {
+        DownloadContext {
+            host_target: value.host_target,
+            out: &value.out,
+            patch_binaries_for_nix: value.patch_binaries_for_nix,
+            exec_ctx: &value.exec_ctx,
+            verbose: value.verbose > 0,
+            stage0_metadata: &value.stage0_metadata,
+            llvm_assertions: value.llvm_assertions,
+            bootstrap_cache_path: &value.bootstrap_cache_path,
+            is_running_on_ci: value.is_running_on_ci,
+            dry_run: value.dry_run(),
+        }
+    }
+}
+
 fn path_is_dylib(path: &Path) -> bool {
     // The .so is not necessarily the extension, it might be libLLVM.so.18.1
     path.to_str().is_some_and(|path| path.contains(".so"))

From 861de92f947d9431f641afc2d589b366521f091d Mon Sep 17 00:00:00 2001
From: bit-aloo
Date: Sun, 27 Jul 2025 12:15:08 +0530
Subject: [PATCH 2/7] move utility methods out of config impl

---
 src/bootstrap/src/core/download.rs | 979 +++++++++++++++--------
 1 file changed, 518 insertions(+), 461 deletions(-)

diff --git a/src/bootstrap/src/core/download.rs b/src/bootstrap/src/core/download.rs
index 19adfab3f3675..f3a10048b6559 100644
--- a/src/bootstrap/src/core/download.rs
+++ b/src/bootstrap/src/core/download.rs
@@ -7,9 +7,9 @@ use std::sync::OnceLock;
 
 use xz2::bufread::XzDecoder;
 
-use crate::core::config::BUILDER_CONFIG_FILENAME;
+use crate::core::config::{BUILDER_CONFIG_FILENAME, TargetSelection};
 use crate::utils::build_stamp::BuildStamp;
-use crate::utils::exec::command;
+use crate::utils::exec::{ExecutionContext, command};
 use crate::utils::helpers::{exe, hex_encode, move_file};
 use crate::{Config, t};
 
@@ -24,17 +24,6 @@ fn extract_curl_version(out: String) -> semver::Version {
         .unwrap_or(semver::Version::new(1, 0, 0))
 }
 
-fn curl_version(config: &Config) -> semver::Version {
-    let mut curl = command("curl");
-    curl.arg("-V");
-    let curl = curl.run_capture_stdout(config);
-    if curl.is_failure() {
-        return semver::Version::new(1, 0, 0);
-    }
-    let output = curl.stdout();
-    extract_curl_version(output)
-}
-
 /// Generic helpers that are useful anywhere in bootstrap.
 impl Config {
     pub fn is_verbose(&self) -> bool {
@@ -49,10 +38,7 @@ impl Config {
     }
 
     pub(crate) fn remove(&self, f: &Path) {
-        if self.dry_run() {
-            return;
-        }
-        fs::remove_file(f).unwrap_or_else(|_| panic!("failed to remove {f:?}"));
+        remove(self.dry_run(), f);
     }
 
     /// Create a temporary directory in `out` and return its path.
@@ -68,49 +54,7 @@ impl Config { /// Whether or not `fix_bin_or_dylib` needs to be run; can only be true /// on NixOS fn should_fix_bins_and_dylibs(&self) -> bool { - let val = *SHOULD_FIX_BINS_AND_DYLIBS.get_or_init(|| { - let uname = command("uname").allow_failure().arg("-s").run_capture_stdout(self); - if uname.is_failure() { - return false; - } - let output = uname.stdout(); - if !output.starts_with("Linux") { - return false; - } - // If the user has asked binaries to be patched for Nix, then - // don't check for NixOS or `/lib`. - // NOTE: this intentionally comes after the Linux check: - // - patchelf only works with ELF files, so no need to run it on Mac or Windows - // - On other Unix systems, there is no stable syscall interface, so Nix doesn't manage the global libc. - if let Some(explicit_value) = self.patch_binaries_for_nix { - return explicit_value; - } - - // Use `/etc/os-release` instead of `/etc/NIXOS`. - // The latter one does not exist on NixOS when using tmpfs as root. - let is_nixos = match File::open("/etc/os-release") { - Err(e) if e.kind() == ErrorKind::NotFound => false, - Err(e) => panic!("failed to access /etc/os-release: {e}"), - Ok(os_release) => BufReader::new(os_release).lines().any(|l| { - let l = l.expect("reading /etc/os-release"); - matches!(l.trim(), "ID=nixos" | "ID='nixos'" | "ID=\"nixos\"") - }), - }; - if !is_nixos { - let in_nix_shell = env::var("IN_NIX_SHELL"); - if let Ok(in_nix_shell) = in_nix_shell { - eprintln!( - "The IN_NIX_SHELL environment variable is `{in_nix_shell}`; \ - you may need to set `patch-binaries-for-nix=true` in bootstrap.toml" - ); - } - } - is_nixos - }); - if val { - eprintln!("INFO: You seem to be using Nix."); - } - val + should_fix_bins_and_dylibs(self.patch_binaries_for_nix, &self.exec_ctx) } /// Modifies the interpreter section of 'fname' to fix the dynamic linker, @@ -121,259 +65,22 @@ impl Config { /// /// Please see for more information fn fix_bin_or_dylib(&self, fname: &Path) { - assert_eq!(SHOULD_FIX_BINS_AND_DYLIBS.get(), Some(&true)); - println!("attempting to patch {}", fname.display()); - - // Only build `.nix-deps` once. - static NIX_DEPS_DIR: OnceLock = OnceLock::new(); - let mut nix_build_succeeded = true; - let nix_deps_dir = NIX_DEPS_DIR.get_or_init(|| { - // Run `nix-build` to "build" each dependency (which will likely reuse - // the existing `/nix/store` copy, or at most download a pre-built copy). - // - // Importantly, we create a gc-root called `.nix-deps` in the `build/` - // directory, but still reference the actual `/nix/store` path in the rpath - // as it makes it significantly more robust against changes to the location of - // the `.nix-deps` location. - // - // bintools: Needed for the path of `ld-linux.so` (via `nix-support/dynamic-linker`). - // zlib: Needed as a system dependency of `libLLVM-*.so`. - // patchelf: Needed for patching ELF binaries (see doc comment above). 
- let nix_deps_dir = self.out.join(".nix-deps"); - const NIX_EXPR: &str = " - with (import {}); - symlinkJoin { - name = \"rust-stage0-dependencies\"; - paths = [ - zlib - patchelf - stdenv.cc.bintools - ]; - } - "; - nix_build_succeeded = command("nix-build") - .allow_failure() - .args([Path::new("-E"), Path::new(NIX_EXPR), Path::new("-o"), &nix_deps_dir]) - .run_capture_stdout(self) - .is_success(); - nix_deps_dir - }); - if !nix_build_succeeded { - return; - } - - let mut patchelf = command(nix_deps_dir.join("bin/patchelf")); - patchelf.args(&[ - OsString::from("--add-rpath"), - OsString::from(t!(fs::canonicalize(nix_deps_dir)).join("lib")), - ]); - if !path_is_dylib(fname) { - // Finally, set the correct .interp for binaries - let dynamic_linker_path = nix_deps_dir.join("nix-support/dynamic-linker"); - let dynamic_linker = t!(fs::read_to_string(dynamic_linker_path)); - patchelf.args(["--set-interpreter", dynamic_linker.trim_end()]); - } - patchelf.arg(fname); - let _ = patchelf.allow_failure().run_capture_stdout(self); + fix_bin_or_dylib(&self.out, fname, &self.exec_ctx); } fn download_file(&self, url: &str, dest_path: &Path, help_on_error: &str) { - self.verbose(|| println!("download {url}")); - // Use a temporary file in case we crash while downloading, to avoid a corrupt download in cache/. - let tempfile = self.tempdir().join(dest_path.file_name().unwrap()); - // While bootstrap itself only supports http and https downloads, downstream forks might - // need to download components from other protocols. The match allows them adding more - // protocols without worrying about merge conflicts if we change the HTTP implementation. - match url.split_once("://").map(|(proto, _)| proto) { - Some("http") | Some("https") => { - self.download_http_with_retries(&tempfile, url, help_on_error) - } - Some(other) => panic!("unsupported protocol {other} in {url}"), - None => panic!("no protocol in {url}"), - } - t!( - move_file(&tempfile, dest_path), - format!("failed to rename {tempfile:?} to {dest_path:?}") - ); - } - - fn download_http_with_retries(&self, tempfile: &Path, url: &str, help_on_error: &str) { - println!("downloading {url}"); - // Try curl. If that fails and we are on windows, fallback to PowerShell. - // options should be kept in sync with - // src/bootstrap/src/core/download.rs - // for consistency - let mut curl = command("curl").allow_failure(); - curl.args([ - // follow redirect - "--location", - // timeout if speed is < 10 bytes/sec for > 30 seconds - "--speed-time", - "30", - "--speed-limit", - "10", - // timeout if cannot connect within 30 seconds - "--connect-timeout", - "30", - // output file - "--output", - tempfile.to_str().unwrap(), - // if there is an error, don't restart the download, - // instead continue where it left off. - "--continue-at", - "-", - // retry up to 3 times. note that this means a maximum of 4 - // attempts will be made, since the first attempt isn't a *re*try. - "--retry", - "3", - // show errors, even if --silent is specified - "--show-error", - // set timestamp of downloaded file to that of the server - "--remote-time", - // fail on non-ok http status - "--fail", - ]); - // Don't print progress in CI; the \r wrapping looks bad and downloads don't take long enough for progress to be useful. - if self.is_running_on_ci { - curl.arg("--silent"); - } else { - curl.arg("--progress-bar"); - } - // --retry-all-errors was added in 7.71.0, don't use it if curl is old. 
- if curl_version(self) >= semver::Version::new(7, 71, 0) { - curl.arg("--retry-all-errors"); - } - curl.arg(url); - if !curl.run(self) { - if self.host_target.contains("windows-msvc") { - eprintln!("Fallback to PowerShell"); - for _ in 0..3 { - let powershell = command("PowerShell.exe").allow_failure().args([ - "/nologo", - "-Command", - "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;", - &format!( - "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')", - url, tempfile.to_str().expect("invalid UTF-8 not supported with powershell downloads"), - ), - ]).run_capture_stdout(self); - - if powershell.is_failure() { - return; - } - - eprintln!("\nspurious failure, trying again"); - } - } - if !help_on_error.is_empty() { - eprintln!("{help_on_error}"); - } - crate::exit!(1); - } + let dwn_ctx: DownloadContext<'_> = self.into(); + download_file(dwn_ctx, url, dest_path, help_on_error); } fn unpack(&self, tarball: &Path, dst: &Path, pattern: &str) { - eprintln!("extracting {} to {}", tarball.display(), dst.display()); - if !dst.exists() { - t!(fs::create_dir_all(dst)); - } - - // `tarball` ends with `.tar.xz`; strip that suffix - // example: `rust-dev-nightly-x86_64-unknown-linux-gnu` - let uncompressed_filename = - Path::new(tarball.file_name().expect("missing tarball filename")).file_stem().unwrap(); - let directory_prefix = Path::new(Path::new(uncompressed_filename).file_stem().unwrap()); - - // decompress the file - let data = t!(File::open(tarball), format!("file {} not found", tarball.display())); - let decompressor = XzDecoder::new(BufReader::new(data)); - - let mut tar = tar::Archive::new(decompressor); - - let is_ci_rustc = dst.ends_with("ci-rustc"); - let is_ci_llvm = dst.ends_with("ci-llvm"); - - // `compile::Sysroot` needs to know the contents of the `rustc-dev` tarball to avoid adding - // it to the sysroot unless it was explicitly requested. But parsing the 100 MB tarball is slow. - // Cache the entries when we extract it so we only have to read it once. - let mut recorded_entries = if is_ci_rustc { recorded_entries(dst, pattern) } else { None }; - - for member in t!(tar.entries()) { - let mut member = t!(member); - let original_path = t!(member.path()).into_owned(); - // skip the top-level directory - if original_path == directory_prefix { - continue; - } - let mut short_path = t!(original_path.strip_prefix(directory_prefix)); - let is_builder_config = short_path.to_str() == Some(BUILDER_CONFIG_FILENAME); - - if !(short_path.starts_with(pattern) - || ((is_ci_rustc || is_ci_llvm) && is_builder_config)) - { - continue; - } - short_path = short_path.strip_prefix(pattern).unwrap_or(short_path); - let dst_path = dst.join(short_path); - self.verbose(|| { - println!("extracting {} to {}", original_path.display(), dst.display()) - }); - if !t!(member.unpack_in(dst)) { - panic!("path traversal attack ??"); - } - if let Some(record) = &mut recorded_entries { - t!(writeln!(record, "{}", short_path.to_str().unwrap())); - } - let src_path = dst.join(original_path); - if src_path.is_dir() && dst_path.exists() { - continue; - } - t!(move_file(src_path, dst_path)); - } - let dst_dir = dst.join(directory_prefix); - if dst_dir.exists() { - t!(fs::remove_dir_all(&dst_dir), format!("failed to remove {}", dst_dir.display())); - } + unpack(self.verbose > 0, tarball, dst, pattern); } /// Returns whether the SHA256 checksum of `path` matches `expected`. 
+ #[cfg(test)] pub(crate) fn verify(&self, path: &Path, expected: &str) -> bool { - use sha2::Digest; - - self.verbose(|| println!("verifying {}", path.display())); - - if self.dry_run() { - return false; - } - - let mut hasher = sha2::Sha256::new(); - - let file = t!(File::open(path)); - let mut reader = BufReader::new(file); - - loop { - let buffer = t!(reader.fill_buf()); - let l = buffer.len(); - // break if EOF - if l == 0 { - break; - } - hasher.update(buffer); - reader.consume(l); - } - - let checksum = hex_encode(hasher.finalize().as_slice()); - let verified = checksum == expected; - - if !verified { - println!( - "invalid checksum: \n\ - found: {checksum}\n\ - expected: {expected}", - ); - } - - verified + verify(self.verbose > 0, self.dry_run(), path, expected) } } @@ -388,6 +95,7 @@ fn recorded_entries(dst: &Path, pattern: &str) -> Option> { Some(BufWriter::new(t!(File::create(dst.join(name))))) } +#[derive(Clone)] enum DownloadSource { CI, Dist, @@ -420,61 +128,11 @@ impl Config { cargo_clippy } - #[cfg(test)] - pub(crate) fn maybe_download_rustfmt(&self) -> Option { - Some(PathBuf::new()) - } - /// NOTE: rustfmt is a completely different toolchain than the bootstrap compiler, so it can't /// reuse target directories or artifacts - #[cfg(not(test))] pub(crate) fn maybe_download_rustfmt(&self) -> Option { - use build_helper::stage0_parser::VersionMetadata; - - if self.dry_run() { - return Some(PathBuf::new()); - } - - let VersionMetadata { date, version } = self.stage0_metadata.rustfmt.as_ref()?; - let channel = format!("{version}-{date}"); - - let host = self.host_target; - let bin_root = self.out.join(host).join("rustfmt"); - let rustfmt_path = bin_root.join("bin").join(exe("rustfmt", host)); - let rustfmt_stamp = BuildStamp::new(&bin_root).with_prefix("rustfmt").add_stamp(channel); - if rustfmt_path.exists() && rustfmt_stamp.is_up_to_date() { - return Some(rustfmt_path); - } - - self.download_component( - DownloadSource::Dist, - format!("rustfmt-{version}-{build}.tar.xz", build = host.triple), - "rustfmt-preview", - date, - "rustfmt", - ); - self.download_component( - DownloadSource::Dist, - format!("rustc-{version}-{build}.tar.xz", build = host.triple), - "rustc", - date, - "rustfmt", - ); - - if self.should_fix_bins_and_dylibs() { - self.fix_bin_or_dylib(&bin_root.join("bin").join("rustfmt")); - self.fix_bin_or_dylib(&bin_root.join("bin").join("cargo-fmt")); - let lib_dir = bin_root.join("lib"); - for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) { - let lib = t!(lib); - if path_is_dylib(&lib.path()) { - self.fix_bin_or_dylib(&lib.path()); - } - } - } - - t!(rustfmt_stamp.write()); - Some(rustfmt_path) + let dwn_ctx: DownloadContext<'_> = self.into(); + maybe_download_rustfmt(dwn_ctx) } pub(crate) fn ci_rust_std_contents(&self) -> Vec { @@ -514,28 +172,9 @@ impl Config { ); } - #[cfg(test)] - pub(crate) fn download_beta_toolchain(&self) {} - - #[cfg(not(test))] pub(crate) fn download_beta_toolchain(&self) { - self.verbose(|| println!("downloading stage0 beta artifacts")); - - let date = &self.stage0_metadata.compiler.date; - let version = &self.stage0_metadata.compiler.version; - let extra_components = ["cargo"]; - - let download_beta_component = |config: &Config, filename, prefix: &_, date: &_| { - config.download_component(DownloadSource::Dist, filename, prefix, date, "stage0") - }; - - self.download_toolchain( - version, - "stage0", - date, - &extra_components, - download_beta_component, - ); + let dwn_ctx: DownloadContext<'_> = self.into(); + 
download_beta_toolchain(dwn_ctx); } fn download_toolchain( @@ -607,91 +246,8 @@ impl Config { key: &str, destination: &str, ) { - if self.dry_run() { - return; - } - - let cache_dst = - self.bootstrap_cache_path.as_ref().cloned().unwrap_or_else(|| self.out.join("cache")); - - let cache_dir = cache_dst.join(key); - if !cache_dir.exists() { - t!(fs::create_dir_all(&cache_dir)); - } - - let bin_root = self.out.join(self.host_target).join(destination); - let tarball = cache_dir.join(&filename); - let (base_url, url, should_verify) = match mode { - DownloadSource::CI => { - let dist_server = if self.llvm_assertions { - self.stage0_metadata.config.artifacts_with_llvm_assertions_server.clone() - } else { - self.stage0_metadata.config.artifacts_server.clone() - }; - let url = format!( - "{}/{filename}", - key.strip_suffix(&format!("-{}", self.llvm_assertions)).unwrap() - ); - (dist_server, url, false) - } - DownloadSource::Dist => { - let dist_server = env::var("RUSTUP_DIST_SERVER") - .unwrap_or(self.stage0_metadata.config.dist_server.to_string()); - // NOTE: make `dist` part of the URL because that's how it's stored in src/stage0 - (dist_server, format!("dist/{key}/{filename}"), true) - } - }; - - // For the stage0 compiler, put special effort into ensuring the checksums are valid. - let checksum = if should_verify { - let error = format!( - "src/stage0 doesn't contain a checksum for {url}. \ - Pre-built artifacts might not be available for this \ - target at this time, see https://doc.rust-lang.org/nightly\ - /rustc/platform-support.html for more information." - ); - let sha256 = self.stage0_metadata.checksums_sha256.get(&url).expect(&error); - if tarball.exists() { - if self.verify(&tarball, sha256) { - self.unpack(&tarball, &bin_root, prefix); - return; - } else { - self.verbose(|| { - println!( - "ignoring cached file {} due to failed verification", - tarball.display() - ) - }); - self.remove(&tarball); - } - } - Some(sha256) - } else if tarball.exists() { - self.unpack(&tarball, &bin_root, prefix); - return; - } else { - None - }; - - let mut help_on_error = ""; - if destination == "ci-rustc" { - help_on_error = "ERROR: failed to download pre-built rustc from CI - -NOTE: old builds get deleted after a certain time -HELP: if trying to compile an old commit of rustc, disable `download-rustc` in bootstrap.toml: - -[rust] -download-rustc = false -"; - } - self.download_file(&format!("{base_url}/{url}"), &tarball, help_on_error); - if let Some(sha256) = checksum - && !self.verify(&tarball, sha256) - { - panic!("failed to verify {}", tarball.display()); - } - - self.unpack(&tarball, &bin_root, prefix); + let dwn_ctx: DownloadContext<'_> = self.into(); + download_component(dwn_ctx, mode, filename, prefix, key, destination); } #[cfg(test)] @@ -934,3 +490,504 @@ pub(crate) fn is_download_ci_available(target_triple: &str, llvm_assertions: boo SUPPORTED_PLATFORMS.contains(&target_triple) } } + +fn download_toolchain<'a>( + dwn_ctx: impl AsRef>, + version: &str, + sysroot: &str, + stamp_key: &str, + extra_components: &[&str], + destination: &str, + mode: DownloadSource, +) { + let dwn_ctx = dwn_ctx.as_ref(); + let host = dwn_ctx.host_target.triple; + let bin_root = dwn_ctx.out.join(host).join(sysroot); + let rustc_stamp = BuildStamp::new(&bin_root).with_prefix("rustc").add_stamp(stamp_key); + + if !bin_root.join("bin").join(exe("rustc", dwn_ctx.host_target)).exists() + || !rustc_stamp.is_up_to_date() + { + if bin_root.exists() { + t!(fs::remove_dir_all(&bin_root)); + } + let filename = 
format!("rust-std-{version}-{host}.tar.xz"); + let pattern = format!("rust-std-{host}"); + download_component(dwn_ctx, mode.clone(), filename, &pattern, stamp_key, destination); + let filename = format!("rustc-{version}-{host}.tar.xz"); + download_component(dwn_ctx, mode.clone(), filename, "rustc", stamp_key, destination); + + for component in extra_components { + let filename = format!("{component}-{version}-{host}.tar.xz"); + download_component(dwn_ctx, mode.clone(), filename, component, stamp_key, destination); + } + + if should_fix_bins_and_dylibs(dwn_ctx.patch_binaries_for_nix, dwn_ctx.exec_ctx) { + fix_bin_or_dylib(dwn_ctx.out, &bin_root.join("bin").join("rustc"), dwn_ctx.exec_ctx); + fix_bin_or_dylib(dwn_ctx.out, &bin_root.join("bin").join("rustdoc"), dwn_ctx.exec_ctx); + fix_bin_or_dylib( + dwn_ctx.out, + &bin_root.join("libexec").join("rust-analyzer-proc-macro-srv"), + dwn_ctx.exec_ctx, + ); + let lib_dir = bin_root.join("lib"); + for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) { + let lib = t!(lib); + if path_is_dylib(&lib.path()) { + fix_bin_or_dylib(dwn_ctx.out, &lib.path(), dwn_ctx.exec_ctx); + } + } + } + + t!(rustc_stamp.write()); + } +} + +pub(crate) fn remove(dry_run: bool, f: &Path) { + if dry_run { + return; + } + fs::remove_file(f).unwrap_or_else(|_| panic!("failed to remove {f:?}")); +} + +fn fix_bin_or_dylib(out: &Path, fname: &Path, exec_ctx: &ExecutionContext) { + assert_eq!(SHOULD_FIX_BINS_AND_DYLIBS.get(), Some(&true)); + println!("attempting to patch {}", fname.display()); + + // Only build `.nix-deps` once. + static NIX_DEPS_DIR: OnceLock = OnceLock::new(); + let mut nix_build_succeeded = true; + let nix_deps_dir = NIX_DEPS_DIR.get_or_init(|| { + // Run `nix-build` to "build" each dependency (which will likely reuse + // the existing `/nix/store` copy, or at most download a pre-built copy). + // + // Importantly, we create a gc-root called `.nix-deps` in the `build/` + // directory, but still reference the actual `/nix/store` path in the rpath + // as it makes it significantly more robust against changes to the location of + // the `.nix-deps` location. + // + // bintools: Needed for the path of `ld-linux.so` (via `nix-support/dynamic-linker`). + // zlib: Needed as a system dependency of `libLLVM-*.so`. + // patchelf: Needed for patching ELF binaries (see doc comment above). 
+ let nix_deps_dir = out.join(".nix-deps"); + const NIX_EXPR: &str = " + with (import {}); + symlinkJoin { + name = \"rust-stage0-dependencies\"; + paths = [ + zlib + patchelf + stdenv.cc.bintools + ]; + } + "; + nix_build_succeeded = command("nix-build") + .allow_failure() + .args([Path::new("-E"), Path::new(NIX_EXPR), Path::new("-o"), &nix_deps_dir]) + .run_capture_stdout(exec_ctx) + .is_success(); + nix_deps_dir + }); + if !nix_build_succeeded { + return; + } + + let mut patchelf = command(nix_deps_dir.join("bin/patchelf")); + patchelf.args(&[ + OsString::from("--add-rpath"), + OsString::from(t!(fs::canonicalize(nix_deps_dir)).join("lib")), + ]); + if !path_is_dylib(fname) { + // Finally, set the correct .interp for binaries + let dynamic_linker_path = nix_deps_dir.join("nix-support/dynamic-linker"); + let dynamic_linker = t!(fs::read_to_string(dynamic_linker_path)); + patchelf.args(["--set-interpreter", dynamic_linker.trim_end()]); + } + patchelf.arg(fname); + let _ = patchelf.allow_failure().run_capture_stdout(exec_ctx); +} + +fn should_fix_bins_and_dylibs( + patch_binaries_for_nix: Option, + exec_ctx: &ExecutionContext, +) -> bool { + let val = *SHOULD_FIX_BINS_AND_DYLIBS.get_or_init(|| { + let uname = command("uname").allow_failure().arg("-s").run_capture_stdout(exec_ctx); + if uname.is_failure() { + return false; + } + let output = uname.stdout(); + if !output.starts_with("Linux") { + return false; + } + // If the user has asked binaries to be patched for Nix, then + // don't check for NixOS or `/lib`. + // NOTE: this intentionally comes after the Linux check: + // - patchelf only works with ELF files, so no need to run it on Mac or Windows + // - On other Unix systems, there is no stable syscall interface, so Nix doesn't manage the global libc. + if let Some(explicit_value) = patch_binaries_for_nix { + return explicit_value; + } + + // Use `/etc/os-release` instead of `/etc/NIXOS`. + // The latter one does not exist on NixOS when using tmpfs as root. 
+ let is_nixos = match File::open("/etc/os-release") { + Err(e) if e.kind() == ErrorKind::NotFound => false, + Err(e) => panic!("failed to access /etc/os-release: {e}"), + Ok(os_release) => BufReader::new(os_release).lines().any(|l| { + let l = l.expect("reading /etc/os-release"); + matches!(l.trim(), "ID=nixos" | "ID='nixos'" | "ID=\"nixos\"") + }), + }; + if !is_nixos { + let in_nix_shell = env::var("IN_NIX_SHELL"); + if let Ok(in_nix_shell) = in_nix_shell { + eprintln!( + "The IN_NIX_SHELL environment variable is `{in_nix_shell}`; \ + you may need to set `patch-binaries-for-nix=true` in bootstrap.toml" + ); + } + } + is_nixos + }); + if val { + eprintln!("INFO: You seem to be using Nix."); + } + val +} + +fn download_component<'a>( + dwn_ctx: impl AsRef>, + mode: DownloadSource, + filename: String, + prefix: &str, + key: &str, + destination: &str, +) { + let dwn_ctx = dwn_ctx.as_ref(); + + if dwn_ctx.dry_run { + return; + } + + let cache_dst = + dwn_ctx.bootstrap_cache_path.as_ref().cloned().unwrap_or_else(|| dwn_ctx.out.join("cache")); + + let cache_dir = cache_dst.join(key); + if !cache_dir.exists() { + t!(fs::create_dir_all(&cache_dir)); + } + + let bin_root = dwn_ctx.out.join(dwn_ctx.host_target).join(destination); + let tarball = cache_dir.join(&filename); + let (base_url, url, should_verify) = match mode { + DownloadSource::CI => { + let dist_server = if dwn_ctx.llvm_assertions { + dwn_ctx.stage0_metadata.config.artifacts_with_llvm_assertions_server.clone() + } else { + dwn_ctx.stage0_metadata.config.artifacts_server.clone() + }; + let url = format!( + "{}/{filename}", + key.strip_suffix(&format!("-{}", dwn_ctx.llvm_assertions)).unwrap() + ); + (dist_server, url, false) + } + DownloadSource::Dist => { + let dist_server = env::var("RUSTUP_DIST_SERVER") + .unwrap_or(dwn_ctx.stage0_metadata.config.dist_server.to_string()); + // NOTE: make `dist` part of the URL because that's how it's stored in src/stage0 + (dist_server, format!("dist/{key}/{filename}"), true) + } + }; + + // For the stage0 compiler, put special effort into ensuring the checksums are valid. + let checksum = if should_verify { + let error = format!( + "src/stage0 doesn't contain a checksum for {url}. \ + Pre-built artifacts might not be available for this \ + target at this time, see https://doc.rust-lang.org/nightly\ + /rustc/platform-support.html for more information." 
+ ); + let sha256 = dwn_ctx.stage0_metadata.checksums_sha256.get(&url).expect(&error); + if tarball.exists() { + if verify(dwn_ctx.verbose, dwn_ctx.dry_run, &tarball, sha256) { + unpack(dwn_ctx.verbose, &tarball, &bin_root, prefix); + return; + } else { + if dwn_ctx.verbose { + println!( + "ignoring cached file {} due to failed verification", + tarball.display() + ) + } + remove(dwn_ctx.dry_run, &tarball); + } + } + Some(sha256) + } else if tarball.exists() { + unpack(dwn_ctx.verbose, &tarball, &bin_root, prefix); + return; + } else { + None + }; + + let mut help_on_error = ""; + if destination == "ci-rustc" { + help_on_error = "ERROR: failed to download pre-built rustc from CI + +NOTE: old builds get deleted after a certain time +HELP: if trying to compile an old commit of rustc, disable `download-rustc` in bootstrap.toml: + +[rust] +download-rustc = false +"; + } + download_file(dwn_ctx, &format!("{base_url}/{url}"), &tarball, help_on_error); + if let Some(sha256) = checksum + && !verify(dwn_ctx.verbose, dwn_ctx.dry_run, &tarball, sha256) + { + panic!("failed to verify {}", tarball.display()); + } + + unpack(dwn_ctx.verbose, &tarball, &bin_root, prefix); +} + +pub(crate) fn verify(verbose: bool, dry_run: bool, path: &Path, expected: &str) -> bool { + use sha2::Digest; + + if verbose { + println!("verifying {}", path.display()); + } + + if dry_run { + return false; + } + + let mut hasher = sha2::Sha256::new(); + + let file = t!(File::open(path)); + let mut reader = BufReader::new(file); + + loop { + let buffer = t!(reader.fill_buf()); + let l = buffer.len(); + // break if EOF + if l == 0 { + break; + } + hasher.update(buffer); + reader.consume(l); + } + + let checksum = hex_encode(hasher.finalize().as_slice()); + let verified = checksum == expected; + + if !verified { + println!( + "invalid checksum: \n\ + found: {checksum}\n\ + expected: {expected}", + ); + } + + verified +} + +fn unpack(verbose: bool, tarball: &Path, dst: &Path, pattern: &str) { + eprintln!("extracting {} to {}", tarball.display(), dst.display()); + if !dst.exists() { + t!(fs::create_dir_all(dst)); + } + + // `tarball` ends with `.tar.xz`; strip that suffix + // example: `rust-dev-nightly-x86_64-unknown-linux-gnu` + let uncompressed_filename = + Path::new(tarball.file_name().expect("missing tarball filename")).file_stem().unwrap(); + let directory_prefix = Path::new(Path::new(uncompressed_filename).file_stem().unwrap()); + + // decompress the file + let data = t!(File::open(tarball), format!("file {} not found", tarball.display())); + let decompressor = XzDecoder::new(BufReader::new(data)); + + let mut tar = tar::Archive::new(decompressor); + + let is_ci_rustc = dst.ends_with("ci-rustc"); + let is_ci_llvm = dst.ends_with("ci-llvm"); + + // `compile::Sysroot` needs to know the contents of the `rustc-dev` tarball to avoid adding + // it to the sysroot unless it was explicitly requested. But parsing the 100 MB tarball is slow. + // Cache the entries when we extract it so we only have to read it once. 
+ let mut recorded_entries = if is_ci_rustc { recorded_entries(dst, pattern) } else { None }; + + for member in t!(tar.entries()) { + let mut member = t!(member); + let original_path = t!(member.path()).into_owned(); + // skip the top-level directory + if original_path == directory_prefix { + continue; + } + let mut short_path = t!(original_path.strip_prefix(directory_prefix)); + let is_builder_config = short_path.to_str() == Some(BUILDER_CONFIG_FILENAME); + + if !(short_path.starts_with(pattern) || ((is_ci_rustc || is_ci_llvm) && is_builder_config)) + { + continue; + } + short_path = short_path.strip_prefix(pattern).unwrap_or(short_path); + let dst_path = dst.join(short_path); + if verbose { + println!("extracting {} to {}", original_path.display(), dst.display()); + } + + if !t!(member.unpack_in(dst)) { + panic!("path traversal attack ??"); + } + if let Some(record) = &mut recorded_entries { + t!(writeln!(record, "{}", short_path.to_str().unwrap())); + } + let src_path = dst.join(original_path); + if src_path.is_dir() && dst_path.exists() { + continue; + } + t!(move_file(src_path, dst_path)); + } + let dst_dir = dst.join(directory_prefix); + if dst_dir.exists() { + t!(fs::remove_dir_all(&dst_dir), format!("failed to remove {}", dst_dir.display())); + } +} + +fn download_file<'a>( + dwn_ctx: impl AsRef>, + url: &str, + dest_path: &Path, + help_on_error: &str, +) { + let dwn_ctx = dwn_ctx.as_ref(); + + if dwn_ctx.verbose { + println!("download {url}"); + } + // Use a temporary file in case we crash while downloading, to avoid a corrupt download in cache/. + let tempfile = tempdir(dwn_ctx.out).join(dest_path.file_name().unwrap()); + // While bootstrap itself only supports http and https downloads, downstream forks might + // need to download components from other protocols. The match allows them adding more + // protocols without worrying about merge conflicts if we change the HTTP implementation. + match url.split_once("://").map(|(proto, _)| proto) { + Some("http") | Some("https") => download_http_with_retries( + dwn_ctx.host_target, + dwn_ctx.is_running_on_ci, + dwn_ctx.exec_ctx, + &tempfile, + url, + help_on_error, + ), + Some(other) => panic!("unsupported protocol {other} in {url}"), + None => panic!("no protocol in {url}"), + } + t!(move_file(&tempfile, dest_path), format!("failed to rename {tempfile:?} to {dest_path:?}")); +} + +/// Create a temporary directory in `out` and return its path. +/// +/// NOTE: this temporary directory is shared between all steps; +/// if you need an empty directory, create a new subdirectory inside it. +pub(crate) fn tempdir(out: &Path) -> PathBuf { + let tmp = out.join("tmp"); + t!(fs::create_dir_all(&tmp)); + tmp +} + +fn download_http_with_retries( + host_target: TargetSelection, + is_running_on_ci: bool, + exec_ctx: &ExecutionContext, + tempfile: &Path, + url: &str, + help_on_error: &str, +) { + println!("downloading {url}"); + // Try curl. If that fails and we are on windows, fallback to PowerShell. + // options should be kept in sync with + // src/bootstrap/src/core/download.rs + // for consistency + let mut curl = command("curl").allow_failure(); + curl.args([ + // follow redirect + "--location", + // timeout if speed is < 10 bytes/sec for > 30 seconds + "--speed-time", + "30", + "--speed-limit", + "10", + // timeout if cannot connect within 30 seconds + "--connect-timeout", + "30", + // output file + "--output", + tempfile.to_str().unwrap(), + // if there is an error, don't restart the download, + // instead continue where it left off. 
+        "--continue-at",
+        "-",
+        // retry up to 3 times. note that this means a maximum of 4
+        // attempts will be made, since the first attempt isn't a *re*try.
+        "--retry",
+        "3",
+        // show errors, even if --silent is specified
+        "--show-error",
+        // set timestamp of downloaded file to that of the server
+        "--remote-time",
+        // fail on non-ok http status
+        "--fail",
+    ]);
+    // Don't print progress in CI; the \r wrapping looks bad and downloads don't take long enough for progress to be useful.
+    if is_running_on_ci {
+        curl.arg("--silent");
+    } else {
+        curl.arg("--progress-bar");
+    }
+    // --retry-all-errors was added in 7.71.0, don't use it if curl is old.
+    if curl_version(exec_ctx) >= semver::Version::new(7, 71, 0) {
+        curl.arg("--retry-all-errors");
+    }
+    curl.arg(url);
+    if !curl.run(exec_ctx) {
+        if host_target.contains("windows-msvc") {
+            eprintln!("Fallback to PowerShell");
+            for _ in 0..3 {
+                let powershell = command("PowerShell.exe").allow_failure().args([
+                    "/nologo",
+                    "-Command",
+                    "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;",
+                    &format!(
+                        "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')",
+                        url, tempfile.to_str().expect("invalid UTF-8 not supported with powershell downloads"),
+                    ),
+                ]).run_capture_stdout(exec_ctx);
+
+                if powershell.is_failure() {
+                    return;
+                }
+
+                eprintln!("\nspurious failure, trying again");
+            }
+        }
+        if !help_on_error.is_empty() {
+            eprintln!("{help_on_error}");
+        }
+        crate::exit!(1);
+    }
+}
+
+fn curl_version(exec_ctx: &ExecutionContext) -> semver::Version {
+    let mut curl = command("curl");
+    curl.arg("-V");
+    let curl = curl.run_capture_stdout(exec_ctx);
+    if curl.is_failure() {
+        return semver::Version::new(1, 0, 0);
+    }
+    let output = curl.stdout();
+    extract_curl_version(output)
+}

From 8facd063f93d5640790b200fca4454d2c76c5305 Mon Sep 17 00:00:00 2001
From: bit-aloo
Date: Sun, 27 Jul 2025 12:16:03 +0530
Subject: [PATCH 3/7] move download_beta_toolchain out of impl as it's used during config initialization

---
 src/bootstrap/src/core/download.rs | 25 +++++++++++++++++++++
 1 file changed, 25 insertions(+)

diff --git a/src/bootstrap/src/core/download.rs b/src/bootstrap/src/core/download.rs
index f3a10048b6559..7e4ae6ebd7e8c 100644
--- a/src/bootstrap/src/core/download.rs
+++ b/src/bootstrap/src/core/download.rs
@@ -491,6 +491,31 @@ pub(crate) fn is_download_ci_available(target_triple: &str, llvm_assertions: boo
     }
 }
 
+#[cfg(test)]
+pub(crate) fn download_beta_toolchain<'a>(dwn_ctx: impl AsRef<DownloadContext<'a>>) {}
+
+#[cfg(not(test))]
+pub(crate) fn download_beta_toolchain<'a>(dwn_ctx: impl AsRef<DownloadContext<'a>>) {
+    let dwn_ctx = dwn_ctx.as_ref();
+    if dwn_ctx.verbose {
+        println!("downloading stage0 beta artifacts");
+    }
+
+    let date = dwn_ctx.stage0_metadata.compiler.date.clone();
+    let version = dwn_ctx.stage0_metadata.compiler.version.clone();
+    let extra_components = ["cargo"];
+    let sysroot = "stage0";
+    download_toolchain(
+        dwn_ctx,
+        &version,
+        sysroot,
+        &date,
+        &extra_components,
+        "stage0",
+        DownloadSource::Dist,
+    );
+}
+
 fn download_toolchain<'a>(
     dwn_ctx: impl AsRef<DownloadContext<'a>>,
     version: &str,

From 19dc195ac2db97874020a423e690d1edf7f1d8fe Mon Sep 17 00:00:00 2001
From: bit-aloo
Date: Sun, 27 Jul 2025 12:17:11 +0530
Subject: [PATCH 4/7] move download_rustfmt out of impl as it's used during config initialization

---
 src/bootstrap/src/core/download.rs | 66 ++++++++++++++++++++++++++++++
 1 file changed, 66 insertions(+)

diff --git a/src/bootstrap/src/core/download.rs b/src/bootstrap/src/core/download.rs
index 7e4ae6ebd7e8c..745acb04275a0 100644
--- a/src/bootstrap/src/core/download.rs
+++ b/src/bootstrap/src/core/download.rs
@@ -491,6 +491,72 @@ pub(crate) fn is_download_ci_available(target_triple: &str, llvm_assertions: boo
     }
 }
 
+#[cfg(test)]
+pub(crate) fn maybe_download_rustfmt<'a>(
+    dwn_ctx: impl AsRef<DownloadContext<'a>>,
+) -> Option<PathBuf> {
+    Some(PathBuf::new())
+}
+
+/// NOTE: rustfmt is a completely different toolchain than the bootstrap compiler, so it can't
+/// reuse target directories or artifacts
+#[cfg(not(test))]
+pub(crate) fn maybe_download_rustfmt<'a>(
+    dwn_ctx: impl AsRef<DownloadContext<'a>>,
+) -> Option<PathBuf> {
+    use build_helper::stage0_parser::VersionMetadata;
+
+    let dwn_ctx = dwn_ctx.as_ref();
+
+    if dwn_ctx.dry_run {
+        return Some(PathBuf::new());
+    }
+
+    let VersionMetadata { date, version } = dwn_ctx.stage0_metadata.rustfmt.as_ref()?;
+    let channel = format!("{version}-{date}");
+
+    let host = dwn_ctx.host_target;
+    let bin_root = dwn_ctx.out.join(host).join("rustfmt");
+    let rustfmt_path = bin_root.join("bin").join(exe("rustfmt", host));
+    let rustfmt_stamp = BuildStamp::new(&bin_root).with_prefix("rustfmt").add_stamp(channel);
+    if rustfmt_path.exists() && rustfmt_stamp.is_up_to_date() {
+        return Some(rustfmt_path);
+    }
+
+    download_component(
+        dwn_ctx,
+        DownloadSource::Dist,
+        format!("rustfmt-{version}-{build}.tar.xz", build = host.triple),
+        "rustfmt-preview",
+        date,
+        "rustfmt",
+    );
+
+    download_component(
+        dwn_ctx,
+        DownloadSource::Dist,
+        format!("rustc-{version}-{build}.tar.xz", build = host.triple),
+        "rustc",
+        date,
+        "rustfmt",
+    );
+
+    if should_fix_bins_and_dylibs(dwn_ctx.patch_binaries_for_nix, dwn_ctx.exec_ctx) {
+        fix_bin_or_dylib(dwn_ctx.out, &bin_root.join("bin").join("rustfmt"), dwn_ctx.exec_ctx);
+        fix_bin_or_dylib(dwn_ctx.out, &bin_root.join("bin").join("cargo-fmt"), dwn_ctx.exec_ctx);
+        let lib_dir = bin_root.join("lib");
+        for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) {
+            let lib = t!(lib);
+            if path_is_dylib(&lib.path()) {
+                fix_bin_or_dylib(dwn_ctx.out, &lib.path(), dwn_ctx.exec_ctx);
+            }
+        }
+    }
+
+    t!(rustfmt_stamp.write());
+    Some(rustfmt_path)
+}
+
 #[cfg(test)]
 pub(crate) fn download_beta_toolchain<'a>(dwn_ctx: impl AsRef<DownloadContext<'a>>) {}
 

From f63f212a043dbc60fc28a4bf845662863d532fd7 Mon Sep 17 00:00:00 2001
From: bit-aloo
Date: Mon, 28 Jul 2025 18:53:24 +0530
Subject: [PATCH 5/7] use dry_run and verbose directly from exec_ctx

---
 src/bootstrap/src/core/download.rs | 57 ++++++++++++++----------
 1 file changed, 27 insertions(+), 30 deletions(-)

diff --git a/src/bootstrap/src/core/download.rs b/src/bootstrap/src/core/download.rs
index 745acb04275a0..2011e3067c476 100644
--- a/src/bootstrap/src/core/download.rs
+++ b/src/bootstrap/src/core/download.rs
@@ -38,7 +38,7 @@ impl Config {
     }
 
     pub(crate) fn remove(&self, f: &Path) {
-        remove(self.dry_run(), f);
+        remove(&self.exec_ctx, f);
     }
 
     /// Create a temporary directory in `out` and return its path.
@@ -74,13 +74,13 @@ impl Config {
     }
 
     fn unpack(&self, tarball: &Path, dst: &Path, pattern: &str) {
-        unpack(self.verbose > 0, tarball, dst, pattern);
+        unpack(&self.exec_ctx, tarball, dst, pattern);
     }
 
     /// Returns whether the SHA256 checksum of `path` matches `expected`.
#[cfg(test)] pub(crate) fn verify(&self, path: &Path, expected: &str) -> bool { - verify(self.verbose > 0, self.dry_run(), path, expected) + verify(&self.exec_ctx, path, expected) } } @@ -414,12 +414,10 @@ pub(crate) struct DownloadContext<'a> { out: &'a Path, patch_binaries_for_nix: Option, exec_ctx: &'a ExecutionContext, - verbose: bool, stage0_metadata: &'a build_helper::stage0_parser::Stage0, llvm_assertions: bool, bootstrap_cache_path: &'a Option, is_running_on_ci: bool, - dry_run: bool, } impl<'a> AsRef> for DownloadContext<'a> { @@ -435,12 +433,10 @@ impl<'a> From<&'a Config> for DownloadContext<'a> { out: &value.out, patch_binaries_for_nix: value.patch_binaries_for_nix, exec_ctx: &value.exec_ctx, - verbose: value.verbose > 0, stage0_metadata: &value.stage0_metadata, llvm_assertions: value.llvm_assertions, bootstrap_cache_path: &value.bootstrap_cache_path, is_running_on_ci: value.is_running_on_ci, - dry_run: value.dry_run(), } } } @@ -508,7 +504,7 @@ pub(crate) fn maybe_download_rustfmt<'a>( let dwn_ctx = dwn_ctx.as_ref(); - if dwn_ctx.dry_run { + if dwn_ctx.exec_ctx.dry_run() { return Some(PathBuf::new()); } @@ -563,9 +559,9 @@ pub(crate) fn download_beta_toolchain<'a>(dwn_ctx: impl AsRef(dwn_ctx: impl AsRef>) { let dwn_ctx = dwn_ctx.as_ref(); - if dwn_ctx.verbose { + dwn_ctx.exec_ctx.verbose(|| { println!("downloading stage0 beta artifacts"); - } + }); let date = dwn_ctx.stage0_metadata.compiler.date.clone(); let version = dwn_ctx.stage0_metadata.compiler.version.clone(); @@ -634,8 +630,8 @@ fn download_toolchain<'a>( } } -pub(crate) fn remove(dry_run: bool, f: &Path) { - if dry_run { +pub(crate) fn remove(exec_ctx: &ExecutionContext, f: &Path) { + if exec_ctx.dry_run() { return; } fs::remove_file(f).unwrap_or_else(|_| panic!("failed to remove {f:?}")); @@ -757,7 +753,7 @@ fn download_component<'a>( ) { let dwn_ctx = dwn_ctx.as_ref(); - if dwn_ctx.dry_run { + if dwn_ctx.exec_ctx.dry_run() { return; } @@ -802,22 +798,22 @@ fn download_component<'a>( ); let sha256 = dwn_ctx.stage0_metadata.checksums_sha256.get(&url).expect(&error); if tarball.exists() { - if verify(dwn_ctx.verbose, dwn_ctx.dry_run, &tarball, sha256) { - unpack(dwn_ctx.verbose, &tarball, &bin_root, prefix); + if verify(dwn_ctx.exec_ctx, &tarball, sha256) { + unpack(dwn_ctx.exec_ctx, &tarball, &bin_root, prefix); return; } else { - if dwn_ctx.verbose { + dwn_ctx.exec_ctx.verbose(|| { println!( "ignoring cached file {} due to failed verification", tarball.display() ) - } - remove(dwn_ctx.dry_run, &tarball); + }); + remove(dwn_ctx.exec_ctx, &tarball); } } Some(sha256) } else if tarball.exists() { - unpack(dwn_ctx.verbose, &tarball, &bin_root, prefix); + unpack(dwn_ctx.exec_ctx, &tarball, &bin_root, prefix); return; } else { None @@ -836,22 +832,22 @@ download-rustc = false } download_file(dwn_ctx, &format!("{base_url}/{url}"), &tarball, help_on_error); if let Some(sha256) = checksum - && !verify(dwn_ctx.verbose, dwn_ctx.dry_run, &tarball, sha256) + && !verify(dwn_ctx.exec_ctx, &tarball, sha256) { panic!("failed to verify {}", tarball.display()); } - unpack(dwn_ctx.verbose, &tarball, &bin_root, prefix); + unpack(dwn_ctx.exec_ctx, &tarball, &bin_root, prefix); } -pub(crate) fn verify(verbose: bool, dry_run: bool, path: &Path, expected: &str) -> bool { +pub(crate) fn verify(exec_ctx: &ExecutionContext, path: &Path, expected: &str) -> bool { use sha2::Digest; - if verbose { + exec_ctx.verbose(|| { println!("verifying {}", path.display()); - } + }); - if dry_run { + if exec_ctx.dry_run() { return false; } @@ -885,7 +881,7 @@ 
pub(crate) fn verify(verbose: bool, dry_run: bool, path: &Path, expected: &str) verified } -fn unpack(verbose: bool, tarball: &Path, dst: &Path, pattern: &str) { +fn unpack(exec_ctx: &ExecutionContext, tarball: &Path, dst: &Path, pattern: &str) { eprintln!("extracting {} to {}", tarball.display(), dst.display()); if !dst.exists() { t!(fs::create_dir_all(dst)); @@ -927,9 +923,10 @@ fn unpack(verbose: bool, tarball: &Path, dst: &Path, pattern: &str) { } short_path = short_path.strip_prefix(pattern).unwrap_or(short_path); let dst_path = dst.join(short_path); - if verbose { + + exec_ctx.verbose(|| { println!("extracting {} to {}", original_path.display(), dst.display()); - } + }); if !t!(member.unpack_in(dst)) { panic!("path traversal attack ??"); @@ -957,9 +954,9 @@ fn download_file<'a>( ) { let dwn_ctx = dwn_ctx.as_ref(); - if dwn_ctx.verbose { + dwn_ctx.exec_ctx.verbose(|| { println!("download {url}"); - } + }); // Use a temporary file in case we crash while downloading, to avoid a corrupt download in cache/. let tempfile = tempdir(dwn_ctx.out).join(dest_path.file_name().unwrap()); // While bootstrap itself only supports http and https downloads, downstream forks might From db42d5b36c804be21efd9c537a5a6771ec1c8201 Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Mon, 28 Jul 2025 19:23:04 +0530 Subject: [PATCH 6/7] remove config wrappers of download_toolchain and maybe_download_fmt and during config parsing directly invoke cdownload methods --- src/bootstrap/src/core/config/config.rs | 18 +++++++++++++----- src/bootstrap/src/core/download.rs | 12 ------------ 2 files changed, 13 insertions(+), 17 deletions(-) diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs index 6e04f11542438..90001f9ae318b 100644 --- a/src/bootstrap/src/core/config/config.rs +++ b/src/bootstrap/src/core/config/config.rs @@ -45,7 +45,9 @@ use crate::core::config::{ DebuginfoLevel, DryRun, GccCiMode, LlvmLibunwind, Merge, ReplaceOpt, RustcLto, SplitDebuginfo, StringOrBool, set, threads_from_config, }; -use crate::core::download::is_download_ci_available; +use crate::core::download::{ + DownloadContext, download_beta_toolchain, is_download_ci_available, maybe_download_rustfmt, +}; use crate::utils::channel; use crate::utils::exec::{ExecutionContext, command}; use crate::utils::helpers::{exe, get_host_target}; @@ -801,7 +803,8 @@ impl Config { } rustc } else { - config.download_beta_toolchain(); + let dwn_ctx = DownloadContext::from(&config); + download_beta_toolchain(dwn_ctx); config .out .join(config.host_target) @@ -827,7 +830,8 @@ impl Config { } cargo } else { - config.download_beta_toolchain(); + let dwn_ctx = DownloadContext::from(&config); + download_beta_toolchain(dwn_ctx); config.initial_sysroot.join("bin").join(exe("cargo", config.host_target)) }; @@ -994,8 +998,12 @@ impl Config { config.apply_dist_config(toml.dist); - config.initial_rustfmt = - if let Some(r) = rustfmt { Some(r) } else { config.maybe_download_rustfmt() }; + config.initial_rustfmt = if let Some(r) = rustfmt { + Some(r) + } else { + let dwn_ctx = DownloadContext::from(&config); + maybe_download_rustfmt(dwn_ctx) + }; if matches!(config.lld_mode, LldMode::SelfContained) && !config.lld_enabled diff --git a/src/bootstrap/src/core/download.rs b/src/bootstrap/src/core/download.rs index 2011e3067c476..7ec6c62a07d02 100644 --- a/src/bootstrap/src/core/download.rs +++ b/src/bootstrap/src/core/download.rs @@ -128,13 +128,6 @@ impl Config { cargo_clippy } - /// NOTE: rustfmt is a completely different toolchain than 
the bootstrap compiler, so it can't - /// reuse target directories or artifacts - pub(crate) fn maybe_download_rustfmt(&self) -> Option { - let dwn_ctx: DownloadContext<'_> = self.into(); - maybe_download_rustfmt(dwn_ctx) - } - pub(crate) fn ci_rust_std_contents(&self) -> Vec { self.ci_component_contents(".rust-std-contents") } @@ -172,11 +165,6 @@ impl Config { ); } - pub(crate) fn download_beta_toolchain(&self) { - let dwn_ctx: DownloadContext<'_> = self.into(); - download_beta_toolchain(dwn_ctx); - } - fn download_toolchain( &self, version: &str, From c071101da73c417612155eb44e4cf4b9b3ee7499 Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Mon, 28 Jul 2025 22:23:56 +0530 Subject: [PATCH 7/7] make sure to populate DownloadState dependencies before its initialization in config parsing --- src/bootstrap/src/core/config/config.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs index 90001f9ae318b..9644ade00b346 100644 --- a/src/bootstrap/src/core/config/config.rs +++ b/src/bootstrap/src/core/config/config.rs @@ -797,6 +797,11 @@ impl Config { ); } + config.patch_binaries_for_nix = patch_binaries_for_nix; + config.bootstrap_cache_path = bootstrap_cache_path; + config.llvm_assertions = + toml.llvm.as_ref().is_some_and(|llvm| llvm.assertions.unwrap_or(false)); + config.initial_rustc = if let Some(rustc) = rustc { if !flags_skip_stage0_validation { config.check_stage0_version(&rustc, "rustc"); @@ -867,7 +872,6 @@ impl Config { config.reuse = reuse.map(PathBuf::from); config.submodules = submodules; config.android_ndk = android_ndk; - config.bootstrap_cache_path = bootstrap_cache_path; set(&mut config.low_priority, low_priority); set(&mut config.compiler_docs, compiler_docs); set(&mut config.library_docs_private_items, library_docs_private_items); @@ -886,7 +890,6 @@ impl Config { set(&mut config.local_rebuild, local_rebuild); set(&mut config.print_step_timings, print_step_timings); set(&mut config.print_step_rusage, print_step_rusage); - config.patch_binaries_for_nix = patch_binaries_for_nix; config.verbose = cmp::max(config.verbose, flags_verbose as usize); @@ -895,9 +898,6 @@ impl Config { config.apply_install_config(toml.install); - config.llvm_assertions = - toml.llvm.as_ref().is_some_and(|llvm| llvm.assertions.unwrap_or(false)); - let file_content = t!(fs::read_to_string(config.src.join("src/ci/channel"))); let ci_channel = file_content.trim_end();
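
Taken together, the series lets the stage0 downloads run before `Config` is fully constructed: a borrow-only `DownloadContext` is built from whatever fields have already been parsed and handed to the free functions. Below is a minimal sketch of the resulting call pattern, not part of the series itself; `stage0_downloads` is a hypothetical helper, while `DownloadContext::from`, `download_beta_toolchain`, and `maybe_download_rustfmt` are the items the patches introduce. It assumes the imports of src/bootstrap/src/core/download.rs are in scope.

// Hypothetical helper: it only mirrors the call sites that patches 6 and 7 add to
// src/bootstrap/src/core/config/config.rs. `config` is a partially initialized
// bootstrap `Config`; patch 7 reorders parsing so the fields read by
// `DownloadContext::from` (patch_binaries_for_nix, bootstrap_cache_path,
// llvm_assertions) are populated before this point.
fn stage0_downloads(config: &Config) -> Option<PathBuf> {
    // Borrow-only view over the config fields the download code needs.
    let dwn_ctx = DownloadContext::from(config);
    // Fetches the stage0 beta rustc and cargo into `<out>/<host>/stage0`.
    download_beta_toolchain(dwn_ctx);

    // The free functions take `impl AsRef<DownloadContext<'_>>` by value, and the
    // context is just a bundle of references, so a fresh one is built per call.
    let dwn_ctx = DownloadContext::from(config);
    // Returns the rustfmt path, or None when stage0 metadata has no rustfmt entry.
    maybe_download_rustfmt(dwn_ctx)
}

Because the context only borrows from `Config`, constructing it is essentially free, which is why the config.rs call sites simply rebuild it wherever it is needed.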