diff --git a/src/Cargo.lock b/src/Cargo.lock index 8f25820d3a527..a9b2cb6b813eb 100644 --- a/src/Cargo.lock +++ b/src/Cargo.lock @@ -130,6 +130,7 @@ dependencies = [ "lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + "pretty_assertions 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.27 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.27 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)", @@ -527,6 +528,11 @@ name = "diff" version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "difference" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "dlmalloc" version = "0.0.0" @@ -1355,6 +1361,15 @@ name = "pkg-config" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "pretty_assertions" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "ansi_term 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)", + "difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "proc_macro" version = "0.0.0" @@ -2804,6 +2819,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum curl-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f46e49c7125131f5afaded06944d6888b55cbdf8eba05dae73c954019b907961" "checksum derive-new 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "415f627ab054041c3eb748c2e1da0ef751989f5f0c386b63a098e545854a98ba" "checksum diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = 
"3c2b69f912779fbb121ceb775d74d51e915af17aaebc38d28a592843a2dd0a3a" +"checksum difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" "checksum docopt 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d8acd393692c503b168471874953a2531df0e9ab77d0b6bbc582395743300a4a" "checksum dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab" "checksum duct 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e45aa15fe0a8a8f511e6d834626afd55e49b62e5c8802e18328a87e8a8f6065c" @@ -2887,6 +2903,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831" "checksum pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0a6dda33d67c26f0aac90d324ab2eb7239c819fc7b2552fe9faa4fe88441edc8" "checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903" +"checksum pretty_assertions 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "693786a0719bce004cf22e8d1923f678ba47b1e01589a973146cb10fe1be677e" "checksum pulldown-cmark 0.0.15 (registry+https://github.com/rust-lang/crates.io-index)" = "378e941dbd392c101f2cb88097fa4d7167bc421d4b88de3ff7dbee503bc3233b" "checksum pulldown-cmark 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d6fdf85cda6cadfae5428a54661d431330b312bc767ddbc57adbedc24da66e32" "checksum quick-error 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eda5fe9b71976e62bc81b781206aaa076401769b2143379d3eb2118388babac4" diff --git a/src/bootstrap/Cargo.toml b/src/bootstrap/Cargo.toml index 2d47834131784..c09a3d865230f 100644 --- a/src/bootstrap/Cargo.toml +++ 
b/src/bootstrap/Cargo.toml @@ -42,3 +42,6 @@ serde_json = "1.0.2" toml = "0.4" lazy_static = "0.2" time = "0.1" + +[dev-dependencies] +pretty_assertions = "0.5" diff --git a/src/bootstrap/bin/main.rs b/src/bootstrap/bin/main.rs index d02bc7972ae9a..75802a8ed6a63 100644 --- a/src/bootstrap/bin/main.rs +++ b/src/bootstrap/bin/main.rs @@ -21,7 +21,7 @@ extern crate bootstrap; use std::env; -use bootstrap::{Config, Build}; +use bootstrap::{Build, Config}; fn main() { let args = env::args().skip(1).collect::>(); diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs index ca35a896e08c8..c063eb5480668 100644 --- a/src/bootstrap/bin/rustc.rs +++ b/src/bootstrap/bin/rustc.rs @@ -45,12 +45,15 @@ fn main() { // Dirty code for borrowing issues let mut new = None; if let Some(current_as_str) = args[i].to_str() { - if (&*args[i - 1] == "-C" && current_as_str.starts_with("metadata")) || - current_as_str.starts_with("-Cmetadata") { + if (&*args[i - 1] == "-C" && current_as_str.starts_with("metadata")) + || current_as_str.starts_with("-Cmetadata") + { new = Some(format!("{}-{}", current_as_str, s)); } } - if let Some(new) = new { args[i] = new.into(); } + if let Some(new) = new { + args[i] = new.into(); + } } } @@ -95,19 +98,21 @@ fn main() { let rustc = env::var_os(rustc).unwrap_or_else(|| panic!("{:?} was not set", rustc)); let libdir = env::var_os(libdir).unwrap_or_else(|| panic!("{:?} was not set", libdir)); let mut dylib_path = bootstrap::util::dylib_path(); - dylib_path.insert(0, PathBuf::from(libdir)); + dylib_path.insert(0, PathBuf::from(&libdir)); let mut cmd = Command::new(rustc); cmd.args(&args) .arg("--cfg") .arg(format!("stage{}", stage)) - .env(bootstrap::util::dylib_path_var(), - env::join_paths(&dylib_path).unwrap()); + .env( + bootstrap::util::dylib_path_var(), + env::join_paths(&dylib_path).unwrap(), + ); if let Some(target) = target { // The stage0 compiler has a special sysroot distinct from what we // actually downloaded, so we just always pass 
the `--sysroot` option. - cmd.arg("--sysroot").arg(sysroot); + cmd.arg("--sysroot").arg(&sysroot); // When we build Rust dylibs they're all intended for intermediate // usage, so make sure we pass the -Cprefer-dynamic flag instead of @@ -130,9 +135,7 @@ fn main() { cmd.arg(format!("-Clinker={}", target_linker)); } - let crate_name = args.windows(2) - .find(|a| &*a[0] == "--crate-name") - .unwrap(); + let crate_name = args.windows(2).find(|a| &*a[0] == "--crate-name").unwrap(); let crate_name = &*crate_name[1]; // If we're compiling specifically the `panic_abort` crate then we pass @@ -147,8 +150,7 @@ fn main() { // `compiler_builtins` are unconditionally compiled with panic=abort to // workaround undefined references to `rust_eh_unwind_resume` generated // otherwise, see issue https://github.com/rust-lang/rust/issues/43095. - if crate_name == "panic_abort" || - crate_name == "compiler_builtins" && stage != "0" { + if crate_name == "panic_abort" || crate_name == "compiler_builtins" && stage != "0" { cmd.arg("-C").arg("panic=abort"); } @@ -160,7 +162,11 @@ fn main() { cmd.arg("-Cdebuginfo=1"); } let debug_assertions = match env::var("RUSTC_DEBUG_ASSERTIONS") { - Ok(s) => if s == "true" { "y" } else { "n" }, + Ok(s) => if s == "true" { + "y" + } else { + "n" + }, Err(..) => "n", }; @@ -169,7 +175,8 @@ fn main() { if crate_name == "compiler_builtins" { cmd.arg("-C").arg("debug-assertions=no"); } else { - cmd.arg("-C").arg(format!("debug-assertions={}", debug_assertions)); + cmd.arg("-C") + .arg(format!("debug-assertions={}", debug_assertions)); } if let Ok(s) = env::var("RUSTC_CODEGEN_UNITS") { @@ -182,10 +189,12 @@ fn main() { // Emit save-analysis info. 
if env::var("RUSTC_SAVE_ANALYSIS") == Ok("api".to_string()) { cmd.arg("-Zsave-analysis"); - cmd.env("RUST_SAVE_ANALYSIS_CONFIG", - "{\"output_file\": null,\"full_docs\": false,\ - \"pub_only\": true,\"reachable_only\": false,\ - \"distro_crate\": true,\"signatures\": false,\"borrow_data\": false}"); + cmd.env( + "RUST_SAVE_ANALYSIS_CONFIG", + "{\"output_file\": null,\"full_docs\": false,\ + \"pub_only\": true,\"reachable_only\": false,\ + \"distro_crate\": true,\"signatures\": false,\"borrow_data\": false}", + ); } // Dealing with rpath here is a little special, so let's go into some @@ -216,7 +225,6 @@ fn main() { // to change a flag in a binary? if env::var("RUSTC_RPATH") == Ok("true".to_string()) { let rpath = if target.contains("apple") { - // Note that we need to take one extra step on macOS to also pass // `-Wl,-instal_name,@rpath/...` to get things to work right. To // do that we pass a weird flag to the compiler to get it to do @@ -244,7 +252,10 @@ fn main() { } // When running miri tests, we need to generate MIR for all libraries - if env::var("TEST_MIRI").ok().map_or(false, |val| val == "true") { + if env::var("TEST_MIRI") + .ok() + .map_or(false, |val| val == "true") + { cmd.arg("-Zalways-encode-mir"); if stage != "0" { cmd.arg("-Zmiri"); @@ -280,6 +291,8 @@ fn main() { if verbose > 1 { eprintln!("rustc command: {:?}", cmd); + eprintln!("sysroot: {:?}", sysroot); + eprintln!("libdir: {:?}", libdir); } // Actually run the compiler! 
diff --git a/src/bootstrap/bin/rustdoc.rs b/src/bootstrap/bin/rustdoc.rs index 798d5c3eb6736..f2b63a736a313 100644 --- a/src/bootstrap/bin/rustdoc.rs +++ b/src/bootstrap/bin/rustdoc.rs @@ -45,8 +45,10 @@ fn main() { .arg("dox") .arg("--sysroot") .arg(sysroot) - .env(bootstrap::util::dylib_path_var(), - env::join_paths(&dylib_path).unwrap()); + .env( + bootstrap::util::dylib_path_var(), + env::join_paths(&dylib_path).unwrap(), + ); // Force all crates compiled by this compiler to (a) be unstable and (b) // allow the `rustc_private` feature to link to other unstable crates @@ -55,7 +57,10 @@ fn main() { cmd.arg("-Z").arg("force-unstable-if-unmarked"); } if let Some(linker) = env::var_os("RUSTC_TARGET_LINKER") { - cmd.arg("--linker").arg(linker).arg("-Z").arg("unstable-options"); + cmd.arg("--linker") + .arg(linker) + .arg("-Z") + .arg("unstable-options"); } // Bootstrap's Cargo-command builder sets this variable to the current Rust version; let's pick @@ -63,8 +68,9 @@ fn main() { if let Some(version) = env::var_os("RUSTDOC_CRATE_VERSION") { // This "unstable-options" can be removed when `--crate-version` is stabilized cmd.arg("-Z") - .arg("unstable-options") - .arg("--crate-version").arg(version); + .arg("unstable-options") + .arg("--crate-version") + .arg(version); } if verbose > 1 { diff --git a/src/bootstrap/bin/sccache-plus-cl.rs b/src/bootstrap/bin/sccache-plus-cl.rs index 8584014d48d5f..0fe17be0ef46d 100644 --- a/src/bootstrap/bin/sccache-plus-cl.rs +++ b/src/bootstrap/bin/sccache-plus-cl.rs @@ -20,12 +20,12 @@ fn main() { env::remove_var("CXX"); let mut cfg = cc::Build::new(); cfg.cargo_metadata(false) - .out_dir("/") - .target(&target) - .host(&target) - .opt_level(0) - .warnings(false) - .debug(false); + .out_dir("/") + .target(&target) + .host(&target) + .opt_level(0) + .warnings(false) + .debug(false); let compiler = cfg.get_compiler(); // Invoke sccache with said compiler diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index 
5966bb65df9c8..8e8237dbe06f4 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -751,6 +751,7 @@ def bootstrap(): env["SRC"] = build.rust_root env["BOOTSTRAP_PARENT_ID"] = str(os.getpid()) env["BOOTSTRAP_PYTHON"] = sys.executable + env["BUILD_DIR"] = build.build_dir run(args, env=env, verbose=build.verbose) diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs index b5946b44e05ef..3b716beebd7bf 100644 --- a/src/bootstrap/builder.rs +++ b/src/bootstrap/builder.rs @@ -13,18 +13,23 @@ use std::cell::RefCell; use std::collections::BTreeSet; use std::env; use std::fmt::Debug; -use std::fs; +use std::io::{BufRead, BufReader}; use std::hash::Hash; -use std::ops::Deref; +use std::ops::{Deref, DerefMut}; use std::path::{Path, PathBuf}; -use std::process::Command; +use std::process::{Command, Stdio}; +use filetime::FileTime; +use serde_json; + +use fs; use compile; use install; use dist; -use util::{exe, libdir, add_lib_path}; +use util::{add_lib_path, is_dylib, exe, libdir, CiEnv}; +use build_helper::mtime; use {Build, Mode}; -use cache::{INTERNER, Interned, Cache}; +use cache::{Cache, Intern, Interned}; use check; use test; use flags::Subcommand; @@ -50,22 +55,20 @@ impl<'a> Deref for Builder<'a> { } } -pub trait Step: 'static + Clone + Debug + PartialEq + Eq + Hash { +pub trait Step: 'static + Copy + Clone + Debug + PartialEq + Eq + Hash { /// `PathBuf` when directories are created or to return a `Compiler` once /// it's been assembled. type Output: Clone; + fn for_test(self, builder: &Builder) -> Self::Output { + self.run(builder) + } + const DEFAULT: bool = false; /// Run this rule for all hosts without cross compiling. const ONLY_HOSTS: bool = false; - /// Run this rule for all targets, but only with the native host. - const ONLY_BUILD_TARGETS: bool = false; - - /// Only run this step with the build triple as host and target. - const ONLY_BUILD: bool = false; - /// Primary function to execute this rule. 
Can call `builder.ensure(...)` /// with other steps to run those. fn run(self, builder: &Builder) -> Self::Output; @@ -101,8 +104,6 @@ pub struct RunConfig<'a> { struct StepDescription { default: bool, only_hosts: bool, - only_build_targets: bool, - only_build: bool, should_run: fn(ShouldRun) -> ShouldRun, make_run: fn(RunConfig), name: &'static str, @@ -115,7 +116,9 @@ struct PathSet { impl PathSet { fn empty() -> PathSet { - PathSet { set: BTreeSet::new() } + PathSet { + set: BTreeSet::new(), + } } fn one>(path: P) -> PathSet { @@ -129,7 +132,11 @@ impl PathSet { } fn path(&self, builder: &Builder) -> PathBuf { - self.set.iter().next().unwrap_or(&builder.build.src).to_path_buf() + self.set + .iter() + .next() + .unwrap_or(&builder.config.src) + .to_path_buf() } } @@ -138,8 +145,6 @@ impl StepDescription { StepDescription { default: S::DEFAULT, only_hosts: S::ONLY_HOSTS, - only_build_targets: S::ONLY_BUILD_TARGETS, - only_build: S::ONLY_BUILD, should_run: S::should_run, make_run: S::make_run, name: unsafe { ::std::intrinsics::type_name::() }, @@ -151,27 +156,22 @@ impl StepDescription { eprintln!("Skipping {:?} because it is excluded", pathset); return; } else if !builder.config.exclude.is_empty() { - eprintln!("{:?} not skipped for {:?} -- not in {:?}", pathset, - self.name, builder.config.exclude); + eprintln!( + "{:?} not skipped for {:?} -- not in {:?}", + pathset, self.name, builder.config.exclude + ); + } + let hosts = &builder.config.general.host; + + if self.only_hosts && !builder.config.run_host_only { + return; } - let build = builder.build; - let hosts = if self.only_build_targets || self.only_build { - build.build_triple() - } else { - &build.hosts - }; // Determine the targets participating in this rule. 
let targets = if self.only_hosts { - if build.config.run_host_only { - &[] - } else if self.only_build { - build.build_triple() - } else { - &build.hosts - } + &builder.config.general.host } else { - &build.targets + &builder.config.general.target }; for host in hosts { @@ -182,46 +182,12 @@ impl StepDescription { host: *host, target: *target, }; + eprintln!("{}: {} -> {}: {:?}", + self.name, run.host, run.target, run.path); (self.make_run)(run); } } } - - fn run(v: &[StepDescription], builder: &Builder, paths: &[PathBuf]) { - let should_runs = v.iter().map(|desc| { - (desc.should_run)(ShouldRun::new(builder)) - }).collect::>(); - - // sanity checks on rules - for (desc, should_run) in v.iter().zip(&should_runs) { - assert!(!should_run.paths.is_empty(), - "{:?} should have at least one pathset", desc.name); - } - - if paths.is_empty() { - for (desc, should_run) in v.iter().zip(should_runs) { - if desc.default && should_run.is_really_default { - for pathset in &should_run.paths { - desc.maybe_run(builder, pathset); - } - } - } - } else { - for path in paths { - let mut attempted_run = false; - for (desc, should_run) in v.iter().zip(&should_runs) { - if let Some(pathset) = should_run.pathset_for_path(path) { - attempted_run = true; - desc.maybe_run(builder, pathset); - } - } - - if !attempted_run { - panic!("Error: no rules matched {}.", path.display()); - } - } - } - } } #[derive(Clone)] @@ -304,6 +270,84 @@ pub enum Kind { } impl<'a> Builder<'a> { + pub fn new(build: &Build) -> Builder { + let kind = match build.config.cmd { + Subcommand::Build => Kind::Build, + Subcommand::Check => Kind::Check, + Subcommand::Doc { .. } => Kind::Doc, + Subcommand::Test { .. } => Kind::Test, + Subcommand::Bench { .. } => Kind::Bench, + Subcommand::Dist => Kind::Dist, + Subcommand::Install { .. } => Kind::Install, + Subcommand::Clean { .. 
} => panic!(), + }; + + let builder = Builder { + build, + top_stage: build.config.stage, + kind, + cache: Cache::new(), + stack: RefCell::new(Vec::new()), + }; + + if kind == Kind::Dist { + assert!( + !builder.config.rust.test_miri, + "Do not distribute with miri enabled.\n\ + The distributed libraries would include all MIR (increasing binary size). + The distributed MIR would include validation statements." + ); + } + + builder + } + + pub fn execute(&self) { + self.run_step_descriptions(&Builder::get_step_descriptions(self.kind), &self.config.paths); + } + + fn run_step_descriptions(&self, v: &[StepDescription], paths: &[PathBuf]) { + let should_runs = v.iter() + .map(|desc| (desc.should_run)(ShouldRun::new(self))) + .collect::>(); + + // sanity checks on rules + for (desc, should_run) in v.iter().zip(&should_runs) { + assert!( + !should_run.paths.is_empty(), + "{:?} should have at least one pathset", + desc.name + ); + } + + if paths.is_empty() { + for (desc, should_run) in v.iter().zip(should_runs) { + if desc.default && should_run.is_really_default { + for pathset in &should_run.paths { + desc.maybe_run(&self, pathset); + } + } + } + } else { + for path in paths { + let mut attempted_run = false; + if path == Path::new("nonexistent/path/to/trigger/cargo/metadata") { + continue; + } + for (desc, should_run) in v.iter().zip(&should_runs) { + if let Some(pathset) = should_run.pathset_for_path(path) { + attempted_run = true; + desc.maybe_run(&self, pathset); + } + } + + if !attempted_run { + panic!("Error: no rules matched {}.", path.display()); + } + } + } + } + fn get_step_descriptions(kind: Kind) -> Vec { macro_rules! 
describe { ($($rule:ty),+ $(,)*) => {{ @@ -311,32 +355,115 @@ impl<'a> Builder<'a> { }}; } match kind { - Kind::Build => describe!(compile::Std, compile::Test, compile::Rustc, - compile::StartupObjects, tool::BuildManifest, tool::Rustbook, tool::ErrorIndex, - tool::UnstableBookGen, tool::Tidy, tool::Linkchecker, tool::CargoTest, - tool::Compiletest, tool::RemoteTestServer, tool::RemoteTestClient, - tool::RustInstaller, tool::Cargo, tool::Rls, tool::Rustdoc, tool::Clippy, - native::Llvm, tool::Rustfmt, tool::Miri), + Kind::Build => describe!( + compile::Std, + compile::Test, + compile::Rustc, + compile::StartupObjects, + tool::BuildManifest, + tool::Rustbook, + tool::ErrorIndex, + tool::UnstableBookGen, + tool::Tidy, + tool::Linkchecker, + tool::CargoTest, + tool::Compiletest, + tool::RemoteTestServer, + tool::RemoteTestClient, + tool::RustInstaller, + tool::Cargo, + tool::Rls, + tool::Rustdoc, + tool::Clippy, + native::Llvm, + tool::Rustfmt, + tool::Miri + ), Kind::Check => describe!(check::Std, check::Test, check::Rustc), - Kind::Test => describe!(test::Tidy, test::Bootstrap, test::Ui, test::RunPass, - test::CompileFail, test::ParseFail, test::RunFail, test::RunPassValgrind, - test::MirOpt, test::Codegen, test::CodegenUnits, test::Incremental, test::Debuginfo, - test::UiFullDeps, test::RunPassFullDeps, test::RunFailFullDeps, - test::CompileFailFullDeps, test::IncrementalFullDeps, test::Rustdoc, test::Pretty, - test::RunPassPretty, test::RunFailPretty, test::RunPassValgrindPretty, - test::RunPassFullDepsPretty, test::RunFailFullDepsPretty, test::RunMake, - test::Crate, test::CrateLibrustc, test::Rustdoc, test::Linkcheck, test::Cargotest, - test::Cargo, test::Rls, test::Docs, test::ErrorIndex, test::Distcheck, - test::Rustfmt, test::Miri, test::Clippy, test::RustdocJS, test::RustdocTheme), + Kind::Test => describe!( + test::Tidy, + test::Bootstrap, + test::Ui, + test::RunPass, + test::CompileFail, + test::ParseFail, + test::RunFail, + test::RunPassValgrind, + 
test::MirOpt, + test::Codegen, + test::CodegenUnits, + test::Incremental, + test::Debuginfo, + test::UiFullDeps, + test::RunPassFullDeps, + test::RunFailFullDeps, + test::CompileFailFullDeps, + test::IncrementalFullDeps, + test::Rustdoc, + test::Pretty, + test::RunPassPretty, + test::RunFailPretty, + test::RunPassValgrindPretty, + test::RunPassFullDepsPretty, + test::RunFailFullDepsPretty, + test::RunMake, + test::Crate, + test::CrateLibrustc, + test::Rustdoc, + test::Linkcheck, + test::Cargotest, + test::Cargo, + test::Rls, + test::Docs, + test::ErrorIndex, + test::Distcheck, + test::Rustfmt, + test::Miri, + test::Clippy, + test::RustdocJS, + test::RustdocTheme + ), Kind::Bench => describe!(test::Crate, test::CrateLibrustc), - Kind::Doc => describe!(doc::UnstableBook, doc::UnstableBookGen, doc::TheBook, - doc::Standalone, doc::Std, doc::Test, doc::Rustc, doc::ErrorIndex, doc::Nomicon, - doc::Reference, doc::Rustdoc, doc::RustByExample, doc::CargoBook), - Kind::Dist => describe!(dist::Docs, dist::Mingw, dist::Rustc, dist::DebuggerScripts, - dist::Std, dist::Analysis, dist::Src, dist::PlainSourceTarball, dist::Cargo, - dist::Rls, dist::Rustfmt, dist::Extended, dist::HashSign), - Kind::Install => describe!(install::Docs, install::Std, install::Cargo, install::Rls, - install::Rustfmt, install::Analysis, install::Src, install::Rustc), + Kind::Doc => describe!( + doc::UnstableBook, + doc::UnstableBookGen, + doc::TheBook, + doc::Standalone, + doc::Std, + doc::Test, + doc::Rustc, + doc::ErrorIndex, + doc::Nomicon, + doc::Reference, + doc::Rustdoc, + doc::RustByExample, + doc::CargoBook + ), + Kind::Dist => describe!( + dist::Docs, + dist::Mingw, + dist::Rustc, + dist::DebuggerScripts, + dist::Std, + dist::Analysis, + dist::Src, + dist::PlainSourceTarball, + dist::Cargo, + dist::Rls, + dist::Rustfmt, + dist::Extended, + dist::HashSign + ), + Kind::Install => describe!( + install::Docs, + install::Std, + install::Cargo, + install::Rls, + install::Rustfmt, + 
install::Analysis, + install::Src, + install::Rustc + ), } } @@ -353,7 +480,7 @@ impl<'a> Builder<'a> { let builder = Builder { build, - top_stage: build.config.stage.unwrap_or(2), + top_stage: build.config.stage, kind, cache: Cache::new(), stack: RefCell::new(Vec::new()), @@ -373,44 +500,9 @@ impl<'a> Builder<'a> { Some(help) } - pub fn run(build: &Build) { - let (kind, paths) = match build.config.cmd { - Subcommand::Build { ref paths } => (Kind::Build, &paths[..]), - Subcommand::Check { ref paths } => (Kind::Check, &paths[..]), - Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]), - Subcommand::Test { ref paths, .. } => (Kind::Test, &paths[..]), - Subcommand::Bench { ref paths, .. } => (Kind::Bench, &paths[..]), - Subcommand::Dist { ref paths } => (Kind::Dist, &paths[..]), - Subcommand::Install { ref paths } => (Kind::Install, &paths[..]), - Subcommand::Clean { .. } => panic!(), - }; - - if let Some(path) = paths.get(0) { - if path == Path::new("nonexistent/path/to/trigger/cargo/metadata") { - return; - } - } - - let builder = Builder { - build, - top_stage: build.config.stage.unwrap_or(2), - kind, - cache: Cache::new(), - stack: RefCell::new(Vec::new()), - }; - - if kind == Kind::Dist { - assert!(!build.config.test_miri, "Do not distribute with miri enabled.\n\ - The distributed libraries would include all MIR (increasing binary size). - The distributed MIR would include validation statements."); - } - - StepDescription::run(&Builder::get_step_descriptions(builder.kind), &builder, paths); - } - pub fn default_doc(&self, paths: Option<&[PathBuf]>) { let paths = paths.unwrap_or(&[]); - StepDescription::run(&Builder::get_step_descriptions(Kind::Doc), self, paths); + self.run_step_descriptions(&Builder::get_step_descriptions(Kind::Doc), paths); } /// Obtain a compiler at a given stage and for a given host. 
Explicitly does @@ -418,7 +510,9 @@ impl<'a> Builder<'a> { /// obtained through this function, since it ensures that they are valid /// (i.e., built and assembled). pub fn compiler(&self, stage: u32, host: Interned) -> Compiler { - self.ensure(compile::Assemble { target_compiler: Compiler { stage, host } }) + self.ensure(compile::Assemble { + target_compiler: Compiler { stage, host }, + }) } pub fn sysroot(&self, compiler: Compiler) -> Interned { @@ -428,7 +522,9 @@ impl<'a> Builder<'a> { /// Returns the libdir where the standard library and other artifacts are /// found for a compiler's sysroot. pub fn sysroot_libdir( - &self, compiler: Compiler, target: Interned + &self, + compiler: Compiler, + target: Interned, ) -> Interned { #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] struct Libdir { @@ -445,16 +541,20 @@ impl<'a> Builder<'a> { fn run(self, builder: &Builder) -> Interned { let compiler = self.compiler; let config = &builder.build.config; - let lib = if compiler.stage >= 1 && config.libdir_relative().is_some() { - builder.build.config.libdir_relative().unwrap() + let lib = if compiler.stage >= 1 { + config.libdir_relative() } else { Path::new("lib") }; - let sysroot = builder.sysroot(self.compiler).join(lib) - .join("rustlib").join(self.target).join("lib"); + let sysroot = builder + .sysroot(self.compiler) + .join(lib) + .join("rustlib") + .join(self.target) + .join("lib"); let _ = fs::remove_dir_all(&sysroot); t!(fs::create_dir_all(&sysroot)); - INTERNER.intern_path(sysroot) + sysroot.intern() } } self.ensure(Libdir { compiler, target }) @@ -472,7 +572,7 @@ impl<'a> Builder<'a> { /// Windows. pub fn rustc_libdir(&self, compiler: Compiler) -> PathBuf { if compiler.is_snapshot(self) { - self.build.rustc_snapshot_libdir() + self.rustc_snapshot_libdir() } else { self.sysroot(compiler).join(libdir(&compiler.host)) } @@ -485,7 +585,7 @@ impl<'a> Builder<'a> { // compiler live next to the compiler and the system will find them // automatically. 
if cfg!(windows) { - return + return; } add_lib_path(vec![self.rustc_libdir(compiler)], cmd); @@ -494,9 +594,11 @@ impl<'a> Builder<'a> { /// Get a path to the compiler specified. pub fn rustc(&self, compiler: Compiler) -> PathBuf { if compiler.is_snapshot(self) { - self.initial_rustc.clone() + self.config.general.initial_rustc.clone() } else { - self.sysroot(compiler).join("bin").join(exe("rustc", &compiler.host)) + self.sysroot(compiler) + .join("bin") + .join(exe("rustc", &compiler.host)) } } @@ -505,21 +607,180 @@ impl<'a> Builder<'a> { } pub fn rustdoc_cmd(&self, host: Interned) -> Command { - let mut cmd = Command::new(&self.out.join("bootstrap/debug/rustdoc")); + let mut cmd = Command::new(&self.config.general.out.join("bootstrap/debug/rustdoc")); let compiler = self.compiler(self.top_stage, host); cmd.env("RUSTC_STAGE", compiler.stage.to_string()) - .env("RUSTC_SYSROOT", self.sysroot(compiler)) - .env("RUSTDOC_LIBDIR", self.sysroot_libdir(compiler, self.build.build)) - .env("CFG_RELEASE_CHANNEL", &self.build.config.channel) - .env("RUSTDOC_REAL", self.rustdoc(host)) - .env("RUSTDOC_CRATE_VERSION", self.build.rust_version()) - .env("RUSTC_BOOTSTRAP", "1"); - if let Some(linker) = self.build.linker(host) { + .env("RUSTC_SYSROOT", self.sysroot(compiler)) + .env( + "RUSTDOC_LIBDIR", + self.sysroot_libdir(compiler, self.config.general.build), + ) + .env("CFG_RELEASE_CHANNEL", &self.config.rust.channel) + .env("RUSTDOC_REAL", self.rustdoc(host)) + .env("RUSTDOC_CRATE_VERSION", self.rust_version()) + .env("RUSTC_BOOTSTRAP", "1"); + if let Some(linker) = self.linker(host) { cmd.env("RUSTC_TARGET_LINKER", linker); } cmd } + /// Cargo's output path for the standard library in a given stage, compiled + /// by a particular compiler for the specified target. 
+ pub fn libstd_stamp(&self, compiler: Compiler, target: Interned) -> PathBuf { + self + .cargo_out(compiler, Mode::Libstd, target) + .join(".libstd.stamp") + } + + /// Cargo's output path for libtest in a given stage, compiled by a particular + /// compiler for the specified target. + pub fn libtest_stamp(&self, compiler: Compiler, target: Interned) -> PathBuf { + self + .cargo_out(compiler, Mode::Libtest, target) + .join(".libtest.stamp") + } + + /// Cargo's output path for librustc in a given stage, compiled by a particular + /// compiler for the specified target. + pub fn librustc_stamp(&self, compiler: Compiler, target: Interned) -> PathBuf { + self + .cargo_out(compiler, Mode::Librustc, target) + .join(".librustc.stamp") + } + + pub fn codegen_backend_stamp( + &self, + compiler: Compiler, + target: Interned, + backend: &str, + ) -> PathBuf { + self + .cargo_out(compiler, Mode::Librustc, target) + .join(format!(".librustc_trans-{}.stamp", backend)) + } + + /// Configure cargo to compile the standard library, adding appropriate env vars + /// and such. + fn std_cargo( + &self, + compiler: Compiler, + target: Interned, + cargo: &mut Command, + ) { + let mut features = self.std_features(); + + if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") { + cargo.env("MACOSX_DEPLOYMENT_TARGET", target); + } + + // When doing a local rebuild we tell cargo that we're stage1 rather than + // stage0. This works fine if the local rust and being-built rust have the + // same view of what the default allocator is, but fails otherwise. Since + // we don't have a way to express an allocator preference yet, work + // around the issue in the case of a local rebuild with jemalloc disabled. + if compiler.stage == 0 && self.config.general.local_rebuild + && !self.config.rust.use_jemalloc + { + features.push_str(" force_alloc_system"); + } + + if compiler.stage != 0 && self.config.general.sanitizers { + // This variable is used by the sanitizer runtime crates, e.g. 
+ // rustc_lsan, to build the sanitizer runtime from C code + // When this variable is missing, those crates won't compile the C code, + // so we don't set this variable during stage0 where llvm-config is + // missing + // We also only build the runtimes when --enable-sanitizers (or its + // config.toml equivalent) is used + cargo.env("LLVM_CONFIG", self.llvm_config(target)); + } + + cargo + .arg("--features") + .arg(features) + .arg("--manifest-path") + .arg(self.config.src.join("src/libstd/Cargo.toml")); + + if let Some(target) = self.config.target_config.get(&target) { + if let Some(ref jemalloc) = target.jemalloc { + cargo.env("JEMALLOC_OVERRIDE", jemalloc); + } + } + if target.contains("musl") { + if let Some(p) = self.musl_root(target) { + cargo.env("MUSL_ROOT", p); + } + } + } + + /// Same as `std_cargo`, but for libtest + fn test_cargo(&self, cargo: &mut Command) { + if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") { + cargo.env("MACOSX_DEPLOYMENT_TARGET", target); + } + cargo + .arg("--manifest-path") + .arg(self.config.src.join("src/libtest/Cargo.toml")); + } + + // A little different from {std,test}_cargo since we don't pass + // --manifest-path or --features, since those vary between the + // codegen backend building and normal rustc library building. + fn rustc_cargo(&self, cargo: &mut Command) { + // Set some configuration variables picked up by build scripts and + // the compiler alike + cargo + .env("CFG_RELEASE", self.rust_release()) + .env("CFG_RELEASE_CHANNEL", &self.config.rust.channel) + .env("CFG_VERSION", self.rust_version()) + .env("CFG_PREFIX", &self.config.install.prefix); + + let libdir_relative = self.config.libdir_relative(); + cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative); + + // If we're not building a compiler with debugging information then remove + // these two env vars which would be set otherwise. 
+ if self.config.rust.debuginfo_only_std() { + cargo.env_remove("RUSTC_DEBUGINFO"); + cargo.env_remove("RUSTC_DEBUGINFO_LINES"); + } + + if let Some(ref ver_date) = self.rust_info.commit_date() { + cargo.env("CFG_VER_DATE", ver_date); + } + if let Some(ref ver_hash) = self.rust_info.sha() { + cargo.env("CFG_VER_HASH", ver_hash); + } + if !self.unstable_features() { + cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1"); + } + if let Some(ref s) = self.config.rust.default_linker { + cargo.env("CFG_DEFAULT_LINKER", s); + } + if self.config.rust.experimental_parallel_queries { + cargo.env("RUSTC_PARALLEL_QUERIES", "1"); + } + } + + fn tool_cargo(&self, target: Interned, cargo: &mut Command) { + // We don't want to build tools dynamically as they'll be running across + // stages and such and it's just easier if they're not dynamically linked. + cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); + + if let Some(dir) = self.openssl_install_dir(target) { + cargo.env("OPENSSL_STATIC", "1"); + cargo.env("OPENSSL_DIR", dir); + cargo.env("LIBZ_SYS_STATIC", "1"); + } + + // if tools are using lzma we want to force the build script to build its + // own copy + cargo.env("LZMA_API_STATIC", "1"); + cargo.env("CFG_RELEASE_CHANNEL", &self.config.rust.channel); + cargo.env("CFG_VERSION", self.rust_version()); + } + /// Prepares an invocation of `cargo` to be run. /// /// This will create a `Command` that represents a pending execution of @@ -527,30 +788,79 @@ impl<'a> Builder<'a> { /// rustc compiler, its output will be scoped by `mode`'s output directory, /// it will pass the `--target` flag for the specified `target`, and will be /// executing the Cargo command `cmd`. 
- pub fn cargo(&self, - compiler: Compiler, - mode: Mode, - target: Interned, - cmd: &str) -> Command { - let mut cargo = Command::new(&self.initial_cargo); + pub fn cargo( + &self, + compiler: Compiler, + mode: Mode, + target: Interned, + cmd: &'static str, + ) -> CargoCommand { + let mut cargo = Command::new(&self.config.general.initial_cargo); + cargo.arg(cmd); let out_dir = self.stage_out(compiler, mode); - cargo.env("CARGO_TARGET_DIR", out_dir) - .arg(cmd) - .arg("--target") - .arg(target); + self.clear_if_dirty(&out_dir, &self.rustc(compiler)); + + match mode { + Mode::Libstd => { + self.std_cargo(compiler, target, &mut cargo); + }, + Mode::Libtest => { + self.test_cargo(&mut cargo); + self.clear_if_dirty(&out_dir, &self.libstd_stamp(compiler, target)); + } + Mode::Librustc => { + self.rustc_cargo(&mut cargo); + cargo + .arg("--features") + .arg(self.rustc_features()) + .arg("--manifest-path") + .arg(self.config.src.join("src/rustc/Cargo.toml")); + self.clear_if_dirty(&out_dir, &self.libstd_stamp(compiler, target)); + self.clear_if_dirty(&out_dir, &self.libtest_stamp(compiler, target)); + } + Mode::CodegenBackend(backend) => { + self.rustc_cargo(&mut cargo); + let mut features = self.rustc_features().to_string(); + cargo + .arg("--manifest-path") + .arg(self.config.src.join("src/librustc_trans/Cargo.toml")); + if backend == "emscripten" { + features.push_str(" emscripten"); + } + cargo.arg("--features").arg(features); + self.clear_if_dirty(&out_dir, &self.libstd_stamp(compiler, target)); + self.clear_if_dirty(&out_dir, &self.libtest_stamp(compiler, target)); + } + Mode::TestTool => { + self.tool_cargo(target, &mut cargo); + self.clear_if_dirty(&out_dir, &self.libstd_stamp(compiler, target)); + self.clear_if_dirty(&out_dir, &self.libtest_stamp(compiler, target)); + } + Mode::RustcTool => { + self.tool_cargo(target, &mut cargo); + self.clear_if_dirty(&out_dir, &self.libstd_stamp(compiler, target)); + self.clear_if_dirty(&out_dir, &self.libtest_stamp(compiler, 
target)); + self.clear_if_dirty(&out_dir, &self.librustc_stamp(compiler, target)); + } + } + + cargo + .env("CARGO_TARGET_DIR", out_dir) + .arg("--target") + .arg(target); // If we were invoked from `make` then that's already got a jobserver // set up for us so no need to tell Cargo about jobs all over again. if env::var_os("MAKEFLAGS").is_none() && env::var_os("MFLAGS").is_none() { - cargo.arg("-j").arg(self.jobs().to_string()); + cargo.arg("-j").arg(self.jobs().to_string()); } // FIXME: Temporary fix for https://github.com/rust-lang/cargo/issues/3005 // Force cargo to output binaries with disambiguating hashes in the name - cargo.env("__CARGO_DEFAULT_LIB_METADATA", &self.config.channel); + cargo.env("__CARGO_DEFAULT_LIB_METADATA", &self.config.rust.channel); let stage; - if compiler.stage == 0 && self.local_rebuild { + if compiler.stage == 0 && self.config.general.local_rebuild { // Assume the local-rebuild rustc already has stage1 features. stage = 1; } else { @@ -565,8 +875,14 @@ impl<'a> Builder<'a> { } if !extra_args.is_empty() { - cargo.env("RUSTFLAGS", - format!("{} {}", env::var("RUSTFLAGS").unwrap_or_default(), extra_args)); + cargo.env( + "RUSTFLAGS", + format!( + "{} {}", + env::var("RUSTFLAGS").unwrap_or_default(), + extra_args + ), + ); } // Customize the compiler we're running. 
Specify the compiler to cargo @@ -575,43 +891,61 @@ impl<'a> Builder<'a> { // // These variables are primarily all read by // src/bootstrap/bin/{rustc.rs,rustdoc.rs} - cargo.env("RUSTBUILD_NATIVE_DIR", self.native_dir(target)) - .env("RUSTC", self.out.join("bootstrap/debug/rustc")) - .env("RUSTC_REAL", self.rustc(compiler)) - .env("RUSTC_STAGE", stage.to_string()) - .env("RUSTC_DEBUG_ASSERTIONS", - self.config.rust_debug_assertions.to_string()) - .env("RUSTC_SYSROOT", self.sysroot(compiler)) - .env("RUSTC_LIBDIR", self.rustc_libdir(compiler)) - .env("RUSTC_RPATH", self.config.rust_rpath.to_string()) - .env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc")) - .env("RUSTDOC_REAL", if cmd == "doc" || cmd == "test" { - self.rustdoc(compiler.host) - } else { - PathBuf::from("/path/to/nowhere/rustdoc/not/required") - }) - .env("TEST_MIRI", self.config.test_miri.to_string()) - .env("RUSTC_ERROR_METADATA_DST", self.extended_error_dir()); - - if let Some(host_linker) = self.build.linker(compiler.host) { + cargo + .env("RUSTBUILD_NATIVE_DIR", self.native_dir(target)) + .env( + "RUSTC", + self.config.general.out.join("bootstrap/debug/rustc"), + ) + .env("RUSTC_REAL", self.rustc(compiler)) + .env("RUSTC_STAGE", stage.to_string()) + .env( + "RUSTC_DEBUG_ASSERTIONS", + self.config.rust.debug_assertions().to_string(), + ) + .env("RUSTC_SYSROOT", self.sysroot(compiler)) + .env("RUSTC_LIBDIR", self.rustc_libdir(compiler)) + .env("RUSTC_RPATH", self.config.rust.rpath.to_string()) + .env( + "RUSTDOC", + self.config.general.out.join("bootstrap/debug/rustdoc"), + ) + .env( + "RUSTDOC_REAL", + if cmd == "doc" || cmd == "test" { + self.rustdoc(compiler.host) + } else { + PathBuf::from("/path/to/nowhere/rustdoc/not/required") + }, + ) + .env("TEST_MIRI", self.config.rust.test_miri.to_string()) + .env("RUSTC_ERROR_METADATA_DST", self.extended_error_dir()); + + if let Some(host_linker) = self.linker(compiler.host) { cargo.env("RUSTC_HOST_LINKER", host_linker); } - if let 
Some(target_linker) = self.build.linker(target) { + if let Some(target_linker) = self.linker(target) { cargo.env("RUSTC_TARGET_LINKER", target_linker); } if let Some(ref error_format) = self.config.rustc_error_format { cargo.env("RUSTC_ERROR_FORMAT", error_format); } if cmd != "build" && cmd != "check" { - cargo.env("RUSTDOC_LIBDIR", self.rustc_libdir(self.compiler(2, self.build.build))); + cargo.env( + "RUSTDOC_LIBDIR", + self.rustc_libdir(self.compiler(2, self.config.general.build)), + ); } - if mode != Mode::Tool { + if mode != Mode::TestTool && mode != Mode::RustcTool { // Tools don't get debuginfo right now, e.g. cargo and rls don't // get compiled with debuginfo. // Adding debuginfo increases their sizes by a factor of 3-4. - cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string()); - cargo.env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string()); + cargo.env("RUSTC_DEBUGINFO", self.config.rust.debuginfo().to_string()); + cargo.env( + "RUSTC_DEBUGINFO_LINES", + self.config.rust.debuginfo_lines().to_string(), + ); cargo.env("RUSTC_FORCE_UNSTABLE", "1"); // Currently the compiler depends on crates from crates.io, and @@ -657,12 +991,18 @@ impl<'a> Builder<'a> { // // If LLVM support is disabled we need to use the snapshot compiler to compile // build scripts, as the new compiler doesn't support executables. 
- if mode == Mode::Libstd || !self.build.config.llvm_enabled { - cargo.env("RUSTC_SNAPSHOT", &self.initial_rustc) - .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir()); + if mode == Mode::Libstd || !self.config.llvm.enabled { + cargo + .env("RUSTC_SNAPSHOT", &self.config.general.initial_rustc) + .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir()); } else { - cargo.env("RUSTC_SNAPSHOT", self.rustc(compiler)) - .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_libdir(compiler)); + self.ensure(compile::Std { + compiler, + target: compiler.host, + }); + cargo + .env("RUSTC_SNAPSHOT", self.rustc(compiler)) + .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_libdir(compiler)); } // Ignore incremental modes except for stage0, since we're @@ -676,7 +1016,7 @@ impl<'a> Builder<'a> { cargo.env("RUSTC_ON_FAIL", on_fail); } - cargo.env("RUSTC_VERBOSE", format!("{}", self.verbosity)); + cargo.env("RUSTC_VERBOSE", format!("{}", self.config.general.verbose)); // Throughout the build Cargo can execute a number of build scripts // compiling C/C++ code and we need to pass compilers, archivers, flags, etc @@ -685,37 +1025,39 @@ impl<'a> Builder<'a> { // the options through environment variables that are fetched and understood by both. 
// // FIXME: the guard against msvc shouldn't need to be here - if !target.contains("msvc") { + if !target.contains("msvc") && !cfg!(test) { let cc = self.cc(target); - cargo.env(format!("CC_{}", target), cc) - .env("CC", cc); + cargo.env(format!("CC_{}", target), cc).env("CC", cc); let cflags = self.cflags(target).join(" "); - cargo.env(format!("CFLAGS_{}", target), cflags.clone()) - .env("CFLAGS", cflags.clone()); + cargo + .env(format!("CFLAGS_{}", target), cflags.clone()) + .env("CFLAGS", cflags.clone()); if let Some(ar) = self.ar(target) { let ranlib = format!("{} s", ar.display()); - cargo.env(format!("AR_{}", target), ar) - .env("AR", ar) - .env(format!("RANLIB_{}", target), ranlib.clone()) - .env("RANLIB", ranlib); + cargo + .env(format!("AR_{}", target), ar) + .env("AR", ar) + .env(format!("RANLIB_{}", target), ranlib.clone()) + .env("RANLIB", ranlib); } if let Ok(cxx) = self.cxx(target) { - cargo.env(format!("CXX_{}", target), cxx) - .env("CXX", cxx) - .env(format!("CXXFLAGS_{}", target), cflags.clone()) - .env("CXXFLAGS", cflags); + cargo + .env(format!("CXX_{}", target), cxx) + .env("CXX", cxx) + .env(format!("CXXFLAGS_{}", target), cflags.clone()) + .env("CXXFLAGS", cflags); } } - if mode == Mode::Libstd && self.config.extended && compiler.is_final_stage(self) { + if mode == Mode::Libstd && self.config.general.extended && compiler.is_final_stage(self) { cargo.env("RUSTC_SAVE_ANALYSIS", "api".to_string()); } // For `cargo doc` invocations, make rustdoc print the Rust version into the docs - cargo.env("RUSTDOC_CRATE_VERSION", self.build.rust_version()); + cargo.env("RUSTDOC_CRATE_VERSION", self.rust_version()); // Environment variables *required* throughout the build // @@ -723,7 +1065,7 @@ impl<'a> Builder<'a> { cargo.env("CFG_COMPILER_HOST_TRIPLE", target); // Set this for all builds to make sure doc builds also get it. 
- cargo.env("CFG_RELEASE_CHANNEL", &self.build.config.channel); + cargo.env("CFG_RELEASE_CHANNEL", &self.config.rust.channel); // This one's a bit tricky. As of the time of this writing the compiler // links to the `winapi` crate on crates.io. This crate provides raw @@ -764,21 +1106,21 @@ impl<'a> Builder<'a> { // This must be kept before the thinlto check, as we set codegen units // to 1 forcibly there. - if let Some(n) = self.config.rust_codegen_units { + if let Some(n) = self.config.rust.codegen_units() { cargo.env("RUSTC_CODEGEN_UNITS", n.to_string()); } - if self.config.rust_optimize { + if self.config.rust.optimize() { // FIXME: cargo bench does not accept `--release` if cmd != "bench" { cargo.arg("--release"); } - if self.config.rust_codegen_units.is_none() && - self.build.is_rust_llvm(compiler.host) && - self.config.rust_thinlto { + if self.config.rust.codegen_units().is_none() && self.is_rust_llvm(compiler.host) + && self.config.rust.thinlto + { cargo.env("RUSTC_THINLTO", "1"); - } else if self.config.rust_codegen_units.is_none() { + } else if self.config.rust.codegen_units().is_none() { // Generally, if ThinLTO has been disabled for some reason, we // want to set the codegen units to 1. However, we shouldn't do // this if the option was specifically set by the user. @@ -786,16 +1128,23 @@ impl<'a> Builder<'a> { } } - if self.config.locked_deps { + if self.config.general.locked_deps { cargo.arg("--locked"); } - if self.config.vendor || self.is_sudo { + if self.config.general.vendor || self.config.is_sudo { cargo.arg("--frozen"); } self.ci_env.force_coloring_in_ci(&mut cargo); - cargo + CargoCommand { + cargo, + mode, + target, + cmd, + compiler, + builder: self, + } } /// Ensure that a given step is built, returning it's output. 
This will @@ -806,7 +1155,10 @@ impl<'a> Builder<'a> { let mut stack = self.stack.borrow_mut(); for stack_step in stack.iter() { // should skip - if stack_step.downcast_ref::().map_or(true, |stack_step| *stack_step != step) { + if stack_step + .downcast_ref::() + .map_or(true, |stack_step| *stack_step != step) + { continue; } let mut out = String::new(); @@ -817,21 +1169,862 @@ impl<'a> Builder<'a> { panic!(out); } if let Some(out) = self.cache.get(&step) { - self.build.verbose(&format!("{}c {:?}", " ".repeat(stack.len()), step)); + self.verbose(&format!("{}c {:?}", " ".repeat(stack.len()), step)); return out; } - self.build.verbose(&format!("{}> {:?}", " ".repeat(stack.len()), step)); + self.verbose(&format!("{}> {:?}", " ".repeat(stack.len()), step)); stack.push(Box::new(step.clone())); } + #[cfg(test)] + let out = step.clone().for_test(self); + #[cfg(not(test))] let out = step.clone().run(self); { let mut stack = self.stack.borrow_mut(); let cur_step = stack.pop().expect("step stack empty"); assert_eq!(cur_step.downcast_ref(), Some(&step)); } - self.build.verbose(&format!("{}< {:?}", " ".repeat(self.stack.borrow().len()), step)); + self.verbose(&format!( + "{}< {:?}", + " ".repeat(self.stack.borrow().len()), + step + )); self.cache.put(step, out.clone()); out } } + +pub struct CargoCommand<'a> { + builder: &'a Builder<'a>, + cargo: Command, + mode: Mode, + compiler: Compiler, + target: Interned, + cmd: &'static str, +} + +impl<'a> Deref for CargoCommand<'a> { + type Target = Command; + fn deref(&self) -> &Command { + &self.cargo + } +} + +impl<'a> DerefMut for CargoCommand<'a> { + fn deref_mut(&mut self) -> &mut Command { + &mut self.cargo + } +} + +impl<'a> CargoCommand<'a> { + pub fn run(&mut self) { + if cfg!(test) { return; } + let stamp = match self.mode { + Mode::Libstd => self.builder.libstd_stamp(self.compiler, self.target), + Mode::Libtest => self.builder.libtest_stamp(self.compiler, self.target), + Mode::Librustc => 
self.builder.librustc_stamp(self.compiler, self.target), + Mode::CodegenBackend(backend) => { + self.builder.codegen_backend_stamp(self.compiler, self.target, &*backend) + } + Mode::TestTool | Mode::RustcTool => { + panic!("did not expect to execute with tools"); + } + }; + + let is_check = self.cmd == "check"; + + // Instruct Cargo to give us json messages on stdout, critically leaving + // stderr as piped so we can get those pretty colors. + self.cargo + .arg("--message-format") + .arg("json") + .stdout(Stdio::piped()); + + if stderr_isatty() && self.builder.ci_env == CiEnv::None { + // since we pass message-format=json to cargo, we need to tell the rustc + // wrapper to give us colored output if necessary. This is because we + // only want Cargo's JSON output, not rustcs. + self.cargo.env("RUSTC_COLOR", "1"); + } + + self.builder.verbose(&format!("running: {:?}", self.cargo)); + let mut child = match self.cargo.spawn() { + Ok(child) => child, + Err(e) => panic!("failed to execute command: {:?}\nerror: {}", self.cargo, e), + }; + + // `target_root_dir` looks like $dir/$target/release + let target_root_dir = stamp.parent().unwrap(); + // `target_deps_dir` looks like $dir/$target/release/deps + let target_deps_dir = target_root_dir.join("deps"); + // `host_root_dir` looks like $dir/release + let host_root_dir = target_root_dir.parent().unwrap() // chop off `release` + .parent().unwrap() // chop off `$target` + .join(target_root_dir.file_name().unwrap()); + + // Spawn Cargo slurping up its JSON output. We'll start building up the + // `deps` array of all files it generated along with a `toplevel` array of + // files we need to probe for later. 
+ let mut deps = Vec::new(); + let mut toplevel = Vec::new(); + let stdout = BufReader::new(child.stdout.take().unwrap()); + for line in stdout.lines() { + let line = t!(line); + let json: serde_json::Value = if line.starts_with("{") { + t!(serde_json::from_str(&line)) + } else { + // If this was informational, just print it out and continue + println!("{}", line); + continue; + }; + if json["reason"].as_str() != Some("compiler-artifact") { + continue; + } + for filename in json["filenames"].as_array().unwrap() { + let filename = filename.as_str().unwrap(); + // Skip files like executables + if !filename.ends_with(".rlib") && !filename.ends_with(".lib") + && !is_dylib(&filename) && !(is_check && filename.ends_with(".rmeta")) + { + continue; + } + + let filename = Path::new(filename); + + // If this was an output file in the "host dir" we don't actually + // worry about it, it's not relevant for us. + if filename.starts_with(&host_root_dir) { + continue; + } + + // If this was output in the `deps` dir then this is a precise file + // name (hash included) so we start tracking it. + if filename.starts_with(&target_deps_dir) { + deps.push(filename.to_path_buf()); + continue; + } + + // Otherwise this was a "top level artifact" which right now doesn't + // have a hash in the name, but there's a version of this file in + // the `deps` folder which *does* have a hash in the name. That's + // the one we'll want to we'll probe for it later. + // + // We do not use `Path::file_stem` or `Path::extension` here, + // because some generated files may have multiple extensions e.g. + // `std-.dll.lib` on Windows. The aforementioned methods only + // split the file name by the last extension (`.lib`) while we need + // to split by all extensions (`.dll.lib`). 
+ let expected_len = t!(filename.metadata()).len(); + let filename = filename.file_name().unwrap().to_str().unwrap(); + let mut parts = filename.splitn(2, '.'); + let file_stem = parts.next().unwrap().to_owned(); + let extension = parts.next().unwrap().to_owned(); + + toplevel.push((file_stem, extension, expected_len)); + } + } + + // Make sure Cargo actually succeeded after we read all of its stdout. + let status = t!(child.wait()); + if !status.success() { + panic!( + "command did not execute successfully: {:?}\n\ + expected success, got: {}", + self.cargo, status + ); + } + + // Ok now we need to actually find all the files listed in `toplevel`. We've + // got a list of prefix/extensions and we basically just need to find the + // most recent file in the `deps` folder corresponding to each one. + let contents = t!(target_deps_dir.read_dir()) + .map(|e| t!(e)) + .map(|e| { + ( + e.path(), + e.file_name().into_string().unwrap(), + t!(e.metadata()), + ) + }) + .collect::>(); + for (prefix, extension, expected_len) in toplevel { + let candidates = contents.iter().filter(|&&(_, ref filename, ref meta)| { + filename.starts_with(&prefix[..]) && filename[prefix.len()..].starts_with("-") + && filename.ends_with(&extension[..]) && meta.len() == expected_len + }); + let max = candidates + .max_by_key(|&&(_, _, ref metadata)| { + FileTime::from_last_modification_time(metadata) + }); + let path_to_add = match max { + Some(triple) => triple.0.to_str().unwrap(), + None => panic!("no output generated for {:?} {:?}", prefix, extension), + }; + if is_dylib(path_to_add) { + let candidate = format!("{}.lib", path_to_add); + let candidate = PathBuf::from(candidate); + if candidate.exists() { + deps.push(candidate); + } + } + deps.push(path_to_add.into()); + } + + // Now we want to update the contents of the stamp file, if necessary. First + // we read off the previous contents along with its mtime. 
If our new + // contents (the list of files to copy) is different or if any dep's mtime + // is newer then we rewrite the stamp file. + deps.sort(); + let stamp_contents = fs::read(&stamp).unwrap_or_default(); + let stamp_mtime = mtime(&stamp); + let mut new_contents = Vec::new(); + let mut max = None; + let mut max_path = None; + for dep in deps.iter() { + let mtime = mtime(dep); + if Some(mtime) > max { + max = Some(mtime); + max_path = Some(dep.clone()); + } + new_contents.extend(dep.to_str().unwrap().as_bytes()); + new_contents.extend(b"\0"); + } + let max = max.unwrap(); + let max_path = max_path.unwrap(); + if stamp_contents == new_contents && max <= stamp_mtime { + self.builder.verbose(&format!( + "not updating {:?}; contents equal and {} <= {}", + stamp, max, stamp_mtime + )); + return; + } + if max > stamp_mtime { + self.builder.verbose(&format!("updating {:?} as {:?} changed", stamp, max_path)); + } else { + self.builder.verbose(&format!("updating {:?} as deps changed", stamp)); + } + t!(fs::write(&stamp, &new_contents)); + } +} + +// Avoiding a dependency on winapi to keep compile times down +#[cfg(unix)] +fn stderr_isatty() -> bool { + use libc; + unsafe { libc::isatty(libc::STDERR_FILENO) != 0 } +} +#[cfg(windows)] +fn stderr_isatty() -> bool { + type DWORD = u32; + type BOOL = i32; + type HANDLE = *mut u8; + const STD_ERROR_HANDLE: DWORD = -12i32 as DWORD; + extern "system" { + fn GetStdHandle(which: DWORD) -> HANDLE; + fn GetConsoleMode(hConsoleHandle: HANDLE, lpMode: *mut DWORD) -> BOOL; + } + unsafe { + let handle = GetStdHandle(STD_ERROR_HANDLE); + let mut out = 0; + GetConsoleMode(handle, &mut out) != 0 + } +} + +#[cfg(test)] +#[allow(warnings)] +mod __test { + use config::Config; + use super::*; + + fn sorted(mut v: Vec) -> Vec { + v.sort(); + v + } + + fn configure(host: &[&str], target: &[&str]) -> Config { + let mut config = Config::for_test(); + config.run_host_only = true; + config.general.build = "A".intern(); + config.general.host = 
vec![config.general.build].clone().into_iter() + .chain(host.iter().map(|s| s.intern())).collect::>(); + config.general.target = config.general.host.clone().into_iter() + .chain(target.iter().map(|s| s.intern())).collect::>(); + config + } + + fn first(v: Vec<(A, B)>) -> Vec { + v.into_iter().map(|(a, _)| a).collect::>() + } + + #[test] + fn dist_baseline() { + let build = Build::new(configure(&[], &[])); + let mut builder = Builder::new(&build); + builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]); + + let a = "A".intern(); + + assert_eq!(first(builder.cache.all::()), &[ + dist::Docs { stage: 2, host: a }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Mingw { host: a }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Rustc { compiler: Compiler { host: a, stage: 2 } }, + ]); + let ds = first(builder.cache.all::()); + assert_eq!(ds.len(), 1); + assert_eq!(ds[0].host, a); + assert_eq!(first(builder.cache.all::()), &[ + dist::Std { + compiler: Compiler { host: a, stage: 2 }, + target: a, + }, + ]); + assert_eq!(first(builder.cache.all::()), &[dist::Src]); + assert_eq!(first(builder.cache.all::()), + &[dist::PlainSourceTarball]); + } + + #[test] + fn dist_baseline_extended() { + let mut config = configure(&[], &[]); + config.general.extended = true; + let build = Build::new(config); + let mut builder = Builder::new(&build); + builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]); + + let a = "A".intern(); + + assert_eq!(first(builder.cache.all::()), &[ + dist::Docs { stage: 2, host: a }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Mingw { host: a }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Rustc { compiler: Compiler { host: a, stage: 2 } }, + ]); + let ds = first(builder.cache.all::()); + assert_eq!(ds.len(), 1); + assert_eq!(ds[0].host, a); + assert_eq!(first(builder.cache.all::()), &[ + dist::Std { + compiler: Compiler { host: a, stage: 2 }, + target: 
a, + }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Analysis { + compiler: Compiler { host: a, stage: 2 }, + target: a, + }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Cargo { stage: 2, target: a }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Rls { stage: 2, target: a }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Rustfmt { stage: 2, target: a }, + ]); + assert_eq!(first(builder.cache.all::()), &[dist::Src]); + assert_eq!(first(builder.cache.all::()), + &[dist::PlainSourceTarball]); + } + + #[test] + fn dist_with_targets() { + let build = Build::new(configure(&[], &["B"])); + let mut builder = Builder::new(&build); + builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]); + + let a = "A".intern(); + let b = "B".intern(); + + assert_eq!(first(builder.cache.all::()), &[ + dist::Docs { stage: 2, host: a }, + dist::Docs { stage: 2, host: b }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Mingw { host: a }, + dist::Mingw { host: b }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Rustc { compiler: Compiler { host: a, stage: 2 } }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Std { + compiler: Compiler { host: a, stage: 2 }, + target: a, + }, + dist::Std { + compiler: Compiler { host: a, stage: 2 }, + target: b, + }, + ]); + assert_eq!(first(builder.cache.all::()), &[dist::Src]); + + + assert_eq!(first(builder.cache.all::()), &[ + compile::Rustc { + compiler: Compiler { host: a, stage: 0 }, + target: a, + }, + compile::Rustc { // FIXME: this is not needed + compiler: Compiler { host: a, stage: 1 }, + target: a, + }, + compile::Rustc { // FIXME: this is not needed + compiler: Compiler { host: a, stage: 2 }, + target: a, + }, + ]); + + assert_eq!(first(builder.cache.all::()), &[ + compile::Test { + compiler: Compiler { host: a, stage: 0 }, + target: a, + }, + compile::Test { + compiler: Compiler { host: a, stage: 1 }, + target: a, + }, + 
compile::Test { + compiler: Compiler { host: a, stage: 2 }, + target: a, + }, + compile::Test { + compiler: Compiler { host: a, stage: 2 }, + target: b, + }, + ]); + } + + #[test] + fn dist_with_hosts() { + let build = Build::new(configure(&["B"], &[])); + let mut builder = Builder::new(&build); + builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]); + + let a = "A".intern(); + let b = "B".intern(); + + assert_eq!(first(builder.cache.all::()), &[ + dist::Docs { stage: 2, host: a }, + dist::Docs { stage: 2, host: b }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Mingw { host: a }, + dist::Mingw { host: b }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Rustc { compiler: Compiler { host: a, stage: 2 } }, + dist::Rustc { compiler: Compiler { host: b, stage: 2 } }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Std { + compiler: Compiler { host: a, stage: 2 }, + target: a, + }, + dist::Std { + compiler: Compiler { host: a, stage: 2 }, + target: b, + }, + ]); + assert_eq!(first(builder.cache.all::()), &[dist::Src]); + } + + #[test] + fn dist_with_targets_and_hosts() { + let build = Build::new(configure(&["B"], &["C"])); + let mut builder = Builder::new(&build); + builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]); + + let a = "A".intern(); + let b = "B".intern(); + let c = "C".intern(); + + assert_eq!(first(builder.cache.all::()), &[ + dist::Docs { stage: 2, host: a }, + dist::Docs { stage: 2, host: b }, + dist::Docs { stage: 2, host: c }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Mingw { host: a }, + dist::Mingw { host: b }, + dist::Mingw { host: c }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Rustc { compiler: Compiler { host: a, stage: 2 } }, + dist::Rustc { compiler: Compiler { host: b, stage: 2 } }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Std { + compiler: Compiler { host: a, stage: 2 }, + target: a, + }, + 
dist::Std { + compiler: Compiler { host: a, stage: 2 }, + target: b, + }, + dist::Std { + compiler: Compiler { host: a, stage: 2 }, + target: c, + }, + ]); + assert_eq!(first(builder.cache.all::()), &[dist::Src]); + } + + #[test] + fn dist_with_target_flag() { + let mut config = configure(&["B"], &["C"]); + config.run_host_only = false; // as-if --target=C was passed + let build = Build::new(config); + let mut builder = Builder::new(&build); + builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]); + + let a = "A".intern(); + let b = "B".intern(); + let c = "C".intern(); + + assert_eq!(first(builder.cache.all::()), &[ + dist::Docs { stage: 2, host: a }, + dist::Docs { stage: 2, host: b }, + dist::Docs { stage: 2, host: c }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Mingw { host: a }, + dist::Mingw { host: b }, + dist::Mingw { host: c }, + ]); + assert_eq!(first(builder.cache.all::()), &[]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Std { + compiler: Compiler { host: a, stage: 2 }, + target: a, + }, + dist::Std { + compiler: Compiler { host: a, stage: 2 }, + target: b, + }, + dist::Std { + compiler: Compiler { host: a, stage: 2 }, + target: c, + }, + ]); + assert_eq!(first(builder.cache.all::()), &[]); + + assert_eq!(first(builder.cache.all::()), &[ + compile::Rustc { + compiler: Compiler { host: a, stage: 0 }, + target: a, + }, + compile::Rustc { + compiler: Compiler { host: a, stage: 1 }, + target: a, + }, + compile::Rustc { + compiler: Compiler { host: a, stage: 1 }, + target: b, + }, + compile::Rustc { + compiler: Compiler { host: a, stage: 2 }, + target: a, + }, + compile::Rustc { + compiler: Compiler { host: a, stage: 2 }, + target: b, + }, + ]); + + assert_eq!(first(builder.cache.all::()), &[ + compile::Test { + compiler: Compiler { host: a, stage: 0 }, + target: a, + }, + compile::Test { + compiler: Compiler { host: a, stage: 1 }, + target: a, + }, + compile::Test { + compiler: Compiler { host: a, stage: 
2 }, + target: a, + }, + compile::Test { + compiler: Compiler { host: a, stage: 1 }, + target: b, + }, + compile::Test { + compiler: Compiler { host: a, stage: 2 }, + target: b, + }, + compile::Test { + compiler: Compiler { host: a, stage: 2 }, + target: c, + }, + ]); + } + + #[test] + fn dist_with_same_targets_and_hosts() { + let build = Build::new(configure(&["B"], &["B"])); + let mut builder = Builder::new(&build); + builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]); + + let a = "A".intern(); + let b = "B".intern(); + + assert_eq!(first(builder.cache.all::()), &[ + dist::Docs { stage: 2, host: a }, + dist::Docs { stage: 2, host: b }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Mingw { host: a }, + dist::Mingw { host: b }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Rustc { compiler: Compiler { host: a, stage: 2 } }, + dist::Rustc { compiler: Compiler { host: b, stage: 2 } }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + dist::Std { + compiler: Compiler { host: a, stage: 2 }, + target: a, + }, + dist::Std { + compiler: Compiler { host: a, stage: 2 }, + target: b, + }, + ]); + assert_eq!(first(builder.cache.all::()), &[dist::Src]); + assert_eq!(first(builder.cache.all::()), &[ + compile::Std { + compiler: Compiler { host: a, stage: 0 }, + target: a, + }, + compile::Std { + compiler: Compiler { host: a, stage: 1 }, + target: a, + }, + compile::Std { + compiler: Compiler { host: a, stage: 2 }, + target: a, + }, + compile::Std { + compiler: Compiler { host: a, stage: 1 }, + target: b, + }, + compile::Std { + compiler: Compiler { host: a, stage: 2 }, + target: b, + }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + compile::Test { + compiler: Compiler { host: a, stage: 0 }, + target: a, + }, + compile::Test { + compiler: Compiler { host: a, stage: 1 }, + target: a, + }, + compile::Test { + compiler: Compiler { host: a, stage: 2 }, + target: a, + }, + compile::Test { + compiler: Compiler { 
host: a, stage: 1 }, + target: b, + }, + compile::Test { + compiler: Compiler { host: a, stage: 2 }, + target: b, + }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + compile::Assemble { + target_compiler: Compiler { host: a, stage: 0 }, + }, + compile::Assemble { + target_compiler: Compiler { host: a, stage: 1 }, + }, + compile::Assemble { + target_compiler: Compiler { host: a, stage: 2 }, + }, + compile::Assemble { + target_compiler: Compiler { host: b, stage: 2 }, + }, + ]); + } + + #[test] + fn build_default() { + let build = Build::new(configure(&["B"], &["C"])); + let mut builder = Builder::new(&build); + builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Build), &[]); + + let a = "A".intern(); + let b = "B".intern(); + let c = "C".intern(); + + assert!(!builder.cache.all::().is_empty()); + assert!(!builder.cache.all::().is_empty()); + assert_eq!(first(builder.cache.all::()), &[ + compile::Rustc { + compiler: Compiler { host: a, stage: 0 }, + target: a, + }, + compile::Rustc { + compiler: Compiler { host: a, stage: 1 }, + target: a, + }, + compile::Rustc { + compiler: Compiler { host: a, stage: 1 }, + target: b, + }, + compile::Rustc { + compiler: Compiler { host: a, stage: 2 }, + target: a, + }, + compile::Rustc { + compiler: Compiler { host: a, stage: 2 }, + target: b, + }, + compile::Rustc { + compiler: Compiler { host: b, stage: 2 }, + target: a, + }, + compile::Rustc { + compiler: Compiler { host: b, stage: 2 }, + target: b, + }, + ]); + + assert_eq!(first(builder.cache.all::()), &[ + compile::Test { + compiler: Compiler { host: a, stage: 0 }, + target: a, + }, + compile::Test { + compiler: Compiler { host: a, stage: 1 }, + target: a, + }, + compile::Test { + compiler: Compiler { host: a, stage: 2 }, + target: a, + }, + compile::Test { + compiler: Compiler { host: b, stage: 2 }, + target: a, + }, + compile::Test { + compiler: Compiler { host: a, stage: 1 }, + target: b, + }, + compile::Test { + compiler: Compiler { host: a, stage: 
2 }, + target: b, + }, + compile::Test { + compiler: Compiler { host: b, stage: 2 }, + target: b, + }, + compile::Test { + compiler: Compiler { host: a, stage: 2 }, + target: c, + }, + compile::Test { + compiler: Compiler { host: b, stage: 2 }, + target: c, + }, + ]); + } + + #[test] + fn build_with_target_flag() { + // --build=A --host=B --target=C: + // ./x.py build --target=C + let mut config = configure(&["B"], &["C"]); + config.run_host_only = false; + let build = Build::new(config); + let mut builder = Builder::new(&build); + builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Build), &[]); + + let a = "A".intern(); + let b = "B".intern(); + let c = "C".intern(); + + assert!(!builder.cache.all::().is_empty()); + assert_eq!(first(builder.cache.all::()), &[ + compile::Assemble { + target_compiler: Compiler { host: a, stage: 0 }, + }, + compile::Assemble { + target_compiler: Compiler { host: a, stage: 1 }, + }, + compile::Assemble { + target_compiler: Compiler { host: a, stage: 2 }, + }, + compile::Assemble { + target_compiler: Compiler { host: b, stage: 2 }, + }, + ]); + assert_eq!(first(builder.cache.all::()), &[ + compile::Rustc { + compiler: Compiler { host: a, stage: 0 }, + target: a, + }, + compile::Rustc { + compiler: Compiler { host: a, stage: 1 }, + target: a, + }, + compile::Rustc { + compiler: Compiler { host: a, stage: 1 }, + target: b, + }, + ]); + + assert_eq!(first(builder.cache.all::()), &[ + compile::Test { + compiler: Compiler { host: a, stage: 0 }, + target: a, + }, + compile::Test { + compiler: Compiler { host: a, stage: 1 }, + target: a, + }, + compile::Test { + compiler: Compiler { host: a, stage: 2 }, + target: a, + }, + compile::Test { + compiler: Compiler { host: b, stage: 2 }, + target: a, + }, + compile::Test { + compiler: Compiler { host: a, stage: 1 }, + target: b, + }, + compile::Test { + compiler: Compiler { host: a, stage: 2 }, + target: b, + }, + compile::Test { + compiler: Compiler { host: b, stage: 2 }, + 
target: b, + }, + compile::Test { + compiler: Compiler { host: a, stage: 2 }, + target: c, + }, + compile::Test { + compiler: Compiler { host: b, stage: 2 }, + target: c, + }, + ]); + } +} diff --git a/src/bootstrap/cache.rs b/src/bootstrap/cache.rs index c27493158826c..bad6b7dcdac3e 100644 --- a/src/bootstrap/cache.rs +++ b/src/bootstrap/cache.rs @@ -13,28 +13,51 @@ use std::borrow::Borrow; use std::cell::RefCell; use std::collections::HashMap; use std::convert::AsRef; -use std::ffi::OsStr; use std::fmt; use std::hash::{Hash, Hasher}; -use std::marker::PhantomData; use std::mem; use std::ops::Deref; -use std::path::{Path, PathBuf}; +use std::path::PathBuf; use std::sync::Mutex; +use std::cmp::{Ord, Ordering, PartialOrd}; use builder::Step; -pub struct Interned(usize, PhantomData<*const T>); +use serde::{Deserialize, Deserializer, Serialize, Serializer}; -impl Default for Interned { - fn default() -> Self { - INTERNER.intern_string(String::default()) +pub struct Interned(*mut T); + +impl Serialize for Interned { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + self.as_static().serialize(serializer) + } +} + +impl<'de, T: Deserialize<'de> + Intern> Deserialize<'de> for Interned { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + Ok(T::deserialize(deserializer)?.intern()) + } +} + +impl Interned { + fn as_static<'a>(&self) -> &'a T { + // This is safe because the values Interned points to are effectively leaked + unsafe { mem::transmute::<&T, &'a T>(&*self.0) } } } -impl Default for Interned { - fn default() -> Self { - INTERNER.intern_path(PathBuf::default()) +impl Default for Interned +where + T: Intern + Default, +{ + fn default() -> Interned { + T::default().intern() } } @@ -45,28 +68,37 @@ impl Clone for Interned { } } -impl PartialEq for Interned { - fn eq(&self, other: &Self) -> bool { - self.0 == other.0 +impl PartialEq for Interned +where + A: ?Sized + PartialEq, +{ + fn eq(&self, other: &A) -> 
bool { + other.eq(&*self.as_static()) } } -impl Eq for Interned {} -impl PartialEq for Interned { - fn eq(&self, other: &str) -> bool { - *self == other - } +impl Eq for Interned +where + Interned: PartialEq>, +{ } -impl<'a> PartialEq<&'a str> for Interned { - fn eq(&self, other: &&str) -> bool { - **self == **other + +impl PartialOrd> for Interned +where + Interned: PartialEq>, + B: PartialOrd, +{ + fn partial_cmp(&self, other: &Interned) -> Option { + PartialOrd::partial_cmp(self.as_static(), other.as_static()) } } -impl<'a, T> PartialEq<&'a Interned> for Interned { - fn eq(&self, other: &&Self) -> bool { - self.0 == other.0 + +impl> Ord for Interned { + fn cmp(&self, other: &Interned) -> Ordering { + Ord::cmp(self.as_static(), other.as_static()) } } + impl<'a, T> PartialEq> for &'a Interned { fn eq(&self, other: &Interned) -> bool { self.0 == other.0 @@ -76,167 +108,154 @@ impl<'a, T> PartialEq> for &'a Interned { unsafe impl Send for Interned {} unsafe impl Sync for Interned {} -impl fmt::Display for Interned { +impl fmt::Display for Interned { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let s: &str = &*self; - f.write_str(s) + fmt::Display::fmt(self.as_static(), f) } } -impl fmt::Debug for Interned { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let s: &str = &*self; - f.write_fmt(format_args!("{:?}", s)) - } -} -impl fmt::Debug for Interned { +impl fmt::Debug for Interned { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let s: &Path = &*self; - f.write_fmt(format_args!("{:?}", s)) + fmt::Debug::fmt(self.as_static(), f) } } -impl Hash for Interned { +impl Hash for Interned { fn hash(&self, state: &mut H) { - let l = INTERNER.strs.lock().unwrap(); - l.get(*self).hash(state) + self.as_static().hash(state) } } -impl Hash for Interned { - fn hash(&self, state: &mut H) { - let l = INTERNER.paths.lock().unwrap(); - l.get(*self).hash(state) +impl Deref for Interned { + type Target = ::Target; + fn deref(&self) -> &'static 
Self::Target { + &*self.as_static() } } -impl Deref for Interned { - type Target = str; - fn deref(&self) -> &'static str { - let l = INTERNER.strs.lock().unwrap(); - unsafe { mem::transmute::<&str, &'static str>(l.get(*self)) } +impl AsRef for Interned +where + I: AsRef + 'static, + R: ?Sized, +{ + fn as_ref(&self) -> &'static R { + self.as_static().as_ref() } } -impl Deref for Interned { - type Target = Path; - fn deref(&self) -> &'static Path { - let l = INTERNER.paths.lock().unwrap(); - unsafe { mem::transmute::<&Path, &'static Path>(l.get(*self)) } - } -} - -impl AsRef for Interned { - fn as_ref(&self) -> &'static Path { - let l = INTERNER.paths.lock().unwrap(); - unsafe { mem::transmute::<&Path, &'static Path>(l.get(*self)) } - } +pub trait Intern: Sized { + fn intern(self) -> Interned; } -impl AsRef for Interned { - fn as_ref(&self) -> &'static Path { - let l = INTERNER.strs.lock().unwrap(); - unsafe { mem::transmute::<&Path, &'static Path>(l.get(*self).as_ref()) } +impl Intern for String { + fn intern(self) -> Interned { + INTERNER.place(self) } } -impl AsRef for Interned { - fn as_ref(&self) -> &'static OsStr { - let l = INTERNER.paths.lock().unwrap(); - unsafe { mem::transmute::<&OsStr, &'static OsStr>(l.get(*self).as_ref()) } +impl Intern for PathBuf { + fn intern(self) -> Interned { + INTERNER.place(self) } } -impl AsRef for Interned { - fn as_ref(&self) -> &'static OsStr { - let l = INTERNER.strs.lock().unwrap(); - unsafe { mem::transmute::<&OsStr, &'static OsStr>(l.get(*self).as_ref()) } +impl<'a, B, I> Intern for &'a B +where + B: Eq + Hash + ToOwned + ?Sized + 'static, + I: Borrow + Clone + Hash + Eq + Send + 'static + Intern, +{ + fn intern(self) -> Interned { + INTERNER.place_borrow(self) } } - -struct TyIntern { - items: Vec, +struct TyIntern { set: HashMap>, } impl TyIntern { fn new() -> TyIntern { TyIntern { - items: Vec::new(), set: HashMap::new(), } } - fn intern_borrow(&mut self, item: &B) -> Interned + fn place_borrow(&mut self, item: &B) 
-> Interned where - B: Eq + Hash + ToOwned + ?Sized, + B: Eq + Hash + ToOwned + ?Sized, T: Borrow, { if let Some(i) = self.set.get(&item) { return *i; } - let item = item.to_owned(); - let interned = Interned(self.items.len(), PhantomData::<*const T>); - self.set.insert(item.clone(), interned); - self.items.push(item); - interned + self.place(item.to_owned()) } - fn intern(&mut self, item: T) -> Interned { + fn place(&mut self, item: T) -> Interned { if let Some(i) = self.set.get(&item) { return *i; } - let interned = Interned(self.items.len(), PhantomData::<*const T>); - self.set.insert(item.clone(), interned); - self.items.push(item); + let ptr = Box::into_raw(Box::new(item.clone())); + let interned = Interned(ptr); + self.set.insert(item, interned); interned } - - fn get(&self, i: Interned) -> &T { - &self.items[i.0] - } } -pub struct Interner { - strs: Mutex>, - paths: Mutex>, +struct Interner { + generic: Mutex>>, } impl Interner { fn new() -> Interner { Interner { - strs: Mutex::new(TyIntern::new()), - paths: Mutex::new(TyIntern::new()), + generic: Mutex::new(Vec::new()), } } - pub fn intern_str(&self, s: &str) -> Interned { - self.strs.lock().unwrap().intern_borrow(s) - } - pub fn intern_string(&self, s: String) -> Interned { - self.strs.lock().unwrap().intern(s) + fn place(&self, i: T) -> Interned { + let mut l = self.generic.lock().unwrap(); + for x in l.iter_mut() { + if let Some(ty_interner) = (&mut **x).downcast_mut::>() { + return ty_interner.place(i); + } + } + let mut ty_interner = TyIntern::new(); + let interned = ty_interner.place(i); + l.push(Box::new(ty_interner)); + interned } - pub fn intern_path(&self, s: PathBuf) -> Interned { - self.paths.lock().unwrap().intern(s) + fn place_borrow(&self, i: &B) -> Interned + where + B: Eq + Hash + ToOwned + ?Sized + 'static, + I: Borrow + Clone + Hash + Eq + Send + 'static, + { + let mut l = self.generic.lock().unwrap(); + for x in l.iter_mut() { + if let Some(ty_interner) = (&mut **x).downcast_mut::>() { + 
return ty_interner.place_borrow(i); + } + } + let mut ty_interner = TyIntern::new(); + let interned = ty_interner.place_borrow(i); + l.push(Box::new(ty_interner)); + interned } } lazy_static! { - pub static ref INTERNER: Interner = Interner::new(); + static ref INTERNER: Interner = Interner::new(); } -/// This is essentially a HashMap which allows storing any type in its input and -/// any type in its output. It is a write-once cache; values are never evicted, -/// which means that references to the value can safely be returned from the -/// get() method. #[derive(Debug)] pub struct Cache( - RefCell, // actually a HashMap> - >> + RefCell< + HashMap< + TypeId, + Box, // actually a HashMap + >, + >, ); impl Cache { @@ -247,21 +266,39 @@ impl Cache { pub fn put(&self, step: S, value: S::Output) { let mut cache = self.0.borrow_mut(); let type_id = TypeId::of::(); - let stepcache = cache.entry(type_id) - .or_insert_with(|| Box::new(HashMap::::new())) - .downcast_mut::>() - .expect("invalid type mapped"); - assert!(!stepcache.contains_key(&step), "processing {:?} a second time", step); + let stepcache = cache + .entry(type_id) + .or_insert_with(|| Box::new(HashMap::::new())) + .downcast_mut::>() + .expect("invalid type mapped"); + assert!( + !stepcache.contains_key(&step), + "processing {:?} a second time", + step + ); stepcache.insert(step, value); } pub fn get(&self, step: &S) -> Option { let mut cache = self.0.borrow_mut(); let type_id = TypeId::of::(); - let stepcache = cache.entry(type_id) - .or_insert_with(|| Box::new(HashMap::::new())) - .downcast_mut::>() - .expect("invalid type mapped"); + let stepcache = cache + .entry(type_id) + .or_insert_with(|| Box::new(HashMap::::new())) + .downcast_mut::>() + .expect("invalid type mapped"); stepcache.get(step).cloned() } + + #[cfg(test)] + pub fn all(&mut self) -> Vec<(S, S::Output)> { + let cache = self.0.get_mut(); + let type_id = TypeId::of::(); + let mut v = cache.remove(&type_id) + .map(|b| 
b.downcast::>().expect("correct type")) + .map(|m| m.into_iter().collect::>()) + .unwrap_or_default(); + v.sort_by_key(|&(a, _)| a); + v + } } diff --git a/src/bootstrap/cc_detect.rs b/src/bootstrap/cc_detect.rs index e531fdaf2923b..3f4aa917de161 100644 --- a/src/bootstrap/cc_detect.rs +++ b/src/bootstrap/cc_detect.rs @@ -73,12 +73,23 @@ fn cc2ar(cc: &Path, target: &str) -> Option { pub fn find(build: &mut Build) { // For all targets we're going to need a C compiler for building some shims // and such as well as for being a linker for Rust code. - let targets = build.targets.iter().chain(&build.hosts).cloned().chain(iter::once(build.build)) - .collect::>(); + let targets = build + .config + .general + .target + .iter() + .chain(&build.config.general.host) + .cloned() + .chain(iter::once(build.config.general.build)) + .collect::>(); for target in targets.into_iter() { let mut cfg = cc::Build::new(); - cfg.cargo_metadata(false).opt_level(0).warnings(false).debug(false) - .target(&target).host(&build.build); + cfg.cargo_metadata(false) + .opt_level(0) + .warnings(false) + .debug(false) + .target(&target) + .host(&build.config.general.build); let config = build.config.target_config.get(&target); if let Some(cc) = config.and_then(|c| c.cc.as_ref()) { @@ -103,11 +114,23 @@ pub fn find(build: &mut Build) { } // For all host triples we need to find a C++ compiler as well - let hosts = build.hosts.iter().cloned().chain(iter::once(build.build)).collect::>(); + let hosts = build + .config + .general + .host + .iter() + .cloned() + .chain(iter::once(build.config.general.build)) + .collect::>(); for host in hosts.into_iter() { let mut cfg = cc::Build::new(); - cfg.cargo_metadata(false).opt_level(0).warnings(false).debug(false).cpp(true) - .target(&host).host(&build.build); + cfg.cargo_metadata(false) + .opt_level(0) + .warnings(false) + .debug(false) + .cpp(true) + .target(&host) + .host(&build.config.general.build); let config = build.config.target_config.get(&host); if let 
Some(cxx) = config.and_then(|c| c.cxx.as_ref()) { cfg.compiler(cxx); @@ -120,11 +143,13 @@ pub fn find(build: &mut Build) { } } -fn set_compiler(cfg: &mut cc::Build, - compiler: Language, - target: Interned, - config: Option<&Target>, - build: &Build) { +fn set_compiler( + cfg: &mut cc::Build, + compiler: Language, + target: Interned, + config: Option<&Target>, + build: &Build, +) { match &*target { // When compiling for android we may have the NDK configured in the // config.toml in which case we look there. Otherwise the default @@ -143,7 +168,7 @@ fn set_compiler(cfg: &mut cc::Build, let c = cfg.get_compiler(); let gnu_compiler = compiler.gcc(); if !c.path().ends_with(gnu_compiler) { - return + return; } let output = output(c.to_command().arg("--version")); @@ -152,7 +177,7 @@ fn set_compiler(cfg: &mut cc::Build, None => return, }; match output[i + 3..].chars().next().unwrap() { - '0' ... '6' => {} + '0'...'6' => {} _ => return, } let alternative = format!("e{}", gnu_compiler); diff --git a/src/bootstrap/channel.rs b/src/bootstrap/channel.rs index 72841cb0616c1..cb30dc40a51dd 100644 --- a/src/bootstrap/channel.rs +++ b/src/bootstrap/channel.rs @@ -26,6 +26,7 @@ use config::Config; // The version number pub const CFG_RELEASE_NUM: &str = "1.26.0"; +#[derive(Default)] pub struct GitInfo { inner: Option, } @@ -39,32 +40,42 @@ struct Info { impl GitInfo { pub fn new(config: &Config, dir: &Path) -> GitInfo { // See if this even begins to look like a git dir - if config.ignore_git || !dir.join(".git").exists() { - return GitInfo { inner: None } + if config.rust.ignore_git() || !dir.join(".git").exists() { + return GitInfo { inner: None }; } // Make sure git commands work let out = Command::new("git") - .arg("rev-parse") - .current_dir(dir) - .output() - .expect("failed to spawn git"); + .arg("rev-parse") + .current_dir(dir) + .output() + .expect("failed to spawn git"); if !out.status.success() { - return GitInfo { inner: None } + return GitInfo { inner: None }; } // 
Ok, let's scrape some info - let ver_date = output(Command::new("git").current_dir(dir) - .arg("log").arg("-1") - .arg("--date=short") - .arg("--pretty=format:%cd")); - let ver_hash = output(Command::new("git").current_dir(dir) - .arg("rev-parse").arg("HEAD")); - let short_ver_hash = output(Command::new("git") - .current_dir(dir) - .arg("rev-parse") - .arg("--short=9") - .arg("HEAD")); + let ver_date = output( + Command::new("git") + .current_dir(dir) + .arg("log") + .arg("-1") + .arg("--date=short") + .arg("--pretty=format:%cd"), + ); + let ver_hash = output( + Command::new("git") + .current_dir(dir) + .arg("rev-parse") + .arg("HEAD"), + ); + let short_ver_hash = output( + Command::new("git") + .current_dir(dir) + .arg("rev-parse") + .arg("--short=9") + .arg("HEAD"), + ); GitInfo { inner: Some(Info { commit_date: ver_date.trim().to_string(), diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs index 767ee4016c6f1..14f6a3fa9b8da 100644 --- a/src/bootstrap/check.rs +++ b/src/bootstrap/check.rs @@ -10,11 +10,10 @@ //! Implementation of compiling the compiler and standard library, in "check" mode. 
-use compile::{run_cargo, std_cargo, test_cargo, rustc_cargo, add_to_sysroot}; -use builder::{RunConfig, Builder, ShouldRun, Step}; -use {Build, Compiler, Mode}; +use compile::{add_to_sysroot}; +use builder::{Builder, RunConfig, ShouldRun, Step}; +use Mode; use cache::Interned; -use std::path::PathBuf; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct Std { @@ -30,29 +29,19 @@ impl Step for Std { } fn make_run(run: RunConfig) { - run.builder.ensure(Std { - target: run.target, - }); + run.builder.ensure(Std { target: run.target }); } fn run(self, builder: &Builder) { - let build = builder.build; let target = self.target; - let compiler = builder.compiler(0, build.build); + let compiler = builder.compiler(0, builder.config.general.build); - let _folder = build.fold_output(|| format!("stage{}-std", compiler.stage)); + let _folder = builder.fold_output(|| format!("stage{}-std", compiler.stage)); println!("Checking std artifacts ({} -> {})", &compiler.host, target); - let out_dir = build.stage_out(compiler, Mode::Libstd); - build.clear_if_dirty(&out_dir, &builder.rustc(compiler)); - let mut cargo = builder.cargo(compiler, Mode::Libstd, target, "check"); - std_cargo(build, &compiler, target, &mut cargo); - run_cargo(build, - &mut cargo, - &libstd_stamp(build, compiler, target), - true); + builder.cargo(compiler, Mode::Libstd, target, "check").run(); let libdir = builder.sysroot_libdir(compiler, target); - add_to_sysroot(&libdir, &libstd_stamp(build, compiler, target)); + add_to_sysroot(&libdir, &builder.libstd_stamp(compiler, target)); } } @@ -71,9 +60,7 @@ impl Step for Rustc { } fn make_run(run: RunConfig) { - run.builder.ensure(Rustc { - target: run.target, - }); + run.builder.ensure(Rustc { target: run.target }); } /// Build the compiler. @@ -82,25 +69,18 @@ impl Step for Rustc { /// the `compiler` targeting the `target` architecture. The artifacts /// created will also be linked into the sysroot directory. 
fn run(self, builder: &Builder) { - let build = builder.build; - let compiler = builder.compiler(0, build.build); + let compiler = builder.compiler(0, builder.config.general.build); let target = self.target; - let _folder = build.fold_output(|| format!("stage{}-rustc", compiler.stage)); - println!("Checking compiler artifacts ({} -> {})", &compiler.host, target); + let _folder = builder.fold_output(|| format!("stage{}-rustc", compiler.stage)); + println!( + "Checking compiler artifacts ({} -> {})", + &compiler.host, target + ); - let stage_out = builder.stage_out(compiler, Mode::Librustc); - build.clear_if_dirty(&stage_out, &libstd_stamp(build, compiler, target)); - build.clear_if_dirty(&stage_out, &libtest_stamp(build, compiler, target)); - - let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "check"); - rustc_cargo(build, &mut cargo); - run_cargo(build, - &mut cargo, - &librustc_stamp(build, compiler, target), - true); + builder.cargo(compiler, Mode::Librustc, target, "check").run(); let libdir = builder.sysroot_libdir(compiler, target); - add_to_sysroot(&libdir, &librustc_stamp(build, compiler, target)); + add_to_sysroot(&libdir, &builder.librustc_stamp(compiler, target)); } } @@ -118,45 +98,17 @@ impl Step for Test { } fn make_run(run: RunConfig) { - run.builder.ensure(Test { - target: run.target, - }); + run.builder.ensure(Test { target: run.target }); } fn run(self, builder: &Builder) { - let build = builder.build; let target = self.target; - let compiler = builder.compiler(0, build.build); + let compiler = builder.compiler(0, builder.config.general.build); - let _folder = build.fold_output(|| format!("stage{}-test", compiler.stage)); + let _folder = builder.fold_output(|| format!("stage{}-test", compiler.stage)); println!("Checking test artifacts ({} -> {})", &compiler.host, target); - let out_dir = build.stage_out(compiler, Mode::Libtest); - build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target)); - let mut cargo = 
builder.cargo(compiler, Mode::Libtest, target, "check"); - test_cargo(build, &compiler, target, &mut cargo); - run_cargo(build, - &mut cargo, - &libtest_stamp(build, compiler, target), - true); + builder.cargo(compiler, Mode::Libtest, target, "check").run(); let libdir = builder.sysroot_libdir(compiler, target); - add_to_sysroot(&libdir, &libtest_stamp(build, compiler, target)); + add_to_sysroot(&libdir, &builder.libtest_stamp(compiler, target)); } } - -/// Cargo's output path for the standard library in a given stage, compiled -/// by a particular compiler for the specified target. -pub fn libstd_stamp(build: &Build, compiler: Compiler, target: Interned) -> PathBuf { - build.cargo_out(compiler, Mode::Libstd, target).join(".libstd-check.stamp") -} - -/// Cargo's output path for libtest in a given stage, compiled by a particular -/// compiler for the specified target. -pub fn libtest_stamp(build: &Build, compiler: Compiler, target: Interned) -> PathBuf { - build.cargo_out(compiler, Mode::Libtest, target).join(".libtest-check.stamp") -} - -/// Cargo's output path for librustc in a given stage, compiled by a particular -/// compiler for the specified target. -pub fn librustc_stamp(build: &Build, compiler: Compiler, target: Interned) -> PathBuf { - build.cargo_out(compiler, Mode::Librustc, target).join(".librustc-check.stamp") -} diff --git a/src/bootstrap/clean.rs b/src/bootstrap/clean.rs index 87f194fb7d2f8..86e1580cf5617 100644 --- a/src/bootstrap/clean.rs +++ b/src/bootstrap/clean.rs @@ -15,23 +15,23 @@ //! `build/cache` directory (download cache) or the `build/$target/llvm` //! directory unless the --all flag is present. 
-use std::fs; use std::io::{self, ErrorKind}; use std::path::Path; +use fs; use Build; pub fn clean(build: &Build, all: bool) { rm_rf("tmp".as_ref()); if all { - rm_rf(&build.out); + rm_rf(&build.config.general.out); } else { - rm_rf(&build.out.join("tmp")); - rm_rf(&build.out.join("dist")); + rm_rf(&build.config.general.out.join("tmp")); + rm_rf(&build.config.general.out.join("dist")); - for host in &build.hosts { - let entries = match build.out.join(host).read_dir() { + for host in &build.config.general.host { + let entries = match build.config.general.out.join(host).read_dir() { Ok(iter) => iter, Err(_) => continue, }; @@ -39,7 +39,7 @@ pub fn clean(build: &Build, all: bool) { for entry in entries { let entry = t!(entry); if entry.file_name().to_str() == Some("llvm") { - continue + continue; } let path = t!(entry.path().canonicalize()); rm_rf(&path); @@ -55,7 +55,7 @@ fn rm_rf(path: &Path) { return; } panic!("failed to get metadata for file {}: {}", path.display(), e); - }, + } Ok(metadata) => { if metadata.file_type().is_file() || metadata.file_type().is_symlink() { do_op(path, "remove file", |p| fs::remove_file(p)); @@ -66,20 +66,20 @@ fn rm_rf(path: &Path) { rm_rf(&t!(file).path()); } do_op(path, "remove dir", |p| fs::remove_dir(p)); - }, + } }; } fn do_op(path: &Path, desc: &str, mut f: F) - where F: FnMut(&Path) -> io::Result<()> +where + F: FnMut(&Path) -> io::Result<()>, { match f(path) { Ok(()) => {} // On windows we can't remove a readonly file, and git will often clone files as readonly. // As a result, we have some special logic to remove readonly files on windows. // This is also the reason that we can't use things like fs::remove_dir_all(). 
- Err(ref e) if cfg!(windows) && - e.kind() == ErrorKind::PermissionDenied => { + Err(ref e) if cfg!(windows) && e.kind() == ErrorKind::PermissionDenied => { let mut p = t!(path.symlink_metadata()).permissions(); p.set_readonly(false); t!(fs::set_permissions(path, p)); diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs index c85b04ddc0245..54d6943ff19b3 100644 --- a/src/bootstrap/compile.rs +++ b/src/bootstrap/compile.rs @@ -16,28 +16,22 @@ //! compiler. This module is also responsible for assembling the sysroot as it //! goes along from the output of the previous stage. -use std::env; -use std::fs::{self, File}; -use std::io::BufReader; -use std::io::prelude::*; use std::path::{Path, PathBuf}; -use std::process::{Command, Stdio}; +use std::process::Command; use std::str; use std::cmp::min; -use build_helper::{output, mtime, up_to_date}; -use filetime::FileTime; -use serde_json; +use build_helper::{output, up_to_date}; -use util::{exe, libdir, is_dylib, copy, read_stamp_file, CiEnv}; -use {Build, Compiler, Mode}; +use fs; +use util::{copy, exe, is_dylib, libdir, read_stamp_file}; +use {Compiler, Mode}; use native; -use tool; -use cache::{INTERNER, Interned}; -use builder::{Step, RunConfig, ShouldRun, Builder}; +use cache::{Intern, Interned}; +use builder::{Builder, RunConfig, ShouldRun, Step}; -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct Std { pub target: Interned, pub compiler: Compiler, @@ -64,27 +58,19 @@ impl Step for Std { /// using the `compiler` targeting the `target` architecture. The artifacts /// created will also be linked into the sysroot directory. 
fn run(self, builder: &Builder) { - let build = builder.build; let target = self.target; let compiler = self.compiler; builder.ensure(StartupObjects { compiler, target }); - if build.force_use_stage1(compiler, target) { - let from = builder.compiler(1, build.build); + if builder.force_use_stage1(compiler, target) { + let from = builder.compiler(1, builder.config.general.build); builder.ensure(Std { compiler: from, target, }); println!("Uplifting stage1 std ({} -> {})", from.host, target); - // Even if we're not building std this stage, the new sysroot must - // still contain the musl startup objects. - if target.contains("musl") { - let libdir = builder.sysroot_libdir(compiler, target); - copy_musl_third_party_objects(build, target, &libdir); - } - builder.ensure(StdLink { compiler: from, target_compiler: compiler, @@ -93,26 +79,16 @@ impl Step for Std { return; } - let _folder = build.fold_output(|| format!("stage{}-std", compiler.stage)); - println!("Building stage{} std artifacts ({} -> {})", compiler.stage, - &compiler.host, target); + let _folder = builder.fold_output(|| format!("stage{}-std", compiler.stage)); + println!( + "Building stage{} std artifacts ({} -> {})", + compiler.stage, &compiler.host, target + ); - if target.contains("musl") { - let libdir = builder.sysroot_libdir(compiler, target); - copy_musl_third_party_objects(build, target, &libdir); - } - - let out_dir = build.stage_out(compiler, Mode::Libstd); - build.clear_if_dirty(&out_dir, &builder.rustc(compiler)); - let mut cargo = builder.cargo(compiler, Mode::Libstd, target, "build"); - std_cargo(build, &compiler, target, &mut cargo); - run_cargo(build, - &mut cargo, - &libstd_stamp(build, compiler, target), - false); + builder.cargo(compiler, Mode::Libstd, target, "build").run(); builder.ensure(StdLink { - compiler: builder.compiler(compiler.stage, build.build), + compiler: builder.compiler(compiler.stage, builder.config.general.build), target_compiler: compiler, target, }); @@ -125,59 +101,12 
@@ impl Step for Std { /// with a glibc-targeting toolchain, given we have the appropriate startup /// files. As those shipped with glibc won't work, copy the ones provided by /// musl so we have them on linux-gnu hosts. -fn copy_musl_third_party_objects(build: &Build, - target: Interned, - into: &Path) { +fn copy_musl_third_party_objects(builder: &Builder, target: Interned, into: &Path) { for &obj in &["crt1.o", "crti.o", "crtn.o"] { - copy(&build.musl_root(target).unwrap().join("lib").join(obj), &into.join(obj)); - } -} - -/// Configure cargo to compile the standard library, adding appropriate env vars -/// and such. -pub fn std_cargo(build: &Build, - compiler: &Compiler, - target: Interned, - cargo: &mut Command) { - let mut features = build.std_features(); - - if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") { - cargo.env("MACOSX_DEPLOYMENT_TARGET", target); - } - - // When doing a local rebuild we tell cargo that we're stage1 rather than - // stage0. This works fine if the local rust and being-built rust have the - // same view of what the default allocator is, but fails otherwise. Since - // we don't have a way to express an allocator preference yet, work - // around the issue in the case of a local rebuild with jemalloc disabled. - if compiler.stage == 0 && build.local_rebuild && !build.config.use_jemalloc { - features.push_str(" force_alloc_system"); - } - - if compiler.stage != 0 && build.config.sanitizers { - // This variable is used by the sanitizer runtime crates, e.g. 
- // rustc_lsan, to build the sanitizer runtime from C code - // When this variable is missing, those crates won't compile the C code, - // so we don't set this variable during stage0 where llvm-config is - // missing - // We also only build the runtimes when --enable-sanitizers (or its - // config.toml equivalent) is used - cargo.env("LLVM_CONFIG", build.llvm_config(target)); - } - - cargo.arg("--features").arg(features) - .arg("--manifest-path") - .arg(build.src.join("src/libstd/Cargo.toml")); - - if let Some(target) = build.config.target_config.get(&target) { - if let Some(ref jemalloc) = target.jemalloc { - cargo.env("JEMALLOC_OVERRIDE", jemalloc); - } - } - if target.contains("musl") { - if let Some(p) = build.musl_root(target) { - cargo.env("MUSL_ROOT", p); - } + copy( + &builder.musl_root(target).unwrap().join("lib").join(obj), + &into.join(obj), + ); } } @@ -195,6 +124,8 @@ impl Step for StdLink { run.never() } + fn for_test(self, _builder: &Builder) {} + /// Link all libstd rlibs/dylibs into the sysroot location. /// /// Links those artifacts generated by `compiler` to a the `stage` compiler's @@ -204,31 +135,29 @@ impl Step for StdLink { /// libraries for `target`, and this method will find them in the relevant /// output directory. 
fn run(self, builder: &Builder) { - let build = builder.build; let compiler = self.compiler; let target_compiler = self.target_compiler; let target = self.target; - println!("Copying stage{} std from stage{} ({} -> {} / {})", - target_compiler.stage, - compiler.stage, - &compiler.host, - target_compiler.host, - target); + println!( + "Copying stage{} std from stage{} ({} -> {} / {})", + target_compiler.stage, compiler.stage, &compiler.host, target_compiler.host, target + ); let libdir = builder.sysroot_libdir(target_compiler, target); - add_to_sysroot(&libdir, &libstd_stamp(build, compiler, target)); + add_to_sysroot(&libdir, &builder.libstd_stamp(compiler, target)); - if build.config.sanitizers && compiler.stage != 0 && target == "x86_64-apple-darwin" { + if builder.config.general.sanitizers && compiler.stage != 0 + && target == "x86_64-apple-darwin" + { // The sanitizers are only built in stage1 or above, so the dylibs will // be missing in stage0 and causes panic. See the `std()` function above // for reason why the sanitizers are not built in stage0. - copy_apple_sanitizer_dylibs(&build.native_dir(target), "osx", &libdir); + copy_apple_sanitizer_dylibs(&builder.native_dir(target), "osx", &libdir); } - builder.ensure(tool::CleanTools { - compiler: target_compiler, - target, - mode: Mode::Libstd, - }); + if target.contains("musl") { + let libdir = builder.sysroot_libdir(target_compiler, target); + copy_musl_third_party_objects(builder, target, &libdir); + } } } @@ -271,15 +200,14 @@ impl Step for StartupObjects { /// files, so we just use the nightly snapshot compiler to always build them (as /// no other compilers are guaranteed to be available). 
fn run(self, builder: &Builder) { - let build = builder.build; let for_compiler = self.compiler; let target = self.target; if !target.contains("pc-windows-gnu") { - return + return; } - let src_dir = &build.src.join("src/rtstartup"); - let dst_dir = &build.native_dir(target).join("rtstartup"); + let src_dir = &builder.config.src.join("src/rtstartup"); + let dst_dir = &builder.native_dir(target).join("rtstartup"); let sysroot_dir = &builder.sysroot_libdir(for_compiler, target); t!(fs::create_dir_all(dst_dir)); @@ -287,32 +215,34 @@ impl Step for StartupObjects { let src_file = &src_dir.join(file.to_string() + ".rs"); let dst_file = &dst_dir.join(file.to_string() + ".o"); if !up_to_date(src_file, dst_file) { - let mut cmd = Command::new(&build.initial_rustc); - build.run(cmd.env("RUSTC_BOOTSTRAP", "1") - .arg("--cfg").arg("stage0") - .arg("--target").arg(target) - .arg("--emit=obj") - .arg("-o").arg(dst_file) - .arg(src_file)); + let mut cmd = Command::new(&builder.config.general.initial_rustc); + builder.run( + cmd.env("RUSTC_BOOTSTRAP", "1") + .arg("--cfg") + .arg("stage0") + .arg("--target") + .arg(target) + .arg("--emit=obj") + .arg("-o") + .arg(dst_file) + .arg(src_file), + ); } copy(dst_file, &sysroot_dir.join(file.to_string() + ".o")); } for obj in ["crt2.o", "dllcrt2.o"].iter() { - let src = compiler_file(build, - build.cc(target), - target, - obj); + let src = compiler_file(builder, builder.cc(target), target, obj); copy(&src, &sysroot_dir.join(obj)); } } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct Test { - pub compiler: Compiler, pub target: Interned, + pub compiler: Compiler, } impl Step for Test { @@ -336,58 +266,43 @@ impl Step for Test { /// the build using the `compiler` targeting the `target` architecture. The /// artifacts created will also be linked into the sysroot directory. 
fn run(self, builder: &Builder) { - let build = builder.build; let target = self.target; let compiler = self.compiler; builder.ensure(Std { compiler, target }); - if build.force_use_stage1(compiler, target) { + if builder.force_use_stage1(compiler, target) { builder.ensure(Test { - compiler: builder.compiler(1, build.build), + compiler: builder.compiler(1, builder.config.general.build), target, }); - println!("Uplifting stage1 test ({} -> {})", &build.build, target); + println!( + "Uplifting stage1 test ({} -> {})", + &builder.config.general.build, target + ); builder.ensure(TestLink { - compiler: builder.compiler(1, build.build), + compiler: builder.compiler(1, builder.config.general.build), target_compiler: compiler, target, }); return; } - let _folder = build.fold_output(|| format!("stage{}-test", compiler.stage)); - println!("Building stage{} test artifacts ({} -> {})", compiler.stage, - &compiler.host, target); - let out_dir = build.stage_out(compiler, Mode::Libtest); - build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target)); - let mut cargo = builder.cargo(compiler, Mode::Libtest, target, "build"); - test_cargo(build, &compiler, target, &mut cargo); - run_cargo(build, - &mut cargo, - &libtest_stamp(build, compiler, target), - false); + let _folder = builder.fold_output(|| format!("stage{}-test", compiler.stage)); + println!( + "Building stage{} test artifacts ({} -> {})", + compiler.stage, &compiler.host, target + ); + builder.cargo(compiler, Mode::Libtest, target, "build").run(); builder.ensure(TestLink { - compiler: builder.compiler(compiler.stage, build.build), + compiler: builder.compiler(compiler.stage, builder.config.general.build), target_compiler: compiler, target, }); } } -/// Same as `std_cargo`, but for libtest -pub fn test_cargo(build: &Build, - _compiler: &Compiler, - _target: Interned, - cargo: &mut Command) { - if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") { - cargo.env("MACOSX_DEPLOYMENT_TARGET", target); - } 
- cargo.arg("--manifest-path") - .arg(build.src.join("src/libtest/Cargo.toml")); -} - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct TestLink { pub compiler: Compiler, @@ -402,29 +317,25 @@ impl Step for TestLink { run.never() } + fn for_test(self, _builder: &Builder) {} + /// Same as `std_link`, only for libtest fn run(self, builder: &Builder) { - let build = builder.build; let compiler = self.compiler; let target_compiler = self.target_compiler; let target = self.target; - println!("Copying stage{} test from stage{} ({} -> {} / {})", - target_compiler.stage, - compiler.stage, - &compiler.host, - target_compiler.host, - target); - add_to_sysroot(&builder.sysroot_libdir(target_compiler, target), - &libtest_stamp(build, compiler, target)); - builder.ensure(tool::CleanTools { - compiler: target_compiler, - target, - mode: Mode::Libtest, - }); + println!( + "Copying stage{} test from stage{} ({} -> {} / {})", + target_compiler.stage, compiler.stage, &compiler.host, target_compiler.host, target + ); + add_to_sysroot( + &builder.sysroot_libdir(target_compiler, target), + &builder.libtest_stamp(compiler, target), + ); } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct Rustc { pub compiler: Compiler, pub target: Interned, @@ -452,20 +363,22 @@ impl Step for Rustc { /// the `compiler` targeting the `target` architecture. The artifacts /// created will also be linked into the sysroot directory. 
fn run(self, builder: &Builder) { - let build = builder.build; let compiler = self.compiler; let target = self.target; builder.ensure(Test { compiler, target }); - if build.force_use_stage1(compiler, target) { + if builder.force_use_stage1(compiler, target) { builder.ensure(Rustc { - compiler: builder.compiler(1, build.build), + compiler: builder.compiler(1, builder.config.general.build), target, }); - println!("Uplifting stage1 rustc ({} -> {})", &build.build, target); + println!( + "Uplifting stage1 rustc ({} -> {})", + &builder.config.general.build, target + ); builder.ensure(RustcLink { - compiler: builder.compiler(1, build.build), + compiler: builder.compiler(1, builder.config.general.build), target_compiler: compiler, target, }); @@ -474,75 +387,26 @@ impl Step for Rustc { // Ensure that build scripts have a std to link against. builder.ensure(Std { - compiler: builder.compiler(self.compiler.stage, build.build), - target: build.build, + compiler: builder.compiler(self.compiler.stage, builder.config.general.build), + target: builder.config.general.build, }); - let _folder = build.fold_output(|| format!("stage{}-rustc", compiler.stage)); - println!("Building stage{} compiler artifacts ({} -> {})", - compiler.stage, &compiler.host, target); - - let stage_out = builder.stage_out(compiler, Mode::Librustc); - build.clear_if_dirty(&stage_out, &libstd_stamp(build, compiler, target)); - build.clear_if_dirty(&stage_out, &libtest_stamp(build, compiler, target)); + let _folder = builder.fold_output(|| format!("stage{}-rustc", compiler.stage)); + println!( + "Building stage{} compiler artifacts ({} -> {})", + compiler.stage, &compiler.host, target + ); - let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "build"); - rustc_cargo(build, &mut cargo); - run_cargo(build, - &mut cargo, - &librustc_stamp(build, compiler, target), - false); + builder.cargo(compiler, Mode::Librustc, target, "build").run(); builder.ensure(RustcLink { - compiler: 
builder.compiler(compiler.stage, build.build), + compiler: builder.compiler(compiler.stage, builder.config.general.build), target_compiler: compiler, target, }); } } -pub fn rustc_cargo(build: &Build, cargo: &mut Command) { - cargo.arg("--features").arg(build.rustc_features()) - .arg("--manifest-path") - .arg(build.src.join("src/rustc/Cargo.toml")); - rustc_cargo_env(build, cargo); -} - -fn rustc_cargo_env(build: &Build, cargo: &mut Command) { - // Set some configuration variables picked up by build scripts and - // the compiler alike - cargo.env("CFG_RELEASE", build.rust_release()) - .env("CFG_RELEASE_CHANNEL", &build.config.channel) - .env("CFG_VERSION", build.rust_version()) - .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or_default()); - - let libdir_relative = build.config.libdir_relative().unwrap_or(Path::new("lib")); - cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative); - - // If we're not building a compiler with debugging information then remove - // these two env vars which would be set otherwise. 
- if build.config.rust_debuginfo_only_std { - cargo.env_remove("RUSTC_DEBUGINFO"); - cargo.env_remove("RUSTC_DEBUGINFO_LINES"); - } - - if let Some(ref ver_date) = build.rust_info.commit_date() { - cargo.env("CFG_VER_DATE", ver_date); - } - if let Some(ref ver_hash) = build.rust_info.sha() { - cargo.env("CFG_VER_HASH", ver_hash); - } - if !build.unstable_features() { - cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1"); - } - if let Some(ref s) = build.config.rustc_default_linker { - cargo.env("CFG_DEFAULT_LINKER", s); - } - if build.config.rustc_parallel_queries { - cargo.env("RUSTC_PARALLEL_QUERIES", "1"); - } -} - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] struct RustcLink { pub compiler: Compiler, @@ -557,25 +421,21 @@ impl Step for RustcLink { run.never() } + fn for_test(self, _builder: &Builder) {} + /// Same as `std_link`, only for librustc fn run(self, builder: &Builder) { - let build = builder.build; let compiler = self.compiler; let target_compiler = self.target_compiler; let target = self.target; - println!("Copying stage{} rustc from stage{} ({} -> {} / {})", - target_compiler.stage, - compiler.stage, - &compiler.host, - target_compiler.host, - target); - add_to_sysroot(&builder.sysroot_libdir(target_compiler, target), - &librustc_stamp(build, compiler, target)); - builder.ensure(tool::CleanTools { - compiler: target_compiler, - target, - mode: Mode::Librustc, - }); + println!( + "Copying stage{} rustc from stage{} ({} -> {} / {})", + target_compiler.stage, compiler.stage, &compiler.host, target_compiler.host, target + ); + add_to_sysroot( + &builder.sysroot_libdir(target_compiler, target), + &builder.librustc_stamp(compiler, target), + ); } } @@ -596,38 +456,33 @@ impl Step for CodegenBackend { } fn make_run(run: RunConfig) { - let backend = run.builder.config.rust_codegen_backends.get(0); - let backend = backend.cloned().unwrap_or_else(|| { - INTERNER.intern_str("llvm") - }); + let backend = run.builder.config.rust.codegen_backends.get(0); + let 
backend = backend.cloned().unwrap_or_else(|| String::from("llvm")); + let backend = backend.intern(); run.builder.ensure(CodegenBackend { compiler: run.builder.compiler(run.builder.top_stage, run.host), target: run.target, - backend + backend, }); } fn run(self, builder: &Builder) { - let build = builder.build; let compiler = self.compiler; let target = self.target; builder.ensure(Rustc { compiler, target }); - if build.force_use_stage1(compiler, target) { + if builder.force_use_stage1(compiler, target) { builder.ensure(CodegenBackend { - compiler: builder.compiler(1, build.build), + compiler: builder.compiler(1, builder.config.general.build), target, backend: self.backend, }); return; } - let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "build"); - let mut features = build.rustc_features().to_string(); - cargo.arg("--manifest-path") - .arg(build.src.join("src/librustc_trans/Cargo.toml")); - rustc_cargo_env(build, &mut cargo); + let mut cargo = builder.cargo( + compiler, Mode::CodegenBackend(self.backend), target, "build"); match &*self.backend { "llvm" | "emscripten" => { @@ -638,68 +493,42 @@ impl Step for CodegenBackend { emscripten: self.backend == "emscripten", }); - if self.backend == "emscripten" { - features.push_str(" emscripten"); - } - - let _folder = build.fold_output(|| format!("stage{}-rustc_trans", compiler.stage)); - println!("Building stage{} codegen artifacts ({} -> {}, {})", - compiler.stage, &compiler.host, target, self.backend); + let _folder = + builder.fold_output(|| format!("stage{}-rustc_trans", compiler.stage)); + println!( + "Building stage{} codegen artifacts ({} -> {}, {})", + compiler.stage, &compiler.host, target, self.backend + ); // Pass down configuration from the LLVM build into the build of // librustc_llvm and librustc_trans. 
- if build.is_rust_llvm(target) { + if builder.is_rust_llvm(target) { cargo.env("LLVM_RUSTLLVM", "1"); } cargo.env("LLVM_CONFIG", &llvm_config); if self.backend != "emscripten" { - let target_config = build.config.target_config.get(&target); + let target_config = builder.config.target_config.get(&target); if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) { cargo.env("CFG_LLVM_ROOT", s); } } // Building with a static libstdc++ is only supported on linux right now, // not for MSVC or macOS - if build.config.llvm_static_stdcpp && - !target.contains("freebsd") && - !target.contains("windows") && - !target.contains("apple") { - let file = compiler_file(build, - build.cxx(target).unwrap(), - target, - "libstdc++.a"); + if builder.config.llvm.static_libstdcpp && !target.contains("freebsd") + && !target.contains("windows") && !target.contains("apple") + { + let file = + compiler_file(builder, builder.cxx(target).unwrap(), target, "libstdc++.a"); cargo.env("LLVM_STATIC_STDCPP", file); } - if build.config.llvm_link_shared { + if builder.config.llvm.link_shared { cargo.env("LLVM_LINK_SHARED", "1"); } } _ => panic!("unknown backend: {}", self.backend), } - let tmp_stamp = build.cargo_out(compiler, Mode::Librustc, target) - .join(".tmp.stamp"); - let files = run_cargo(build, - cargo.arg("--features").arg(features), - &tmp_stamp, - false); - let mut files = files.into_iter() - .filter(|f| { - let filename = f.file_name().unwrap().to_str().unwrap(); - is_dylib(filename) && filename.contains("rustc_trans-") - }); - let codegen_backend = match files.next() { - Some(f) => f, - None => panic!("no dylibs built for codegen backend?"), - }; - if let Some(f) = files.next() { - panic!("codegen backend built two dylibs:\n{}\n{}", - codegen_backend.display(), - f.display()); - } - let stamp = codegen_backend_stamp(build, compiler, target, self.backend); - let codegen_backend = codegen_backend.to_str().unwrap(); - 
t!(t!(File::create(&stamp)).write_all(codegen_backend.as_bytes())); + cargo.run(); } } @@ -709,10 +538,11 @@ impl Step for CodegenBackend { /// This will take the codegen artifacts produced by `compiler` and link them /// into an appropriate location for `target_compiler` to be a functional /// compiler. -fn copy_codegen_backends_to_sysroot(builder: &Builder, - compiler: Compiler, - target_compiler: Compiler) { - let build = builder.build; +fn copy_codegen_backends_to_sysroot( + builder: &Builder, + compiler: Compiler, + target_compiler: Compiler, +) { let target = target_compiler.host; // Note that this step is different than all the other `*Link` steps in @@ -726,57 +556,38 @@ fn copy_codegen_backends_to_sysroot(builder: &Builder, let dst = builder.sysroot_codegen_backends(target_compiler); t!(fs::create_dir_all(&dst)); - for backend in builder.config.rust_codegen_backends.iter() { - let stamp = codegen_backend_stamp(build, compiler, target, *backend); - let mut dylib = String::new(); - t!(t!(File::open(&stamp)).read_to_string(&mut dylib)); - let file = Path::new(&dylib); - let filename = file.file_name().unwrap().to_str().unwrap(); - // change `librustc_trans-xxxxxx.so` to `librustc_trans-llvm.so` - let target_filename = { - let dash = filename.find("-").unwrap(); - let dot = filename.find(".").unwrap(); - format!("{}-{}{}", - &filename[..dash], - backend, - &filename[dot..]) - }; - copy(&file, &dst.join(target_filename)); + for backend in builder.config.rust.codegen_backends.iter() { + let stamp = builder.codegen_backend_stamp(compiler, target, &backend); + let mut saw_backend: Option = None; + for path in read_stamp_file(&stamp) { + let filename = path.file_name().unwrap().to_str().unwrap(); + if is_dylib(filename) && filename.contains("rustc_trans-") { + if let Some(past) = saw_backend { + panic!("found two codegen backends:\n{}\n{}", + path.display(), + past.display()); + } + // change `librustc_trans-xxxxxx.so` to `librustc_trans-llvm.so` + let 
target_filename = { + let dash = filename.find("-").unwrap(); + let dot = filename.find(".").unwrap(); + format!("{}-{}{}", &filename[..dash], backend, &filename[dot..]) + }; + copy(&path, &dst.join(target_filename)); + saw_backend = Some(path.clone()); + } + } } } -/// Cargo's output path for the standard library in a given stage, compiled -/// by a particular compiler for the specified target. -pub fn libstd_stamp(build: &Build, compiler: Compiler, target: Interned) -> PathBuf { - build.cargo_out(compiler, Mode::Libstd, target).join(".libstd.stamp") -} - -/// Cargo's output path for libtest in a given stage, compiled by a particular -/// compiler for the specified target. -pub fn libtest_stamp(build: &Build, compiler: Compiler, target: Interned) -> PathBuf { - build.cargo_out(compiler, Mode::Libtest, target).join(".libtest.stamp") -} - -/// Cargo's output path for librustc in a given stage, compiled by a particular -/// compiler for the specified target. -pub fn librustc_stamp(build: &Build, compiler: Compiler, target: Interned) -> PathBuf { - build.cargo_out(compiler, Mode::Librustc, target).join(".librustc.stamp") -} - -fn codegen_backend_stamp(build: &Build, - compiler: Compiler, - target: Interned, - backend: Interned) -> PathBuf { - build.cargo_out(compiler, Mode::Librustc, target) - .join(format!(".librustc_trans-{}.stamp", backend)) -} - -fn compiler_file(build: &Build, - compiler: &Path, - target: Interned, - file: &str) -> PathBuf { +fn compiler_file( + builder: &Builder, + compiler: &Path, + target: Interned, + file: &str, +) -> PathBuf { let mut cmd = Command::new(compiler); - cmd.args(build.cflags(target)); + cmd.args(builder.cflags(target)); cmd.arg(format!("-print-file-name={}", file)); let out = output(&mut cmd); PathBuf::from(out.trim()) @@ -801,20 +612,29 @@ impl Step for Sysroot { /// thinks it is by default, but it's the same as the default for stages /// 1-3. 
fn run(self, builder: &Builder) -> Interned { - let build = builder.build; let compiler = self.compiler; let sysroot = if compiler.stage == 0 { - build.out.join(&compiler.host).join("stage0-sysroot") + builder + .config + .general + .out + .join(&compiler.host) + .join("stage0-sysroot") } else { - build.out.join(&compiler.host).join(format!("stage{}", compiler.stage)) + builder + .config + .general + .out + .join(&compiler.host) + .join(format!("stage{}", compiler.stage)) }; let _ = fs::remove_dir_all(&sysroot); t!(fs::create_dir_all(&sysroot)); - INTERNER.intern_path(sysroot) + sysroot.intern() } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct Assemble { /// The compiler which we will produce in this step. Assemble itself will /// take care of ensuring that the necessary prerequisites to do so exist, @@ -833,15 +653,16 @@ impl Step for Assemble { /// Prepare a new compiler from the artifacts in `stage` /// /// This will assemble a compiler in `build/$host/stage$stage`. The compiler - /// must have been previously produced by the `stage - 1` build.build + /// must have been previously produced by the `stage - 1` builder.build /// compiler. fn run(self, builder: &Builder) -> Compiler { - let build = builder.build; let target_compiler = self.target_compiler; if target_compiler.stage == 0 { - assert_eq!(build.build, target_compiler.host, - "Cannot obtain compiler for non-native build triple at stage 0"); + assert_eq!( + builder.config.general.build, target_compiler.host, + "Cannot obtain compiler for non-native build triple at stage 0" + ); // The stage 0 compiler for the build triple is always pre-built. return target_compiler; } @@ -863,33 +684,50 @@ impl Step for Assemble { // FIXME: It may be faster if we build just a stage 1 compiler and then // use that to bootstrap this compiler forward. 
let build_compiler = - builder.compiler(target_compiler.stage - 1, build.build); + builder.compiler(target_compiler.stage - 1, builder.config.general.build); // Build the libraries for this compiler to link to (i.e., the libraries // it uses at runtime). NOTE: Crates the target compiler compiles don't // link to these. (FIXME: Is that correct? It seems to be correct most // of the time but I think we do link to these for stage2/bin compilers // when not performing a full bootstrap). - if builder.build.config.keep_stage.map_or(false, |s| target_compiler.stage <= s) { + if builder + .build + .config + .keep_stage + .map_or(false, |s| target_compiler.stage <= s) + { builder.verbose("skipping compilation of compiler due to --keep-stage"); let compiler = build_compiler; for stage in 0..min(target_compiler.stage, builder.config.keep_stage.unwrap()) { let target_compiler = builder.compiler(stage, target_compiler.host); let target = target_compiler.host; - builder.ensure(StdLink { compiler, target_compiler, target }); - builder.ensure(TestLink { compiler, target_compiler, target }); - builder.ensure(RustcLink { compiler, target_compiler, target }); + builder.ensure(StdLink { + compiler, + target_compiler, + target, + }); + builder.ensure(TestLink { + compiler, + target_compiler, + target, + }); + builder.ensure(RustcLink { + compiler, + target_compiler, + target, + }); } } else { builder.ensure(Rustc { compiler: build_compiler, target: target_compiler.host, }); - for &backend in build.config.rust_codegen_backends.iter() { + for backend in builder.config.rust.codegen_backends.iter() { builder.ensure(CodegenBackend { compiler: build_compiler, target: target_compiler.host, - backend, + backend: backend.intern(), }); } } @@ -910,12 +748,10 @@ impl Step for Assemble { } } - copy_codegen_backends_to_sysroot(builder, - build_compiler, - target_compiler); + copy_codegen_backends_to_sysroot(builder, build_compiler, target_compiler); // Link the compiler binary itself into place - 
let out_dir = build.cargo_out(build_compiler, Mode::Librustc, host); + let out_dir = builder.cargo_out(build_compiler, Mode::Librustc, host); let rustc = out_dir.join(exe("rustc", &*host)); let bindir = sysroot.join("bin"); t!(fs::create_dir_all(&bindir)); @@ -937,197 +773,3 @@ pub fn add_to_sysroot(sysroot_dst: &Path, stamp: &Path) { copy(&path, &sysroot_dst.join(path.file_name().unwrap())); } } - -// Avoiding a dependency on winapi to keep compile times down -#[cfg(unix)] -fn stderr_isatty() -> bool { - use libc; - unsafe { libc::isatty(libc::STDERR_FILENO) != 0 } -} -#[cfg(windows)] -fn stderr_isatty() -> bool { - type DWORD = u32; - type BOOL = i32; - type HANDLE = *mut u8; - const STD_ERROR_HANDLE: DWORD = -12i32 as DWORD; - extern "system" { - fn GetStdHandle(which: DWORD) -> HANDLE; - fn GetConsoleMode(hConsoleHandle: HANDLE, lpMode: *mut DWORD) -> BOOL; - } - unsafe { - let handle = GetStdHandle(STD_ERROR_HANDLE); - let mut out = 0; - GetConsoleMode(handle, &mut out) != 0 - } -} - -pub fn run_cargo(build: &Build, cargo: &mut Command, stamp: &Path, is_check: bool) - -> Vec -{ - // Instruct Cargo to give us json messages on stdout, critically leaving - // stderr as piped so we can get those pretty colors. - cargo.arg("--message-format").arg("json") - .stdout(Stdio::piped()); - - if stderr_isatty() && build.ci_env == CiEnv::None { - // since we pass message-format=json to cargo, we need to tell the rustc - // wrapper to give us colored output if necessary. This is because we - // only want Cargo's JSON output, not rustcs. 
- cargo.env("RUSTC_COLOR", "1"); - } - - build.verbose(&format!("running: {:?}", cargo)); - let mut child = match cargo.spawn() { - Ok(child) => child, - Err(e) => panic!("failed to execute command: {:?}\nerror: {}", cargo, e), - }; - - // `target_root_dir` looks like $dir/$target/release - let target_root_dir = stamp.parent().unwrap(); - // `target_deps_dir` looks like $dir/$target/release/deps - let target_deps_dir = target_root_dir.join("deps"); - // `host_root_dir` looks like $dir/release - let host_root_dir = target_root_dir.parent().unwrap() // chop off `release` - .parent().unwrap() // chop off `$target` - .join(target_root_dir.file_name().unwrap()); - - // Spawn Cargo slurping up its JSON output. We'll start building up the - // `deps` array of all files it generated along with a `toplevel` array of - // files we need to probe for later. - let mut deps = Vec::new(); - let mut toplevel = Vec::new(); - let stdout = BufReader::new(child.stdout.take().unwrap()); - for line in stdout.lines() { - let line = t!(line); - let json: serde_json::Value = if line.starts_with("{") { - t!(serde_json::from_str(&line)) - } else { - // If this was informational, just print it out and continue - println!("{}", line); - continue - }; - if json["reason"].as_str() != Some("compiler-artifact") { - continue - } - for filename in json["filenames"].as_array().unwrap() { - let filename = filename.as_str().unwrap(); - // Skip files like executables - if !filename.ends_with(".rlib") && - !filename.ends_with(".lib") && - !is_dylib(&filename) && - !(is_check && filename.ends_with(".rmeta")) { - continue - } - - let filename = Path::new(filename); - - // If this was an output file in the "host dir" we don't actually - // worry about it, it's not relevant for us. - if filename.starts_with(&host_root_dir) { - continue; - } - - // If this was output in the `deps` dir then this is a precise file - // name (hash included) so we start tracking it. 
- if filename.starts_with(&target_deps_dir) { - deps.push(filename.to_path_buf()); - continue; - } - - // Otherwise this was a "top level artifact" which right now doesn't - // have a hash in the name, but there's a version of this file in - // the `deps` folder which *does* have a hash in the name. That's - // the one we'll want to we'll probe for it later. - // - // We do not use `Path::file_stem` or `Path::extension` here, - // because some generated files may have multiple extensions e.g. - // `std-.dll.lib` on Windows. The aforementioned methods only - // split the file name by the last extension (`.lib`) while we need - // to split by all extensions (`.dll.lib`). - let expected_len = t!(filename.metadata()).len(); - let filename = filename.file_name().unwrap().to_str().unwrap(); - let mut parts = filename.splitn(2, '.'); - let file_stem = parts.next().unwrap().to_owned(); - let extension = parts.next().unwrap().to_owned(); - - toplevel.push((file_stem, extension, expected_len)); - } - } - - // Make sure Cargo actually succeeded after we read all of its stdout. - let status = t!(child.wait()); - if !status.success() { - panic!("command did not execute successfully: {:?}\n\ - expected success, got: {}", - cargo, - status); - } - - // Ok now we need to actually find all the files listed in `toplevel`. We've - // got a list of prefix/extensions and we basically just need to find the - // most recent file in the `deps` folder corresponding to each one. 
- let contents = t!(target_deps_dir.read_dir()) - .map(|e| t!(e)) - .map(|e| (e.path(), e.file_name().into_string().unwrap(), t!(e.metadata()))) - .collect::>(); - for (prefix, extension, expected_len) in toplevel { - let candidates = contents.iter().filter(|&&(_, ref filename, ref meta)| { - filename.starts_with(&prefix[..]) && - filename[prefix.len()..].starts_with("-") && - filename.ends_with(&extension[..]) && - meta.len() == expected_len - }); - let max = candidates.max_by_key(|&&(_, _, ref metadata)| { - FileTime::from_last_modification_time(metadata) - }); - let path_to_add = match max { - Some(triple) => triple.0.to_str().unwrap(), - None => panic!("no output generated for {:?} {:?}", prefix, extension), - }; - if is_dylib(path_to_add) { - let candidate = format!("{}.lib", path_to_add); - let candidate = PathBuf::from(candidate); - if candidate.exists() { - deps.push(candidate); - } - } - deps.push(path_to_add.into()); - } - - // Now we want to update the contents of the stamp file, if necessary. First - // we read off the previous contents along with its mtime. If our new - // contents (the list of files to copy) is different or if any dep's mtime - // is newer then we rewrite the stamp file. 
- deps.sort(); - let mut stamp_contents = Vec::new(); - if let Ok(mut f) = File::open(stamp) { - t!(f.read_to_end(&mut stamp_contents)); - } - let stamp_mtime = mtime(&stamp); - let mut new_contents = Vec::new(); - let mut max = None; - let mut max_path = None; - for dep in deps.iter() { - let mtime = mtime(dep); - if Some(mtime) > max { - max = Some(mtime); - max_path = Some(dep.clone()); - } - new_contents.extend(dep.to_str().unwrap().as_bytes()); - new_contents.extend(b"\0"); - } - let max = max.unwrap(); - let max_path = max_path.unwrap(); - if stamp_contents == new_contents && max <= stamp_mtime { - build.verbose(&format!("not updating {:?}; contents equal and {} <= {}", - stamp, max, stamp_mtime)); - return deps - } - if max > stamp_mtime { - build.verbose(&format!("updating {:?} as {:?} changed", stamp, max_path)); - } else { - build.verbose(&format!("updating {:?} as deps changed", stamp)); - } - t!(t!(File::create(stamp)).write_all(&new_contents)); - deps -} diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs index 6bc20181a0330..f3b388c3d8405 100644 --- a/src/bootstrap/config.rs +++ b/src/bootstrap/config.rs @@ -15,129 +15,52 @@ use std::collections::{HashMap, HashSet}; use std::env; -use std::fs::File; -use std::io::prelude::*; use std::path::{Path, PathBuf}; -use std::process; +use std::process::{self, Command}; use std::cmp; +use std::iter; use num_cpus; +use channel; use toml; use util::exe; -use cache::{INTERNER, Interned}; +use cache::{Intern, Interned}; use flags::Flags; +use build_helper::output; pub use flags::Subcommand; +use serde; +use fs; /// Global configuration for the entire build and/or bootstrap. /// -/// This structure is derived from a combination of both `config.toml` and -/// `config.mk`. As of the time of this writing it's unlikely that `config.toml` -/// is used all that much, so this is primarily filled out by `config.mk` which -/// is generated from `./configure`. +/// This structure is derived from `config.toml`. 
/// /// Note that this structure is not decoded directly into, but rather it is /// filled out from the decoded forms of the structs below. For documentation /// each field, see the corresponding fields in /// `config.toml.example`. -#[derive(Default)] pub struct Config { - pub ccache: Option, - pub ninja: bool, - pub verbose: usize, - pub submodules: bool, - pub compiler_docs: bool, - pub docs: bool, - pub locked_deps: bool, - pub vendor: bool, - pub target_config: HashMap, Target>, - pub full_bootstrap: bool, - pub extended: bool, - pub tools: Option>, - pub sanitizers: bool, - pub profiler: bool, - pub ignore_git: bool, - pub exclude: Vec, - pub rustc_error_format: Option, - pub run_host_only: bool, + pub is_sudo: bool, + pub rustc_error_format: Option, + pub exclude: Vec, pub on_fail: Option, - pub stage: Option, + pub stage: u32, pub keep_stage: Option, pub src: PathBuf, pub jobs: Option, pub cmd: Subcommand, + pub paths: Vec, pub incremental: bool, - // llvm codegen options - pub llvm_enabled: bool, - pub llvm_assertions: bool, - pub llvm_optimize: bool, - pub llvm_release_debuginfo: bool, - pub llvm_version_check: bool, - pub llvm_static_stdcpp: bool, - pub llvm_link_shared: bool, - pub llvm_targets: Option, - pub llvm_experimental_targets: String, - pub llvm_link_jobs: Option, - - // rust codegen options - pub rust_optimize: bool, - pub rust_codegen_units: Option, - pub rust_thinlto: bool, - pub rust_debug_assertions: bool, - pub rust_debuginfo: bool, - pub rust_debuginfo_lines: bool, - pub rust_debuginfo_only_std: bool, - pub rust_rpath: bool, - pub rustc_parallel_queries: bool, - pub rustc_default_linker: Option, - pub rust_optimize_tests: bool, - pub rust_debuginfo_tests: bool, - pub rust_dist_src: bool, - pub rust_codegen_backends: Vec>, - - pub build: Interned, - pub hosts: Vec>, - pub targets: Vec>, - pub local_rebuild: bool, - - // dist misc - pub dist_sign_folder: Option, - pub dist_upload_addr: Option, - pub dist_gpg_password_file: Option, - - // 
libstd features - pub debug_jemalloc: bool, - pub use_jemalloc: bool, - pub backtrace: bool, // support for RUST_BACKTRACE - pub wasm_syscall: bool, - - // misc - pub low_priority: bool, - pub channel: String, - pub quiet_tests: bool, - pub test_miri: bool, - pub save_toolstates: Option, - - // Fallback musl-root for all targets - pub musl_root: Option, - pub prefix: Option, - pub sysconfdir: Option, - pub docdir: Option, - pub bindir: Option, - pub libdir: Option, - pub mandir: Option, - pub codegen_tests: bool, - pub nodejs: Option, - pub gdb: Option, - pub python: Option, - pub openssl_static: bool, - pub configure_args: Vec, + pub llvm: Llvm, + pub rust: Rust, + pub dist: Dist, - // These are either the stage0 downloaded binaries or the locally installed ones. - pub initial_cargo: PathBuf, - pub initial_rustc: PathBuf, + pub target_config: HashMap, Target>, + pub install: Install, + pub general: Build, } /// Per-target configuration stored in the global configuration structure. @@ -161,90 +84,197 @@ pub struct Target { /// This structure uses `Decodable` to automatically decode a TOML configuration /// file into this format, and then this is traversed and written into the above /// `Config` structure. -#[derive(Deserialize, Default)] -#[serde(deny_unknown_fields, rename_all = "kebab-case")] +#[derive(Default, Deserialize)] +#[serde(default, deny_unknown_fields, rename_all = "kebab-case")] struct TomlConfig { - build: Option, - install: Option, - llvm: Option, - rust: Option, - target: Option>, - dist: Option, + build: Build, + install: Install, + llvm: Llvm, + rust: Rust, + target: HashMap, + dist: Dist, +} + +fn build_build_deserialize<'de, D>(_d: D) -> Result, D::Error> +where + D: serde::Deserializer<'de> +{ + let build = env::var("BUILD").expect("'BUILD' defined").intern(); + Ok(build) } /// TOML representation of various global build decisions. 
-#[derive(Deserialize, Default, Clone)] -#[serde(deny_unknown_fields, rename_all = "kebab-case")] -struct Build { - build: Option, - #[serde(default)] - host: Vec, - #[serde(default)] - target: Vec, - cargo: Option, - rustc: Option, - low_priority: Option, - compiler_docs: Option, - docs: Option, - submodules: Option, - gdb: Option, - locked_deps: Option, - vendor: Option, - nodejs: Option, - python: Option, - full_bootstrap: Option, - extended: Option, - tools: Option>, - verbose: Option, - sanitizers: Option, - profiler: Option, - openssl_static: Option, - configure_args: Option>, - local_rebuild: Option, +#[derive(Deserialize, Clone)] +#[serde(default, deny_unknown_fields, rename_all = "kebab-case")] +pub struct Build { + // We get build from the BUILD env-var, provided by bootstrap.py + #[serde(deserialize_with = "build_build_deserialize")] + pub build: Interned, + pub host: Vec>, + pub target: Vec>, + + #[serde(rename = "cargo")] + pub initial_cargo: PathBuf, + #[serde(rename = "rustc")] + pub initial_rustc: PathBuf, + + pub low_priority: bool, + pub compiler_docs: bool, + pub docs: bool, + pub submodules: bool, + pub gdb: Option, + pub locked_deps: bool, + pub vendor: bool, + pub nodejs: Option, + pub python: Option, + pub full_bootstrap: bool, + pub extended: bool, + pub tools: Option>, + pub verbose: usize, + pub sanitizers: bool, + pub profiler: bool, + pub openssl_static: bool, + pub configure_args: Vec, + pub local_rebuild: bool, + #[serde(skip)] + pub out: PathBuf, +} + +impl Default for Build { + fn default() -> Build { + let out = env::var_os("BUILD_DIR").map(PathBuf::from).expect("'BUILD_DIR' defined"); + let build = env::var("BUILD").expect("'BUILD' defined").intern(); + let stage0_root = out.join(&build).join("stage0/bin"); + Build { + host: vec![build], + target: vec![build], + initial_cargo: stage0_root.join(exe("cargo", &build)), + initial_rustc: stage0_root.join(exe("rustc", &build)), + build: build, + low_priority: false, + compiler_docs: 
false, + docs: true, + submodules: true, + gdb: None, + locked_deps: false, + vendor: false, + nodejs: None, + python: None, + full_bootstrap: false, + extended: false, + tools: None, + verbose: 0, + sanitizers: false, + profiler: false, + openssl_static: false, + configure_args: Vec::new(), + local_rebuild: false, + out: out, + } + } } /// TOML representation of various global install decisions. -#[derive(Deserialize, Default, Clone)] -#[serde(deny_unknown_fields, rename_all = "kebab-case")] -struct Install { - prefix: Option, - sysconfdir: Option, - docdir: Option, - bindir: Option, - libdir: Option, - mandir: Option, +#[derive(Deserialize, Clone)] +#[serde(default, deny_unknown_fields, rename_all = "kebab-case")] +pub struct Install { + pub prefix: PathBuf, + pub sysconfdir: PathBuf, + pub docdir: PathBuf, + pub bindir: PathBuf, + pub libdir: PathBuf, + pub mandir: PathBuf, // standard paths, currently unused - datadir: Option, - infodir: Option, - localstatedir: Option, + datadir: Option, + infodir: Option, + localstatedir: Option, +} + +impl Default for Install { + fn default() -> Install { + Install { + prefix: PathBuf::from("/usr/local"), + sysconfdir: PathBuf::from("/etc"), + docdir: PathBuf::from("share/doc/rust"), + bindir: PathBuf::from("bin"), + libdir: PathBuf::from("lib"), + mandir: PathBuf::from("share/man"), + + datadir: None, + infodir: None, + localstatedir: None, + } + } } /// TOML representation of how the LLVM build is configured. 
-#[derive(Deserialize, Default)] -#[serde(deny_unknown_fields, rename_all = "kebab-case")] -struct Llvm { - enabled: Option, - ccache: Option, - ninja: Option, - assertions: Option, - optimize: Option, - release_debuginfo: Option, - version_check: Option, - static_libstdcpp: Option, - targets: Option, - experimental_targets: Option, - link_jobs: Option, - link_shared: Option, +#[derive(Deserialize)] +#[serde(default, deny_unknown_fields, rename_all = "kebab-case")] +pub struct Llvm { + pub enabled: bool, + ccache: StringOrBool, + pub ninja: bool, + pub assertions: bool, + pub optimize: bool, + pub release_debuginfo: bool, + pub version_check: bool, + pub static_libstdcpp: bool, + pub targets: String, + pub experimental_targets: String, + pub link_jobs: u32, + pub link_shared: bool, } -#[derive(Deserialize, Default, Clone)] -#[serde(deny_unknown_fields, rename_all = "kebab-case")] -struct Dist { - sign_folder: Option, - gpg_password_file: Option, - upload_addr: Option, - src_tarball: Option, +impl Llvm { + pub fn ccache(&self) -> Option { + match self.ccache { + StringOrBool::String(ref s) => Some(s.to_string()), + StringOrBool::Bool(true) => Some("ccache".to_string()), + StringOrBool::Bool(false) => None, + } + } +} + +impl Default for Llvm { + fn default() -> Llvm { + Llvm { + enabled: true, + ccache: StringOrBool::Bool(false), + ninja: false, + assertions: false, + optimize: true, + release_debuginfo: false, + version_check: true, + targets: String::from( + "X86;ARM;AArch64;Mips;PowerPC;SystemZ;MSP430;Sparc;NVPTX;Hexagon", + ), + experimental_targets: String::from("WebAssembly"), + link_jobs: 0, + static_libstdcpp: false, + link_shared: false, + } + } +} + +#[derive(Deserialize, Clone)] +#[serde(default, deny_unknown_fields, rename_all = "kebab-case")] +pub struct Dist { + pub sign_folder: Option, + pub gpg_password_file: Option, + pub upload_addr: Option, + pub src_tarball: bool, +} + +impl Default for Dist { + fn default() -> Dist { + Dist { + sign_folder: 
None, + gpg_password_file: None, + upload_addr: None, + src_tarball: true, + } + } } #[derive(Deserialize)] @@ -261,333 +291,303 @@ impl Default for StringOrBool { } /// TOML representation of how the Rust build is configured. -#[derive(Deserialize, Default)] -#[serde(deny_unknown_fields, rename_all = "kebab-case")] -struct Rust { +#[derive(Clone, Deserialize)] +#[serde(default, deny_unknown_fields, rename_all = "kebab-case")] +pub struct Rust { + pub thinlto: bool, + pub rpath: bool, + pub experimental_parallel_queries: bool, + pub use_jemalloc: bool, + pub backtrace: bool, // RUST_BACKTRACE support + pub default_linker: Option, + pub channel: String, + // Fallback musl-root for all targets + pub musl_root: Option, + pub optimize_tests: bool, + pub debuginfo_tests: bool, + pub codegen_tests: bool, + pub quiet_tests: bool, + pub test_miri: bool, + pub save_toolstates: Option, + pub codegen_backends: Vec, + pub wasm_syscall: bool, optimize: Option, codegen_units: Option, - thinlto: Option, debug_assertions: Option, debuginfo: Option, debuginfo_lines: Option, debuginfo_only_std: Option, - experimental_parallel_queries: Option, debug_jemalloc: Option, - use_jemalloc: Option, - backtrace: Option, - default_linker: Option, - channel: Option, - musl_root: Option, - rpath: Option, - optimize_tests: Option, - debuginfo_tests: Option, - codegen_tests: Option, ignore_git: Option, - debug: Option, - dist_src: Option, - quiet_tests: Option, - test_miri: Option, - save_toolstates: Option, - codegen_backends: Option>, - wasm_syscall: Option, + debug: bool, +} + +impl Rust { + pub fn debug_jemalloc(&self) -> bool { + self.debug_jemalloc.unwrap_or(self.debug) + } + + pub fn debuginfo(&self) -> bool { + self.debuginfo.unwrap_or(self.debug) + } + + fn is_dist_channel(&self) -> bool { + match &self.channel[..] 
{ + "stable" | "beta" | "nightly" => true, + _ => false, + } + } + + pub fn debuginfo_lines(&self) -> bool { + self.debuginfo_lines.unwrap_or(self.is_dist_channel()) + } + + pub fn debuginfo_only_std(&self) -> bool { + self.debuginfo_only_std.unwrap_or(self.is_dist_channel()) + } + + pub fn ignore_git(&self) -> bool { + self.ignore_git.unwrap_or(!self.is_dist_channel()) + } + + pub fn debug_assertions(&self) -> bool { + self.debug_assertions.unwrap_or(self.debug) + } + + pub fn optimize(&self) -> bool { + self.optimize.unwrap_or(!self.debug) + } + + pub fn codegen_units(&self) -> Option { + match self.codegen_units { + Some(0) => Some(num_cpus::get() as u32), + Some(n) => Some(n), + None => None, + } + } +} + +impl Default for Rust { + fn default() -> Rust { + Rust { + debug: false, + debug_assertions: None, + debuginfo: None, + debuginfo_lines: None, + debuginfo_only_std: None, + optimize: None, + ignore_git: None, + debug_jemalloc: None, + thinlto: true, + optimize_tests: true, + debuginfo_tests: false, + codegen_tests: true, + rpath: true, + use_jemalloc: true, + backtrace: true, + channel: String::from("dev"), + quiet_tests: false, + test_miri: false, + wasm_syscall: false, + codegen_backends: vec![String::from("llvm")], + codegen_units: None, + default_linker: None, + experimental_parallel_queries: false, + musl_root: None, + save_toolstates: None, + } + } } /// TOML representation of how each build target is configured. 
#[derive(Deserialize, Default)] #[serde(deny_unknown_fields, rename_all = "kebab-case")] struct TomlTarget { - llvm_config: Option, - jemalloc: Option, - cc: Option, - cxx: Option, - ar: Option, - linker: Option, - android_ndk: Option, + llvm_config: Option, + jemalloc: Option, + cc: Option, + cxx: Option, + ar: Option, + linker: Option, + android_ndk: Option, crt_static: Option, - musl_root: Option, - qemu_rootfs: Option, + musl_root: Option, + qemu_rootfs: Option, } impl Config { + #[cfg(test)] + pub fn for_test() -> Config { + Config { + exclude: Vec::new(), + paths: Vec::new(), + on_fail: None, + stage: 2, + src: env::var_os("SRC").map(PathBuf::from).expect("'SRC' to be defined"), + jobs: None, + cmd: Subcommand::default(), + incremental: false, + keep_stage: None, + rustc_error_format: None, + + run_host_only: true, + is_sudo: false, + + general: Build::default(), + install: Install::default(), + llvm: Llvm::default(), + rust: Rust::default(), + target_config: Default::default(), + dist: Dist::default(), + } + } + pub fn parse(args: &[String]) -> Config { let flags = Flags::parse(&args); - let file = flags.config.clone(); - let mut config = Config::default(); - config.exclude = flags.exclude; - config.llvm_enabled = true; - config.llvm_optimize = true; - config.llvm_version_check = true; - config.use_jemalloc = true; - config.backtrace = true; - config.rust_optimize = true; - config.rust_optimize_tests = true; - config.submodules = true; - config.docs = true; - config.rust_rpath = true; - config.channel = "dev".to_string(); - config.codegen_tests = true; - config.ignore_git = false; - config.rust_dist_src = true; - config.test_miri = false; - config.rust_codegen_backends = vec![INTERNER.intern_str("llvm")]; - - config.rustc_error_format = flags.rustc_error_format; - config.on_fail = flags.on_fail; - config.stage = flags.stage; - config.src = flags.src; - config.jobs = flags.jobs; - config.cmd = flags.cmd; - config.incremental = flags.incremental; - 
config.keep_stage = flags.keep_stage; - - // If --target was specified but --host wasn't specified, don't run any host-only tests. - config.run_host_only = flags.host.is_empty() && !flags.target.is_empty(); - - let toml = file.map(|file| { - let mut f = t!(File::open(&file)); - let mut contents = String::new(); - t!(f.read_to_string(&mut contents)); - match toml::from_str(&contents) { - Ok(table) => table, - Err(err) => { - println!("failed to parse TOML configuration '{}': {}", - file.display(), err); - process::exit(2); + let mut toml: TomlConfig = flags + .config + .as_ref() + .map(|file| { + let contents = t!(fs::read_string(&file)); + match toml::from_str(&contents) { + Ok(table) => table, + Err(err) => { + println!( + "failed to parse TOML configuration '{}': {}", + file.display(), + err + ); + process::exit(2); + } } - } - }).unwrap_or_else(|| TomlConfig::default()); - - let build = toml.build.clone().unwrap_or(Build::default()); - set(&mut config.build, build.build.clone().map(|x| INTERNER.intern_string(x))); - set(&mut config.build, flags.build); - if config.build.is_empty() { - // set by bootstrap.py - config.build = INTERNER.intern_str(&env::var("BUILD").unwrap()); - } - config.hosts.push(config.build.clone()); - for host in build.host.iter() { - let host = INTERNER.intern_str(host); - if !config.hosts.contains(&host) { - config.hosts.push(host); - } - } - for target in config.hosts.iter().cloned() - .chain(build.target.iter().map(|s| INTERNER.intern_str(s))) - { - if !config.targets.contains(&target) { - config.targets.push(target); - } - } - config.hosts = if !flags.host.is_empty() { - flags.host + }) + .unwrap_or_default(); + + let mut hosts = if !flags.host.is_empty() { + flags.host.clone() } else { - config.hosts + toml.build + .host + .into_iter() + .chain(iter::once(toml.build.build)) + .collect::>>() }; - config.targets = if !flags.target.is_empty() { - flags.target + hosts.sort(); + hosts.dedup(); + + let mut targets = if 
!flags.target.is_empty() { + flags.target.clone() } else { - config.targets + toml.build + .target + .into_iter() + .chain(hosts.clone()) + .collect::>>() }; - - - config.nodejs = build.nodejs.map(PathBuf::from); - config.gdb = build.gdb.map(PathBuf::from); - config.python = build.python.map(PathBuf::from); - set(&mut config.low_priority, build.low_priority); - set(&mut config.compiler_docs, build.compiler_docs); - set(&mut config.docs, build.docs); - set(&mut config.submodules, build.submodules); - set(&mut config.locked_deps, build.locked_deps); - set(&mut config.vendor, build.vendor); - set(&mut config.full_bootstrap, build.full_bootstrap); - set(&mut config.extended, build.extended); - config.tools = build.tools; - set(&mut config.verbose, build.verbose); - set(&mut config.sanitizers, build.sanitizers); - set(&mut config.profiler, build.profiler); - set(&mut config.openssl_static, build.openssl_static); - set(&mut config.configure_args, build.configure_args); - set(&mut config.local_rebuild, build.local_rebuild); - config.verbose = cmp::max(config.verbose, flags.verbose); - - if let Some(ref install) = toml.install { - config.prefix = install.prefix.clone().map(PathBuf::from); - config.sysconfdir = install.sysconfdir.clone().map(PathBuf::from); - config.docdir = install.docdir.clone().map(PathBuf::from); - config.bindir = install.bindir.clone().map(PathBuf::from); - config.libdir = install.libdir.clone().map(PathBuf::from); - config.mandir = install.mandir.clone().map(PathBuf::from); - } - - // Store off these values as options because if they're not provided - // we'll infer default values for them later - let mut thinlto = None; - let mut llvm_assertions = None; - let mut debuginfo_lines = None; - let mut debuginfo_only_std = None; - let mut debug = None; - let mut debug_jemalloc = None; - let mut debuginfo = None; - let mut debug_assertions = None; - let mut optimize = None; - let mut ignore_git = None; - - if let Some(ref llvm) = toml.llvm { - match 
llvm.ccache { - Some(StringOrBool::String(ref s)) => { - config.ccache = Some(s.to_string()) - } - Some(StringOrBool::Bool(true)) => { - config.ccache = Some("ccache".to_string()); - } - Some(StringOrBool::Bool(false)) | None => {} - } - set(&mut config.ninja, llvm.ninja); - set(&mut config.llvm_enabled, llvm.enabled); - llvm_assertions = llvm.assertions; - set(&mut config.llvm_optimize, llvm.optimize); - set(&mut config.llvm_release_debuginfo, llvm.release_debuginfo); - set(&mut config.llvm_version_check, llvm.version_check); - set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp); - set(&mut config.llvm_link_shared, llvm.link_shared); - config.llvm_targets = llvm.targets.clone(); - config.llvm_experimental_targets = llvm.experimental_targets.clone() - .unwrap_or("WebAssembly".to_string()); - config.llvm_link_jobs = llvm.link_jobs; + targets.sort(); + targets.dedup(); + + toml.build.host = hosts; + toml.build.target = targets; + toml.build.verbose = cmp::max(toml.build.verbose, flags.verbose); + + let mut target_config = HashMap::new(); + for (triple, cfg) in toml.target { + let cwd = t!(env::current_dir()); + target_config.insert( + triple.intern(), + Target { + llvm_config: cfg.llvm_config.map(|p| cwd.join(p)), + jemalloc: cfg.jemalloc.map(|p| cwd.join(p)), + ndk: cfg.android_ndk.map(|p| cwd.join(p)), + cc: cfg.cc, + cxx: cfg.cxx, + ar: cfg.ar, + linker: cfg.linker, + crt_static: cfg.crt_static, + musl_root: cfg.musl_root, + qemu_rootfs: cfg.qemu_rootfs, + }, + ); } - if let Some(ref rust) = toml.rust { - debug = rust.debug; - debug_assertions = rust.debug_assertions; - debuginfo = rust.debuginfo; - debuginfo_lines = rust.debuginfo_lines; - debuginfo_only_std = rust.debuginfo_only_std; - optimize = rust.optimize; - ignore_git = rust.ignore_git; - debug_jemalloc = rust.debug_jemalloc; - thinlto = rust.thinlto; - set(&mut config.rust_optimize_tests, rust.optimize_tests); - set(&mut config.rust_debuginfo_tests, rust.debuginfo_tests); - set(&mut 
config.codegen_tests, rust.codegen_tests); - set(&mut config.rust_rpath, rust.rpath); - set(&mut config.use_jemalloc, rust.use_jemalloc); - set(&mut config.backtrace, rust.backtrace); - set(&mut config.channel, rust.channel.clone()); - set(&mut config.rust_dist_src, rust.dist_src); - set(&mut config.quiet_tests, rust.quiet_tests); - set(&mut config.test_miri, rust.test_miri); - set(&mut config.wasm_syscall, rust.wasm_syscall); - config.rustc_parallel_queries = rust.experimental_parallel_queries.unwrap_or(false); - config.rustc_default_linker = rust.default_linker.clone(); - config.musl_root = rust.musl_root.clone().map(PathBuf::from); - config.save_toolstates = rust.save_toolstates.clone().map(PathBuf::from); - - if let Some(ref backends) = rust.codegen_backends { - config.rust_codegen_backends = backends.iter() - .map(|s| INTERNER.intern_str(s)) - .collect(); - } - - match rust.codegen_units { - Some(0) => config.rust_codegen_units = Some(num_cpus::get() as u32), - Some(n) => config.rust_codegen_units = Some(n), - None => {} - } + // If local-rust is the same major.minor as the current version, then force a local-rebuild + let local_version_verbose = output( + Command::new(&toml.build.initial_rustc) + .arg("--version") + .arg("--verbose"), + ); + let local_release = local_version_verbose + .lines() + .filter(|x| x.starts_with("release:")) + .next() + .unwrap() + .trim_left_matches("release:") + .trim(); + let my_version = channel::CFG_RELEASE_NUM; + if local_release + .split('.') + .take(2) + .eq(my_version.split('.').take(2)) + { + eprintln!("auto-detected local rebuild"); + toml.build.local_rebuild = true; } - if let Some(ref t) = toml.target { - for (triple, cfg) in t { - let mut target = Target::default(); - - if let Some(ref s) = cfg.llvm_config { - target.llvm_config = Some(env::current_dir().unwrap().join(s)); - } - if let Some(ref s) = cfg.jemalloc { - target.jemalloc = Some(env::current_dir().unwrap().join(s)); - } - if let Some(ref s) = cfg.android_ndk { 
- target.ndk = Some(env::current_dir().unwrap().join(s)); - } - target.cc = cfg.cc.clone().map(PathBuf::from); - target.cxx = cfg.cxx.clone().map(PathBuf::from); - target.ar = cfg.ar.clone().map(PathBuf::from); - target.linker = cfg.linker.clone().map(PathBuf::from); - target.crt_static = cfg.crt_static.clone(); - target.musl_root = cfg.musl_root.clone().map(PathBuf::from); - target.qemu_rootfs = cfg.qemu_rootfs.clone().map(PathBuf::from); - - config.target_config.insert(INTERNER.intern_string(triple.clone()), target); - } + // The msvc hosts don't use jemalloc, turn it off globally to + // avoid packaging the dummy liballoc_jemalloc on that platform. + if toml.build.host.iter().any(|host| host.contains("msvc")) { + toml.rust.use_jemalloc = false; } - if let Some(ref t) = toml.dist { - config.dist_sign_folder = t.sign_folder.clone().map(PathBuf::from); - config.dist_gpg_password_file = t.gpg_password_file.clone().map(PathBuf::from); - config.dist_upload_addr = t.upload_addr.clone(); - set(&mut config.rust_dist_src, t.src_tarball); + Config { + exclude: flags.exclude, + paths: flags.paths, + on_fail: flags.on_fail, + rustc_error_format: flags.rustc_error_format, + stage: flags.stage.unwrap_or(2), + src: flags.src, + jobs: flags.jobs, + cmd: flags.cmd, + incremental: flags.incremental, + keep_stage: flags.keep_stage, + + // If --target was specified and --host wasn't specified, + // then run any host-only tests. 
+ run_host_only: !(flags.host.is_empty() && !flags.target.is_empty()), + is_sudo: match env::var_os("SUDO_USER") { + Some(sudo_user) => match env::var_os("USER") { + Some(user) => user != sudo_user, + None => false, + }, + None => false, + }, + + general: toml.build, + install: toml.install, + llvm: toml.llvm, + rust: toml.rust, + target_config: target_config, + dist: toml.dist, } - - let cwd = t!(env::current_dir()); - let out = cwd.join("build"); - - let stage0_root = out.join(&config.build).join("stage0/bin"); - config.initial_rustc = match build.rustc { - Some(s) => PathBuf::from(s), - None => stage0_root.join(exe("rustc", &config.build)), - }; - config.initial_cargo = match build.cargo { - Some(s) => PathBuf::from(s), - None => stage0_root.join(exe("cargo", &config.build)), - }; - - // Now that we've reached the end of our configuration, infer the - // default values for all options that we haven't otherwise stored yet. - - let default = false; - config.llvm_assertions = llvm_assertions.unwrap_or(default); - - let default = match &config.channel[..] { - "stable" | "beta" | "nightly" => true, - _ => false, - }; - config.rust_thinlto = thinlto.unwrap_or(true); - config.rust_debuginfo_lines = debuginfo_lines.unwrap_or(default); - config.rust_debuginfo_only_std = debuginfo_only_std.unwrap_or(default); - - let default = debug == Some(true); - config.debug_jemalloc = debug_jemalloc.unwrap_or(default); - config.rust_debuginfo = debuginfo.unwrap_or(default); - config.rust_debug_assertions = debug_assertions.unwrap_or(default); - config.rust_optimize = optimize.unwrap_or(!default); - - let default = config.channel == "dev"; - config.ignore_git = ignore_git.unwrap_or(default); - - config } /// Try to find the relative path of `libdir`. 
- pub fn libdir_relative(&self) -> Option<&Path> { - let libdir = self.libdir.as_ref()?; + pub fn libdir_relative(&self) -> &Path { + let libdir = &self.install.libdir; if libdir.is_relative() { - Some(libdir) + libdir } else { // Try to make it relative to the prefix. - libdir.strip_prefix(self.prefix.as_ref()?).ok() + libdir.strip_prefix(&self.install.prefix).unwrap_or(Path::new("lib")) } } pub fn verbose(&self) -> bool { - self.verbose > 0 + self.general.verbose > 0 } pub fn very_verbose(&self) -> bool { - self.verbose > 1 - } -} - -fn set(field: &mut T, val: Option) { - if let Some(v) = val { - *field = v; + self.general.verbose > 1 } } diff --git a/src/bootstrap/configure.py b/src/bootstrap/configure.py index 99a3ee4e4c369..435261a22737a 100755 --- a/src/bootstrap/configure.py +++ b/src/bootstrap/configure.py @@ -62,7 +62,7 @@ def v(*args): o("locked-deps", "build.locked-deps", "force Cargo.lock to be up to date") o("vendor", "build.vendor", "enable usage of vendored Rust crates") o("sanitizers", "build.sanitizers", "build the sanitizer runtimes (asan, lsan, msan, tsan)") -o("dist-src", "rust.dist-src", "when building tarballs enables building a source tarball") +o("dist-src", "dist.src-tarball", "when building tarballs enables building a source tarball") o("cargo-openssl-static", "build.openssl-static", "static openssl in cargo") o("profiler", "build.profiler", "build the profiler runtime") o("emscripten", None, "compile the emscripten backend as well as LLVM") diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs index e7aed7eb4fead..51155df785dd6 100644 --- a/src/bootstrap/dist.rs +++ b/src/bootstrap/dist.rs @@ -19,49 +19,49 @@ //! pieces of `rustup.rs`! 
use std::env; -use std::fs::{self, File}; -use std::io::{self, Read, Write}; -use std::path::{PathBuf, Path}; +use std::io::Write; +use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use build_helper::output; -use {Build, Compiler, Mode}; +use {Compiler, Mode}; use channel; -use util::{cp_r, libdir, is_dylib, cp_filtered, copy, replace_in_file}; +use util::{copy, cp_filtered, cp_r, is_dylib, libdir, replace_in_file}; use builder::{Builder, RunConfig, ShouldRun, Step}; use compile; use native; use tool::{self, Tool}; -use cache::{INTERNER, Interned}; +use cache::{Intern, Interned}; use time; +use fs; -pub fn pkgname(build: &Build, component: &str) -> String { +pub fn pkgname(builder: &Builder, component: &str) -> String { if component == "cargo" { - format!("{}-{}", component, build.cargo_package_vers()) + format!("{}-{}", component, builder.cargo_package_vers()) } else if component == "rls" { - format!("{}-{}", component, build.rls_package_vers()) + format!("{}-{}", component, builder.rls_package_vers()) } else if component == "rustfmt" { - format!("{}-{}", component, build.rustfmt_package_vers()) + format!("{}-{}", component, builder.rustfmt_package_vers()) } else { assert!(component.starts_with("rust")); - format!("{}-{}", component, build.rust_package_vers()) + format!("{}-{}", component, builder.rust_package_vers()) } } -fn distdir(build: &Build) -> PathBuf { - build.out.join("dist") +fn distdir(builder: &Builder) -> PathBuf { + builder.config.general.out.join("dist") } -pub fn tmpdir(build: &Build) -> PathBuf { - build.out.join("tmp/dist") +pub fn tmpdir(builder: &Builder) -> PathBuf { + builder.config.general.out.join("tmp/dist") } fn rust_installer(builder: &Builder) -> Command { builder.tool_cmd(Tool::RustInstaller) } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct Docs { pub stage: u32, pub host: Interned, @@ -70,7 +70,6 @@ pub struct Docs { impl Step for 
Docs { type Output = PathBuf; const DEFAULT: bool = true; - const ONLY_BUILD_TARGETS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { run.path("src/doc") @@ -85,51 +84,55 @@ impl Step for Docs { /// Builds the `rust-docs` installer component. fn run(self, builder: &Builder) -> PathBuf { - let build = builder.build; let host = self.host; - let name = pkgname(build, "rust-docs"); + let name = pkgname(builder, "rust-docs"); println!("Dist docs ({})", host); - if !build.config.docs { + if !builder.config.general.docs { println!("\tskipping - docs disabled"); - return distdir(build).join(format!("{}-{}.tar.gz", name, host)); + return distdir(builder).join(format!("{}-{}.tar.gz", name, host)); } builder.default_doc(None); - let image = tmpdir(build).join(format!("{}-{}-image", name, host)); + let image = tmpdir(builder).join(format!("{}-{}-image", name, host)); let _ = fs::remove_dir_all(&image); let dst = image.join("share/doc/rust/html"); t!(fs::create_dir_all(&dst)); - let src = build.out.join(host).join("doc"); + let src = builder.config.general.out.join(host).join("doc"); cp_r(&src, &dst); let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust-Documentation") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=Rust-documentation-is-installed.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(build)) - .arg("--output-dir").arg(&distdir(build)) - .arg(format!("--package-name={}-{}", name, host)) - .arg("--component-name=rust-docs") - .arg("--legacy-manifest-dirs=rustlib,cargo") - .arg("--bulk-dirs=share/doc/rust/html"); - build.run(&mut cmd); + .arg("--product-name=Rust-Documentation") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=Rust-documentation-is-installed.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg(format!("--package-name={}-{}", name, host)) + 
.arg("--component-name=rust-docs") + .arg("--legacy-manifest-dirs=rustlib,cargo") + .arg("--bulk-dirs=share/doc/rust/html"); + builder.run(&mut cmd); t!(fs::remove_dir_all(&image)); // As part of this step, *also* copy the docs directory to a directory which // buildbot typically uploads. - if host == build.build { - let dst = distdir(build).join("doc").join(build.rust_package_vers()); + if host == builder.config.general.build { + let dst = distdir(builder) + .join("doc") + .join(builder.rust_package_vers()); t!(fs::create_dir_all(&dst)); cp_r(&src, &dst); } - distdir(build).join(format!("{}-{}.tar.gz", name, host)) + distdir(builder).join(format!("{}-{}.tar.gz", name, host)) } } @@ -137,10 +140,7 @@ fn find_files(files: &[&str], path: &[PathBuf]) -> Vec { let mut found = Vec::with_capacity(files.len()); for file in files { - let file_path = - path.iter() - .map(|dir| dir.join(file)) - .find(|p| p.exists()); + let file_path = path.iter().map(|dir| dir.join(file)).find(|p| p.exists()); if let Some(file_path) = file_path { found.push(file_path); @@ -153,10 +153,13 @@ fn find_files(files: &[&str], path: &[PathBuf]) -> Vec { } fn make_win_dist( - rust_root: &Path, plat_root: &Path, target_triple: Interned, build: &Build + rust_root: &Path, + plat_root: &Path, + target_triple: Interned, + builder: &Builder, ) { //Ask gcc where it keeps its stuff - let mut cmd = Command::new(build.cc(target_triple)); + let mut cmd = Command::new(builder.cc(target_triple)); cmd.arg("-print-search-dirs"); let gcc_out = output(&mut cmd); @@ -167,11 +170,10 @@ fn make_win_dist( let idx = line.find(':').unwrap(); let key = &line[..idx]; let trim_chars: &[_] = &[' ', '=']; - let value = - line[(idx + 1)..] - .trim_left_matches(trim_chars) - .split(';') - .map(PathBuf::from); + let value = line[(idx + 1)..] 
+ .trim_left_matches(trim_chars) + .split(';') + .map(PathBuf::from); if key == "programs" { bin_path.extend(value); @@ -188,7 +190,8 @@ fn make_win_dist( rustc_dlls.push("libgcc_s_seh-1.dll"); } - let target_libs = [ //MinGW libs + let target_libs = [ + //MinGW libs "libgcc.a", "libgcc_eh.a", "libgcc_s.a", @@ -249,29 +252,36 @@ fn make_win_dist( } //Copy platform tools to platform-specific bin directory - let target_bin_dir = plat_root.join("lib").join("rustlib").join(target_triple).join("bin"); + let target_bin_dir = plat_root + .join("lib") + .join("rustlib") + .join(target_triple) + .join("bin"); fs::create_dir_all(&target_bin_dir).expect("creating target_bin_dir failed"); for src in target_tools { copy_to_folder(&src, &target_bin_dir); } //Copy platform libs to platform-specific lib directory - let target_lib_dir = plat_root.join("lib").join("rustlib").join(target_triple).join("lib"); + let target_lib_dir = plat_root + .join("lib") + .join("rustlib") + .join(target_triple) + .join("lib"); fs::create_dir_all(&target_lib_dir).expect("creating target_lib_dir failed"); for src in target_libs { copy_to_folder(&src, &target_lib_dir); } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct Mingw { - host: Interned, + pub host: Interned, } impl Step for Mingw { type Output = Option; const DEFAULT: bool = true; - const ONLY_BUILD_TARGETS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { run.never() @@ -286,7 +296,6 @@ impl Step for Mingw { /// This contains all the bits and pieces to run the MinGW Windows targets /// without any extra installed software (e.g. we bundle gcc, libraries, etc). 
fn run(self, builder: &Builder) -> Option { - let build = builder.build; let host = self.host; if !host.contains("pc-windows-gnu") { @@ -294,8 +303,8 @@ impl Step for Mingw { } println!("Dist mingw ({})", host); - let name = pkgname(build, "rust-mingw"); - let image = tmpdir(build).join(format!("{}-{}-image", name, host)); + let name = pkgname(builder, "rust-mingw"); + let image = tmpdir(builder).join(format!("{}-{}-image", name, host)); let _ = fs::remove_dir_all(&image); t!(fs::create_dir_all(&image)); @@ -303,26 +312,29 @@ impl Step for Mingw { // thrown away (this contains the runtime DLLs included in the rustc package // above) and the second argument is where to place all the MinGW components // (which is what we want). - make_win_dist(&tmpdir(build), &image, host, &build); + make_win_dist(&tmpdir(builder), &image, host, &builder); let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust-MinGW") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=Rust-MinGW-is-installed.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(build)) - .arg("--output-dir").arg(&distdir(build)) - .arg(format!("--package-name={}-{}", name, host)) - .arg("--component-name=rust-mingw") - .arg("--legacy-manifest-dirs=rustlib,cargo"); - build.run(&mut cmd); + .arg("--product-name=Rust-MinGW") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=Rust-MinGW-is-installed.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg(format!("--package-name={}-{}", name, host)) + .arg("--component-name=rust-mingw") + .arg("--legacy-manifest-dirs=rustlib,cargo"); + builder.run(&mut cmd); t!(fs::remove_dir_all(&image)); - Some(distdir(build).join(format!("{}-{}.tar.gz", name, host))) + Some(distdir(builder).join(format!("{}-{}.tar.gz", name, host))) } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Hash, 
PartialEq, Eq, PartialOrd, Ord)] pub struct Rustc { pub compiler: Compiler, } @@ -331,7 +343,6 @@ impl Step for Rustc { type Output = PathBuf; const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; - const ONLY_BUILD_TARGETS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { run.path("src/librustc") @@ -339,21 +350,20 @@ impl Step for Rustc { fn make_run(run: RunConfig) { run.builder.ensure(Rustc { - compiler: run.builder.compiler(run.builder.top_stage, run.target), + compiler: run.builder.compiler(run.builder.top_stage, run.host), }); } /// Creates the `rustc` installer component. fn run(self, builder: &Builder) -> PathBuf { - let build = builder.build; let compiler = self.compiler; let host = self.compiler.host; println!("Dist rustc stage{} ({})", compiler.stage, compiler.host); - let name = pkgname(build, "rustc"); - let image = tmpdir(build).join(format!("{}-{}-image", name, host)); + let name = pkgname(builder, "rustc"); + let image = tmpdir(builder).join(format!("{}-{}-image", name, host)); let _ = fs::remove_dir_all(&image); - let overlay = tmpdir(build).join(format!("{}-{}-overlay", name, host)); + let overlay = tmpdir(builder).join(format!("{}-{}-overlay", name, host)); let _ = fs::remove_dir_all(&overlay); // Prepare the rustc "image", what will actually end up getting installed @@ -362,17 +372,17 @@ impl Step for Rustc { // Prepare the overlay which is part of the tarball but won't actually be // installed let cp = |file: &str| { - install(&build.src.join(file), &overlay, 0o644); + install(&builder.config.src.join(file), &overlay, 0o644); }; cp("COPYRIGHT"); cp("LICENSE-APACHE"); cp("LICENSE-MIT"); cp("README.md"); // tiny morsel of metadata is used by rust-packaging - let version = build.rust_version(); - t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes())); - if let Some(sha) = build.rust_sha() { - t!(t!(File::create(overlay.join("git-commit-hash"))).write_all(sha.as_bytes())); + let version = builder.rust_version(); 
+ t!(fs::write(&overlay.join("version"), &version)); + if let Some(sha) = builder.rust_sha() { + t!(fs::write(&overlay.join("git-commit-hash"), &sha)); } // On MinGW we've got a few runtime DLL dependencies that we need to @@ -386,35 +396,38 @@ impl Step for Rustc { // install will *also* include the rust-mingw package, which also needs // licenses, so to be safe we just include it here in all MinGW packages. if host.contains("pc-windows-gnu") { - make_win_dist(&image, &tmpdir(build), host, build); + make_win_dist(&image, &tmpdir(builder), host, builder); let dst = image.join("share/doc"); t!(fs::create_dir_all(&dst)); - cp_r(&build.src.join("src/etc/third-party"), &dst); + cp_r(&builder.config.src.join("src/etc/third-party"), &dst); } // Finally, wrap everything up in a nice tarball! let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=Rust-is-ready-to-roll.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(build)) - .arg("--output-dir").arg(&distdir(build)) - .arg("--non-installed-overlay").arg(&overlay) - .arg(format!("--package-name={}-{}", name, host)) - .arg("--component-name=rustc") - .arg("--legacy-manifest-dirs=rustlib,cargo"); - build.run(&mut cmd); + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=Rust-is-ready-to-roll.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg("--non-installed-overlay") + .arg(&overlay) + .arg(format!("--package-name={}-{}", name, host)) + .arg("--component-name=rustc") + .arg("--legacy-manifest-dirs=rustlib,cargo"); + builder.run(&mut cmd); t!(fs::remove_dir_all(&image)); t!(fs::remove_dir_all(&overlay)); - return distdir(build).join(format!("{}-{}.tar.gz", name, host)); + return distdir(builder).join(format!("{}-{}.tar.gz", name, host)); fn prepare_image(builder: &Builder, 
compiler: Compiler, image: &Path) { let host = compiler.host; - let build = builder.build; let src = builder.sysroot(compiler); let libdir = libdir(&host); @@ -426,7 +439,7 @@ impl Step for Rustc { // Copy runtime DLLs needed by the compiler if libdir != "bin" { - for entry in t!(src.join(libdir).read_dir()).map(|e| t!(e)) { + for entry in t!(fs::read_dir(src.join(libdir))).map(|e| t!(e)) { let name = entry.file_name(); if let Some(s) = name.to_str() { if is_dylib(s) { @@ -445,7 +458,7 @@ impl Step for Rustc { // Man pages t!(fs::create_dir_all(image.join("share/man/man1"))); - let man_src = build.src.join("src/doc/man"); + let man_src = builder.config.src.join("src/doc/man"); let man_dst = image.join("share/man/man1"); let month_year = t!(time::strftime("%B %Y", &time::now())); // don't use our `bootstrap::util::{copy, cp_r}`, because those try @@ -456,20 +469,28 @@ impl Step for Rustc { let page_dst = man_dst.join(file_entry.file_name()); t!(fs::copy(&page_src, &page_dst)); // template in month/year and version number - replace_in_file(&page_dst, - &[("", &month_year), - ("", channel::CFG_RELEASE_NUM)]); + replace_in_file( + &page_dst, + &[ + ("", &month_year), + ("", channel::CFG_RELEASE_NUM), + ], + ); } // Debugger scripts builder.ensure(DebuggerScripts { - sysroot: INTERNER.intern_path(image.to_owned()), + sysroot: image.intern(), host, }); // Misc license info let cp = |file: &str| { - install(&build.src.join(file), &image.join("share/doc/rust"), 0o644); + install( + &builder.config.src.join(file), + &image.join("share/doc/rust"), + 0o644, + ); }; cp("COPYRIGHT"); cp("LICENSE-APACHE"); @@ -479,7 +500,7 @@ impl Step for Rustc { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct DebuggerScripts { pub sysroot: Interned, pub host: Interned, @@ -494,25 +515,28 @@ impl Step for DebuggerScripts { fn make_run(run: RunConfig) { run.builder.ensure(DebuggerScripts { - sysroot: 
run.builder.sysroot(run.builder.compiler(run.builder.top_stage, run.host)), + sysroot: run.builder + .sysroot(run.builder.compiler(run.builder.top_stage, run.host)), host: run.target, }); } /// Copies debugger scripts for `target` into the `sysroot` specified. fn run(self, builder: &Builder) { - let build = builder.build; let host = self.host; let sysroot = self.sysroot; let dst = sysroot.join("lib/rustlib/etc"); t!(fs::create_dir_all(&dst)); let cp_debugger_script = |file: &str| { - install(&build.src.join("src/etc/").join(file), &dst, 0o644); + install(&builder.config.src.join("src/etc/").join(file), &dst, 0o644); }; if host.contains("windows-msvc") { // windbg debugger scripts - install(&build.src.join("src/etc/rust-windbg.cmd"), &sysroot.join("bin"), - 0o755); + install( + &builder.config.src.join("src/etc/rust-windbg.cmd"), + &sysroot.join("bin"), + 0o755, + ); cp_debugger_script("natvis/intrinsic.natvis"); cp_debugger_script("natvis/liballoc.natvis"); @@ -521,22 +545,28 @@ impl Step for DebuggerScripts { cp_debugger_script("debugger_pretty_printers_common.py"); // gdb debugger scripts - install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"), - 0o755); + install( + &builder.config.src.join("src/etc/rust-gdb"), + &sysroot.join("bin"), + 0o755, + ); cp_debugger_script("gdb_load_rust_pretty_printers.py"); cp_debugger_script("gdb_rust_pretty_printing.py"); // lldb debugger scripts - install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"), - 0o755); + install( + &builder.config.src.join("src/etc/rust-lldb"), + &sysroot.join("bin"), + 0o755, + ); cp_debugger_script("lldb_rust_formatters.py"); } } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct Std { pub compiler: Compiler, pub target: Interned, @@ -545,7 +575,6 @@ pub struct Std { impl Step for Std { type Output = PathBuf; const DEFAULT: bool = true; - const ONLY_BUILD_TARGETS: bool = true; fn should_run(run: 
ShouldRun) -> ShouldRun { run.path("src/libstd") @@ -553,36 +582,39 @@ impl Step for Std { fn make_run(run: RunConfig) { run.builder.ensure(Std { - compiler: run.builder.compiler(run.builder.top_stage, run.host), + compiler: run.builder + .compiler(run.builder.top_stage, run.builder.config.general.build), target: run.target, }); } fn run(self, builder: &Builder) -> PathBuf { - let build = builder.build; let compiler = self.compiler; let target = self.target; - let name = pkgname(build, "rust-std"); - println!("Dist std stage{} ({} -> {})", compiler.stage, &compiler.host, target); + let name = pkgname(builder, "rust-std"); + println!( + "Dist std stage{} ({} -> {})", + compiler.stage, &compiler.host, target + ); // The only true set of target libraries came from the build triple, so // let's reduce redundant work by only producing archives from that host. - if compiler.host != build.build { + if compiler.host != builder.config.general.build { println!("\tskipping, not a build host"); - return distdir(build).join(format!("{}-{}.tar.gz", name, target)); + return distdir(builder).join(format!("{}-{}.tar.gz", name, target)); } // We want to package up as many target libraries as possible // for the `rust-std` package, so if this is a host target we // depend on librustc and otherwise we just depend on libtest. 
- if build.hosts.iter().any(|t| t == target) { + if builder.config.general.host.iter().any(|t| t == target) { builder.ensure(compile::Rustc { compiler, target }); } else { builder.ensure(compile::Test { compiler, target }); } - let image = tmpdir(build).join(format!("{}-{}-image", name, target)); + let image = tmpdir(builder).join(format!("{}-{}-image", name, target)); let _ = fs::remove_dir_all(&image); let dst = image.join("lib/rustlib").join(target); @@ -595,22 +627,25 @@ impl Step for Std { let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=std-is-standing-at-the-ready.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(build)) - .arg("--output-dir").arg(&distdir(build)) - .arg(format!("--package-name={}-{}", name, target)) - .arg(format!("--component-name=rust-std-{}", target)) - .arg("--legacy-manifest-dirs=rustlib,cargo"); - build.run(&mut cmd); + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=std-is-standing-at-the-ready.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg(format!("--package-name={}-{}", name, target)) + .arg(format!("--component-name=rust-std-{}", target)) + .arg("--legacy-manifest-dirs=rustlib,cargo"); + builder.run(&mut cmd); t!(fs::remove_dir_all(&image)); - distdir(build).join(format!("{}-{}.tar.gz", name, target)) + distdir(builder).join(format!("{}-{}.tar.gz", name, target)) } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct Analysis { pub compiler: Compiler, pub target: Interned, @@ -619,48 +654,51 @@ pub struct Analysis { impl Step for Analysis { type Output = PathBuf; const DEFAULT: bool = true; - const ONLY_BUILD_TARGETS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { let builder = 
run.builder; - run.path("analysis").default_condition(builder.build.config.extended) + run.path("analysis") + .default_condition(builder.config.general.extended) } fn make_run(run: RunConfig) { run.builder.ensure(Analysis { - compiler: run.builder.compiler(run.builder.top_stage, run.host), + compiler: run.builder + .compiler(run.builder.top_stage, run.builder.config.general.build), target: run.target, }); } /// Creates a tarball of save-analysis metadata, if available. fn run(self, builder: &Builder) -> PathBuf { - let build = builder.build; let compiler = self.compiler; let target = self.target; - assert!(build.config.extended); + assert!(builder.config.general.extended); println!("Dist analysis"); - let name = pkgname(build, "rust-analysis"); + let name = pkgname(builder, "rust-analysis"); - if &compiler.host != build.build { + if &compiler.host != builder.config.general.build { println!("\tskipping, not a build host"); - return distdir(build).join(format!("{}-{}.tar.gz", name, target)); + return distdir(builder).join(format!("{}-{}.tar.gz", name, target)); } builder.ensure(Std { compiler, target }); // Package save-analysis from stage1 if not doing a full bootstrap, as the // stage2 artifacts is simply copied from stage1 in that case. 
- let compiler = if build.force_use_stage1(compiler, target) { + let compiler = if builder.force_use_stage1(compiler, target) { builder.compiler(1, compiler.host) } else { compiler.clone() }; - let image = tmpdir(build).join(format!("{}-{}-image", name, target)); + let image = tmpdir(builder).join(format!("{}-{}-image", name, target)); - let src = build.stage_out(compiler, Mode::Libstd) - .join(target).join(build.cargo_dir()).join("deps"); + let src = builder + .stage_out(compiler, Mode::Libstd) + .join(target) + .join(builder.cargo_dir()) + .join("deps"); let image_src = src.join("save-analysis"); let dst = image.join("lib/rustlib").join(target).join("analysis"); @@ -670,38 +708,40 @@ impl Step for Analysis { let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=save-analysis-saved.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(build)) - .arg("--output-dir").arg(&distdir(build)) - .arg(format!("--package-name={}-{}", name, target)) - .arg(format!("--component-name=rust-analysis-{}", target)) - .arg("--legacy-manifest-dirs=rustlib,cargo"); - build.run(&mut cmd); + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=save-analysis-saved.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg(format!("--package-name={}-{}", name, target)) + .arg(format!("--component-name=rust-analysis-{}", target)) + .arg("--legacy-manifest-dirs=rustlib,cargo"); + builder.run(&mut cmd); t!(fs::remove_dir_all(&image)); - distdir(build).join(format!("{}-{}.tar.gz", name, target)) + distdir(builder).join(format!("{}-{}.tar.gz", name, target)) } } -fn copy_src_dirs(build: &Build, src_dirs: &[&str], exclude_dirs: &[&str], dst_dir: &Path) { +fn copy_src_dirs(builder: &Builder, src_dirs: &[&str], exclude_dirs: &[&str], dst_dir: &Path) { fn 
filter_fn(exclude_dirs: &[&str], dir: &str, path: &Path) -> bool { let spath = match path.to_str() { Some(path) => path, None => return false, }; if spath.ends_with("~") || spath.ends_with(".pyc") { - return false + return false; } - if (spath.contains("llvm/test") || spath.contains("llvm\\test")) && - (spath.ends_with(".ll") || - spath.ends_with(".td") || - spath.ends_with(".s")) { - return false + if (spath.contains("llvm/test") || spath.contains("llvm\\test")) + && (spath.ends_with(".ll") || spath.ends_with(".td") || spath.ends_with(".s")) + { + return false; } if spath.contains("test/emscripten") || spath.contains("test\\emscripten") { - return false + return false; } let full_path = Path::new(dir).join(path); @@ -710,25 +750,44 @@ fn copy_src_dirs(build: &Build, src_dirs: &[&str], exclude_dirs: &[&str], dst_di } let excludes = [ - "CVS", "RCS", "SCCS", ".git", ".gitignore", ".gitmodules", - ".gitattributes", ".cvsignore", ".svn", ".arch-ids", "{arch}", - "=RELEASE-ID", "=meta-update", "=update", ".bzr", ".bzrignore", - ".bzrtags", ".hg", ".hgignore", ".hgrags", "_darcs", + "CVS", + "RCS", + "SCCS", + ".git", + ".gitignore", + ".gitmodules", + ".gitattributes", + ".cvsignore", + ".svn", + ".arch-ids", + "{arch}", + "=RELEASE-ID", + "=meta-update", + "=update", + ".bzr", + ".bzrignore", + ".bzrtags", + ".hg", + ".hgignore", + ".hgrags", + "_darcs", ]; !path.iter() - .map(|s| s.to_str().unwrap()) - .any(|s| excludes.contains(&s)) + .map(|s| s.to_str().unwrap()) + .any(|s| excludes.contains(&s)) } // Copy the directories using our filter for item in src_dirs { let dst = &dst_dir.join(item); t!(fs::create_dir_all(dst)); - cp_filtered(&build.src.join(item), dst, &|path| filter_fn(exclude_dirs, item, path)); + cp_filtered(&builder.config.src.join(item), dst, &|path| { + filter_fn(exclude_dirs, item, path) + }); } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct Src; impl Step for Src 
{ @@ -736,8 +795,6 @@ impl Step for Src { type Output = PathBuf; const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; - const ONLY_BUILD_TARGETS: bool = true; - const ONLY_BUILD: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { run.path("src") @@ -749,20 +806,17 @@ impl Step for Src { /// Creates the `rust-src` installer component fn run(self, builder: &Builder) -> PathBuf { - let build = builder.build; println!("Dist src"); - let name = pkgname(build, "rust-src"); - let image = tmpdir(build).join(format!("{}-image", name)); + let name = pkgname(builder, "rust-src"); + let image = tmpdir(builder).join(format!("{}-image", name)); let _ = fs::remove_dir_all(&image); let dst = image.join("lib/rustlib/src"); let dst_src = dst.join("rust"); t!(fs::create_dir_all(&dst_src)); - let src_files = [ - "src/Cargo.lock", - ]; + let src_files = ["src/Cargo.lock"]; // This is the reduced set of paths which will become the rust-src component // (essentially libstd and all of its path dependencies) let std_src_dirs = [ @@ -797,33 +851,41 @@ impl Step for Src { "src/jemalloc/test/unit", ]; - copy_src_dirs(build, &std_src_dirs[..], &std_src_dirs_exclude[..], &dst_src); + copy_src_dirs( + builder, + &std_src_dirs[..], + &std_src_dirs_exclude[..], + &dst_src, + ); for file in src_files.iter() { - copy(&build.src.join(file), &dst_src.join(file)); + copy(&builder.config.src.join(file), &dst_src.join(file)); } // Create source tarball in rust-installer format let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=Awesome-Source.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(build)) - .arg("--output-dir").arg(&distdir(build)) - .arg(format!("--package-name={}", name)) - .arg("--component-name=rust-src") - .arg("--legacy-manifest-dirs=rustlib,cargo"); - build.run(&mut cmd); + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + 
.arg("--success-message=Awesome-Source.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg(format!("--package-name={}", name)) + .arg("--component-name=rust-src") + .arg("--legacy-manifest-dirs=rustlib,cargo"); + builder.run(&mut cmd); t!(fs::remove_dir_all(&image)); - distdir(build).join(&format!("{}.tar.gz", name)) + distdir(builder).join(&format!("{}.tar.gz", name)) } } const CARGO_VENDOR_VERSION: &str = "0.1.4"; -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct PlainSourceTarball; impl Step for PlainSourceTarball { @@ -831,12 +893,11 @@ impl Step for PlainSourceTarball { type Output = PathBuf; const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; - const ONLY_BUILD_TARGETS: bool = true; - const ONLY_BUILD: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path("src").default_condition(builder.config.rust_dist_src) + run.path("src") + .default_condition(builder.config.dist.src_tarball) } fn make_run(run: RunConfig) { @@ -845,12 +906,11 @@ impl Step for PlainSourceTarball { /// Creates the plain source tarball fn run(self, builder: &Builder) -> PathBuf { - let build = builder.build; println!("Create plain source tarball"); // Make sure that the root folder of tarball has the correct name - let plain_name = format!("{}-src", pkgname(build, "rustc")); - let plain_dst_src = tmpdir(build).join(&plain_name); + let plain_name = format!("{}-src", pkgname(builder, "rustc")); + let plain_dst_src = tmpdir(builder).join(&plain_name); let _ = fs::remove_dir_all(&plain_dst_src); t!(fs::create_dir_all(&plain_dst_src)); @@ -866,58 +926,56 @@ impl Step for PlainSourceTarball { "x.py", "config.toml.example", ]; - let src_dirs = [ - "src", - ]; + let src_dirs = ["src"]; - copy_src_dirs(build, &src_dirs[..], &[], &plain_dst_src); + copy_src_dirs(builder, 
&src_dirs[..], &[], &plain_dst_src); // Copy the files normally for item in &src_files { - copy(&build.src.join(item), &plain_dst_src.join(item)); + copy(&builder.config.src.join(item), &plain_dst_src.join(item)); } // Create the version file - write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes()); - if let Some(sha) = build.rust_sha() { - write_file(&plain_dst_src.join("git-commit-hash"), sha.as_bytes()); + t!(fs::write(&plain_dst_src.join("version"), &builder.rust_version())); + if let Some(sha) = builder.rust_sha() { + t!(fs::write(&plain_dst_src.join("git-commit-hash"), &sha)); } // If we're building from git sources, we need to vendor a complete distribution. - if build.rust_info.is_git() { + if builder.rust_info.is_git() { // Get cargo-vendor installed, if it isn't already. let mut has_cargo_vendor = false; - let mut cmd = Command::new(&build.initial_cargo); + let mut cmd = Command::new(&builder.config.general.initial_cargo); for line in output(cmd.arg("install").arg("--list")).lines() { has_cargo_vendor |= line.starts_with("cargo-vendor "); } if !has_cargo_vendor { - let mut cmd = Command::new(&build.initial_cargo); + let mut cmd = Command::new(&builder.config.general.initial_cargo); cmd.arg("install") - .arg("--force") - .arg("--debug") - .arg("--vers").arg(CARGO_VENDOR_VERSION) - .arg("cargo-vendor") - .env("RUSTC", &build.initial_rustc); - if let Some(dir) = build.openssl_install_dir(build.config.build) { + .arg("--force") + .arg("--debug") + .arg("--vers") + .arg(CARGO_VENDOR_VERSION) + .arg("cargo-vendor") + .env("RUSTC", &builder.config.general.initial_rustc); + if let Some(dir) = builder.openssl_install_dir(builder.config.general.build) { builder.ensure(native::Openssl { - target: build.config.build, + target: builder.config.general.build, }); cmd.env("OPENSSL_DIR", dir); } - build.run(&mut cmd); + builder.run(&mut cmd); } // Vendor all Cargo dependencies - let mut cmd = Command::new(&build.initial_cargo); - cmd.arg("vendor") - 
.current_dir(&plain_dst_src.join("src")); - build.run(&mut cmd); + let mut cmd = Command::new(&builder.config.general.initial_cargo); + cmd.arg("vendor").current_dir(&plain_dst_src.join("src")); + builder.run(&mut cmd); } // Create plain source tarball - let plain_name = format!("rustc-{}-src", build.rust_package_vers()); - let mut tarball = distdir(build).join(&format!("{}.tar.gz", plain_name)); + let plain_name = format!("rustc-{}-src", builder.rust_package_vers()); + let mut tarball = distdir(builder).join(&format!("{}.tar.gz", plain_name)); tarball.set_extension(""); // strip .gz tarball.set_extension(""); // strip .tar if let Some(dir) = tarball.parent() { @@ -926,12 +984,14 @@ impl Step for PlainSourceTarball { println!("running installer"); let mut cmd = rust_installer(builder); cmd.arg("tarball") - .arg("--input").arg(&plain_name) - .arg("--output").arg(&tarball) - .arg("--work-dir=.") - .current_dir(tmpdir(build)); - build.run(&mut cmd); - distdir(build).join(&format!("{}.tar.gz", plain_name)) + .arg("--input") + .arg(&plain_name) + .arg("--output") + .arg(&tarball) + .arg("--work-dir=.") + .current_dir(tmpdir(builder)); + builder.run(&mut cmd); + distdir(builder).join(&format!("{}.tar.gz", plain_name)) } } @@ -939,11 +999,7 @@ fn install(src: &Path, dstdir: &Path, perms: u32) { let dst = dstdir.join(src.file_name().unwrap()); t!(fs::create_dir_all(dstdir)); drop(fs::remove_file(&dst)); - { - let mut s = t!(fs::File::open(&src)); - let mut d = t!(fs::File::create(&dst)); - io::copy(&mut s, &mut d).expect("failed to copy"); - } + fs::copy(&src, &dst).expect("failed to copy"); chmod(&dst, perms); } @@ -965,21 +1021,16 @@ pub fn sanitize_sh(path: &Path) -> String { let mut ch = s.chars(); let drive = ch.next().unwrap_or('C'); if ch.next() != Some(':') { - return None + return None; } if ch.next() != Some('/') { - return None + return None; } Some(format!("/{}/{}", drive, &s[drive.len_utf8() + 2..])) } } -fn write_file(path: &Path, data: &[u8]) { - let mut vf 
= t!(fs::File::create(path)); - t!(vf.write_all(data)); -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct Cargo { pub stage: u32, pub target: Interned, @@ -987,7 +1038,6 @@ pub struct Cargo { impl Step for Cargo { type Output = PathBuf; - const ONLY_BUILD_TARGETS: bool = true; const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { @@ -997,23 +1047,22 @@ impl Step for Cargo { fn make_run(run: RunConfig) { run.builder.ensure(Cargo { stage: run.builder.top_stage, - target: run.target, + target: run.host, }); } fn run(self, builder: &Builder) -> PathBuf { - let build = builder.build; let stage = self.stage; let target = self.target; println!("Dist cargo stage{} ({})", stage, target); - let src = build.src.join("src/tools/cargo"); + let src = builder.config.src.join("src/tools/cargo"); let etc = src.join("src/etc"); - let release_num = build.release_num("cargo"); - let name = pkgname(build, "cargo"); - let version = builder.cargo_info.version(build, &release_num); + let release_num = builder.release_num("cargo"); + let name = pkgname(builder, "cargo"); + let version = builder.cargo_info.version(builder, &release_num); - let tmp = tmpdir(build); + let tmp = tmpdir(builder); let image = tmp.join("cargo-image"); drop(fs::remove_dir_all(&image)); t!(fs::create_dir_all(&image)); @@ -1022,17 +1071,23 @@ impl Step for Cargo { t!(fs::create_dir_all(image.join("share/zsh/site-functions"))); t!(fs::create_dir_all(image.join("etc/bash_completion.d"))); let cargo = builder.ensure(tool::Cargo { - compiler: builder.compiler(stage, build.build), - target + compiler: builder.compiler(stage, builder.config.general.build), + target, }); install(&cargo, &image.join("bin"), 0o755); for man in t!(etc.join("man").read_dir()) { let man = t!(man); install(&man.path(), &image.join("share/man/man1"), 0o644); } - install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644); 
- copy(&etc.join("cargo.bashcomp.sh"), - &image.join("etc/bash_completion.d/cargo")); + install( + &etc.join("_cargo"), + &image.join("share/zsh/site-functions"), + 0o644, + ); + copy( + &etc.join("cargo.bashcomp.sh"), + &image.join("etc/bash_completion.d/cargo"), + ); let doc = image.join("share/doc/cargo"); install(&src.join("README.md"), &doc, 0o644); install(&src.join("LICENSE-MIT"), &doc, 0o644); @@ -1047,27 +1102,31 @@ impl Step for Cargo { install(&src.join("LICENSE-MIT"), &overlay, 0o644); install(&src.join("LICENSE-APACHE"), &overlay, 0o644); install(&src.join("LICENSE-THIRD-PARTY"), &overlay, 0o644); - t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes())); + t!(fs::write(&overlay.join("version"), &version)); // Generate the installer tarball let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=Rust-is-ready-to-roll.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(build)) - .arg("--output-dir").arg(&distdir(build)) - .arg("--non-installed-overlay").arg(&overlay) - .arg(format!("--package-name={}-{}", name, target)) - .arg("--component-name=cargo") - .arg("--legacy-manifest-dirs=rustlib,cargo"); - build.run(&mut cmd); - distdir(build).join(format!("{}-{}.tar.gz", name, target)) + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=Rust-is-ready-to-roll.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg("--non-installed-overlay") + .arg(&overlay) + .arg(format!("--package-name={}-{}", name, target)) + .arg("--component-name=cargo") + .arg("--legacy-manifest-dirs=rustlib,cargo"); + builder.run(&mut cmd); + distdir(builder).join(format!("{}-{}.tar.gz", name, target)) } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub 
struct Rls { pub stage: u32, pub target: Interned, @@ -1075,7 +1134,6 @@ pub struct Rls { impl Step for Rls { type Output = Option; - const ONLY_BUILD_TARGETS: bool = true; const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { @@ -1085,23 +1143,22 @@ impl Step for Rls { fn make_run(run: RunConfig) { run.builder.ensure(Rls { stage: run.builder.top_stage, - target: run.target, + target: run.host, }); } fn run(self, builder: &Builder) -> Option { - let build = builder.build; let stage = self.stage; let target = self.target; - assert!(build.config.extended); + assert!(builder.config.general.extended); println!("Dist RLS stage{} ({})", stage, target); - let src = build.src.join("src/tools/rls"); - let release_num = build.release_num("rls"); - let name = pkgname(build, "rls"); - let version = build.rls_info.version(build, &release_num); + let src = builder.config.src.join("src/tools/rls"); + let release_num = builder.release_num("rls"); + let name = pkgname(builder, "rls"); + let version = builder.rls_info.version(builder, &release_num); - let tmp = tmpdir(build); + let tmp = tmpdir(builder); let image = tmp.join("rls-image"); drop(fs::remove_dir_all(&image)); t!(fs::create_dir_all(&image)); @@ -1109,10 +1166,15 @@ impl Step for Rls { // Prepare the image directory // We expect RLS to build, because we've exited this step above if tool // state for RLS isn't testing. 
- let rls = builder.ensure(tool::Rls { - compiler: builder.compiler(stage, build.build), - target - }).or_else(|| { println!("Unable to build RLS, skipping dist"); None })?; + let rls = builder + .ensure(tool::Rls { + compiler: builder.compiler(stage, builder.config.general.build), + target, + }) + .or_else(|| { + println!("Unable to build RLS, skipping dist"); + None + })?; install(&rls, &image.join("bin"), 0o755); let doc = image.join("share/doc/rls"); @@ -1127,29 +1189,32 @@ impl Step for Rls { install(&src.join("README.md"), &overlay, 0o644); install(&src.join("LICENSE-MIT"), &overlay, 0o644); install(&src.join("LICENSE-APACHE"), &overlay, 0o644); - t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes())); + t!(fs::write(&overlay.join("version"), &version)); // Generate the installer tarball let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=RLS-ready-to-serve.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(build)) - .arg("--output-dir").arg(&distdir(build)) - .arg("--non-installed-overlay").arg(&overlay) - .arg(format!("--package-name={}-{}", name, target)) - .arg("--legacy-manifest-dirs=rustlib,cargo") - .arg("--component-name=rls-preview"); - - build.run(&mut cmd); - Some(distdir(build).join(format!("{}-{}.tar.gz", name, target))) + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=RLS-ready-to-serve.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg("--non-installed-overlay") + .arg(&overlay) + .arg(format!("--package-name={}-{}", name, target)) + .arg("--legacy-manifest-dirs=rustlib,cargo") + .arg("--component-name=rls-preview"); + + builder.run(&mut cmd); + Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target))) } } - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] 
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct Rustfmt { pub stage: u32, pub target: Interned, @@ -1157,7 +1222,6 @@ pub struct Rustfmt { impl Step for Rustfmt { type Output = Option; - const ONLY_BUILD_TARGETS: bool = true; const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { @@ -1167,36 +1231,45 @@ impl Step for Rustfmt { fn make_run(run: RunConfig) { run.builder.ensure(Rustfmt { stage: run.builder.top_stage, - target: run.target, + target: run.host, }); } fn run(self, builder: &Builder) -> Option { - let build = builder.build; let stage = self.stage; let target = self.target; - assert!(build.config.extended); + assert!(builder.config.general.extended); println!("Dist Rustfmt stage{} ({})", stage, target); - let src = build.src.join("src/tools/rustfmt"); - let release_num = build.release_num("rustfmt"); - let name = pkgname(build, "rustfmt"); - let version = build.rustfmt_info.version(build, &release_num); + let src = builder.config.src.join("src/tools/rustfmt"); + let release_num = builder.release_num("rustfmt"); + let name = pkgname(builder, "rustfmt"); + let version = builder.rustfmt_info.version(builder, &release_num); - let tmp = tmpdir(build); + let tmp = tmpdir(builder); let image = tmp.join("rustfmt-image"); drop(fs::remove_dir_all(&image)); t!(fs::create_dir_all(&image)); // Prepare the image directory - let rustfmt = builder.ensure(tool::Rustfmt { - compiler: builder.compiler(stage, build.build), - target - }).or_else(|| { println!("Unable to build Rustfmt, skipping dist"); None })?; - let cargofmt = builder.ensure(tool::Cargofmt { - compiler: builder.compiler(stage, build.build), - target - }).or_else(|| { println!("Unable to build Cargofmt, skipping dist"); None })?; + let rustfmt = builder + .ensure(tool::Rustfmt { + compiler: builder.compiler(stage, builder.config.general.build), + target, + }) + .or_else(|| { + println!("Unable to build Rustfmt, skipping dist"); + None + })?; + let cargofmt = 
builder + .ensure(tool::Cargofmt { + compiler: builder.compiler(stage, builder.config.general.build), + target, + }) + .or_else(|| { + println!("Unable to build Cargofmt, skipping dist"); + None + })?; install(&rustfmt, &image.join("bin"), 0o755); install(&cargofmt, &image.join("bin"), 0o755); @@ -1212,28 +1285,32 @@ impl Step for Rustfmt { install(&src.join("README.md"), &overlay, 0o644); install(&src.join("LICENSE-MIT"), &overlay, 0o644); install(&src.join("LICENSE-APACHE"), &overlay, 0o644); - t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes())); + t!(fs::write(&overlay.join("version"), &version)); // Generate the installer tarball let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=rustfmt-ready-to-fmt.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(build)) - .arg("--output-dir").arg(&distdir(build)) - .arg("--non-installed-overlay").arg(&overlay) - .arg(format!("--package-name={}-{}", name, target)) - .arg("--legacy-manifest-dirs=rustlib,cargo") - .arg("--component-name=rustfmt-preview"); - - build.run(&mut cmd); - Some(distdir(build).join(format!("{}-{}.tar.gz", name, target))) + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=rustfmt-ready-to-fmt.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg("--non-installed-overlay") + .arg(&overlay) + .arg(format!("--package-name={}-{}", name, target)) + .arg("--legacy-manifest-dirs=rustlib,cargo") + .arg("--component-name=rustfmt-preview"); + + builder.run(&mut cmd); + Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target))) } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct Extended { stage: u32, host: Interned, @@ -1243,25 +1320,24 @@ pub struct 
Extended { impl Step for Extended { type Output = (); const DEFAULT: bool = true; - const ONLY_BUILD_TARGETS: bool = true; const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path("extended").default_condition(builder.config.extended) + run.path("extended") + .default_condition(builder.config.general.extended) } fn make_run(run: RunConfig) { run.builder.ensure(Extended { stage: run.builder.top_stage, - host: run.host, - target: run.target, + host: run.builder.config.general.build, + target: run.host, }); } /// Creates a combined installer for the specified target in the provided stage. fn run(self, builder: &Builder) { - let build = builder.build; let stage = self.stage; let target = self.target; @@ -1276,28 +1352,31 @@ impl Step for Extended { let mingw_installer = builder.ensure(Mingw { host: target }); let analysis_installer = builder.ensure(Analysis { compiler: builder.compiler(stage, self.host), - target + target, }); - let docs_installer = builder.ensure(Docs { stage, host: target, }); + let docs_installer = builder.ensure(Docs { + stage, + host: target, + }); let std_installer = builder.ensure(Std { compiler: builder.compiler(stage, self.host), target, }); - let tmp = tmpdir(build); + let tmp = tmpdir(builder); let overlay = tmp.join("extended-overlay"); - let etc = build.src.join("src/etc/installer"); + let etc = builder.config.src.join("src/etc/installer"); let work = tmp.join("work"); let _ = fs::remove_dir_all(&overlay); - install(&build.src.join("COPYRIGHT"), &overlay, 0o644); - install(&build.src.join("LICENSE-APACHE"), &overlay, 0o644); - install(&build.src.join("LICENSE-MIT"), &overlay, 0o644); - let version = build.rust_version(); - t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes())); - if let Some(sha) = build.rust_sha() { - t!(t!(File::create(overlay.join("git-commit-hash"))).write_all(sha.as_bytes())); + install(&builder.config.src.join("COPYRIGHT"), &overlay, 
0o644); + install(&builder.config.src.join("LICENSE-APACHE"), &overlay, 0o644); + install(&builder.config.src.join("LICENSE-MIT"), &overlay, 0o644); + let version = builder.rust_version(); + t!(fs::write(&overlay.join("version"), &version)); + if let Some(sha) = builder.rust_sha() { + t!(fs::write(&overlay.join("git-commit-hash"), &sha)); } install(&etc.join("README.md"), &overlay, 0o644); @@ -1312,7 +1391,7 @@ impl Step for Extended { tarballs.extend(rustfmt_installer.clone()); tarballs.push(analysis_installer); tarballs.push(std_installer); - if build.config.docs { + if builder.config.general.docs { tarballs.push(docs_installer); } if target.contains("pc-windows-gnu") { @@ -1329,20 +1408,27 @@ impl Step for Extended { .arg("--product-name=Rust") .arg("--rel-manifest-dir=rustlib") .arg("--success-message=Rust-is-ready-to-roll.") - .arg("--work-dir").arg(&work) - .arg("--output-dir").arg(&distdir(build)) - .arg(format!("--package-name={}-{}", pkgname(build, "rust"), target)) + .arg("--work-dir") + .arg(&work) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg(format!( + "--package-name={}-{}", + pkgname(builder, "rust"), + target + )) .arg("--legacy-manifest-dirs=rustlib,cargo") - .arg("--input-tarballs").arg(input_tarballs) - .arg("--non-installed-overlay").arg(&overlay); - build.run(&mut cmd); + .arg("--input-tarballs") + .arg(input_tarballs) + .arg("--non-installed-overlay") + .arg(&overlay); + builder.run(&mut cmd); - let mut license = String::new(); - t!(t!(File::open(build.src.join("COPYRIGHT"))).read_to_string(&mut license)); + let mut license = t!(fs::read_string(builder.config.src.join("COPYRIGHT"))); license.push_str("\n"); - t!(t!(File::open(build.src.join("LICENSE-APACHE"))).read_to_string(&mut license)); + license += &t!(fs::read_string(builder.config.src.join("LICENSE-APACHE"))); license.push_str("\n"); - t!(t!(File::open(build.src.join("LICENSE-MIT"))).read_to_string(&mut license)); + license += 
&t!(fs::read_string(builder.config.src.join("LICENSE-MIT"))); let rtf = r"{\rtf1\ansi\deff0{\fonttbl{\f0\fnil\fcharset0 Arial;}}\nowwrap\fs18"; let mut rtf = rtf.to_string(); @@ -1372,8 +1458,7 @@ impl Step for Extended { } let xform = |p: &Path| { - let mut contents = String::new(); - t!(t!(File::open(p)).read_to_string(&mut contents)); + let mut contents = t!(fs::read_string(p)); if rls_installer.is_none() { contents = filter(&contents, "rls"); } @@ -1381,8 +1466,8 @@ impl Step for Extended { contents = filter(&contents, "rustfmt"); } let ret = tmp.join(p.file_name().unwrap()); - t!(t!(File::create(&ret)).write_all(contents.as_bytes())); - return ret + t!(fs::write(&ret, &contents)); + return ret; }; if target.contains("apple-darwin") { @@ -1391,17 +1476,21 @@ impl Step for Extended { let pkgbuild = |component: &str| { let mut cmd = Command::new("pkgbuild"); - cmd.arg("--identifier").arg(format!("org.rust-lang.{}", component)) - .arg("--scripts").arg(pkg.join(component)) + cmd.arg("--identifier") + .arg(format!("org.rust-lang.{}", component)) + .arg("--scripts") + .arg(pkg.join(component)) .arg("--nopayload") .arg(pkg.join(component).with_extension("pkg")); - build.run(&mut cmd); + builder.run(&mut cmd); }; let prepare = |name: &str| { t!(fs::create_dir_all(pkg.join(name))); - cp_r(&work.join(&format!("{}-{}", pkgname(build, name), target)), - &pkg.join(name)); + cp_r( + &work.join(&format!("{}-{}", pkgname(builder, name), target)), + &pkg.join(name), + ); install(&etc.join("pkg/postinstall"), &pkg.join(name), 0o755); pkgbuild(name); }; @@ -1420,16 +1509,17 @@ impl Step for Extended { pkgbuild("uninstall"); t!(fs::create_dir_all(pkg.join("res"))); - t!(t!(File::create(pkg.join("res/LICENSE.txt"))).write_all(license.as_bytes())); + t!(fs::write(pkg.join("res/LICENSE.txt"), &license)); install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644); let mut cmd = Command::new("productbuild"); - 
cmd.arg("--distribution").arg(xform(&etc.join("pkg/Distribution.xml"))) - .arg("--resources").arg(pkg.join("res")) - .arg(distdir(build).join(format!("{}-{}.pkg", - pkgname(build, "rust"), - target))) - .arg("--package-path").arg(&pkg); - build.run(&mut cmd); + cmd.arg("--distribution") + .arg(xform(&etc.join("pkg/Distribution.xml"))) + .arg("--resources") + .arg(pkg.join("res")) + .arg(distdir(builder).join(format!("{}-{}.pkg", pkgname(builder, "rust"), target))) + .arg("--package-path") + .arg(&pkg); + builder.run(&mut cmd); } if target.contains("windows") { @@ -1445,9 +1535,11 @@ impl Step for Extended { } else { name.to_string() }; - cp_r(&work.join(&format!("{}-{}", pkgname(build, name), target)) - .join(dir), - &exe.join(name)); + cp_r( + &work.join(&format!("{}-{}", pkgname(builder, name), target)) + .join(dir), + &exe.join(name), + ); t!(fs::remove_file(exe.join(name).join("manifest.in"))); }; prepare("rustc"); @@ -1466,20 +1558,21 @@ impl Step for Extended { install(&etc.join("exe/modpath.iss"), &exe, 0o644); install(&etc.join("exe/upgrade.iss"), &exe, 0o644); install(&etc.join("gfx/rust-logo.ico"), &exe, 0o644); - t!(t!(File::create(exe.join("LICENSE.txt"))).write_all(license.as_bytes())); + t!(fs::write(exe.join("LICENSE.txt"), &license)); // Generate exe installer let mut cmd = Command::new("iscc"); - cmd.arg("rust.iss") - .current_dir(&exe); + cmd.arg("rust.iss").current_dir(&exe); if target.contains("windows-gnu") { cmd.arg("/dMINGW"); } - add_env(build, &mut cmd, target); - build.run(&mut cmd); - install(&exe.join(format!("{}-{}.exe", pkgname(build, "rust"), target)), - &distdir(build), - 0o755); + add_env(builder, &mut cmd, target); + builder.run(&mut cmd); + install( + &exe.join(format!("{}-{}.exe", pkgname(builder, "rust"), target)), + &distdir(builder), + 0o755, + ); // Generate msi installer let wix = PathBuf::from(env::var_os("WIX").unwrap()); @@ -1488,82 +1581,132 @@ impl Step for Extended { let light = wix.join("bin/light.exe"); let 
heat_flags = ["-nologo", "-gg", "-sfrag", "-srd", "-sreg"]; - build.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rustc") - .args(&heat_flags) - .arg("-cg").arg("RustcGroup") - .arg("-dr").arg("Rustc") - .arg("-var").arg("var.RustcDir") - .arg("-out").arg(exe.join("RustcGroup.wxs"))); - build.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rust-docs") - .args(&heat_flags) - .arg("-cg").arg("DocsGroup") - .arg("-dr").arg("Docs") - .arg("-var").arg("var.DocsDir") - .arg("-out").arg(exe.join("DocsGroup.wxs")) - .arg("-t").arg(etc.join("msi/squash-components.xsl"))); - build.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("cargo") - .args(&heat_flags) - .arg("-cg").arg("CargoGroup") - .arg("-dr").arg("Cargo") - .arg("-var").arg("var.CargoDir") - .arg("-out").arg(exe.join("CargoGroup.wxs")) - .arg("-t").arg(etc.join("msi/remove-duplicates.xsl"))); - build.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rust-std") - .args(&heat_flags) - .arg("-cg").arg("StdGroup") - .arg("-dr").arg("Std") - .arg("-var").arg("var.StdDir") - .arg("-out").arg(exe.join("StdGroup.wxs"))); + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rustc") + .args(&heat_flags) + .arg("-cg") + .arg("RustcGroup") + .arg("-dr") + .arg("Rustc") + .arg("-var") + .arg("var.RustcDir") + .arg("-out") + .arg(exe.join("RustcGroup.wxs")), + ); + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rust-docs") + .args(&heat_flags) + .arg("-cg") + .arg("DocsGroup") + .arg("-dr") + .arg("Docs") + .arg("-var") + .arg("var.DocsDir") + .arg("-out") + .arg(exe.join("DocsGroup.wxs")) + .arg("-t") + .arg(etc.join("msi/squash-components.xsl")), + ); + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("cargo") + .args(&heat_flags) + .arg("-cg") + .arg("CargoGroup") + .arg("-dr") + .arg("Cargo") + .arg("-var") + .arg("var.CargoDir") + .arg("-out") + 
.arg(exe.join("CargoGroup.wxs")) + .arg("-t") + .arg(etc.join("msi/remove-duplicates.xsl")), + ); + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rust-std") + .args(&heat_flags) + .arg("-cg") + .arg("StdGroup") + .arg("-dr") + .arg("Std") + .arg("-var") + .arg("var.StdDir") + .arg("-out") + .arg(exe.join("StdGroup.wxs")), + ); if rls_installer.is_some() { - build.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rls") - .args(&heat_flags) - .arg("-cg").arg("RlsGroup") - .arg("-dr").arg("Rls") - .arg("-var").arg("var.RlsDir") - .arg("-out").arg(exe.join("RlsGroup.wxs")) - .arg("-t").arg(etc.join("msi/remove-duplicates.xsl"))); + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rls") + .args(&heat_flags) + .arg("-cg") + .arg("RlsGroup") + .arg("-dr") + .arg("Rls") + .arg("-var") + .arg("var.RlsDir") + .arg("-out") + .arg(exe.join("RlsGroup.wxs")) + .arg("-t") + .arg(etc.join("msi/remove-duplicates.xsl")), + ); } - build.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rust-analysis") - .args(&heat_flags) - .arg("-cg").arg("AnalysisGroup") - .arg("-dr").arg("Analysis") - .arg("-var").arg("var.AnalysisDir") - .arg("-out").arg(exe.join("AnalysisGroup.wxs")) - .arg("-t").arg(etc.join("msi/remove-duplicates.xsl"))); + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rust-analysis") + .args(&heat_flags) + .arg("-cg") + .arg("AnalysisGroup") + .arg("-dr") + .arg("Analysis") + .arg("-var") + .arg("var.AnalysisDir") + .arg("-out") + .arg(exe.join("AnalysisGroup.wxs")) + .arg("-t") + .arg(etc.join("msi/remove-duplicates.xsl")), + ); if target.contains("windows-gnu") { - build.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rust-mingw") - .args(&heat_flags) - .arg("-cg").arg("GccGroup") - .arg("-dr").arg("Gcc") - .arg("-var").arg("var.GccDir") - .arg("-out").arg(exe.join("GccGroup.wxs"))); + builder.run( + Command::new(&heat) + 
.current_dir(&exe) + .arg("dir") + .arg("rust-mingw") + .args(&heat_flags) + .arg("-cg") + .arg("GccGroup") + .arg("-dr") + .arg("Gcc") + .arg("-var") + .arg("var.GccDir") + .arg("-out") + .arg(exe.join("GccGroup.wxs")), + ); } let candle = |input: &Path| { let output = exe.join(input.file_stem().unwrap()) - .with_extension("wixobj"); - let arch = if target.contains("x86_64") {"x64"} else {"x86"}; + .with_extension("wixobj"); + let arch = if target.contains("x86_64") { + "x64" + } else { + "x86" + }; let mut cmd = Command::new(&candle); cmd.current_dir(&exe) .arg("-nologo") @@ -1572,10 +1715,12 @@ impl Step for Extended { .arg("-dCargoDir=cargo") .arg("-dStdDir=rust-std") .arg("-dAnalysisDir=rust-analysis") - .arg("-arch").arg(&arch) - .arg("-out").arg(&output) + .arg("-arch") + .arg(&arch) + .arg("-out") + .arg(&output) .arg(&input); - add_env(build, &mut cmd, target); + add_env(builder, &mut cmd, target); if rls_installer.is_some() { cmd.arg("-dRlsDir=rls"); @@ -1583,7 +1728,7 @@ impl Step for Extended { if target.contains("windows-gnu") { cmd.arg("-dGccDir=rust-mingw"); } - build.run(&mut cmd); + builder.run(&mut cmd); }; candle(&xform(&etc.join("msi/rust.wxs"))); candle(&etc.join("msi/ui.wxs")); @@ -1601,16 +1746,19 @@ impl Step for Extended { candle("GccGroup.wxs".as_ref()); } - t!(t!(File::create(exe.join("LICENSE.rtf"))).write_all(rtf.as_bytes())); + t!(fs::write(exe.join("LICENSE.rtf"), rtf)); install(&etc.join("gfx/banner.bmp"), &exe, 0o644); install(&etc.join("gfx/dialogbg.bmp"), &exe, 0o644); - let filename = format!("{}-{}.msi", pkgname(build, "rust"), target); + let filename = format!("{}-{}.msi", pkgname(builder, "rust"), target); let mut cmd = Command::new(&light); cmd.arg("-nologo") - .arg("-ext").arg("WixUIExtension") - .arg("-ext").arg("WixUtilExtension") - .arg("-out").arg(exe.join(&filename)) + .arg("-ext") + .arg("WixUIExtension") + .arg("-ext") + .arg("WixUtilExtension") + .arg("-out") + .arg(exe.join(&filename)) .arg("rust.wixobj") 
.arg("ui.wixobj") .arg("rustwelcomedlg.wixobj") @@ -1631,50 +1779,49 @@ impl Step for Extended { // ICE57 wrongly complains about the shortcuts cmd.arg("-sice:ICE57"); - build.run(&mut cmd); + builder.run(&mut cmd); - t!(fs::rename(exe.join(&filename), distdir(build).join(&filename))); + t!(fs::rename( + exe.join(&filename), + distdir(builder).join(&filename) + )); } } } -fn add_env(build: &Build, cmd: &mut Command, target: Interned) { +fn add_env(builder: &Builder, cmd: &mut Command, target: Interned) { let mut parts = channel::CFG_RELEASE_NUM.split('.'); - cmd.env("CFG_RELEASE_INFO", build.rust_version()) + cmd.env("CFG_RELEASE_INFO", builder.rust_version()) .env("CFG_RELEASE_NUM", channel::CFG_RELEASE_NUM) - .env("CFG_RELEASE", build.rust_release()) + .env("CFG_RELEASE", builder.rust_release()) .env("CFG_VER_MAJOR", parts.next().unwrap()) .env("CFG_VER_MINOR", parts.next().unwrap()) .env("CFG_VER_PATCH", parts.next().unwrap()) .env("CFG_VER_BUILD", "0") // just needed to build - .env("CFG_PACKAGE_VERS", build.rust_package_vers()) - .env("CFG_PACKAGE_NAME", pkgname(build, "rust")) + .env("CFG_PACKAGE_VERS", builder.rust_package_vers()) + .env("CFG_PACKAGE_NAME", pkgname(builder, "rust")) .env("CFG_BUILD", target) - .env("CFG_CHANNEL", &build.config.channel); + .env("CFG_CHANNEL", &builder.config.rust.channel); if target.contains("windows-gnu") { - cmd.env("CFG_MINGW", "1") - .env("CFG_ABI", "GNU"); + cmd.env("CFG_MINGW", "1").env("CFG_ABI", "GNU"); } else { - cmd.env("CFG_MINGW", "0") - .env("CFG_ABI", "MSVC"); + cmd.env("CFG_MINGW", "0").env("CFG_ABI", "MSVC"); } if target.contains("x86_64") { - cmd.env("CFG_PLATFORM", "x64"); + cmd.env("CFG_PLATFORM", "x64"); } else { - cmd.env("CFG_PLATFORM", "x86"); + cmd.env("CFG_PLATFORM", "x86"); } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct HashSign; impl Step for HashSign { type Output = (); - const ONLY_BUILD_TARGETS: bool = true; 
const ONLY_HOSTS: bool = true; - const ONLY_BUILD: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { run.path("hash-and-sign") @@ -1685,32 +1832,35 @@ impl Step for HashSign { } fn run(self, builder: &Builder) { - let build = builder.build; let mut cmd = builder.tool_cmd(Tool::BuildManifest); - let sign = build.config.dist_sign_folder.as_ref().unwrap_or_else(|| { + let sign = builder.config.dist.sign_folder.as_ref().unwrap_or_else(|| { panic!("\n\nfailed to specify `dist.sign-folder` in `config.toml`\n\n") }); - let addr = build.config.dist_upload_addr.as_ref().unwrap_or_else(|| { + let addr = builder.config.dist.upload_addr.as_ref().unwrap_or_else(|| { panic!("\n\nfailed to specify `dist.upload-addr` in `config.toml`\n\n") }); - let file = build.config.dist_gpg_password_file.as_ref().unwrap_or_else(|| { - panic!("\n\nfailed to specify `dist.gpg-password-file` in `config.toml`\n\n") - }); - let mut pass = String::new(); - t!(t!(File::open(&file)).read_to_string(&mut pass)); + let file = builder + .config + .dist + .gpg_password_file + .as_ref() + .unwrap_or_else(|| { + panic!("\n\nfailed to specify `dist.gpg-password-file` in `config.toml`\n\n") + }); + let pass = t!(fs::read_string(&file)); let today = output(Command::new("date").arg("+%Y-%m-%d")); cmd.arg(sign); - cmd.arg(distdir(build)); + cmd.arg(distdir(builder)); cmd.arg(today.trim()); - cmd.arg(build.rust_package_vers()); - cmd.arg(build.package_vers(&build.release_num("cargo"))); - cmd.arg(build.package_vers(&build.release_num("rls"))); - cmd.arg(build.package_vers(&build.release_num("rustfmt"))); + cmd.arg(builder.rust_package_vers()); + cmd.arg(builder.package_vers(&builder.release_num("cargo"))); + cmd.arg(builder.package_vers(&builder.release_num("rls"))); + cmd.arg(builder.package_vers(&builder.release_num("rustfmt"))); cmd.arg(addr); - t!(fs::create_dir_all(distdir(build))); + t!(fs::create_dir_all(distdir(builder))); let mut child = t!(cmd.stdin(Stdio::piped()).spawn()); 
t!(child.stdin.take().unwrap().write_all(pass.as_bytes())); diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs index 55d9723527e6d..283eb3c20b09e 100644 --- a/src/bootstrap/doc.rs +++ b/src/bootstrap/doc.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -//! Documentation generation for rustbuild. +//! Documentation generation for rustbuilder. //! //! This module implements generation for all bits and pieces of documentation //! for the Rust project. This notably includes suites like the rust book, the @@ -17,19 +17,18 @@ //! Everything here is basically just a shim around calling either `rustbook` or //! `rustdoc`. -use std::fs::{self, File}; -use std::io::prelude::*; use std::io; -use std::path::{PathBuf, Path}; +use std::path::{Path, PathBuf}; use Mode; use build_helper::up_to_date; +use fs; use util::{cp_r, symlink_dir}; use builder::{Builder, Compiler, RunConfig, ShouldRun, Step}; use tool::Tool; use compile; -use cache::{INTERNER, Interned}; +use cache::{Intern, Interned}; macro_rules! book { ($($name:ident, $path:expr, $book_name:expr;)+) => { @@ -45,7 +44,7 @@ macro_rules! book { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path($path).default_condition(builder.build.config.docs) + run.path($path).default_condition(builder.config.general.docs) } fn make_run(run: RunConfig) { @@ -57,7 +56,7 @@ macro_rules! book { fn run(self, builder: &Builder) { builder.ensure(Rustbook { target: self.target, - name: INTERNER.intern_str($book_name), + name: $book_name.intern(), }) } } @@ -92,11 +91,11 @@ impl Step for Rustbook { /// This will not actually generate any documentation if the documentation has /// already been generated. 
fn run(self, builder: &Builder) { - let src = builder.build.src.join("src/doc"); + let src = builder.config.src.join("src/doc"); builder.ensure(RustbookSrc { target: self.target, name: self.name, - src: INTERNER.intern_path(src), + src: src.intern(), }); } } @@ -112,13 +111,12 @@ impl Step for UnstableBook { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path("src/doc/unstable-book").default_condition(builder.build.config.docs) + run.path("src/doc/unstable-book") + .default_condition(builder.config.general.docs) } fn make_run(run: RunConfig) { - run.builder.ensure(UnstableBook { - target: run.target, - }); + run.builder.ensure(UnstableBook { target: run.target }); } fn run(self, builder: &Builder) { @@ -127,8 +125,8 @@ impl Step for UnstableBook { }); builder.ensure(RustbookSrc { target: self.target, - name: INTERNER.intern_str("unstable-book"), - src: builder.build.md_doc_out(self.target), + name: "unstable-book".intern(), + src: builder.md_doc_out(self.target), }) } } @@ -145,24 +143,23 @@ impl Step for CargoBook { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path("src/tools/cargo/src/doc/book").default_condition(builder.build.config.docs) + run.path("src/tools/cargo/src/doc/book") + .default_condition(builder.config.general.docs) } fn make_run(run: RunConfig) { run.builder.ensure(CargoBook { target: run.target, - name: INTERNER.intern_str("cargo"), + name: "cargo".intern(), }); } fn run(self, builder: &Builder) { - let build = builder.build; - let target = self.target; let name = self.name; - let src = build.src.join("src/tools/cargo/src/doc"); + let src = builder.config.src.join("src/tools/cargo/src/doc"); - let out = build.doc_out(target); + let out = builder.doc_out(target); t!(fs::create_dir_all(&out)); let out = out.join(name); @@ -171,11 +168,14 @@ impl Step for CargoBook { let _ = fs::remove_dir_all(&out); - build.run(builder.tool_cmd(Tool::Rustbook) - .arg("build") - .arg(&src) - .arg("-d") 
- .arg(out)); + builder.run( + builder + .tool_cmd(Tool::Rustbook) + .arg("build") + .arg(&src) + .arg("-d") + .arg(out), + ); } } @@ -198,11 +198,10 @@ impl Step for RustbookSrc { /// This will not actually generate any documentation if the documentation has /// already been generated. fn run(self, builder: &Builder) { - let build = builder.build; let target = self.target; let name = self.name; let src = self.src; - let out = build.doc_out(target); + let out = builder.doc_out(target); t!(fs::create_dir_all(&out)); let out = out.join(name); @@ -210,15 +209,18 @@ impl Step for RustbookSrc { let index = out.join("index.html"); let rustbook = builder.tool_exe(Tool::Rustbook); if up_to_date(&src, &index) && up_to_date(&rustbook, &index) { - return + return; } println!("Rustbook ({}) - {}", target, name); let _ = fs::remove_dir_all(&out); - build.run(builder.tool_cmd(Tool::Rustbook) - .arg("build") - .arg(&src) - .arg("-d") - .arg(out)); + builder.run( + builder + .tool_cmd(Tool::Rustbook) + .arg("build") + .arg(&src) + .arg("-d") + .arg(out), + ); } } @@ -235,12 +237,14 @@ impl Step for TheBook { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path("src/doc/book").default_condition(builder.build.config.docs) + run.path("src/doc/book") + .default_condition(builder.config.general.docs) } fn make_run(run: RunConfig) { run.builder.ensure(TheBook { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build), + compiler: run.builder + .compiler(run.builder.top_stage, run.builder.config.general.build), target: run.target, name: "book", }); @@ -256,27 +260,23 @@ impl Step for TheBook { /// * Index page /// * Redirect pages fn run(self, builder: &Builder) { - let build = builder.build; let compiler = self.compiler; let target = self.target; let name = self.name; // build book first edition builder.ensure(Rustbook { target, - name: INTERNER.intern_string(format!("{}/first-edition", name)), + name: format!("{}/first-edition", 
name).intern(), }); // build book second edition builder.ensure(Rustbook { target, - name: INTERNER.intern_string(format!("{}/second-edition", name)), + name: format!("{}/second-edition", name).intern(), }); // build the version info page and CSS - builder.ensure(Standalone { - compiler, - target, - }); + builder.ensure(Standalone { compiler, target }); // build the index page let index = format!("{}/index.md", name); @@ -285,7 +285,9 @@ impl Step for TheBook { // build the redirect pages println!("Documenting book redirect pages ({})", target); - for file in t!(fs::read_dir(build.src.join("src/doc/book/redirects"))) { + for file in t!(fs::read_dir( + builder.config.src.join("src/doc/book/redirects") + )) { let file = t!(file); let path = file.path(); let path = path.to_str().unwrap(); @@ -296,30 +298,33 @@ impl Step for TheBook { } fn invoke_rustdoc(builder: &Builder, compiler: Compiler, target: Interned, markdown: &str) { - let build = builder.build; - let out = build.doc_out(target); + let out = builder.doc_out(target); - let path = build.src.join("src/doc").join(markdown); + let path = builder.config.src.join("src/doc").join(markdown); - let favicon = build.src.join("src/doc/favicon.inc"); - let footer = build.src.join("src/doc/footer.inc"); + let favicon = builder.config.src.join("src/doc/favicon.inc"); + let footer = builder.config.src.join("src/doc/footer.inc"); let version_info = out.join("version_info.html"); let mut cmd = builder.rustdoc_cmd(compiler.host); let out = out.join("book"); - cmd.arg("--html-after-content").arg(&footer) - .arg("--html-before-content").arg(&version_info) - .arg("--html-in-header").arg(&favicon) + cmd.arg("--html-after-content") + .arg(&footer) + .arg("--html-before-content") + .arg(&version_info) + .arg("--html-in-header") + .arg(&favicon) .arg("--markdown-playground-url") .arg("https://play.rust-lang.org/") - .arg("-o").arg(&out) + .arg("-o") + .arg(&out) .arg(&path) .arg("--markdown-css") .arg("../rust.css"); - build.run(&mut 
cmd); + builder.run(&mut cmd); } #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] @@ -334,12 +339,14 @@ impl Step for Standalone { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path("src/doc").default_condition(builder.build.config.docs) + run.path("src/doc") + .default_condition(builder.config.general.docs) } fn make_run(run: RunConfig) { run.builder.ensure(Standalone { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build), + compiler: run.builder + .compiler(run.builder.top_stage, run.builder.config.general.build), target: run.target, }); } @@ -353,66 +360,72 @@ impl Step for Standalone { /// /// In the end, this is just a glorified wrapper around rustdoc! fn run(self, builder: &Builder) { - let build = builder.build; let target = self.target; let compiler = self.compiler; println!("Documenting standalone ({})", target); - let out = build.doc_out(target); + let out = builder.doc_out(target); t!(fs::create_dir_all(&out)); - let favicon = build.src.join("src/doc/favicon.inc"); - let footer = build.src.join("src/doc/footer.inc"); - let full_toc = build.src.join("src/doc/full-toc.inc"); - t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css"))); - - let version_input = build.src.join("src/doc/version_info.html.template"); + let favicon = builder.config.src.join("src/doc/favicon.inc"); + let footer = builder.config.src.join("src/doc/footer.inc"); + let full_toc = builder.config.src.join("src/doc/full-toc.inc"); + t!(fs::copy( + builder.config.src.join("src/doc/rust.css"), + out.join("rust.css") + )); + + let version_input = builder + .config + .src + .join("src/doc/version_info.html.template"); let version_info = out.join("version_info.html"); if !up_to_date(&version_input, &version_info) { - let mut info = String::new(); - t!(t!(File::open(&version_input)).read_to_string(&mut info)); - let info = info.replace("VERSION", &build.rust_release()) - .replace("SHORT_HASH", 
build.rust_info.sha_short().unwrap_or("")) - .replace("STAMP", build.rust_info.sha().unwrap_or("")); - t!(t!(File::create(&version_info)).write_all(info.as_bytes())); + let info = t!(fs::read_string(&version_input)); + let info = info.replace("VERSION", &builder.rust_release()) + .replace("SHORT_HASH", builder.rust_info.sha_short().unwrap_or("")) + .replace("STAMP", builder.rust_info.sha().unwrap_or("")); + t!(fs::write(&version_info, info)) } - for file in t!(fs::read_dir(build.src.join("src/doc"))) { + for file in t!(fs::read_dir(builder.config.src.join("src/doc"))) { let file = t!(file); let path = file.path(); let filename = path.file_name().unwrap().to_str().unwrap(); if !filename.ends_with(".md") || filename == "README.md" { - continue + continue; } let html = out.join(filename).with_extension("html"); let rustdoc = builder.rustdoc(compiler.host); - if up_to_date(&path, &html) && - up_to_date(&footer, &html) && - up_to_date(&favicon, &html) && - up_to_date(&full_toc, &html) && - up_to_date(&version_info, &html) && - up_to_date(&rustdoc, &html) { - continue + if up_to_date(&path, &html) && up_to_date(&footer, &html) && up_to_date(&favicon, &html) + && up_to_date(&full_toc, &html) && up_to_date(&version_info, &html) + && up_to_date(&rustdoc, &html) + { + continue; } let mut cmd = builder.rustdoc_cmd(compiler.host); - cmd.arg("--html-after-content").arg(&footer) - .arg("--html-before-content").arg(&version_info) - .arg("--html-in-header").arg(&favicon) - .arg("--markdown-playground-url") - .arg("https://play.rust-lang.org/") - .arg("-o").arg(&out) - .arg(&path); + cmd.arg("--html-after-content") + .arg(&footer) + .arg("--html-before-content") + .arg(&version_info) + .arg("--html-in-header") + .arg(&favicon) + .arg("--markdown-playground-url") + .arg("https://play.rust-lang.org/") + .arg("-o") + .arg(&out) + .arg(&path); if filename == "not_found.md" { cmd.arg("--markdown-no-toc") - .arg("--markdown-css") - .arg("https://doc.rust-lang.org/rust.css"); + 
.arg("--markdown-css") + .arg("https://doc.rust-lang.org/rust.css"); } else { cmd.arg("--markdown-css").arg("rust.css"); } - build.run(&mut cmd); + builder.run(&mut cmd); } } } @@ -429,13 +442,14 @@ impl Step for Std { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.all_krates("std").default_condition(builder.build.config.docs) + run.all_krates("std") + .default_condition(builder.config.general.docs) } fn make_run(run: RunConfig) { run.builder.ensure(Std { stage: run.builder.top_stage, - target: run.target + target: run.target, }); } @@ -444,23 +458,24 @@ impl Step for Std { /// This will generate all documentation for the standard library and its /// dependencies. This is largely just a wrapper around `cargo doc`. fn run(self, builder: &Builder) { - let build = builder.build; let stage = self.stage; let target = self.target; println!("Documenting stage{} std ({})", stage, target); - let out = build.doc_out(target); + let out = builder.doc_out(target); t!(fs::create_dir_all(&out)); - let compiler = builder.compiler(stage, build.build); + let compiler = builder.compiler(stage, builder.config.general.build); let rustdoc = builder.rustdoc(compiler.host); - let compiler = if build.force_use_stage1(compiler, target) { + let compiler = if builder.force_use_stage1(compiler, target) { builder.compiler(1, compiler.host) } else { compiler }; builder.ensure(compile::Std { compiler, target }); - let out_dir = build.stage_out(compiler, Mode::Libstd) - .join(target).join("doc"); + let out_dir = builder + .stage_out(compiler, Mode::Libstd) + .join(target) + .join("doc"); // Here what we're doing is creating a *symlink* (directory junction on // Windows) to the final output location. This is not done as an @@ -475,17 +490,16 @@ impl Step for Std { // // This way rustdoc generates output directly into the output, and rustdoc // will also directly handle merging. 
- let my_out = build.crate_doc_out(target); - build.clear_if_dirty(&my_out, &rustdoc); + let my_out = builder.crate_doc_out(target); + builder.clear_if_dirty(&my_out, &rustdoc); t!(symlink_dir_force(&my_out, &out_dir)); let mut cargo = builder.cargo(compiler, Mode::Libstd, target, "doc"); - compile::std_cargo(build, &compiler, target, &mut cargo); // We don't want to build docs for internal std dependencies unless // in compiler-docs mode. When not in that mode, we whitelist the crates // for which docs must be built. - if !build.config.compiler_docs { + if !builder.config.general.compiler_docs { cargo.arg("--no-deps"); for krate in &["alloc", "core", "std", "std_unicode"] { cargo.arg("-p").arg(krate); @@ -496,8 +510,7 @@ impl Step for Std { } } - - build.run(&mut cargo); + builder.run(&mut cargo); cp_r(&my_out, &out); } } @@ -514,7 +527,8 @@ impl Step for Test { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.krate("test").default_condition(builder.config.compiler_docs) + run.krate("test") + .default_condition(builder.config.general.compiler_docs) } fn make_run(run: RunConfig) { @@ -529,15 +543,14 @@ impl Step for Test { /// This will generate all documentation for libtest and its dependencies. This /// is largely just a wrapper around `cargo doc`. 
fn run(self, builder: &Builder) { - let build = builder.build; let stage = self.stage; let target = self.target; println!("Documenting stage{} test ({})", stage, target); - let out = build.doc_out(target); + let out = builder.doc_out(target); t!(fs::create_dir_all(&out)); - let compiler = builder.compiler(stage, build.build); + let compiler = builder.compiler(stage, builder.config.general.build); let rustdoc = builder.rustdoc(compiler.host); - let compiler = if build.force_use_stage1(compiler, target) { + let compiler = if builder.force_use_stage1(compiler, target) { builder.compiler(1, compiler.host) } else { compiler @@ -547,17 +560,18 @@ impl Step for Test { builder.ensure(Std { stage, target }); builder.ensure(compile::Test { compiler, target }); - let out_dir = build.stage_out(compiler, Mode::Libtest) - .join(target).join("doc"); + let out_dir = builder + .stage_out(compiler, Mode::Libtest) + .join(target) + .join("doc"); // See docs in std above for why we symlink - let my_out = build.crate_doc_out(target); - build.clear_if_dirty(&my_out, &rustdoc); + let my_out = builder.crate_doc_out(target); + builder.clear_if_dirty(&my_out, &rustdoc); t!(symlink_dir_force(&my_out, &out_dir)); let mut cargo = builder.cargo(compiler, Mode::Libtest, target, "doc"); - compile::test_cargo(build, &compiler, target, &mut cargo); - build.run(&mut cargo); + builder.run(&mut cargo); cp_r(&my_out, &out); } } @@ -575,7 +589,8 @@ impl Step for Rustc { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.krate("rustc-main").default_condition(builder.build.config.docs) + run.krate("rustc-main") + .default_condition(builder.config.general.docs) } fn make_run(run: RunConfig) { @@ -590,15 +605,14 @@ impl Step for Rustc { /// This will generate all documentation for the compiler libraries and their /// dependencies. This is largely just a wrapper around `cargo doc`. 
fn run(self, builder: &Builder) { - let build = builder.build; let stage = self.stage; let target = self.target; println!("Documenting stage{} compiler ({})", stage, target); - let out = build.doc_out(target); + let out = builder.doc_out(target); t!(fs::create_dir_all(&out)); - let compiler = builder.compiler(stage, build.build); + let compiler = builder.compiler(stage, builder.config.general.build); let rustdoc = builder.rustdoc(compiler.host); - let compiler = if build.force_use_stage1(compiler, target) { + let compiler = if builder.force_use_stage1(compiler, target) { builder.compiler(1, compiler.host) } else { compiler @@ -608,18 +622,19 @@ impl Step for Rustc { builder.ensure(Std { stage, target }); builder.ensure(compile::Rustc { compiler, target }); - let out_dir = build.stage_out(compiler, Mode::Librustc) - .join(target).join("doc"); + let out_dir = builder + .stage_out(compiler, Mode::Librustc) + .join(target) + .join("doc"); // See docs in std above for why we symlink - let my_out = build.crate_doc_out(target); - build.clear_if_dirty(&my_out, &rustdoc); + let my_out = builder.crate_doc_out(target); + builder.clear_if_dirty(&my_out, &rustdoc); t!(symlink_dir_force(&my_out, &out_dir)); let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "doc"); - compile::rustc_cargo(build, &mut cargo); - if build.config.compiler_docs { + if builder.config.general.compiler_docs { // src/rustc/Cargo.toml contains a bin crate called rustc which // would otherwise overwrite the docs for the real rustc lib crate. 
cargo.arg("-p").arg("rustc_driver"); @@ -632,7 +647,7 @@ impl Step for Rustc { } } - build.run(&mut cargo); + builder.run(&mut cargo); cp_r(&my_out, &out); } } @@ -649,33 +664,32 @@ impl Step for ErrorIndex { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path("src/tools/error_index_generator").default_condition(builder.build.config.docs) + run.path("src/tools/error_index_generator") + .default_condition(builder.config.general.docs) } fn make_run(run: RunConfig) { - run.builder.ensure(ErrorIndex { - target: run.target, - }); + run.builder.ensure(ErrorIndex { target: run.target }); } /// Generates the HTML rendered error-index by running the /// `error_index_generator` tool. fn run(self, builder: &Builder) { - let build = builder.build; let target = self.target; println!("Documenting error index ({})", target); - let out = build.doc_out(target); + let out = builder.doc_out(target); t!(fs::create_dir_all(&out)); let mut index = builder.tool_cmd(Tool::ErrorIndex); index.arg("html"); index.arg(out.join("error-index.html")); // FIXME: shouldn't have to pass this env var - index.env("CFG_BUILD", &build.build) - .env("RUSTC_ERROR_METADATA_DST", build.extended_error_dir()); + index + .env("CFG_BUILD", &builder.config.general.build) + .env("RUSTC_ERROR_METADATA_DST", builder.extended_error_dir()); - build.run(&mut index); + builder.run(&mut index); } } @@ -691,46 +705,43 @@ impl Step for UnstableBookGen { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path("src/tools/unstable-book-gen").default_condition(builder.build.config.docs) + run.path("src/tools/unstable-book-gen") + .default_condition(builder.config.general.docs) } fn make_run(run: RunConfig) { - run.builder.ensure(UnstableBookGen { - target: run.target, - }); + run.builder.ensure(UnstableBookGen { target: run.target }); } fn run(self, builder: &Builder) { - let build = builder.build; let target = self.target; builder.ensure(compile::Std { - compiler: 
builder.compiler(builder.top_stage, build.build), + compiler: builder.compiler(builder.top_stage, builder.config.general.build), target, }); println!("Generating unstable book md files ({})", target); - let out = build.md_doc_out(target).join("unstable-book"); + let out = builder.md_doc_out(target).join("unstable-book"); t!(fs::create_dir_all(&out)); t!(fs::remove_dir_all(&out)); let mut cmd = builder.tool_cmd(Tool::UnstableBookGen); - cmd.arg(build.src.join("src")); + cmd.arg(builder.config.src.join("src")); cmd.arg(out); - build.run(&mut cmd); + builder.run(&mut cmd); } } fn symlink_dir_force(src: &Path, dst: &Path) -> io::Result<()> { + if cfg!(test) { return Ok(()); } if let Ok(m) = fs::symlink_metadata(dst) { if m.file_type().is_dir() { try!(fs::remove_dir_all(dst)); } else { // handle directory junctions on windows by falling back to // `remove_dir`. - try!(fs::remove_file(dst).or_else(|_| { - fs::remove_dir(dst) - })); + try!(fs::remove_file(dst).or_else(|_| fs::remove_dir(dst))); } } diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs index 8ca5910a11c0d..2a332ecefba3f 100644 --- a/src/bootstrap/flags.rs +++ b/src/bootstrap/flags.rs @@ -14,18 +14,18 @@ //! has various flags to configure how it's run. use std::env; -use std::fs; use std::path::PathBuf; use std::process; use getopts::Options; +use fs; use Build; use config::Config; use metadata; use builder::Builder; -use cache::{Interned, INTERNER}; +use cache::{Intern, Interned}; /// Deserialized version of all flags for this compile. 
pub struct Flags { @@ -44,52 +44,40 @@ pub struct Flags { pub incremental: bool, pub exclude: Vec, pub rustc_error_format: Option, + pub paths: Vec, } pub enum Subcommand { - Build { - paths: Vec, - }, - Check { - paths: Vec, - }, - Doc { - paths: Vec, - }, + Build, + Check, + Doc, Test { - paths: Vec, test_args: Vec, rustc_args: Vec, fail_fast: bool, doc_tests: bool, }, Bench { - paths: Vec, test_args: Vec, }, Clean { all: bool, }, - Dist { - paths: Vec, - }, - Install { - paths: Vec, - }, + Dist, + Install, } impl Default for Subcommand { fn default() -> Subcommand { - Subcommand::Build { - paths: vec![PathBuf::from("nowhere")], - } + Subcommand::Build } } impl Flags { pub fn parse(args: &[String]) -> Flags { let mut extra_help = String::new(); - let mut subcommand_help = format!("\ + let mut subcommand_help = format!( + "\ Usage: x.py [options] [...] Subcommands: @@ -102,7 +90,8 @@ Subcommands: dist Build distribution artifacts install Install distribution artifacts -To learn more about a subcommand, run `./x.py -h`"); +To learn more about a subcommand, run `./x.py -h`" + ); let mut opts = Options::new(); // Options common to all subcommands @@ -122,28 +111,24 @@ To learn more about a subcommand, run `./x.py -h`"); opts.optflag("", "error-format", "rustc error format"); // fn usage() - let usage = |exit_code: i32, opts: &Options, subcommand_help: &str, extra_help: &str| -> ! { - println!("{}", opts.usage(subcommand_help)); - if !extra_help.is_empty() { - println!("{}", extra_help); - } - process::exit(exit_code); - }; + let usage = + |exit_code: i32, opts: &Options, subcommand_help: &str, extra_help: &str| -> ! { + println!("{}", opts.usage(subcommand_help)); + if !extra_help.is_empty() { + println!("{}", extra_help); + } + process::exit(exit_code); + }; // We can't use getopt to parse the options until we have completed specifying which // options are valid, but under the current implementation, some options are conditional on // the subcommand. 
Therefore we must manually identify the subcommand first, so that we can // complete the definition of the options. Then we can use the getopt::Matches object from // there on out. - let subcommand = args.iter().find(|&s| - (s == "build") - || (s == "check") - || (s == "test") - || (s == "bench") - || (s == "doc") - || (s == "clean") - || (s == "dist") - || (s == "install")); + let subcommand = args.iter().find(|&s| { + (s == "build") || (s == "check") || (s == "test") || (s == "bench") || (s == "doc") + || (s == "clean") || (s == "dist") || (s == "install") + }); let subcommand = match subcommand { Some(s) => s, None => { @@ -158,7 +143,7 @@ To learn more about a subcommand, run `./x.py -h`"); // Some subcommands get extra options match subcommand.as_str() { - "test" => { + "test" => { opts.optflag("", "no-fail-fast", "Run all tests regardless of failure"); opts.optmulti("", "test-args", "extra arguments", "ARGS"); opts.optmulti( @@ -168,10 +153,14 @@ To learn more about a subcommand, run `./x.py -h`"); "ARGS", ); opts.optflag("", "doc", "run doc tests"); - }, - "bench" => { opts.optmulti("", "test-args", "extra arguments", "ARGS"); }, - "clean" => { opts.optflag("", "all", "clean all build artifacts"); }, - _ => { }, + } + "bench" => { + opts.optmulti("", "test-args", "extra arguments", "ARGS"); + } + "clean" => { + opts.optflag("", "all", "clean all build artifacts"); + } + _ => {} }; // Done specifying what options are possible, so do the getopts parsing @@ -191,21 +180,24 @@ To learn more about a subcommand, run `./x.py -h`"); if check_subcommand != subcommand { pass_sanity_check = false; } - }, + } None => { pass_sanity_check = false; } } if !pass_sanity_check { println!("{}\n", subcommand_help); - println!("Sorry, I couldn't figure out which subcommand you were trying to specify.\n\ - You may need to move some options to after the subcommand.\n"); + println!( + "Sorry, I couldn't figure out which subcommand you were trying to specify.\n\ + You may need to 
move some options to after the subcommand.\n" + ); process::exit(1); } // Extra help text for some commands match subcommand.as_str() { "build" => { - subcommand_help.push_str("\n + subcommand_help.push_str( + "\n Arguments: This subcommand accepts a number of paths to directories to the crates and/or artifacts to compile. For example: @@ -227,10 +219,12 @@ Arguments: This will first build everything once (like --stage 0 without further arguments would), and then use the compiler built in stage 0 to build src/libtest and its dependencies. - Once this is done, build/$ARCH/stage1 contains a usable compiler."); + Once this is done, build/$ARCH/stage1 contains a usable compiler.", + ); } "check" => { - subcommand_help.push_str("\n + subcommand_help.push_str( + "\n Arguments: This subcommand accepts a number of paths to directories to the crates and/or artifacts to compile. For example: @@ -242,10 +236,12 @@ Arguments: also that since we use `cargo check`, by default this will automatically enable incremental compilation, so there's no need to pass it separately, though it won't hurt. We also completely ignore the stage passed, as there's no way to compile in non-stage 0 without actually building - the compiler."); + the compiler.", + ); } "test" => { - subcommand_help.push_str("\n + subcommand_help.push_str( + "\n Arguments: This subcommand accepts a number of paths to directories to tests that should be compiled and run. For example: @@ -258,10 +254,12 @@ Arguments: compiled and tested. ./x.py test - ./x.py test --stage 1"); + ./x.py test --stage 1", + ); } "doc" => { - subcommand_help.push_str("\n + subcommand_help.push_str( + "\n Arguments: This subcommand accepts a number of paths to directories of documentation to build. 
For example: @@ -273,16 +271,22 @@ Arguments: If no arguments are passed then everything is documented: ./x.py doc - ./x.py doc --stage 1"); + ./x.py doc --stage 1", + ); } - _ => { } + _ => {} }; // Get any optional paths which occur after the subcommand let cwd = t!(env::current_dir()); - let src = matches.opt_str("src").map(PathBuf::from) + let src = matches + .opt_str("src") + .map(PathBuf::from) .or_else(|| env::var_os("SRC").map(PathBuf::from)) .unwrap_or(cwd.clone()); - let paths = matches.free[1..].iter().map(|p| p.into()).collect::>(); + let paths = matches.free[1..] + .iter() + .map(|p| p.into()) + .collect::>(); let cfg_file = matches.opt_str("config").map(PathBuf::from).or_else(|| { if fs::metadata("config.toml").is_ok() { @@ -301,9 +305,12 @@ Arguments: let maybe_rules_help = Builder::get_help(&build, subcommand.as_str()); extra_help.push_str(maybe_rules_help.unwrap_or_default().as_str()); } else if subcommand.as_str() != "clean" { - extra_help.push_str(format!( - "Run `./x.py {} -h -v` to see a list of available paths.", - subcommand).as_str()); + extra_help.push_str( + format!( + "Run `./x.py {} -h -v` to see a list of available paths.", + subcommand + ).as_str(), + ); } // User passed in -h/--help? 
@@ -312,30 +319,18 @@ Arguments: } let cmd = match subcommand.as_str() { - "build" => { - Subcommand::Build { paths: paths } - } - "check" => { - Subcommand::Check { paths: paths } - } - "test" => { - Subcommand::Test { - paths, - test_args: matches.opt_strs("test-args"), - rustc_args: matches.opt_strs("rustc-args"), - fail_fast: !matches.opt_present("no-fail-fast"), - doc_tests: matches.opt_present("doc"), - } - } - "bench" => { - Subcommand::Bench { - paths, - test_args: matches.opt_strs("test-args"), - } - } - "doc" => { - Subcommand::Doc { paths: paths } - } + "build" => Subcommand::Build, + "check" => Subcommand::Check, + "test" => Subcommand::Test { + test_args: matches.opt_strs("test-args"), + rustc_args: matches.opt_strs("rustc-args"), + fail_fast: !matches.opt_present("no-fail-fast"), + doc_tests: matches.opt_present("doc"), + }, + "bench" => Subcommand::Bench { + test_args: matches.opt_strs("test-args"), + }, + "doc" => Subcommand::Doc, "clean" => { if paths.len() > 0 { println!("\nclean does not take a path argument\n"); @@ -346,22 +341,13 @@ Arguments: all: matches.opt_present("all"), } } - "dist" => { - Subcommand::Dist { - paths, - } - } - "install" => { - Subcommand::Install { - paths, - } - } + "dist" => Subcommand::Dist, + "install" => Subcommand::Install, _ => { usage(1, &opts, &subcommand_help, &extra_help); } }; - let mut stage = matches.opt_str("stage").map(|j| j.parse().unwrap()); if matches.opt_present("incremental") && stage.is_none() { @@ -374,18 +360,25 @@ Arguments: on_fail: matches.opt_str("on-fail"), rustc_error_format: matches.opt_str("error-format"), keep_stage: matches.opt_str("keep-stage").map(|j| j.parse().unwrap()), - build: matches.opt_str("build").map(|s| INTERNER.intern_string(s)), + build: matches.opt_str("build").map(|s| s.intern()), host: split(matches.opt_strs("host")) - .into_iter().map(|x| INTERNER.intern_string(x)).collect::>(), + .into_iter() + .map(|x| x.intern()) + .collect::>(), target: 
split(matches.opt_strs("target")) - .into_iter().map(|x| INTERNER.intern_string(x)).collect::>(), + .into_iter() + .map(|x| x.intern()) + .collect::>(), config: cfg_file, jobs: matches.opt_str("jobs").map(|j| j.parse().unwrap()), cmd, incremental: matches.opt_present("incremental"), exclude: split(matches.opt_strs("exclude")) - .into_iter().map(|p| p.into()).collect::>(), + .into_iter() + .map(|p| p.into()) + .collect::>(), src, + paths, } } } @@ -393,9 +386,11 @@ Arguments: impl Subcommand { pub fn test_args(&self) -> Vec<&str> { match *self { - Subcommand::Test { ref test_args, .. } | - Subcommand::Bench { ref test_args, .. } => { - test_args.iter().flat_map(|s| s.split_whitespace()).collect() + Subcommand::Test { ref test_args, .. } | Subcommand::Bench { ref test_args, .. } => { + test_args + .iter() + .flat_map(|s| s.split_whitespace()) + .collect() } _ => Vec::new(), } @@ -403,9 +398,10 @@ impl Subcommand { pub fn rustc_args(&self) -> Vec<&str> { match *self { - Subcommand::Test { ref rustc_args, .. } => { - rustc_args.iter().flat_map(|s| s.split_whitespace()).collect() - } + Subcommand::Test { ref rustc_args, .. } => rustc_args + .iter() + .flat_map(|s| s.split_whitespace()) + .collect(), _ => Vec::new(), } } @@ -426,5 +422,8 @@ impl Subcommand { } fn split(s: Vec) -> Vec { - s.iter().flat_map(|s| s.split(',')).map(|s| s.to_string()).collect() + s.iter() + .flat_map(|s| s.split(',')) + .map(|s| s.to_string()) + .collect() } diff --git a/src/bootstrap/install.rs b/src/bootstrap/install.rs index 20f7d379a6967..f1af5230516cb 100644 --- a/src/bootstrap/install.rs +++ b/src/bootstrap/install.rs @@ -14,12 +14,12 @@ //! compiler, and documentation. 
use std::env; -use std::fs; -use std::path::{Path, PathBuf, Component}; +use std::path::{Component, Path, PathBuf}; use std::process::Command; use dist::{self, pkgname, sanitize_sh, tmpdir}; +use fs; use builder::{Builder, RunConfig, ShouldRun, Step}; use cache::Interned; use config::Config; @@ -60,23 +60,16 @@ fn install_sh( package: &str, name: &str, stage: u32, - host: Option> + host: Option>, ) { - let build = builder.build; println!("Install {} stage{} ({:?})", package, stage, host); - let prefix_default = PathBuf::from("/usr/local"); - let sysconfdir_default = PathBuf::from("/etc"); - let docdir_default = PathBuf::from("share/doc/rust"); - let bindir_default = PathBuf::from("bin"); - let libdir_default = PathBuf::from("lib"); - let mandir_default = PathBuf::from("share/man"); - let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default); - let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default); - let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default); - let bindir = build.config.bindir.as_ref().unwrap_or(&bindir_default); - let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default); - let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default); + let prefix = &builder.config.install.prefix; + let sysconfdir = &builder.config.install.sysconfdir; + let docdir = &builder.config.install.docdir; + let bindir = &builder.config.install.bindir; + let libdir = &builder.config.install.libdir; + let mandir = &builder.config.install.mandir; let sysconfdir = prefix.join(sysconfdir); let docdir = prefix.join(docdir); @@ -93,18 +86,20 @@ fn install_sh( let libdir = add_destdir(&libdir, &destdir); let mandir = add_destdir(&mandir, &destdir); - let empty_dir = build.out.join("tmp/empty_dir"); + let empty_dir = builder.config.general.out.join("tmp/empty_dir"); t!(fs::create_dir_all(&empty_dir)); let package_name = if let Some(host) = host { - format!("{}-{}", pkgname(build, name), host) + format!("{}-{}", 
pkgname(builder, name), host) } else { - pkgname(build, name) + pkgname(builder, name) }; let mut cmd = Command::new("sh"); cmd.current_dir(&empty_dir) - .arg(sanitize_sh(&tmpdir(build).join(&package_name).join("install.sh"))) + .arg(sanitize_sh(&tmpdir(builder) + .join(&package_name) + .join("install.sh"))) .arg(format!("--prefix={}", sanitize_sh(&prefix))) .arg(format!("--sysconfdir={}", sanitize_sh(&sysconfdir))) .arg(format!("--docdir={}", sanitize_sh(&docdir))) @@ -112,7 +107,7 @@ fn install_sh( .arg(format!("--libdir={}", sanitize_sh(&libdir))) .arg(format!("--mandir={}", sanitize_sh(&mandir))) .arg("--disable-ldconfig"); - build.run(&mut cmd); + builder.run(&mut cmd); t!(fs::remove_dir_all(&empty_dir)); } @@ -148,20 +143,19 @@ macro_rules! install { impl $name { #[allow(dead_code)] fn should_build(config: &Config) -> bool { - config.extended && config.tools.as_ref() + config.general.extended && config.general.tools.as_ref() .map_or(true, |t| t.contains($path)) } #[allow(dead_code)] fn should_install(builder: &Builder) -> bool { - builder.config.tools.as_ref().map_or(false, |t| t.contains($path)) + builder.config.general.tools.as_ref().map_or(false, |t| t.contains($path)) } } impl Step for $name { type Output = (); const DEFAULT: bool = true; - const ONLY_BUILD_TARGETS: bool = true; const ONLY_HOSTS: bool = $only_hosts; $(const $c: bool = true;)* @@ -174,7 +168,7 @@ macro_rules! install { run.builder.ensure($name { stage: run.builder.top_stage, target: run.target, - host: run.host, + host: run.builder.config.general.build, }); } @@ -186,12 +180,12 @@ macro_rules! 
install { } install!((self, builder, _config), - Docs, "src/doc", _config.docs, only_hosts: false, { + Docs, "src/doc", _config.general.docs, only_hosts: false, { builder.ensure(dist::Docs { stage: self.stage, host: self.target }); install_docs(builder, self.stage, self.target); }; Std, "src/libstd", true, only_hosts: true, { - for target in &builder.build.targets { + for target in &builder.config.general.target { builder.ensure(dist::Std { compiler: builder.compiler(self.stage, self.host), target: *target @@ -227,9 +221,11 @@ install!((self, builder, _config), install_analysis(builder, self.stage, self.target); }; Src, "src", Self::should_build(_config) , only_hosts: true, { - builder.ensure(dist::Src); - install_src(builder, self.stage); - }, ONLY_BUILD; + if self.target == builder.config.general.build { + builder.ensure(dist::Src); + install_src(builder, self.stage); + } + }; Rustc, "src/librustc", true, only_hosts: true, { builder.ensure(dist::Rustc { compiler: builder.compiler(self.stage, self.target), diff --git a/src/bootstrap/job.rs b/src/bootstrap/job.rs index fa3ba02482f56..8f828613e935b 100644 --- a/src/bootstrap/job.rs +++ b/src/bootstrap/job.rs @@ -70,21 +70,23 @@ extern "system" { fn CreateJobObjectW(lpJobAttributes: *mut u8, lpName: *const u8) -> HANDLE; fn CloseHandle(hObject: HANDLE) -> BOOL; fn GetCurrentProcess() -> HANDLE; - fn OpenProcess(dwDesiredAccess: DWORD, - bInheritHandle: BOOL, - dwProcessId: DWORD) -> HANDLE; - fn DuplicateHandle(hSourceProcessHandle: HANDLE, - hSourceHandle: HANDLE, - hTargetProcessHandle: HANDLE, - lpTargetHandle: LPHANDLE, - dwDesiredAccess: DWORD, - bInheritHandle: BOOL, - dwOptions: DWORD) -> BOOL; + fn OpenProcess(dwDesiredAccess: DWORD, bInheritHandle: BOOL, dwProcessId: DWORD) -> HANDLE; + fn DuplicateHandle( + hSourceProcessHandle: HANDLE, + hSourceHandle: HANDLE, + hTargetProcessHandle: HANDLE, + lpTargetHandle: LPHANDLE, + dwDesiredAccess: DWORD, + bInheritHandle: BOOL, + dwOptions: DWORD, + ) -> BOOL; fn 
AssignProcessToJobObject(hJob: HANDLE, hProcess: HANDLE) -> BOOL; - fn SetInformationJobObject(hJob: HANDLE, - JobObjectInformationClass: JOBOBJECTINFOCLASS, - lpJobObjectInformation: LPVOID, - cbJobObjectInformationLength: DWORD) -> BOOL; + fn SetInformationJobObject( + hJob: HANDLE, + JobObjectInformationClass: JOBOBJECTINFOCLASS, + lpJobObjectInformation: LPVOID, + cbJobObjectInformationLength: DWORD, + ) -> BOOL; fn SetErrorMode(mode: UINT) -> UINT; } @@ -143,10 +145,12 @@ pub unsafe fn setup(build: &mut Build) { info.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_PRIORITY_CLASS; info.BasicLimitInformation.PriorityClass = BELOW_NORMAL_PRIORITY_CLASS; } - let r = SetInformationJobObject(job, - JobObjectExtendedLimitInformation, - &mut info as *mut _ as LPVOID, - mem::size_of_val(&info) as DWORD); + let r = SetInformationJobObject( + job, + JobObjectExtendedLimitInformation, + &mut info as *mut _ as LPVOID, + mem::size_of_val(&info) as DWORD, + ); assert!(r != 0, "{}", io::Error::last_os_error()); // Assign our process to this job object. Note that if this fails, one very @@ -161,7 +165,7 @@ pub unsafe fn setup(build: &mut Build) { let r = AssignProcessToJobObject(job, GetCurrentProcess()); if r == 0 { CloseHandle(job); - return + return; } // If we've got a parent process (e.g. the python script that called us) @@ -180,9 +184,15 @@ pub unsafe fn setup(build: &mut Build) { let parent = OpenProcess(PROCESS_DUP_HANDLE, FALSE, pid.parse().unwrap()); assert!(parent != 0 as *mut _, "{}", io::Error::last_os_error()); let mut parent_handle = 0 as *mut _; - let r = DuplicateHandle(GetCurrentProcess(), job, - parent, &mut parent_handle, - 0, FALSE, DUPLICATE_SAME_ACCESS); + let r = DuplicateHandle( + GetCurrentProcess(), + job, + parent, + &mut parent_handle, + 0, + FALSE, + DUPLICATE_SAME_ACCESS, + ); // If this failed, well at least we tried! 
An example of DuplicateHandle // failing in the past has been when the wrong python2 package spawned this diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index 90f50275b6bb4..6ef3c2d29c076 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -113,39 +113,87 @@ //! More documentation can be found in each respective module below, and you can //! also check out the `src/bootstrap/README.md` file for more information. -//#![deny(warnings)] -#![feature(core_intrinsics)] +#![deny(warnings)] +#![feature(fs_read_write, core_intrinsics)] #[macro_use] extern crate build_helper; +#[cfg(test)] #[macro_use] -extern crate serde_derive; -#[macro_use] -extern crate lazy_static; -extern crate serde_json; +extern crate pretty_assertions; +extern crate cc; extern crate cmake; extern crate filetime; -extern crate cc; extern crate getopts; +#[macro_use] +extern crate lazy_static; extern crate num_cpus; -extern crate toml; +extern crate serde; +#[macro_use] +extern crate serde_derive; +extern crate serde_json; extern crate time; +extern crate toml; #[cfg(unix)] extern crate libc; -use std::cell::{RefCell, Cell}; -use std::collections::{HashSet, HashMap}; +#[cfg(test)] +mod fs { + use std::path::Path; + use std::io::Result; + use std::iter; + pub use std::fs::*; + pub fn create_dir_all>(_path: P) -> Result<()> { + Ok(()) + } + pub fn remove_dir_all>(_path: P) -> Result<()> { + Ok(()) + } + pub fn remove_dir>(_path: P) -> Result<()> { + Ok(()) + } + pub fn remove_file>(_path: P) -> Result<()> { + Ok(()) + } + pub fn hard_link, Q: AsRef>(_src: P, _dst: Q) -> Result<()> { + Ok(()) + } + pub fn copy, Q: AsRef>(_src: P, _dst: Q) -> Result<()> { + Ok(()) + } + pub fn set_permissions>(_path: P, _perm: Permissions) -> Result<()> { + Ok(()) + } + pub fn read_dir>(_path: P) -> Result>> { + Ok(iter::Empty::default()) + } + pub fn read>(path: P) -> Result> { + Ok(String::from(format!("{} into string", path.as_ref().display())).into_bytes()) + } + pub fn read_string>(path: P) -> 
Result { + Ok(String::from(format!("{} into string", path.as_ref().display()))) + } + pub fn write, C: AsRef<[u8]>>(_path: P, _contents: C) -> Result<()> { + Ok(()) + } +} + +#[cfg(not(test))] +mod fs { + pub use std::fs::*; +} + +use std::cell::{Cell, RefCell}; +use std::collections::{HashMap, HashSet}; use std::env; -use std::fs::{self, File}; -use std::io::Read; -use std::path::{PathBuf, Path}; +use std::path::{Path, PathBuf}; use std::process::{self, Command}; use std::slice; -use build_helper::{run_silent, run_suppressed, try_run_silent, try_run_suppressed, output, mtime}; +use build_helper::{mtime, output, run_silent, run_suppressed, try_run_silent, try_run_suppressed}; -use util::{exe, libdir, OutputFolder, CiEnv}; +use util::{exe, libdir, CiEnv, OutputFolder}; mod cc_detect; mod channel; @@ -175,7 +223,7 @@ mod job { use libc; pub unsafe fn setup(build: &mut ::Build) { - if build.config.low_priority { + if build.config.general.low_priority { libc::setpriority(libc::PRIO_PGRP as _, 0, 10); } } @@ -183,13 +231,12 @@ mod job { #[cfg(not(any(unix, windows)))] mod job { - pub unsafe fn setup(_build: &mut ::Build) { - } + pub unsafe fn setup(_build: &mut ::Build) {} } pub use config::Config; use flags::Subcommand; -use cache::{Interned, INTERNER}; +use cache::{Intern, Interned}; use toolstate::ToolState; /// A structure representing a Rust compiler. @@ -197,7 +244,7 @@ use toolstate::ToolState; /// Each compiler has a `stage` that it is associated with and a `host` that /// corresponds to the platform the compiler runs on. This structure is used as /// a parameter to many methods below. 
-#[derive(Eq, PartialEq, Clone, Copy, Hash, Debug)] +#[derive(Eq, PartialEq, PartialOrd, Ord, Clone, Copy, Hash, Debug)] pub struct Compiler { stage: u32, host: Interned, @@ -218,25 +265,10 @@ pub struct Build { config: Config, // Derived properties from the above two configurations - src: PathBuf, - out: PathBuf, rust_info: channel::GitInfo, cargo_info: channel::GitInfo, rls_info: channel::GitInfo, rustfmt_info: channel::GitInfo, - local_rebuild: bool, - fail_fast: bool, - doc_tests: bool, - verbosity: usize, - - // Targets for which to build. - build: Interned, - hosts: Vec>, - targets: Vec>, - - // Stage 0 (downloaded) compiler and cargo or their local rust equivalents. - initial_rustc: PathBuf, - initial_cargo: PathBuf, // Probed tools at runtime lldb_version: Option, @@ -249,7 +281,6 @@ pub struct Build { ar: HashMap, PathBuf>, // Misc crates: HashMap, Crate>, - is_sudo: bool, ci_env: CiEnv, delayed_failures: RefCell>, prerelease_version: Cell>, @@ -261,16 +292,11 @@ struct Crate { version: String, deps: Vec>, path: PathBuf, - doc_step: String, - build_step: String, - test_step: String, - bench_step: String, } impl Crate { fn is_local(&self, build: &Build) -> bool { - self.path.starts_with(&build.config.src) && - !self.path.to_string_lossy().ends_with("_shim") + self.path.starts_with(&build.config.src) && !self.path.to_string_lossy().ends_with("_shim") } fn local_path(&self, build: &Build) -> PathBuf { @@ -294,8 +320,16 @@ pub enum Mode { /// Build librustc and compiler libraries, placing output in the "stageN-rustc" directory. Librustc, - /// Build some tool, placing output in the "stageN-tools" directory. - Tool, + /// Build the codegen backend, with the string being its name. + /// Output also goes into stageN-rustc. + CodegenBackend(Interned), + + /// Build some tool, placing output in the "stageN-{std/test/rustc}-tools" directory. + /// N.B. There is no StdTool because all tools may want to run tests. 
It is Cargo's job to + /// determine whether we'd need to recompile, and it's not all that bad to require test. It's + /// fast to build and is (almost) always built in a pair with std anyway, at least on CI. + TestTool, + RustcTool, } impl Build { @@ -304,39 +338,13 @@ impl Build { /// /// By default all build output will be placed in the current directory. pub fn new(config: Config) -> Build { - let cwd = t!(env::current_dir()); - let src = config.src.clone(); - let out = cwd.join("build"); - - let is_sudo = match env::var_os("SUDO_USER") { - Some(sudo_user) => { - match env::var_os("USER") { - Some(user) => user != sudo_user, - None => false, - } - } - None => false, - }; - let rust_info = channel::GitInfo::new(&config, &src); - let cargo_info = channel::GitInfo::new(&config, &src.join("src/tools/cargo")); - let rls_info = channel::GitInfo::new(&config, &src.join("src/tools/rls")); - let rustfmt_info = channel::GitInfo::new(&config, &src.join("src/tools/rustfmt")); - - Build { - initial_rustc: config.initial_rustc.clone(), - initial_cargo: config.initial_cargo.clone(), - local_rebuild: config.local_rebuild, - fail_fast: config.cmd.fail_fast(), - doc_tests: config.cmd.doc_tests(), - verbosity: config.verbose, - - build: config.build, - hosts: config.hosts.clone(), - targets: config.targets.clone(), + let rust_info = channel::GitInfo::new(&config, &config.src); + let cargo_info = channel::GitInfo::new(&config, &config.src.join("src/tools/cargo")); + let rls_info = channel::GitInfo::new(&config, &config.src.join("src/tools/rls")); + let rustfmt_info = channel::GitInfo::new(&config, &config.src.join("src/tools/rustfmt")); + let mut build = Build { config, - src, - out, rust_info, cargo_info, @@ -348,17 +356,23 @@ impl Build { crates: HashMap::new(), lldb_version: None, lldb_python_dir: None, - is_sudo, ci_env: CiEnv::current(), delayed_failures: RefCell::new(Vec::new()), prerelease_version: Cell::new(None), - } + }; + + build.verbose("finding compilers"); + 
cc_detect::find(&mut build); + build.verbose("running sanity check"); + sanity::check(&mut build); + build.verbose("learning about cargo"); + metadata::build(&mut build); + + build } pub fn build_triple(&self) -> &[Interned] { - unsafe { - slice::from_raw_parts(&self.build, 1) - } + unsafe { slice::from_raw_parts(&self.config.general.build, 1) } } /// Executes the entire build, as configured by the flags and configuration. @@ -371,30 +385,21 @@ impl Build { return clean::clean(self, all); } - self.verbose("finding compilers"); - cc_detect::find(self); - self.verbose("running sanity check"); - sanity::check(self); - // If local-rust is the same major.minor as the current version, then force a local-rebuild - let local_version_verbose = output( - Command::new(&self.initial_rustc).arg("--version").arg("--verbose")); - let local_release = local_version_verbose - .lines().filter(|x| x.starts_with("release:")) - .next().unwrap().trim_left_matches("release:").trim(); - let my_version = channel::CFG_RELEASE_NUM; - if local_release.split('.').take(2).eq(my_version.split('.').take(2)) { - self.verbose(&format!("auto-detected local-rebuild {}", local_release)); - self.local_rebuild = true; + if let Some(path) = self.config.paths.get(0) { + if path == Path::new("nonexistent/path/to/trigger/cargo/metadata") { + return; + } } - self.verbose("learning about cargo"); - metadata::build(self); - - builder::Builder::run(&self); + let builder = builder::Builder::new(&self); + builder.execute(); // Check for postponed failures from `test --no-fail-fast`. let failures = self.delayed_failures.borrow(); if failures.len() > 0 { - println!("\n{} command(s) did not execute successfully:\n", failures.len()); + println!( + "\n{} command(s) did not execute successfully:\n", + failures.len() + ); for failure in failures.iter() { println!(" - {}\n", failure); } @@ -406,6 +411,7 @@ impl Build { /// /// After this executes, it will also ensure that `dir` exists. 
fn clear_if_dirty(&self, dir: &Path, input: &Path) -> bool { + if cfg!(test) { return true; } let stamp = dir.join(".stamp"); let mut cleared = false; if mtime(&stamp) < mtime(input) { @@ -416,7 +422,7 @@ impl Build { return cleared; } t!(fs::create_dir_all(dir)); - t!(File::create(stamp)); + t!(fs::write(stamp, &[])); cleared } @@ -425,19 +431,19 @@ impl Build { fn std_features(&self) -> String { let mut features = "panic-unwind".to_string(); - if self.config.debug_jemalloc { + if self.config.rust.debug_jemalloc() { features.push_str(" debug-jemalloc"); } - if self.config.use_jemalloc { + if self.config.rust.use_jemalloc { features.push_str(" jemalloc"); } - if self.config.backtrace { + if self.config.rust.backtrace { features.push_str(" backtrace"); } - if self.config.profiler { + if self.config.general.profiler { features.push_str(" profiler"); } - if self.config.wasm_syscall { + if self.config.rust.wasm_syscall { features.push_str(" wasm_syscall"); } features @@ -446,7 +452,7 @@ impl Build { /// Get the space-separated set of activated features for the compiler. fn rustc_features(&self) -> String { let mut features = String::new(); - if self.config.use_jemalloc { + if self.config.rust.use_jemalloc { features.push_str(" jemalloc"); } features @@ -455,11 +461,19 @@ impl Build { /// Component directory that Cargo will produce output into (e.g. 
/// release/debug) fn cargo_dir(&self) -> &'static str { - if self.config.rust_optimize {"release"} else {"debug"} + if self.config.rust.optimize() { + "release" + } else { + "debug" + } } fn tools_dir(&self, compiler: Compiler) -> PathBuf { - let out = self.out.join(&*compiler.host).join(format!("stage{}-tools-bin", compiler.stage)); + let out = self.config + .general + .out + .join(&*compiler.host) + .join(format!("stage{}-tools-bin", compiler.stage)); t!(fs::create_dir_all(&out)); out } @@ -472,21 +486,24 @@ impl Build { let suffix = match mode { Mode::Libstd => "-std", Mode::Libtest => "-test", - Mode::Tool => "-tools", - Mode::Librustc => "-rustc", + Mode::TestTool => "-test-tools", + Mode::RustcTool => "-rustc-tools", + Mode::Librustc | Mode::CodegenBackend(_) => "-rustc", }; - self.out.join(&*compiler.host) - .join(format!("stage{}{}", compiler.stage, suffix)) + self.config + .general + .out + .join(&*compiler.host) + .join(format!("stage{}{}", compiler.stage, suffix)) } /// Returns the root output directory for all Cargo output in a given stage, /// running a particular compiler, whether or not we're building the /// standard library, and targeting the specified architecture. - fn cargo_out(&self, - compiler: Compiler, - mode: Mode, - target: Interned) -> PathBuf { - self.stage_out(compiler, mode).join(&*target).join(self.cargo_dir()) + fn cargo_out(&self, compiler: Compiler, mode: Mode, target: Interned) -> PathBuf { + self.stage_out(compiler, mode) + .join(&*target) + .join(self.cargo_dir()) } /// Root output directory for LLVM compiled for `target` @@ -494,28 +511,37 @@ impl Build { /// Note that if LLVM is configured externally then the directory returned /// will likely be empty. 
fn llvm_out(&self, target: Interned) -> PathBuf { - self.out.join(&*target).join("llvm") + self.config.general.out.join(&*target).join("llvm") } fn emscripten_llvm_out(&self, target: Interned) -> PathBuf { - self.out.join(&*target).join("llvm-emscripten") + self.config + .general + .out + .join(&*target) + .join("llvm-emscripten") } /// Output directory for all documentation for a target fn doc_out(&self, target: Interned) -> PathBuf { - self.out.join(&*target).join("doc") + self.config.general.out.join(&*target).join("doc") } /// Output directory for some generated md crate documentation for a target (temporary) fn md_doc_out(&self, target: Interned) -> Interned { - INTERNER.intern_path(self.out.join(&*target).join("md-doc")) + self.config + .general + .out + .join(&*target) + .join("md-doc") + .intern() } /// Output directory for all crate documentation for a target (temporary) /// /// The artifacts here are then copied into `doc_out` above. fn crate_doc_out(&self, target: Interned) -> PathBuf { - self.out.join(&*target).join("crate-docs") + self.config.general.out.join(&*target).join("crate-docs") } /// Returns true if no custom `llvm-config` is set for the specified target. 
@@ -524,7 +550,7 @@ impl Build { fn is_rust_llvm(&self, target: Interned) -> bool { match self.config.target_config.get(&target) { Some(ref c) => c.llvm_config.is_none(), - None => true + None => true, } } @@ -537,7 +563,8 @@ impl Build { if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) { s.clone() } else { - self.llvm_out(self.config.build).join("bin") + self.llvm_out(self.config.general.build) + .join("bin") .join(exe("llvm-config", &*target)) } } @@ -549,9 +576,9 @@ impl Build { let llvm_bindir = output(Command::new(s).arg("--bindir")); Path::new(llvm_bindir.trim()).join(exe("FileCheck", &*target)) } else { - let base = self.llvm_out(self.config.build).join("build"); + let base = self.llvm_out(self.config.general.build).join("build"); let exe = exe("FileCheck", &*target); - if !self.config.ninja && self.config.build.contains("msvc") { + if !self.config.llvm.ninja && self.config.general.build.contains("msvc") { base.join("Release/bin").join(exe) } else { base.join("bin").join(exe) @@ -561,7 +588,7 @@ impl Build { /// Directory for libraries built from C/C++ code and shared between stages. fn native_dir(&self, target: Interned) -> PathBuf { - self.out.join(&*target).join("native") + self.config.general.out.join(&*target).join("native") } /// Root output directory for rust_test_helpers library compiled for @@ -579,20 +606,30 @@ impl Build { /// Returns the libdir of the snapshot compiler. fn rustc_snapshot_libdir(&self) -> PathBuf { - self.initial_rustc.parent().unwrap().parent().unwrap() - .join(libdir(&self.config.build)) + self.config + .general + .initial_rustc + .parent() + .unwrap() + .parent() + .unwrap() + .join(libdir(&self.config.general.build)) } /// Runs a command, printing out nice contextual information if it fails. fn run(&self, cmd: &mut Command) { self.verbose(&format!("running: {:?}", cmd)); - run_silent(cmd) + if !cfg!(test) { + run_silent(cmd) + } } /// Runs a command, printing out nice contextual information if it fails. 
fn run_quiet(&self, cmd: &mut Command) { self.verbose(&format!("running: {:?}", cmd)); - run_suppressed(cmd) + if !cfg!(test) { + run_suppressed(cmd) + } } /// Runs a command, printing out nice contextual information if it fails. @@ -600,7 +637,11 @@ impl Build { /// `status.success()`. fn try_run(&self, cmd: &mut Command) -> bool { self.verbose(&format!("running: {:?}", cmd)); - try_run_silent(cmd) + if !cfg!(test) { + try_run_silent(cmd) + } else { + true + } } /// Runs a command, printing out nice contextual information if it fails. @@ -608,15 +649,19 @@ impl Build { /// `status.success()`. fn try_run_quiet(&self, cmd: &mut Command) -> bool { self.verbose(&format!("running: {:?}", cmd)); - try_run_suppressed(cmd) + if cfg!(test) { + true + } else { + try_run_suppressed(cmd) + } } pub fn is_verbose(&self) -> bool { - self.verbosity > 0 + self.config.verbose() } pub fn is_very_verbose(&self) -> bool { - self.verbosity > 1 + self.config.very_verbose() } /// Prints a message if this build is configured in verbose mode. @@ -642,10 +687,12 @@ impl Build { fn cflags(&self, target: Interned) -> Vec { // Filter out -O and /O (the optimization flags) that we picked up from // cc-rs because the build scripts will determine that for themselves. 
- let mut base = self.cc[&target].args().iter() - .map(|s| s.to_string_lossy().into_owned()) - .filter(|s| !s.starts_with("-O") && !s.starts_with("/O")) - .collect::>(); + let mut base = self.cc[&target] + .args() + .iter() + .map(|s| s.to_string_lossy().into_owned()) + .filter(|s| !s.starts_with("-O") && !s.starts_with("/O")) + .collect::>(); // If we're compiling on macOS then we add a few unconditional flags // indicating that we want libc++ (more filled out than libstdc++) and @@ -674,18 +721,24 @@ impl Build { match self.cxx.get(&target) { Some(p) => Ok(p.path()), None => Err(format!( - "target `{}` is not configured as a host, only as a target", - target)) + "target `{}` is not configured as a host, only as a target", + target + )), } } /// Returns the path to the linker for the given target if it needs to be overridden. fn linker(&self, target: Interned) -> Option<&Path> { - if let Some(linker) = self.config.target_config.get(&target) - .and_then(|c| c.linker.as_ref()) { + if cfg!(test) { return None; } + if let Some(linker) = self.config + .target_config + .get(&target) + .and_then(|c| c.linker.as_ref()) + { Some(linker) - } else if target != self.config.build && - !target.contains("msvc") && !target.contains("emscripten") { + } else if target != self.config.general.build && !target.contains("msvc") + && !target.contains("emscripten") + { Some(self.cc(target)) } else { None @@ -697,24 +750,28 @@ impl Build { if target.contains("pc-windows-msvc") { Some(true) } else { - self.config.target_config.get(&target) + self.config + .target_config + .get(&target) .and_then(|t| t.crt_static) } } /// Returns the "musl root" for this `target`, if defined fn musl_root(&self, target: Interned) -> Option<&Path> { - self.config.target_config.get(&target) + self.config + .target_config + .get(&target) .and_then(|t| t.musl_root.as_ref()) - .or(self.config.musl_root.as_ref()) + .or(self.config.rust.musl_root.as_ref()) .map(|p| &**p) } /// Returns whether the target will be 
tested using the `remote-test-client` /// and `remote-test-server` binaries. fn remote_tested(&self, target: Interned) -> bool { - self.qemu_rootfs(target).is_some() || target.contains("android") || - env::var_os("TEST_DEVICE_ADDR").is_some() + self.qemu_rootfs(target).is_some() || target.contains("android") + || env::var_os("TEST_DEVICE_ADDR").is_some() } /// Returns the root of the "rootfs" image that this target will be using, @@ -723,19 +780,21 @@ impl Build { /// If `Some` is returned then that means that tests for this target are /// emulated with QEMU and binaries will need to be shipped to the emulator. fn qemu_rootfs(&self, target: Interned) -> Option<&Path> { - self.config.target_config.get(&target) + self.config + .target_config + .get(&target) .and_then(|t| t.qemu_rootfs.as_ref()) .map(|p| &**p) } /// Path to the python interpreter to use fn python(&self) -> &Path { - self.config.python.as_ref().unwrap() + self.config.general.python.as_ref().unwrap() } /// Temporary directory that extended error information is emitted to. fn extended_error_dir(&self) -> PathBuf { - self.out.join("tmp/extended-error-metadata") + self.config.general.out.join("tmp/extended-error-metadata") } /// Tests whether the `compiler` compiling for `target` should be forced to @@ -757,9 +816,9 @@ impl Build { /// When all of these conditions are met the build will lift artifacts from /// the previous stage forward. 
fn force_use_stage1(&self, compiler: Compiler, target: Interned) -> bool { - !self.config.full_bootstrap && - compiler.stage >= 2 && - (self.hosts.iter().any(|h| *h == target) || target == self.build) + !self.config.general.full_bootstrap && compiler.stage >= 2 + && (self.config.general.host.iter().any(|h| *h == target) + || target == self.config.general.build) } /// Returns the directory that OpenSSL artifacts are compiled into if @@ -768,8 +827,8 @@ impl Build { // OpenSSL not used on Windows if target.contains("windows") { None - } else if self.config.openssl_static { - Some(self.out.join(&*target).join("openssl")) + } else if self.config.general.openssl_static { + Some(self.config.general.out.join(&*target).join("openssl")) } else { None } @@ -787,7 +846,7 @@ impl Build { /// For example on nightly this returns "a.b.c-nightly", on beta it returns /// "a.b.c-beta.1" and on stable it just returns "a.b.c". fn release(&self, num: &str) -> String { - match &self.config.channel[..] { + match &self.config.rust.channel[..] 
{ "stable" => num.to_string(), "beta" => if self.rust_info.is_git() { format!("{}-beta.{}", num, self.beta_prerelease_version()) @@ -801,7 +860,7 @@ impl Build { fn beta_prerelease_version(&self) -> u32 { if let Some(s) = self.prerelease_version.get() { - return s + return s; } let beta = output( @@ -809,7 +868,7 @@ impl Build { .arg("ls-remote") .arg("origin") .arg("beta") - .current_dir(&self.src) + .current_dir(&self.config.src), ); let beta = beta.trim().split_whitespace().next().unwrap(); let master = output( @@ -817,7 +876,7 @@ impl Build { .arg("ls-remote") .arg("origin") .arg("master") - .current_dir(&self.src) + .current_dir(&self.config.src), ); let master = master.trim().split_whitespace().next().unwrap(); @@ -827,7 +886,7 @@ impl Build { .arg("merge-base") .arg(beta) .arg(master) - .current_dir(&self.src), + .current_dir(&self.config.src), ); let base = base.trim(); @@ -839,7 +898,7 @@ impl Build { .arg("--count") .arg("--merges") .arg(format!("{}...HEAD", base)) - .current_dir(&self.src), + .current_dir(&self.config.src), ); let n = count.trim().parse().unwrap(); self.prerelease_version.set(Some(n)); @@ -858,7 +917,7 @@ impl Build { /// For channels like beta/nightly it's just the channel name, otherwise /// it's the `num` provided. fn package_vers(&self, num: &str) -> String { - match &self.config.channel[..] { + match &self.config.rust.channel[..] { "stable" => num.to_string(), "beta" => "beta".to_string(), "nightly" => "nightly".to_string(), @@ -902,14 +961,18 @@ impl Build { /// Returns the `a.b.c` version that the given package is at. 
fn release_num(&self, package: &str) -> String { - let mut toml = String::new(); - let toml_file_name = self.src.join(&format!("src/tools/{}/Cargo.toml", package)); - t!(t!(File::open(toml_file_name)).read_to_string(&mut toml)); + if cfg!(test) { + return String::from("0.1.2"); + } + let toml_file_name = self.config + .src + .join(&format!("src/tools/{}/Cargo.toml", package)); + let toml = t!(fs::read_string(&toml_file_name)); for line in toml.lines() { let prefix = "version = \""; let suffix = "\""; if line.starts_with(prefix) && line.ends_with(suffix) { - return line[prefix.len()..line.len() - suffix.len()].to_string() + return line[prefix.len()..line.len() - suffix.len()].to_string(); } } @@ -919,7 +982,7 @@ impl Build { /// Returns whether unstable features should be enabled for the compiler /// we're building. fn unstable_features(&self) -> bool { - match &self.config.channel[..] { + match &self.config.rust.channel[..] { "stable" | "beta" => false, "nightly" | _ => true, } @@ -929,7 +992,9 @@ impl Build { /// ends when the returned object is dropped. Folding can only be used in /// the Travis CI environment. pub fn fold_output(&self, name: F) -> Option - where D: Into, F: FnOnce() -> D + where + D: Into, + F: FnOnce() -> D, { if self.ci_env == CiEnv::Travis { Some(OutputFolder::new(name().into())) @@ -944,9 +1009,10 @@ impl Build { /// `rust.save-toolstates` in `config.toml`. If unspecified, nothing will be /// done. The file is updated immediately after this function completes. 
pub fn save_toolstate(&self, tool: &str, state: ToolState) { + if cfg!(test) { return; } use std::io::{Seek, SeekFrom}; - if let Some(ref path) = self.config.save_toolstates { + if let Some(ref path) = self.config.rust.save_toolstates { let mut file = t!(fs::OpenOptions::new() .create(true) .read(true) @@ -964,7 +1030,7 @@ impl Build { fn in_tree_crates(&self, root: &str) -> Vec<&Crate> { let mut ret = Vec::new(); - let mut list = vec![INTERNER.intern_str(root)]; + let mut list = vec![root.intern()]; let mut visited = HashSet::new(); while let Some(krate) = list.pop() { let krate = &self.crates[&krate]; @@ -989,7 +1055,7 @@ impl<'a> Compiler { /// Returns whether this is a snapshot compiler for `build`'s configuration pub fn is_snapshot(&self, build: &Build) -> bool { - self.stage == 0 && self.host == build.build + self.stage == 0 && self.host == build.config.general.build } /// Returns if this compiler should be treated as a final stage one in the @@ -997,7 +1063,11 @@ impl<'a> Compiler { /// This takes into account whether we're performing a full bootstrap or /// not; don't directly compare the stage with `2`! pub fn is_final_stage(&self, build: &Build) -> bool { - let final_stage = if build.config.full_bootstrap { 2 } else { 1 }; + let final_stage = if build.config.general.full_bootstrap { + 2 + } else { + 1 + }; self.stage >= final_stage } } diff --git a/src/bootstrap/metadata.rs b/src/bootstrap/metadata.rs index 5f1df1d26e273..7f8eb551a20e1 100644 --- a/src/bootstrap/metadata.rs +++ b/src/bootstrap/metadata.rs @@ -16,7 +16,7 @@ use build_helper::output; use serde_json; use {Build, Crate}; -use cache::INTERNER; +use cache::Intern; #[derive(Deserialize)] struct Output { @@ -51,35 +51,31 @@ pub fn build(build: &mut Build) { } fn build_krate(build: &mut Build, krate: &str) { - // Run `cargo metadata` to figure out what crates we're testing. 
- // - // Down below we're going to call `cargo test`, but to test the right set - // of packages we're going to have to know what `-p` arguments to pass it - // to know what crates to test. Here we run `cargo metadata` to learn about - // the dependency graph and what `-p` arguments there are. - let mut cargo = Command::new(&build.initial_cargo); - cargo.arg("metadata") - .arg("--format-version").arg("1") - .arg("--manifest-path").arg(build.src.join(krate).join("Cargo.toml")); + let mut cargo = Command::new(&build.config.general.initial_cargo); + cargo + .arg("metadata") + .arg("--format-version") + .arg("1") + .arg("--manifest-path") + .arg(build.config.src.join(krate).join("Cargo.toml")); let output = output(&mut cargo); let output: Output = serde_json::from_str(&output).unwrap(); let mut id2name = HashMap::new(); for package in output.packages { if package.source.is_none() { - let name = INTERNER.intern_string(package.name); + let name = package.name.intern(); id2name.insert(package.id, name); let mut path = PathBuf::from(package.manifest_path); path.pop(); - build.crates.insert(name, Crate { - build_step: format!("build-crate-{}", name), - doc_step: format!("doc-crate-{}", name), - test_step: format!("test-crate-{}", name), - bench_step: format!("bench-crate-{}", name), + build.crates.insert( name, - version: package.version, - deps: Vec::new(), - path, - }); + Crate { + name, + version: package.version, + deps: Vec::new(), + path, + }, + ); } } diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs index 15dd7fabfa58b..b7225852a084f 100644 --- a/src/bootstrap/native.rs +++ b/src/bootstrap/native.rs @@ -20,8 +20,6 @@ use std::env; use std::ffi::OsString; -use std::fs::{self, File}; -use std::io::{Read, Write}; use std::path::{Path, PathBuf}; use std::process::Command; @@ -29,7 +27,7 @@ use build_helper::output; use cmake; use cc; -use Build; +use fs; use util::{self, exe}; use build_helper::up_to_date; use builder::{Builder, RunConfig, ShouldRun, 
Step}; @@ -53,68 +51,78 @@ impl Step for Llvm { fn make_run(run: RunConfig) { let emscripten = run.path.ends_with("llvm-emscripten"); run.builder.ensure(Llvm { - target: run.target, + target: run.host, emscripten, }); } + fn for_test(self, _builder: &Builder) -> PathBuf { + PathBuf::from("llvm-config-for-test") + } + /// Compile LLVM for `target`. fn run(self, builder: &Builder) -> PathBuf { - let build = builder.build; let target = self.target; let emscripten = self.emscripten; // If we're using a custom LLVM bail out here, but we can only use a // custom LLVM for the build triple. if !self.emscripten { - if let Some(config) = build.config.target_config.get(&target) { + if let Some(config) = builder.config.target_config.get(&target) { if let Some(ref s) = config.llvm_config { - check_llvm_version(build, s); - return s.to_path_buf() + check_llvm_version(builder, s); + return s.to_path_buf(); } } } - let rebuild_trigger = build.src.join("src/rustllvm/llvm-rebuild-trigger"); - let mut rebuild_trigger_contents = String::new(); - t!(t!(File::open(&rebuild_trigger)).read_to_string(&mut rebuild_trigger_contents)); + let rebuild_trigger = builder.config.src.join("src/rustllvm/llvm-rebuild-trigger"); + let rebuild_trigger_contents = t!(fs::read_string(&rebuild_trigger)); let (out_dir, llvm_config_ret_dir) = if emscripten { - let dir = build.emscripten_llvm_out(target); + let dir = builder.emscripten_llvm_out(target); let config_dir = dir.join("bin"); (dir, config_dir) } else { - (build.llvm_out(target), - build.llvm_out(build.config.build).join("bin")) + ( + builder.llvm_out(target), + builder.llvm_out(builder.config.general.build).join("bin"), + ) }; let done_stamp = out_dir.join("llvm-finished-building"); - let build_llvm_config = llvm_config_ret_dir - .join(exe("llvm-config", &*build.config.build)); + let build_llvm_config = + llvm_config_ret_dir.join(exe("llvm-config", &*builder.config.general.build)); if done_stamp.exists() { - let mut done_contents = String::new(); - 
t!(t!(File::open(&done_stamp)).read_to_string(&mut done_contents)); + let done_contents = t!(fs::read_string(&done_stamp)); // If LLVM was already built previously and contents of the rebuild-trigger file // didn't change from the previous build, then no action is required. if done_contents == rebuild_trigger_contents { - return build_llvm_config + return build_llvm_config; } } - let _folder = build.fold_output(|| "llvm"); + let _folder = builder.fold_output(|| "llvm"); let descriptor = if emscripten { "Emscripten " } else { "" }; println!("Building {}LLVM for {}", descriptor, target); let _time = util::timeit(); t!(fs::create_dir_all(&out_dir)); // http://llvm.org/docs/CMake.html - let root = if self.emscripten { "src/llvm-emscripten" } else { "src/llvm" }; - let mut cfg = cmake::Config::new(build.src.join(root)); - if build.config.ninja { + let root = if self.emscripten { + "src/llvm-emscripten" + } else { + "src/llvm" + }; + let mut cfg = cmake::Config::new(builder.config.src.join(root)); + if builder.config.llvm.ninja { cfg.generator("Ninja"); } - let profile = match (build.config.llvm_optimize, build.config.llvm_release_debuginfo) { + let profile = match ( + builder.config.llvm.optimize, + builder.config.llvm.release_debuginfo, + ) { (false, _) => "Debug", (true, false) => "Release", (true, true) => "RelWithDebInfo", @@ -125,37 +133,38 @@ impl Step for Llvm { let llvm_targets = if self.emscripten { "JSBackend" } else { - match build.config.llvm_targets { - Some(ref s) => s, - None => "X86;ARM;AArch64;Mips;PowerPC;SystemZ;MSP430;Sparc;NVPTX;Hexagon", - } + builder.config.llvm.targets.as_str() }; let llvm_exp_targets = if self.emscripten { "" } else { - &build.config.llvm_experimental_targets[..] + &builder.config.llvm.experimental_targets[..] 
}; - let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"}; + let assertions = if builder.config.llvm.assertions { + "ON" + } else { + "OFF" + }; cfg.target(&target) - .host(&build.build) - .out_dir(&out_dir) - .profile(profile) - .define("LLVM_ENABLE_ASSERTIONS", assertions) - .define("LLVM_TARGETS_TO_BUILD", llvm_targets) - .define("LLVM_EXPERIMENTAL_TARGETS_TO_BUILD", llvm_exp_targets) - .define("LLVM_INCLUDE_EXAMPLES", "OFF") - .define("LLVM_INCLUDE_TESTS", "OFF") - .define("LLVM_INCLUDE_DOCS", "OFF") - .define("LLVM_ENABLE_ZLIB", "OFF") - .define("WITH_POLLY", "OFF") - .define("LLVM_ENABLE_TERMINFO", "OFF") - .define("LLVM_ENABLE_LIBEDIT", "OFF") - .define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string()) - .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap()) - .define("LLVM_DEFAULT_TARGET_TRIPLE", target); + .host(&builder.config.general.build) + .out_dir(&out_dir) + .profile(profile) + .define("LLVM_ENABLE_ASSERTIONS", assertions) + .define("LLVM_TARGETS_TO_BUILD", llvm_targets) + .define("LLVM_EXPERIMENTAL_TARGETS_TO_BUILD", llvm_exp_targets) + .define("LLVM_INCLUDE_EXAMPLES", "OFF") + .define("LLVM_INCLUDE_TESTS", "OFF") + .define("LLVM_INCLUDE_DOCS", "OFF") + .define("LLVM_ENABLE_ZLIB", "OFF") + .define("WITH_POLLY", "OFF") + .define("LLVM_ENABLE_TERMINFO", "OFF") + .define("LLVM_ENABLE_LIBEDIT", "OFF") + .define("LLVM_PARALLEL_COMPILE_JOBS", builder.jobs().to_string()) + .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap()) + .define("LLVM_DEFAULT_TARGET_TRIPLE", target); // By default, LLVM will automatically find OCaml and, if it finds it, // install the LLVM bindings in LLVM_OCAML_INSTALL_PATH, which defaults @@ -163,15 +172,17 @@ impl Step for Llvm { // This causes problem for non-root builds of Rust. Side-step the issue // by setting LLVM_OCAML_INSTALL_PATH to a relative path, so it installs // in the prefix. 
- cfg.define("LLVM_OCAML_INSTALL_PATH", - env::var_os("LLVM_OCAML_INSTALL_PATH").unwrap_or_else(|| "usr/lib/ocaml".into())); + cfg.define( + "LLVM_OCAML_INSTALL_PATH", + env::var_os("LLVM_OCAML_INSTALL_PATH").unwrap_or_else(|| "usr/lib/ocaml".into()), + ); // This setting makes the LLVM tools link to the dynamic LLVM library, // which saves both memory during parallel links and overall disk space // for the tools. We don't distribute any of those tools, so this is // just a local concern. However, it doesn't work well everywhere. if target.contains("linux-gnu") || target.contains("apple-darwin") { - cfg.define("LLVM_LINK_LLVM_DYLIB", "ON"); + cfg.define("LLVM_LINK_LLVM_DYLIB", "ON"); } if target.contains("msvc") { @@ -185,32 +196,38 @@ impl Step for Llvm { cfg.define("LLVM_BUILD_32_BITS", "ON"); } - if let Some(num_linkers) = build.config.llvm_link_jobs { - if num_linkers > 0 { - cfg.define("LLVM_PARALLEL_LINK_JOBS", num_linkers.to_string()); - } + if builder.config.llvm.link_jobs > 0 { + cfg.define( + "LLVM_PARALLEL_LINK_JOBS", + builder.config.llvm.link_jobs.to_string(), + ); } // http://llvm.org/docs/HowToCrossCompileLLVM.html - if target != build.build && !emscripten { + if target != builder.config.general.build && !emscripten { builder.ensure(Llvm { - target: build.build, + target: builder.config.general.build, emscripten: false, }); // FIXME: if the llvm root for the build triple is overridden then we // should use llvm-tblgen from there, also should verify that it // actually exists most of the time in normal installs of LLVM. 
- let host = build.llvm_out(build.build).join("bin/llvm-tblgen"); + let host = builder + .llvm_out(builder.config.general.build) + .join("bin/llvm-tblgen"); cfg.define("CMAKE_CROSSCOMPILING", "True") - .define("LLVM_TABLEGEN", &host); + .define("LLVM_TABLEGEN", &host); if target.contains("netbsd") { - cfg.define("CMAKE_SYSTEM_NAME", "NetBSD"); + cfg.define("CMAKE_SYSTEM_NAME", "NetBSD"); } else if target.contains("freebsd") { - cfg.define("CMAKE_SYSTEM_NAME", "FreeBSD"); + cfg.define("CMAKE_SYSTEM_NAME", "FreeBSD"); } - cfg.define("LLVM_NATIVE_BUILD", build.llvm_out(build.build).join("build")); + cfg.define( + "LLVM_NATIVE_BUILD", + builder.llvm_out(builder.config.general.build).join("build"), + ); } let sanitize_cc = |cc: &Path| { @@ -225,42 +242,43 @@ impl Step for Llvm { // MSVC with CMake uses msbuild by default which doesn't respect these // vars that we'd otherwise configure. In that case we just skip this // entirely. - if target.contains("msvc") && !build.config.ninja { - return + if target.contains("msvc") && !builder.config.llvm.ninja { + return; } - let cc = build.cc(target); - let cxx = build.cxx(target).unwrap(); + let cc = builder.cc(target); + let cxx = builder.cxx(target).unwrap(); // Handle msvc + ninja + ccache specially (this is what the bots use) - if target.contains("msvc") && - build.config.ninja && - build.config.ccache.is_some() { + if target.contains("msvc") && builder.config.llvm.ninja + && builder.config.llvm.ccache().is_some() + { let mut cc = env::current_exe().expect("failed to get cwd"); cc.set_file_name("sccache-plus-cl.exe"); - cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc)) - .define("CMAKE_CXX_COMPILER", sanitize_cc(&cc)); - cfg.env("SCCACHE_PATH", - build.config.ccache.as_ref().unwrap()) - .env("SCCACHE_TARGET", target); + cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc)) + .define("CMAKE_CXX_COMPILER", sanitize_cc(&cc)); + cfg.env( + "SCCACHE_PATH", + builder.config.llvm.ccache().as_ref().unwrap(), + ).env("SCCACHE_TARGET", 
target); // If ccache is configured we inform the build a little differently hwo // to invoke ccache while also invoking our compilers. - } else if let Some(ref ccache) = build.config.ccache { - cfg.define("CMAKE_C_COMPILER", ccache) - .define("CMAKE_C_COMPILER_ARG1", sanitize_cc(cc)) - .define("CMAKE_CXX_COMPILER", ccache) - .define("CMAKE_CXX_COMPILER_ARG1", sanitize_cc(cxx)); + } else if let Some(ref ccache) = builder.config.llvm.ccache() { + cfg.define("CMAKE_C_COMPILER", ccache) + .define("CMAKE_C_COMPILER_ARG1", sanitize_cc(cc)) + .define("CMAKE_CXX_COMPILER", ccache) + .define("CMAKE_CXX_COMPILER_ARG1", sanitize_cc(cxx)); } else { - cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc)) - .define("CMAKE_CXX_COMPILER", sanitize_cc(cxx)); + cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc)) + .define("CMAKE_CXX_COMPILER", sanitize_cc(cxx)); } - cfg.build_arg("-j").build_arg(build.jobs().to_string()); - cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" ")); - cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" ")); - if let Some(ar) = build.ar(target) { + cfg.build_arg("-j").build_arg(builder.jobs().to_string()); + cfg.define("CMAKE_C_FLAGS", builder.cflags(target).join(" ")); + cfg.define("CMAKE_CXX_FLAGS", builder.cflags(target).join(" ")); + if let Some(ar) = builder.ar(target) { if ar.is_absolute() { // LLVM build breaks if `CMAKE_AR` is a relative path, for some reason it // tries to resolve this path in the LLVM build directory. @@ -281,24 +299,26 @@ impl Step for Llvm { // tools and libs on all platforms. 
cfg.build(); - t!(t!(File::create(&done_stamp)).write_all(rebuild_trigger_contents.as_bytes())); + t!(fs::write(&done_stamp, rebuild_trigger_contents.as_bytes())); build_llvm_config } } -fn check_llvm_version(build: &Build, llvm_config: &Path) { - if !build.config.llvm_version_check { - return +fn check_llvm_version(builder: &Builder, llvm_config: &Path) { + if !builder.config.llvm.version_check { + return; } let mut cmd = Command::new(llvm_config); let version = output(cmd.arg("--version")); - let mut parts = version.split('.').take(2) + let mut parts = version + .split('.') + .take(2) .filter_map(|s| s.parse::().ok()); if let (Some(major), Some(minor)) = (parts.next(), parts.next()) { if major > 3 || (major == 3 && minor >= 9) { - return + return; } } panic!("\n\nbad LLVM version: {}, need >=3.9\n\n", version) @@ -316,6 +336,8 @@ impl Step for TestHelpers { run.path("src/test/auxiliary/rust_test_helpers.c") } + fn for_test(self, _builder: &Builder) {} + fn make_run(run: RunConfig) { run.builder.ensure(TestHelpers { target: run.target }) } @@ -323,15 +345,14 @@ impl Step for TestHelpers { /// Compiles the `rust_test_helpers.c` library which we used in various /// `run-pass` test suites for ABI testing. fn run(self, builder: &Builder) { - let build = builder.build; let target = self.target; - let dst = build.test_helpers_out(target); - let src = build.src.join("src/test/auxiliary/rust_test_helpers.c"); + let dst = builder.test_helpers_out(target); + let src = builder.config.src.join("src/test/auxiliary/rust_test_helpers.c"); if up_to_date(&src, &dst.join("librust_test_helpers.a")) { - return + return; } - let _folder = build.fold_output(|| "build_test_helpers"); + let _folder = builder.fold_output(|| "build_test_helpers"); println!("Building test helpers"); t!(fs::create_dir_all(&dst)); let mut cfg = cc::Build::new(); @@ -340,21 +361,21 @@ impl Step for TestHelpers { // extra configuration, so inform gcc of these compilers. 
Note, though, that // on MSVC we still need gcc's detection of env vars (ugh). if !target.contains("msvc") { - if let Some(ar) = build.ar(target) { + if let Some(ar) = builder.ar(target) { cfg.archiver(ar); } - cfg.compiler(build.cc(target)); + cfg.compiler(builder.cc(target)); } cfg.cargo_metadata(false) - .out_dir(&dst) - .target(&target) - .host(&build.build) - .opt_level(0) - .warnings(false) - .debug(false) - .file(build.src.join("src/test/auxiliary/rust_test_helpers.c")) - .compile("rust_test_helpers"); + .out_dir(&dst) + .target(&target) + .host(&builder.config.general.build) + .opt_level(0) + .warnings(false) + .debug(false) + .file(builder.config.src.join("src/test/auxiliary/rust_test_helpers.c")) + .compile("rust_test_helpers"); } } @@ -374,19 +395,19 @@ impl Step for Openssl { run.never() } + fn for_test(self, _builder: &Builder) {} + fn run(self, builder: &Builder) { - let build = builder.build; let target = self.target; - let out = match build.openssl_dir(target) { + let out = match builder.openssl_dir(target) { Some(dir) => dir, None => return, }; let stamp = out.join(".stamp"); - let mut contents = String::new(); - drop(File::open(&stamp).and_then(|mut f| f.read_to_string(&mut contents))); + let contents = fs::read_string(&stamp).unwrap_or_default(); if contents == OPENSSL_VERS { - return + return; } t!(fs::create_dir_all(&out)); @@ -395,8 +416,10 @@ impl Step for Openssl { if !tarball.exists() { let tmp = tarball.with_extension("tmp"); // originally from https://www.openssl.org/source/... - let url = format!("https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/{}", - name); + let url = format!( + "https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/{}", + name + ); let mut last_error = None; for _ in 0..3 { let status = Command::new("curl") @@ -413,7 +436,9 @@ impl Step for Openssl { } // Ensure the hash is correct. 
- let mut shasum = if target.contains("apple") || build.build.contains("netbsd") { + let mut shasum = if target.contains("apple") + || builder.config.general.build.contains("netbsd") + { let mut cmd = Command::new("shasum"); cmd.arg("-a").arg("256"); cmd @@ -430,8 +455,7 @@ impl Step for Openssl { "downloaded openssl sha256 different\n\ expected: {}\n\ found: {}\n", - OPENSSL_SHA256, - found + OPENSSL_SHA256, found )); continue; } @@ -446,10 +470,15 @@ impl Step for Openssl { t!(fs::rename(&tmp, &tarball)); } let obj = out.join(format!("openssl-{}", OPENSSL_VERS)); - let dst = build.openssl_install_dir(target).unwrap(); + let dst = builder.openssl_install_dir(target).unwrap(); drop(fs::remove_dir_all(&obj)); drop(fs::remove_dir_all(&dst)); - build.run(Command::new("tar").arg("zxf").arg(&tarball).current_dir(&out)); + builder.run( + Command::new("tar") + .arg("zxf") + .arg(&tarball) + .current_dir(&out), + ); let mut configure = Command::new("perl"); configure.arg(obj.join("Configure")); @@ -498,8 +527,8 @@ impl Step for Openssl { _ => panic!("don't know how to configure OpenSSL for {}", target), }; configure.arg(os); - configure.env("CC", build.cc(target)); - for flag in build.cflags(target) { + configure.env("CC", builder.cc(target)); + for flag in builder.cflags(target) { configure.arg(flag); } // There is no specific os target for android aarch64 or x86_64, @@ -511,7 +540,7 @@ impl Step for Openssl { if target == "sparc64-unknown-netbsd" { // Need -m64 to get assembly generated correctly for sparc64. configure.arg("-m64"); - if build.build.contains("netbsd") { + if builder.config.general.build.contains("netbsd") { // Disable sparc64 asm on NetBSD builders, it uses // m4(1)'s -B flag, which NetBSD m4 does not support. 
configure.arg("no-asm"); @@ -525,13 +554,12 @@ impl Step for Openssl { } configure.current_dir(&obj); println!("Configuring openssl for {}", target); - build.run_quiet(&mut configure); + builder.run_quiet(&mut configure); println!("Building openssl for {}", target); - build.run_quiet(Command::new("make").arg("-j1").current_dir(&obj)); + builder.run_quiet(Command::new("make").arg("-j1").current_dir(&obj)); println!("Installing openssl for {}", target); - build.run_quiet(Command::new("make").arg("install").current_dir(&obj)); + builder.run_quiet(Command::new("make").arg("install").current_dir(&obj)); - let mut f = t!(File::create(&stamp)); - t!(f.write_all(OPENSSL_VERS.as_bytes())); + t!(fs::write(&stamp, OPENSSL_VERS.as_bytes())); } } diff --git a/src/bootstrap/sanity.rs b/src/bootstrap/sanity.rs index 5184cca653c4b..f96e134ac4c09 100644 --- a/src/bootstrap/sanity.rs +++ b/src/bootstrap/sanity.rs @@ -20,14 +20,12 @@ use std::collections::HashMap; use std::env; -use std::ffi::{OsString, OsStr}; -use std::fs::{self, File}; -use std::io::Read; +use std::ffi::{OsStr, OsString}; use std::path::PathBuf; use std::process::Command; use build_helper::output; - +use fs; use Build; struct Finder { @@ -39,26 +37,31 @@ impl Finder { fn new() -> Self { Self { cache: HashMap::new(), - path: env::var_os("PATH").unwrap_or_default() + path: env::var_os("PATH").unwrap_or_default(), } } fn maybe_have>(&mut self, cmd: S) -> Option { let cmd: OsString = cmd.as_ref().into(); let path = self.path.clone(); - self.cache.entry(cmd.clone()).or_insert_with(|| { - for path in env::split_paths(&path) { - let target = path.join(&cmd); - let mut cmd_alt = cmd.clone(); - cmd_alt.push(".exe"); - if target.is_file() || // some/path/git + self.cache + .entry(cmd.clone()) + .or_insert_with(|| { + for path in env::split_paths(&path) { + let target = path.join(&cmd); + let mut cmd_alt = cmd.clone(); + cmd_alt.push(".exe"); + if target.is_file() || // some/path/git target.with_extension("exe").exists() || 
// some/path/git.exe - target.join(&cmd_alt).exists() { // some/path/git/git.exe - return Some(target); + target.join(&cmd_alt).exists() + { + // some/path/git/git.exe + return Some(target); + } } - } - None - }).clone() + None + }) + .clone() } fn must_have>(&mut self, cmd: S) -> PathBuf { @@ -86,16 +89,20 @@ pub fn check(build: &mut Build) { } // We need cmake, but only if we're actually building LLVM or sanitizers. - let building_llvm = build.hosts.iter() + let building_llvm = build + .config + .general + .host + .iter() .filter_map(|host| build.config.target_config.get(host)) .any(|config| config.llvm_config.is_none()); - if building_llvm || build.config.sanitizers { + if building_llvm || build.config.general.sanitizers { cmd_finder.must_have("cmake"); } // Ninja is currently only used for LLVM itself. if building_llvm { - if build.config.ninja { + if build.config.llvm.ninja { // Some Linux distros rename `ninja` to `ninja-build`. // CMake can work with either binary name. if cmd_finder.maybe_have("ninja-build").is_none() { @@ -110,29 +117,40 @@ pub fn check(build: &mut Build) { // // In these cases we automatically enable Ninja if we find it in the // environment. 
- if !build.config.ninja && build.config.build.contains("msvc") { + if !build.config.llvm.ninja && build.config.general.build.contains("msvc") { if cmd_finder.maybe_have("ninja").is_some() { - build.config.ninja = true; + build.config.llvm.ninja = true; } } } - build.config.python = build.config.python.take().map(|p| cmd_finder.must_have(p)) + build.config.general.python = build.config.general.python.take() + .map(|p| cmd_finder.must_have(p)) .or_else(|| env::var_os("BOOTSTRAP_PYTHON").map(PathBuf::from)) // set by bootstrap.py .or_else(|| cmd_finder.maybe_have("python2.7")) .or_else(|| cmd_finder.maybe_have("python2")) .or_else(|| Some(cmd_finder.must_have("python"))); - build.config.nodejs = build.config.nodejs.take().map(|p| cmd_finder.must_have(p)) + build.config.general.nodejs = build + .config + .general + .nodejs + .take() + .map(|p| cmd_finder.must_have(p)) .or_else(|| cmd_finder.maybe_have("node")) .or_else(|| cmd_finder.maybe_have("nodejs")); - build.config.gdb = build.config.gdb.take().map(|p| cmd_finder.must_have(p)) + build.config.general.gdb = build + .config + .general + .gdb + .take() + .map(|p| cmd_finder.must_have(p)) .or_else(|| cmd_finder.maybe_have("gdb")); // We're gonna build some custom C code here and there, host triples // also build some C++ shims for LLVM so we need a C++ compiler. - for target in &build.targets { + for target in &build.config.general.target { // On emscripten we don't actually need the C compiler to just // build the target artifacts, only for testing. For the sake // of easier bot configuration, just skip detection. @@ -146,26 +164,21 @@ pub fn check(build: &mut Build) { } } - for host in &build.hosts { + for host in &build.config.general.host { cmd_finder.must_have(build.cxx(*host).unwrap()); - - // The msvc hosts don't use jemalloc, turn it off globally to - // avoid packaging the dummy liballoc_jemalloc on that platform. 
- if host.contains("msvc") { - build.config.use_jemalloc = false; - } } // Externally configured LLVM requires FileCheck to exist - let filecheck = build.llvm_filecheck(build.build); - if !filecheck.starts_with(&build.out) && !filecheck.exists() && build.config.codegen_tests { + let filecheck = build.llvm_filecheck(build.config.general.build); + if !filecheck.starts_with(&build.config.general.out) && !filecheck.exists() + && build.config.rust.codegen_tests + { panic!("FileCheck executable {:?} does not exist", filecheck); } - for target in &build.targets { + for target in &build.config.general.target { // Can't compile for iOS unless we're on macOS - if target.contains("apple-ios") && - !build.build.contains("apple-darwin") { + if target.contains("apple-ios") && !build.config.general.build.contains("apple-darwin") { panic!("the iOS target is only supported on macOS"); } @@ -173,27 +186,34 @@ pub fn check(build: &mut Build) { if target.contains("musl") { // If this is a native target (host is also musl) and no musl-root is given, // fall back to the system toolchain in /usr before giving up - if build.musl_root(*target).is_none() && build.config.build == *target { - let target = build.config.target_config.entry(target.clone()) - .or_insert(Default::default()); + if build.musl_root(*target).is_none() && build.config.general.build == *target { + let target = build + .config + .target_config + .entry(target.clone()) + .or_insert(Default::default()); target.musl_root = Some("/usr".into()); } match build.musl_root(*target) { Some(root) => { if fs::metadata(root.join("lib/libc.a")).is_err() { - panic!("couldn't find libc.a in musl dir: {}", - root.join("lib").display()); + panic!( + "couldn't find libc.a in musl dir: {}", + root.join("lib").display() + ); } if fs::metadata(root.join("lib/libunwind.a")).is_err() { - panic!("couldn't find libunwind.a in musl dir: {}", - root.join("lib").display()); + panic!( + "couldn't find libunwind.a in musl dir: {}", + 
root.join("lib").display() + ); } } - None => { - panic!("when targeting MUSL either the rust.musl-root \ - option or the target.$TARGET.musl-root option must \ - be specified in config.toml") - } + None => panic!( + "when targeting MUSL either the rust.musl-root \ + option or the target.$TARGET.musl-root option must \ + be specified in config.toml" + ), } } @@ -203,7 +223,8 @@ pub fn check(build: &mut Build) { // Studio, so detect that here and error. let out = output(Command::new("cmake").arg("--help")); if !out.contains("Visual Studio") { - panic!(" + panic!( + " cmake does not support Visual Studio generators. This is likely due to it being an msys/cygwin build of cmake, @@ -214,7 +235,8 @@ If you are building under msys2 try installing the mingw-w64-x86_64-cmake package instead of cmake: $ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake -"); +" + ); } } } @@ -222,9 +244,10 @@ $ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake let run = |cmd: &mut Command| { cmd.output().map(|output| { String::from_utf8_lossy(&output.stdout) - .lines().next().unwrap_or_else(|| { - panic!("{:?} failed {:?}", cmd, output) - }).to_string() + .lines() + .next() + .unwrap_or_else(|| panic!("{:?} failed {:?}", cmd, output)) + .to_string() }) }; build.lldb_version = run(Command::new("lldb").arg("--version")).ok(); @@ -232,17 +255,17 @@ $ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake build.lldb_python_dir = run(Command::new("lldb").arg("-P")).ok(); } - if let Some(ref s) = build.config.ccache { + if let Some(ref s) = build.config.llvm.ccache() { cmd_finder.must_have(s); } - if build.config.channel == "stable" { - let mut stage0 = String::new(); - t!(t!(File::open(build.src.join("src/stage0.txt"))) - .read_to_string(&mut stage0)); + if build.config.rust.channel == "stable" { + let stage0 = t!(fs::read_string(build.config.src.join("src/stage0.txt"))); if stage0.contains("\ndev:") { - panic!("bootstrapping from a dev compiler in a stable release, but \ - should only be 
bootstrapping from a released compiler!"); + panic!( + "bootstrapping from a dev compiler in a stable release, but \ + should only be bootstrapping from a released compiler!" + ); } } } diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs index bd8c36a296c09..441148d6dbb0d 100644 --- a/src/bootstrap/test.rs +++ b/src/bootstrap/test.rs @@ -17,22 +17,21 @@ use std::env; use std::ffi::OsString; use std::iter; use std::fmt; -use std::fs::{self, File}; -use std::path::{PathBuf, Path}; +use std::path::{Path, PathBuf}; use std::process::Command; -use std::io::Read; use build_helper::{self, output}; -use builder::{Kind, RunConfig, ShouldRun, Builder, Compiler, Step}; +use fs; +use builder::{Builder, Compiler, Kind, RunConfig, ShouldRun, Step}; use Crate as CargoCrate; -use cache::{INTERNER, Interned}; +use cache::{Intern, Interned}; use compile; use dist; use native; use tool::{self, Tool}; use util::{self, dylib_path, dylib_path_var}; -use {Build, Mode}; +use Mode; use toolstate::ToolState; const ADB_TEST_DIR: &str = "/data/tmp/work"; @@ -65,27 +64,27 @@ impl fmt::Display for TestKind { } } -fn try_run(build: &Build, cmd: &mut Command) -> bool { - if !build.fail_fast { - if !build.try_run(cmd) { - let mut failures = build.delayed_failures.borrow_mut(); +fn try_run(builder: &Builder, cmd: &mut Command) -> bool { + if !builder.config.cmd.fail_fast() { + if !builder.try_run(cmd) { + let mut failures = builder.delayed_failures.borrow_mut(); failures.push(format!("{:?}", cmd)); return false; } } else { - build.run(cmd); + builder.run(cmd); } true } -fn try_run_quiet(build: &Build, cmd: &mut Command) { - if !build.fail_fast { - if !build.try_run_quiet(cmd) { - let mut failures = build.delayed_failures.borrow_mut(); +fn try_run_quiet(builder: &Builder, cmd: &mut Command) { + if !builder.config.cmd.fail_fast() { + if !builder.try_run_quiet(cmd) { + let mut failures = builder.delayed_failures.borrow_mut(); failures.push(format!("{:?}", cmd)); } } else { - 
build.run_quiet(cmd); + builder.run_quiet(cmd); } } @@ -104,7 +103,6 @@ impl Step for Linkcheck { /// This tool in `src/tools` will verify the validity of all our links in the /// documentation to ensure we don't have a bunch of dead ones. fn run(self, builder: &Builder) { - let build = builder.build; let host = self.host; println!("Linkcheck ({})", host); @@ -112,17 +110,22 @@ impl Step for Linkcheck { builder.default_doc(None); let _time = util::timeit(); - try_run(build, builder.tool_cmd(Tool::Linkchecker) - .arg(build.out.join(host).join("doc"))); + try_run( + builder, + builder + .tool_cmd(Tool::Linkchecker) + .arg(builder.config.general.out.join(host).join("doc")), + ); } fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path("src/tools/linkchecker").default_condition(builder.build.config.docs) + run.path("src/tools/linkchecker") + .default_condition(builder.config.general.docs) } fn make_run(run: RunConfig) { - run.builder.ensure(Linkcheck { host: run.target }); + run.builder.ensure(Linkcheck { host: run.host }); } } @@ -143,7 +146,7 @@ impl Step for Cargotest { fn make_run(run: RunConfig) { run.builder.ensure(Cargotest { stage: run.builder.top_stage, - host: run.target, + host: run.host, }); } @@ -152,22 +155,27 @@ impl Step for Cargotest { /// This tool in `src/tools` will check out a few Rust projects and run `cargo /// test` to ensure that we don't regress the test suites there. fn run(self, builder: &Builder) { - let build = builder.build; let compiler = builder.compiler(self.stage, self.host); - builder.ensure(compile::Rustc { compiler, target: compiler.host }); + builder.ensure(compile::Rustc { + compiler, + target: compiler.host, + }); // Note that this is a short, cryptic, and not scoped directory name. This // is currently to minimize the length of path on Windows where we otherwise // quickly run into path name limit constraints. 
- let out_dir = build.out.join("ct"); + let out_dir = builder.config.general.out.join("ct"); t!(fs::create_dir_all(&out_dir)); let _time = util::timeit(); let mut cmd = builder.tool_cmd(Tool::CargoTest); - try_run(build, cmd.arg(&build.initial_cargo) - .arg(&out_dir) - .env("RUSTC", builder.rustc(compiler)) - .env("RUSTDOC", builder.rustdoc(compiler.host))); + try_run( + builder, + cmd.arg(&builder.config.general.initial_cargo) + .arg(&out_dir) + .env("RUSTC", builder.rustc(compiler)) + .env("RUSTDOC", builder.rustdoc(compiler.host)), + ); } } @@ -188,19 +196,23 @@ impl Step for Cargo { fn make_run(run: RunConfig) { run.builder.ensure(Cargo { stage: run.builder.top_stage, - host: run.target, + host: run.host, }); } /// Runs `cargo test` for `cargo` packaged with Rust. fn run(self, builder: &Builder) { - let build = builder.build; let compiler = builder.compiler(self.stage, self.host); - builder.ensure(tool::Cargo { compiler, target: self.host }); - let mut cargo = builder.cargo(compiler, Mode::Tool, self.host, "test"); - cargo.arg("--manifest-path").arg(build.src.join("src/tools/cargo/Cargo.toml")); - if !build.fail_fast { + builder.ensure(tool::Cargo { + compiler, + target: self.host, + }); + let mut cargo = builder.cargo(compiler, Mode::RustcTool, self.host, "test"); + cargo + .arg("--manifest-path") + .arg(builder.config.src.join("src/tools/cargo/Cargo.toml")); + if !builder.config.cmd.fail_fast() { cargo.arg("--no-fail-fast"); } @@ -211,7 +223,10 @@ impl Step for Cargo { // available. cargo.env("CFG_DISABLE_CROSS_TESTS", "1"); - try_run(build, cargo.env("PATH", &path_for_cargo(builder, compiler))); + try_run( + builder, + cargo.env("PATH", &path_for_cargo(builder, compiler)), + ); } } @@ -232,31 +247,30 @@ impl Step for Rls { fn make_run(run: RunConfig) { run.builder.ensure(Rls { stage: run.builder.top_stage, - host: run.target, + host: run.host, }); } /// Runs `cargo test` for the rls. 
fn run(self, builder: &Builder) { - let build = builder.build; let stage = self.stage; let host = self.host; let compiler = builder.compiler(stage, host); - builder.ensure(tool::Rls { compiler, target: self.host }); - let mut cargo = tool::prepare_tool_cargo(builder, - compiler, - host, - "test", - "src/tools/rls"); + builder.ensure(tool::Rls { + compiler, + target: self.host, + }); + let mut cargo = tool::prepare_tool_cargo( + builder, compiler, Mode::RustcTool, host, "test", "src/tools/rls"); // Don't build tests dynamically, just a pain to work with cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); builder.add_rustc_lib_path(compiler, &mut cargo); - if try_run(build, &mut cargo) { - build.save_toolstate("rls", ToolState::TestPass); + if try_run(builder, &mut cargo) { + builder.save_toolstate("rls", ToolState::TestPass); } } } @@ -278,31 +292,30 @@ impl Step for Rustfmt { fn make_run(run: RunConfig) { run.builder.ensure(Rustfmt { stage: run.builder.top_stage, - host: run.target, + host: run.host, }); } /// Runs `cargo test` for rustfmt. 
fn run(self, builder: &Builder) { - let build = builder.build; let stage = self.stage; let host = self.host; let compiler = builder.compiler(stage, host); - builder.ensure(tool::Rustfmt { compiler, target: self.host }); - let mut cargo = tool::prepare_tool_cargo(builder, - compiler, - host, - "test", - "src/tools/rustfmt"); + builder.ensure(tool::Rustfmt { + compiler, + target: self.host, + }); + let mut cargo = tool::prepare_tool_cargo( + builder, compiler, Mode::RustcTool, host, "test", "src/tools/rustfmt"); // Don't build tests dynamically, just a pain to work with cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); builder.add_rustc_lib_path(compiler, &mut cargo); - if try_run(build, &mut cargo) { - build.save_toolstate("rustfmt", ToolState::TestPass); + if try_run(builder, &mut cargo) { + builder.save_toolstate("rustfmt", ToolState::TestPass); } } } @@ -319,27 +332,31 @@ impl Step for Miri { const DEFAULT: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { - let test_miri = run.builder.build.config.test_miri; + let test_miri = run.builder.config.rust.test_miri; run.path("src/tools/miri").default_condition(test_miri) } fn make_run(run: RunConfig) { run.builder.ensure(Miri { stage: run.builder.top_stage, - host: run.target, + host: run.host, }); } /// Runs `cargo test` for miri. 
fn run(self, builder: &Builder) { - let build = builder.build; let stage = self.stage; let host = self.host; let compiler = builder.compiler(stage, host); - if let Some(miri) = builder.ensure(tool::Miri { compiler, target: self.host }) { - let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test"); - cargo.arg("--manifest-path").arg(build.src.join("src/tools/miri/Cargo.toml")); + if let Some(miri) = builder.ensure(tool::Miri { + compiler, + target: self.host, + }) { + let mut cargo = builder.cargo(compiler, Mode::RustcTool, host, "test"); + cargo + .arg("--manifest-path") + .arg(builder.config.src.join("src/tools/miri/Cargo.toml")); // Don't build tests dynamically, just a pain to work with cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); @@ -351,8 +368,8 @@ impl Step for Miri { builder.add_rustc_lib_path(compiler, &mut cargo); - if try_run(build, &mut cargo) { - build.save_toolstate("miri", ToolState::TestPass); + if try_run(builder, &mut cargo) { + builder.save_toolstate("miri", ToolState::TestPass); } } else { eprintln!("failed to test miri: could not build"); @@ -378,20 +395,24 @@ impl Step for Clippy { fn make_run(run: RunConfig) { run.builder.ensure(Clippy { stage: run.builder.top_stage, - host: run.target, + host: run.host, }); } /// Runs `cargo test` for clippy. 
fn run(self, builder: &Builder) { - let build = builder.build; let stage = self.stage; let host = self.host; let compiler = builder.compiler(stage, host); - if let Some(clippy) = builder.ensure(tool::Clippy { compiler, target: self.host }) { - let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test"); - cargo.arg("--manifest-path").arg(build.src.join("src/tools/clippy/Cargo.toml")); + if let Some(clippy) = builder.ensure(tool::Clippy { + compiler, + target: self.host, + }) { + let mut cargo = builder.cargo(compiler, Mode::RustcTool, host, "test"); + cargo + .arg("--manifest-path") + .arg(builder.config.src.join("src/tools/clippy/Cargo.toml")); // Don't build tests dynamically, just a pain to work with cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); @@ -399,15 +420,17 @@ impl Step for Clippy { cargo.env("SYSROOT", builder.sysroot(compiler)); cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler)); cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler)); - let host_libs = builder.stage_out(compiler, Mode::Tool).join(builder.cargo_dir()); + let host_libs = builder + .stage_out(compiler, Mode::RustcTool) + .join(builder.cargo_dir()); cargo.env("HOST_LIBS", host_libs); // clippy tests need to find the driver cargo.env("CLIPPY_DRIVER_PATH", clippy); builder.add_rustc_lib_path(compiler, &mut cargo); - if try_run(build, &mut cargo) { - build.save_toolstate("clippy-driver", ToolState::TestPass); + if try_run(builder, &mut cargo) { + builder.save_toolstate("clippy-driver", ToolState::TestPass); } } else { eprintln!("failed to test clippy: could not build"); @@ -441,27 +464,35 @@ impl Step for RustdocTheme { fn make_run(run: RunConfig) { let compiler = run.builder.compiler(run.builder.top_stage, run.host); - run.builder.ensure(RustdocTheme { - compiler: compiler, - }); + run.builder.ensure(RustdocTheme { compiler: compiler }); } fn run(self, builder: &Builder) { let rustdoc = builder.rustdoc(self.compiler.host); let mut cmd = builder.tool_cmd(Tool::RustdocTheme); 
cmd.arg(rustdoc.to_str().unwrap()) - .arg(builder.src.join("src/librustdoc/html/static/themes").to_str().unwrap()) - .env("RUSTC_STAGE", self.compiler.stage.to_string()) - .env("RUSTC_SYSROOT", builder.sysroot(self.compiler)) - .env("RUSTDOC_LIBDIR", builder.sysroot_libdir(self.compiler, self.compiler.host)) - .env("CFG_RELEASE_CHANNEL", &builder.build.config.channel) - .env("RUSTDOC_REAL", builder.rustdoc(self.compiler.host)) - .env("RUSTDOC_CRATE_VERSION", builder.build.rust_version()) - .env("RUSTC_BOOTSTRAP", "1"); - if let Some(linker) = builder.build.linker(self.compiler.host) { + .arg( + builder + .config + .src + .join("src/librustdoc/html/static/themes") + .to_str() + .unwrap(), + ) + .env("RUSTC_STAGE", self.compiler.stage.to_string()) + .env("RUSTC_SYSROOT", builder.sysroot(self.compiler)) + .env( + "RUSTDOC_LIBDIR", + builder.sysroot_libdir(self.compiler, self.compiler.host), + ) + .env("CFG_RELEASE_CHANNEL", &builder.config.rust.channel) + .env("RUSTDOC_REAL", builder.rustdoc(self.compiler.host)) + .env("RUSTDOC_CRATE_VERSION", builder.rust_version()) + .env("RUSTC_BOOTSTRAP", "1"); + if let Some(linker) = builder.linker(self.compiler.host) { cmd.env("RUSTC_TARGET_LINKER", linker); } - try_run(builder.build, &mut cmd); + try_run(builder, &mut cmd); } } @@ -488,7 +519,7 @@ impl Step for RustdocJS { } fn run(self, builder: &Builder) { - if let Some(ref nodejs) = builder.config.nodejs { + if let Some(ref nodejs) = builder.config.general.nodejs { let mut command = Command::new(nodejs); command.args(&["src/tools/rustdoc-js/tester.js", &*self.host]); builder.ensure(::doc::Std { @@ -511,7 +542,6 @@ impl Step for Tidy { type Output = (); const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; - const ONLY_BUILD: bool = true; /// Runs the `tidy` tool as compiled in `stage` by the `host` compiler. /// @@ -519,20 +549,19 @@ impl Step for Tidy { /// otherwise just implements a few lint-like checks that are specific to the /// compiler itself. 
fn run(self, builder: &Builder) { - let build = builder.build; let host = self.host; - let _folder = build.fold_output(|| "tidy"); + let _folder = builder.fold_output(|| "tidy"); println!("tidy check ({})", host); let mut cmd = builder.tool_cmd(Tool::Tidy); - cmd.arg(build.src.join("src")); - if !build.config.vendor { + cmd.arg(builder.config.src.join("src")); + if !builder.config.general.vendor { cmd.arg("--no-vendor"); } - if build.config.quiet_tests { + if builder.config.rust.quiet_tests { cmd.arg("--quiet"); } - try_run(build, &mut cmd); + try_run(builder, &mut cmd); } fn should_run(run: ShouldRun) -> ShouldRun { @@ -541,24 +570,24 @@ impl Step for Tidy { fn make_run(run: RunConfig) { run.builder.ensure(Tidy { - host: run.builder.build.build, + host: run.builder.config.general.build, }); } } -fn testdir(build: &Build, host: Interned) -> PathBuf { - build.out.join(host).join("test") +fn testdir(builder: &Builder, host: Interned) -> PathBuf { + builder.config.general.out.join(host).join("test") } macro_rules! default_test { - ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr }) => { - test!($name { path: $path, mode: $mode, suite: $suite, default: true, host: false }); + ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr, }) => { + test!($name { path: $path, mode: $mode, suite: $suite, default: true, host: false, }); } } macro_rules! host_test { - ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr }) => { - test!($name { path: $path, mode: $mode, suite: $suite, default: true, host: true }); + ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr, }) => { + test!($name { path: $path, mode: $mode, suite: $suite, default: true, host: true, }); } } @@ -568,7 +597,7 @@ macro_rules! 
test { mode: $mode:expr, suite: $suite:expr, default: $default:expr, - host: $host:expr + host: $host:expr, }) => { #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct $name { @@ -609,104 +638,104 @@ macro_rules! test { default_test!(Ui { path: "src/test/ui", mode: "ui", - suite: "ui" + suite: "ui", }); default_test!(RunPass { path: "src/test/run-pass", mode: "run-pass", - suite: "run-pass" + suite: "run-pass", }); default_test!(CompileFail { path: "src/test/compile-fail", mode: "compile-fail", - suite: "compile-fail" + suite: "compile-fail", }); default_test!(ParseFail { path: "src/test/parse-fail", mode: "parse-fail", - suite: "parse-fail" + suite: "parse-fail", }); default_test!(RunFail { path: "src/test/run-fail", mode: "run-fail", - suite: "run-fail" + suite: "run-fail", }); default_test!(RunPassValgrind { path: "src/test/run-pass-valgrind", mode: "run-pass-valgrind", - suite: "run-pass-valgrind" + suite: "run-pass-valgrind", }); default_test!(MirOpt { path: "src/test/mir-opt", mode: "mir-opt", - suite: "mir-opt" + suite: "mir-opt", }); default_test!(Codegen { path: "src/test/codegen", mode: "codegen", - suite: "codegen" + suite: "codegen", }); default_test!(CodegenUnits { path: "src/test/codegen-units", mode: "codegen-units", - suite: "codegen-units" + suite: "codegen-units", }); default_test!(Incremental { path: "src/test/incremental", mode: "incremental", - suite: "incremental" + suite: "incremental", }); default_test!(Debuginfo { path: "src/test/debuginfo", // What this runs varies depending on the native platform being apple mode: "debuginfo-XXX", - suite: "debuginfo" + suite: "debuginfo", }); host_test!(UiFullDeps { path: "src/test/ui-fulldeps", mode: "ui", - suite: "ui-fulldeps" + suite: "ui-fulldeps", }); host_test!(RunPassFullDeps { path: "src/test/run-pass-fulldeps", mode: "run-pass", - suite: "run-pass-fulldeps" + suite: "run-pass-fulldeps", }); host_test!(RunFailFullDeps { path: "src/test/run-fail-fulldeps", mode: "run-fail", - suite: 
"run-fail-fulldeps" + suite: "run-fail-fulldeps", }); host_test!(CompileFailFullDeps { path: "src/test/compile-fail-fulldeps", mode: "compile-fail", - suite: "compile-fail-fulldeps" + suite: "compile-fail-fulldeps", }); host_test!(IncrementalFullDeps { path: "src/test/incremental-fulldeps", mode: "incremental", - suite: "incremental-fulldeps" + suite: "incremental-fulldeps", }); host_test!(Rustdoc { path: "src/test/rustdoc", mode: "rustdoc", - suite: "rustdoc" + suite: "rustdoc", }); test!(Pretty { @@ -714,48 +743,48 @@ test!(Pretty { mode: "pretty", suite: "pretty", default: false, - host: true + host: true, }); test!(RunPassPretty { path: "src/test/run-pass/pretty", mode: "pretty", suite: "run-pass", default: false, - host: true + host: true, }); test!(RunFailPretty { path: "src/test/run-fail/pretty", mode: "pretty", suite: "run-fail", default: false, - host: true + host: true, }); test!(RunPassValgrindPretty { path: "src/test/run-pass-valgrind/pretty", mode: "pretty", suite: "run-pass-valgrind", default: false, - host: true + host: true, }); test!(RunPassFullDepsPretty { path: "src/test/run-pass-fulldeps/pretty", mode: "pretty", suite: "run-pass-fulldeps", default: false, - host: true + host: true, }); test!(RunFailFullDepsPretty { path: "src/test/run-fail-fulldeps/pretty", mode: "pretty", suite: "run-fail-fulldeps", default: false, - host: true + host: true, }); host_test!(RunMake { path: "src/test/run-make", mode: "run-make", - suite: "run-make" + suite: "run-make", }); #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] @@ -779,25 +808,24 @@ impl Step for Compiletest { /// compiletest `mode` and `suite` arguments. For example `mode` can be /// "run-pass" or `suite` can be something like `debuginfo`. fn run(self, builder: &Builder) { - let build = builder.build; let compiler = self.compiler; let target = self.target; let mode = self.mode; let suite = self.suite; // Skip codegen tests if they aren't enabled in configuration. 
- if !build.config.codegen_tests && suite == "codegen" { + if !builder.config.rust.codegen_tests && suite == "codegen" { return; } if suite == "debuginfo" { // Skip debuginfo tests on MSVC - if build.build.contains("msvc") { + if builder.config.general.build.contains("msvc") { return; } if mode == "debuginfo-XXX" { - return if build.build.contains("apple") { + return if builder.config.general.build.contains("apple") { builder.ensure(Compiletest { mode: "debuginfo-lldb", ..self @@ -812,16 +840,15 @@ impl Step for Compiletest { builder.ensure(dist::DebuggerScripts { sysroot: builder.sysroot(compiler), - host: target + host: target, }); } if suite.ends_with("fulldeps") || // FIXME: Does pretty need librustc compiled? Note that there are // fulldeps test suites with mode = pretty as well. - mode == "pretty" || - mode == "rustdoc" || - mode == "run-make" { + mode == "pretty" || mode == "rustdoc" || mode == "run-make" + { builder.ensure(compile::Rustc { compiler, target }); } @@ -829,46 +856,55 @@ impl Step for Compiletest { builder.ensure(native::TestHelpers { target }); builder.ensure(RemoteCopyLibs { compiler, target }); - let _folder = build.fold_output(|| format!("test_{}", suite)); - println!("Check compiletest suite={} mode={} ({} -> {})", - suite, mode, &compiler.host, target); + let _folder = builder.fold_output(|| format!("test_{}", suite)); + println!( + "Check compiletest suite={} mode={} ({} -> {})", + suite, mode, &compiler.host, target + ); let mut cmd = builder.tool_cmd(Tool::Compiletest); // compiletest currently has... a lot of arguments, so let's just pass all // of them! 
- cmd.arg("--compile-lib-path").arg(builder.rustc_libdir(compiler)); - cmd.arg("--run-lib-path").arg(builder.sysroot_libdir(compiler, target)); + cmd.arg("--compile-lib-path") + .arg(builder.rustc_libdir(compiler)); + cmd.arg("--run-lib-path") + .arg(builder.sysroot_libdir(compiler, target)); cmd.arg("--rustc-path").arg(builder.rustc(compiler)); // Avoid depending on rustdoc when we don't need it. if mode == "rustdoc" || mode == "run-make" { - cmd.arg("--rustdoc-path").arg(builder.rustdoc(compiler.host)); + cmd.arg("--rustdoc-path") + .arg(builder.rustdoc(compiler.host)); } - cmd.arg("--src-base").arg(build.src.join("src/test").join(suite)); - cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite)); - cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target)); + cmd.arg("--src-base") + .arg(builder.config.src.join("src/test").join(suite)); + cmd.arg("--build-base") + .arg(testdir(builder, compiler.host).join(suite)); + cmd.arg("--stage-id") + .arg(format!("stage{}-{}", compiler.stage, target)); cmd.arg("--mode").arg(mode); cmd.arg("--target").arg(target); cmd.arg("--host").arg(&*compiler.host); - cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(build.build)); + cmd.arg("--llvm-filecheck") + .arg(builder.llvm_filecheck(builder.config.general.build)); - if let Some(ref nodejs) = build.config.nodejs { + if let Some(ref nodejs) = builder.config.general.nodejs { cmd.arg("--nodejs").arg(nodejs); } let mut flags = vec!["-Crpath".to_string()]; - if build.config.rust_optimize_tests { + if builder.config.rust.optimize_tests { flags.push("-O".to_string()); } - if build.config.rust_debuginfo_tests { + if builder.config.rust.debuginfo_tests { flags.push("-g".to_string()); } flags.push("-Zmiri -Zunstable-options".to_string()); - flags.push(build.config.cmd.rustc_args().join(" ")); + flags.push(builder.config.cmd.rustc_args().join(" ")); - if let Some(linker) = build.linker(target) { + if let Some(linker) = builder.linker(target) { 
cmd.arg("--linker").arg(linker); } @@ -876,46 +912,48 @@ impl Step for Compiletest { cmd.arg("--host-rustcflags").arg(hostflags.join(" ")); let mut targetflags = flags.clone(); - targetflags.push(format!("-Lnative={}", - build.test_helpers_out(target).display())); + targetflags.push(format!( + "-Lnative={}", + builder.test_helpers_out(target).display() + )); cmd.arg("--target-rustcflags").arg(targetflags.join(" ")); - cmd.arg("--docck-python").arg(build.python()); + cmd.arg("--docck-python").arg(builder.python()); - if build.build.ends_with("apple-darwin") { + if builder.config.general.build.ends_with("apple-darwin") { // Force /usr/bin/python on macOS for LLDB tests because we're loading the // LLDB plugin's compiled module which only works with the system python // (namely not Homebrew-installed python) cmd.arg("--lldb-python").arg("/usr/bin/python"); } else { - cmd.arg("--lldb-python").arg(build.python()); + cmd.arg("--lldb-python").arg(builder.python()); } - if let Some(ref gdb) = build.config.gdb { + if let Some(ref gdb) = builder.config.general.gdb { cmd.arg("--gdb").arg(gdb); } - if let Some(ref vers) = build.lldb_version { + if let Some(ref vers) = builder.lldb_version { cmd.arg("--lldb-version").arg(vers); } - if let Some(ref dir) = build.lldb_python_dir { + if let Some(ref dir) = builder.lldb_python_dir { cmd.arg("--lldb-python-dir").arg(dir); } - cmd.args(&build.config.cmd.test_args()); + cmd.args(&builder.config.cmd.test_args()); - if build.is_verbose() { + if builder.is_verbose() { cmd.arg("--verbose"); } - if build.config.quiet_tests { + if builder.config.rust.quiet_tests { cmd.arg("--quiet"); } - if build.config.llvm_enabled { - let llvm_config = build.llvm_config(target); + if builder.config.llvm.enabled { + let llvm_config = builder.llvm_config(target); let llvm_version = output(Command::new(&llvm_config).arg("--version")); cmd.arg("--llvm-version").arg(llvm_version); - if !build.is_rust_llvm(target) { + if !builder.is_rust_llvm(target) { 
cmd.arg("--system-llvm"); } @@ -924,31 +962,42 @@ impl Step for Compiletest { if suite == "run-make" { let llvm_components = output(Command::new(&llvm_config).arg("--components")); let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags")); - cmd.arg("--cc").arg(build.cc(target)) - .arg("--cxx").arg(build.cxx(target).unwrap()) - .arg("--cflags").arg(build.cflags(target).join(" ")) - .arg("--llvm-components").arg(llvm_components.trim()) - .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim()); - if let Some(ar) = build.ar(target) { + cmd.arg("--cc") + .arg(builder.cc(target)) + .arg("--cxx") + .arg(builder.cxx(target).unwrap()) + .arg("--cflags") + .arg(builder.cflags(target).join(" ")) + .arg("--llvm-components") + .arg(llvm_components.trim()) + .arg("--llvm-cxxflags") + .arg(llvm_cxxflags.trim()); + if let Some(ar) = builder.ar(target) { cmd.arg("--ar").arg(ar); } } } - if suite == "run-make" && !build.config.llvm_enabled { + if suite == "run-make" && !builder.config.llvm.enabled { println!("Ignoring run-make test suite as they generally don't work without LLVM"); return; } if suite != "run-make" { - cmd.arg("--cc").arg("") - .arg("--cxx").arg("") - .arg("--cflags").arg("") - .arg("--llvm-components").arg("") - .arg("--llvm-cxxflags").arg(""); + cmd.arg("--cc") + .arg("") + .arg("--cxx") + .arg("") + .arg("--cflags") + .arg("") + .arg("--llvm-components") + .arg("") + .arg("--llvm-cxxflags") + .arg(""); } - if build.remote_tested(target) { - cmd.arg("--remote-test-client").arg(builder.tool_exe(Tool::RemoteTestClient)); + if builder.remote_tested(target) { + cmd.arg("--remote-test-client") + .arg(builder.tool_exe(Tool::RemoteTestClient)); } // Running a C compiler on MSVC requires a few env vars to be set, to be @@ -957,39 +1006,39 @@ impl Step for Compiletest { // Note that if we encounter `PATH` we make sure to append to our own `PATH` // rather than stomp over it. 
if target.contains("msvc") { - for &(ref k, ref v) in build.cc[&target].env() { + for &(ref k, ref v) in builder.cc[&target].env() { if k != "PATH" { cmd.env(k, v); } } } cmd.env("RUSTC_BOOTSTRAP", "1"); - build.add_rust_test_threads(&mut cmd); + builder.add_rust_test_threads(&mut cmd); - if build.config.sanitizers { + if builder.config.general.sanitizers { cmd.env("SANITIZER_SUPPORT", "1"); } - if build.config.profiler { + if builder.config.general.profiler { cmd.env("PROFILER_SUPPORT", "1"); } - cmd.env("RUST_TEST_TMPDIR", build.out.join("tmp")); + cmd.env("RUST_TEST_TMPDIR", builder.config.general.out.join("tmp")); cmd.arg("--adb-path").arg("adb"); cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR); if target.contains("android") { // Assume that cc for this target comes from the android sysroot cmd.arg("--android-cross-path") - .arg(build.cc(target).parent().unwrap().parent().unwrap()); + .arg(builder.cc(target).parent().unwrap().parent().unwrap()); } else { cmd.arg("--android-cross-path").arg(""); } - build.ci_env.force_coloring_in_ci(&mut cmd); + builder.ci_env.force_coloring_in_ci(&mut cmd); let _time = util::timeit(); - try_run(build, &mut cmd); + try_run(builder, &mut cmd); } } @@ -1019,21 +1068,23 @@ impl Step for Docs { /// located in `src/doc`. The `rustdoc` that's run is the one that sits next to /// `compiler`. 
fn run(self, builder: &Builder) { - let build = builder.build; let compiler = self.compiler; - builder.ensure(compile::Test { compiler, target: compiler.host }); + builder.ensure(compile::Test { + compiler, + target: compiler.host, + }); // Do a breadth-first traversal of the `src/doc` directory and just run // tests for all files that end in `*.md` - let mut stack = vec![build.src.join("src/doc")]; + let mut stack = vec![builder.config.src.join("src/doc")]; let _time = util::timeit(); - let _folder = build.fold_output(|| "test_docs"); + let _folder = builder.fold_output(|| "test_docs"); while let Some(p) = stack.pop() { if p.is_dir() { stack.extend(t!(p.read_dir()).map(|p| t!(p).path())); - continue + continue; } if p.extension().and_then(|s| s.to_str()) != Some("md") { @@ -1041,7 +1092,7 @@ impl Step for Docs { } // The nostarch directory in the book is for no starch, and so isn't - // guaranteed to build. We don't care if it doesn't build, so skip it. + // guaranteed to build. We don't care if it doesn't build, so skip it. if p.to_str().map_or(false, |p| p.contains("nostarch")) { continue; } @@ -1078,52 +1129,54 @@ impl Step for ErrorIndex { /// generate a markdown file from the error indexes of the code base which is /// then passed to `rustdoc --test`. 
fn run(self, builder: &Builder) { - let build = builder.build; let compiler = self.compiler; - builder.ensure(compile::Std { compiler, target: compiler.host }); + builder.ensure(compile::Std { + compiler, + target: compiler.host, + }); - let _folder = build.fold_output(|| "test_error_index"); + let _folder = builder.fold_output(|| "test_error_index"); println!("Testing error-index stage{}", compiler.stage); - let dir = testdir(build, compiler.host); + let dir = testdir(builder, compiler.host); t!(fs::create_dir_all(&dir)); let output = dir.join("error-index.md"); let _time = util::timeit(); - build.run(builder.tool_cmd(Tool::ErrorIndex) - .arg("markdown") - .arg(&output) - .env("CFG_BUILD", &build.build) - .env("RUSTC_ERROR_METADATA_DST", build.extended_error_dir())); + builder.run( + builder + .tool_cmd(Tool::ErrorIndex) + .arg("markdown") + .arg(&output) + .env("CFG_BUILD", &builder.config.general.build) + .env("RUSTC_ERROR_METADATA_DST", builder.extended_error_dir()), + ); markdown_test(builder, compiler, &output); } } fn markdown_test(builder: &Builder, compiler: Compiler, markdown: &Path) { - let build = builder.build; - let mut file = t!(File::open(markdown)); - let mut contents = String::new(); - t!(file.read_to_string(&mut contents)); + let contents = t!(fs::read_string(markdown)); if !contents.contains("```") { return; } println!("doc tests for: {}", markdown.display()); let mut cmd = builder.rustdoc_cmd(compiler.host); - build.add_rust_test_threads(&mut cmd); + builder.add_rust_test_threads(&mut cmd); cmd.arg("--test"); cmd.arg(markdown); cmd.env("RUSTC_BOOTSTRAP", "1"); - let test_args = build.config.cmd.test_args().join(" "); + let test_args = builder.config.cmd.test_args().join(" "); cmd.arg("--test-args").arg(test_args); - if build.config.quiet_tests { - try_run_quiet(build, &mut cmd); + if builder.config.rust.quiet_tests { + try_run_quiet(builder, &mut cmd); } else { - try_run(build, &mut cmd); + try_run(builder, &mut cmd); } } @@ -1231,12 +1284,11 
@@ impl Step for CrateNotDefault { target: self.target, mode: Mode::Libstd, test_kind: self.test_kind, - krate: INTERNER.intern_str(self.krate), + krate: self.krate.intern(), }); } } - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct Crate { compiler: Compiler, @@ -1254,10 +1306,10 @@ impl Step for Crate { let builder = run.builder; run = run.krate("test"); for krate in run.builder.in_tree_crates("std") { - if krate.is_local(&run.builder) && - !krate.name.contains("jemalloc") && - !(krate.name.starts_with("rustc_") && krate.name.ends_with("san")) && - krate.name != "dlmalloc" { + if krate.is_local(&run.builder) && !krate.name.contains("jemalloc") + && !(krate.name.starts_with("rustc_") && krate.name.ends_with("san")) + && krate.name != "dlmalloc" + { run = run.path(krate.local_path(&builder).to_str().unwrap()); } } @@ -1307,7 +1359,6 @@ impl Step for Crate { /// Currently this runs all tests for a DAG by passing a bunch of `-p foo` /// arguments, and those arguments are discovered from `cargo metadata`. fn run(self, builder: &Builder) { - let build = builder.build; let compiler = self.compiler; let target = self.target; let mode = self.mode; @@ -1321,7 +1372,7 @@ impl Step for Crate { // libstd, then what we're actually testing is the libstd produced in // stage1. Reflect that here by updating the compiler that we're working // with automatically. 
- let compiler = if build.force_use_stage1(compiler, target) { + let compiler = if builder.force_use_stage1(compiler, target) { builder.compiler(1, compiler.host) } else { compiler.clone() @@ -1329,33 +1380,35 @@ impl Step for Crate { let mut cargo = builder.cargo(compiler, mode, target, test_kind.subcommand()); match mode { - Mode::Libstd => { - compile::std_cargo(build, &compiler, target, &mut cargo); - } - Mode::Libtest => { - compile::test_cargo(build, &compiler, target, &mut cargo); - } + Mode::Libstd => {} + Mode::Libtest => {} Mode::Librustc => { builder.ensure(compile::Rustc { compiler, target }); - compile::rustc_cargo(build, &mut cargo); } _ => panic!("can only test libraries"), }; - let _folder = build.fold_output(|| { - format!("{}_stage{}-{}", test_kind.subcommand(), compiler.stage, krate) + let _folder = builder.fold_output(|| { + format!( + "{}_stage{}-{}", + test_kind.subcommand(), + compiler.stage, + krate + ) }); - println!("{} {} stage{} ({} -> {})", test_kind, krate, compiler.stage, - &compiler.host, target); + println!( + "{} {} stage{} ({} -> {})", + test_kind, krate, compiler.stage, &compiler.host, target + ); // Build up the base `cargo test` command. // // Pass in some standard flags then iterate over the graph we've discovered // in `cargo metadata` with the maps above and figure out what `-p` // arguments need to get passed. 
- if test_kind.subcommand() == "test" && !build.fail_fast { + if test_kind.subcommand() == "test" && !builder.config.cmd.fail_fast() { cargo.arg("--no-fail-fast"); } - if build.doc_tests { + if builder.config.cmd.doc_tests() { cargo.arg("--doc"); } @@ -1371,42 +1424,58 @@ impl Step for Crate { cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); cargo.arg("--"); - cargo.args(&build.config.cmd.test_args()); + cargo.args(&builder.config.cmd.test_args()); - if build.config.quiet_tests { + if builder.config.rust.quiet_tests { cargo.arg("--quiet"); } let _time = util::timeit(); if target.contains("emscripten") { - cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), - build.config.nodejs.as_ref().expect("nodejs not configured")); + cargo.env( + format!("CARGO_TARGET_{}_RUNNER", envify(&target)), + builder + .config + .general + .nodejs + .as_ref() + .expect("nodejs not configured"), + ); } else if target.starts_with("wasm32") { // Warn about running tests without the `wasm_syscall` feature enabled. // The javascript shim implements the syscall interface so that test // output can be correctly reported. - if !build.config.wasm_syscall { - println!("Libstd was built without `wasm_syscall` feature enabled: \ - test output may not be visible."); + if !builder.config.rust.wasm_syscall { + println!( + "Libstd was built without `wasm_syscall` feature enabled: \ + test output may not be visible." 
+ ); } // On the wasm32-unknown-unknown target we're using LTO which is // incompatible with `-C prefer-dynamic`, so disable that here cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); - let node = build.config.nodejs.as_ref() + let node = builder + .config + .general + .nodejs + .as_ref() .expect("nodejs not configured"); - let runner = format!("{} {}/src/etc/wasm32-shim.js", - node.display(), - build.src.display()); + let runner = format!( + "{} {}/src/etc/wasm32-shim.js", + node.display(), + builder.config.src.display() + ); cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), &runner); - } else if build.remote_tested(target) { - cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), - format!("{} run", - builder.tool_exe(Tool::RemoteTestClient).display())); + } else if builder.remote_tested(target) { + cargo.env( + format!("CARGO_TARGET_{}_RUNNER", envify(&target)), + format!("{} run", builder.tool_exe(Tool::RemoteTestClient).display()), + ); } - try_run(build, &mut cargo); + try_run(builder, &mut cargo); } } @@ -1443,49 +1512,53 @@ impl Step for CrateRustdoc { } fn run(self, builder: &Builder) { - let build = builder.build; let test_kind = self.test_kind; let compiler = builder.compiler(builder.top_stage, self.host); let target = compiler.host; - let mut cargo = tool::prepare_tool_cargo(builder, - compiler, - target, - test_kind.subcommand(), - "src/tools/rustdoc"); - let _folder = build.fold_output(|| { - format!("{}_stage{}-rustdoc", test_kind.subcommand(), compiler.stage) - }); - println!("{} rustdoc stage{} ({} -> {})", test_kind, compiler.stage, - &compiler.host, target); - - if test_kind.subcommand() == "test" && !build.fail_fast { + let mut cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::TestTool, + target, + test_kind.subcommand(), + "src/tools/rustdoc", + ); + let _folder = builder + .fold_output(|| format!("{}_stage{}-rustdoc", test_kind.subcommand(), compiler.stage)); + println!( + "{} rustdoc stage{} ({} -> {})", + 
test_kind, compiler.stage, &compiler.host, target + ); + + if test_kind.subcommand() == "test" && !builder.config.cmd.fail_fast() { cargo.arg("--no-fail-fast"); } cargo.arg("-p").arg("rustdoc:0.0.0"); cargo.arg("--"); - cargo.args(&build.config.cmd.test_args()); + cargo.args(&builder.config.cmd.test_args()); - if build.config.quiet_tests { + if builder.config.rust.quiet_tests { cargo.arg("--quiet"); } let _time = util::timeit(); - try_run(build, &mut cargo); + try_run(builder, &mut cargo); } } fn envify(s: &str) -> String { - s.chars().map(|c| { - match c { + s.chars() + .map(|c| match c { '-' => '_', c => c, - } - }).flat_map(|c| c.to_uppercase()).collect() + }) + .flat_map(|c| c.to_uppercase()) + .collect() } /// Some test suites are run inside emulators or on remote devices, and most @@ -1511,17 +1584,16 @@ impl Step for RemoteCopyLibs { } fn run(self, builder: &Builder) { - let build = builder.build; let compiler = self.compiler; let target = self.target; - if !build.remote_tested(target) { - return + if !builder.remote_tested(target) { + return; } builder.ensure(compile::Test { compiler, target }); println!("REMOTE copy libs to emulator ({})", target); - t!(fs::create_dir_all(build.out.join("tmp"))); + t!(fs::create_dir_all(builder.config.general.out.join("tmp"))); let server = builder.ensure(tool::RemoteTestServer { compiler, target }); @@ -1529,22 +1601,20 @@ impl Step for RemoteCopyLibs { let tool = builder.tool_exe(Tool::RemoteTestClient); let mut cmd = Command::new(&tool); cmd.arg("spawn-emulator") - .arg(target) - .arg(&server) - .arg(build.out.join("tmp")); - if let Some(rootfs) = build.qemu_rootfs(target) { + .arg(target) + .arg(&server) + .arg(builder.config.general.out.join("tmp")); + if let Some(rootfs) = builder.qemu_rootfs(target) { cmd.arg(rootfs); } - build.run(&mut cmd); + builder.run(&mut cmd); // Push all our dylibs to the emulator for f in t!(builder.sysroot_libdir(compiler, target).read_dir()) { let f = t!(f); let name = 
f.file_name().into_string().unwrap(); if util::is_dylib(&name) { - build.run(Command::new(&tool) - .arg("push") - .arg(f.path())); + builder.run(Command::new(&tool).arg("push").arg(f.path())); } } } @@ -1555,7 +1625,6 @@ pub struct Distcheck; impl Step for Distcheck { type Output = (); - const ONLY_BUILD: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { run.path("distcheck") @@ -1567,10 +1636,8 @@ impl Step for Distcheck { /// Run "distcheck", a 'make check' from a tarball fn run(self, builder: &Builder) { - let build = builder.build; - println!("Distcheck"); - let dir = build.out.join("tmp").join("distcheck"); + let dir = builder.config.general.out.join("tmp").join("distcheck"); let _ = fs::remove_dir_all(&dir); t!(fs::create_dir_all(&dir)); @@ -1580,37 +1647,43 @@ impl Step for Distcheck { let mut cmd = Command::new("tar"); cmd.arg("-xzf") - .arg(builder.ensure(dist::PlainSourceTarball)) - .arg("--strip-components=1") - .current_dir(&dir); - build.run(&mut cmd); - build.run(Command::new("./configure") - .args(&build.config.configure_args) - .arg("--enable-vendor") - .current_dir(&dir)); - build.run(Command::new(build_helper::make(&build.build)) - .arg("check") - .current_dir(&dir)); + .arg(builder.ensure(dist::PlainSourceTarball)) + .arg("--strip-components=1") + .current_dir(&dir); + builder.run(&mut cmd); + builder.run( + Command::new("./configure") + .args(&builder.config.general.configure_args) + .arg("--enable-vendor") + .current_dir(&dir), + ); + builder.run( + Command::new(build_helper::make(&builder.config.general.build)) + .arg("check") + .current_dir(&dir), + ); // Now make sure that rust-src has all of libstd's dependencies println!("Distcheck rust-src"); - let dir = build.out.join("tmp").join("distcheck-src"); + let dir = builder.config.general.out.join("tmp").join("distcheck-src"); let _ = fs::remove_dir_all(&dir); t!(fs::create_dir_all(&dir)); let mut cmd = Command::new("tar"); cmd.arg("-xzf") - .arg(builder.ensure(dist::Src)) - 
.arg("--strip-components=1") - .current_dir(&dir); - build.run(&mut cmd); + .arg(builder.ensure(dist::Src)) + .arg("--strip-components=1") + .current_dir(&dir); + builder.run(&mut cmd); let toml = dir.join("rust-src/lib/rustlib/src/rust/src/libstd/Cargo.toml"); - build.run(Command::new(&build.initial_cargo) - .arg("generate-lockfile") - .arg("--manifest-path") - .arg(&toml) - .current_dir(&dir)); + builder.run( + Command::new(&builder.config.general.initial_cargo) + .arg("generate-lockfile") + .arg("--manifest-path") + .arg(&toml) + .current_dir(&dir), + ); } } @@ -1621,22 +1694,25 @@ impl Step for Bootstrap { type Output = (); const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; - const ONLY_BUILD: bool = true; /// Test the build system itself fn run(self, builder: &Builder) { - let build = builder.build; - let mut cmd = Command::new(&build.initial_cargo); + let mut cmd = Command::new(&builder.config.general.initial_cargo); cmd.arg("test") - .current_dir(build.src.join("src/bootstrap")) - .env("CARGO_TARGET_DIR", build.out.join("bootstrap")) - .env("RUSTC_BOOTSTRAP", "1") - .env("RUSTC", &build.initial_rustc); - if !build.fail_fast { + .arg("--manifest-path") + .arg(builder.config.src.join("src/bootstrap/Cargo.toml")) + .env( + "CARGO_TARGET_DIR", + builder.config.general.out.join("bootstrap-test"), + ) + .env("RUSTC_BOOTSTRAP", "1") + .env("RUSTC", &builder.config.general.initial_rustc); + if !builder.config.cmd.fail_fast() { cmd.arg("--no-fail-fast"); } - cmd.arg("--").args(&build.config.cmd.test_args()); - try_run(build, &mut cmd); + cmd.arg("--lib"); + cmd.arg("--").args(&builder.config.cmd.test_args()); + try_run(builder, &mut cmd); } fn should_run(run: ShouldRun) -> ShouldRun { diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs index 9036eb044b5a5..556cc66170d89 100644 --- a/src/bootstrap/tool.rs +++ b/src/bootstrap/tool.rs @@ -8,71 +8,20 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use std::fs; use std::env; use std::path::PathBuf; -use std::process::{Command, exit}; +use std::process::{exit, Command}; use Mode; use Compiler; -use builder::{Step, RunConfig, ShouldRun, Builder}; -use util::{copy, exe, add_lib_path}; -use compile::{self, libtest_stamp, libstd_stamp, librustc_stamp}; +use builder::{CargoCommand, Builder, RunConfig, ShouldRun, Step}; +use util::{add_lib_path, copy, exe}; +use compile; use native; use channel::GitInfo; use cache::Interned; use toolstate::ToolState; - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct CleanTools { - pub compiler: Compiler, - pub target: Interned, - pub mode: Mode, -} - -impl Step for CleanTools { - type Output = (); - - fn should_run(run: ShouldRun) -> ShouldRun { - run.never() - } - - fn run(self, builder: &Builder) { - let build = builder.build; - let compiler = self.compiler; - let target = self.target; - let mode = self.mode; - - // This is for the original compiler, but if we're forced to use stage 1, then - // std/test/rustc stamps won't exist in stage 2, so we need to get those from stage 1, since - // we copy the libs forward. - let tools_dir = build.stage_out(compiler, Mode::Tool); - let compiler = if builder.force_use_stage1(compiler, target) { - builder.compiler(1, compiler.host) - } else { - compiler - }; - - for &cur_mode in &[Mode::Libstd, Mode::Libtest, Mode::Librustc] { - let stamp = match cur_mode { - Mode::Libstd => libstd_stamp(build, compiler, target), - Mode::Libtest => libtest_stamp(build, compiler, target), - Mode::Librustc => librustc_stamp(build, compiler, target), - _ => panic!(), - }; - - if build.clear_if_dirty(&tools_dir, &stamp) { - break; - } - - // If we are a rustc tool, and std changed, we also need to clear ourselves out -- our - // dependencies depend on std. Therefore, we iterate up until our own mode. 
- if mode == cur_mode { - break; - } - } - } -} +use fs; #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] struct ToolBuild { @@ -96,7 +45,6 @@ impl Step for ToolBuild { /// This will build the specified tool with the specified `host` compiler in /// `stage` into the normal cargo output directory. fn run(self, builder: &Builder) -> Option { - let build = builder.build; let compiler = self.compiler; let target = self.target; let tool = self.tool; @@ -104,69 +52,67 @@ impl Step for ToolBuild { let is_ext_tool = self.is_ext_tool; match self.mode { - Mode::Libstd => builder.ensure(compile::Std { compiler, target }), - Mode::Libtest => builder.ensure(compile::Test { compiler, target }), - Mode::Librustc => builder.ensure(compile::Rustc { compiler, target }), - Mode::Tool => panic!("unexpected Mode::Tool for tool build") + Mode::TestTool => builder.ensure(compile::Test { compiler, target }), + Mode::RustcTool => { + // because this means a proc macro tool, too, we need to build librustc for both the + // compiler host and the target. 
+ builder.ensure(compile::Rustc { compiler, target: compiler.host }); + builder.ensure(compile::Rustc { compiler, target }); + }, + _ => panic!("unexpected mode for tool {:?}", self.mode), } - let _folder = build.fold_output(|| format!("stage{}-{}", compiler.stage, tool)); - println!("Building stage{} tool {} ({})", compiler.stage, tool, target); - - let mut cargo = prepare_tool_cargo(builder, compiler, target, "build", path); - let is_expected = build.try_run(&mut cargo); - build.save_toolstate(tool, if is_expected { - ToolState::TestFail - } else { - ToolState::BuildFail - }); + let _folder = builder.fold_output(|| format!("stage{}-{}", compiler.stage, tool)); + println!( + "Building stage{} tool {} ({})", + compiler.stage, tool, target + ); + + let mut cargo = prepare_tool_cargo(builder, compiler, self.mode, target, "build", path); + let is_expected = builder.try_run(&mut cargo); + builder.save_toolstate( + tool, + if is_expected { + ToolState::TestFail + } else { + ToolState::BuildFail + }, + ); if !is_expected { if !is_ext_tool { - exit(1); + if cfg!(test) { + panic!("unexpected failure; would have aborted"); + } else { + exit(1); + } } else { return None; } } else { - let cargo_out = build.cargo_out(compiler, Mode::Tool, target) + let cargo_out = builder + .cargo_out(compiler, self.mode, target) .join(exe(tool, &compiler.host)); - let bin = build.tools_dir(compiler).join(exe(tool, &compiler.host)); + let bin = builder.tools_dir(compiler).join(exe(tool, &compiler.host)); copy(&cargo_out, &bin); Some(bin) } } } -pub fn prepare_tool_cargo( - builder: &Builder, +pub fn prepare_tool_cargo<'a>( + builder: &'a Builder<'a>, compiler: Compiler, + mode: Mode, target: Interned, command: &'static str, path: &'static str, -) -> Command { - let build = builder.build; - let mut cargo = builder.cargo(compiler, Mode::Tool, target, command); - let dir = build.src.join(path); +) -> CargoCommand<'a> { + let mut cargo = builder.cargo(compiler, mode, target, command); + let dir = 
builder.config.src.join(path); cargo.arg("--manifest-path").arg(dir.join("Cargo.toml")); - // We don't want to build tools dynamically as they'll be running across - // stages and such and it's just easier if they're not dynamically linked. - cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); - - if let Some(dir) = build.openssl_install_dir(target) { - cargo.env("OPENSSL_STATIC", "1"); - cargo.env("OPENSSL_DIR", dir); - cargo.env("LIBZ_SYS_STATIC", "1"); - } - - // if tools are using lzma we want to force the build script to build its - // own copy - cargo.env("LZMA_API_STATIC", "1"); - - cargo.env("CFG_RELEASE_CHANNEL", &build.config.channel); - cargo.env("CFG_VERSION", build.rust_version()); - - let info = GitInfo::new(&build.config, &dir); + let info = GitInfo::new(&builder.config, &dir); if let Some(sha) = info.sha() { cargo.env("CFG_COMMIT_HASH", sha); } @@ -181,6 +127,8 @@ pub fn prepare_tool_cargo( macro_rules! tool { ($($name:ident, $path:expr, $tool_name:expr, $mode:expr;)+) => { + // not all tools are directly used through the enum, some are only ensured directly + #[allow(unused)] #[derive(Copy, Clone)] pub enum Tool { $( @@ -188,14 +136,22 @@ macro_rules! tool { )+ } + impl Tool { + fn mode(self) -> Mode { + match self { + $(Tool::$name => $mode,)+ + } + } + } + impl<'a> Builder<'a> { pub fn tool_exe(&self, tool: Tool) -> PathBuf { let stage = self.tool_default_stage(tool); match tool { $(Tool::$name => self.ensure($name { - compiler: self.compiler(stage, self.build.build), - target: self.build.build, + compiler: self.compiler(stage, self.config.general.build), + target: self.config.general.build, }), )+ } @@ -229,7 +185,8 @@ macro_rules! tool { fn make_run(run: RunConfig) { run.builder.ensure($name { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build), + compiler: run.builder.compiler(run.builder.top_stage, + run.builder.config.general.build), target: run.target, }); } @@ -250,51 +207,20 @@ macro_rules! 
tool { } tool!( - Rustbook, "src/tools/rustbook", "rustbook", Mode::Librustc; - ErrorIndex, "src/tools/error_index_generator", "error_index_generator", Mode::Librustc; - UnstableBookGen, "src/tools/unstable-book-gen", "unstable-book-gen", Mode::Libstd; - Tidy, "src/tools/tidy", "tidy", Mode::Libstd; - Linkchecker, "src/tools/linkchecker", "linkchecker", Mode::Libstd; - CargoTest, "src/tools/cargotest", "cargotest", Mode::Libstd; - Compiletest, "src/tools/compiletest", "compiletest", Mode::Libtest; - BuildManifest, "src/tools/build-manifest", "build-manifest", Mode::Libstd; - RemoteTestClient, "src/tools/remote-test-client", "remote-test-client", Mode::Libstd; - RustInstaller, "src/tools/rust-installer", "fabricate", Mode::Libstd; - RustdocTheme, "src/tools/rustdoc-themes", "rustdoc-themes", Mode::Libstd; + Rustbook, "src/tools/rustbook", "rustbook", Mode::RustcTool; + ErrorIndex, "src/tools/error_index_generator", "error_index_generator", Mode::RustcTool; + UnstableBookGen, "src/tools/unstable-book-gen", "unstable-book-gen", Mode::TestTool; + Tidy, "src/tools/tidy", "tidy", Mode::TestTool; + Linkchecker, "src/tools/linkchecker", "linkchecker", Mode::TestTool; + CargoTest, "src/tools/cargotest", "cargotest", Mode::TestTool; + Compiletest, "src/tools/compiletest", "compiletest", Mode::TestTool; + BuildManifest, "src/tools/build-manifest", "build-manifest", Mode::TestTool; + RemoteTestClient, "src/tools/remote-test-client", "remote-test-client", Mode::TestTool; + RustInstaller, "src/tools/rust-installer", "fabricate", Mode::TestTool; + RustdocTheme, "src/tools/rustdoc-themes", "rustdoc-themes", Mode::TestTool; + RemoteTestServer, "src/tools/remote-test-server", "remote-test-server", Mode::TestTool; ); -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct RemoteTestServer { - pub compiler: Compiler, - pub target: Interned, -} - -impl Step for RemoteTestServer { - type Output = PathBuf; - - fn should_run(run: ShouldRun) -> ShouldRun { - 
run.path("src/tools/remote-test-server") - } - - fn make_run(run: RunConfig) { - run.builder.ensure(RemoteTestServer { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build), - target: run.target, - }); - } - - fn run(self, builder: &Builder) -> PathBuf { - builder.ensure(ToolBuild { - compiler: self.compiler, - target: self.target, - tool: "remote-test-server", - mode: Mode::Libstd, - path: "src/tools/remote-test-server", - is_ext_tool: false, - }).expect("expected to build -- essential tool") - } -} - #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub struct Rustdoc { pub host: Interned, @@ -310,48 +236,69 @@ impl Step for Rustdoc { } fn make_run(run: RunConfig) { - run.builder.ensure(Rustdoc { - host: run.host, - }); + run.builder.ensure(Rustdoc { host: run.host }); } fn run(self, builder: &Builder) -> PathBuf { - let build = builder.build; let target_compiler = builder.compiler(builder.top_stage, self.host); let target = target_compiler.host; let build_compiler = if target_compiler.stage == 0 { - builder.compiler(0, builder.build.build) + builder.compiler(0, builder.config.general.build) } else if target_compiler.stage >= 2 { // Past stage 2, we consider the compiler to be ABI-compatible and hence capable of // building rustdoc itself. - builder.compiler(target_compiler.stage, builder.build.build) + builder.compiler(target_compiler.stage, builder.config.general.build) } else { // Similar to `compile::Assemble`, build with the previous stage's compiler. Otherwise // we'd have stageN/bin/rustc and stageN/bin/rustdoc be effectively different stage // compilers, which isn't what we want. 
- builder.compiler(target_compiler.stage - 1, builder.build.build) + builder.compiler(target_compiler.stage - 1, builder.config.general.build) }; - builder.ensure(compile::Rustc { compiler: build_compiler, target }); - - let _folder = build.fold_output(|| format!("stage{}-rustdoc", target_compiler.stage)); - println!("Building rustdoc for stage{} ({})", target_compiler.stage, target_compiler.host); + // we need the full rustc compiler for both the build compiler and the target. proc macro + // libraries are required for the build compiler, whereas we need the target compiler to be + // built because rustdoc links to it + builder.ensure(compile::Rustc { + compiler: build_compiler, + target: build_compiler.host, + }); + builder.ensure(compile::Rustc { + compiler: build_compiler, + target, + }); - let mut cargo = prepare_tool_cargo(builder, - build_compiler, - target, - "build", - "src/tools/rustdoc"); + let _folder = builder.fold_output(|| format!("stage{}-rustdoc", target_compiler.stage)); + println!( + "Building rustdoc for stage{} ({})", + target_compiler.stage, target_compiler.host + ); + + let mut cargo = prepare_tool_cargo( + builder, + build_compiler, + Mode::RustcTool, + target, + "build", + "src/tools/rustdoc", + ); // Most tools don't get debuginfo, but rustdoc should. - cargo.env("RUSTC_DEBUGINFO", builder.config.rust_debuginfo.to_string()) - .env("RUSTC_DEBUGINFO_LINES", builder.config.rust_debuginfo_lines.to_string()); - - build.run(&mut cargo); + cargo + .env( + "RUSTC_DEBUGINFO", + builder.config.rust.debuginfo().to_string(), + ) + .env( + "RUSTC_DEBUGINFO_LINES", + builder.config.rust.debuginfo_lines().to_string(), + ); + + builder.run(&mut cargo); // Cargo adds a number of paths to the dylib search path on windows, which results in // the wrong rustdoc being executed. To avoid the conflicting rustdocs, we name the "tool" // rustdoc a different name. 
- let tool_rustdoc = build.cargo_out(build_compiler, Mode::Tool, target) + let tool_rustdoc = builder + .cargo_out(build_compiler, Mode::RustcTool, target) .join(exe("rustdoc-tool-binary", &target_compiler.host)); // don't create a stage0-sysroot/bin directory. @@ -382,12 +329,14 @@ impl Step for Cargo { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path("src/tools/cargo").default_condition(builder.build.config.extended) + run.path("src/tools/cargo") + .default_condition(builder.config.general.extended) } fn make_run(run: RunConfig) { run.builder.ensure(Cargo { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build), + compiler: run.builder + .compiler(run.builder.top_stage, run.builder.config.general.build), target: run.target, }); } @@ -400,16 +349,18 @@ impl Step for Cargo { // compiler to be available, so we need to depend on that. builder.ensure(compile::Rustc { compiler: self.compiler, - target: builder.build.build, + target: builder.config.general.build, }); - builder.ensure(ToolBuild { - compiler: self.compiler, - target: self.target, - tool: "cargo", - mode: Mode::Librustc, - path: "src/tools/cargo", - is_ext_tool: false, - }).expect("expected to build -- essential tool") + builder + .ensure(ToolBuild { + compiler: self.compiler, + target: self.target, + tool: "cargo", + mode: Mode::RustcTool, + path: "src/tools/cargo", + is_ext_tool: false, + }) + .expect("expected to build -- essential tool") } } @@ -434,12 +385,13 @@ macro_rules! 
tool_extended { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path($path).default_condition(builder.build.config.extended) + run.path($path).default_condition(builder.config.general.extended) } fn make_run(run: RunConfig) { run.builder.ensure($name { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build), + compiler: run.builder.compiler( + run.builder.top_stage, run.builder.config.general.build), target: run.target, }); } @@ -450,7 +402,7 @@ macro_rules! tool_extended { compiler: $sel.compiler, target: $sel.target, tool: $tool_name, - mode: Mode::Librustc, + mode: Mode::RustcTool, path: $path, is_ext_tool: true, }) @@ -462,25 +414,12 @@ macro_rules! tool_extended { tool_extended!((self, builder), Cargofmt, rustfmt, "src/tools/rustfmt", "cargo-fmt", {}; - Clippy, clippy, "src/tools/clippy", "clippy-driver", { - // Clippy depends on procedural macros (serde), which requires a full host - // compiler to be available, so we need to depend on that. - builder.ensure(compile::Rustc { - compiler: self.compiler, - target: builder.build.build, - }); - }; + Clippy, clippy, "src/tools/clippy", "clippy-driver", {}; Miri, miri, "src/tools/miri", "miri", {}; Rls, rls, "src/tools/rls", "rls", { builder.ensure(native::Openssl { target: self.target, }); - // RLS depends on procedural macros, which requires a full host - // compiler to be available, so we need to depend on that. - builder.ensure(compile::Rustc { - compiler: self.compiler, - target: builder.build.build, - }); }; Rustfmt, rustfmt, "src/tools/rustfmt", "rustfmt", {}; ); @@ -490,8 +429,8 @@ impl<'a> Builder<'a> { /// `host`. 
pub fn tool_cmd(&self, tool: Tool) -> Command { let mut cmd = Command::new(self.tool_exe(tool)); - let compiler = self.compiler(self.tool_default_stage(tool), self.build.build); - self.prepare_tool_cmd(compiler, &mut cmd); + let compiler = self.compiler(self.tool_default_stage(tool), self.config.general.build); + self.prepare_tool_cmd(tool.mode(), compiler, &mut cmd); cmd } @@ -499,11 +438,11 @@ impl<'a> Builder<'a> { /// /// Notably this munges the dynamic library lookup path to point to the /// right location to run `compiler`. - fn prepare_tool_cmd(&self, compiler: Compiler, cmd: &mut Command) { + fn prepare_tool_cmd(&self, mode: Mode, compiler: Compiler, cmd: &mut Command) { let host = &compiler.host; let mut paths: Vec = vec![ PathBuf::from(&self.sysroot_libdir(compiler, compiler.host)), - self.cargo_out(compiler, Mode::Tool, *host).join("deps"), + self.cargo_out(compiler, mode, *host).join("deps"), ]; // On MSVC a tool may invoke a C compiler (e.g. compiletest in run-make @@ -514,7 +453,7 @@ impl<'a> Builder<'a> { let curpaths = env::split_paths(&curpaths).collect::>(); for &(ref k, ref v) in self.cc[&compiler.host].env() { if k != "PATH" { - continue + continue; } for path in env::split_paths(v) { if !curpaths.contains(&path) { diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs index 07941e588387c..eae291e1ac467 100644 --- a/src/bootstrap/util.rs +++ b/src/bootstrap/util.rs @@ -15,12 +15,12 @@ use std::env; use std::str; -use std::fs::{self, File, OpenOptions}; -use std::io::{self, Read, Write, Seek, SeekFrom}; +use std::io::{self, Write}; use std::path::{Path, PathBuf}; use std::process::Command; -use std::time::{SystemTime, Instant}; +use std::time::{Instant, SystemTime}; +use fs; use filetime::{self, FileTime}; /// Returns the `name` as the filename of a static library for `target`. 
@@ -34,15 +34,20 @@ pub fn staticlib(name: &str, target: &str) -> String { /// Copies a file from `src` to `dst` pub fn copy(src: &Path, dst: &Path) { + if cfg!(test) { return; } let _ = fs::remove_file(&dst); // Attempt to "easy copy" by creating a hard link (symlinks don't work on // windows), but if that fails just fall back to a slow `copy` operation. if let Ok(()) = fs::hard_link(src, dst) { - return + return; } if let Err(e) = fs::copy(src, dst) { - panic!("failed to copy `{}` to `{}`: {}", src.display(), - dst.display(), e) + panic!( + "failed to copy `{}` to `{}`: {}", + src.display(), + dst.display(), + e + ) } let metadata = t!(src.metadata()); t!(fs::set_permissions(dst, metadata.permissions())); @@ -54,26 +59,22 @@ pub fn copy(src: &Path, dst: &Path) { /// Search-and-replaces within a file. (Not maximally efficiently: allocates a /// new string for each replacement.) pub fn replace_in_file(path: &Path, replacements: &[(&str, &str)]) { - let mut contents = String::new(); - let mut file = t!(OpenOptions::new().read(true).write(true).open(path)); - t!(file.read_to_string(&mut contents)); + let mut contents = t!(fs::read_string(path)); for &(target, replacement) in replacements { contents = contents.replace(target, replacement); } - t!(file.seek(SeekFrom::Start(0))); - t!(file.set_len(0)); - t!(file.write_all(contents.as_bytes())); + t!(fs::write(path, contents.as_bytes())); } pub fn read_stamp_file(stamp: &Path) -> Vec { + if cfg!(test) { return Vec::new(); } let mut paths = Vec::new(); - let mut contents = Vec::new(); - t!(t!(File::open(stamp)).read_to_end(&mut contents)); + let contents = t!(fs::read(stamp)); // This is the method we use for extracting paths from the stamp file passed to us. See // run_cargo for more information (in compile.rs). 
for part in contents.split(|b| *b == 0) { if part.is_empty() { - continue + continue; } let path = PathBuf::from(t!(str::from_utf8(part))); paths.push(path); @@ -146,7 +147,11 @@ pub fn is_dylib(name: &str) -> bool { /// Returns the corresponding relative library directory that the compiler's /// dylibs will be found in. pub fn libdir(target: &str) -> &'static str { - if target.contains("windows") {"bin"} else {"lib"} + if target.contains("windows") { + "bin" + } else { + "lib" + } } /// Adds a list of lookup paths to `cmd`'s dynamic library lookup path. @@ -180,7 +185,9 @@ pub fn dylib_path() -> Vec { /// `push` all components to `buf`. On windows, append `.exe` to the last component. pub fn push_exe_path(mut buf: PathBuf, components: &[&str]) -> PathBuf { - let (&file, components) = components.split_last().expect("at least one component required"); + let (&file, components) = components + .split_last() + .expect("at least one component required"); let mut file = file.to_owned(); if cfg!(windows) { @@ -206,9 +213,11 @@ pub fn timeit() -> TimeIt { impl Drop for TimeIt { fn drop(&mut self) { let time = self.0.elapsed(); - println!("\tfinished in {}.{:03}", - time.as_secs(), - time.subsec_nanos() / 1_000_000); + println!( + "\tfinished in {}.{:03}", + time.as_secs(), + time.subsec_nanos() / 1_000_000 + ); } } @@ -272,22 +281,25 @@ pub fn symlink_dir(src: &Path, dest: &Path) -> io::Result<()> { } extern "system" { - fn CreateFileW(lpFileName: LPCWSTR, - dwDesiredAccess: DWORD, - dwShareMode: DWORD, - lpSecurityAttributes: LPSECURITY_ATTRIBUTES, - dwCreationDisposition: DWORD, - dwFlagsAndAttributes: DWORD, - hTemplateFile: HANDLE) - -> HANDLE; - fn DeviceIoControl(hDevice: HANDLE, - dwIoControlCode: DWORD, - lpInBuffer: LPVOID, - nInBufferSize: DWORD, - lpOutBuffer: LPVOID, - nOutBufferSize: DWORD, - lpBytesReturned: LPDWORD, - lpOverlapped: LPOVERLAPPED) -> BOOL; + fn CreateFileW( + lpFileName: LPCWSTR, + dwDesiredAccess: DWORD, + dwShareMode: DWORD, + 
lpSecurityAttributes: LPSECURITY_ATTRIBUTES, + dwCreationDisposition: DWORD, + dwFlagsAndAttributes: DWORD, + hTemplateFile: HANDLE, + ) -> HANDLE; + fn DeviceIoControl( + hDevice: HANDLE, + dwIoControlCode: DWORD, + lpInBuffer: LPVOID, + nInBufferSize: DWORD, + lpOutBuffer: LPVOID, + nOutBufferSize: DWORD, + lpBytesReturned: LPDWORD, + lpOverlapped: LPOVERLAPPED, + ) -> BOOL; } fn to_u16s>(s: S) -> io::Result> { @@ -304,17 +316,18 @@ pub fn symlink_dir(src: &Path, dest: &Path) -> io::Result<()> { let path = try!(to_u16s(junction)); unsafe { - let h = CreateFileW(path.as_ptr(), - GENERIC_WRITE, - FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE, - 0 as *mut _, - OPEN_EXISTING, - FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS, - ptr::null_mut()); + let h = CreateFileW( + path.as_ptr(), + GENERIC_WRITE, + FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE, + 0 as *mut _, + OPEN_EXISTING, + FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS, + ptr::null_mut(), + ); let mut data = [0u8; MAXIMUM_REPARSE_DATA_BUFFER_SIZE]; - let db = data.as_mut_ptr() - as *mut REPARSE_MOUNTPOINT_DATA_BUFFER; + let db = data.as_mut_ptr() as *mut REPARSE_MOUNTPOINT_DATA_BUFFER; let buf = &mut (*db).ReparseTarget as *mut u16; let mut i = 0; // FIXME: this conversion is very hacky @@ -329,17 +342,19 @@ pub fn symlink_dir(src: &Path, dest: &Path) -> io::Result<()> { (*db).ReparseTag = IO_REPARSE_TAG_MOUNT_POINT; (*db).ReparseTargetMaximumLength = (i * 2) as WORD; (*db).ReparseTargetLength = ((i - 1) * 2) as WORD; - (*db).ReparseDataLength = - (*db).ReparseTargetLength as DWORD + 12; + (*db).ReparseDataLength = (*db).ReparseTargetLength as DWORD + 12; let mut ret = 0; - let res = DeviceIoControl(h as *mut _, - FSCTL_SET_REPARSE_POINT, - data.as_ptr() as *mut _, - (*db).ReparseDataLength + 8, - ptr::null_mut(), 0, - &mut ret, - ptr::null_mut()); + let res = DeviceIoControl( + h as *mut _, + FSCTL_SET_REPARSE_POINT, + data.as_ptr() as *mut _, + 
(*db).ReparseDataLength + 8, + ptr::null_mut(), + 0, + &mut ret, + ptr::null_mut(), + ); if res == 0 { Err(io::Error::last_os_error()) @@ -376,7 +391,10 @@ impl OutputFolder { // the ANSI escape code to clear from the cursor to end of line. // Travis seems to have trouble when _not_ using "\r\x1b[0K", that will // randomly put lines to the top of the webpage. - print!("travis_fold:start:{0}\r\x1b[0Ktravis_time:start:{0}\r\x1b[0K", name); + print!( + "travis_fold:start:{0}\r\x1b[0Ktravis_time:start:{0}\r\x1b[0K", + name + ); OutputFolder { name, start_time: SystemTime::now(), @@ -402,7 +420,7 @@ impl Drop for OutputFolder { let finish = end_time.duration_since(UNIX_EPOCH); println!( "travis_fold:end:{0}\r\x1b[0K\n\ - travis_time:end:{0}:start={1},finish={2},duration={3}\r\x1b[0K", + travis_time:end:{0}:start={1},finish={2},duration={3}\r\x1b[0K", self.name, to_nanos(start), to_nanos(finish), @@ -424,6 +442,12 @@ pub enum CiEnv { AppVeyor, } +impl Default for CiEnv { + fn default() -> CiEnv { + CiEnv::None + } +} + impl CiEnv { /// Obtains the current CI environment. 
pub fn current() -> CiEnv { diff --git a/src/ci/docker/arm-android/Dockerfile b/src/ci/docker/arm-android/Dockerfile index f2773a720cfbc..e10ccd56a4a54 100644 --- a/src/ci/docker/arm-android/Dockerfile +++ b/src/ci/docker/arm-android/Dockerfile @@ -31,9 +31,7 @@ ENV PATH=$PATH:/android/sdk/platform-tools ENV TARGETS=arm-linux-androideabi -ENV RUST_CONFIGURE_ARGS \ - --target=$TARGETS \ - --arm-linux-androideabi-ndk=/android/ndk/arm-14 +ENV RUST_CONFIGURE_ARGS --arm-linux-androideabi-ndk=/android/ndk/arm-14 ENV SCRIPT python2.7 ../x.py test --target $TARGETS diff --git a/src/ci/docker/armhf-gnu/Dockerfile b/src/ci/docker/armhf-gnu/Dockerfile index 191f8e3a2895d..2b7624d53ee05 100644 --- a/src/ci/docker/armhf-gnu/Dockerfile +++ b/src/ci/docker/armhf-gnu/Dockerfile @@ -76,9 +76,7 @@ RUN curl -O http://ftp.nl.debian.org/debian/dists/jessie/main/installer-armhf/cu COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -ENV RUST_CONFIGURE_ARGS \ - --target=arm-unknown-linux-gnueabihf \ - --qemu-armhf-rootfs=/tmp/rootfs +ENV RUST_CONFIGURE_ARGS --qemu-armhf-rootfs=/tmp/rootfs ENV SCRIPT python2.7 ../x.py test --target arm-unknown-linux-gnueabihf ENV NO_CHANGE_USER=1 diff --git a/src/ci/docker/asmjs/Dockerfile b/src/ci/docker/asmjs/Dockerfile index ff0708459bc89..2a0901691a55a 100644 --- a/src/ci/docker/asmjs/Dockerfile +++ b/src/ci/docker/asmjs/Dockerfile @@ -29,6 +29,6 @@ ENV EM_CONFIG=/emsdk-portable/.emscripten ENV TARGETS=asmjs-unknown-emscripten -ENV RUST_CONFIGURE_ARGS --target=$TARGETS --enable-emscripten +ENV RUST_CONFIGURE_ARGS --enable-emscripten ENV SCRIPT python2.7 ../x.py test --target $TARGETS diff --git a/src/ci/docker/disabled/aarch64-gnu/Dockerfile b/src/ci/docker/disabled/aarch64-gnu/Dockerfile index fedb4094c8aaa..b2a3ba3ec2600 100644 --- a/src/ci/docker/disabled/aarch64-gnu/Dockerfile +++ b/src/ci/docker/disabled/aarch64-gnu/Dockerfile @@ -74,7 +74,6 @@ COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh ENV RUST_CONFIGURE_ARGS \ - 
--target=aarch64-unknown-linux-gnu \ --qemu-aarch64-rootfs=/tmp/rootfs ENV SCRIPT python2.7 ../x.py test --target aarch64-unknown-linux-gnu ENV NO_CHANGE_USER=1 diff --git a/src/ci/docker/disabled/dist-aarch64-android/Dockerfile b/src/ci/docker/disabled/dist-aarch64-android/Dockerfile index ce5e8cfaf0958..a7903b6f42501 100644 --- a/src/ci/docker/disabled/dist-aarch64-android/Dockerfile +++ b/src/ci/docker/disabled/dist-aarch64-android/Dockerfile @@ -14,8 +14,6 @@ ENV DEP_Z_ROOT=/android/ndk/arm64-21/sysroot/usr/ ENV HOSTS=aarch64-linux-android ENV RUST_CONFIGURE_ARGS \ - --host=$HOSTS \ - --target=$HOSTS \ --aarch64-linux-android-ndk=/android/ndk/arm64-21 \ --disable-rpath \ --enable-extended \ diff --git a/src/ci/docker/disabled/dist-armv7-android/Dockerfile b/src/ci/docker/disabled/dist-armv7-android/Dockerfile index 3177fa2147fa1..c02a5e5a09542 100644 --- a/src/ci/docker/disabled/dist-armv7-android/Dockerfile +++ b/src/ci/docker/disabled/dist-armv7-android/Dockerfile @@ -20,8 +20,6 @@ ENV DEP_Z_ROOT=/android/ndk/arm-14/sysroot/usr/ ENV HOSTS=armv7-linux-androideabi ENV RUST_CONFIGURE_ARGS \ - --host=$HOSTS \ - --target=$HOSTS \ --armv7-linux-androideabi-ndk=/android/ndk/arm \ --disable-rpath \ --enable-extended \ diff --git a/src/ci/docker/disabled/dist-i686-android/Dockerfile b/src/ci/docker/disabled/dist-i686-android/Dockerfile index ace9c4feb4f3b..04e83a431c455 100644 --- a/src/ci/docker/disabled/dist-i686-android/Dockerfile +++ b/src/ci/docker/disabled/dist-i686-android/Dockerfile @@ -20,8 +20,6 @@ ENV DEP_Z_ROOT=/android/ndk/x86-14/sysroot/usr/ ENV HOSTS=i686-linux-android ENV RUST_CONFIGURE_ARGS \ - --host=$HOSTS \ - --target=$HOSTS \ --i686-linux-android-ndk=/android/ndk/x86 \ --disable-rpath \ --enable-extended \ diff --git a/src/ci/docker/disabled/dist-x86_64-android/Dockerfile b/src/ci/docker/disabled/dist-x86_64-android/Dockerfile index 322d26f0adc4c..937301864cd05 100644 --- a/src/ci/docker/disabled/dist-x86_64-android/Dockerfile +++ 
b/src/ci/docker/disabled/dist-x86_64-android/Dockerfile @@ -14,8 +14,6 @@ ENV DEP_Z_ROOT=/android/ndk/x86_64-21/sysroot/usr/ ENV HOSTS=x86_64-linux-android ENV RUST_CONFIGURE_ARGS \ - --host=$HOSTS \ - --target=$HOSTS \ --x86_64-linux-android-ndk=/android/ndk/x86_64-21 \ --disable-rpath \ --enable-extended \ diff --git a/src/ci/docker/disabled/dist-x86_64-dragonfly/Dockerfile b/src/ci/docker/disabled/dist-x86_64-dragonfly/Dockerfile index f3509efdb988b..dbff9e32e1311 100644 --- a/src/ci/docker/disabled/dist-x86_64-dragonfly/Dockerfile +++ b/src/ci/docker/disabled/dist-x86_64-dragonfly/Dockerfile @@ -32,5 +32,5 @@ ENV \ ENV HOSTS=x86_64-unknown-dragonfly -ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended +ENV RUST_CONFIGURE_ARGS --enable-extended ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS diff --git a/src/ci/docker/disabled/dist-x86_64-haiku/Dockerfile b/src/ci/docker/disabled/dist-x86_64-haiku/Dockerfile index 621976b5cbe3d..440afd7c97f5e 100644 --- a/src/ci/docker/disabled/dist-x86_64-haiku/Dockerfile +++ b/src/ci/docker/disabled/dist-x86_64-haiku/Dockerfile @@ -42,8 +42,8 @@ RUN sh /scripts/sccache.sh ENV HOST=x86_64-unknown-haiku ENV TARGET=target.$HOST -ENV RUST_CONFIGURE_ARGS --host=$HOST --target=$HOST --disable-jemalloc \ +ENV RUST_CONFIGURE_ARGS --disable-jemalloc \ --set=$TARGET.cc=x86_64-unknown-haiku-gcc \ --set=$TARGET.cxx=x86_64-unknown-haiku-g++ \ --set=$TARGET.llvm-config=/bin/llvm-config-haiku -ENV SCRIPT python2.7 ../x.py dist +ENV SCRIPT python2.7 ../x.py dist --host=$HOST --target=$HOST diff --git a/src/ci/docker/disabled/dist-x86_64-redox/Dockerfile b/src/ci/docker/disabled/dist-x86_64-redox/Dockerfile index ed19939545f69..f4c25f791bc39 100644 --- a/src/ci/docker/disabled/dist-x86_64-redox/Dockerfile +++ b/src/ci/docker/disabled/dist-x86_64-redox/Dockerfile @@ -18,5 +18,5 @@ ENV \ CC_x86_64_unknown_redox=x86_64-unknown-redox-gcc \ CXX_x86_64_unknown_redox=x86_64-unknown-redox-g++ -ENV RUST_CONFIGURE_ARGS 
--target=x86_64-unknown-redox --enable-extended +ENV RUST_CONFIGURE_ARGS --enable-extended ENV SCRIPT python2.7 ../x.py dist --target x86_64-unknown-redox diff --git a/src/ci/docker/disabled/wasm32-exp/Dockerfile b/src/ci/docker/disabled/wasm32-exp/Dockerfile index 8653b0e8b465e..420d47b314c0f 100644 --- a/src/ci/docker/disabled/wasm32-exp/Dockerfile +++ b/src/ci/docker/disabled/wasm32-exp/Dockerfile @@ -30,6 +30,6 @@ ENV EM_CONFIG=/root/.emscripten ENV TARGETS=wasm32-experimental-emscripten -ENV RUST_CONFIGURE_ARGS --target=$TARGETS --experimental-targets=WebAssembly +ENV RUST_CONFIGURE_ARGS --experimental-targets=WebAssembly ENV SCRIPT python2.7 ../x.py test --target $TARGETS diff --git a/src/ci/docker/disabled/wasm32/Dockerfile b/src/ci/docker/disabled/wasm32/Dockerfile index 7f6f8ae08827d..6ac90d17450a3 100644 --- a/src/ci/docker/disabled/wasm32/Dockerfile +++ b/src/ci/docker/disabled/wasm32/Dockerfile @@ -29,7 +29,4 @@ ENV BINARYEN_ROOT=/emsdk-portable/clang/e1.37.13_64bit/binaryen/ ENV EM_CONFIG=/emsdk-portable/.emscripten ENV TARGETS=wasm32-unknown-emscripten - -ENV RUST_CONFIGURE_ARGS --target=$TARGETS - ENV SCRIPT python2.7 ../x.py test --target $TARGETS diff --git a/src/ci/docker/dist-aarch64-linux/Dockerfile b/src/ci/docker/dist-aarch64-linux/Dockerfile index 841d3012125f6..dbc319312aa9f 100644 --- a/src/ci/docker/dist-aarch64-linux/Dockerfile +++ b/src/ci/docker/dist-aarch64-linux/Dockerfile @@ -32,5 +32,5 @@ ENV CC_aarch64_unknown_linux_gnu=aarch64-unknown-linux-gnueabi-gcc \ ENV HOSTS=aarch64-unknown-linux-gnu -ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended +ENV RUST_CONFIGURE_ARGS --enable-extended ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS diff --git a/src/ci/docker/dist-android/Dockerfile b/src/ci/docker/dist-android/Dockerfile index 5d7545a3c2a95..aa5da136758a0 100644 --- a/src/ci/docker/dist-android/Dockerfile +++ b/src/ci/docker/dist-android/Dockerfile @@ -21,7 +21,6 @@ ENV TARGETS=$TARGETS,aarch64-linux-android ENV 
TARGETS=$TARGETS,x86_64-linux-android ENV RUST_CONFIGURE_ARGS \ - --target=$TARGETS \ --enable-extended \ --arm-linux-androideabi-ndk=/android/ndk/arm-14 \ --armv7-linux-androideabi-ndk=/android/ndk/arm-14 \ diff --git a/src/ci/docker/dist-arm-linux/Dockerfile b/src/ci/docker/dist-arm-linux/Dockerfile index ecd5090ea05fc..89f7f85cb3b17 100644 --- a/src/ci/docker/dist-arm-linux/Dockerfile +++ b/src/ci/docker/dist-arm-linux/Dockerfile @@ -32,5 +32,5 @@ ENV CC_arm_unknown_linux_gnueabi=arm-unknown-linux-gnueabi-gcc \ ENV HOSTS=arm-unknown-linux-gnueabi -ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended +ENV RUST_CONFIGURE_ARGS --enable-extended ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS diff --git a/src/ci/docker/dist-armhf-linux/Dockerfile b/src/ci/docker/dist-armhf-linux/Dockerfile index 5bbd17bd41420..e0c1b9a9e8589 100644 --- a/src/ci/docker/dist-armhf-linux/Dockerfile +++ b/src/ci/docker/dist-armhf-linux/Dockerfile @@ -32,5 +32,5 @@ ENV CC_arm_unknown_linux_gnueabihf=arm-unknown-linux-gnueabihf-gcc \ ENV HOSTS=arm-unknown-linux-gnueabihf -ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended +ENV RUST_CONFIGURE_ARGS --enable-extended ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS diff --git a/src/ci/docker/dist-armv7-linux/Dockerfile b/src/ci/docker/dist-armv7-linux/Dockerfile index ea9034d717885..e7d4f464ffcd2 100644 --- a/src/ci/docker/dist-armv7-linux/Dockerfile +++ b/src/ci/docker/dist-armv7-linux/Dockerfile @@ -32,5 +32,5 @@ ENV CC_armv7_unknown_linux_gnueabihf=armv7-unknown-linux-gnueabihf-gcc \ ENV HOSTS=armv7-unknown-linux-gnueabihf -ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended +ENV RUST_CONFIGURE_ARGS --enable-extended ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS diff --git a/src/ci/docker/dist-i586-gnu-i586-i686-musl/Dockerfile b/src/ci/docker/dist-i586-gnu-i586-i686-musl/Dockerfile index 035846b4f6437..7bcc649f4aa5c 100644 --- 
a/src/ci/docker/dist-i586-gnu-i586-i686-musl/Dockerfile +++ b/src/ci/docker/dist-i586-gnu-i586-i686-musl/Dockerfile @@ -30,7 +30,6 @@ COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh ENV RUST_CONFIGURE_ARGS \ - --target=i686-unknown-linux-musl,i586-unknown-linux-gnu \ --musl-root-i586=/musl-i586 \ --musl-root-i686=/musl-i686 \ --enable-extended @@ -46,8 +45,7 @@ ENV CFLAGS_i586_unknown_linux_gnu=-Wa,-mrelax-relocations=no # https://github.com/alexcrichton/cc-rs/pull/281 ENV CFLAGS_i586_unknown_linux_musl="-Wa,-mrelax-relocations=no -Wl,-melf_i386" -ENV TARGETS=i586-unknown-linux-gnu -ENV TARGETS=$TARGETS,i686-unknown-linux-musl +ENV TARGETS=i586-unknown-linux-gnu,i686-unknown-linux-musl ENV SCRIPT \ python2.7 ../x.py test --target $TARGETS && \ diff --git a/src/ci/docker/dist-i686-freebsd/Dockerfile b/src/ci/docker/dist-i686-freebsd/Dockerfile index 673fa4c0c4bc0..1f595ba7a290f 100644 --- a/src/ci/docker/dist-i686-freebsd/Dockerfile +++ b/src/ci/docker/dist-i686-freebsd/Dockerfile @@ -29,5 +29,5 @@ ENV \ ENV HOSTS=i686-unknown-freebsd -ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended +ENV RUST_CONFIGURE_ARGS --enable-extended ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS diff --git a/src/ci/docker/dist-i686-linux/Dockerfile b/src/ci/docker/dist-i686-linux/Dockerfile index 5e405aa72e83d..76dbb7f56b1c2 100644 --- a/src/ci/docker/dist-i686-linux/Dockerfile +++ b/src/ci/docker/dist-i686-linux/Dockerfile @@ -82,13 +82,11 @@ RUN sh /scripts/sccache.sh ENV HOSTS=i686-unknown-linux-gnu ENV RUST_CONFIGURE_ARGS \ - --host=$HOSTS \ --enable-extended \ --enable-sanitizers \ --enable-profiler \ --enable-emscripten \ - --build=i686-unknown-linux-gnu -ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS +ENV SCRIPT python2.7 ../x.py dist --build $HOSTS --host $HOSTS --target $HOSTS # This is the only builder which will create source tarballs ENV DIST_SRC 1 diff --git a/src/ci/docker/dist-mips-linux/Dockerfile 
b/src/ci/docker/dist-mips-linux/Dockerfile index 94a3cf8a38201..37ab5bdcce555 100644 --- a/src/ci/docker/dist-mips-linux/Dockerfile +++ b/src/ci/docker/dist-mips-linux/Dockerfile @@ -22,5 +22,5 @@ RUN sh /scripts/sccache.sh ENV HOSTS=mips-unknown-linux-gnu -ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended +ENV RUST_CONFIGURE_ARGS --enable-extended ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS diff --git a/src/ci/docker/dist-mips64-linux/Dockerfile b/src/ci/docker/dist-mips64-linux/Dockerfile index 0b0dfff1fe362..a5180780b2259 100644 --- a/src/ci/docker/dist-mips64-linux/Dockerfile +++ b/src/ci/docker/dist-mips64-linux/Dockerfile @@ -21,5 +21,5 @@ RUN sh /scripts/sccache.sh ENV HOSTS=mips64-unknown-linux-gnuabi64 -ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended +ENV RUST_CONFIGURE_ARGS --enable-extended ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS diff --git a/src/ci/docker/dist-mips64el-linux/Dockerfile b/src/ci/docker/dist-mips64el-linux/Dockerfile index 1810b1cdc5ab7..d38ed24f6255f 100644 --- a/src/ci/docker/dist-mips64el-linux/Dockerfile +++ b/src/ci/docker/dist-mips64el-linux/Dockerfile @@ -22,5 +22,5 @@ RUN sh /scripts/sccache.sh ENV HOSTS=mips64el-unknown-linux-gnuabi64 -ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended +ENV RUST_CONFIGURE_ARGS --enable-extended ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS diff --git a/src/ci/docker/dist-mipsel-linux/Dockerfile b/src/ci/docker/dist-mipsel-linux/Dockerfile index f5be074847586..491c57ba67737 100644 --- a/src/ci/docker/dist-mipsel-linux/Dockerfile +++ b/src/ci/docker/dist-mipsel-linux/Dockerfile @@ -21,5 +21,5 @@ RUN sh /scripts/sccache.sh ENV HOSTS=mipsel-unknown-linux-gnu -ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended +ENV RUST_CONFIGURE_ARGS --enable-extended ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS diff --git a/src/ci/docker/dist-powerpc-linux/Dockerfile b/src/ci/docker/dist-powerpc-linux/Dockerfile 
index 14ce3654fce7f..c503f2af9cdaa 100644 --- a/src/ci/docker/dist-powerpc-linux/Dockerfile +++ b/src/ci/docker/dist-powerpc-linux/Dockerfile @@ -34,7 +34,7 @@ ENV \ ENV HOSTS=powerpc-unknown-linux-gnu -ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended +ENV RUST_CONFIGURE_ARGS --enable-extended ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS # FIXME(#36150) this will fail the bootstrap. Probably means something bad is diff --git a/src/ci/docker/dist-powerpc64-linux/Dockerfile b/src/ci/docker/dist-powerpc64-linux/Dockerfile index 1f6e83e2f49e5..4a3691777360b 100644 --- a/src/ci/docker/dist-powerpc64-linux/Dockerfile +++ b/src/ci/docker/dist-powerpc64-linux/Dockerfile @@ -35,5 +35,5 @@ ENV \ ENV HOSTS=powerpc64-unknown-linux-gnu -ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended +ENV RUST_CONFIGURE_ARGS --enable-extended ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS diff --git a/src/ci/docker/dist-powerpc64le-linux/Dockerfile b/src/ci/docker/dist-powerpc64le-linux/Dockerfile index d4677e180609c..bf6c8b4b71211 100644 --- a/src/ci/docker/dist-powerpc64le-linux/Dockerfile +++ b/src/ci/docker/dist-powerpc64le-linux/Dockerfile @@ -32,5 +32,5 @@ ENV \ ENV HOSTS=powerpc64le-unknown-linux-gnu -ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended +ENV RUST_CONFIGURE_ARGS --enable-extended ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS diff --git a/src/ci/docker/dist-s390x-linux/Dockerfile b/src/ci/docker/dist-s390x-linux/Dockerfile index 39478e92f7c9f..a2ebf590bab74 100644 --- a/src/ci/docker/dist-s390x-linux/Dockerfile +++ b/src/ci/docker/dist-s390x-linux/Dockerfile @@ -34,5 +34,5 @@ ENV \ ENV HOSTS=s390x-unknown-linux-gnu -ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended +ENV RUST_CONFIGURE_ARGS --enable-extended ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS diff --git a/src/ci/docker/dist-various-1/Dockerfile b/src/ci/docker/dist-various-1/Dockerfile index 
c83f101d0ac41..a23153645cde2 100644 --- a/src/ci/docker/dist-various-1/Dockerfile +++ b/src/ci/docker/dist-various-1/Dockerfile @@ -89,7 +89,6 @@ ENV CC_mipsel_unknown_linux_musl=mipsel-openwrt-linux-gcc \ CFLAGS_armv5te_unknown_linux_gnueabi="-march=armv5te -marm -mfloat-abi=soft" ENV RUST_CONFIGURE_ARGS \ - --target=$TARGETS \ --musl-root-arm=/musl-arm \ --musl-root-armhf=/musl-armhf \ --musl-root-armv7=/musl-armv7 \ diff --git a/src/ci/docker/dist-various-2/Dockerfile b/src/ci/docker/dist-various-2/Dockerfile index 5f342eb570502..4505a60e46396 100644 --- a/src/ci/docker/dist-various-2/Dockerfile +++ b/src/ci/docker/dist-various-2/Dockerfile @@ -55,5 +55,5 @@ ENV TARGETS=$TARGETS,x86_64-sun-solaris ENV TARGETS=$TARGETS,x86_64-unknown-linux-gnux32 ENV TARGETS=$TARGETS,x86_64-unknown-cloudabi -ENV RUST_CONFIGURE_ARGS --target=$TARGETS --enable-extended +ENV RUST_CONFIGURE_ARGS --enable-extended ENV SCRIPT python2.7 ../x.py dist --target $TARGETS diff --git a/src/ci/docker/dist-x86_64-freebsd/Dockerfile b/src/ci/docker/dist-x86_64-freebsd/Dockerfile index f9f5b7062f8a4..dd595a192051f 100644 --- a/src/ci/docker/dist-x86_64-freebsd/Dockerfile +++ b/src/ci/docker/dist-x86_64-freebsd/Dockerfile @@ -29,5 +29,5 @@ ENV \ ENV HOSTS=x86_64-unknown-freebsd -ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended +ENV RUST_CONFIGURE_ARGS --enable-extended ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS diff --git a/src/ci/docker/dist-x86_64-linux/Dockerfile b/src/ci/docker/dist-x86_64-linux/Dockerfile index d368a00b55bd5..86b3beeb4e596 100644 --- a/src/ci/docker/dist-x86_64-linux/Dockerfile +++ b/src/ci/docker/dist-x86_64-linux/Dockerfile @@ -82,7 +82,6 @@ RUN sh /scripts/sccache.sh ENV HOSTS=x86_64-unknown-linux-gnu ENV RUST_CONFIGURE_ARGS \ - --host=$HOSTS \ --enable-extended \ --enable-sanitizers \ --enable-profiler \ diff --git a/src/ci/docker/dist-x86_64-musl/Dockerfile b/src/ci/docker/dist-x86_64-musl/Dockerfile index c1061309c30f8..3a9ad178c6390 100644 
--- a/src/ci/docker/dist-x86_64-musl/Dockerfile +++ b/src/ci/docker/dist-x86_64-musl/Dockerfile @@ -30,7 +30,6 @@ COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh ENV RUST_CONFIGURE_ARGS \ - --target=x86_64-unknown-linux-musl \ --musl-root-x86_64=/musl-x86_64 \ --enable-extended diff --git a/src/ci/docker/dist-x86_64-netbsd/Dockerfile b/src/ci/docker/dist-x86_64-netbsd/Dockerfile index 4fd2503c31bb6..06298a12fc70a 100644 --- a/src/ci/docker/dist-x86_64-netbsd/Dockerfile +++ b/src/ci/docker/dist-x86_64-netbsd/Dockerfile @@ -33,5 +33,5 @@ ENV \ ENV HOSTS=x86_64-unknown-netbsd -ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended +ENV RUST_CONFIGURE_ARGS --enable-extended ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS diff --git a/src/ci/docker/wasm32-unknown/Dockerfile b/src/ci/docker/wasm32-unknown/Dockerfile index dc1727b7014c3..106f907bab9e8 100644 --- a/src/ci/docker/wasm32-unknown/Dockerfile +++ b/src/ci/docker/wasm32-unknown/Dockerfile @@ -22,7 +22,6 @@ RUN sh /scripts/sccache.sh ENV TARGETS=wasm32-unknown-unknown ENV RUST_CONFIGURE_ARGS \ - --target=$TARGETS \ --set build.nodejs=/node-v9.2.0-linux-x64/bin/node ENV SCRIPT python2.7 /checkout/x.py test --target $TARGETS \